// mold/macho/arch-x86-64.cc
// From a mirror of https://github.com/rui314/mold.git
#include "mold.h"
namespace mold::macho {
2021-12-03 12:20:25 +03:00
template <>
void StubsSection<X86_64>::copy_buf(Context<X86_64> &ctx) {
u8 *buf = ctx.buf + this->hdr.offset;
for (i64 i = 0; i < syms.size(); i++) {
// `ff 25 xx xx xx xx` is a RIP-relative indirect jump instruction,
// i.e., `jmp *IMM(%rip)`. It loads an address from la_symbol_ptr
// and jump there.
2021-12-04 06:10:18 +03:00
static_assert(X86_64::stub_size == 6);
2021-12-03 12:20:25 +03:00
buf[i * 6] = 0xff;
buf[i * 6 + 1] = 0x25;
*(ul32 *)(buf + i * 6 + 2) =
2021-12-07 15:55:04 +03:00
ctx.lazy_symbol_ptr.hdr.addr + i * X86_64::word_size -
2021-12-03 12:20:25 +03:00
(this->hdr.addr + i * 6 + 6);
}
}
// Writes the __stub_helper section: a 16-byte header followed by one
// 10-byte entry per stub. On first call, a lazy stub lands in its entry,
// which pushes a bind offset and jumps to the header; the header in turn
// calls dyld_stub_binder to resolve the symbol.
template <>
void StubHelperSection<X86_64>::copy_buf(Context<X86_64> &ctx) {
  u8 *start = ctx.buf + this->hdr.offset;
  u8 *buf = start;

  static const u8 insn0[] = {
    0x4c, 0x8d, 0x1d, 0, 0, 0, 0, // lea $__dyld_private(%rip), %r11
    0x41, 0x53,                   // push %r11
    0xff, 0x25, 0, 0, 0, 0,       // jmp *$dyld_stub_binder@GOT(%rip)
    0x90,                         // nop
  };

  static_assert(sizeof(insn0) == X86_64::stub_helper_hdr_size);
  memcpy(buf, insn0, sizeof(insn0));

  // Patch the two RIP-relative displacements in the header. The base of
  // each displacement is the end of its instruction (offsets 7 and 15
  // from the start of the section).
  *(ul32 *)(buf + 3) =
    get_symbol(ctx, "__dyld_private")->get_addr(ctx) - this->hdr.addr - 7;
  *(ul32 *)(buf + 11) =
    get_symbol(ctx, "dyld_stub_binder")->get_got_addr(ctx) - this->hdr.addr - 15;

  buf += 16;

  for (i64 i = 0; i < ctx.stubs.syms.size(); i++) {
    u8 insn[] = {
      0x68, 0, 0, 0, 0, // push $bind_offset
      0xe9, 0, 0, 0, 0, // jmp $__stub_helper
    };

    static_assert(sizeof(insn) == X86_64::stub_helper_size);

    memcpy(buf, insn, sizeof(insn));
    // Immediate of the push: offset of this symbol's record in the
    // lazy-bind opcode stream.
    *(ul32 *)(buf + 1) = ctx.stubs.bind_offsets[i];
    // Backward jump to the section header; displacement is relative to
    // the end of the 10-byte entry.
    *(ul32 *)(buf + 6) = start - buf - 10;
    buf += 10;
  }
}
// Returns the implicit addend encoded by the relocation type itself.
// The SIGNED_{1,2,4} variants denote a fixup whose target is that many
// bytes past what the stored displacement refers to; all other types
// carry no implicit addend.
static i64 get_reloc_addend(u32 type) {
  if (type == X86_64_RELOC_SIGNED_1)
    return 1;
  if (type == X86_64_RELOC_SIGNED_2)
    return 2;
  if (type == X86_64_RELOC_SIGNED_4)
    return 4;
  return 0;
}
// Reads the addend stored in the relocated location. p2size is the
// log2 of the fixup width: 2 means a 32-bit field, 3 a 64-bit field.
// The type-implied addend (SIGNED_1/2/4) is folded in as well.
static i64 read_addend(u8 *buf, const MachRel &r) {
  if (r.p2size == 2)
    return *(il32 *)(buf + r.offset) + get_reloc_addend(r.type);
  if (r.p2size == 3)
    return *(il64 *)(buf + r.offset) + get_reloc_addend(r.type);
  unreachable();
}
// Converts a raw Mach-O relocation record into our internal form.
// External relocations refer to a symbol; non-external ones refer to a
// location within the file, which we resolve to a subsection plus offset.
static Relocation<X86_64>
read_reloc(Context<X86_64> &ctx, ObjectFile<X86_64> &file,
           const MachSection &hdr, MachRel &r) {
  // Only 32-bit (p2size == 2) and 64-bit (p2size == 3) fixups exist on
  // x86-64, and PC-relative ones must be 32-bit while absolute ones must
  // be 64-bit.
  if (r.p2size != 2 && r.p2size != 3)
    Fatal(ctx) << file << ": invalid r.p2size: " << (u32)r.p2size;

  if (r.is_pcrel) {
    if (r.p2size != 2)
      Fatal(ctx) << file << ": invalid PC-relative reloc: " << r.offset;
  } else {
    if (r.p2size != 3)
      Fatal(ctx) << file << ": invalid non-PC-relative reloc: " << r.offset;
  }

  u8 *buf = (u8 *)file.mf->data + hdr.offset;
  Relocation<X86_64> rel{r.offset, (u8)r.type, (u8)r.p2size, (bool)r.is_pcrel};
  i64 addend = read_addend(buf, r);

  if (r.is_extern) {
    // Symbolic relocation: keep the symbol and the raw addend.
    rel.sym = file.syms[r.idx];
    rel.addend = addend;
    return rel;
  }

  // Section-relative relocation: recover the absolute input address the
  // addend refers to. For PC-relative fixups the CPU adds the address of
  // the *next* instruction byte (offset + 4), so undo that here.
  u32 addr;
  if (r.is_pcrel)
    addr = hdr.addr + r.offset + 4 + addend;
  else
    addr = addend;

  Subsection<X86_64> *target = file.find_subsection(ctx, addr);
  if (!target)
    Fatal(ctx) << file << ": bad relocation: " << r.offset;

  rel.subsec = target;
  rel.addend = addr - target->input_addr;
  return rel;
}
// Reads all relocation records for a section and converts each to our
// internal Relocation<X86_64> representation.
template <>
std::vector<Relocation<X86_64>>
read_relocations(Context<X86_64> &ctx, ObjectFile<X86_64> &file,
                 const MachSection &hdr) {
  MachRel *rels = (MachRel *)(file.mf->data + hdr.reloff);

  std::vector<Relocation<X86_64>> vec;
  vec.reserve(hdr.nreloc); // one conversion per raw record; avoid reallocs
  for (i64 i = 0; i < hdr.nreloc; i++)
    vec.push_back(read_reloc(ctx, file, hdr, rels[i]));
  return vec;
}
// Scans this subsection's relocations and records, per symbol, which
// auxiliary structures (GOT slot, thread pointer, stub) the output file
// will need. Dylibs referenced by an imported symbol are marked needed.
template <>
void Subsection<X86_64>::scan_relocations(Context<X86_64> &ctx) {
  for (Relocation<X86_64> &r : get_rels()) {
    Symbol<X86_64> *sym = r.sym;
    if (!sym)
      continue; // section-relative reloc; nothing to allocate

    switch (r.type) {
    case X86_64_RELOC_GOT_LOAD:
      // A local GOT_LOAD can later be relaxed to LEA in apply_reloc, so
      // a GOT slot is needed only for imported symbols.
      if (sym->is_imported)
        sym->flags |= NEEDS_GOT;
      break;
    case X86_64_RELOC_GOT:
      sym->flags |= NEEDS_GOT;
      break;
    case X86_64_RELOC_TLV:
      if (sym->is_imported)
        sym->flags |= NEEDS_THREAD_PTR;
      break;
    }

    if (sym->is_imported) {
      sym->flags |= NEEDS_STUB;
      if (sym->file->is_dylib)
        ((DylibFile<X86_64> *)sym->file)->is_needed = true;
    }
  }
}
// Applies this subsection's relocations to its copy in the output buffer.
// `buf` points at the subsection's bytes in the output file.
template <>
void Subsection<X86_64>::apply_reloc(Context<X86_64> &ctx, u8 *buf) {
  for (const Relocation<X86_64> &r : get_rels()) {
    if (r.sym && !r.sym->file) {
      Error(ctx) << "undefined symbol: " << isec.file << ": " << *r.sym;
      continue;
    }

    u64 val = 0;

    switch (r.type) {
    case X86_64_RELOC_UNSIGNED:
    case X86_64_RELOC_SIGNED:
    case X86_64_RELOC_BRANCH:
    case X86_64_RELOC_SIGNED_1:
    case X86_64_RELOC_SIGNED_2:
    case X86_64_RELOC_SIGNED_4:
      // Plain address of either the symbol or the target subsection.
      val = r.sym ? r.sym->get_addr(ctx) : r.subsec->get_addr(ctx);
      break;
    case X86_64_RELOC_GOT_LOAD:
      if (r.sym->got_idx != -1) {
        val = r.sym->get_got_addr(ctx);
      } else {
        // No GOT slot was allocated (local symbol), so relax
        // `movq sym@GOT(%rip), reg` into `leaq sym(%rip), reg`.
        // 0x8b is the MOV opcode; 0x8d is LEA.
        if (buf[r.offset - 2] != 0x8b)
          Error(ctx) << isec << ": invalid GOT_LOAD relocation";
        buf[r.offset - 2] = 0x8d;
        val = r.sym->get_addr(ctx);
      }
      break;
    case X86_64_RELOC_GOT:
      val = r.sym->get_got_addr(ctx);
      break;
    case X86_64_RELOC_TLV:
      if (r.sym->tlv_idx != -1) {
        val = r.sym->get_tlv_addr(ctx);
      } else {
        // Same MOV -> LEA relaxation as GOT_LOAD, for thread-local
        // variables without a thread pointer slot.
        if (buf[r.offset - 2] != 0x8b)
          Error(ctx) << isec << ": invalid TLV relocation";
        buf[r.offset - 2] = 0x8d;
        val = r.sym->get_addr(ctx);
      }
      break;
    default:
      Fatal(ctx) << isec << ": unknown reloc: " << (int)r.type;
    }

    val += r.addend;

    // PC-relative fixups are relative to the end of the instruction:
    // the fixup address plus 4, plus however many immediate bytes the
    // SIGNED_1/2/4 variant says follow the displacement.
    if (r.is_pcrel)
      val -= get_addr(ctx) + r.offset + 4 + get_reloc_addend(r.type);

    // Store with the width given by p2size (2 -> 32-bit, 3 -> 64-bit).
    if (r.p2size == 2)
      *(ul32 *)(buf + r.offset) = val;
    else if (r.p2size == 3)
      *(ul64 *)(buf + r.offset) = val;
    else
      unreachable();
  }
}
} // namespace mold::macho