// wasmer_compiler/engine/link.rs

1//! Linking for Universal-compiled code.
2
3use crate::{
4    FunctionExtent, get_libcall_trampoline,
5    types::{
6        relocation::{RelocationKind, RelocationLike, RelocationTarget},
7        section::SectionIndex,
8    },
9};
10use std::{
11    collections::{HashMap, HashSet},
12    ptr::{read_unaligned, write_unaligned},
13};
14
15use wasmer_types::{FunctionIndex, LocalFunctionIndex, ModuleInfo, entity::PrimaryMap};
16use wasmer_vm::{FunctionBodyPtr, SectionBodyPtr, libcalls::function_pointer};
17
/// Mask selecting the low 6 bits of a byte; used by the 6-bit relocation
/// kinds (`Abs6Bits`, `Sub6Bits`) to patch a 6-bit field while preserving
/// the upper 2 bits of the target byte.
const LOW6_BITS_MASK: u8 = 0x3f;
19
/// Applies a single relocation `r` to the machine code starting at `body`.
///
/// `body` is the base address of the function or section the relocation's
/// offset is relative to. The relocation target address is resolved from the
/// allocated functions / trampolines / sections (or from the GOT via
/// `get_got_address` when the relocation kind requires it), and the
/// appropriate bit-field of the instruction or data word at the relocation
/// site is patched in place with unaligned reads/writes.
///
/// * `riscv_pcrel_hi20s` — scratch map shared across calls: `RiscvPCRelHi20`
///   results are recorded here keyed by relocation address, so that a later
///   `RiscvPCRelLo12I` (whose target points at the HI20 site) can recover
///   the low 12 bits of the same offset.
/// * `get_got_address` — resolves a target to its GOT slot, if any.
///
/// # Panics
/// Panics when a GOT entry is required but missing, when an `Arm64Call`
/// displacement is out of range, or when the relocation kind is unsupported
/// on the current architecture.
///
/// # Safety (internal)
/// Every arm performs raw unaligned writes into executable memory; callers
/// must ensure the code is writable and the offsets produced by the compiler
/// are in-bounds for the allocated body.
#[allow(clippy::too_many_arguments)]
fn apply_relocation(
    body: usize,
    r: &impl RelocationLike,
    allocated_functions: &PrimaryMap<LocalFunctionIndex, FunctionExtent>,
    allocated_dynamic_function_trampolines: &PrimaryMap<FunctionIndex, FunctionBodyPtr>,
    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
    libcall_trampolines_sec_idx: SectionIndex,
    libcall_trampoline_len: usize,
    riscv_pcrel_hi20s: &mut HashMap<usize, u32>,
    get_got_address: &dyn Fn(RelocationTarget) -> Option<usize>,
) {
    let reloc_target = r.reloc_target();

    // Note: if the relocation needs GOT and its addend is not zero we will relax the
    // relocation and, instead of making it use the GOT entry, we will fixup the assembly to
    // use the final pointer directly, without any indirection. Also, see the comment in
    // compiler-llvm/src/object_file.rs:288.
    let target_func_address: usize = if r.kind().needs_got() && r.addend() == 0 {
        if let Some(got_address) = get_got_address(reloc_target) {
            got_address
        } else {
            panic!("No GOT entry for reloc target {reloc_target:?}")
        }
    } else {
        match reloc_target {
            RelocationTarget::LocalFunc(index) => *allocated_functions[index].ptr as usize,
            RelocationTarget::DynamicTrampoline(index) => {
                *allocated_dynamic_function_trampolines[index] as usize
            }
            RelocationTarget::LibCall(libcall) => {
                // Use the direct target of the libcall if the relocation supports
                // a full 64-bit address. Otherwise use a trampoline.
                if matches!(
                    r.kind(),
                    RelocationKind::Abs8
                        | RelocationKind::PCRel8
                        | RelocationKind::MachoArm64RelocUnsigned
                        | RelocationKind::MachoX86_64RelocUnsigned
                ) {
                    function_pointer(libcall)
                } else {
                    get_libcall_trampoline(
                        libcall,
                        allocated_sections[libcall_trampolines_sec_idx].0 as usize,
                        libcall_trampoline_len,
                    )
                }
            }
            RelocationTarget::CustomSection(custom_section) => {
                *allocated_sections[custom_section] as usize
            }
        }
    };

    // A set of addresses at which a SUBTRACTOR relocation was applied.
    //
    // NOTE(review): this set is a fresh local, and `match r.kind()` below runs
    // exactly one arm per call, so an address inserted by the
    // `MachoArm64RelocSubtractor`/`MachoX86_64RelocSubtractor` arm can never be
    // observed by the `contains` check in the `*RelocUnsigned` arm of a LATER
    // call — the check is always false as written. It looks like this state was
    // meant to persist across calls (like `riscv_pcrel_hi20s`, which is passed
    // in by the caller). TODO confirm against upstream intent before changing
    // the signature.
    let mut macho_aarch64_subtractor_addresses = HashSet::new();

    match r.kind() {
        // 6-bit absolute value: patch the low 6 bits of one byte, keep the top 2.
        RelocationKind::Abs6Bits => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u8) & !LOW6_BITS_MASK;
            write_unaligned(
                reloc_address as *mut u8,
                value | ((reloc_abs as u8) & LOW6_BITS_MASK),
            );
        },
        // Absolute addresses of 1/2/4/8 bytes: overwrite the whole field.
        RelocationKind::Abs => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u8, reloc_abs as u8);
        },
        RelocationKind::Abs2 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u16, reloc_abs as u16);
        },
        RelocationKind::Abs4 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u32, reloc_abs as u32);
        },
        RelocationKind::Abs8 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u64, reloc_abs);
        },
        // PC-relative displacements of 4/8 bytes (x86 call/jump style).
        RelocationKind::PCRel4 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u32, reloc_delta as _);
        },
        RelocationKind::PCRel8 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u64, reloc_delta);
        },
        RelocationKind::X86CallPCRel4 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            write_unaligned(reloc_address as *mut u32, reloc_delta as _);
        },
        // AArch64 B/BL: 26-bit signed word displacement in the instruction's
        // low 26 bits; the opcode lives in the top 6 bits and is preserved.
        RelocationKind::Arm64Call => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            // NOTE(review): B/BL imm26 encodes ±128 MiB (±0x800_0000 bytes);
            // this guard only rejects ≥ 0x1000_0000 (±256 MiB), so deltas in
            // (128 MiB, 256 MiB) would be silently mis-encoded — TODO confirm.
            if (reloc_delta as i64).abs() >= 0x1000_0000 {
                panic!(
                    "Relocation to big for {:?} for {:?} with {:x}, current val {:x}",
                    r.kind(),
                    r.reloc_target(),
                    reloc_delta,
                    read_unaligned(reloc_address as *mut u32)
                )
            }
            let reloc_delta = (((reloc_delta / 4) as u32) & 0x3ff_ffff)
                | (read_unaligned(reloc_address as *mut u32) & 0xfc00_0000);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        // AArch64 MOVZ/MOVK families: each relocation patches one 16-bit
        // quarter of a 64-bit value into the imm16 field at bit 5.
        RelocationKind::Arm64Movw0 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta =
                (((reloc_delta & 0xffff) as u32) << 5) | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Arm64Movw1 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((((reloc_delta >> 16) & 0xffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Arm64Movw2 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((((reloc_delta >> 32) & 0xffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Arm64Movw3 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((((reloc_delta >> 48) & 0xffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        // RISC-V AUIPC hi20: the +0x800 rounds to the nearest page so the
        // signed lo12 of the paired LO12_I relocation composes correctly.
        RelocationKind::RiscvPCRelHi20 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);

            // save for later reference with RiscvPCRelLo12I
            riscv_pcrel_hi20s.insert(reloc_address, reloc_delta as u32);

            let reloc_delta = ((reloc_delta.wrapping_add(0x800) & 0xfffff000) as u32)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        // RISC-V lo12 (I-type): `reloc_abs` here is the address of the paired
        // HI20 site; the full offset is fetched from `riscv_pcrel_hi20s`.
        RelocationKind::RiscvPCRelLo12I => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((riscv_pcrel_hi20s.get(&(reloc_abs as usize)).expect(
                "R_RISCV_PCREL_LO12_I relocation target must be a symbol with R_RISCV_PCREL_HI20",
            ) & 0xfff)
                << 20)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        // RISC-V CALL: patches an AUIPC+JALR pair held in one 64-bit word —
        // lo12 goes to bits 52.. (JALR imm), rounded hi20 to bits 12..31 (AUIPC).
        RelocationKind::RiscvCall => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((reloc_delta & 0xfff) << 52)
                | (reloc_delta.wrapping_add(0x800) & 0xfffff000)
                | read_unaligned(reloc_address as *mut u64);
            write_unaligned(reloc_address as *mut u64, reloc_delta);
        },
        // LoongArch: a 64-bit value is split across LU12I/ORI/LU32I/LU52I-style
        // instructions; each kind extracts its slice and shifts it into the
        // instruction's immediate field (bit 5 for 20-bit, bit 10 for 12/16-bit).
        RelocationKind::LArchAbsHi20 | RelocationKind::LArchPCAlaHi20 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let reloc_abs = ((((reloc_abs >> 12) & 0xfffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_abs);
        },
        RelocationKind::LArchAbsLo12 | RelocationKind::LArchPCAlaLo12 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let reloc_abs =
                (((reloc_abs & 0xfff) as u32) << 10) | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_abs);
        },
        RelocationKind::LArchAbs64Hi12 | RelocationKind::LArchPCAla64Hi12 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let reloc_abs = ((((reloc_abs >> 52) & 0xfff) as u32) << 10)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_abs);
        },
        RelocationKind::LArchAbs64Lo20 | RelocationKind::LArchPCAla64Lo20 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let reloc_abs = ((((reloc_abs >> 32) & 0xfffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_abs);
        },
        // LoongArch 36-bit call: hi20 of (delta>>2) into the first instruction,
        // lo16 into the one 4 bytes after it.
        RelocationKind::LArchCall36 => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta1 = ((((reloc_delta >> 18) & 0xfffff) as u32) << 5)
                | read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, reloc_delta1);
            let reloc_delta2 = ((((reloc_delta >> 2) & 0xffff) as u32) << 10)
                | read_unaligned((reloc_address + 4) as *mut u32);
            write_unaligned((reloc_address + 4) as *mut u32, reloc_delta2);
        },
        // AArch64 ADRP: 21-bit page delta split into immlo (2 bits at 29) and
        // immhi (19 bits at 5); existing field bits are masked out first.
        RelocationKind::Aarch64AdrPrelPgHi21 => unsafe {
            let (reloc_address, delta) = r.for_address(body, target_func_address as u64);

            let delta = delta as isize;
            assert!(
                ((-1 << 32)..(1 << 32)).contains(&delta),
                "can't generate page-relative relocation with ±4GB `adrp` instruction"
            );

            let op = read_unaligned(reloc_address as *mut u32);
            let delta = delta >> 12;
            let immlo = ((delta as u32) & 0b11) << 29;
            let immhi = (((delta as u32) >> 2) & 0x7ffff) << 5;
            let mask = !((0x7ffff << 5) | (0b11 << 29));
            let op = (op & mask) | immlo | immhi;

            write_unaligned(reloc_address as *mut u32, op);
        },
        // AArch64 ADR: same immlo/immhi split but byte-granular (no >>12).
        RelocationKind::Aarch64AdrPrelLo21 => unsafe {
            let (reloc_address, delta) = r.for_address(body, target_func_address as u64);

            let delta = delta as isize;
            assert!(
                ((-1 << 20)..(1 << 20)).contains(&delta),
                "can't generate an ADR_PREL_LO21 relocation with an immediate larger than 20 bits"
            );

            let op = read_unaligned(reloc_address as *mut u32);
            let immlo = ((delta as u32) & 0b11) << 29;
            let immhi = (((delta as u32) >> 2) & 0x7ffff) << 5;
            let mask = !((0x7ffff << 5) | (0b11 << 29));
            let op = (op & mask) | immlo | immhi;

            write_unaligned(reloc_address as *mut u32, op);
        },
        // AArch64 ADD (page-offset lo12): imm12 field at bit 10.
        RelocationKind::Aarch64AddAbsLo12Nc => unsafe {
            let (reloc_address, delta) = r.for_address(body, target_func_address as u64);

            let delta = delta as isize;
            let op = read_unaligned(reloc_address as *mut u32);
            let imm = ((delta as u32) & 0xfff) << 10;
            let mask = !((0xfff) << 10);
            let op = (op & mask) | imm;

            write_unaligned(reloc_address as *mut u32, op);
        },
        // AArch64 LDR/STR scaled offsets: lo12 divided by the access size
        // (16 bytes → >>4, 8 bytes → >>3) into the imm12 field at bit 10.
        RelocationKind::Aarch64Ldst128AbsLo12Nc => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((reloc_delta as u32 & 0xfff) >> 4) << 10
                | (read_unaligned(reloc_address as *mut u32) & 0xFFC003FF);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        RelocationKind::Aarch64Ldst64AbsLo12Nc => unsafe {
            let (reloc_address, reloc_delta) = r.for_address(body, target_func_address as u64);
            let reloc_delta = ((reloc_delta as u32 & 0xfff) >> 3) << 10
                | (read_unaligned(reloc_address as *mut u32) & 0xFFC003FF);
            write_unaligned(reloc_address as *mut u32, reloc_delta);
        },
        // Mach-O SUBTRACTOR: stores the value to subtract and records the site
        // (see NOTE(review) on `macho_aarch64_subtractor_addresses` above).
        RelocationKind::MachoArm64RelocSubtractor | RelocationKind::MachoX86_64RelocSubtractor => unsafe {
            let (reloc_address, reloc_sub) = r.for_address(body, target_func_address as u64);
            macho_aarch64_subtractor_addresses.insert(reloc_address);
            write_unaligned(reloc_address as *mut u64, reloc_sub);
        },
        // Mach-O GOT/TLVP page load: ADRP-style patch of the page delta from
        // the relocation site's page to the GOT entry's page.
        RelocationKind::MachoArm64RelocGotLoadPage21
        | RelocationKind::MachoArm64RelocTlvpLoadPage21 => unsafe {
            let (reloc_address, _) = r.for_address(body, target_func_address as u64);
            let target_func_page = target_func_address & !0xfff;
            let reloc_at_page = reloc_address & !0xfff;
            let pcrel = (target_func_page as isize)
                .checked_sub(reloc_at_page as isize)
                .unwrap();
            assert!(
                (-1 << 32) <= (pcrel as i64) && (pcrel as i64) < (1 << 32),
                "can't reach GOT page with ±4GB `adrp` instruction"
            );
            let val = pcrel >> 12;

            let immlo = ((val as u32) & 0b11) << 29;
            let immhi = (((val as u32) >> 2) & 0x7ffff) << 5;
            let mask = !((0x7ffff << 5) | (0b11 << 29));
            let op = read_unaligned(reloc_address as *mut u32);
            write_unaligned(reloc_address as *mut u32, (op & mask) | immlo | immhi);
        },

        // Mach-O PAGE21: verifies the instruction is an ADRP before OR-ing the
        // page-delta immediates in (assumes the imm fields are currently zero).
        RelocationKind::MachoArm64RelocPage21 => unsafe {
            let target_page: u64 =
                ((target_func_address.wrapping_add(r.addend() as _)) & !0xfff) as u64;
            let reloc_address = body.wrapping_add(r.offset() as _);
            let pc_page: u64 = (reloc_address & !0xfff) as u64;
            let page_delta = target_page - pc_page;
            let raw_instr = read_unaligned(reloc_address as *mut u32);
            assert_eq!(
                (raw_instr & 0xffffffe0),
                0x90000000,
                "raw_instr isn't an ADRP instruction"
            );

            let immlo: u32 = ((page_delta >> 12) & 0x3) as _;
            let immhi: u32 = ((page_delta >> 14) & 0x7ffff) as _;
            let fixed_instr = raw_instr | (immlo << 29) | (immhi << 5);
            write_unaligned(reloc_address as *mut u32, fixed_instr);
        },
        // Mach-O PAGEOFF12: load/store imm12 offsets are scaled by the access
        // size, which is recovered from the instruction encoding itself.
        RelocationKind::MachoArm64RelocPageoff12 => unsafe {
            let target_offset: u64 =
                ((target_func_address.wrapping_add(r.addend() as _)) & 0xfff) as u64;

            let reloc_address = body.wrapping_add(r.offset() as _);
            let raw_instr = read_unaligned(reloc_address as *mut u32);
            let imm_shift = {
                const VEC128_MASK: u32 = 0x04800000;

                const LOAD_STORE_IMM12_MASK: u32 = 0x3b000000;
                let is_load_store_imm12 = (raw_instr & LOAD_STORE_IMM12_MASK) == 0x39000000;

                if is_load_store_imm12 {
                    // Size field (bits 30-31) gives the shift; the 128-bit
                    // vector form encodes size 0 but needs a shift of 4.
                    let mut implicit_shift = raw_instr >> 30;

                    if implicit_shift == 0 && (raw_instr & VEC128_MASK) == VEC128_MASK {
                        implicit_shift = 4;
                    }

                    implicit_shift
                } else {
                    0
                }
            };

            assert_eq!(
                target_offset & ((1 << imm_shift) - 1),
                0,
                "PAGEOFF12 target is not aligned"
            );

            let encoded_imm: u32 = ((target_offset as u32) >> imm_shift) << 10;
            let fixed_instr: u32 = raw_instr | encoded_imm;
            write_unaligned(reloc_address as *mut u32, fixed_instr);
        },

        RelocationKind::MachoArm64RelocGotLoadPageoff12 => unsafe {
            // See comment at the top of the function. TLDR: if addend != 0 we can't really use the
            // GOT entry. We fixup this relocation to use a `add` rather than a `ldr` instruction,
            // skipping the indirection from the GOT.
            if r.addend() == 0 {
                // GOT entries are 8-byte aligned; encode (address >> 3) into
                // the LDR's scaled imm field.
                let (reloc_address, _) = r.for_address(body, target_func_address as u64);
                assert_eq!(target_func_address & 0b111, 0);
                let val = target_func_address >> 3;
                let imm9 = ((val & 0x1ff) << 10) as u32;
                let mask = !(0x1ff << 10);
                let op = read_unaligned(reloc_address as *mut u32);
                write_unaligned(reloc_address as *mut u32, (op & mask) | imm9);
            } else {
                // Relaxation: rewrite the 64-bit LDR (0xf94xxxxx) into an
                // `ADD reg, reg, #lo12` (0x91...) on the same register.
                let fixup_ptr = body + r.offset() as usize;
                let target_address: usize = target_func_address + r.addend() as usize;

                let raw_instr = read_unaligned(fixup_ptr as *mut u32);

                assert_eq!(
                    raw_instr & 0xfffffc00,
                    0xf9400000,
                    "raw_instr isn't a 64-bit LDR immediate (bits: {raw_instr:032b}, hex: {raw_instr:x})"
                );

                let reg: u32 = raw_instr & 0b11111;

                let mut fixup_ldr = 0x91000000 | (reg << 5) | reg;
                fixup_ldr |= ((target_address & 0xfff) as u32) << 10;

                write_unaligned(fixup_ptr as *mut u32, fixup_ldr);
            }
        },
        RelocationKind::MachoArm64RelocUnsigned | RelocationKind::MachoX86_64RelocUnsigned => unsafe {
            let (reloc_address, mut reloc_delta) = r.for_address(body, target_func_address as u64);

            // NOTE(review): always false with the current per-call set — see
            // the declaration of `macho_aarch64_subtractor_addresses` above.
            if macho_aarch64_subtractor_addresses.contains(&reloc_address) {
                reloc_delta -= read_unaligned(reloc_address as *mut u64);
            }

            write_unaligned(reloc_address as *mut u64, reloc_delta);
        },

        // 32-bit PC-relative pointer to a GOT entry; `try_from` panics if the
        // GOT is further than ±2 GiB from the relocation site.
        RelocationKind::MachoArm64RelocPointerToGot => unsafe {
            let at = body + r.offset() as usize;
            let pcrel = i32::try_from((target_func_address as isize) - (at as isize)).unwrap();
            write_unaligned(at as *mut i32, pcrel);
        },

        // Mach-O BRANCH26: validates alignment and ±128 MiB range, then ORs
        // the 26-bit word displacement into a B/BL instruction.
        RelocationKind::MachoArm64RelocBranch26 => unsafe {
            let fixup_ptr = body + r.offset() as usize;
            assert_eq!(fixup_ptr & 0x3, 0, "Branch-inst is not 32-bit aligned");
            let value = i32::try_from((target_func_address as isize) - (fixup_ptr as isize))
                .unwrap()
                .wrapping_add(r.addend() as _);
            assert!(
                value & 0x3 == 0,
                "BranchPCRel26 target is not 32-bit aligned"
            );

            assert!(
                (-(1 << 27)..=((1 << 27) - 1)).contains(&value),
                "out of range BranchPCRel26 target"
            );

            let raw_instr = read_unaligned(fixup_ptr as *mut u32);

            assert_eq!(
                raw_instr & 0x7fffffff,
                0x14000000,
                "RawInstr isn't a B or BR immediate instruction"
            );
            let imm: u32 = ((value as u32) & ((1 << 28) - 1)) >> 2;
            let fixed_instr: u32 = raw_instr | imm;

            write_unaligned(fixup_ptr as *mut u32, fixed_instr);
        },
        // Additive fixups: read-modify-write with wrapping arithmetic at
        // 1/2/4/8-byte widths.
        RelocationKind::Add => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u8);
            write_unaligned(
                reloc_address as *mut u8,
                value.wrapping_add(reloc_abs as u8),
            );
        },
        RelocationKind::Add2 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u16);
            write_unaligned(
                reloc_address as *mut u16,
                value.wrapping_add(reloc_abs as u16),
            );
        },
        RelocationKind::Add4 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u32);
            write_unaligned(
                reloc_address as *mut u32,
                value.wrapping_add(reloc_abs as u32),
            );
        },
        RelocationKind::Add8 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u64);
            write_unaligned(reloc_address as *mut u64, value.wrapping_add(reloc_abs));
        },
        // Subtractive fixups; the 6-bit variant preserves the top 2 bits of
        // the byte while subtracting within the low 6 bits.
        RelocationKind::Sub6Bits => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u8);
            let upper_2_bits = value & !LOW6_BITS_MASK;
            write_unaligned(
                reloc_address as *mut u8,
                (value.wrapping_sub((reloc_abs as u8) & LOW6_BITS_MASK) & LOW6_BITS_MASK)
                    | upper_2_bits,
            );
        },
        RelocationKind::Sub => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u8);
            write_unaligned(
                reloc_address as *mut u8,
                value.wrapping_sub(reloc_abs as u8),
            );
        },
        RelocationKind::Sub2 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u16);
            write_unaligned(
                reloc_address as *mut u16,
                value.wrapping_sub(reloc_abs as u16),
            );
        },
        RelocationKind::Sub4 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u32);
            write_unaligned(
                reloc_address as *mut u32,
                value.wrapping_sub(reloc_abs as u32),
            );
        },
        RelocationKind::Sub8 => unsafe {
            let (reloc_address, reloc_abs) = r.for_address(body, target_func_address as u64);
            let value = read_unaligned(reloc_address as *mut u64);
            write_unaligned(reloc_address as *mut u64, value.wrapping_sub(reloc_abs));
        },
        kind => panic!("Relocation kind unsupported in the current architecture: {kind:?}"),
    }
}
498
499/// Links a module, patching the allocated functions with the
500/// required relocations and jump tables.
501#[allow(clippy::too_many_arguments)]
502pub fn link_module<'a>(
503    _module: &ModuleInfo,
504    allocated_functions: &PrimaryMap<LocalFunctionIndex, FunctionExtent>,
505    allocated_dynamic_function_trampolines: &PrimaryMap<FunctionIndex, FunctionBodyPtr>,
506    function_relocations: impl Iterator<
507        Item = (
508            LocalFunctionIndex,
509            impl Iterator<Item = &'a (impl RelocationLike + 'a)>,
510        ),
511    >,
512    allocated_sections: &PrimaryMap<SectionIndex, SectionBodyPtr>,
513    section_relocations: impl Iterator<
514        Item = (
515            SectionIndex,
516            impl Iterator<Item = &'a (impl RelocationLike + 'a)>,
517        ),
518    >,
519    libcall_trampolines: SectionIndex,
520    trampoline_len: usize,
521    get_got_address: &'a dyn Fn(RelocationTarget) -> Option<usize>,
522) {
523    let mut riscv_pcrel_hi20s: HashMap<usize, u32> = HashMap::new();
524
525    for (i, section_relocs) in section_relocations {
526        let body = *allocated_sections[i] as usize;
527        for r in section_relocs {
528            apply_relocation(
529                body,
530                r,
531                allocated_functions,
532                allocated_dynamic_function_trampolines,
533                allocated_sections,
534                libcall_trampolines,
535                trampoline_len,
536                &mut riscv_pcrel_hi20s,
537                get_got_address,
538            );
539        }
540    }
541    for (i, function_relocs) in function_relocations {
542        let body = *allocated_functions[i].ptr as usize;
543        for r in function_relocs {
544            apply_relocation(
545                body,
546                r,
547                allocated_functions,
548                allocated_dynamic_function_trampolines,
549                allocated_sections,
550                libcall_trampolines,
551                trampoline_len,
552                &mut riscv_pcrel_hi20s,
553                get_got_address,
554            );
555        }
556    }
557}