// wasmer_compiler_llvm/trampoline/wasm.rs

1// TODO: Remove
2#![allow(unused)]
3
4use crate::{
5    abi::{Abi, G0M0FunctionKind, get_abi},
6    config::LLVM,
7    error::{err, err_nt},
8    object_file::{CompiledFunction, load_object_file},
9    translator::intrinsics::{Intrinsics, type_to_llvm, type_to_llvm_ptr},
10};
11use inkwell::{
12    AddressSpace, DLLStorageClass,
13    attributes::{Attribute, AttributeLoc},
14    context::Context,
15    module::{Linkage, Module},
16    passes::PassBuilderOptions,
17    targets::{FileType, TargetMachine},
18    types::FunctionType,
19    values::{BasicMetadataValueEnum, FunctionValue},
20};
21use std::{cmp, convert::TryInto};
22use target_lexicon::BinaryFormat;
23use wasmer_compiler::{
24    misc::CompiledKind,
25    types::{
26        function::FunctionBody,
27        module::CompileModuleInfo,
28        relocation::{Relocation, RelocationTarget},
29        section::{CustomSection, CustomSectionProtection, SectionBody, SectionIndex},
30    },
31};
32use wasmer_types::{
33    CompileError, FunctionIndex, FunctionType as FuncType, LocalFunctionIndex, entity::PrimaryMap,
34};
35use wasmer_vm::MemoryStyle;
36
/// Generates native-to-wasm and dynamic-function trampolines as LLVM
/// modules and compiles them down to native function bodies.
pub struct FuncTrampoline {
    // Owning LLVM context; every module created by this type borrows from it.
    ctx: Context,
    // Target to compile for (triple, data layout, codegen options).
    target_machine: TargetMachine,
    // Calling-convention helper for the target ABI.
    abi: Box<dyn Abi>,
    // Object format of the emitted artifacts (ELF or Mach-O).
    binary_fmt: BinaryFormat,
    // Name of the object-file section trampoline code is emitted into;
    // `load_object_file` later locates the function via this section name.
    func_section: String,
}
44
// Object-file section names that trampoline functions are emitted into and
// later looked up by (see `load_object_file` calls below).
// NOTE(review): the ELF constant uses Mach-O "segment,section" syntax while
// the Mach-O constant is a bare section name — these look swapped, but are
// preserved as-is; confirm against the object loaders before changing.
const FUNCTION_SECTION_ELF: &str = "__TEXT,wasmer_trmpl"; // Needs to be between 1 and 16 chars
const FUNCTION_SECTION_MACHO: &str = "wasmer_trmpl"; // Needs to be between 1 and 16 chars
47
48impl FuncTrampoline {
49    pub fn new(
50        target_machine: TargetMachine,
51        binary_fmt: BinaryFormat,
52    ) -> Result<Self, CompileError> {
53        let abi = get_abi(&target_machine);
54        Ok(Self {
55            ctx: Context::create(),
56            target_machine,
57            abi,
58            func_section: match binary_fmt {
59                BinaryFormat::Elf => FUNCTION_SECTION_ELF.to_string(),
60                BinaryFormat::Macho => FUNCTION_SECTION_MACHO.to_string(),
61                _ => {
62                    return Err(CompileError::UnsupportedTarget(format!(
63                        "Unsupported binary format: {binary_fmt:?}",
64                    )));
65                }
66            },
67            binary_fmt,
68        })
69    }
70
71    pub fn trampoline_to_module(
72        &self,
73        ty: &FuncType,
74        config: &LLVM,
75        name: &str,
76        compile_info: &CompileModuleInfo,
77    ) -> Result<Module<'_>, CompileError> {
78        // The function type, used for the callbacks.
79        let function = CompiledKind::FunctionCallTrampoline(ty.clone());
80        let module = self.ctx.create_module("");
81        let target_machine = &self.target_machine;
82        let target_triple = target_machine.get_triple();
83        let target_data = target_machine.get_target_data();
84        module.set_triple(&target_triple);
85        module.set_data_layout(&target_data.get_data_layout());
86        let intrinsics = Intrinsics::declare(&module, &self.ctx, &target_data, &self.binary_fmt);
87
88        let func_kind = if config.enable_g0m0_opt {
89            Some(G0M0FunctionKind::Local)
90        } else {
91            None
92        };
93
94        let (callee_ty, callee_attrs) =
95            self.abi
96                .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, func_kind)?;
97        let trampoline_ty = intrinsics.void_ty.fn_type(
98            &[
99                intrinsics.ptr_ty.into(), // vmctx ptr
100                intrinsics.ptr_ty.into(), // callee function address
101                intrinsics.ptr_ty.into(), // in/out values ptr
102            ],
103            false,
104        );
105
106        let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
107        trampoline_func
108            .as_global_value()
109            .set_section(Some(&self.func_section));
110        trampoline_func
111            .as_global_value()
112            .set_linkage(Linkage::DLLExport);
113        trampoline_func
114            .as_global_value()
115            .set_dll_storage_class(DLLStorageClass::Export);
116        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.uwtable);
117        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
118        self.generate_trampoline(
119            config,
120            compile_info,
121            trampoline_func,
122            ty,
123            callee_ty,
124            &callee_attrs,
125            &self.ctx,
126            &intrinsics,
127        )?;
128
129        if let Some(ref callbacks) = config.callbacks {
130            callbacks.preopt_ir(&function, &module);
131        }
132
133        let mut passes = vec![];
134
135        if config.enable_verifier {
136            passes.push("verify");
137        }
138
139        passes.push("instcombine");
140        module
141            .run_passes(
142                passes.join(",").as_str(),
143                target_machine,
144                PassBuilderOptions::create(),
145            )
146            .unwrap();
147
148        if let Some(ref callbacks) = config.callbacks {
149            callbacks.postopt_ir(&function, &module);
150        }
151
152        // -- Uncomment to enable dumping intermediate LLVM objects
153        //module
154        //    .print_to_file(format!(
155        //        "{}/obj_trmpl.ll",
156        //        std::env!("LLVM_EH_TESTS_DUMP_DIR")
157        //    ))
158        //    .unwrap();
159        Ok(module)
160    }
161
162    pub fn trampoline(
163        &self,
164        ty: &FuncType,
165        config: &LLVM,
166        name: &str,
167        compile_info: &CompileModuleInfo,
168    ) -> Result<FunctionBody, CompileError> {
169        let module = self.trampoline_to_module(ty, config, name, compile_info)?;
170        let function = CompiledKind::FunctionCallTrampoline(ty.clone());
171        let target_machine = &self.target_machine;
172
173        let memory_buffer = target_machine
174            .write_to_memory_buffer(&module, FileType::Object)
175            .unwrap();
176
177        if let Some(ref callbacks) = config.callbacks {
178            callbacks.obj_memory_buffer(&function, &memory_buffer);
179            let asm_buffer = target_machine
180                .write_to_memory_buffer(&module, FileType::Assembly)
181                .unwrap();
182            callbacks.asm_memory_buffer(&function, &asm_buffer);
183        }
184
185        let mem_buf_slice = memory_buffer.as_slice();
186
187        // Use a dummy function index to detect relocations against the trampoline
188        // function's address, which shouldn't exist and are not supported.
189        // Note, we just drop all custom sections, and verify that the function
190        // body itself has no relocations at all. This value should never be
191        // touched at all. However, it is set up so that if we do touch it (maybe
192        // due to someone changing the code later on), it'll explode, which is desirable!
193        let dummy_reloc_target =
194            RelocationTarget::DynamicTrampoline(FunctionIndex::from_u32(u32::MAX - 1));
195
196        // Note: we don't count .gcc_except_table here because native-to-wasm
197        // trampolines are not supposed to generate any LSDA sections. We *want* them
198        // to terminate libunwind's stack searches.
199        let CompiledFunction {
200            compiled_function,
201            custom_sections,
202            eh_frame_section_indices,
203            mut compact_unwind_section_indices,
204            ..
205        } = load_object_file(
206            mem_buf_slice,
207            &self.func_section,
208            dummy_reloc_target,
209            |name: &str| {
210                Err(CompileError::Codegen(format!(
211                    "trampoline generation produced reference to unknown function {name}",
212                )))
213            },
214            self.binary_fmt,
215        )?;
216        let mut all_sections_are_eh_sections = true;
217        let mut unwind_section_indices = eh_frame_section_indices;
218        unwind_section_indices.append(&mut compact_unwind_section_indices);
219        if unwind_section_indices.len() != custom_sections.len() {
220            all_sections_are_eh_sections = false;
221        } else {
222            unwind_section_indices.sort_unstable();
223            for (idx, section_idx) in unwind_section_indices.iter().enumerate() {
224                if idx as u32 != section_idx.as_u32() {
225                    all_sections_are_eh_sections = false;
226                    break;
227                }
228            }
229        }
230        if !all_sections_are_eh_sections {
231            return Err(CompileError::Codegen(
232                "trampoline generation produced non-eh custom sections".into(),
233            ));
234        }
235        if !compiled_function.relocations.is_empty() {
236            return Err(CompileError::Codegen(
237                "trampoline generation produced relocations".into(),
238            ));
239        }
240        // Ignore CompiledFunctionFrameInfo. Extra frame info isn't a problem.
241
242        Ok(FunctionBody {
243            body: compiled_function.body.body,
244            unwind_info: compiled_function.body.unwind_info,
245        })
246    }
247
248    pub fn dynamic_trampoline_to_module(
249        &self,
250        ty: &FuncType,
251        config: &LLVM,
252        name: &str,
253    ) -> Result<Module<'_>, CompileError> {
254        // The function type, used for the callbacks
255        let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
256        let module = self.ctx.create_module("");
257        let target_machine = &self.target_machine;
258        let target_data = target_machine.get_target_data();
259        let target_triple = target_machine.get_triple();
260        module.set_triple(&target_triple);
261        module.set_data_layout(&target_data.get_data_layout());
262        let intrinsics = Intrinsics::declare(&module, &self.ctx, &target_data, &self.binary_fmt);
263
264        let (trampoline_ty, trampoline_attrs) =
265            self.abi
266                .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, None)?;
267        let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
268        trampoline_func.set_personality_function(intrinsics.personality);
269        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
270        for (attr, attr_loc) in trampoline_attrs {
271            trampoline_func.add_attribute(attr_loc, attr);
272        }
273        trampoline_func
274            .as_global_value()
275            .set_section(Some(&self.func_section));
276        trampoline_func
277            .as_global_value()
278            .set_linkage(Linkage::DLLExport);
279        trampoline_func
280            .as_global_value()
281            .set_dll_storage_class(DLLStorageClass::Export);
282        self.generate_dynamic_trampoline(trampoline_func, ty, &self.ctx, &intrinsics)?;
283
284        if let Some(ref callbacks) = config.callbacks {
285            callbacks.preopt_ir(&function, &module);
286        }
287
288        let mut passes = vec![];
289
290        if config.enable_verifier {
291            passes.push("verify");
292        }
293
294        passes.push("early-cse");
295        module
296            .run_passes(
297                passes.join(",").as_str(),
298                target_machine,
299                PassBuilderOptions::create(),
300            )
301            .unwrap();
302
303        if let Some(ref callbacks) = config.callbacks {
304            callbacks.postopt_ir(&function, &module);
305        }
306
307        Ok(module)
308    }
309
    /// Compiles the dynamic-function trampoline for `ty` down to a native
    /// [`FunctionBody`], appending its unwind-info sections into the
    /// module-wide accumulators passed in by the caller.
    ///
    /// `dynamic_trampoline_index` identifies this trampoline in relocations
    /// recorded against it by `load_object_file`. Unlike `Self::trampoline`,
    /// EH-related sections are kept (rebased into `eh_frame_section_bytes` /
    /// `compact_unwind_section_bytes`) because dynamic trampolines are
    /// expected to participate in unwinding.
    #[allow(clippy::too_many_arguments)]
    pub fn dynamic_trampoline(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        dynamic_trampoline_index: u32,
        final_module_custom_sections: &mut PrimaryMap<SectionIndex, CustomSection>,
        eh_frame_section_bytes: &mut Vec<u8>,
        eh_frame_section_relocations: &mut Vec<Relocation>,
        compact_unwind_section_bytes: &mut Vec<u8>,
        compact_unwind_section_relocations: &mut Vec<Relocation>,
    ) -> Result<FunctionBody, CompileError> {
        let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
        let target_machine = &self.target_machine;

        let module = self.dynamic_trampoline_to_module(ty, config, name)?;

        let memory_buffer = target_machine
            .write_to_memory_buffer(&module, FileType::Object)
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.obj_memory_buffer(&function, &memory_buffer);
            let asm_buffer = target_machine
                .write_to_memory_buffer(&module, FileType::Assembly)
                .unwrap();
            callbacks.asm_memory_buffer(&function, &asm_buffer)
        }

        let mem_buf_slice = memory_buffer.as_slice();
        let CompiledFunction {
            compiled_function,
            custom_sections,
            eh_frame_section_indices,
            compact_unwind_section_indices,
            gcc_except_table_section_indices,
        } = load_object_file(
            mem_buf_slice,
            &self.func_section,
            RelocationTarget::DynamicTrampoline(FunctionIndex::from_u32(dynamic_trampoline_index)),
            |name: &str| {
                Err(CompileError::Codegen(format!(
                    "trampoline generation produced reference to unknown function {name}",
                )))
            },
            self.binary_fmt,
        )?;

        // The trampoline body itself must not require any relocations.
        if !compiled_function.relocations.is_empty() {
            return Err(CompileError::Codegen(
                "trampoline generation produced relocations".into(),
            ));
        }
        // Ignore CompiledFunctionFrameInfo. Extra frame info isn't a problem.

        // Also append EH-related sections to the final module, since we expect
        // dynamic trampolines to participate in unwinding
        {
            let first_section = final_module_custom_sections.len() as u32;
            for (section_index, mut custom_section) in custom_sections.into_iter() {
                // Rebase intra-object section references into the final
                // module's section index space; GOT-based relocations cannot
                // be honored here and are rejected.
                for reloc in &mut custom_section.relocations {
                    if let RelocationTarget::CustomSection(index) = reloc.reloc_target {
                        reloc.reloc_target = RelocationTarget::CustomSection(
                            SectionIndex::from_u32(first_section + index.as_u32()),
                        )
                    }

                    if reloc.kind.needs_got() {
                        return Err(CompileError::Codegen(
                            "trampoline generation produced GOT relocation".into(),
                        ));
                    }
                }

                if eh_frame_section_indices.contains(&section_index) {
                    // Shift relocation offsets by the bytes already
                    // accumulated before appending this section's data.
                    let offset = eh_frame_section_bytes.len() as u32;
                    for reloc in &mut custom_section.relocations {
                        reloc.offset += offset;
                    }
                    eh_frame_section_bytes.extend_from_slice(custom_section.bytes.as_slice());
                    // Terminate the eh_frame info with a zero-length CIE.
                    eh_frame_section_bytes.extend_from_slice(&[0, 0, 0, 0]);
                    eh_frame_section_relocations.extend(custom_section.relocations);
                    // TODO: we do this to keep the count right, remove it.
                    final_module_custom_sections.push(CustomSection {
                        protection: CustomSectionProtection::Read,
                        alignment: None,
                        bytes: SectionBody::new_with_vec(vec![]),
                        relocations: vec![],
                    });
                } else if compact_unwind_section_indices.contains(&section_index) {
                    // Same treatment for the compact-unwind accumulator.
                    let offset = compact_unwind_section_bytes.len() as u32;
                    for reloc in &mut custom_section.relocations {
                        reloc.offset += offset;
                    }
                    compact_unwind_section_bytes.extend_from_slice(custom_section.bytes.as_slice());
                    compact_unwind_section_relocations.extend(custom_section.relocations);
                    // TODO: we do this to keep the count right, remove it.
                    final_module_custom_sections.push(CustomSection {
                        protection: CustomSectionProtection::Read,
                        alignment: None,
                        bytes: SectionBody::new_with_vec(vec![]),
                        relocations: vec![],
                    });
                } else if gcc_except_table_section_indices.contains(&section_index) {
                    // LSDA tables are carried through to the final module.
                    final_module_custom_sections.push(custom_section);
                } else {
                    // Anything else is unexpected output for a trampoline.
                    return Err(CompileError::Codegen(
                        "trampoline generation produced non-eh custom sections".into(),
                    ));
                }
            }
        }

        Ok(FunctionBody {
            body: compiled_function.body.body,
            unwind_info: compiled_function.body.unwind_info,
        })
    }
430
    /// Populates `trampoline_func` with IR that unpacks the in/out array
    /// (`args_rets_ptr`, one i128 slot per value), calls the wasm function at
    /// `func_ptr` with the native callee ABI, and stores the results back
    /// into the same array.
    ///
    /// When `config.enable_g0m0_opt` is set, the value of global 0 ("g0") and
    /// the base pointer of memory 0 ("m0") are loaded from the vmctx and
    /// passed as extra leading arguments, matching callees lowered with
    /// `G0M0FunctionKind::Local`.
    #[allow(clippy::too_many_arguments)]
    fn generate_trampoline<'ctx>(
        &self,
        config: &LLVM,
        compile_info: &CompileModuleInfo,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        llvm_func_type: FunctionType,
        func_attrs: &[(Attribute, AttributeLoc)],
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        // The trampoline's own signature is fixed:
        // (vmctx ptr, callee fn ptr, args/rets array ptr).
        let (callee_vmctx_ptr, func_ptr, args_rets_ptr) =
            match *trampoline_func.get_params().as_slice() {
                [callee_vmctx_ptr, func_ptr, args_rets_ptr] => (
                    callee_vmctx_ptr,
                    func_ptr.into_pointer_value(),
                    args_rets_ptr.into_pointer_value(),
                ),
                _ => {
                    return Err(CompileError::Codegen(
                        "trampoline function unimplemented".to_string(),
                    ));
                }
            };

        // Extra capacity: vmctx (+ g0 and m0 when the optimization is on).
        let mut args_vec: Vec<BasicMetadataValueEnum> =
            Vec::with_capacity(if config.enable_g0m0_opt {
                func_sig.params().len() + 3
            } else {
                func_sig.params().len() + 1
            });

        // For sret ABIs the results are returned through a hidden first
        // pointer argument; allocate the result struct on the stack.
        if self.abi.is_sret(func_sig)? {
            let basic_types: Vec<_> = func_sig
                .results()
                .iter()
                .map(|&ty| type_to_llvm(intrinsics, ty))
                .collect::<Result<_, _>>()?;

            let sret_ty = context.struct_type(&basic_types, false);
            args_vec.push(err!(builder.build_alloca(sret_ty, "sret")).into());
        }

        args_vec.push(callee_vmctx_ptr.into());

        if config.enable_g0m0_opt {
            let wasm_module = &compile_info.module;
            let memory_styles = &compile_info.memory_styles;
            let callee_vmctx_ptr_value = callee_vmctx_ptr.into_pointer_value();
            // get value of G0, get a pointer to M0's base

            let offsets = wasmer_vm::VMOffsets::new(8, wasm_module);

            let global_index = wasmer_types::GlobalIndex::from_u32(0);
            let global_type = wasm_module.globals[global_index];
            let global_value_type = global_type.ty;
            let global_mutability = global_type.mutability;

            // Locate global 0 in the vmctx: local globals live at a fixed
            // definition offset, imported ones behind an extra indirection.
            let offset =
                if let Some(local_global_index) = wasm_module.local_global_index(global_index) {
                    offsets.vmctx_vmglobal_definition(local_global_index)
                } else {
                    offsets.vmctx_vmglobal_import(global_index)
                };
            let offset = intrinsics.i32_ty.const_int(offset.into(), false);
            let global_ptr = {
                let global_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let global_ptr_ptr =
                    err!(builder.build_bit_cast(global_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, global_ptr_ptr, "")).into_pointer_value()
            };

            let global_ptr = err!(builder.build_bit_cast(
                global_ptr,
                type_to_llvm_ptr(intrinsics, global_value_type)?,
                "",
            ))
            .into_pointer_value();

            let global_value = match global_mutability {
                wasmer_types::Mutability::Const => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        "g0",
                    ))
                }
                wasmer_types::Mutability::Var => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        ""
                    ))
                }
            };

            global_value.set_name("trmpl_g0");
            args_vec.push(global_value.into());

            // load mem
            let memory_index = wasmer_types::MemoryIndex::from_u32(0);
            // Locate memory 0's VMMemoryDefinition: local memories are inline
            // in the vmctx, imported ones behind a pointer.
            let memory_definition_ptr = if let Some(local_memory_index) =
                wasm_module.local_memory_index(memory_index)
            {
                let offset = offsets.vmctx_vmmemory_definition(local_memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                }
            } else {
                let offset = offsets.vmctx_vmmemory_import(memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let memory_definition_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let memory_definition_ptr_ptr =
                    err!(builder.build_bit_cast(memory_definition_ptr_ptr, intrinsics.ptr_ty, "",))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, memory_definition_ptr_ptr, ""))
                    .into_pointer_value()
            };
            let memory_definition_ptr =
                err!(builder.build_bit_cast(memory_definition_ptr, intrinsics.ptr_ty, "",))
                    .into_pointer_value();
            let base_ptr = err!(builder.build_struct_gep(
                intrinsics.vmmemory_definition_ty,
                memory_definition_ptr,
                intrinsics.vmmemory_definition_base_element,
                "",
            ));

            // NOTE(review): for dynamic memories the pointer to the base field
            // is passed directly, otherwise the base is loaded — presumably
            // because a dynamic memory's base can move on growth; confirm
            // against the callee's expectations.
            let memory_style = &memory_styles[memory_index];
            let base_ptr = if let MemoryStyle::Dynamic { .. } = memory_style {
                base_ptr
            } else {
                err!(builder.build_load(intrinsics.ptr_ty, base_ptr, "")).into_pointer_value()
            };

            base_ptr.set_name("trmpl_m0_base_ptr");

            args_vec.push(base_ptr.into());
        }

        // Load each wasm argument from its i128 slot, reinterpreted as the
        // parameter's actual type.
        for (i, param_ty) in func_sig.params().iter().enumerate() {
            let index = intrinsics.i32_ty.const_int(i as _, false);
            let item_pointer = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[index],
                    "arg_ptr"
                ))
            };

            let casted_type = type_to_llvm(intrinsics, *param_ty)?;
            let casted_pointer_type = type_to_llvm_ptr(intrinsics, *param_ty)?;

            let typed_item_pointer = err!(builder.build_pointer_cast(
                item_pointer,
                casted_pointer_type,
                "typed_arg_pointer"
            ));

            let arg = err!(builder.build_load(casted_type, typed_item_pointer, "arg"));
            args_vec.push(arg.into());
        }

        // Indirect call through the callee function pointer, carrying over
        // the ABI attributes computed for the callee's signature.
        let call_site = err!(builder.build_indirect_call(
            llvm_func_type,
            func_ptr,
            args_vec.as_slice(),
            "call"
        ));
        for (attr, attr_loc) in func_attrs {
            call_site.add_attribute(*attr_loc, *attr);
        }

        // Store each result back into the shared i128 array, slot by slot.
        let rets = self
            .abi
            .rets_from_call(&builder, intrinsics, call_site, func_sig)?;
        for (idx, v) in rets.into_iter().enumerate() {
            let ptr = unsafe {
                err!(builder.build_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[intrinsics.i32_ty.const_int(idx as u64, false)],
                    "",
                ))
            };
            let ptr = err!(builder.build_pointer_cast(
                ptr,
                self.ctx.ptr_type(AddressSpace::default()),
                ""
            ));
            err!(builder.build_store(ptr, v));
        }

        err!(builder.build_return(None));
        Ok(())
    }
641
    /// Populates `trampoline_func` with IR that packs its wasm-level
    /// arguments into a stack array of i128 slots, invokes the host function
    /// pointer loaded through the vmctx with `(vmctx, values_ptr)`, and then
    /// returns the results read back from the same array (via sret or
    /// register return, as the ABI dictates).
    fn generate_dynamic_trampoline<'ctx>(
        &self,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        // Allocate stack space for the params and results.
        // One i128 slot per value; sized for whichever of params/results is larger.
        let values = err!(builder.build_alloca(
            intrinsics.i128_ty.array_type(cmp::max(
                func_sig.params().len().try_into().unwrap(),
                func_sig.results().len().try_into().unwrap(),
            )),
            "",
        ));

        // Copy params to 'values'.
        // Skip the leading ABI parameters: the vmctx pointer, plus the hidden
        // sret pointer when the signature returns through memory.
        let first_user_param = if self.abi.is_sret(func_sig)? { 2 } else { 1 };
        for i in 0..func_sig.params().len() {
            let ptr = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    values,
                    &[intrinsics.i32_ty.const_int(i.try_into().unwrap(), false)],
                    "args",
                ))
            };
            let ptr = err!(builder.build_bit_cast(
                ptr,
                type_to_llvm_ptr(intrinsics, func_sig.params()[i])?,
                ""
            ))
            .into_pointer_value();
            err!(
                builder.build_store(
                    ptr,
                    trampoline_func
                        .get_nth_param(i as u32 + first_user_param)
                        .unwrap(),
                )
            );
        }

        // The host-side callee has the fixed signature
        // (vmctx ptr, in/out values ptr) -> void.
        let callee_ptr_ty = intrinsics.void_ty.fn_type(
            &[
                intrinsics.ptr_ty.into(), // vmctx ptr
                intrinsics.ptr_ty.into(), // in/out values ptr
            ],
            false,
        );
        // The callee function pointer is loaded from the start of the vmctx.
        let vmctx = self.abi.get_vmctx_ptr_param(&trampoline_func);
        let callee_ty =
            err!(builder.build_bit_cast(vmctx, self.ctx.ptr_type(AddressSpace::default()), ""));
        let callee =
            err!(builder.build_load(intrinsics.ptr_ty, callee_ty.into_pointer_value(), ""))
                .into_pointer_value();

        let values_ptr = err!(builder.build_pointer_cast(values, intrinsics.ptr_ty, ""));
        err!(builder.build_indirect_call(
            callee_ptr_ty,
            callee,
            &[vmctx.into(), values_ptr.into()],
            "",
        ));

        if func_sig.results().is_empty() {
            err!(builder.build_return(None));
        } else {
            // Read every result back out of the i128 slots, each as its
            // declared wasm type.
            let results = func_sig
                .results()
                .iter()
                .enumerate()
                .map(|(idx, ty)| {
                    let ptr = unsafe {
                        err!(builder.build_gep(
                            intrinsics.i128_ty,
                            values,
                            &[intrinsics.i32_ty.const_int(idx.try_into().unwrap(), false)],
                            "",
                        ))
                    };
                    let ptr = err!(builder.build_pointer_cast(
                        ptr,
                        type_to_llvm_ptr(intrinsics, *ty)?,
                        ""
                    ));
                    err_nt!(builder.build_load(type_to_llvm(intrinsics, *ty)?, ptr, ""))
                })
                .collect::<Result<Vec<_>, CompileError>>()?;

            if self.abi.is_sret(func_sig)? {
                // Memory return: build the result struct and store it through
                // the hidden sret pointer (first ABI parameter).
                let sret = trampoline_func
                    .get_first_param()
                    .unwrap()
                    .into_pointer_value();

                let basic_types: Vec<_> = func_sig
                    .results()
                    .iter()
                    .map(|&ty| type_to_llvm(intrinsics, ty))
                    .collect::<Result<_, _>>()?;
                let mut struct_value = context.struct_type(&basic_types, false).get_undef();

                for (idx, value) in results.iter().enumerate() {
                    let value = err!(builder.build_bit_cast(
                        *value,
                        type_to_llvm(intrinsics, func_sig.results()[idx])?,
                        "",
                    ));
                    struct_value =
                        err!(builder.build_insert_value(struct_value, value, idx as u32, ""))
                            .into_struct_value();
                }
                err!(builder.build_store(sret, struct_value));
                err!(builder.build_return(None));
            } else {
                // Register return: let the ABI pack the values into the
                // trampoline's declared return type.
                err!(
                    builder.build_return(Some(&self.abi.pack_values_for_register_return(
                        intrinsics,
                        &builder,
                        results.as_slice(),
                        &trampoline_func.get_type(),
                    )?))
                );
            }
        }

        Ok(())
    }
775}