wasmer_compiler_llvm/trampoline/wasm.rs

// TODO: Remove
#![allow(unused)]

use crate::{
    abi::{Abi, G0M0FunctionKind, get_abi},
    config::LLVM,
    error::{err, err_nt},
    object_file::{CompiledFunction, load_object_file},
    translator::intrinsics::{Intrinsics, type_to_llvm, type_to_llvm_ptr},
};
use inkwell::{
    AddressSpace, DLLStorageClass,
    attributes::{Attribute, AttributeLoc},
    context::Context,
    module::{Linkage, Module},
    passes::PassBuilderOptions,
    targets::{FileType, TargetMachine},
    types::FunctionType,
    values::{BasicMetadataValueEnum, FunctionValue},
};
use std::{cmp, convert::TryInto};
use target_lexicon::{BinaryFormat, Triple};
use wasmer_compiler::{
    misc::CompiledKind,
    types::{
        function::FunctionBody,
        module::CompileModuleInfo,
        relocation::{Relocation, RelocationTarget},
        section::{CustomSection, CustomSectionProtection, SectionBody, SectionIndex},
    },
};
use wasmer_types::{
    CompileError, FunctionIndex, FunctionType as FuncType, LocalFunctionIndex, entity::PrimaryMap,
};
use wasmer_vm::MemoryStyle;

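/// Generates and compiles the trampolines a Wasmer module needs: host-to-wasm
/// function-call trampolines and dynamic function trampolines.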
pub struct FuncTrampoline {
    ctx: Context,
    target_machine: TargetMachine,
    target_triple: Triple,
    abi: Box<dyn Abi>,
    binary_fmt: BinaryFormat,
    func_section: String,
}

// Mach-O section names are limited to 16 characters, hence the short "wasmer_trmpl" name.
const FUNCTION_SECTION_ELF: &str = "__TEXT,wasmer_trmpl";
const FUNCTION_SECTION_MACHO: &str = "wasmer_trmpl";

impl FuncTrampoline {
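    /// Creates a trampoline generator for the given target machine, triple and
    /// binary format. Binary formats other than ELF and Mach-O are rejected.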
    pub fn new(
        target_machine: TargetMachine,
        target_triple: Triple,
        binary_fmt: BinaryFormat,
    ) -> Result<Self, CompileError> {
        let abi = get_abi(&target_machine);
        Ok(Self {
            ctx: Context::create(),
            target_machine,
            target_triple,
            abi,
            func_section: match binary_fmt {
                BinaryFormat::Elf => FUNCTION_SECTION_ELF.to_string(),
                BinaryFormat::Macho => FUNCTION_SECTION_MACHO.to_string(),
                _ => {
                    return Err(CompileError::UnsupportedTarget(format!(
                        "Unsupported binary format: {binary_fmt:?}",
                    )));
                }
            },
            binary_fmt,
        })
    }

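    /// Builds an LLVM module containing a single host-to-wasm call trampoline
    /// for the signature `ty`. The trampoline receives a vmctx pointer, the
    /// callee's address and a pointer to an in/out values buffer; it forwards
    /// the arguments to the callee according to the ABI and writes the results
    /// back into the buffer.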
    pub fn trampoline_to_module(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        compile_info: &CompileModuleInfo,
    ) -> Result<Module<'_>, CompileError> {
        // The function type, used for the callbacks.
        let function = CompiledKind::FunctionCallTrampoline(ty.clone());
        let module = self.ctx.create_module("");
        let target_machine = &self.target_machine;
        let target_triple = target_machine.get_triple();
        let target_data: inkwell::targets::TargetData = target_machine.get_target_data();
        module.set_triple(&target_triple);
        module.set_data_layout(&target_data.get_data_layout());
        let intrinsics = Intrinsics::declare(
            &module,
            &self.ctx,
            &target_data,
            &self.target_triple,
            &self.binary_fmt,
        );

        let func_kind = if config.enable_g0m0_opt {
            Some(G0M0FunctionKind::Local)
        } else {
            None
        };

        let (callee_ty, callee_attrs) =
            self.abi
                .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, func_kind)?;
        let trampoline_ty = intrinsics.void_ty.fn_type(
            &[
                intrinsics.ptr_ty.into(), // vmctx ptr
                intrinsics.ptr_ty.into(), // callee function address
                intrinsics.ptr_ty.into(), // in/out values ptr
            ],
            false,
        );

        let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
        trampoline_func
            .as_global_value()
            .set_section(Some(&self.func_section));
        trampoline_func
            .as_global_value()
            .set_linkage(Linkage::DLLExport);
        trampoline_func
            .as_global_value()
            .set_dll_storage_class(DLLStorageClass::Export);
        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.uwtable);
        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
        self.generate_trampoline(
            config,
            compile_info,
            trampoline_func,
            ty,
            callee_ty,
            &callee_attrs,
            &self.ctx,
            &intrinsics,
        )?;

        if let Some(ref callbacks) = config.callbacks {
            callbacks.preopt_ir(&function, &compile_info.module.hash_string(), &module);
        }

        let mut passes = vec![];

        if config.enable_verifier {
            passes.push("verify");
        }

        passes.push("instcombine");
        module
            .run_passes(
                passes.join(",").as_str(),
                target_machine,
                PassBuilderOptions::create(),
            )
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.postopt_ir(&function, &compile_info.module.hash_string(), &module);
        }
        Ok(module)
    }

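    /// Compiles the function-call trampoline for `ty` to native code and
    /// returns its body. The generated object must contain only the trampoline
    /// itself plus unwind-related sections; any relocations or other custom
    /// sections are reported as codegen errors.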
    pub fn trampoline(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        compile_info: &CompileModuleInfo,
    ) -> Result<FunctionBody, CompileError> {
        let module = self.trampoline_to_module(ty, config, name, compile_info)?;
        let function = CompiledKind::FunctionCallTrampoline(ty.clone());
        let target_machine = &self.target_machine;

        let memory_buffer = target_machine
            .write_to_memory_buffer(&module, FileType::Object)
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            let module_hash = compile_info.module.hash_string();
            callbacks.obj_memory_buffer(&function, &module_hash, &memory_buffer);
            let asm_buffer = target_machine
                .write_to_memory_buffer(&module, FileType::Assembly)
                .unwrap();
            callbacks.asm_memory_buffer(&function, &module_hash, &asm_buffer);
        }

        let mem_buf_slice = memory_buffer.as_slice();

        // Use a dummy function index to detect relocations against the trampoline
        // function's address, which shouldn't exist and are not supported.
        // Note that we drop all custom sections and verify that the function body
        // itself has no relocations at all, so this value should never actually be
        // used. It is deliberately chosen so that, if a later change does end up
        // using it, things fail loudly instead of silently.
        let dummy_reloc_target =
            RelocationTarget::DynamicTrampoline(FunctionIndex::from_u32(u32::MAX - 1));

        // Note: we don't count .gcc_except_table here because native-to-wasm
        // trampolines are not supposed to generate any LSDA sections. We *want* them
        // to terminate libunwind's stack searches.
        let CompiledFunction {
            compiled_function,
            custom_sections,
            eh_frame_section_indices,
            mut compact_unwind_section_indices,
            ..
        } = load_object_file(
            mem_buf_slice,
            &self.func_section,
            dummy_reloc_target,
            |name: &str| {
                Err(CompileError::Codegen(format!(
                    "trampoline generation produced reference to unknown function {name}",
                )))
            },
            self.binary_fmt,
        )?;
        let mut all_sections_are_eh_sections = true;
        let mut unwind_section_indices = eh_frame_section_indices;
        unwind_section_indices.append(&mut compact_unwind_section_indices);
        if unwind_section_indices.len() != custom_sections.len() {
            all_sections_are_eh_sections = false;
        } else {
            unwind_section_indices.sort_unstable();
            for (idx, section_idx) in unwind_section_indices.iter().enumerate() {
                if idx as u32 != section_idx.as_u32() {
                    all_sections_are_eh_sections = false;
                    break;
                }
            }
        }
        if !all_sections_are_eh_sections {
            return Err(CompileError::Codegen(
                "trampoline generation produced non-eh custom sections".into(),
            ));
        }
        if !compiled_function.relocations.is_empty() {
            return Err(CompileError::Codegen(
                "trampoline generation produced relocations".into(),
            ));
        }
        // Ignore CompiledFunctionFrameInfo. Extra frame info isn't a problem.

        Ok(FunctionBody {
            body: compiled_function.body.body,
            unwind_info: compiled_function.body.unwind_info,
        })
    }

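    /// Builds an LLVM module containing a single dynamic function trampoline
    /// for the signature `ty`. The trampoline has the callee's native
    /// signature: it spills its arguments into a stack buffer, calls the host
    /// function reached through the vmctx with (vmctx, values), and reads the
    /// results back out of the buffer.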
    pub fn dynamic_trampoline_to_module(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        module_hash: &Option<String>,
    ) -> Result<Module<'_>, CompileError> {
        // The function type, used for the callbacks
        let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
        let module = self.ctx.create_module("");
        let target_machine = &self.target_machine;
        let target_data = target_machine.get_target_data();
        let target_triple = target_machine.get_triple();
        module.set_triple(&target_triple);
        module.set_data_layout(&target_data.get_data_layout());
        let intrinsics = Intrinsics::declare(
            &module,
            &self.ctx,
            &target_data,
            &self.target_triple,
            &self.binary_fmt,
        );

        let (trampoline_ty, trampoline_attrs) =
            self.abi
                .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, None)?;
        let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
        trampoline_func.set_personality_function(intrinsics.personality);
        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
        for (attr, attr_loc) in trampoline_attrs {
            trampoline_func.add_attribute(attr_loc, attr);
        }
        trampoline_func
            .as_global_value()
            .set_section(Some(&self.func_section));
        trampoline_func
            .as_global_value()
            .set_linkage(Linkage::DLLExport);
        trampoline_func
            .as_global_value()
            .set_dll_storage_class(DLLStorageClass::Export);
        self.generate_dynamic_trampoline(trampoline_func, ty, &self.ctx, &intrinsics)?;

        if let Some(ref callbacks) = config.callbacks {
            callbacks.preopt_ir(&function, module_hash, &module);
        }

        let mut passes = vec![];

        if config.enable_verifier {
            passes.push("verify");
        }

        passes.push("early-cse");
        module
            .run_passes(
                passes.join(",").as_str(),
                target_machine,
                PassBuilderOptions::create(),
            )
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.postopt_ir(&function, module_hash, &module);
        }

        Ok(module)
    }

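    /// Compiles the dynamic function trampoline for `ty` and returns its body.
    /// Unwind-related sections emitted alongside it (eh_frame, compact unwind,
    /// gcc_except_table and DW.ref personality data) are appended to the final
    /// module's custom sections with their relocations adjusted; any other
    /// custom section is a codegen error.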
    #[allow(clippy::too_many_arguments)]
    pub fn dynamic_trampoline(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        dynamic_trampoline_index: u32,
        final_module_custom_sections: &mut PrimaryMap<SectionIndex, CustomSection>,
        eh_frame_section_bytes: &mut Vec<u8>,
        eh_frame_section_relocations: &mut Vec<Relocation>,
        compact_unwind_section_bytes: &mut Vec<u8>,
        compact_unwind_section_relocations: &mut Vec<Relocation>,
        module_hash: &Option<String>,
    ) -> Result<FunctionBody, CompileError> {
        let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
        let target_machine = &self.target_machine;

        let module = self.dynamic_trampoline_to_module(ty, config, name, module_hash)?;

        let memory_buffer = target_machine
            .write_to_memory_buffer(&module, FileType::Object)
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.obj_memory_buffer(&function, module_hash, &memory_buffer);
            let asm_buffer = target_machine
                .write_to_memory_buffer(&module, FileType::Assembly)
                .unwrap();
            callbacks.asm_memory_buffer(&function, module_hash, &asm_buffer)
        }

        let mem_buf_slice = memory_buffer.as_slice();
        let CompiledFunction {
            compiled_function,
            custom_sections,
            eh_frame_section_indices,
            compact_unwind_section_indices,
            gcc_except_table_section_indices,
            data_dw_ref_personality_section_indices,
        } = load_object_file(
            mem_buf_slice,
            &self.func_section,
            RelocationTarget::DynamicTrampoline(FunctionIndex::from_u32(dynamic_trampoline_index)),
            |name: &str| {
                Err(CompileError::Codegen(format!(
                    "trampoline generation produced reference to unknown function {name}",
                )))
            },
            self.binary_fmt,
        )?;

        if !compiled_function.relocations.is_empty() {
            return Err(CompileError::Codegen(
                "trampoline generation produced relocations".into(),
            ));
        }
        // Ignore CompiledFunctionFrameInfo. Extra frame info isn't a problem.

        // Also append EH-related sections to the final module, since we expect
        // dynamic trampolines to participate in unwinding
        {
            let first_section = final_module_custom_sections.len() as u32;
            for (section_index, mut custom_section) in custom_sections.into_iter() {
                for reloc in &mut custom_section.relocations {
                    if let RelocationTarget::CustomSection(index) = reloc.reloc_target {
                        reloc.reloc_target = RelocationTarget::CustomSection(
                            SectionIndex::from_u32(first_section + index.as_u32()),
                        )
                    }

                    if reloc.kind.needs_got() {
                        return Err(CompileError::Codegen(
                            "trampoline generation produced GOT relocation".into(),
                        ));
                    }
                }

                if eh_frame_section_indices.contains(&section_index) {
                    let offset = eh_frame_section_bytes.len() as u32;
                    for reloc in &mut custom_section.relocations {
                        reloc.offset += offset;
                    }
                    eh_frame_section_bytes.extend_from_slice(custom_section.bytes.as_slice());
                    // Terminate the eh_frame info with a zero-length CIE.
                    eh_frame_section_bytes.extend_from_slice(&[0, 0, 0, 0]);
                    eh_frame_section_relocations.extend(custom_section.relocations);
                    // TODO: we do this to keep the count right, remove it.
                    final_module_custom_sections.push(CustomSection {
                        protection: CustomSectionProtection::Read,
                        alignment: None,
                        bytes: SectionBody::new_with_vec(vec![]),
                        relocations: vec![],
                    });
                } else if compact_unwind_section_indices.contains(&section_index) {
                    let offset = compact_unwind_section_bytes.len() as u32;
                    for reloc in &mut custom_section.relocations {
                        reloc.offset += offset;
                    }
                    compact_unwind_section_bytes.extend_from_slice(custom_section.bytes.as_slice());
                    compact_unwind_section_relocations.extend(custom_section.relocations);
                    // TODO: we do this to keep the count right, remove it.
                    final_module_custom_sections.push(CustomSection {
                        protection: CustomSectionProtection::Read,
                        alignment: None,
                        bytes: SectionBody::new_with_vec(vec![]),
                        relocations: vec![],
                    });
                } else if gcc_except_table_section_indices.contains(&section_index)
                    || data_dw_ref_personality_section_indices.contains(&section_index)
                {
                    final_module_custom_sections.push(custom_section);
                } else {
                    return Err(CompileError::Codegen(
                        "trampoline generation produced non-eh custom sections".into(),
                    ));
                }
            }
        }

        Ok(FunctionBody {
            body: compiled_function.body.body,
            unwind_info: compiled_function.body.unwind_info,
        })
    }

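    /// Emits the body of a function-call trampoline: loads each argument from
    /// the in/out values buffer (plus G0 and M0 when the g0m0 optimization is
    /// enabled), performs the indirect call to the callee, and stores the
    /// results back into the same buffer.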
    #[allow(clippy::too_many_arguments)]
    fn generate_trampoline<'ctx>(
        &self,
        config: &LLVM,
        compile_info: &CompileModuleInfo,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        llvm_func_type: FunctionType,
        func_attrs: &[(Attribute, AttributeLoc)],
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        let (callee_vmctx_ptr, func_ptr, args_rets_ptr) =
            match *trampoline_func.get_params().as_slice() {
                [callee_vmctx_ptr, func_ptr, args_rets_ptr] => (
                    callee_vmctx_ptr,
                    func_ptr.into_pointer_value(),
                    args_rets_ptr.into_pointer_value(),
                ),
                _ => {
                    return Err(CompileError::Codegen(
                        "trampoline function unimplemented".to_string(),
                    ));
                }
            };

        let mut args_vec: Vec<BasicMetadataValueEnum> =
            Vec::with_capacity(if config.enable_g0m0_opt {
                func_sig.params().len() + 3
            } else {
                func_sig.params().len() + 1
            });

        if self.abi.is_sret(func_sig)? {
            let basic_types: Vec<_> = func_sig
                .results()
                .iter()
                .map(|&ty| type_to_llvm(intrinsics, ty))
                .collect::<Result<_, _>>()?;

            let sret_ty = context.struct_type(&basic_types, false);
            args_vec.push(err!(builder.build_alloca(sret_ty, "sret")).into());
        }

        args_vec.push(callee_vmctx_ptr.into());

        if config.enable_g0m0_opt {
            let wasm_module = &compile_info.module;
            let memory_styles = &compile_info.memory_styles;
            let callee_vmctx_ptr_value = callee_vmctx_ptr.into_pointer_value();
            // Load the value of global 0 (G0) and a pointer to the base of memory 0
            // (M0); the g0m0 calling convention passes both as extra arguments.

            let offsets = wasmer_vm::VMOffsets::new(8, wasm_module);

            let global_index = wasmer_types::GlobalIndex::from_u32(0);
            let global_type = wasm_module.globals[global_index];
            let global_value_type = global_type.ty;
            let global_mutability = global_type.mutability;

            let offset =
                if let Some(local_global_index) = wasm_module.local_global_index(global_index) {
                    offsets.vmctx_vmglobal_definition(local_global_index)
                } else {
                    offsets.vmctx_vmglobal_import(global_index)
                };
            let offset = intrinsics.i32_ty.const_int(offset.into(), false);
            let global_ptr = {
                let global_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let global_ptr_ptr =
                    err!(builder.build_bit_cast(global_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, global_ptr_ptr, "")).into_pointer_value()
            };

            let global_ptr = err!(builder.build_bit_cast(
                global_ptr,
                type_to_llvm_ptr(intrinsics, global_value_type)?,
                "",
            ))
            .into_pointer_value();

            let global_value = match global_mutability {
                wasmer_types::Mutability::Const => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        "g0",
                    ))
                }
                wasmer_types::Mutability::Var => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        ""
                    ))
                }
            };

            global_value.set_name("trmpl_g0");
            args_vec.push(global_value.into());

            // Load a pointer to memory 0's definition, then its base pointer.
            let memory_index = wasmer_types::MemoryIndex::from_u32(0);
            let memory_definition_ptr = if let Some(local_memory_index) =
                wasm_module.local_memory_index(memory_index)
            {
                let offset = offsets.vmctx_vmmemory_definition(local_memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                }
            } else {
                let offset = offsets.vmctx_vmmemory_import(memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let memory_definition_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let memory_definition_ptr_ptr =
                    err!(builder.build_bit_cast(memory_definition_ptr_ptr, intrinsics.ptr_ty, "",))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, memory_definition_ptr_ptr, ""))
                    .into_pointer_value()
            };
            let memory_definition_ptr =
                err!(builder.build_bit_cast(memory_definition_ptr, intrinsics.ptr_ty, "",))
                    .into_pointer_value();
            let base_ptr = err!(builder.build_struct_gep(
                intrinsics.vmmemory_definition_ty,
                memory_definition_ptr,
                intrinsics.vmmemory_definition_base_element,
                "",
            ));

            let memory_style = &memory_styles[memory_index];
            let base_ptr = if let MemoryStyle::Dynamic { .. } = memory_style {
                base_ptr
            } else {
                err!(builder.build_load(intrinsics.ptr_ty, base_ptr, "")).into_pointer_value()
            };

            base_ptr.set_name("trmpl_m0_base_ptr");

            args_vec.push(base_ptr.into());
        }

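        // Load each argument from the in/out values buffer (an array of i128
        // slots) and append it to the call's argument list.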
        for (i, param_ty) in func_sig.params().iter().enumerate() {
            let index = intrinsics.i32_ty.const_int(i as _, false);
            let item_pointer = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[index],
                    "arg_ptr"
                ))
            };

            let casted_type = type_to_llvm(intrinsics, *param_ty)?;
            let casted_pointer_type = type_to_llvm_ptr(intrinsics, *param_ty)?;

            let typed_item_pointer = err!(builder.build_pointer_cast(
                item_pointer,
                casted_pointer_type,
                "typed_arg_pointer"
            ));

            let arg = err!(builder.build_load(casted_type, typed_item_pointer, "arg"));
            args_vec.push(arg.into());
        }

        let call_site = err!(builder.build_indirect_call(
            llvm_func_type,
            func_ptr,
            args_vec.as_slice(),
            "call"
        ));
        for (attr, attr_loc) in func_attrs {
            call_site.add_attribute(*attr_loc, *attr);
        }

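        // Read the results of the call back according to the ABI and store
        // them into the same in/out values buffer.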
        let rets = self
            .abi
            .rets_from_call(&builder, intrinsics, call_site, func_sig)?;
        for (idx, v) in rets.into_iter().enumerate() {
            let ptr = unsafe {
                err!(builder.build_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[intrinsics.i32_ty.const_int(idx as u64, false)],
                    "",
                ))
            };
            let ptr = err!(builder.build_pointer_cast(
                ptr,
                self.ctx.ptr_type(AddressSpace::default()),
                ""
            ));
            err!(builder.build_store(ptr, v));
        }

        err!(builder.build_return(None));
        Ok(())
    }

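    /// Emits the body of a dynamic function trampoline: spills the native
    /// arguments into a stack buffer of i128 slots, calls the host function
    /// pointer reached through the vmctx with (vmctx, values), and returns the
    /// results according to the ABI (via sret or packed into registers).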
    fn generate_dynamic_trampoline<'ctx>(
        &self,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        // Allocate stack space for the params and results.
        let values = err!(builder.build_alloca(
            intrinsics.i128_ty.array_type(cmp::max(
                func_sig.params().len().try_into().unwrap(),
                func_sig.results().len().try_into().unwrap(),
            )),
            "",
        ));

        // Copy params to 'values'.
        let first_user_param = if self.abi.is_sret(func_sig)? { 2 } else { 1 };
        for i in 0..func_sig.params().len() {
            let ptr = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    values,
                    &[intrinsics.i32_ty.const_int(i.try_into().unwrap(), false)],
                    "args",
                ))
            };
            let ptr = err!(builder.build_bit_cast(
                ptr,
                type_to_llvm_ptr(intrinsics, func_sig.params()[i])?,
                ""
            ))
            .into_pointer_value();
            err!(
                builder.build_store(
                    ptr,
                    trampoline_func
                        .get_nth_param(i as u32 + first_user_param)
                        .unwrap(),
                )
            );
        }

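        // The dynamic callee is reached through the vmctx: load the function
        // pointer stored at the location it points to and call it with
        // (vmctx, values).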
        let callee_ptr_ty = intrinsics.void_ty.fn_type(
            &[
                intrinsics.ptr_ty.into(), // vmctx ptr
                intrinsics.ptr_ty.into(), // in/out values ptr
            ],
            false,
        );
        let vmctx = self.abi.get_vmctx_ptr_param(&trampoline_func);
        let callee_ty =
            err!(builder.build_bit_cast(vmctx, self.ctx.ptr_type(AddressSpace::default()), ""));
        let callee =
            err!(builder.build_load(intrinsics.ptr_ty, callee_ty.into_pointer_value(), ""))
                .into_pointer_value();

        let values_ptr = err!(builder.build_pointer_cast(values, intrinsics.ptr_ty, ""));
        err!(builder.build_indirect_call(
            callee_ptr_ty,
            callee,
            &[vmctx.into(), values_ptr.into()],
            "",
        ));

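        // Load the results back out of 'values' and return them, either through
        // the sret pointer or packed into registers, depending on the ABI.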
        if func_sig.results().is_empty() {
            err!(builder.build_return(None));
        } else {
            let results = func_sig
                .results()
                .iter()
                .enumerate()
                .map(|(idx, ty)| {
                    let ptr = unsafe {
                        err!(builder.build_gep(
                            intrinsics.i128_ty,
                            values,
                            &[intrinsics.i32_ty.const_int(idx.try_into().unwrap(), false)],
                            "",
                        ))
                    };
                    let ptr = err!(builder.build_pointer_cast(
                        ptr,
                        type_to_llvm_ptr(intrinsics, *ty)?,
                        ""
                    ));
                    err_nt!(builder.build_load(type_to_llvm(intrinsics, *ty)?, ptr, ""))
                })
                .collect::<Result<Vec<_>, CompileError>>()?;

            if self.abi.is_sret(func_sig)? {
                let sret = trampoline_func
                    .get_first_param()
                    .unwrap()
                    .into_pointer_value();

                let basic_types: Vec<_> = func_sig
                    .results()
                    .iter()
                    .map(|&ty| type_to_llvm(intrinsics, ty))
                    .collect::<Result<_, _>>()?;
                let mut struct_value = context.struct_type(&basic_types, false).get_undef();

                for (idx, value) in results.iter().enumerate() {
                    let value = err!(builder.build_bit_cast(
                        *value,
                        type_to_llvm(intrinsics, func_sig.results()[idx])?,
                        "",
                    ));
                    struct_value =
                        err!(builder.build_insert_value(struct_value, value, idx as u32, ""))
                            .into_struct_value();
                }
                err!(builder.build_store(sret, struct_value));
                err!(builder.build_return(None));
            } else {
                err!(
                    builder.build_return(Some(&self.abi.pack_values_for_register_return(
                        intrinsics,
                        &builder,
                        results.as_slice(),
                        &trampoline_func.get_type(),
                    )?))
                );
            }
        }

        Ok(())
    }
}