wasmer_compiler_llvm/trampoline/wasm.rs

use crate::{
    abi::{Abi, G0M0FunctionKind, get_abi},
    config::{CompiledKind, LLVM},
    error::{err, err_nt},
    object_file::{CompiledFunction, load_object_file},
    translator::intrinsics::{Intrinsics, type_to_llvm, type_to_llvm_ptr},
};
use inkwell::{
    AddressSpace, DLLStorageClass,
    attributes::{Attribute, AttributeLoc},
    context::Context,
    module::{Linkage, Module},
    passes::PassBuilderOptions,
    targets::{FileType, TargetMachine},
    types::FunctionType,
    values::{BasicMetadataValueEnum, FunctionValue},
};
use std::{cmp, convert::TryInto};
use target_lexicon::BinaryFormat;
use wasmer_compiler::types::{
    function::FunctionBody, module::CompileModuleInfo, relocation::RelocationTarget,
};
use wasmer_types::{CompileError, FunctionType as FuncType, LocalFunctionIndex};
use wasmer_vm::MemoryStyle;

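/// Generator for trampoline functions. Each trampoline is built for a single
/// function signature, in its own LLVM module, and placed in a dedicated
/// object-file section so it can be located again after compilation.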
pub struct FuncTrampoline {
    ctx: Context,
    target_machine: TargetMachine,
    abi: Box<dyn Abi>,
    binary_fmt: BinaryFormat,
    func_section: String,
}

const FUNCTION_SECTION_ELF: &str = "__TEXT,wasmer_trmpl"; // Needs to be between 1 and 16 chars
const FUNCTION_SECTION_MACHO: &str = "wasmer_trmpl"; // Needs to be between 1 and 16 chars

impl FuncTrampoline {
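    /// Create a new trampoline generator for the given target machine,
    /// picking the trampoline section name from the binary format.
    /// Formats other than ELF and Mach-O are rejected.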
    pub fn new(
        target_machine: TargetMachine,
        binary_fmt: BinaryFormat,
    ) -> Result<Self, CompileError> {
        let abi = get_abi(&target_machine);
        Ok(Self {
            ctx: Context::create(),
            target_machine,
            abi,
            func_section: match binary_fmt {
                BinaryFormat::Elf => FUNCTION_SECTION_ELF.to_string(),
                BinaryFormat::Macho => FUNCTION_SECTION_MACHO.to_string(),
                _ => {
                    return Err(CompileError::UnsupportedTarget(format!(
                        "Unsupported binary format: {binary_fmt:?}",
                    )));
                }
            },
            binary_fmt,
        })
    }

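    /// Build an LLVM module containing a single function-call trampoline for
    /// `ty`. The trampoline has the signature `(vmctx, callee_ptr, args_rets_ptr)`:
    /// it unpacks the arguments from the buffer, calls `callee_ptr`, and writes
    /// the results back into the same buffer.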
    pub fn trampoline_to_module(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        compile_info: &CompileModuleInfo,
    ) -> Result<Module, CompileError> {
        // The function type, used for the callbacks.
        let function = CompiledKind::FunctionCallTrampoline(ty.clone());
        let module = self.ctx.create_module("");
        let target_machine = &self.target_machine;
        let target_triple = target_machine.get_triple();
        let target_data = target_machine.get_target_data();
        module.set_triple(&target_triple);
        module.set_data_layout(&target_data.get_data_layout());
        let intrinsics = Intrinsics::declare(&module, &self.ctx, &target_data, &self.binary_fmt);

        let func_kind = if config.enable_g0m0_opt {
            Some(G0M0FunctionKind::Local)
        } else {
            None
        };

        let (callee_ty, callee_attrs) =
            self.abi
                .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, func_kind)?;
        let trampoline_ty = intrinsics.void_ty.fn_type(
            &[
                intrinsics.ptr_ty.into(), // vmctx ptr
                intrinsics.ptr_ty.into(), // callee function address
                intrinsics.ptr_ty.into(), // in/out values ptr
            ],
            false,
        );

        let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
        trampoline_func
            .as_global_value()
            .set_section(Some(&self.func_section));
        trampoline_func
            .as_global_value()
            .set_linkage(Linkage::DLLExport);
        trampoline_func
            .as_global_value()
            .set_dll_storage_class(DLLStorageClass::Export);
        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.uwtable);
        trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
        self.generate_trampoline(
            config,
            compile_info,
            trampoline_func,
            ty,
            callee_ty,
            &callee_attrs,
            &self.ctx,
            &intrinsics,
        )?;

        if let Some(ref callbacks) = config.callbacks {
            callbacks.preopt_ir(&function, &module);
        }

        let mut passes = vec![];

        if config.enable_verifier {
            passes.push("verify");
        }

        passes.push("instcombine");
        module
            .run_passes(
                passes.join(",").as_str(),
                target_machine,
                PassBuilderOptions::create(),
            )
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.postopt_ir(&function, &module);
        }

        // -- Uncomment to enable dumping intermediate LLVM objects
        //module
        //    .print_to_file(format!(
        //        "{}/obj_trmpl.ll",
        //        std::env!("LLVM_EH_TESTS_DUMP_DIR")
        //    ))
        //    .unwrap();
        Ok(module)
    }

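    /// Compile the function-call trampoline for `ty` down to a native
    /// `FunctionBody`: emit the module as an object file, load the compiled
    /// body back out, and verify that only unwind sections and no relocations
    /// were produced.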
    pub fn trampoline(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
        compile_info: &CompileModuleInfo,
    ) -> Result<FunctionBody, CompileError> {
        let module = self.trampoline_to_module(ty, config, name, compile_info)?;
        let function = CompiledKind::FunctionCallTrampoline(ty.clone());
        let target_machine = &self.target_machine;

        let memory_buffer = target_machine
            .write_to_memory_buffer(&module, FileType::Object)
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.obj_memory_buffer(&function, &memory_buffer);
            let asm_buffer = target_machine
                .write_to_memory_buffer(&module, FileType::Assembly)
                .unwrap();
            callbacks.asm_memory_buffer(&function, &asm_buffer);
        }

        let mem_buf_slice = memory_buffer.as_slice();
        let CompiledFunction {
            compiled_function,
            custom_sections,
            eh_frame_section_indices,
            mut compact_unwind_section_indices,
            ..
        } = load_object_file(
            mem_buf_slice,
            &self.func_section,
            RelocationTarget::LocalFunc(LocalFunctionIndex::from_u32(0)),
            |name: &str| {
                Err(CompileError::Codegen(format!(
                    "trampoline generation produced reference to unknown function {name}",
                )))
            },
            self.binary_fmt,
        )?;
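        // The only custom sections a trampoline may produce are unwind
        // sections (eh_frame or compact unwind); check that every custom
        // section index is accounted for by one of them.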
        let mut all_sections_are_eh_sections = true;
        let mut unwind_section_indices = eh_frame_section_indices;
        unwind_section_indices.append(&mut compact_unwind_section_indices);
        if unwind_section_indices.len() != custom_sections.len() {
            all_sections_are_eh_sections = false;
        } else {
            unwind_section_indices.sort_unstable();
            for (idx, section_idx) in unwind_section_indices.iter().enumerate() {
                if idx as u32 != section_idx.as_u32() {
                    all_sections_are_eh_sections = false;
                    break;
                }
            }
        }
        if !all_sections_are_eh_sections {
            return Err(CompileError::Codegen(
                "trampoline generation produced non-eh custom sections".into(),
            ));
        }
        if !compiled_function.relocations.is_empty() {
            return Err(CompileError::Codegen(
                "trampoline generation produced relocations".into(),
            ));
        }
        // Ignore CompiledFunctionFrameInfo. Extra frame info isn't a problem.

        Ok(FunctionBody {
            body: compiled_function.body.body,
            unwind_info: compiled_function.body.unwind_info,
        })
    }

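    /// Build an LLVM module containing a single dynamic-function trampoline
    /// for `ty`: a function with the wasm calling convention that boxes its
    /// arguments into a buffer and calls the host function stored in the vmctx.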
    pub fn dynamic_trampoline_to_module(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
    ) -> Result<Module, CompileError> {
        // The function type, used for the callbacks
        let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
        let module = self.ctx.create_module("");
        let target_machine = &self.target_machine;
        let target_data = target_machine.get_target_data();
        let target_triple = target_machine.get_triple();
        module.set_triple(&target_triple);
        module.set_data_layout(&target_data.get_data_layout());
        let intrinsics = Intrinsics::declare(&module, &self.ctx, &target_data, &self.binary_fmt);

        let (trampoline_ty, trampoline_attrs) =
            self.abi
                .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, None)?;
        let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
        for (attr, attr_loc) in trampoline_attrs {
            trampoline_func.add_attribute(attr_loc, attr);
        }
        trampoline_func
            .as_global_value()
            .set_section(Some(&self.func_section));
        trampoline_func
            .as_global_value()
            .set_linkage(Linkage::DLLExport);
        trampoline_func
            .as_global_value()
            .set_dll_storage_class(DLLStorageClass::Export);
        self.generate_dynamic_trampoline(trampoline_func, ty, &self.ctx, &intrinsics)?;

        if let Some(ref callbacks) = config.callbacks {
            callbacks.preopt_ir(&function, &module);
        }

        let mut passes = vec![];

        if config.enable_verifier {
            passes.push("verify");
        }

        passes.push("early-cse");
        module
            .run_passes(
                passes.join(",").as_str(),
                target_machine,
                PassBuilderOptions::create(),
            )
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.postopt_ir(&function, &module);
        }

        Ok(module)
    }
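
    /// Compile the dynamic-function trampoline for `ty` to a native
    /// `FunctionBody`, applying the same object-loading and unwind-section
    /// checks as `Self::trampoline`.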
    pub fn dynamic_trampoline(
        &self,
        ty: &FuncType,
        config: &LLVM,
        name: &str,
    ) -> Result<FunctionBody, CompileError> {
        let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
        let target_machine = &self.target_machine;

        let module = self.dynamic_trampoline_to_module(ty, config, name)?;

        let memory_buffer = target_machine
            .write_to_memory_buffer(&module, FileType::Object)
            .unwrap();

        if let Some(ref callbacks) = config.callbacks {
            callbacks.obj_memory_buffer(&function, &memory_buffer);
            let asm_buffer = target_machine
                .write_to_memory_buffer(&module, FileType::Assembly)
                .unwrap();
            callbacks.asm_memory_buffer(&function, &asm_buffer);
        }

        let mem_buf_slice = memory_buffer.as_slice();
        let CompiledFunction {
            compiled_function,
            custom_sections,
            eh_frame_section_indices,
            mut compact_unwind_section_indices,
            ..
        } = load_object_file(
            mem_buf_slice,
            &self.func_section,
            RelocationTarget::LocalFunc(LocalFunctionIndex::from_u32(0)),
            |name: &str| {
                Err(CompileError::Codegen(format!(
                    "trampoline generation produced reference to unknown function {name}",
                )))
            },
            self.binary_fmt,
        )?;
        let mut all_sections_are_eh_sections = true;
        let mut unwind_section_indices = eh_frame_section_indices;
        unwind_section_indices.append(&mut compact_unwind_section_indices);

        if unwind_section_indices.len() != custom_sections.len() {
            all_sections_are_eh_sections = false;
        } else {
            unwind_section_indices.sort_unstable();
            for (idx, section_idx) in unwind_section_indices.iter().enumerate() {
                if idx as u32 != section_idx.as_u32() {
                    all_sections_are_eh_sections = false;
                    break;
                }
            }
        }
        if !all_sections_are_eh_sections {
            return Err(CompileError::Codegen(
                "trampoline generation produced non-eh custom sections".into(),
            ));
        }
        if !compiled_function.relocations.is_empty() {
            return Err(CompileError::Codegen(
                "trampoline generation produced relocations".into(),
            ));
        }
        // Ignore CompiledFunctionFrameInfo. Extra frame info isn't a problem.

        Ok(FunctionBody {
            body: compiled_function.body.body,
            unwind_info: compiled_function.body.unwind_info,
        })
    }

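    /// Emit the body of a function-call trampoline: unpack the arguments from
    /// the i128-slot args/rets buffer, perform the indirect call to the callee
    /// (passing G0/M0 as extra arguments when that optimization is enabled),
    /// and store the returned values back into the buffer.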
    #[allow(clippy::too_many_arguments)]
    fn generate_trampoline<'ctx>(
        &self,
        config: &LLVM,
        compile_info: &CompileModuleInfo,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        llvm_func_type: FunctionType,
        func_attrs: &[(Attribute, AttributeLoc)],
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        let (callee_vmctx_ptr, func_ptr, args_rets_ptr) =
            match *trampoline_func.get_params().as_slice() {
                [callee_vmctx_ptr, func_ptr, args_rets_ptr] => (
                    callee_vmctx_ptr,
                    func_ptr.into_pointer_value(),
                    args_rets_ptr.into_pointer_value(),
                ),
                _ => {
                    return Err(CompileError::Codegen(
                        "trampoline function unimplemented".to_string(),
                    ));
                }
            };

        let mut args_vec: Vec<BasicMetadataValueEnum> =
            Vec::with_capacity(if config.enable_g0m0_opt {
                func_sig.params().len() + 3
            } else {
                func_sig.params().len() + 1
            });

        if self.abi.is_sret(func_sig)? {
            let basic_types: Vec<_> = func_sig
                .results()
                .iter()
                .map(|&ty| type_to_llvm(intrinsics, ty))
                .collect::<Result<_, _>>()?;

            let sret_ty = context.struct_type(&basic_types, false);
            args_vec.push(err!(builder.build_alloca(sret_ty, "sret")).into());
        }

        args_vec.push(callee_vmctx_ptr.into());

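        // When the G0/M0 optimization is enabled, local functions additionally
        // receive the value of global 0 and the base pointer of memory 0 as
        // parameters, so resolve both from the vmctx and append them here.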
        if config.enable_g0m0_opt {
            let wasm_module = &compile_info.module;
            let memory_styles = &compile_info.memory_styles;
            let callee_vmctx_ptr_value = callee_vmctx_ptr.into_pointer_value();
            // get value of G0, get a pointer to M0's base

            let offsets = wasmer_vm::VMOffsets::new(8, wasm_module);

            let global_index = wasmer_types::GlobalIndex::from_u32(0);
            let global_type = wasm_module.globals[global_index];
            let global_value_type = global_type.ty;
            let global_mutability = global_type.mutability;

            let offset =
                if let Some(local_global_index) = wasm_module.local_global_index(global_index) {
                    offsets.vmctx_vmglobal_definition(local_global_index)
                } else {
                    offsets.vmctx_vmglobal_import(global_index)
                };
            let offset = intrinsics.i32_ty.const_int(offset.into(), false);
            let global_ptr = {
                let global_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let global_ptr_ptr =
                    err!(builder.build_bit_cast(global_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, global_ptr_ptr, "")).into_pointer_value()
            };

            let global_ptr = err!(builder.build_bit_cast(
                global_ptr,
                type_to_llvm_ptr(intrinsics, global_value_type)?,
                "",
            ))
            .into_pointer_value();

            let global_value = match global_mutability {
                wasmer_types::Mutability::Const => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        "g0",
                    ))
                }
                wasmer_types::Mutability::Var => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        ""
                    ))
                }
            };

            global_value.set_name("trmpl_g0");
            args_vec.push(global_value.into());

            // load mem
            let memory_index = wasmer_types::MemoryIndex::from_u32(0);
            let memory_definition_ptr = if let Some(local_memory_index) =
                wasm_module.local_memory_index(memory_index)
            {
                let offset = offsets.vmctx_vmmemory_definition(local_memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                }
            } else {
                let offset = offsets.vmctx_vmmemory_import(memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let memory_definition_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let memory_definition_ptr_ptr =
                    err!(builder.build_bit_cast(memory_definition_ptr_ptr, intrinsics.ptr_ty, "",))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, memory_definition_ptr_ptr, ""))
                    .into_pointer_value()
            };
            let memory_definition_ptr =
                err!(builder.build_bit_cast(memory_definition_ptr, intrinsics.ptr_ty, "",))
                    .into_pointer_value();
            let base_ptr = err!(builder.build_struct_gep(
                intrinsics.vmmemory_definition_ty,
                memory_definition_ptr,
                intrinsics.vmmemory_definition_base_element,
                "",
            ));

            let memory_style = &memory_styles[memory_index];
            let base_ptr = if let MemoryStyle::Dynamic { .. } = memory_style {
                base_ptr
            } else {
                err!(builder.build_load(intrinsics.ptr_ty, base_ptr, "")).into_pointer_value()
            };

            base_ptr.set_name("trmpl_m0_base_ptr");

            args_vec.push(base_ptr.into());
        }

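        // Each argument lives in its own 16-byte (i128) slot of the args/rets
        // buffer; load it with its real type and add it to the call arguments.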
        for (i, param_ty) in func_sig.params().iter().enumerate() {
            let index = intrinsics.i32_ty.const_int(i as _, false);
            let item_pointer = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[index],
                    "arg_ptr"
                ))
            };

            let casted_type = type_to_llvm(intrinsics, *param_ty)?;
            let casted_pointer_type = type_to_llvm_ptr(intrinsics, *param_ty)?;

            let typed_item_pointer = err!(builder.build_pointer_cast(
                item_pointer,
                casted_pointer_type,
                "typed_arg_pointer"
            ));

            let arg = err!(builder.build_load(casted_type, typed_item_pointer, "arg"));
            args_vec.push(arg.into());
        }

        let call_site = err!(builder.build_indirect_call(
            llvm_func_type,
            func_ptr,
            args_vec.as_slice(),
            "call"
        ));
        for (attr, attr_loc) in func_attrs {
            call_site.add_attribute(*attr_loc, *attr);
        }

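        // Extract the callee's results (from registers or the sret buffer, as
        // the ABI dictates) and write them back into the same i128-slot buffer
        // the arguments came from.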
        let rets = self
            .abi
            .rets_from_call(&builder, intrinsics, call_site, func_sig)?;
        for (idx, v) in rets.into_iter().enumerate() {
            let ptr = unsafe {
                err!(builder.build_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[intrinsics.i32_ty.const_int(idx as u64, false)],
                    "",
                ))
            };
            let ptr = err!(builder.build_pointer_cast(
                ptr,
                self.ctx.ptr_type(AddressSpace::default()),
                ""
            ));
            err!(builder.build_store(ptr, v));
        }

        err!(builder.build_return(None));
        Ok(())
    }

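    /// Emit the body of a dynamic-function trampoline: spill the incoming
    /// arguments into an i128-slot buffer on the stack, call the host function
    /// whose address is stored in the vmctx, then load the results back and
    /// return them according to the ABI (sret buffer or registers).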
    fn generate_dynamic_trampoline<'ctx>(
        &self,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        // Allocate stack space for the params and results.
        let values = err!(builder.build_alloca(
            intrinsics.i128_ty.array_type(cmp::max(
                func_sig.params().len().try_into().unwrap(),
                func_sig.results().len().try_into().unwrap(),
            )),
            "",
        ));

        // Copy params to 'values'.
        let first_user_param = if self.abi.is_sret(func_sig)? { 2 } else { 1 };
        for i in 0..func_sig.params().len() {
            let ptr = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    values,
                    &[intrinsics.i32_ty.const_int(i.try_into().unwrap(), false)],
                    "args",
                ))
            };
            let ptr = err!(builder.build_bit_cast(
                ptr,
                type_to_llvm_ptr(intrinsics, func_sig.params()[i])?,
                ""
            ))
            .into_pointer_value();
            err!(
                builder.build_store(
                    ptr,
                    trampoline_func
                        .get_nth_param(i as u32 + first_user_param)
                        .unwrap(),
                )
            );
        }

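        // The host callee has the signature `(vmctx_ptr, values_ptr) -> ()`;
        // its address is loaded from the pointer stored at the start of the
        // context passed in as the vmctx parameter.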
        let callee_ptr_ty = intrinsics.void_ty.fn_type(
            &[
                intrinsics.ptr_ty.into(), // vmctx ptr
                intrinsics.ptr_ty.into(), // in/out values ptr
            ],
            false,
        );
        let vmctx = self.abi.get_vmctx_ptr_param(&trampoline_func);
        let callee_ty =
            err!(builder.build_bit_cast(vmctx, self.ctx.ptr_type(AddressSpace::default()), ""));
        let callee =
            err!(builder.build_load(intrinsics.ptr_ty, callee_ty.into_pointer_value(), ""))
                .into_pointer_value();

        let values_ptr = err!(builder.build_pointer_cast(values, intrinsics.ptr_ty, ""));
        err!(builder.build_indirect_call(
            callee_ptr_ty,
            callee,
            &[vmctx.into(), values_ptr.into()],
            "",
        ));

        if func_sig.results().is_empty() {
            err!(builder.build_return(None));
        } else {
            let results = func_sig
                .results()
                .iter()
                .enumerate()
                .map(|(idx, ty)| {
                    let ptr = unsafe {
                        err!(builder.build_gep(
                            intrinsics.i128_ty,
                            values,
                            &[intrinsics.i32_ty.const_int(idx.try_into().unwrap(), false)],
                            "",
                        ))
                    };
                    let ptr = err!(builder.build_pointer_cast(
                        ptr,
                        type_to_llvm_ptr(intrinsics, *ty)?,
                        ""
                    ));
                    err_nt!(builder.build_load(type_to_llvm(intrinsics, *ty)?, ptr, ""))
                })
                .collect::<Result<Vec<_>, CompileError>>()?;

            if self.abi.is_sret(func_sig)? {
                let sret = trampoline_func
                    .get_first_param()
                    .unwrap()
                    .into_pointer_value();

                let basic_types: Vec<_> = func_sig
                    .results()
                    .iter()
                    .map(|&ty| type_to_llvm(intrinsics, ty))
                    .collect::<Result<_, _>>()?;
                let mut struct_value = context.struct_type(&basic_types, false).get_undef();

                for (idx, value) in results.iter().enumerate() {
                    let value = err!(builder.build_bit_cast(
                        *value,
                        type_to_llvm(intrinsics, func_sig.results()[idx])?,
                        "",
                    ));
                    struct_value =
                        err!(builder.build_insert_value(struct_value, value, idx as u32, ""))
                            .into_struct_value();
                }
                err!(builder.build_store(sret, struct_value));
                err!(builder.build_return(None));
            } else {
                err!(
                    builder.build_return(Some(&self.abi.pack_values_for_register_return(
                        intrinsics,
                        &builder,
                        results.as_slice(),
                        &trampoline_func.get_type(),
                    )?))
                );
            }
        }

        Ok(())
    }
}