1#![allow(unused)]
3
4use crate::{
5 abi::{Abi, G0M0FunctionKind, get_abi},
6 config::LLVM,
7 error::{err, err_nt},
8 object_file::{CompiledFunction, load_object_file},
9 translator::intrinsics::{Intrinsics, type_to_llvm, type_to_llvm_ptr},
10};
11use inkwell::{
12 AddressSpace, DLLStorageClass,
13 attributes::{Attribute, AttributeLoc},
14 context::Context,
15 module::{Linkage, Module},
16 passes::PassBuilderOptions,
17 targets::{FileType, TargetMachine},
18 types::FunctionType,
19 values::{BasicMetadataValueEnum, FunctionValue},
20};
21use std::{cmp, convert::TryInto};
22use target_lexicon::BinaryFormat;
23use wasmer_compiler::{
24 misc::CompiledKind,
25 types::{
26 function::FunctionBody,
27 module::CompileModuleInfo,
28 relocation::{Relocation, RelocationTarget},
29 section::{CustomSection, CustomSectionProtection, SectionBody, SectionIndex},
30 },
31};
32use wasmer_types::{
33 CompileError, FunctionIndex, FunctionType as FuncType, LocalFunctionIndex, entity::PrimaryMap,
34};
35use wasmer_vm::MemoryStyle;
36
/// Generator for trampoline functions (host ⇄ wasm call bridges), compiled
/// as standalone LLVM modules for a single target machine.
pub struct FuncTrampoline {
    // Owning LLVM context; every module this generator creates lives in it.
    ctx: Context,
    // Target machine used for codegen (triple, data layout, object emission).
    target_machine: TargetMachine,
    // Calling-convention lowering selected for the target.
    abi: Box<dyn Abi>,
    // Object format of the emitted artifact (ELF or Mach-O — see `new`).
    binary_fmt: BinaryFormat,
    // Name of the object-file section trampoline code is placed in.
    func_section: String,
}
44
// Object-file section names for generated trampolines. Mach-O section names
// are limited to 16 characters; the ELF constant carries a segment-style
// prefix in addition to the section name proper.
const FUNCTION_SECTION_ELF: &str = "__TEXT,wasmer_trmpl";
const FUNCTION_SECTION_MACHO: &str = "wasmer_trmpl";

impl FuncTrampoline {
    /// Create a trampoline generator for `target_machine` emitting objects
    /// in `binary_fmt`. Only ELF and Mach-O are supported; any other format
    /// yields `CompileError::UnsupportedTarget`.
    pub fn new(
        target_machine: TargetMachine,
        binary_fmt: BinaryFormat,
    ) -> Result<Self, CompileError> {
        // Pick the section name up front so an unsupported format bails out
        // before any other state is built.
        let func_section = match binary_fmt {
            BinaryFormat::Elf => FUNCTION_SECTION_ELF,
            BinaryFormat::Macho => FUNCTION_SECTION_MACHO,
            _ => {
                return Err(CompileError::UnsupportedTarget(format!(
                    "Unsupported binary format: {binary_fmt:?}",
                )));
            }
        }
        .to_string();

        let abi = get_abi(&target_machine);
        Ok(Self {
            ctx: Context::create(),
            target_machine,
            abi,
            func_section,
            binary_fmt,
        })
    }
70
71 pub fn trampoline_to_module(
72 &self,
73 ty: &FuncType,
74 config: &LLVM,
75 name: &str,
76 compile_info: &CompileModuleInfo,
77 ) -> Result<Module<'_>, CompileError> {
78 let function = CompiledKind::FunctionCallTrampoline(ty.clone());
80 let module = self.ctx.create_module("");
81 let target_machine = &self.target_machine;
82 let target_triple = target_machine.get_triple();
83 let target_data = target_machine.get_target_data();
84 module.set_triple(&target_triple);
85 module.set_data_layout(&target_data.get_data_layout());
86 let intrinsics = Intrinsics::declare(&module, &self.ctx, &target_data, &self.binary_fmt);
87
88 let func_kind = if config.enable_g0m0_opt {
89 Some(G0M0FunctionKind::Local)
90 } else {
91 None
92 };
93
94 let (callee_ty, callee_attrs) =
95 self.abi
96 .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, func_kind)?;
97 let trampoline_ty = intrinsics.void_ty.fn_type(
98 &[
99 intrinsics.ptr_ty.into(), intrinsics.ptr_ty.into(), intrinsics.ptr_ty.into(), ],
103 false,
104 );
105
106 let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
107 trampoline_func
108 .as_global_value()
109 .set_section(Some(&self.func_section));
110 trampoline_func
111 .as_global_value()
112 .set_linkage(Linkage::DLLExport);
113 trampoline_func
114 .as_global_value()
115 .set_dll_storage_class(DLLStorageClass::Export);
116 trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.uwtable);
117 trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
118 self.generate_trampoline(
119 config,
120 compile_info,
121 trampoline_func,
122 ty,
123 callee_ty,
124 &callee_attrs,
125 &self.ctx,
126 &intrinsics,
127 )?;
128
129 if let Some(ref callbacks) = config.callbacks {
130 callbacks.preopt_ir(&function, &module);
131 }
132
133 let mut passes = vec![];
134
135 if config.enable_verifier {
136 passes.push("verify");
137 }
138
139 passes.push("instcombine");
140 module
141 .run_passes(
142 passes.join(",").as_str(),
143 target_machine,
144 PassBuilderOptions::create(),
145 )
146 .unwrap();
147
148 if let Some(ref callbacks) = config.callbacks {
149 callbacks.postopt_ir(&function, &module);
150 }
151 Ok(module)
152 }
153
154 pub fn trampoline(
155 &self,
156 ty: &FuncType,
157 config: &LLVM,
158 name: &str,
159 compile_info: &CompileModuleInfo,
160 ) -> Result<FunctionBody, CompileError> {
161 let module = self.trampoline_to_module(ty, config, name, compile_info)?;
162 let function = CompiledKind::FunctionCallTrampoline(ty.clone());
163 let target_machine = &self.target_machine;
164
165 let memory_buffer = target_machine
166 .write_to_memory_buffer(&module, FileType::Object)
167 .unwrap();
168
169 if let Some(ref callbacks) = config.callbacks {
170 callbacks.obj_memory_buffer(&function, &memory_buffer);
171 let asm_buffer = target_machine
172 .write_to_memory_buffer(&module, FileType::Assembly)
173 .unwrap();
174 callbacks.asm_memory_buffer(&function, &asm_buffer);
175 }
176
177 let mem_buf_slice = memory_buffer.as_slice();
178
179 let dummy_reloc_target =
186 RelocationTarget::DynamicTrampoline(FunctionIndex::from_u32(u32::MAX - 1));
187
188 let CompiledFunction {
192 compiled_function,
193 custom_sections,
194 eh_frame_section_indices,
195 mut compact_unwind_section_indices,
196 ..
197 } = load_object_file(
198 mem_buf_slice,
199 &self.func_section,
200 dummy_reloc_target,
201 |name: &str| {
202 Err(CompileError::Codegen(format!(
203 "trampoline generation produced reference to unknown function {name}",
204 )))
205 },
206 self.binary_fmt,
207 )?;
208 let mut all_sections_are_eh_sections = true;
209 let mut unwind_section_indices = eh_frame_section_indices;
210 unwind_section_indices.append(&mut compact_unwind_section_indices);
211 if unwind_section_indices.len() != custom_sections.len() {
212 all_sections_are_eh_sections = false;
213 } else {
214 unwind_section_indices.sort_unstable();
215 for (idx, section_idx) in unwind_section_indices.iter().enumerate() {
216 if idx as u32 != section_idx.as_u32() {
217 all_sections_are_eh_sections = false;
218 break;
219 }
220 }
221 }
222 if !all_sections_are_eh_sections {
223 return Err(CompileError::Codegen(
224 "trampoline generation produced non-eh custom sections".into(),
225 ));
226 }
227 if !compiled_function.relocations.is_empty() {
228 return Err(CompileError::Codegen(
229 "trampoline generation produced relocations".into(),
230 ));
231 }
232 Ok(FunctionBody {
235 body: compiled_function.body.body,
236 unwind_info: compiled_function.body.unwind_info,
237 })
238 }
239
240 pub fn dynamic_trampoline_to_module(
241 &self,
242 ty: &FuncType,
243 config: &LLVM,
244 name: &str,
245 ) -> Result<Module<'_>, CompileError> {
246 let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
248 let module = self.ctx.create_module("");
249 let target_machine = &self.target_machine;
250 let target_data = target_machine.get_target_data();
251 let target_triple = target_machine.get_triple();
252 module.set_triple(&target_triple);
253 module.set_data_layout(&target_data.get_data_layout());
254 let intrinsics = Intrinsics::declare(&module, &self.ctx, &target_data, &self.binary_fmt);
255
256 let (trampoline_ty, trampoline_attrs) =
257 self.abi
258 .func_type_to_llvm(&self.ctx, &intrinsics, None, ty, None)?;
259 let trampoline_func = module.add_function(name, trampoline_ty, Some(Linkage::External));
260 trampoline_func.set_personality_function(intrinsics.personality);
261 trampoline_func.add_attribute(AttributeLoc::Function, intrinsics.frame_pointer);
262 for (attr, attr_loc) in trampoline_attrs {
263 trampoline_func.add_attribute(attr_loc, attr);
264 }
265 trampoline_func
266 .as_global_value()
267 .set_section(Some(&self.func_section));
268 trampoline_func
269 .as_global_value()
270 .set_linkage(Linkage::DLLExport);
271 trampoline_func
272 .as_global_value()
273 .set_dll_storage_class(DLLStorageClass::Export);
274 self.generate_dynamic_trampoline(trampoline_func, ty, &self.ctx, &intrinsics)?;
275
276 if let Some(ref callbacks) = config.callbacks {
277 callbacks.preopt_ir(&function, &module);
278 }
279
280 let mut passes = vec![];
281
282 if config.enable_verifier {
283 passes.push("verify");
284 }
285
286 passes.push("early-cse");
287 module
288 .run_passes(
289 passes.join(",").as_str(),
290 target_machine,
291 PassBuilderOptions::create(),
292 )
293 .unwrap();
294
295 if let Some(ref callbacks) = config.callbacks {
296 callbacks.postopt_ir(&function, &module);
297 }
298
299 Ok(module)
300 }
301
302 #[allow(clippy::too_many_arguments)]
303 pub fn dynamic_trampoline(
304 &self,
305 ty: &FuncType,
306 config: &LLVM,
307 name: &str,
308 dynamic_trampoline_index: u32,
309 final_module_custom_sections: &mut PrimaryMap<SectionIndex, CustomSection>,
310 eh_frame_section_bytes: &mut Vec<u8>,
311 eh_frame_section_relocations: &mut Vec<Relocation>,
312 compact_unwind_section_bytes: &mut Vec<u8>,
313 compact_unwind_section_relocations: &mut Vec<Relocation>,
314 ) -> Result<FunctionBody, CompileError> {
315 let function = CompiledKind::DynamicFunctionTrampoline(ty.clone());
316 let target_machine = &self.target_machine;
317
318 let module = self.dynamic_trampoline_to_module(ty, config, name)?;
319
320 let memory_buffer = target_machine
321 .write_to_memory_buffer(&module, FileType::Object)
322 .unwrap();
323
324 if let Some(ref callbacks) = config.callbacks {
325 callbacks.obj_memory_buffer(&function, &memory_buffer);
326 let asm_buffer = target_machine
327 .write_to_memory_buffer(&module, FileType::Assembly)
328 .unwrap();
329 callbacks.asm_memory_buffer(&function, &asm_buffer)
330 }
331
332 let mem_buf_slice = memory_buffer.as_slice();
333 let CompiledFunction {
334 compiled_function,
335 custom_sections,
336 eh_frame_section_indices,
337 compact_unwind_section_indices,
338 gcc_except_table_section_indices,
339 data_dw_ref_personality_section_indices,
340 } = load_object_file(
341 mem_buf_slice,
342 &self.func_section,
343 RelocationTarget::DynamicTrampoline(FunctionIndex::from_u32(dynamic_trampoline_index)),
344 |name: &str| {
345 Err(CompileError::Codegen(format!(
346 "trampoline generation produced reference to unknown function {name}",
347 )))
348 },
349 self.binary_fmt,
350 )?;
351
352 if !compiled_function.relocations.is_empty() {
353 return Err(CompileError::Codegen(
354 "trampoline generation produced relocations".into(),
355 ));
356 }
357 {
362 let first_section = final_module_custom_sections.len() as u32;
363 for (section_index, mut custom_section) in custom_sections.into_iter() {
364 for reloc in &mut custom_section.relocations {
365 if let RelocationTarget::CustomSection(index) = reloc.reloc_target {
366 reloc.reloc_target = RelocationTarget::CustomSection(
367 SectionIndex::from_u32(first_section + index.as_u32()),
368 )
369 }
370
371 if reloc.kind.needs_got() {
372 return Err(CompileError::Codegen(
373 "trampoline generation produced GOT relocation".into(),
374 ));
375 }
376 }
377
378 if eh_frame_section_indices.contains(§ion_index) {
379 let offset = eh_frame_section_bytes.len() as u32;
380 for reloc in &mut custom_section.relocations {
381 reloc.offset += offset;
382 }
383 eh_frame_section_bytes.extend_from_slice(custom_section.bytes.as_slice());
384 eh_frame_section_bytes.extend_from_slice(&[0, 0, 0, 0]);
386 eh_frame_section_relocations.extend(custom_section.relocations);
387 final_module_custom_sections.push(CustomSection {
389 protection: CustomSectionProtection::Read,
390 alignment: None,
391 bytes: SectionBody::new_with_vec(vec![]),
392 relocations: vec![],
393 });
394 } else if compact_unwind_section_indices.contains(§ion_index) {
395 let offset = compact_unwind_section_bytes.len() as u32;
396 for reloc in &mut custom_section.relocations {
397 reloc.offset += offset;
398 }
399 compact_unwind_section_bytes.extend_from_slice(custom_section.bytes.as_slice());
400 compact_unwind_section_relocations.extend(custom_section.relocations);
401 final_module_custom_sections.push(CustomSection {
403 protection: CustomSectionProtection::Read,
404 alignment: None,
405 bytes: SectionBody::new_with_vec(vec![]),
406 relocations: vec![],
407 });
408 } else if gcc_except_table_section_indices.contains(§ion_index)
409 || data_dw_ref_personality_section_indices.contains(§ion_index)
410 {
411 final_module_custom_sections.push(custom_section);
412 } else {
413 return Err(CompileError::Codegen(
414 "trampoline generation produced non-eh custom sections".into(),
415 ));
416 }
417 }
418 }
419
420 Ok(FunctionBody {
421 body: compiled_function.body.body,
422 unwind_info: compiled_function.body.unwind_info,
423 })
424 }
425
    /// Emit the body of a function-call trampoline into `trampoline_func`.
    ///
    /// The trampoline receives `(vmctx, callee fn ptr, args/rets buffer)`,
    /// loads each wasm argument out of the buffer's i128 slots, calls the
    /// callee with the target ABI, then stores the returned values back into
    /// the same slots.
    #[allow(clippy::too_many_arguments)]
    fn generate_trampoline<'ctx>(
        &self,
        config: &LLVM,
        compile_info: &CompileModuleInfo,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        llvm_func_type: FunctionType,
        func_attrs: &[(Attribute, AttributeLoc)],
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        // The trampoline's own signature is exactly three pointer params;
        // anything else is a bug in trampoline_to_module.
        let (callee_vmctx_ptr, func_ptr, args_rets_ptr) =
            match *trampoline_func.get_params().as_slice() {
                [callee_vmctx_ptr, func_ptr, args_rets_ptr] => (
                    callee_vmctx_ptr,
                    func_ptr.into_pointer_value(),
                    args_rets_ptr.into_pointer_value(),
                ),
                _ => {
                    return Err(CompileError::Codegen(
                        "trampoline function unimplemented".to_string(),
                    ));
                }
            };

        // Capacity: params + vmctx (+ g0 and m0 base when that opt is on;
        // a possible sret slot is covered by the +3).
        let mut args_vec: Vec<BasicMetadataValueEnum> =
            Vec::with_capacity(if config.enable_g0m0_opt {
                func_sig.params().len() + 3
            } else {
                func_sig.params().len() + 1
            });

        // Struct-return ABI: allocate the sret buffer and pass it first.
        if self.abi.is_sret(func_sig)? {
            let basic_types: Vec<_> = func_sig
                .results()
                .iter()
                .map(|&ty| type_to_llvm(intrinsics, ty))
                .collect::<Result<_, _>>()?;

            let sret_ty = context.struct_type(&basic_types, false);
            args_vec.push(err!(builder.build_alloca(sret_ty, "sret")).into());
        }

        args_vec.push(callee_vmctx_ptr.into());

        if config.enable_g0m0_opt {
            // Load global 0's value and memory 0's base pointer out of the
            // vmctx so they can be passed to the callee as extra args.
            let wasm_module = &compile_info.module;
            let memory_styles = &compile_info.memory_styles;
            let callee_vmctx_ptr_value = callee_vmctx_ptr.into_pointer_value();
            let offsets = wasmer_vm::VMOffsets::new(8, wasm_module);

            let global_index = wasmer_types::GlobalIndex::from_u32(0);
            let global_type = wasm_module.globals[global_index];
            let global_value_type = global_type.ty;
            let global_mutability = global_type.mutability;

            // Local globals use the definition offset; imported globals the
            // import offset.
            let offset =
                if let Some(local_global_index) = wasm_module.local_global_index(global_index) {
                    offsets.vmctx_vmglobal_definition(local_global_index)
                } else {
                    offsets.vmctx_vmglobal_import(global_index)
                };
            let offset = intrinsics.i32_ty.const_int(offset.into(), false);
            let global_ptr = {
                let global_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let global_ptr_ptr =
                    err!(builder.build_bit_cast(global_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, global_ptr_ptr, "")).into_pointer_value()
            };

            let global_ptr = err!(builder.build_bit_cast(
                global_ptr,
                type_to_llvm_ptr(intrinsics, global_value_type)?,
                "",
            ))
            .into_pointer_value();

            // Same load either way; only the value name differs by mutability.
            let global_value = match global_mutability {
                wasmer_types::Mutability::Const => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        "g0",
                    ))
                }
                wasmer_types::Mutability::Var => {
                    err!(builder.build_load(
                        type_to_llvm(intrinsics, global_value_type)?,
                        global_ptr,
                        ""
                    ))
                }
            };

            global_value.set_name("trmpl_g0");
            args_vec.push(global_value.into());

            // Memory 0: local memories have their definition inline in the
            // vmctx; imported ones are reached through a stored pointer.
            let memory_index = wasmer_types::MemoryIndex::from_u32(0);
            let memory_definition_ptr = if let Some(local_memory_index) =
                wasm_module.local_memory_index(memory_index)
            {
                let offset = offsets.vmctx_vmmemory_definition(local_memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                }
            } else {
                let offset = offsets.vmctx_vmmemory_import(memory_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let memory_definition_ptr_ptr = unsafe {
                    err!(builder.build_gep(intrinsics.i8_ty, callee_vmctx_ptr_value, &[offset], ""))
                };
                let memory_definition_ptr_ptr =
                    err!(builder.build_bit_cast(memory_definition_ptr_ptr, intrinsics.ptr_ty, "",))
                        .into_pointer_value();

                err!(builder.build_load(intrinsics.ptr_ty, memory_definition_ptr_ptr, ""))
                    .into_pointer_value()
            };
            let memory_definition_ptr =
                err!(builder.build_bit_cast(memory_definition_ptr, intrinsics.ptr_ty, "",))
                    .into_pointer_value();
            let base_ptr = err!(builder.build_struct_gep(
                intrinsics.vmmemory_definition_ty,
                memory_definition_ptr,
                intrinsics.vmmemory_definition_base_element,
                "",
            ));

            // NOTE(review): for dynamic memories the *address of* the base
            // field is passed, for other styles the loaded base pointer —
            // presumably matching how the callee was lowered; confirm against
            // the g0/m0 lowering in the function translator.
            let memory_style = &memory_styles[memory_index];
            let base_ptr = if let MemoryStyle::Dynamic { .. } = memory_style {
                base_ptr
            } else {
                err!(builder.build_load(intrinsics.ptr_ty, base_ptr, "")).into_pointer_value()
            };

            base_ptr.set_name("trmpl_m0_base_ptr");

            args_vec.push(base_ptr.into());
        }

        // Load each wasm argument from its i128 slot in the buffer, casting
        // the slot pointer to the parameter's actual type first.
        for (i, param_ty) in func_sig.params().iter().enumerate() {
            let index = intrinsics.i32_ty.const_int(i as _, false);
            let item_pointer = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[index],
                    "arg_ptr"
                ))
            };

            let casted_type = type_to_llvm(intrinsics, *param_ty)?;
            let casted_pointer_type = type_to_llvm_ptr(intrinsics, *param_ty)?;

            let typed_item_pointer = err!(builder.build_pointer_cast(
                item_pointer,
                casted_pointer_type,
                "typed_arg_pointer"
            ));

            let arg = err!(builder.build_load(casted_type, typed_item_pointer, "arg"));
            args_vec.push(arg.into());
        }

        // Indirect call through the callee pointer, forwarding the ABI
        // attributes computed by func_type_to_llvm.
        let call_site = err!(builder.build_indirect_call(
            llvm_func_type,
            func_ptr,
            args_vec.as_slice(),
            "call"
        ));
        for (attr, attr_loc) in func_attrs {
            call_site.add_attribute(*attr_loc, *attr);
        }

        // Unpack results per the ABI and write them back into the buffer's
        // i128 slots (slot i holds result i).
        let rets = self
            .abi
            .rets_from_call(&builder, intrinsics, call_site, func_sig)?;
        for (idx, v) in rets.into_iter().enumerate() {
            let ptr = unsafe {
                err!(builder.build_gep(
                    intrinsics.i128_ty,
                    args_rets_ptr,
                    &[intrinsics.i32_ty.const_int(idx as u64, false)],
                    "",
                ))
            };
            let ptr = err!(builder.build_pointer_cast(
                ptr,
                self.ctx.ptr_type(AddressSpace::default()),
                ""
            ));
            err!(builder.build_store(ptr, v));
        }

        err!(builder.build_return(None));
        Ok(())
    }
636
    /// Emit the body of a dynamic trampoline into `trampoline_func`.
    ///
    /// Incoming wasm arguments are spilled into an on-stack array of i128
    /// slots, the host callee pointer is loaded from the start of the vmctx,
    /// and the callee is invoked as `fn(vmctx, values_ptr)`. Results are then
    /// read back out of the same slots and returned per the target ABI.
    fn generate_dynamic_trampoline<'ctx>(
        &self,
        trampoline_func: FunctionValue,
        func_sig: &FuncType,
        context: &'ctx Context,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<(), CompileError> {
        let entry_block = context.append_basic_block(trampoline_func, "entry");
        let builder = context.create_builder();
        builder.position_at_end(entry_block);

        // One i128 slot per argument or result, whichever count is larger,
        // since the same array is reused for both directions.
        let values = err!(builder.build_alloca(
            intrinsics.i128_ty.array_type(cmp::max(
                func_sig.params().len().try_into().unwrap(),
                func_sig.results().len().try_into().unwrap(),
            )),
            "",
        ));

        // Skip the sret pointer (if the ABI uses one) and the vmctx param to
        // reach the first user-visible wasm argument.
        let first_user_param = if self.abi.is_sret(func_sig)? { 2 } else { 1 };
        for i in 0..func_sig.params().len() {
            let ptr = unsafe {
                err!(builder.build_in_bounds_gep(
                    intrinsics.i128_ty,
                    values,
                    &[intrinsics.i32_ty.const_int(i.try_into().unwrap(), false)],
                    "args",
                ))
            };
            let ptr = err!(builder.build_bit_cast(
                ptr,
                type_to_llvm_ptr(intrinsics, func_sig.params()[i])?,
                ""
            ))
            .into_pointer_value();
            err!(
                builder.build_store(
                    ptr,
                    trampoline_func
                        .get_nth_param(i as u32 + first_user_param)
                        .unwrap(),
                )
            );
        }

        // Host-callee signature: fn(vmctx ptr, values ptr) -> void.
        let callee_ptr_ty = intrinsics.void_ty.fn_type(
            &[
                intrinsics.ptr_ty.into(),
                intrinsics.ptr_ty.into(),
            ],
            false,
        );
        let vmctx = self.abi.get_vmctx_ptr_param(&trampoline_func);
        // The callee function pointer is loaded from the vmctx itself.
        let callee_ty =
            err!(builder.build_bit_cast(vmctx, self.ctx.ptr_type(AddressSpace::default()), ""));
        let callee =
            err!(builder.build_load(intrinsics.ptr_ty, callee_ty.into_pointer_value(), ""))
                .into_pointer_value();

        let values_ptr = err!(builder.build_pointer_cast(values, intrinsics.ptr_ty, ""));
        err!(builder.build_indirect_call(
            callee_ptr_ty,
            callee,
            &[vmctx.into(), values_ptr.into()],
            "",
        ));

        if func_sig.results().is_empty() {
            err!(builder.build_return(None));
        } else {
            // Read each result back out of its i128 slot with its real type.
            let results = func_sig
                .results()
                .iter()
                .enumerate()
                .map(|(idx, ty)| {
                    let ptr = unsafe {
                        err!(builder.build_gep(
                            intrinsics.i128_ty,
                            values,
                            &[intrinsics.i32_ty.const_int(idx.try_into().unwrap(), false)],
                            "",
                        ))
                    };
                    let ptr = err!(builder.build_pointer_cast(
                        ptr,
                        type_to_llvm_ptr(intrinsics, *ty)?,
                        ""
                    ));
                    err_nt!(builder.build_load(type_to_llvm(intrinsics, *ty)?, ptr, ""))
                })
                .collect::<Result<Vec<_>, CompileError>>()?;

            if self.abi.is_sret(func_sig)? {
                // Struct return: fill the caller-provided sret buffer
                // (always the first parameter) and return void.
                let sret = trampoline_func
                    .get_first_param()
                    .unwrap()
                    .into_pointer_value();

                let basic_types: Vec<_> = func_sig
                    .results()
                    .iter()
                    .map(|&ty| type_to_llvm(intrinsics, ty))
                    .collect::<Result<_, _>>()?;
                let mut struct_value = context.struct_type(&basic_types, false).get_undef();

                for (idx, value) in results.iter().enumerate() {
                    let value = err!(builder.build_bit_cast(
                        *value,
                        type_to_llvm(intrinsics, func_sig.results()[idx])?,
                        "",
                    ));
                    struct_value =
                        err!(builder.build_insert_value(struct_value, value, idx as u32, ""))
                            .into_struct_value();
                }
                err!(builder.build_store(sret, struct_value));
                err!(builder.build_return(None));
            } else {
                // Register return: let the ABI pack the results.
                err!(
                    builder.build_return(Some(&self.abi.pack_values_for_register_return(
                        intrinsics,
                        &builder,
                        results.as_slice(),
                        &trampoline_func.get_type(),
                    )?))
                );
            }
        }

        Ok(())
    }
}