1use crate::{
5 HashMap,
6 heap::{Heap, HeapData, HeapStyle},
7 table::{TableData, TableSize},
8 translator::{
9 EXN_REF_TYPE, FuncEnvironment as BaseFuncEnvironment, GlobalVariable, LandingPad, TAG_TYPE,
10 TargetEnvironment,
11 },
12};
13use cranelift_codegen::{
14 cursor::FuncCursor,
15 ir::{
16 self, AbiParam, ArgumentPurpose, BlockArg, Endianness, ExceptionTableData,
17 ExceptionTableItem, ExceptionTag, Function, InstBuilder, MemFlags, Signature,
18 UserExternalName,
19 condcodes::IntCC,
20 immediates::{Offset32, Uimm64},
21 types::*,
22 },
23 isa::TargetFrontendConfig,
24};
25use cranelift_frontend::FunctionBuilder;
26use smallvec::SmallVec;
27use std::convert::TryFrom;
28use wasmer_compiler::wasmparser::HeapType;
29use wasmer_types::{
30 FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryStyle,
31 ModuleInfo, SignatureIndex, TableIndex, TableStyle, TagIndex, Type as WasmerType,
32 VMBuiltinFunctionIndex, VMOffsets, WasmError, WasmResult,
33 entity::{EntityRef, PrimaryMap, SecondaryMap},
34};
35
36pub fn get_function_name(func: &mut Function, func_index: FunctionIndex) -> ir::ExternalName {
38 ir::ExternalName::user(
39 func.params
40 .ensure_user_func_name(UserExternalName::new(0, func_index.as_u32())),
41 )
42}
43
/// Returns the IR integer type whose bit width matches the size of the
/// `current_elements` field of a `VMTableDefinition` (byte size from
/// `vmoffsets`, times 8).
#[allow(unused)]
pub fn type_of_vmtable_definition_current_elements(vmoffsets: &VMOffsets) -> ir::Type {
    ir::Type::int(u16::from(vmoffsets.size_of_vmtable_definition_current_elements()) * 8).unwrap()
}
49
/// Placement of a single exception-payload field.
#[derive(Clone)]
struct ExceptionFieldLayout {
    // Byte offset of the field within the payload buffer.
    offset: u32,
    // IR type of the field.
    ty: ir::Type,
}
55
/// Flat layout of all payload fields for one exception tag's signature.
#[derive(Clone)]
struct ExceptionTypeLayout {
    fields: SmallVec<[ExceptionFieldLayout; 4]>,
}
60
/// Per-function translation environment: resolves module-level entities
/// (tables, memories, tags) and lazily imports/caches the signatures of
/// runtime builtin functions while one Wasm function is lowered to
/// Cranelift IR.
pub struct FuncEnvironment<'module_environment> {
    // Target ISA frontend configuration (pointer type, default call conv).
    target_config: TargetFrontendConfig,

    // Module metadata for the module being translated.
    module: &'module_environment ModuleInfo,

    // Wasm value types tracked during translation.
    type_stack: Vec<WasmerType>,

    // Cranelift signatures for all of the module's Wasm signatures.
    signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,

    // Heaps materialized for this function's memories.
    heaps: PrimaryMap<Heap, HeapData>,

    // Lazily-created `VMContext` global value (see `vmctx`).
    vmctx: Option<ir::GlobalValue>,

    // Lazily-imported builtin libcall signatures; each slot is populated
    // on first use by the corresponding `get_*_sig` method.
    memory32_size_sig: Option<ir::SigRef>,

    table_size_sig: Option<ir::SigRef>,

    memory_grow_sig: Option<ir::SigRef>,

    table_grow_sig: Option<ir::SigRef>,

    table_copy_sig: Option<ir::SigRef>,

    table_init_sig: Option<ir::SigRef>,

    elem_drop_sig: Option<ir::SigRef>,

    memory_copy_sig: Option<ir::SigRef>,

    memory_fill_sig: Option<ir::SigRef>,

    memory_init_sig: Option<ir::SigRef>,

    data_drop_sig: Option<ir::SigRef>,

    table_get_sig: Option<ir::SigRef>,

    table_set_sig: Option<ir::SigRef>,

    func_ref_sig: Option<ir::SigRef>,

    table_fill_sig: Option<ir::SigRef>,

    memory32_atomic_wait32_sig: Option<ir::SigRef>,

    memory32_atomic_wait64_sig: Option<ir::SigRef>,

    memory32_atomic_notify_sig: Option<ir::SigRef>,

    // Exception-handling builtin signatures, also lazily imported.
    personality2_sig: Option<ir::SigRef>,
    throw_sig: Option<ir::SigRef>,
    alloc_exception_sig: Option<ir::SigRef>,
    read_exception_sig: Option<ir::SigRef>,
    read_exnref_sig: Option<ir::SigRef>,

    // Cached payload layouts, keyed by tag index (see `exception_type_layout`).
    exception_type_layouts: HashMap<u32, ExceptionTypeLayout>,

    // Offsets into the `VMContext` structure for this module.
    offsets: VMOffsets,

    // Styles of each memory in the module.
    memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,

    // Table access metadata, populated lazily by `ensure_table_exists`.
    tables: SecondaryMap<TableIndex, Option<TableData>>,

    // Styles of each table in the module.
    table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
}
163
164impl<'module_environment> FuncEnvironment<'module_environment> {
    /// Creates a fresh translation environment for `module`; all lazily
    /// imported builtin signatures and cached entities start empty.
    pub fn new(
        target_config: TargetFrontendConfig,
        module: &'module_environment ModuleInfo,
        signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
        memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
        table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
    ) -> Self {
        Self {
            target_config,
            module,
            signatures,
            type_stack: vec![],
            heaps: PrimaryMap::new(),
            vmctx: None,
            memory32_size_sig: None,
            table_size_sig: None,
            memory_grow_sig: None,
            table_grow_sig: None,
            table_copy_sig: None,
            table_init_sig: None,
            elem_drop_sig: None,
            memory_copy_sig: None,
            memory_fill_sig: None,
            memory_init_sig: None,
            table_get_sig: None,
            table_set_sig: None,
            data_drop_sig: None,
            func_ref_sig: None,
            table_fill_sig: None,
            memory32_atomic_wait32_sig: None,
            memory32_atomic_wait64_sig: None,
            memory32_atomic_notify_sig: None,
            personality2_sig: None,
            throw_sig: None,
            alloc_exception_sig: None,
            read_exception_sig: None,
            read_exnref_sig: None,
            exception_type_layouts: HashMap::new(),
            // Offsets depend on the target pointer width and module shape.
            offsets: VMOffsets::new(target_config.pointer_bytes(), module),
            memory_styles,
            tables: Default::default(),
            table_styles,
        }
    }
209
    /// Native pointer type of the compilation target.
    fn pointer_type(&self) -> ir::Type {
        self.target_config.pointer_type()
    }
213
    /// Lazily materializes the `TableData` (base pointer GV, bound, element
    /// size) for `index`, creating the global values needed to address the
    /// table at runtime. Idempotent: returns immediately if already built.
    fn ensure_table_exists(&mut self, func: &mut ir::Function, index: TableIndex) {
        if self.tables[index].is_some() {
            return;
        }

        let pointer_type = self.pointer_type();

        // Locate the `VMTableDefinition` for this table: locally-defined
        // tables live directly in the VMContext; imported tables are reached
        // through a pointer loaded from the import record.
        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_table_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                let from_offset = self.offsets.vmctx_vmtable_import(index);
                // The import record holds a pointer to the owning instance's
                // definition; it never changes, hence `readonly`.
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = self.reference_type().bytes();

        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            // A table whose minimum equals its maximum can never grow, so
            // its base pointer can be treated as read-only.
            flags: if Some(table.minimum) == table.maximum {
                MemFlags::trusted().with_readonly()
            } else {
                MemFlags::trusted()
            },
        });

        // Fixed-size tables get a static bound; growable tables load the
        // current element count from the definition on each access.
        let bound = if Some(table.minimum) == table.maximum {
            TableSize::Static {
                bound: table.minimum,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        self.tables[index] = Some(TableData {
            base_gv,
            bound,
            element_size,
        });
    }
287
288 fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
289 self.vmctx.unwrap_or_else(|| {
290 let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
291 self.vmctx = Some(vmctx);
292 vmctx
293 })
294 }
295
296 fn get_table_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
297 let sig = self.table_fill_sig.unwrap_or_else(|| {
298 func.import_signature(Signature {
299 params: vec![
300 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
301 AbiParam::new(I32),
303 AbiParam::new(I32),
305 AbiParam::new(self.reference_type()),
307 AbiParam::new(I32),
309 ],
310 returns: vec![],
311 call_conv: self.target_config.default_call_conv,
312 })
313 });
314 self.table_fill_sig = Some(sig);
315 sig
316 }
317
318 fn get_table_fill_func(
319 &mut self,
320 func: &mut Function,
321 table_index: TableIndex,
322 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
323 (
324 self.get_table_fill_sig(func),
325 table_index.index(),
326 VMBuiltinFunctionIndex::get_table_fill_index(),
327 )
328 }
329
330 fn get_func_ref_sig(&mut self, func: &mut Function) -> ir::SigRef {
331 let sig = self.func_ref_sig.unwrap_or_else(|| {
332 func.import_signature(Signature {
333 params: vec![
334 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
335 AbiParam::new(I32),
336 ],
337 returns: vec![AbiParam::new(self.reference_type())],
338 call_conv: self.target_config.default_call_conv,
339 })
340 });
341 self.func_ref_sig = Some(sig);
342 sig
343 }
344
345 fn get_func_ref_func(
346 &mut self,
347 func: &mut Function,
348 function_index: FunctionIndex,
349 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
350 (
351 self.get_func_ref_sig(func),
352 function_index.index(),
353 VMBuiltinFunctionIndex::get_func_ref_index(),
354 )
355 }
356
357 fn get_table_get_sig(&mut self, func: &mut Function) -> ir::SigRef {
358 let sig = self.table_get_sig.unwrap_or_else(|| {
359 func.import_signature(Signature {
360 params: vec![
361 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
362 AbiParam::new(I32),
363 AbiParam::new(I32),
364 ],
365 returns: vec![AbiParam::new(self.reference_type())],
366 call_conv: self.target_config.default_call_conv,
367 })
368 });
369 self.table_get_sig = Some(sig);
370 sig
371 }
372
373 fn get_table_get_func(
374 &mut self,
375 func: &mut Function,
376 table_index: TableIndex,
377 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
378 if self.module.is_imported_table(table_index) {
379 (
380 self.get_table_get_sig(func),
381 table_index.index(),
382 VMBuiltinFunctionIndex::get_imported_table_get_index(),
383 )
384 } else {
385 (
386 self.get_table_get_sig(func),
387 self.module.local_table_index(table_index).unwrap().index(),
388 VMBuiltinFunctionIndex::get_table_get_index(),
389 )
390 }
391 }
392
393 fn get_table_set_sig(&mut self, func: &mut Function) -> ir::SigRef {
394 let sig = self.table_set_sig.unwrap_or_else(|| {
395 func.import_signature(Signature {
396 params: vec![
397 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
398 AbiParam::new(I32),
399 AbiParam::new(I32),
400 AbiParam::new(self.reference_type()),
401 ],
402 returns: vec![],
403 call_conv: self.target_config.default_call_conv,
404 })
405 });
406 self.table_set_sig = Some(sig);
407 sig
408 }
409
410 fn get_table_set_func(
411 &mut self,
412 func: &mut Function,
413 table_index: TableIndex,
414 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
415 if self.module.is_imported_table(table_index) {
416 (
417 self.get_table_set_sig(func),
418 table_index.index(),
419 VMBuiltinFunctionIndex::get_imported_table_set_index(),
420 )
421 } else {
422 (
423 self.get_table_set_sig(func),
424 self.module.local_table_index(table_index).unwrap().index(),
425 VMBuiltinFunctionIndex::get_table_set_index(),
426 )
427 }
428 }
429
430 fn get_table_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
431 let sig = self.table_grow_sig.unwrap_or_else(|| {
432 func.import_signature(Signature {
433 params: vec![
434 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
435 AbiParam::new(self.reference_type()),
437 AbiParam::new(I32),
438 AbiParam::new(I32),
439 ],
440 returns: vec![AbiParam::new(I32)],
441 call_conv: self.target_config.default_call_conv,
442 })
443 });
444 self.table_grow_sig = Some(sig);
445 sig
446 }
447
448 fn get_table_grow_func(
451 &mut self,
452 func: &mut Function,
453 index: TableIndex,
454 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
455 if self.module.is_imported_table(index) {
456 (
457 self.get_table_grow_sig(func),
458 index.index(),
459 VMBuiltinFunctionIndex::get_imported_table_grow_index(),
460 )
461 } else {
462 (
463 self.get_table_grow_sig(func),
464 self.module.local_table_index(index).unwrap().index(),
465 VMBuiltinFunctionIndex::get_table_grow_index(),
466 )
467 }
468 }
469
470 fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
471 let sig = self.memory_grow_sig.unwrap_or_else(|| {
472 func.import_signature(Signature {
473 params: vec![
474 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
475 AbiParam::new(I32),
476 AbiParam::new(I32),
477 ],
478 returns: vec![AbiParam::new(I32)],
479 call_conv: self.target_config.default_call_conv,
480 })
481 });
482 self.memory_grow_sig = Some(sig);
483 sig
484 }
485
486 fn get_memory_grow_func(
489 &mut self,
490 func: &mut Function,
491 index: MemoryIndex,
492 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
493 if self.module.is_imported_memory(index) {
494 (
495 self.get_memory_grow_sig(func),
496 index.index(),
497 VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
498 )
499 } else {
500 (
501 self.get_memory_grow_sig(func),
502 self.module.local_memory_index(index).unwrap().index(),
503 VMBuiltinFunctionIndex::get_memory32_grow_index(),
504 )
505 }
506 }
507
508 fn get_table_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
509 let sig = self.table_size_sig.unwrap_or_else(|| {
510 func.import_signature(Signature {
511 params: vec![
512 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
513 AbiParam::new(I32),
514 ],
515 returns: vec![AbiParam::new(I32)],
516 call_conv: self.target_config.default_call_conv,
517 })
518 });
519 self.table_size_sig = Some(sig);
520 sig
521 }
522
523 fn get_table_size_func(
526 &mut self,
527 func: &mut Function,
528 index: TableIndex,
529 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
530 if self.module.is_imported_table(index) {
531 (
532 self.get_table_size_sig(func),
533 index.index(),
534 VMBuiltinFunctionIndex::get_imported_table_size_index(),
535 )
536 } else {
537 (
538 self.get_table_size_sig(func),
539 self.module.local_table_index(index).unwrap().index(),
540 VMBuiltinFunctionIndex::get_table_size_index(),
541 )
542 }
543 }
544
545 fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
546 let sig = self.memory32_size_sig.unwrap_or_else(|| {
547 func.import_signature(Signature {
548 params: vec![
549 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
550 AbiParam::new(I32),
551 ],
552 returns: vec![AbiParam::new(I32)],
553 call_conv: self.target_config.default_call_conv,
554 })
555 });
556 self.memory32_size_sig = Some(sig);
557 sig
558 }
559
560 fn get_memory_size_func(
563 &mut self,
564 func: &mut Function,
565 index: MemoryIndex,
566 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
567 if self.module.is_imported_memory(index) {
568 (
569 self.get_memory32_size_sig(func),
570 index.index(),
571 VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
572 )
573 } else {
574 (
575 self.get_memory32_size_sig(func),
576 self.module.local_memory_index(index).unwrap().index(),
577 VMBuiltinFunctionIndex::get_memory32_size_index(),
578 )
579 }
580 }
581
582 fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
583 let sig = self.table_copy_sig.unwrap_or_else(|| {
584 func.import_signature(Signature {
585 params: vec![
586 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
587 AbiParam::new(I32),
589 AbiParam::new(I32),
591 AbiParam::new(I32),
593 AbiParam::new(I32),
595 AbiParam::new(I32),
597 ],
598 returns: vec![],
599 call_conv: self.target_config.default_call_conv,
600 })
601 });
602 self.table_copy_sig = Some(sig);
603 sig
604 }
605
606 fn get_table_copy_func(
607 &mut self,
608 func: &mut Function,
609 dst_table_index: TableIndex,
610 src_table_index: TableIndex,
611 ) -> (ir::SigRef, usize, usize, VMBuiltinFunctionIndex) {
612 let sig = self.get_table_copy_sig(func);
613 (
614 sig,
615 dst_table_index.as_u32() as usize,
616 src_table_index.as_u32() as usize,
617 VMBuiltinFunctionIndex::get_table_copy_index(),
618 )
619 }
620
621 fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
622 let sig = self.table_init_sig.unwrap_or_else(|| {
623 func.import_signature(Signature {
624 params: vec![
625 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
626 AbiParam::new(I32),
628 AbiParam::new(I32),
630 AbiParam::new(I32),
632 AbiParam::new(I32),
634 AbiParam::new(I32),
636 ],
637 returns: vec![],
638 call_conv: self.target_config.default_call_conv,
639 })
640 });
641 self.table_init_sig = Some(sig);
642 sig
643 }
644
645 fn get_table_init_func(
646 &mut self,
647 func: &mut Function,
648 table_index: TableIndex,
649 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
650 let sig = self.get_table_init_sig(func);
651 let table_index = table_index.as_u32() as usize;
652 (
653 sig,
654 table_index,
655 VMBuiltinFunctionIndex::get_table_init_index(),
656 )
657 }
658
659 fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
660 let sig = self.elem_drop_sig.unwrap_or_else(|| {
661 func.import_signature(Signature {
662 params: vec![
663 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
664 AbiParam::new(I32),
666 ],
667 returns: vec![],
668 call_conv: self.target_config.default_call_conv,
669 })
670 });
671 self.elem_drop_sig = Some(sig);
672 sig
673 }
674
675 fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
676 let sig = self.get_elem_drop_sig(func);
677 (sig, VMBuiltinFunctionIndex::get_elem_drop_index())
678 }
679
680 fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
681 let sig = self.memory_copy_sig.unwrap_or_else(|| {
682 func.import_signature(Signature {
683 params: vec![
684 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
685 AbiParam::new(I32),
687 AbiParam::new(I32),
689 AbiParam::new(I32),
691 AbiParam::new(I32),
693 ],
694 returns: vec![],
695 call_conv: self.target_config.default_call_conv,
696 })
697 });
698 self.memory_copy_sig = Some(sig);
699 sig
700 }
701
702 fn get_memory_copy_func(
703 &mut self,
704 func: &mut Function,
705 memory_index: MemoryIndex,
706 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
707 let sig = self.get_memory_copy_sig(func);
708 if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
709 (
710 sig,
711 local_memory_index.index(),
712 VMBuiltinFunctionIndex::get_memory_copy_index(),
713 )
714 } else {
715 (
716 sig,
717 memory_index.index(),
718 VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
719 )
720 }
721 }
722
723 fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
724 let sig = self.memory_fill_sig.unwrap_or_else(|| {
725 func.import_signature(Signature {
726 params: vec![
727 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
728 AbiParam::new(I32),
730 AbiParam::new(I32),
732 AbiParam::new(I32),
734 AbiParam::new(I32),
736 ],
737 returns: vec![],
738 call_conv: self.target_config.default_call_conv,
739 })
740 });
741 self.memory_fill_sig = Some(sig);
742 sig
743 }
744
745 fn get_memory_fill_func(
746 &mut self,
747 func: &mut Function,
748 memory_index: MemoryIndex,
749 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
750 let sig = self.get_memory_fill_sig(func);
751 if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
752 (
753 sig,
754 local_memory_index.index(),
755 VMBuiltinFunctionIndex::get_memory_fill_index(),
756 )
757 } else {
758 (
759 sig,
760 memory_index.index(),
761 VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
762 )
763 }
764 }
765
766 fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
767 let sig = self.memory_init_sig.unwrap_or_else(|| {
768 func.import_signature(Signature {
769 params: vec![
770 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
771 AbiParam::new(I32),
773 AbiParam::new(I32),
775 AbiParam::new(I32),
777 AbiParam::new(I32),
779 AbiParam::new(I32),
781 ],
782 returns: vec![],
783 call_conv: self.target_config.default_call_conv,
784 })
785 });
786 self.memory_init_sig = Some(sig);
787 sig
788 }
789
790 fn get_memory_init_func(
791 &mut self,
792 func: &mut Function,
793 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
794 let sig = self.get_memory_init_sig(func);
795 (sig, VMBuiltinFunctionIndex::get_memory_init_index())
796 }
797
798 fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
799 let sig = self.data_drop_sig.unwrap_or_else(|| {
800 func.import_signature(Signature {
801 params: vec![
802 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
803 AbiParam::new(I32),
805 ],
806 returns: vec![],
807 call_conv: self.target_config.default_call_conv,
808 })
809 });
810 self.data_drop_sig = Some(sig);
811 sig
812 }
813
814 fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
815 let sig = self.get_data_drop_sig(func);
816 (sig, VMBuiltinFunctionIndex::get_data_drop_index())
817 }
818
819 fn get_memory32_atomic_wait32_sig(&mut self, func: &mut Function) -> ir::SigRef {
820 let sig = self.memory32_atomic_wait32_sig.unwrap_or_else(|| {
821 func.import_signature(Signature {
822 params: vec![
823 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
824 AbiParam::new(I32),
826 AbiParam::new(I32),
828 AbiParam::new(I32),
830 AbiParam::new(I64),
832 ],
833 returns: vec![AbiParam::new(I32)],
834 call_conv: self.target_config.default_call_conv,
835 })
836 });
837 self.memory32_atomic_wait32_sig = Some(sig);
838 sig
839 }
840
841 fn get_memory_atomic_wait32_func(
845 &mut self,
846 func: &mut Function,
847 index: MemoryIndex,
848 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
849 if self.module.is_imported_memory(index) {
850 (
851 self.get_memory32_atomic_wait32_sig(func),
852 index.index(),
853 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
854 )
855 } else {
856 (
857 self.get_memory32_atomic_wait32_sig(func),
858 self.module.local_memory_index(index).unwrap().index(),
859 VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
860 )
861 }
862 }
863
864 fn get_memory32_atomic_wait64_sig(&mut self, func: &mut Function) -> ir::SigRef {
865 let sig = self.memory32_atomic_wait64_sig.unwrap_or_else(|| {
866 func.import_signature(Signature {
867 params: vec![
868 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
869 AbiParam::new(I32),
871 AbiParam::new(I32),
873 AbiParam::new(I64),
875 AbiParam::new(I64),
877 ],
878 returns: vec![AbiParam::new(I32)],
879 call_conv: self.target_config.default_call_conv,
880 })
881 });
882 self.memory32_atomic_wait64_sig = Some(sig);
883 sig
884 }
885
886 fn get_memory_atomic_wait64_func(
890 &mut self,
891 func: &mut Function,
892 index: MemoryIndex,
893 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
894 if self.module.is_imported_memory(index) {
895 (
896 self.get_memory32_atomic_wait64_sig(func),
897 index.index(),
898 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
899 )
900 } else {
901 (
902 self.get_memory32_atomic_wait64_sig(func),
903 self.module.local_memory_index(index).unwrap().index(),
904 VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
905 )
906 }
907 }
908
909 fn get_memory32_atomic_notify_sig(&mut self, func: &mut Function) -> ir::SigRef {
910 let sig = self.memory32_atomic_notify_sig.unwrap_or_else(|| {
911 func.import_signature(Signature {
912 params: vec![
913 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
914 AbiParam::new(I32),
916 AbiParam::new(I32),
918 AbiParam::new(I32),
920 ],
921 returns: vec![AbiParam::new(I32)],
922 call_conv: self.target_config.default_call_conv,
923 })
924 });
925 self.memory32_atomic_notify_sig = Some(sig);
926 sig
927 }
928
929 fn get_memory_atomic_notify_func(
933 &mut self,
934 func: &mut Function,
935 index: MemoryIndex,
936 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
937 if self.module.is_imported_memory(index) {
938 (
939 self.get_memory32_atomic_notify_sig(func),
940 index.index(),
941 VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
942 )
943 } else {
944 (
945 self.get_memory32_atomic_notify_sig(func),
946 self.module.local_memory_index(index).unwrap().index(),
947 VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
948 )
949 }
950 }
951
952 fn get_personality2_func(
953 &mut self,
954 func: &mut Function,
955 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
956 let sig = self.personality2_sig.unwrap_or_else(|| {
957 let mut signature = Signature::new(self.target_config.default_call_conv);
958 signature.params.push(AbiParam::new(self.pointer_type()));
959 signature.params.push(AbiParam::new(self.pointer_type()));
960 signature.returns.push(AbiParam::new(TAG_TYPE));
961 let sig = func.import_signature(signature);
962 self.personality2_sig = Some(sig);
963 sig
964 });
965 (
966 sig,
967 VMBuiltinFunctionIndex::get_imported_personality2_index(),
968 )
969 }
970
971 fn get_throw_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
972 let sig = self.throw_sig.unwrap_or_else(|| {
973 let mut signature = Signature::new(self.target_config.default_call_conv);
974 signature.params.push(AbiParam::special(
975 self.pointer_type(),
976 ArgumentPurpose::VMContext,
977 ));
978 signature.params.push(AbiParam::new(EXN_REF_TYPE));
979 let sig = func.import_signature(signature);
980 self.throw_sig = Some(sig);
981 sig
982 });
983 (sig, VMBuiltinFunctionIndex::get_imported_throw_index())
984 }
985
986 fn get_alloc_exception_func(
987 &mut self,
988 func: &mut Function,
989 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
990 let sig = self.alloc_exception_sig.unwrap_or_else(|| {
991 let mut signature = Signature::new(self.target_config.default_call_conv);
992 signature.params.push(AbiParam::special(
993 self.pointer_type(),
994 ArgumentPurpose::VMContext,
995 ));
996 signature.params.push(AbiParam::new(TAG_TYPE));
997 signature.returns.push(AbiParam::new(EXN_REF_TYPE));
998 let sig = func.import_signature(signature);
999 self.alloc_exception_sig = Some(sig);
1000 sig
1001 });
1002 (
1003 sig,
1004 VMBuiltinFunctionIndex::get_imported_alloc_exception_index(),
1005 )
1006 }
1007
1008 fn get_read_exnref_func(
1009 &mut self,
1010 func: &mut Function,
1011 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
1012 let sig = self.read_exnref_sig.unwrap_or_else(|| {
1013 let mut signature = Signature::new(self.target_config.default_call_conv);
1014 signature.params.push(AbiParam::special(
1015 self.pointer_type(),
1016 ArgumentPurpose::VMContext,
1017 ));
1018 signature.params.push(AbiParam::new(EXN_REF_TYPE));
1019 signature.returns.push(AbiParam::new(self.pointer_type()));
1020 let sig = func.import_signature(signature);
1021 self.read_exnref_sig = Some(sig);
1022 sig
1023 });
1024 (
1025 sig,
1026 VMBuiltinFunctionIndex::get_imported_read_exnref_index(),
1027 )
1028 }
1029
1030 fn get_read_exception_func(
1031 &mut self,
1032 func: &mut Function,
1033 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
1034 let sig = self.read_exception_sig.unwrap_or_else(|| {
1035 let mut signature = Signature::new(self.target_config.default_call_conv);
1036 signature.params.push(AbiParam::new(self.pointer_type()));
1037 signature.returns.push(AbiParam::new(EXN_REF_TYPE));
1038 let sig = func.import_signature(signature);
1039 self.read_exception_sig = Some(sig);
1040 sig
1041 });
1042 (
1043 sig,
1044 VMBuiltinFunctionIndex::get_imported_exception_into_exnref_index(),
1045 )
1046 }
1047
    /// Returns the (cached) payload layout for exceptions carrying tag
    /// `tag_index`, computing and memoizing it on first request.
    ///
    /// Uses a `contains_key` + `insert` + `get` sequence instead of the
    /// entry API because computing the layout needs an immutable `&self`
    /// borrow while the map would be held through `&mut self`.
    fn exception_type_layout(&mut self, tag_index: TagIndex) -> WasmResult<&ExceptionTypeLayout> {
        let key = tag_index.as_u32();
        if !self.exception_type_layouts.contains_key(&key) {
            let layout = self.compute_exception_type_layout(tag_index)?;
            self.exception_type_layouts.insert(key, layout);
        }
        // Safe: the key was either present or inserted just above.
        Ok(self.exception_type_layouts.get(&key).unwrap())
    }
1056
1057 fn compute_exception_type_layout(
1058 &self,
1059 tag_index: TagIndex,
1060 ) -> WasmResult<ExceptionTypeLayout> {
1061 let sig_index = self.module.tags[tag_index];
1062 let func_type = &self.module.signatures[sig_index];
1063 let mut offset = 0u32;
1064 let mut max_align = 1u32;
1065 let mut fields = SmallVec::<[ExceptionFieldLayout; 4]>::new();
1066
1067 for wasm_ty in func_type.params() {
1068 let ir_ty = self.map_wasmer_type_to_ir(*wasm_ty)?;
1069 let field_size = ir_ty.bytes();
1070 let align = field_size.max(1);
1071 max_align = max_align.max(align);
1072 offset = offset.next_multiple_of(align);
1073 fields.push(ExceptionFieldLayout { offset, ty: ir_ty });
1074 offset = offset
1075 .checked_add(field_size)
1076 .ok_or_else(|| WasmError::Unsupported("exception payload too large".to_string()))?;
1077 }
1078
1079 Ok(ExceptionTypeLayout { fields })
1080 }
1081
1082 fn map_wasmer_type_to_ir(&self, ty: WasmerType) -> WasmResult<ir::Type> {
1083 Ok(match ty {
1084 WasmerType::I32 => ir::types::I32,
1085 WasmerType::I64 => ir::types::I64,
1086 WasmerType::F32 => ir::types::F32,
1087 WasmerType::F64 => ir::types::F64,
1088 WasmerType::V128 => ir::types::I8X16,
1089 WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
1090 self.reference_type()
1091 }
1092 })
1093 }
1094
    /// Emits a direct call to `callee`, attaching an exception table when a
    /// `landing_pad` is given (lowered as `try_call`); otherwise emits a
    /// plain `call`. Returns the call's result values. When
    /// `unreachable_on_return` is set, a trap is emitted after the call so
    /// the normal return path is marked unreachable (used for calls that
    /// never return, e.g. throw).
    fn call_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        callee: ir::FuncRef,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig_ref]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        // Fast path: no handlers in scope, a regular call suffices.
        if landing_pad.is_none() {
            let inst = builder.ins().call(callee, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        // Normal-return continuation block: one block param per return
        // value, each wired to the corresponding try_call return.
        // NOTE(review): unlike `call_indirect_with_handlers`, this path
        // does not call `insert_block_after` for the continuation — confirm
        // the block-ordering difference is intentional.
        let continuation = builder.create_block();
        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        // Build the exception table: optional context value, then one entry
        // per landing-pad clause (tagged or catch-all), each jumping to the
        // landing-pad block with the two exception payload values.
        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }
        let etd = ExceptionTableData::new(sig_ref, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call(callee, args, et);
        // Only the try_call above can branch to the continuation, so it can
        // be sealed immediately.
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }
        result_values
    }
1160
    /// Emits an indirect call through `func_addr` with signature `sig`,
    /// attaching an exception table when a `landing_pad` is given (lowered
    /// as `try_call_indirect`); otherwise emits a plain `call_indirect`.
    /// Returns the call's result values. When `unreachable_on_return` is
    /// set, a trap is emitted after the call so the normal return path is
    /// marked unreachable.
    #[allow(clippy::too_many_arguments)]
    fn call_indirect_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        sig: ir::SigRef,
        func_addr: ir::Value,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        // Fast path: no handlers in scope, a regular indirect call suffices.
        if landing_pad.is_none() {
            let inst = builder.ins().call_indirect(sig, func_addr, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        // Normal-return continuation block, placed right after the current
        // block in layout order.
        let continuation = builder.create_block();
        let current_block = builder.current_block().expect("current block");
        builder.insert_block_after(continuation, current_block);

        // One block param per return value, each wired to the corresponding
        // try_call return.
        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        // Build the exception table: optional context value, then one entry
        // per landing-pad clause (tagged or catch-all), each jumping to the
        // landing-pad block with the two exception payload values.
        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }

        let etd = ExceptionTableData::new(sig, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call_indirect(func_addr, args, et);
        // Only the try_call above can branch to the continuation, so it can
        // be sealed immediately.
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }

        result_values
    }
1232
1233 fn translate_load_builtin_function_address(
1236 &mut self,
1237 pos: &mut FuncCursor<'_>,
1238 callee_func_idx: VMBuiltinFunctionIndex,
1239 ) -> (ir::Value, ir::Value) {
1240 let pointer_type = self.pointer_type();
1242 let vmctx = self.vmctx(pos.func);
1243 let base = pos.ins().global_value(pointer_type, vmctx);
1244
1245 let mut mem_flags = ir::MemFlags::trusted();
1246 mem_flags.set_readonly();
1247
1248 let body_offset =
1250 i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
1251 let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
1252
1253 (base, func_addr)
1254 }
1255
    /// Computes the address of `table[index]` and loads the stored funcref
    /// pointer (possibly null — callers such as `translate_call_indirect`
    /// null-check it themselves).
    fn get_or_init_funcref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> ir::Value {
        let pointer_type = self.pointer_type();
        self.ensure_table_exists(builder.func, table_index);
        let table_data = self.tables[table_index].as_ref().unwrap();

        // Bounds checking happens inside `prepare_table_addr`.
        // NOTE(review): the `false` flag is forwarded to `prepare_table_addr`;
        // presumably it disables spectre-style guarding or lazy init — confirm
        // against `TableData::prepare_table_addr`.
        let (table_entry_addr, flags) =
            table_data.prepare_table_addr(builder, index, pointer_type, false);
        builder.ins().load(pointer_type, flags, table_entry_addr, 0)
    }
1274}
1275
impl TargetEnvironment for FuncEnvironment<'_> {
    /// Returns the target ISA frontend configuration (pointer width, page
    /// size, etc.) this environment was constructed with.
    fn target_config(&self) -> TargetFrontendConfig {
        self.target_config
    }
}
1281
1282impl BaseFuncEnvironment for FuncEnvironment<'_> {
1283 fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
1284 index >= 1
1286 }
1287
    /// Translates `table.grow` into a call to the table-grow runtime builtin,
    /// returning the builtin's single result value.
    fn translate_table_grow(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        delta: ir::Value,
        init_value: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, index_arg, func_idx) = self.get_table_grow_func(pos.func, table_index);
        // The builtin identifies the table by an i32 index constant.
        let table_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, init_value, delta, table_index],
        );
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1306
    /// Translates `table.get` into a call to the table-get runtime builtin,
    /// returning the loaded reference value.
    fn translate_table_get(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_get_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, table_index, index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1324
    /// Translates `table.set` into a call to the table-set runtime builtin.
    ///
    /// Note the builtin's argument order is `(vmctx, table, index, value)`,
    /// which differs from this method's `(value, index)` parameter order.
    fn translate_table_set(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        value: ir::Value,
        index: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_set_func(pos.func, table_index);
        let n_table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, n_table_index, index, value]);
        Ok(())
    }
1342
    /// Translates `table.fill` into a call to the table-fill runtime builtin.
    fn translate_table_fill(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_fill_func(pos.func, table_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, dst, val, len],
        );

        Ok(())
    }
1364
1365 fn translate_ref_null(
1366 &mut self,
1367 mut pos: cranelift_codegen::cursor::FuncCursor,
1368 ty: HeapType,
1369 ) -> WasmResult<ir::Value> {
1370 Ok(match ty {
1371 HeapType::Abstract { ty, .. } => match ty {
1372 wasmer_compiler::wasmparser::AbstractHeapType::Func
1373 | wasmer_compiler::wasmparser::AbstractHeapType::Extern
1374 | wasmer_compiler::wasmparser::AbstractHeapType::Exn => pos.ins().iconst(
1375 if matches!(ty, wasmer_compiler::wasmparser::AbstractHeapType::Exn) {
1376 I32
1377 } else {
1378 self.reference_type()
1379 },
1380 0,
1381 ),
1382 _ => {
1383 return Err(WasmError::Unsupported(format!(
1384 "`ref.null T` that is not a `funcref`, an `externref` or an `exn`: {ty:?}"
1385 )));
1386 }
1387 },
1388 HeapType::Concrete(_) => {
1389 return Err(WasmError::Unsupported(
1390 "`ref.null T` that is not a `funcref` or an `externref`".into(),
1391 ));
1392 }
1393 HeapType::Exact(_) => {
1394 return Err(WasmError::Unsupported(
1395 "custom-descriptors not supported yet".into(),
1396 ));
1397 }
1398 })
1399 }
1400
1401 fn translate_ref_is_null(
1402 &mut self,
1403 mut pos: cranelift_codegen::cursor::FuncCursor,
1404 value: ir::Value,
1405 ) -> WasmResult<ir::Value> {
1406 let bool_is_null =
1407 pos.ins()
1408 .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0);
1409 Ok(pos.ins().uextend(ir::types::I32, bool_is_null))
1410 }
1411
    /// Translates `ref.func` into a call to the func-ref runtime builtin,
    /// which materializes the funcref value for `func_index`.
    fn translate_ref_func(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        func_index: FunctionIndex,
    ) -> WasmResult<ir::Value> {
        let (func_sig, func_index_arg, func_idx) = self.get_func_ref_func(pos.func, func_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let func_index_arg = pos.ins().iconst(I32, func_index_arg as i64);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, func_index_arg]);

        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1427
1428 fn translate_custom_global_get(
1429 &mut self,
1430 mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
1431 _index: GlobalIndex,
1432 ) -> WasmResult<ir::Value> {
1433 unreachable!("we don't make any custom globals")
1434 }
1435
1436 fn translate_custom_global_set(
1437 &mut self,
1438 mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
1439 _index: GlobalIndex,
1440 _value: ir::Value,
1441 ) -> WasmResult<()> {
1442 unreachable!("we don't make any custom globals")
1443 }
1444
    /// Sets up a `HeapData` describing linear memory `index` for use by
    /// memory access translation.
    ///
    /// For a local memory the base/length are read directly from the vmctx;
    /// for an imported memory an extra indirection loads the owner's
    /// `VMMemoryDefinition` pointer first. Dynamic-style memories get a
    /// bound global value re-loaded on each access; static-style memories
    /// get a compile-time bound and a readonly base (the base never moves).
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<Heap> {
        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_length_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_memory_index(index) {
                // Local memory: definition lives inline in the vmctx.
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
                let current_length_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmmemory_definition_current_length(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_length_offset)
            } else {
                // Imported memory: load the pointer to the owning instance's
                // definition, then address base/length relative to it.
                let from_offset = self.offsets.vmctx_vmmemory_import_definition(index);
                let memory = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    // The import pointer itself never changes after instantiation.
                    flags: ir::MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset)
            }
        };

        let (offset_guard_size, heap_style, readonly_base) = match self.memory_styles[index] {
            MemoryStyle::Dynamic { offset_guard_size } => {
                // Dynamic memory may grow/move, so the current length must be
                // re-loaded for bounds checks and the base is NOT readonly.
                let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_length_offset),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted(),
                });
                (
                    Uimm64::new(offset_guard_size),
                    HeapStyle::Dynamic {
                        bound_gv: heap_bound,
                    },
                    false,
                )
            }
            MemoryStyle::Static {
                bound,
                offset_guard_size,
            } => (
                Uimm64::new(offset_guard_size),
                HeapStyle::Static {
                    bound: bound.bytes().0 as u64,
                },
                // Static memory never relocates, so its base can be cached.
                true,
            ),
        };

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if readonly_base {
                ir::MemFlags::trusted().with_readonly()
            } else {
                ir::MemFlags::trusted()
            },
        });
        Ok(self.heaps.push(HeapData {
            base: heap_base,
            min_size: 0,
            max_size: None,
            memory_type: None,
            offset_guard_size: offset_guard_size.into(),
            style: heap_style,
            // Wasm32 linear memories are addressed with 32-bit indices.
            index_type: I32,
            page_size_log2: self.target_config.page_size_align_log2,
        }))
    }
1525
    /// Sets up a `GlobalVariable` describing Wasm global `index`.
    ///
    /// Both local and imported globals are reached through one pointer load
    /// from the vmctx (locals via their inline definition slot, imports via
    /// the import table); the value itself then lives at offset 0 of that
    /// pointer.
    fn make_global(
        &mut self,
        func: &mut ir::Function,
        index: GlobalIndex,
    ) -> WasmResult<GlobalVariable> {
        let pointer_type = self.pointer_type();

        let (ptr, offset) = {
            let vmctx = self.vmctx(func);

            let from_offset = if let Some(def_index) = self.module.local_global_index(index) {
                self.offsets.vmctx_vmglobal_definition(def_index)
            } else {
                self.offsets.vmctx_vmglobal_import_definition(index)
            };

            let global = func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                global_type: pointer_type,
                flags: MemFlags::trusted(),
            });

            (global, 0)
        };

        Ok(GlobalVariable::Memory {
            gv: ptr,
            offset: offset.into(),
            // Map the Wasm value type to its Cranelift representation.
            ty: match self.module.globals[index].ty {
                WasmerType::I32 => ir::types::I32,
                WasmerType::I64 => ir::types::I64,
                WasmerType::F32 => ir::types::F32,
                WasmerType::F64 => ir::types::F64,
                WasmerType::V128 => ir::types::I8X16,
                WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
                    self.reference_type()
                }
            },
        })
    }
1567
1568 fn make_indirect_sig(
1569 &mut self,
1570 func: &mut ir::Function,
1571 index: SignatureIndex,
1572 ) -> WasmResult<ir::SigRef> {
1573 Ok(func.import_signature(self.signatures[index].clone()))
1574 }
1575
    /// Imports function `index` into the current function for direct calls:
    /// its signature, its user external name, and the `ExtFuncData` marking
    /// it colocated (same module, near call).
    fn make_direct_func(
        &mut self,
        func: &mut ir::Function,
        index: FunctionIndex,
    ) -> WasmResult<ir::FuncRef> {
        let sigidx = self.module.functions[index];
        let signature = func.import_signature(self.signatures[sigidx].clone());
        let name = get_function_name(func, index);

        Ok(func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated: true,
            patchable: false,
        }))
    }
1592
    /// Translates `call_indirect`: loads the funcref out of the table,
    /// null-checks it, verifies the callee's signature id against the
    /// caller's expectation, then calls through
    /// `call_indirect_with_handlers` so exceptions reach `landing_pad`.
    fn translate_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        sig_index: SignatureIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let pointer_type = self.pointer_type();

        // Pointer to the VMCallerCheckedAnyfunc entry for `callee`.
        let anyfunc_ptr = self.get_or_init_funcref_table_elem(builder, table_index, callee);

        let mem_flags = ir::MemFlags::trusted();

        // A null entry means the table slot was never initialized.
        builder
            .ins()
            .trapz(anyfunc_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);

        let func_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
        );

        match self.table_styles[table_index] {
            TableStyle::CallerChecksSignature => {
                // Compare the callee's stored signature id against the
                // caller's expected id for `sig_index`; mismatch traps.
                let sig_id_size = self.offsets.size_of_vmshared_signature_index();
                let sig_id_type = ir::Type::int(u16::from(sig_id_size) * 8).unwrap();
                let vmctx = self.vmctx(builder.func);
                let base = builder.ins().global_value(pointer_type, vmctx);
                let offset =
                    i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();

                // The caller-side id table is immutable: readonly load.
                let mut mem_flags = ir::MemFlags::trusted();
                mem_flags.set_readonly();
                let caller_sig_id = builder.ins().load(sig_id_type, mem_flags, base, offset);

                // The callee-side id comes from the (mutable) table entry.
                let mem_flags = ir::MemFlags::trusted();
                let callee_sig_id = builder.ins().load(
                    sig_id_type,
                    mem_flags,
                    anyfunc_ptr,
                    i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
                );

                let cmp = builder
                    .ins()
                    .icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
                builder.ins().trapz(cmp, crate::TRAP_BAD_SIGNATURE);
            }
        }

        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        // First argument is always the callee's own vmctx, read from the
        // anyfunc entry.
        let vmctx = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
        );
        real_call_args.push(vmctx);

        real_call_args.extend_from_slice(call_args);

        let results = self.call_indirect_with_handlers(
            builder,
            sig_ref,
            func_addr,
            &real_call_args,
            Some(vmctx),
            landing_pad,
            false,
        );
        Ok(results)
    }
1680
    /// Translates a direct `call` to `callee_index`.
    ///
    /// A local (non-imported) callee shares the caller's vmctx and is called
    /// directly through the imported `FuncRef`. An imported callee requires
    /// loading its body address and its own vmctx out of the import record,
    /// then calling indirectly. Both paths route exceptions to `landing_pad`.
    fn translate_call(
        &mut self,
        builder: &mut FunctionBuilder,
        callee_index: FunctionIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        if !self.module.is_imported_function(callee_index) {
            // Local callee: reuse the caller's vmctx as the first argument.
            let caller_vmctx = builder
                .func
                .special_param(ArgumentPurpose::VMContext)
                .unwrap();
            real_call_args.push(caller_vmctx);

            real_call_args.extend_from_slice(call_args);

            let results = self.call_with_handlers(
                builder,
                callee,
                &real_call_args,
                Some(caller_vmctx),
                landing_pad,
                false,
            );
            return Ok(results);
        }

        // Imported callee: load the body pointer and callee vmctx from the
        // VMFunctionImport record in our vmctx.
        let pointer_type = self.pointer_type();
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let vmctx = self.vmctx(builder.func);
        let base = builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted();

        let body_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, base, body_offset);

        let vmctx_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let vmctx = builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(vmctx);

        real_call_args.extend_from_slice(call_args);

        let results = self.call_indirect_with_handlers(
            builder,
            sig_ref,
            func_addr,
            &real_call_args,
            Some(vmctx),
            landing_pad,
            false,
        );
        Ok(results)
    }
1754
1755 fn tag_param_arity(&self, tag_index: TagIndex) -> usize {
1756 let sig_index = self.module.tags[tag_index];
1757 let signature = &self.module.signatures[sig_index];
1758 signature.params().len()
1759 }
1760
    /// Converts a raw exception pointer (as delivered to a landing pad) into
    /// an exnref value via the read-exception runtime builtin.
    fn translate_exn_pointer_to_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        exn_ptr: ir::Value,
    ) -> ir::Value {
        let (read_sig, read_idx) = self.get_read_exception_func(builder.func);
        let mut pos = builder.cursor();
        // The vmctx returned here is unused: this builtin takes only the
        // exception pointer.
        let (_, read_addr) = self.translate_load_builtin_function_address(&mut pos, read_idx);
        let read_call = builder.ins().call_indirect(read_sig, read_addr, &[exn_ptr]);
        builder.inst_results(read_call)[0]
    }
1772
    /// Unboxes the payload of an exception: resolves the exnref to its
    /// payload buffer via the read-exnref builtin, then loads each field at
    /// the offsets given by the tag's `ExceptionTypeLayout`.
    fn translate_exn_unbox(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: TagIndex,
        exnref: ir::Value,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        // Clone so the layout borrow doesn't conflict with `&mut self` below.
        let layout = self.exception_type_layout(tag_index)?.clone();

        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, read_exnref_addr) =
            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
        let read_exnref_call =
            builder
                .ins()
                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
        let payload_ptr = builder.inst_results(read_exnref_call)[0];

        // Load each payload field at its precomputed offset.
        let mut values = SmallVec::<[ir::Value; 4]>::with_capacity(layout.fields.len());
        let data_flags = ir::MemFlags::trusted();
        for field in &layout.fields {
            let value = builder.ins().load(
                field.ty,
                data_flags,
                payload_ptr,
                Offset32::new(field.offset as i32),
            );
            values.push(value);
        }

        Ok(values)
    }
1805
    /// Translates `throw`: allocates an exception object for `tag_index`,
    /// stores the payload values into its buffer, then invokes the throw
    /// builtin (which never returns — the call site is marked
    /// `unreachable_on_return`) so an in-function handler in `landing_pad`
    /// can catch it.
    fn translate_exn_throw(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: TagIndex,
        args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<()> {
        let layout = self.exception_type_layout(tag_index)?.clone();
        // Arity must match the tag's declared payload; a mismatch here is a
        // translation error, not a runtime trap.
        if layout.fields.len() != args.len() {
            return Err(WasmError::Generic(format!(
                "exception payload arity mismatch: expected {}, got {}",
                layout.fields.len(),
                args.len()
            )));
        }

        // 1. Allocate the exception object for this tag.
        let (alloc_sig, alloc_idx) = self.get_alloc_exception_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, alloc_addr) = self.translate_load_builtin_function_address(&mut pos, alloc_idx);
        let tag_value = builder
            .ins()
            .iconst(TAG_TYPE, i64::from(tag_index.as_u32()));
        let alloc_call = builder
            .ins()
            .call_indirect(alloc_sig, alloc_addr, &[vmctx, tag_value]);
        let exnref = builder.inst_results(alloc_call)[0];

        // 2. Resolve the exnref to its payload buffer.
        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, read_exnref_addr) =
            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
        let read_exnref_call =
            builder
                .ins()
                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
        let payload_ptr = builder.inst_results(read_exnref_call)[0];

        // 3. Store each payload value at its layout offset.
        let store_flags = ir::MemFlags::trusted();
        for (field, value) in layout.fields.iter().zip(args.iter()) {
            debug_assert_eq!(
                builder.func.dfg.value_type(*value),
                field.ty,
                "exception payload type mismatch"
            );
            builder.ins().store(
                store_flags,
                *value,
                payload_ptr,
                Offset32::new(field.offset as i32),
            );
        }

        // 4. Throw. `true` = the builtin never returns normally.
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        let call_args = [vmctx_value, exnref];

        let _ = self.call_indirect_with_handlers(
            builder,
            throw_sig,
            throw_addr,
            &call_args,
            Some(vmctx_value),
            landing_pad,
            true,
        );

        Ok(())
    }
1876
    /// Translates `throw_ref`: rethrows an existing exnref through the throw
    /// builtin. The builtin never returns normally (`unreachable_on_return`
    /// is `true`), but an in-function handler in `landing_pad` can catch it.
    fn translate_exn_throw_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        exnref: ir::Value,
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<()> {
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        let call_args = [vmctx_value, exnref];

        let _ = self.call_indirect_with_handlers(
            builder,
            throw_sig,
            throw_addr,
            &call_args,
            Some(vmctx_value),
            landing_pad,
            true,
        );

        Ok(())
    }
1901
    /// Calls the personality builtin to compute the handler-selector value
    /// for a caught exception (used to dispatch among catch clauses).
    fn translate_exn_personality_selector(
        &mut self,
        builder: &mut FunctionBuilder,
        exn_ptr: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (sig, idx) = self.get_personality2_func(builder.func);
        let pointer_type = self.pointer_type();
        // The exception value may arrive in a non-pointer representation;
        // bitcast it to pointer width if needed. Little-endian flag is
        // required by `bitcast` when lane counts differ.
        let exn_ty = builder.func.dfg.value_type(exn_ptr);
        let exn_arg = if exn_ty == pointer_type {
            exn_ptr
        } else {
            let mut flags = MemFlags::new();
            flags.set_endianness(Endianness::Little);
            builder.ins().bitcast(pointer_type, flags, exn_ptr)
        };

        let mut pos = builder.cursor();
        let (vmctx_value, func_addr) = self.translate_load_builtin_function_address(&mut pos, idx);
        let call = builder
            .ins()
            .call_indirect(sig, func_addr, &[vmctx_value, exn_arg]);
        Ok(builder.inst_results(call)[0])
    }
1925
    /// Rethrows an exception that matched no catch clause in the current
    /// frame. The throw builtin unwinds past this function, so the
    /// trailing trap is only there to terminate the block for the verifier.
    fn translate_exn_reraise_unmatched(
        &mut self,
        builder: &mut FunctionBuilder,
        exnref: ir::Value,
    ) -> WasmResult<()> {
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        builder
            .ins()
            .call_indirect(throw_sig, throw_addr, &[vmctx_value, exnref]);
        builder.ins().trap(crate::TRAP_UNREACHABLE);
        Ok(())
    }
1941
    /// Translates `memory.grow` into a call to the memory-grow runtime
    /// builtin, returning the builtin's single result value.
    fn translate_memory_grow(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
        val: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1957
    /// Translates `memory.size` into a call to the memory-size runtime
    /// builtin, returning the current size.
    fn translate_memory_size(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_size_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1972
    /// Translates `memory.copy` into a call to the memory-copy runtime
    /// builtin. Only the source memory index is passed to the builtin; the
    /// destination index/heap parameters are unused here.
    fn translate_memory_copy(
        &mut self,
        mut pos: FuncCursor,
        src_index: MemoryIndex,
        _src_heap: Heap,
        _dst_index: MemoryIndex,
        _dst_heap: Heap,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, src_index, func_idx) = self.get_memory_copy_func(pos.func, src_index);

        let src_index_arg = pos.ins().iconst(I32, src_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, src_index_arg, dst, src, len]);

        Ok(())
    }
1995
    /// Translates `memory.fill` into a call to the memory-fill runtime
    /// builtin.
    fn translate_memory_fill(
        &mut self,
        mut pos: FuncCursor,
        memory_index: MemoryIndex,
        _heap: Heap,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, memory_index, func_idx) = self.get_memory_fill_func(pos.func, memory_index);

        let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index_arg, dst, val, len],
        );

        Ok(())
    }
2019
    /// Translates `memory.init` into a call to the memory-init runtime
    /// builtin, passing both the memory index and the data-segment index.
    fn translate_memory_init(
        &mut self,
        mut pos: FuncCursor,
        memory_index: MemoryIndex,
        _heap: Heap,
        seg_index: u32,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, func_idx) = self.get_memory_init_func(pos.func);

        let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }
2045
2046 fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
2047 let (func_sig, func_idx) = self.get_data_drop_func(pos.func);
2048 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
2049 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2050 pos.ins()
2051 .call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
2052 Ok(())
2053 }
2054
    /// Translates `table.size` into a call to the table-size runtime
    /// builtin, returning the current element count.
    fn translate_table_size(
        &mut self,
        mut pos: FuncCursor,
        table_index: TableIndex,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, index_arg, func_idx) = self.get_table_size_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, table_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2069
    /// Translates `table.copy` into a call to the table-copy runtime
    /// builtin, passing both the destination and source table indices.
    fn translate_table_copy(
        &mut self,
        mut pos: FuncCursor,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, src_table_index);
        self.ensure_table_exists(pos.func, dst_table_index);
        let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
            self.get_table_copy_func(pos.func, dst_table_index, src_table_index);

        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[
                vmctx,
                dst_table_index_arg,
                src_table_index_arg,
                dst,
                src,
                len,
            ],
        );

        Ok(())
    }
2104
    /// Translates `table.init` into a call to the table-init runtime
    /// builtin, passing the table index and the element-segment index.
    fn translate_table_init(
        &mut self,
        mut pos: FuncCursor,
        seg_index: u32,
        table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_init_func(pos.func, table_index);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }
2130
2131 fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
2132 let (func_sig, func_idx) = self.get_elem_drop_func(pos.func);
2133
2134 let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
2135
2136 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2137
2138 pos.ins()
2139 .call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);
2140
2141 Ok(())
2142 }
2143
    /// Translates `memory.atomic.wait32`/`wait64` into a call to the
    /// matching runtime builtin; the variant is chosen from the IR type of
    /// `expected` (I64 => wait64, otherwise wait32).
    fn translate_atomic_wait(
        &mut self,
        mut pos: FuncCursor,
        index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        expected: ir::Value,
        timeout: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = if pos.func.dfg.value_type(expected) == I64 {
            self.get_memory_atomic_wait64_func(pos.func, index)
        } else {
            self.get_memory_atomic_wait32_func(pos.func, index)
        };
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index, addr, expected, timeout],
        );
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2167
    /// Translates `memory.atomic.notify` into a call to the runtime builtin,
    /// returning the number of woken waiters (the builtin's result).
    fn translate_atomic_notify(
        &mut self,
        mut pos: FuncCursor,
        index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        count: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_atomic_notify_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst =
            pos.ins()
                .call_indirect(func_sig, func_addr, &[vmctx, memory_index, addr, count]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2184
2185 fn get_global_type(&self, global_index: GlobalIndex) -> Option<WasmerType> {
2186 Some(self.module.globals.get(global_index)?.ty)
2187 }
2188
    /// Records the type of a newly declared local so later accesses can
    /// recover it via `get_local_type`.
    fn push_local_decl_on_stack(&mut self, ty: WasmerType) {
        self.type_stack.push(ty);
    }
2192
2193 fn push_params_on_stack(&mut self, function_index: LocalFunctionIndex) {
2194 let func_index = self.module.func_index(function_index);
2195 let sig_idx = self.module.functions[func_index];
2196 let signature = &self.module.signatures[sig_idx];
2197 for param in signature.params() {
2198 self.type_stack.push(*param);
2199 }
2200 }
2201
2202 fn get_local_type(&self, local_index: u32) -> Option<WasmerType> {
2203 self.type_stack.get(local_index as usize).cloned()
2204 }
2205
2206 fn get_local_types(&self) -> &[WasmerType] {
2207 &self.type_stack
2208 }
2209
2210 fn get_function_type(&self, function_index: FunctionIndex) -> Option<&FunctionType> {
2211 let sig_idx = self.module.functions.get(function_index)?;
2212 Some(&self.module.signatures[*sig_idx])
2213 }
2214
    /// Wasm-level function type for signature `sig_index`, or `None` if out
    /// of range.
    fn get_function_sig(&self, sig_index: SignatureIndex) -> Option<&FunctionType> {
        self.module.signatures.get(sig_index)
    }
2218
    /// Spectre mitigation for heap accesses is disabled in this backend.
    fn heap_access_spectre_mitigation(&self) -> bool {
        false
    }
2222
    /// Proof-carrying-code annotations are not emitted by this backend.
    fn proof_carrying_code(&self) -> bool {
        false
    }
2226
    /// All heaps created so far by `make_heap`.
    fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
        &self.heaps
    }
2230}