1use crate::{
5 HashMap,
6 heap::{Heap, HeapData, HeapStyle},
7 table::{TableData, TableSize},
8 translator::{
9 EXN_REF_TYPE, FuncEnvironment as BaseFuncEnvironment, GlobalVariable, LandingPad, TAG_TYPE,
10 TargetEnvironment,
11 },
12};
13use cranelift_codegen::{
14 cursor::FuncCursor,
15 ir::{
16 self, AbiParam, ArgumentPurpose, BlockArg, Endianness, ExceptionTableData,
17 ExceptionTableItem, ExceptionTag, Function, InstBuilder, MemFlags, Signature,
18 UserExternalName,
19 condcodes::IntCC,
20 immediates::{Offset32, Uimm64},
21 types::*,
22 },
23 isa::TargetFrontendConfig,
24};
25use cranelift_frontend::FunctionBuilder;
26use smallvec::SmallVec;
27use std::convert::TryFrom;
28use wasmer_compiler::wasmparser::HeapType;
29use wasmer_types::{
30 FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryStyle,
31 ModuleInfo, SignatureIndex, TableIndex, TableStyle, TagIndex, Type as WasmerType,
32 VMBuiltinFunctionIndex, VMOffsets, WasmError, WasmResult,
33 entity::{EntityRef, PrimaryMap, SecondaryMap},
34};
35
36pub fn get_function_name(func: &mut Function, func_index: FunctionIndex) -> ir::ExternalName {
38 ir::ExternalName::user(
39 func.params
40 .ensure_user_func_name(UserExternalName::new(0, func_index.as_u32())),
41 )
42}
43
44#[allow(unused)]
46pub fn type_of_vmtable_definition_current_elements(vmoffsets: &VMOffsets) -> ir::Type {
47 ir::Type::int(u16::from(vmoffsets.size_of_vmtable_definition_current_elements()) * 8).unwrap()
48}
49
/// Location and IR type of a single payload field inside an exception
/// object's payload area.
#[derive(Clone)]
struct ExceptionFieldLayout {
    /// Byte offset of the field from the start of the payload.
    offset: u32,
    /// Cranelift IR type used to load/store this field.
    ty: ir::Type,
}
55
/// Computed payload layout for one exception tag: one field entry per
/// parameter of the tag's signature, in declaration order (see
/// `compute_exception_type_layout`).
#[derive(Clone)]
struct ExceptionTypeLayout {
    /// Field layouts in parameter order; inline capacity for the common
    /// small-payload case.
    fields: SmallVec<[ExceptionFieldLayout; 4]>,
}
60
/// Per-function translation environment: resolves module-level entities
/// (memories, tables, globals, exception tags) to Cranelift IR constructs
/// and lazily imports the signatures of VM builtin libcalls.
pub struct FuncEnvironment<'module_environment> {
    /// Target ISA frontend settings (pointer type, default call convention).
    target_config: TargetFrontendConfig,

    /// Read-only description of the module being compiled.
    module: &'module_environment ModuleInfo,

    /// Wasm value types tracked during translation.
    /// NOTE(review): maintained by code outside this chunk — confirm usage.
    type_stack: Vec<WasmerType>,

    /// Cranelift signatures for all of the module's Wasm signatures.
    signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,

    /// Heap descriptors created for this function's memories.
    heaps: PrimaryMap<Heap, HeapData>,

    /// Lazily-created `VMContext` global value; `None` until first use.
    vmctx: Option<ir::GlobalValue>,

    /// Lazily-imported signature of the `memory.size` libcall.
    memory32_size_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.size` libcall.
    table_size_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `memory.grow` libcall.
    memory_grow_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.grow` libcall.
    table_grow_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.copy` libcall.
    table_copy_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.init` libcall.
    table_init_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `elem.drop` libcall.
    elem_drop_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `memory.copy` libcall.
    memory_copy_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `memory.fill` libcall.
    memory_fill_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `memory.init` libcall.
    memory_init_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `data.drop` libcall.
    data_drop_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.get` libcall.
    table_get_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.set` libcall.
    table_set_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `ref.func` libcall.
    func_ref_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `table.fill` libcall.
    table_fill_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the 32-bit `memory.atomic.wait` libcall.
    memory32_atomic_wait32_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the 64-bit `memory.atomic.wait` libcall.
    memory32_atomic_wait64_sig: Option<ir::SigRef>,

    /// Lazily-imported signature of the `memory.atomic.notify` libcall.
    memory32_atomic_notify_sig: Option<ir::SigRef>,

    /// Lazily-imported signatures of the exception-handling builtins.
    personality2_sig: Option<ir::SigRef>,
    throw_sig: Option<ir::SigRef>,
    alloc_exception_sig: Option<ir::SigRef>,
    read_exception_sig: Option<ir::SigRef>,
    read_exnref_sig: Option<ir::SigRef>,

    /// Memoized payload layouts, keyed by raw tag index.
    exception_type_layouts: HashMap<u32, ExceptionTypeLayout>,

    /// VMContext field offsets for this module on this target.
    offsets: VMOffsets,

    /// How each memory is implemented (static/dynamic bounds).
    memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,

    /// Lazily-populated per-table base/bound info; see `ensure_table_exists`.
    tables: SecondaryMap<TableIndex, Option<TableData>>,

    /// How each table is implemented.
    table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
}
163
164impl<'module_environment> FuncEnvironment<'module_environment> {
165 pub fn new(
166 target_config: TargetFrontendConfig,
167 module: &'module_environment ModuleInfo,
168 signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
169 memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
170 table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
171 ) -> Self {
172 Self {
173 target_config,
174 module,
175 signatures,
176 type_stack: vec![],
177 heaps: PrimaryMap::new(),
178 vmctx: None,
179 memory32_size_sig: None,
180 table_size_sig: None,
181 memory_grow_sig: None,
182 table_grow_sig: None,
183 table_copy_sig: None,
184 table_init_sig: None,
185 elem_drop_sig: None,
186 memory_copy_sig: None,
187 memory_fill_sig: None,
188 memory_init_sig: None,
189 table_get_sig: None,
190 table_set_sig: None,
191 data_drop_sig: None,
192 func_ref_sig: None,
193 table_fill_sig: None,
194 memory32_atomic_wait32_sig: None,
195 memory32_atomic_wait64_sig: None,
196 memory32_atomic_notify_sig: None,
197 personality2_sig: None,
198 throw_sig: None,
199 alloc_exception_sig: None,
200 read_exception_sig: None,
201 read_exnref_sig: None,
202 exception_type_layouts: HashMap::new(),
203 offsets: VMOffsets::new(target_config.pointer_bytes(), module),
204 memory_styles,
205 tables: Default::default(),
206 table_styles,
207 }
208 }
209
    /// The native pointer IR type for the compilation target.
    fn pointer_type(&self) -> ir::Type {
        self.target_config.pointer_type()
    }
213
    /// Lazily materializes the `TableData` (base global value, bound, element
    /// size) for `index`, caching it in `self.tables`. Subsequent calls are
    /// no-ops.
    fn ensure_table_exists(&mut self, func: &mut ir::Function, index: TableIndex) {
        if self.tables[index].is_some() {
            return;
        }

        let pointer_type = self.pointer_type();
        let table = &self.module.tables[index];

        let (base_gv, table_base_offset, bound, element_size, inline_anyfunc) =
            // Fast path: a local, fixed-size funcref table whose anyfunc
            // entries are stored inline in the VMContext itself — address
            // elements directly off the vmctx global.
            if let Some(def_index) = self.module.local_table_index(index)
                && table.is_fixed_funcref_table()
            {
                (
                    self.vmctx(func),
                    i32::try_from(
                        self.offsets
                            .vmctx_fixed_funcref_table_anyfuncs(def_index)
                            .expect("fixed funcref table must have inline VMContext storage"),
                    )
                    .unwrap(),
                    // Fixed table: the bound is the declared minimum.
                    TableSize::Static {
                        bound: table.minimum,
                    },
                    // Elements are whole VMCallerCheckedAnyfunc structs,
                    // not pointers.
                    u32::from(self.offsets.size_of_vmcaller_checked_anyfunc()),
                    true,
                )
            } else {
                // General path: locate the VMTableDefinition — either inline
                // in the VMContext (local table) or behind one pointer load
                // (imported table).
                let (ptr, base_offset, current_elements_offset) = {
                    let vmctx = self.vmctx(func);
                    if let Some(def_index) = self.module.local_table_index(index) {
                        let base_offset =
                            i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index))
                                .unwrap();
                        let current_elements_offset = i32::try_from(
                            self.offsets
                                .vmctx_vmtable_definition_current_elements(def_index),
                        )
                        .unwrap();
                        (vmctx, base_offset, current_elements_offset)
                    } else {
                        // Imported: load the pointer to the owning instance's
                        // VMTableDefinition; that pointer never changes, so
                        // the load is readonly.
                        let from_offset = self.offsets.vmctx_vmtable_import(index);
                        let table = func.create_global_value(ir::GlobalValueData::Load {
                            base: vmctx,
                            offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                            global_type: pointer_type,
                            flags: MemFlags::trusted().with_readonly(),
                        });
                        let base_offset = i32::from(self.offsets.vmtable_definition_base());
                        let current_elements_offset =
                            i32::from(self.offsets.vmtable_definition_current_elements());
                        (table, base_offset, current_elements_offset)
                    }
                };

                // Base pointer of the element array. If the table can never
                // grow (min == max), the base is immutable and the load may
                // be marked readonly for better code motion.
                let base_gv = func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(base_offset),
                    global_type: pointer_type,
                    flags: if Some(table.minimum) == table.maximum {
                        MemFlags::trusted().with_readonly()
                    } else {
                        MemFlags::trusted()
                    },
                });

                // Bound: constant when the table size is fixed, otherwise a
                // load of `current_elements` (typed to the field's width).
                let bound = if Some(table.minimum) == table.maximum {
                    TableSize::Static {
                        bound: table.minimum,
                    }
                } else {
                    TableSize::Dynamic {
                        bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                            base: ptr,
                            offset: Offset32::new(current_elements_offset),
                            global_type: ir::Type::int(
                                u16::from(
                                    self.offsets.size_of_vmtable_definition_current_elements(),
                                ) * 8,
                            )
                            .unwrap(),
                            flags: MemFlags::trusted(),
                        }),
                    }
                };

                // Elements here are reference-sized slots, not inline structs.
                (base_gv, 0, bound, self.reference_type().bytes(), false)
            };

        self.tables[index] = Some(TableData {
            base_gv,
            base_offset: table_base_offset,
            bound,
            element_size,
            inline_anyfunc,
        });
    }
312
313 fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
314 self.vmctx.unwrap_or_else(|| {
315 let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
316 self.vmctx = Some(vmctx);
317 vmctx
318 })
319 }
320
321 fn get_table_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
322 let sig = self.table_fill_sig.unwrap_or_else(|| {
323 func.import_signature(Signature {
324 params: vec![
325 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
326 AbiParam::new(I32),
328 AbiParam::new(I32),
330 AbiParam::new(self.reference_type()),
332 AbiParam::new(I32),
334 ],
335 returns: vec![],
336 call_conv: self.target_config.default_call_conv,
337 })
338 });
339 self.table_fill_sig = Some(sig);
340 sig
341 }
342
343 fn get_table_fill_func(
344 &mut self,
345 func: &mut Function,
346 table_index: TableIndex,
347 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
348 (
349 self.get_table_fill_sig(func),
350 table_index.index(),
351 VMBuiltinFunctionIndex::get_table_fill_index(),
352 )
353 }
354
355 fn get_func_ref_sig(&mut self, func: &mut Function) -> ir::SigRef {
356 let sig = self.func_ref_sig.unwrap_or_else(|| {
357 func.import_signature(Signature {
358 params: vec![
359 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
360 AbiParam::new(I32),
361 ],
362 returns: vec![AbiParam::new(self.reference_type())],
363 call_conv: self.target_config.default_call_conv,
364 })
365 });
366 self.func_ref_sig = Some(sig);
367 sig
368 }
369
370 fn get_func_ref_func(
371 &mut self,
372 func: &mut Function,
373 function_index: FunctionIndex,
374 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
375 (
376 self.get_func_ref_sig(func),
377 function_index.index(),
378 VMBuiltinFunctionIndex::get_func_ref_index(),
379 )
380 }
381
382 fn get_table_get_sig(&mut self, func: &mut Function) -> ir::SigRef {
383 let sig = self.table_get_sig.unwrap_or_else(|| {
384 func.import_signature(Signature {
385 params: vec![
386 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
387 AbiParam::new(I32),
388 AbiParam::new(I32),
389 ],
390 returns: vec![AbiParam::new(self.reference_type())],
391 call_conv: self.target_config.default_call_conv,
392 })
393 });
394 self.table_get_sig = Some(sig);
395 sig
396 }
397
398 fn get_table_get_func(
399 &mut self,
400 func: &mut Function,
401 table_index: TableIndex,
402 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
403 if self.module.is_imported_table(table_index) {
404 (
405 self.get_table_get_sig(func),
406 table_index.index(),
407 VMBuiltinFunctionIndex::get_imported_table_get_index(),
408 )
409 } else {
410 (
411 self.get_table_get_sig(func),
412 self.module.local_table_index(table_index).unwrap().index(),
413 VMBuiltinFunctionIndex::get_table_get_index(),
414 )
415 }
416 }
417
418 fn get_table_set_sig(&mut self, func: &mut Function) -> ir::SigRef {
419 let sig = self.table_set_sig.unwrap_or_else(|| {
420 func.import_signature(Signature {
421 params: vec![
422 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
423 AbiParam::new(I32),
424 AbiParam::new(I32),
425 AbiParam::new(self.reference_type()),
426 ],
427 returns: vec![],
428 call_conv: self.target_config.default_call_conv,
429 })
430 });
431 self.table_set_sig = Some(sig);
432 sig
433 }
434
435 fn get_table_set_func(
436 &mut self,
437 func: &mut Function,
438 table_index: TableIndex,
439 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
440 if self.module.is_imported_table(table_index) {
441 (
442 self.get_table_set_sig(func),
443 table_index.index(),
444 VMBuiltinFunctionIndex::get_imported_table_set_index(),
445 )
446 } else {
447 (
448 self.get_table_set_sig(func),
449 self.module.local_table_index(table_index).unwrap().index(),
450 VMBuiltinFunctionIndex::get_table_set_index(),
451 )
452 }
453 }
454
455 fn get_table_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
456 let sig = self.table_grow_sig.unwrap_or_else(|| {
457 func.import_signature(Signature {
458 params: vec![
459 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
460 AbiParam::new(self.reference_type()),
462 AbiParam::new(I32),
463 AbiParam::new(I32),
464 ],
465 returns: vec![AbiParam::new(I32)],
466 call_conv: self.target_config.default_call_conv,
467 })
468 });
469 self.table_grow_sig = Some(sig);
470 sig
471 }
472
473 fn get_table_grow_func(
476 &mut self,
477 func: &mut Function,
478 index: TableIndex,
479 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
480 if self.module.is_imported_table(index) {
481 (
482 self.get_table_grow_sig(func),
483 index.index(),
484 VMBuiltinFunctionIndex::get_imported_table_grow_index(),
485 )
486 } else {
487 (
488 self.get_table_grow_sig(func),
489 self.module.local_table_index(index).unwrap().index(),
490 VMBuiltinFunctionIndex::get_table_grow_index(),
491 )
492 }
493 }
494
495 fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
496 let sig = self.memory_grow_sig.unwrap_or_else(|| {
497 func.import_signature(Signature {
498 params: vec![
499 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
500 AbiParam::new(I32),
501 AbiParam::new(I32),
502 ],
503 returns: vec![AbiParam::new(I32)],
504 call_conv: self.target_config.default_call_conv,
505 })
506 });
507 self.memory_grow_sig = Some(sig);
508 sig
509 }
510
511 fn get_memory_grow_func(
514 &mut self,
515 func: &mut Function,
516 index: MemoryIndex,
517 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
518 if self.module.is_imported_memory(index) {
519 (
520 self.get_memory_grow_sig(func),
521 index.index(),
522 VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
523 )
524 } else {
525 (
526 self.get_memory_grow_sig(func),
527 self.module.local_memory_index(index).unwrap().index(),
528 VMBuiltinFunctionIndex::get_memory32_grow_index(),
529 )
530 }
531 }
532
533 fn get_table_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
534 let sig = self.table_size_sig.unwrap_or_else(|| {
535 func.import_signature(Signature {
536 params: vec![
537 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
538 AbiParam::new(I32),
539 ],
540 returns: vec![AbiParam::new(I32)],
541 call_conv: self.target_config.default_call_conv,
542 })
543 });
544 self.table_size_sig = Some(sig);
545 sig
546 }
547
548 fn get_table_size_func(
551 &mut self,
552 func: &mut Function,
553 index: TableIndex,
554 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
555 if self.module.is_imported_table(index) {
556 (
557 self.get_table_size_sig(func),
558 index.index(),
559 VMBuiltinFunctionIndex::get_imported_table_size_index(),
560 )
561 } else {
562 (
563 self.get_table_size_sig(func),
564 self.module.local_table_index(index).unwrap().index(),
565 VMBuiltinFunctionIndex::get_table_size_index(),
566 )
567 }
568 }
569
570 fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
571 let sig = self.memory32_size_sig.unwrap_or_else(|| {
572 func.import_signature(Signature {
573 params: vec![
574 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
575 AbiParam::new(I32),
576 ],
577 returns: vec![AbiParam::new(I32)],
578 call_conv: self.target_config.default_call_conv,
579 })
580 });
581 self.memory32_size_sig = Some(sig);
582 sig
583 }
584
585 fn get_memory_size_func(
588 &mut self,
589 func: &mut Function,
590 index: MemoryIndex,
591 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
592 if self.module.is_imported_memory(index) {
593 (
594 self.get_memory32_size_sig(func),
595 index.index(),
596 VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
597 )
598 } else {
599 (
600 self.get_memory32_size_sig(func),
601 self.module.local_memory_index(index).unwrap().index(),
602 VMBuiltinFunctionIndex::get_memory32_size_index(),
603 )
604 }
605 }
606
607 fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
608 let sig = self.table_copy_sig.unwrap_or_else(|| {
609 func.import_signature(Signature {
610 params: vec![
611 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
612 AbiParam::new(I32),
614 AbiParam::new(I32),
616 AbiParam::new(I32),
618 AbiParam::new(I32),
620 AbiParam::new(I32),
622 ],
623 returns: vec![],
624 call_conv: self.target_config.default_call_conv,
625 })
626 });
627 self.table_copy_sig = Some(sig);
628 sig
629 }
630
631 fn get_table_copy_func(
632 &mut self,
633 func: &mut Function,
634 dst_table_index: TableIndex,
635 src_table_index: TableIndex,
636 ) -> (ir::SigRef, usize, usize, VMBuiltinFunctionIndex) {
637 let sig = self.get_table_copy_sig(func);
638 (
639 sig,
640 dst_table_index.as_u32() as usize,
641 src_table_index.as_u32() as usize,
642 VMBuiltinFunctionIndex::get_table_copy_index(),
643 )
644 }
645
646 fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
647 let sig = self.table_init_sig.unwrap_or_else(|| {
648 func.import_signature(Signature {
649 params: vec![
650 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
651 AbiParam::new(I32),
653 AbiParam::new(I32),
655 AbiParam::new(I32),
657 AbiParam::new(I32),
659 AbiParam::new(I32),
661 ],
662 returns: vec![],
663 call_conv: self.target_config.default_call_conv,
664 })
665 });
666 self.table_init_sig = Some(sig);
667 sig
668 }
669
670 fn get_table_init_func(
671 &mut self,
672 func: &mut Function,
673 table_index: TableIndex,
674 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
675 let sig = self.get_table_init_sig(func);
676 let table_index = table_index.as_u32() as usize;
677 (
678 sig,
679 table_index,
680 VMBuiltinFunctionIndex::get_table_init_index(),
681 )
682 }
683
684 fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
685 let sig = self.elem_drop_sig.unwrap_or_else(|| {
686 func.import_signature(Signature {
687 params: vec![
688 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
689 AbiParam::new(I32),
691 ],
692 returns: vec![],
693 call_conv: self.target_config.default_call_conv,
694 })
695 });
696 self.elem_drop_sig = Some(sig);
697 sig
698 }
699
700 fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
701 let sig = self.get_elem_drop_sig(func);
702 (sig, VMBuiltinFunctionIndex::get_elem_drop_index())
703 }
704
705 fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
706 let sig = self.memory_copy_sig.unwrap_or_else(|| {
707 func.import_signature(Signature {
708 params: vec![
709 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
710 AbiParam::new(I32),
712 AbiParam::new(I32),
714 AbiParam::new(I32),
716 AbiParam::new(I32),
718 ],
719 returns: vec![],
720 call_conv: self.target_config.default_call_conv,
721 })
722 });
723 self.memory_copy_sig = Some(sig);
724 sig
725 }
726
727 fn get_memory_copy_func(
728 &mut self,
729 func: &mut Function,
730 memory_index: MemoryIndex,
731 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
732 let sig = self.get_memory_copy_sig(func);
733 if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
734 (
735 sig,
736 local_memory_index.index(),
737 VMBuiltinFunctionIndex::get_memory_copy_index(),
738 )
739 } else {
740 (
741 sig,
742 memory_index.index(),
743 VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
744 )
745 }
746 }
747
748 fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
749 let sig = self.memory_fill_sig.unwrap_or_else(|| {
750 func.import_signature(Signature {
751 params: vec![
752 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
753 AbiParam::new(I32),
755 AbiParam::new(I32),
757 AbiParam::new(I32),
759 AbiParam::new(I32),
761 ],
762 returns: vec![],
763 call_conv: self.target_config.default_call_conv,
764 })
765 });
766 self.memory_fill_sig = Some(sig);
767 sig
768 }
769
770 fn get_memory_fill_func(
771 &mut self,
772 func: &mut Function,
773 memory_index: MemoryIndex,
774 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
775 let sig = self.get_memory_fill_sig(func);
776 if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
777 (
778 sig,
779 local_memory_index.index(),
780 VMBuiltinFunctionIndex::get_memory_fill_index(),
781 )
782 } else {
783 (
784 sig,
785 memory_index.index(),
786 VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
787 )
788 }
789 }
790
791 fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
792 let sig = self.memory_init_sig.unwrap_or_else(|| {
793 func.import_signature(Signature {
794 params: vec![
795 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
796 AbiParam::new(I32),
798 AbiParam::new(I32),
800 AbiParam::new(I32),
802 AbiParam::new(I32),
804 AbiParam::new(I32),
806 ],
807 returns: vec![],
808 call_conv: self.target_config.default_call_conv,
809 })
810 });
811 self.memory_init_sig = Some(sig);
812 sig
813 }
814
815 fn get_memory_init_func(
816 &mut self,
817 func: &mut Function,
818 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
819 let sig = self.get_memory_init_sig(func);
820 (sig, VMBuiltinFunctionIndex::get_memory_init_index())
821 }
822
823 fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
824 let sig = self.data_drop_sig.unwrap_or_else(|| {
825 func.import_signature(Signature {
826 params: vec![
827 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
828 AbiParam::new(I32),
830 ],
831 returns: vec![],
832 call_conv: self.target_config.default_call_conv,
833 })
834 });
835 self.data_drop_sig = Some(sig);
836 sig
837 }
838
839 fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
840 let sig = self.get_data_drop_sig(func);
841 (sig, VMBuiltinFunctionIndex::get_data_drop_index())
842 }
843
844 fn get_memory32_atomic_wait32_sig(&mut self, func: &mut Function) -> ir::SigRef {
845 let sig = self.memory32_atomic_wait32_sig.unwrap_or_else(|| {
846 func.import_signature(Signature {
847 params: vec![
848 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
849 AbiParam::new(I32),
851 AbiParam::new(I32),
853 AbiParam::new(I32),
855 AbiParam::new(I64),
857 ],
858 returns: vec![AbiParam::new(I32)],
859 call_conv: self.target_config.default_call_conv,
860 })
861 });
862 self.memory32_atomic_wait32_sig = Some(sig);
863 sig
864 }
865
866 fn get_memory_atomic_wait32_func(
870 &mut self,
871 func: &mut Function,
872 index: MemoryIndex,
873 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
874 if self.module.is_imported_memory(index) {
875 (
876 self.get_memory32_atomic_wait32_sig(func),
877 index.index(),
878 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
879 )
880 } else {
881 (
882 self.get_memory32_atomic_wait32_sig(func),
883 self.module.local_memory_index(index).unwrap().index(),
884 VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
885 )
886 }
887 }
888
889 fn get_memory32_atomic_wait64_sig(&mut self, func: &mut Function) -> ir::SigRef {
890 let sig = self.memory32_atomic_wait64_sig.unwrap_or_else(|| {
891 func.import_signature(Signature {
892 params: vec![
893 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
894 AbiParam::new(I32),
896 AbiParam::new(I32),
898 AbiParam::new(I64),
900 AbiParam::new(I64),
902 ],
903 returns: vec![AbiParam::new(I32)],
904 call_conv: self.target_config.default_call_conv,
905 })
906 });
907 self.memory32_atomic_wait64_sig = Some(sig);
908 sig
909 }
910
911 fn get_memory_atomic_wait64_func(
915 &mut self,
916 func: &mut Function,
917 index: MemoryIndex,
918 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
919 if self.module.is_imported_memory(index) {
920 (
921 self.get_memory32_atomic_wait64_sig(func),
922 index.index(),
923 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
924 )
925 } else {
926 (
927 self.get_memory32_atomic_wait64_sig(func),
928 self.module.local_memory_index(index).unwrap().index(),
929 VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
930 )
931 }
932 }
933
934 fn get_memory32_atomic_notify_sig(&mut self, func: &mut Function) -> ir::SigRef {
935 let sig = self.memory32_atomic_notify_sig.unwrap_or_else(|| {
936 func.import_signature(Signature {
937 params: vec![
938 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
939 AbiParam::new(I32),
941 AbiParam::new(I32),
943 AbiParam::new(I32),
945 ],
946 returns: vec![AbiParam::new(I32)],
947 call_conv: self.target_config.default_call_conv,
948 })
949 });
950 self.memory32_atomic_notify_sig = Some(sig);
951 sig
952 }
953
954 fn get_memory_atomic_notify_func(
958 &mut self,
959 func: &mut Function,
960 index: MemoryIndex,
961 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
962 if self.module.is_imported_memory(index) {
963 (
964 self.get_memory32_atomic_notify_sig(func),
965 index.index(),
966 VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
967 )
968 } else {
969 (
970 self.get_memory32_atomic_notify_sig(func),
971 self.module.local_memory_index(index).unwrap().index(),
972 VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
973 )
974 }
975 }
976
977 fn get_personality2_func(
978 &mut self,
979 func: &mut Function,
980 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
981 let sig = self.personality2_sig.unwrap_or_else(|| {
982 let mut signature = Signature::new(self.target_config.default_call_conv);
983 signature.params.push(AbiParam::new(self.pointer_type()));
984 signature.params.push(AbiParam::new(self.pointer_type()));
985 signature.returns.push(AbiParam::new(TAG_TYPE));
986 let sig = func.import_signature(signature);
987 self.personality2_sig = Some(sig);
988 sig
989 });
990 (
991 sig,
992 VMBuiltinFunctionIndex::get_imported_personality2_index(),
993 )
994 }
995
996 fn get_throw_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
997 let sig = self.throw_sig.unwrap_or_else(|| {
998 let mut signature = Signature::new(self.target_config.default_call_conv);
999 signature.params.push(AbiParam::special(
1000 self.pointer_type(),
1001 ArgumentPurpose::VMContext,
1002 ));
1003 signature.params.push(AbiParam::new(EXN_REF_TYPE));
1004 let sig = func.import_signature(signature);
1005 self.throw_sig = Some(sig);
1006 sig
1007 });
1008 (sig, VMBuiltinFunctionIndex::get_imported_throw_index())
1009 }
1010
1011 fn get_alloc_exception_func(
1012 &mut self,
1013 func: &mut Function,
1014 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
1015 let sig = self.alloc_exception_sig.unwrap_or_else(|| {
1016 let mut signature = Signature::new(self.target_config.default_call_conv);
1017 signature.params.push(AbiParam::special(
1018 self.pointer_type(),
1019 ArgumentPurpose::VMContext,
1020 ));
1021 signature.params.push(AbiParam::new(TAG_TYPE));
1022 signature.returns.push(AbiParam::new(EXN_REF_TYPE));
1023 let sig = func.import_signature(signature);
1024 self.alloc_exception_sig = Some(sig);
1025 sig
1026 });
1027 (
1028 sig,
1029 VMBuiltinFunctionIndex::get_imported_alloc_exception_index(),
1030 )
1031 }
1032
1033 fn get_read_exnref_func(
1034 &mut self,
1035 func: &mut Function,
1036 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
1037 let sig = self.read_exnref_sig.unwrap_or_else(|| {
1038 let mut signature = Signature::new(self.target_config.default_call_conv);
1039 signature.params.push(AbiParam::special(
1040 self.pointer_type(),
1041 ArgumentPurpose::VMContext,
1042 ));
1043 signature.params.push(AbiParam::new(EXN_REF_TYPE));
1044 signature.returns.push(AbiParam::new(self.pointer_type()));
1045 let sig = func.import_signature(signature);
1046 self.read_exnref_sig = Some(sig);
1047 sig
1048 });
1049 (
1050 sig,
1051 VMBuiltinFunctionIndex::get_imported_read_exnref_index(),
1052 )
1053 }
1054
1055 fn get_read_exception_func(
1056 &mut self,
1057 func: &mut Function,
1058 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
1059 let sig = self.read_exception_sig.unwrap_or_else(|| {
1060 let mut signature = Signature::new(self.target_config.default_call_conv);
1061 signature.params.push(AbiParam::new(self.pointer_type()));
1062 signature.returns.push(AbiParam::new(EXN_REF_TYPE));
1063 let sig = func.import_signature(signature);
1064 self.read_exception_sig = Some(sig);
1065 sig
1066 });
1067 (
1068 sig,
1069 VMBuiltinFunctionIndex::get_imported_exception_into_exnref_index(),
1070 )
1071 }
1072
1073 fn exception_type_layout(&mut self, tag_index: TagIndex) -> WasmResult<&ExceptionTypeLayout> {
1074 let key = tag_index.as_u32();
1075 if !self.exception_type_layouts.contains_key(&key) {
1076 let layout = self.compute_exception_type_layout(tag_index)?;
1077 self.exception_type_layouts.insert(key, layout);
1078 }
1079 Ok(self.exception_type_layouts.get(&key).unwrap())
1080 }
1081
1082 fn compute_exception_type_layout(
1083 &self,
1084 tag_index: TagIndex,
1085 ) -> WasmResult<ExceptionTypeLayout> {
1086 let sig_index = self.module.tags[tag_index];
1087 let func_type = &self.module.signatures[sig_index];
1088 let mut offset = 0u32;
1089 let mut max_align = 1u32;
1090 let mut fields = SmallVec::<[ExceptionFieldLayout; 4]>::new();
1091
1092 for wasm_ty in func_type.params() {
1093 let ir_ty = self.map_wasmer_type_to_ir(*wasm_ty)?;
1094 let field_size = ir_ty.bytes();
1095 let align = field_size.max(1);
1096 max_align = max_align.max(align);
1097 offset = offset.next_multiple_of(align);
1098 fields.push(ExceptionFieldLayout { offset, ty: ir_ty });
1099 offset = offset
1100 .checked_add(field_size)
1101 .ok_or_else(|| WasmError::Unsupported("exception payload too large".to_string()))?;
1102 }
1103
1104 Ok(ExceptionTypeLayout { fields })
1105 }
1106
1107 fn map_wasmer_type_to_ir(&self, ty: WasmerType) -> WasmResult<ir::Type> {
1108 Ok(match ty {
1109 WasmerType::I32 => ir::types::I32,
1110 WasmerType::I64 => ir::types::I64,
1111 WasmerType::F32 => ir::types::F32,
1112 WasmerType::F64 => ir::types::F64,
1113 WasmerType::V128 => ir::types::I8X16,
1114 WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
1115 self.reference_type()
1116 }
1117 })
1118 }
1119
    /// Emits a call to `callee`, wiring exceptional edges to `landing_pad`
    /// when one is supplied, and returns the call's result values.
    ///
    /// * No landing pad: a plain `call` is emitted and its results returned.
    /// * With a landing pad: a `try_call` is emitted whose normal edge jumps
    ///   to a fresh continuation block (one block parameter per return
    ///   value); each landing-pad clause becomes an exception-table entry
    ///   targeting `landing_pad.block` with the two `TryCallExn` payloads.
    /// * `context`, if present, is recorded as the exception-table context.
    /// * `unreachable_on_return` appends a trap after the call, for callees
    ///   that never return normally.
    fn call_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        callee: ir::FuncRef,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig_ref]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        // Fast path: no handlers in scope, emit an ordinary call.
        if landing_pad.is_none() {
            let inst = builder.ins().call(callee, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        // Continuation block: receives the call's normal results as block
        // parameters, forwarded from the try_call via `TryCallRet(i)`.
        let continuation = builder.create_block();
        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        // Exception table: optional context value first, then one entry per
        // landing-pad clause (specific tag or default catch-all), each
        // passing the two exception payload values to the landing-pad block.
        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }
        let etd = ExceptionTableData::new(sig_ref, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call(callee, args, et);
        builder.switch_to_block(continuation);
        // The continuation's only predecessor is the try_call's normal edge,
        // so it can be sealed immediately.
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }
        result_values
    }
1185
    /// Emit an indirect call to `func_addr` (with signature `sig`), optionally
    /// attaching exception handlers.
    ///
    /// With no `landing_pad` this is a plain `call_indirect`. With a landing
    /// pad it becomes a `try_call_indirect` whose exception table routes each
    /// clause's tag (or the default clause) to the landing-pad block, and
    /// routes normal returns to a freshly created continuation block.
    ///
    /// `context`, when present, is recorded as the exception-table context
    /// value. `unreachable_on_return` appends a trap after the call for
    /// callees that never return normally (e.g. the throw builtin).
    ///
    /// Returns the call's result values (continuation block parameters in the
    /// landing-pad case).
    #[allow(clippy::too_many_arguments)]
    fn call_indirect_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        sig: ir::SigRef,
        func_addr: ir::Value,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        // Callee result types; they become the continuation block's parameters.
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        // Fast path: no handlers installed, emit a plain indirect call.
        if landing_pad.is_none() {
            let inst = builder.ins().call_indirect(sig, func_addr, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        let continuation = builder.create_block();
        let current_block = builder.current_block().expect("current block");
        builder.insert_block_after(continuation, current_block);

        // One continuation parameter per return value, each fed from the
        // corresponding try-call return slot.
        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        // Exception table: optional context entry first, then one entry per
        // landing-pad clause. Each clause forwards the two exception payload
        // values (TryCallExn 0/1) to the landing-pad block.
        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }

        let etd = ExceptionTableData::new(sig, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call_indirect(func_addr, args, et);
        // The continuation's only predecessor is the try-call just emitted,
        // so it can be sealed immediately.
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }

        result_values
    }
1257
1258 fn translate_load_builtin_function_address(
1261 &mut self,
1262 pos: &mut FuncCursor<'_>,
1263 callee_func_idx: VMBuiltinFunctionIndex,
1264 ) -> (ir::Value, ir::Value) {
1265 let pointer_type = self.pointer_type();
1267 let vmctx = self.vmctx(pos.func);
1268 let base = pos.ins().global_value(pointer_type, vmctx);
1269
1270 let mut mem_flags = ir::MemFlags::trusted();
1271 mem_flags.set_readonly();
1272
1273 let body_offset =
1275 i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
1276 let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
1277
1278 (base, func_addr)
1279 }
1280
    /// Resolve the funcref table element at `index` of `table_index` to an
    /// anyfunc pointer usable for an indirect call.
    ///
    /// Returns `(pointer, inline)` reflecting the table's `inline_anyfunc`
    /// layout flag:
    /// - inline tables: the entry address itself is the anyfunc storage;
    /// - pointer tables: the entry is loaded to obtain the anyfunc pointer,
    ///   which may be null for uninitialized slots (callers must check).
    fn get_or_init_funcref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> (ir::Value, bool) {
        let pointer_type = self.pointer_type();
        self.ensure_table_exists(builder.func, table_index);
        let table_data = self.tables[table_index].as_ref().unwrap();

        // Bounds-checked address of the table entry for `index`.
        let (table_entry_addr, flags) =
            table_data.prepare_table_addr(builder, index, pointer_type, false);
        if table_data.inline_anyfunc {
            (table_entry_addr, true)
        } else {
            (
                builder.ins().load(pointer_type, flags, table_entry_addr, 0),
                false,
            )
        }
    }
1302}
1303
/// Expose the target ISA's frontend configuration (pointer width etc.) to the
/// shared translator code.
impl TargetEnvironment for FuncEnvironment<'_> {
    fn target_config(&self) -> TargetFrontendConfig {
        self.target_config
    }
}
1309
1310impl BaseFuncEnvironment for FuncEnvironment<'_> {
1311 fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
1312 index >= 1
1314 }
1315
    /// Translate `table.grow` as a call to the runtime's table-grow builtin
    /// (variant chosen by `get_table_grow_func`).
    fn translate_table_grow(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        delta: ir::Value,
        init_value: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, index_arg, func_idx) = self.get_table_grow_func(pos.func, table_index);
        // The builtin receives the table index as a plain i32 constant.
        let table_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, init_value, delta, table_index],
        );
        // Forward the builtin's single return value as the wasm result.
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1334
    /// Translate `table.get` as a call to the runtime's table-get builtin.
    fn translate_table_get(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_get_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, table_index, index]);
        // Forward the builtin's single return value (the element).
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1352
    /// Translate `table.set` as a call to the runtime's table-set builtin.
    fn translate_table_set(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        value: ir::Value,
        index: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_set_func(pos.func, table_index);
        let n_table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, n_table_index, index, value]);
        Ok(())
    }
1370
    /// Translate `table.fill` as a call to the runtime's table-fill builtin.
    fn translate_table_fill(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_fill_func(pos.func, table_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, dst, val, len],
        );

        Ok(())
    }
1392
    /// Translate `ref.null T`.
    ///
    /// Only the abstract `func`, `extern` and `exn` heap types are supported;
    /// null is materialized as a zero constant. `exn` references use `I32`
    /// here rather than the pointer-sized reference type.
    fn translate_ref_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        ty: HeapType,
    ) -> WasmResult<ir::Value> {
        Ok(match ty {
            HeapType::Abstract { ty, .. } => match ty {
                wasmer_compiler::wasmparser::AbstractHeapType::Func
                | wasmer_compiler::wasmparser::AbstractHeapType::Extern
                | wasmer_compiler::wasmparser::AbstractHeapType::Exn => pos.ins().iconst(
                    if matches!(ty, wasmer_compiler::wasmparser::AbstractHeapType::Exn) {
                        I32
                    } else {
                        self.reference_type()
                    },
                    0,
                ),
                _ => {
                    return Err(WasmError::Unsupported(format!(
                        "`ref.null T` that is not a `funcref`, an `externref` or an `exn`: {ty:?}"
                    )));
                }
            },
            HeapType::Concrete(_) => {
                return Err(WasmError::Unsupported(
                    "`ref.null T` that is not a `funcref` or an `externref`".into(),
                ));
            }
            HeapType::Exact(_) => {
                return Err(WasmError::Unsupported(
                    "custom-descriptors not supported yet".into(),
                ));
            }
        })
    }
1428
1429 fn translate_ref_is_null(
1430 &mut self,
1431 mut pos: cranelift_codegen::cursor::FuncCursor,
1432 value: ir::Value,
1433 ) -> WasmResult<ir::Value> {
1434 let bool_is_null =
1435 pos.ins()
1436 .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0);
1437 Ok(pos.ins().uextend(ir::types::I32, bool_is_null))
1438 }
1439
    /// Translate `ref.func` by calling the runtime's func-ref builtin, which
    /// produces the reference value for function `func_index`.
    fn translate_ref_func(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        func_index: FunctionIndex,
    ) -> WasmResult<ir::Value> {
        let (func_sig, func_index_arg, func_idx) = self.get_func_ref_func(pos.func, func_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let func_index_arg = pos.ins().iconst(I32, func_index_arg as i64);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, func_index_arg]);

        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1455
1456 fn translate_custom_global_get(
1457 &mut self,
1458 mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
1459 _index: GlobalIndex,
1460 ) -> WasmResult<ir::Value> {
1461 unreachable!("we don't make any custom globals")
1462 }
1463
1464 fn translate_custom_global_set(
1465 &mut self,
1466 mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
1467 _index: GlobalIndex,
1468 _value: ir::Value,
1469 ) -> WasmResult<()> {
1470 unreachable!("we don't make any custom globals")
1471 }
1472
    /// Create a `HeapData` describing linear memory `index` for heap-access
    /// legalization.
    ///
    /// Locally-defined memories are addressed directly off the vmctx at
    /// static offsets; imported memories need one extra (readonly) pointer
    /// load through the import record first. Dynamic-style memories re-load
    /// the current length as the bounds-check limit, while static-style
    /// memories use a constant bound and a readonly base pointer.
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<Heap> {
        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_length_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_memory_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
                let current_length_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmmemory_definition_current_length(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_length_offset)
            } else {
                // Imported memory: load the pointer to the memory definition
                // out of the import record, then address base/length off it.
                let from_offset = self.offsets.vmctx_vmmemory_import_definition(index);
                let memory = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset)
            }
        };

        let (offset_guard_size, heap_style, readonly_base) = match self.memory_styles[index] {
            // Dynamic memory: the bound is the current length, loaded fresh
            // (not readonly) since it may change at runtime.
            MemoryStyle::Dynamic { offset_guard_size } => {
                let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_length_offset),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted(),
                });
                (
                    Uimm64::new(offset_guard_size),
                    HeapStyle::Dynamic {
                        bound_gv: heap_bound,
                    },
                    false,
                )
            }
            // Static memory: constant bound; the base can be loaded readonly.
            MemoryStyle::Static {
                bound,
                offset_guard_size,
            } => (
                Uimm64::new(offset_guard_size),
                HeapStyle::Static {
                    bound: bound.bytes().0 as u64,
                },
                true,
            ),
        };

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if readonly_base {
                ir::MemFlags::trusted().with_readonly()
            } else {
                ir::MemFlags::trusted()
            },
        });
        Ok(self.heaps.push(HeapData {
            base: heap_base,
            min_size: 0,
            max_size: None,
            memory_type: None,
            offset_guard_size: offset_guard_size.into(),
            style: heap_style,
            // Heap indices are 32-bit here.
            index_type: I32,
            page_size_log2: self.target_config.page_size_align_log2,
        }))
    }
1553
1554 fn make_global(
1555 &mut self,
1556 func: &mut ir::Function,
1557 index: GlobalIndex,
1558 ) -> WasmResult<GlobalVariable> {
1559 let pointer_type = self.pointer_type();
1560
1561 let (ptr, offset) = {
1562 let vmctx = self.vmctx(func);
1563
1564 if let Some(def_index) = self.module.local_global_index(index) {
1565 let from_offset = self.offsets.vmctx_vmglobal_definition(def_index);
1566 let global = func.create_global_value(ir::GlobalValueData::VMContext);
1567 (global, i32::try_from(from_offset).unwrap())
1568 } else {
1569 let from_offset = self.offsets.vmctx_vmglobal_import_definition(index);
1570 let global = func.create_global_value(ir::GlobalValueData::Load {
1571 base: vmctx,
1572 offset: Offset32::new(i32::try_from(from_offset).unwrap()),
1573 global_type: pointer_type,
1574 flags: MemFlags::trusted(),
1575 });
1576 (global, 0)
1577 }
1578 };
1579
1580 Ok(GlobalVariable::Memory {
1581 gv: ptr,
1582 offset: offset.into(),
1583 ty: match self.module.globals[index].ty {
1584 WasmerType::I32 => ir::types::I32,
1585 WasmerType::I64 => ir::types::I64,
1586 WasmerType::F32 => ir::types::F32,
1587 WasmerType::F64 => ir::types::F64,
1588 WasmerType::V128 => ir::types::I8X16,
1589 WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
1590 self.reference_type()
1591 }
1592 },
1593 })
1594 }
1595
1596 fn make_indirect_sig(
1597 &mut self,
1598 func: &mut ir::Function,
1599 index: SignatureIndex,
1600 ) -> WasmResult<ir::SigRef> {
1601 Ok(func.import_signature(self.signatures[index].clone()))
1602 }
1603
1604 fn make_direct_func(
1605 &mut self,
1606 func: &mut ir::Function,
1607 index: FunctionIndex,
1608 ) -> WasmResult<ir::FuncRef> {
1609 let sigidx = self.module.functions[index];
1610 let signature = func.import_signature(self.signatures[sigidx].clone());
1611 let name = get_function_name(func, index);
1612
1613 Ok(func.import_function(ir::ExtFuncData {
1614 name,
1615 signature,
1616 colocated: true,
1617 patchable: false,
1618 }))
1619 }
1620
    /// Translate `call_indirect`: fetch the anyfunc from the table, null-check
    /// it, verify the caller/callee signature ids match, then call through
    /// `call_indirect_with_handlers` with the callee's vmctx prepended to the
    /// argument list.
    fn translate_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        sig_index: SignatureIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let pointer_type = self.pointer_type();

        let (anyfunc_ptr, inline_anyfunc) =
            self.get_or_init_funcref_table_elem(builder, table_index, callee);

        let mem_flags = ir::MemFlags::trusted();

        // Pointer-style tables store a (possibly null) anyfunc pointer; trap
        // before dereferencing it.
        if !inline_anyfunc {
            builder
                .ins()
                .trapz(anyfunc_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);
        }

        let func_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
        );

        // Inline-style tables hand back the entry's own address (never null),
        // so a null element is detected via its function pointer instead.
        if inline_anyfunc {
            builder
                .ins()
                .trapz(func_addr, crate::TRAP_INDIRECT_CALL_TO_NULL);
        }

        match self.table_styles[table_index] {
            TableStyle::CallerChecksSignature => {
                let sig_id_size = self.offsets.size_of_vmshared_signature_index();
                let sig_id_type = ir::Type::int(u16::from(sig_id_size) * 8).unwrap();
                let vmctx = self.vmctx(builder.func);
                let base = builder.ins().global_value(pointer_type, vmctx);
                let offset =
                    i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();

                // The expected signature id is fixed for this module, so its
                // load can be marked readonly.
                let mut mem_flags = ir::MemFlags::trusted();
                mem_flags.set_readonly();
                let caller_sig_id = builder.ins().load(sig_id_type, mem_flags, base, offset);

                let mem_flags = ir::MemFlags::trusted();
                let callee_sig_id = builder.ins().load(
                    sig_id_type,
                    mem_flags,
                    anyfunc_ptr,
                    i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
                );

                // Trap with a bad-signature error if the ids differ.
                let cmp = builder
                    .ins()
                    .icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
                builder.ins().trapz(cmp, crate::TRAP_BAD_SIGNATURE);
            }
        }

        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        // First real argument: the callee's own vmctx, read from the anyfunc.
        let vmctx = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
        );
        real_call_args.push(vmctx);

        real_call_args.extend_from_slice(call_args);

        let results = self.call_indirect_with_handlers(
            builder,
            sig_ref,
            func_addr,
            &real_call_args,
            Some(vmctx),
            landing_pad,
            false,
        );
        Ok(results)
    }
1717
    /// Translate a direct `call`.
    ///
    /// Locally-defined callees share the caller's vmctx and are called
    /// directly. Imported callees have their body pointer and their own vmctx
    /// loaded from the caller's vmctx import records and are invoked
    /// indirectly.
    fn translate_call(
        &mut self,
        builder: &mut FunctionBuilder,
        callee_index: FunctionIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        if !self.module.is_imported_function(callee_index) {
            // Local callee: reuse the caller's vmctx special parameter.
            let caller_vmctx = builder
                .func
                .special_param(ArgumentPurpose::VMContext)
                .unwrap();
            real_call_args.push(caller_vmctx);

            real_call_args.extend_from_slice(call_args);

            let results = self.call_with_handlers(
                builder,
                callee,
                &real_call_args,
                Some(caller_vmctx),
                landing_pad,
                false,
            );
            return Ok(results);
        }

        // Imported callee: fetch the body address and the callee vmctx from
        // the function-import record, then call indirectly.
        let pointer_type = self.pointer_type();
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let vmctx = self.vmctx(builder.func);
        let base = builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted();

        let body_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, base, body_offset);

        let vmctx_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let vmctx = builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(vmctx);

        real_call_args.extend_from_slice(call_args);

        let results = self.call_indirect_with_handlers(
            builder,
            sig_ref,
            func_addr,
            &real_call_args,
            Some(vmctx),
            landing_pad,
            false,
        );
        Ok(results)
    }
1791
1792 fn tag_param_arity(&self, tag_index: TagIndex) -> usize {
1793 let sig_index = self.module.tags[tag_index];
1794 let signature = &self.module.signatures[sig_index];
1795 signature.params().len()
1796 }
1797
    /// Convert a raw exception pointer into a wasm `exnref` value by calling
    /// the runtime's read-exception builtin.
    fn translate_exn_pointer_to_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        exn_ptr: ir::Value,
    ) -> ir::Value {
        let (read_sig, read_idx) = self.get_read_exception_func(builder.func);
        let mut pos = builder.cursor();
        // vmctx return value is unused: the builtin takes only the pointer.
        let (_, read_addr) = self.translate_load_builtin_function_address(&mut pos, read_idx);
        let read_call = builder.ins().call_indirect(read_sig, read_addr, &[exn_ptr]);
        builder.inst_results(read_call)[0]
    }
1809
    /// Extract an exception's payload values: resolve the payload buffer via
    /// the read-exnref builtin, then load each field at its precomputed
    /// offset from the tag's layout.
    fn translate_exn_unbox(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: TagIndex,
        exnref: ir::Value,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        // Field offsets/types for this tag's payload.
        let layout = self.exception_type_layout(tag_index)?.clone();

        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, read_exnref_addr) =
            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
        let read_exnref_call =
            builder
                .ins()
                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
        let payload_ptr = builder.inst_results(read_exnref_call)[0];

        // Load every payload field in layout order.
        let mut values = SmallVec::<[ir::Value; 4]>::with_capacity(layout.fields.len());
        let data_flags = ir::MemFlags::trusted();
        for field in &layout.fields {
            let value = builder.ins().load(
                field.ty,
                data_flags,
                payload_ptr,
                Offset32::new(field.offset as i32),
            );
            values.push(value);
        }

        Ok(values)
    }
1842
    /// Translate `throw`: allocate an exception object for `tag_index`, store
    /// the payload values into its buffer, then invoke the runtime throw
    /// builtin (which never returns normally, hence the trailing trap added
    /// via `unreachable_on_return = true`).
    fn translate_exn_throw(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: TagIndex,
        args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<()> {
        let layout = self.exception_type_layout(tag_index)?.clone();
        // The operand count must match the tag's payload layout exactly.
        if layout.fields.len() != args.len() {
            return Err(WasmError::Generic(format!(
                "exception payload arity mismatch: expected {}, got {}",
                layout.fields.len(),
                args.len()
            )));
        }

        // Allocate a fresh exception object for this tag.
        let (alloc_sig, alloc_idx) = self.get_alloc_exception_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, alloc_addr) = self.translate_load_builtin_function_address(&mut pos, alloc_idx);
        let tag_value = builder
            .ins()
            .iconst(TAG_TYPE, i64::from(tag_index.as_u32()));
        let alloc_call = builder
            .ins()
            .call_indirect(alloc_sig, alloc_addr, &[vmctx, tag_value]);
        let exnref = builder.inst_results(alloc_call)[0];

        // Resolve the exception's payload buffer.
        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, read_exnref_addr) =
            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
        let read_exnref_call =
            builder
                .ins()
                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
        let payload_ptr = builder.inst_results(read_exnref_call)[0];

        // Store each payload value at its layout offset.
        let store_flags = ir::MemFlags::trusted();
        for (field, value) in layout.fields.iter().zip(args.iter()) {
            debug_assert_eq!(
                builder.func.dfg.value_type(*value),
                field.ty,
                "exception payload type mismatch"
            );
            builder.ins().store(
                store_flags,
                *value,
                payload_ptr,
                Offset32::new(field.offset as i32),
            );
        }

        // Throw; any exception edge is routed through the landing pad.
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        let call_args = [vmctx_value, exnref];

        let _ = self.call_indirect_with_handlers(
            builder,
            throw_sig,
            throw_addr,
            &call_args,
            Some(vmctx_value),
            landing_pad,
            true,
        );

        Ok(())
    }
1913
    /// Translate `throw_ref`: re-throw an existing `exnref` via the runtime
    /// throw builtin. Like `translate_exn_throw`, the builtin never returns
    /// normally, so a trap is appended via `unreachable_on_return = true`.
    fn translate_exn_throw_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        exnref: ir::Value,
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<()> {
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        let call_args = [vmctx_value, exnref];

        let _ = self.call_indirect_with_handlers(
            builder,
            throw_sig,
            throw_addr,
            &call_args,
            Some(vmctx_value),
            landing_pad,
            true,
        );

        Ok(())
    }
1938
    /// Call the runtime's second-phase personality helper to compute the
    /// selector value for a caught exception.
    ///
    /// If the exception pointer arrives with a non-pointer type, it is
    /// bitcast to the target pointer type first (little-endian lane order).
    fn translate_exn_personality_selector(
        &mut self,
        builder: &mut FunctionBuilder,
        exn_ptr: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (sig, idx) = self.get_personality2_func(builder.func);
        let pointer_type = self.pointer_type();
        let exn_ty = builder.func.dfg.value_type(exn_ptr);
        let exn_arg = if exn_ty == pointer_type {
            exn_ptr
        } else {
            let mut flags = MemFlags::new();
            flags.set_endianness(Endianness::Little);
            builder.ins().bitcast(pointer_type, flags, exn_ptr)
        };

        let mut pos = builder.cursor();
        let (vmctx_value, func_addr) = self.translate_load_builtin_function_address(&mut pos, idx);
        let call = builder
            .ins()
            .call_indirect(sig, func_addr, &[vmctx_value, exn_arg]);
        Ok(builder.inst_results(call)[0])
    }
1962
    /// Unconditionally re-throw `exnref` via the runtime throw builtin,
    /// without installing any handlers.
    ///
    /// The builtin does not return, so the call is followed by a trap to
    /// terminate the current block.
    fn translate_exn_reraise_unmatched(
        &mut self,
        builder: &mut FunctionBuilder,
        exnref: ir::Value,
    ) -> WasmResult<()> {
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        builder
            .ins()
            .call_indirect(throw_sig, throw_addr, &[vmctx_value, exnref]);
        builder.ins().trap(crate::TRAP_UNREACHABLE);
        Ok(())
    }
1978
    /// Translate `memory.grow` as a call to the runtime's memory-grow builtin
    /// (variant chosen by `get_memory_grow_func`).
    fn translate_memory_grow(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
        val: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
        // Forward the builtin's single return value as the wasm result.
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1994
    /// Translate `memory.size` as a call to the runtime's memory-size builtin.
    fn translate_memory_size(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_size_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2009
    /// Translate `memory.copy` as a call to the runtime's memory-copy builtin.
    ///
    /// NOTE(review): only `src_index` is passed to the builtin; the dst
    /// parameters are unused here — presumably the builtin handles the
    /// single-memory case. Confirm against the runtime's memory_copy libcall.
    fn translate_memory_copy(
        &mut self,
        mut pos: FuncCursor,
        src_index: MemoryIndex,
        _src_heap: Heap,
        _dst_index: MemoryIndex,
        _dst_heap: Heap,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, src_index, func_idx) = self.get_memory_copy_func(pos.func, src_index);

        let src_index_arg = pos.ins().iconst(I32, src_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, src_index_arg, dst, src, len]);

        Ok(())
    }
2032
    /// Translate `memory.fill` as a call to the runtime's memory-fill builtin.
    fn translate_memory_fill(
        &mut self,
        mut pos: FuncCursor,
        memory_index: MemoryIndex,
        _heap: Heap,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, memory_index, func_idx) = self.get_memory_fill_func(pos.func, memory_index);

        let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index_arg, dst, val, len],
        );

        Ok(())
    }
2056
    /// Translate `memory.init` as a call to the runtime's memory-init
    /// builtin, passing both the memory index and the data-segment index.
    fn translate_memory_init(
        &mut self,
        mut pos: FuncCursor,
        memory_index: MemoryIndex,
        _heap: Heap,
        seg_index: u32,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, func_idx) = self.get_memory_init_func(pos.func);

        let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }
2082
    /// Translate `data.drop` as a call to the runtime's data-drop builtin.
    fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
        let (func_sig, func_idx) = self.get_data_drop_func(pos.func);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
        Ok(())
    }
2091
    /// Translate `table.size` as a call to the runtime's table-size builtin.
    fn translate_table_size(
        &mut self,
        mut pos: FuncCursor,
        table_index: TableIndex,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, index_arg, func_idx) = self.get_table_size_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, table_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2106
    /// Translate `table.copy` as a call to the runtime's table-copy builtin,
    /// passing both source and destination table indices.
    fn translate_table_copy(
        &mut self,
        mut pos: FuncCursor,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, src_table_index);
        self.ensure_table_exists(pos.func, dst_table_index);
        let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
            self.get_table_copy_func(pos.func, dst_table_index, src_table_index);

        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[
                vmctx,
                dst_table_index_arg,
                src_table_index_arg,
                dst,
                src,
                len,
            ],
        );

        Ok(())
    }
2141
    /// Translate `table.init` as a call to the runtime's table-init builtin,
    /// passing the table index and the element-segment index.
    fn translate_table_init(
        &mut self,
        mut pos: FuncCursor,
        seg_index: u32,
        table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_init_func(pos.func, table_index);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }
2167
    /// Translate `elem.drop` as a call to the runtime's elem-drop builtin.
    fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
        let (func_sig, func_idx) = self.get_elem_drop_func(pos.func);

        let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);

        Ok(())
    }
2180
    /// Translate `memory.atomic.wait32`/`wait64` as a call to the matching
    /// runtime builtin; the 64-bit variant is selected from the Cranelift
    /// type of the `expected` operand.
    fn translate_atomic_wait(
        &mut self,
        mut pos: FuncCursor,
        index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        expected: ir::Value,
        timeout: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = if pos.func.dfg.value_type(expected) == I64 {
            self.get_memory_atomic_wait64_func(pos.func, index)
        } else {
            self.get_memory_atomic_wait32_func(pos.func, index)
        };
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index, addr, expected, timeout],
        );
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2204
    /// Translate `memory.atomic.notify` as a call to the runtime's
    /// atomic-notify builtin.
    fn translate_atomic_notify(
        &mut self,
        mut pos: FuncCursor,
        index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        count: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_atomic_notify_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst =
            pos.ins()
                .call_indirect(func_sig, func_addr, &[vmctx, memory_index, addr, count]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
2221
2222 fn get_global_type(&self, global_index: GlobalIndex) -> Option<WasmerType> {
2223 Some(self.module.globals.get(global_index)?.ty)
2224 }
2225
    /// Record the declared type of the next local on the local-type stack.
    fn push_local_decl_on_stack(&mut self, ty: WasmerType) {
        self.type_stack.push(ty);
    }
2229
2230 fn push_params_on_stack(&mut self, function_index: LocalFunctionIndex) {
2231 let func_index = self.module.func_index(function_index);
2232 let sig_idx = self.module.functions[func_index];
2233 let signature = &self.module.signatures[sig_idx];
2234 for param in signature.params() {
2235 self.type_stack.push(*param);
2236 }
2237 }
2238
    /// Type recorded for local `local_index`, or `None` if out of range.
    fn get_local_type(&self, local_index: u32) -> Option<WasmerType> {
        self.type_stack.get(local_index as usize).cloned()
    }
2242
    /// All local types recorded so far, in declaration order.
    fn get_local_types(&self) -> &[WasmerType] {
        &self.type_stack
    }
2246
2247 fn get_function_type(&self, function_index: FunctionIndex) -> Option<&FunctionType> {
2248 let sig_idx = self.module.functions.get(function_index)?;
2249 Some(&self.module.signatures[*sig_idx])
2250 }
2251
    /// Function type for signature `sig_index`, or `None` if out of range.
    fn get_function_sig(&self, sig_index: SignatureIndex) -> Option<&FunctionType> {
        self.module.signatures.get(sig_index)
    }
2255
    /// Spectre mitigation for heap accesses is disabled in this environment.
    fn heap_access_spectre_mitigation(&self) -> bool {
        false
    }
2259
    /// Proof-carrying-code facts are not emitted by this environment.
    fn proof_carrying_code(&self) -> bool {
        false
    }
2263
    /// All heaps created so far via `make_heap`.
    fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
        &self.heaps
    }
2267}