use crate::{
    HashMap,
    heap::{Heap, HeapData, HeapStyle},
    table::{TableData, TableSize},
    translator::{
        EXN_REF_TYPE, FuncEnvironment as BaseFuncEnvironment, GlobalVariable, LandingPad, TAG_TYPE,
        TargetEnvironment,
    },
};
use cranelift_codegen::{
    cursor::FuncCursor,
    ir::{
        self, AbiParam, ArgumentPurpose, BlockArg, Endianness, ExceptionTableData,
        ExceptionTableItem, ExceptionTag, Function, InstBuilder, MemFlags, Signature,
        UserExternalName,
        condcodes::IntCC,
        immediates::{Offset32, Uimm64},
        types::*,
    },
    isa::TargetFrontendConfig,
};
use cranelift_frontend::FunctionBuilder;
use smallvec::SmallVec;
use std::convert::TryFrom;
use wasmer_compiler::wasmparser::HeapType;
use wasmer_types::{
    FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryStyle,
    ModuleInfo, SignatureIndex, TableIndex, TableStyle, TagIndex, Type as WasmerType,
    VMBuiltinFunctionIndex, VMOffsets, WasmError, WasmResult,
    entity::{EntityRef, PrimaryMap, SecondaryMap},
};

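/// Compute an `ir::ExternalName` for a given wasm function index.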
pub fn get_function_name(func: &mut Function, func_index: FunctionIndex) -> ir::ExternalName {
    ir::ExternalName::user(
        func.params
            .ensure_user_func_name(UserExternalName::new(0, func_index.as_u32())),
    )
}

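/// Return the IR type sized to hold `VMTableDefinition::current_elements`.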
#[allow(unused)]
pub fn type_of_vmtable_definition_current_elements(vmoffsets: &VMOffsets) -> ir::Type {
    ir::Type::int(u16::from(vmoffsets.size_of_vmtable_definition_current_elements()) * 8).unwrap()
}

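/// Offset and IR type of a single field within an exception payload.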
#[derive(Clone)]
struct ExceptionFieldLayout {
    offset: u32,
    ty: ir::Type,
}

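/// The computed in-memory layout of one exception tag's payload fields.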
#[derive(Clone)]
struct ExceptionTypeLayout {
    fields: SmallVec<[ExceptionFieldLayout; 4]>,
}

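/// Per-function translation environment: maps Wasm-level concepts (memories,
/// tables, globals, exception tags) onto Cranelift IR and VM builtin calls.
///
/// A minimal usage sketch (variable names here are illustrative; the module
/// metadata is assumed to have been collected by the compiler driver):
///
/// ```ignore
/// let mut env = FuncEnvironment::new(
///     target_config,
///     &module_info,
///     &signatures,
///     &memory_styles,
///     &table_styles,
/// );
/// // `env` is then handed to the translator for one function body.
/// ```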
pub struct FuncEnvironment<'module_environment> {
    /// Target-specified configuration.
    target_config: TargetFrontendConfig,

    /// The module-level environment which this function-level environment belongs to.
    module: &'module_environment ModuleInfo,

    /// A stack tracking the types of values during translation.
    type_stack: Vec<WasmerType>,

    /// The module's function signatures.
    signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,

    /// Heaps implementing the WebAssembly linear memories.
    heaps: PrimaryMap<Heap, HeapData>,

    /// The Cranelift global holding the vmctx address.
    vmctx: Option<ir::GlobalValue>,

    /// The external function signature for implementing wasm's `memory.size`
    /// for locally-defined 32-bit memories.
    memory32_size_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.size`
    /// for locally-defined tables.
    table_size_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.grow`
    /// for locally-defined memories.
    memory_grow_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.grow`
    /// for locally-defined tables.
    table_grow_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.copy`
    /// (it's the same for both local and imported tables).
    table_copy_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.init`.
    table_init_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `elem.drop`.
    elem_drop_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.copy`
    /// (it's the same for both local and imported memories).
    memory_copy_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.fill`
    /// (it's the same for both local and imported memories).
    memory_fill_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.init`.
    memory_init_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `data.drop`.
    data_drop_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.get`.
    table_get_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.set`.
    table_set_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `ref.func`.
    func_ref_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.fill`.
    table_fill_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.atomic.wait32`.
    memory32_atomic_wait32_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.atomic.wait64`.
    memory32_atomic_wait64_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.atomic.notify`.
    memory32_atomic_notify_sig: Option<ir::SigRef>,

    /// The external function signatures for the exception-handling builtins.
    personality2_sig: Option<ir::SigRef>,
    throw_sig: Option<ir::SigRef>,
    alloc_exception_sig: Option<ir::SigRef>,
    read_exception_sig: Option<ir::SigRef>,
    read_exnref_sig: Option<ir::SigRef>,

    /// Cached payload layouts for exception tags, keyed by tag index.
    exception_type_layouts: HashMap<u32, ExceptionTypeLayout>,

    /// Offsets to struct fields accessed by JIT'd code.
    offsets: VMOffsets,

    /// The memory styles.
    memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,

    /// Cranelift tables we have created to implement the tables of this module.
    tables: SecondaryMap<TableIndex, Option<TableData>>,

    /// The table styles.
    table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
}

impl<'module_environment> FuncEnvironment<'module_environment> {
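    /// Create a new `FuncEnvironment` for translating functions of `module`.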
    pub fn new(
        target_config: TargetFrontendConfig,
        module: &'module_environment ModuleInfo,
        signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
        memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
        table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
    ) -> Self {
        Self {
            target_config,
            module,
            signatures,
            type_stack: vec![],
            heaps: PrimaryMap::new(),
            vmctx: None,
            memory32_size_sig: None,
            table_size_sig: None,
            memory_grow_sig: None,
            table_grow_sig: None,
            table_copy_sig: None,
            table_init_sig: None,
            elem_drop_sig: None,
            memory_copy_sig: None,
            memory_fill_sig: None,
            memory_init_sig: None,
            table_get_sig: None,
            table_set_sig: None,
            data_drop_sig: None,
            func_ref_sig: None,
            table_fill_sig: None,
            memory32_atomic_wait32_sig: None,
            memory32_atomic_wait64_sig: None,
            memory32_atomic_notify_sig: None,
            personality2_sig: None,
            throw_sig: None,
            alloc_exception_sig: None,
            read_exception_sig: None,
            read_exnref_sig: None,
            exception_type_layouts: HashMap::new(),
            offsets: VMOffsets::new(target_config.pointer_bytes(), module),
            memory_styles,
            tables: Default::default(),
            table_styles,
        }
    }

    fn pointer_type(&self) -> ir::Type {
        self.target_config.pointer_type()
    }

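    /// Lazily initialize the `TableData` for `index`, creating global values
    /// for the table's base pointer and current-elements bound.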
    fn ensure_table_exists(&mut self, func: &mut ir::Function, index: TableIndex) {
        if self.tables[index].is_some() {
            return;
        }

        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_table_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                let from_offset = self.offsets.vmctx_vmtable_import(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = self.reference_type().bytes();

        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            // A fixed-size table can never be reallocated, so its base
            // address never changes.
            flags: if Some(table.minimum) == table.maximum {
                MemFlags::trusted().with_readonly()
            } else {
                MemFlags::trusted()
            },
        });

        let bound = if Some(table.minimum) == table.maximum {
            TableSize::Static {
                bound: table.minimum,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        self.tables[index] = Some(TableData {
            base_gv,
            bound,
            element_size,
        });
    }

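    /// Get or create the `GlobalValue` representing the `VMContext` pointer.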
    fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
        self.vmctx.unwrap_or_else(|| {
            let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
            self.vmctx = Some(vmctx);
            vmctx
        })
    }

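    /// Import (once) and return the signature of the `table.fill` builtin.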
    fn get_table_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_fill_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // table index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // value
                    AbiParam::new(self.reference_type()),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_fill_sig = Some(sig);
        sig
    }

    fn get_table_fill_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        (
            self.get_table_fill_sig(func),
            table_index.index(),
            VMBuiltinFunctionIndex::get_table_fill_index(),
        )
    }

    fn get_func_ref_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.func_ref_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(self.reference_type())],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.func_ref_sig = Some(sig);
        sig
    }

    fn get_func_ref_func(
        &mut self,
        func: &mut Function,
        function_index: FunctionIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        (
            self.get_func_ref_sig(func),
            function_index.index(),
            VMBuiltinFunctionIndex::get_func_ref_index(),
        )
    }

    fn get_table_get_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_get_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(self.reference_type())],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_get_sig = Some(sig);
        sig
    }

    fn get_table_get_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(table_index) {
            (
                self.get_table_get_sig(func),
                table_index.index(),
                VMBuiltinFunctionIndex::get_imported_table_get_index(),
            )
        } else {
            (
                self.get_table_get_sig(func),
                self.module.local_table_index(table_index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_get_index(),
            )
        }
    }

    fn get_table_set_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_set_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                    AbiParam::new(self.reference_type()),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_set_sig = Some(sig);
        sig
    }

    fn get_table_set_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(table_index) {
            (
                self.get_table_set_sig(func),
                table_index.index(),
                VMBuiltinFunctionIndex::get_imported_table_set_index(),
            )
        } else {
            (
                self.get_table_set_sig(func),
                self.module.local_table_index(table_index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_set_index(),
            )
        }
    }

    fn get_table_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_grow_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // init value
                    AbiParam::new(self.reference_type()),
                    // delta
                    AbiParam::new(I32),
                    // table index
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_grow_sig = Some(sig);
        sig
    }

    /// Return the table.grow builtin to call for `index`, the index argument
    /// to pass to it, and its builtin-function index.
    fn get_table_grow_func(
        &mut self,
        func: &mut Function,
        index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(index) {
            (
                self.get_table_grow_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_table_grow_index(),
            )
        } else {
            (
                self.get_table_grow_sig(func),
                self.module.local_table_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_grow_index(),
            )
        }
    }

    fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_grow_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_grow_sig = Some(sig);
        sig
    }

    /// Return the memory.grow builtin to call for `index`, the index argument
    /// to pass to it, and its builtin-function index.
    fn get_memory_grow_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory_grow_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
            )
        } else {
            (
                self.get_memory_grow_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory32_grow_index(),
            )
        }
    }

    fn get_table_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_size_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_size_sig = Some(sig);
        sig
    }

    /// Return the table.size builtin to call for `index`, the index argument
    /// to pass to it, and its builtin-function index.
    fn get_table_size_func(
        &mut self,
        func: &mut Function,
        index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(index) {
            (
                self.get_table_size_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_table_size_index(),
            )
        } else {
            (
                self.get_table_size_sig(func),
                self.module.local_table_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_size_index(),
            )
        }
    }

    fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_size_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_size_sig = Some(sig);
        sig
    }

    /// Return the memory.size builtin to call for `index`, the index argument
    /// to pass to it, and its builtin-function index.
    fn get_memory_size_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_size_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
            )
        } else {
            (
                self.get_memory32_size_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory32_size_index(),
            )
        }
    }

    fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_copy_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // dst table index
                    AbiParam::new(I32),
                    // src table index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // src
                    AbiParam::new(I32),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_copy_sig = Some(sig);
        sig
    }

    fn get_table_copy_func(
        &mut self,
        func: &mut Function,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
    ) -> (ir::SigRef, usize, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_table_copy_sig(func);
        (
            sig,
            dst_table_index.as_u32() as usize,
            src_table_index.as_u32() as usize,
            VMBuiltinFunctionIndex::get_table_copy_index(),
        )
    }

    fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_init_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // table index
                    AbiParam::new(I32),
                    // segment index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // src
                    AbiParam::new(I32),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_init_sig = Some(sig);
        sig
    }

    fn get_table_init_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_table_init_sig(func);
        let table_index = table_index.as_u32() as usize;
        (
            sig,
            table_index,
            VMBuiltinFunctionIndex::get_table_init_index(),
        )
    }

    fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.elem_drop_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // elem index
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.elem_drop_sig = Some(sig);
        sig
    }

    fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.get_elem_drop_sig(func);
        (sig, VMBuiltinFunctionIndex::get_elem_drop_index())
    }

    fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_copy_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // memory index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // src
                    AbiParam::new(I32),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_copy_sig = Some(sig);
        sig
    }

    fn get_memory_copy_func(
        &mut self,
        func: &mut Function,
        memory_index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_memory_copy_sig(func);
        if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
            (
                sig,
                local_memory_index.index(),
                VMBuiltinFunctionIndex::get_memory_copy_index(),
            )
        } else {
            (
                sig,
                memory_index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
            )
        }
    }

    fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_fill_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // memory index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // value
                    AbiParam::new(I32),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_fill_sig = Some(sig);
        sig
    }

    fn get_memory_fill_func(
        &mut self,
        func: &mut Function,
        memory_index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_memory_fill_sig(func);
        if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
            (
                sig,
                local_memory_index.index(),
                VMBuiltinFunctionIndex::get_memory_fill_index(),
            )
        } else {
            (
                sig,
                memory_index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
            )
        }
    }

    fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_init_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // memory index
                    AbiParam::new(I32),
                    // data index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // src
                    AbiParam::new(I32),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_init_sig = Some(sig);
        sig
    }

    fn get_memory_init_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.get_memory_init_sig(func);
        (sig, VMBuiltinFunctionIndex::get_memory_init_index())
    }

    fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.data_drop_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // data index
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.data_drop_sig = Some(sig);
        sig
    }

    fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.get_data_drop_sig(func);
        (sig, VMBuiltinFunctionIndex::get_data_drop_index())
    }

    fn get_memory32_atomic_wait32_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_atomic_wait32_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // memory index
                    AbiParam::new(I32),
                    // addr
                    AbiParam::new(I32),
                    // expected value
                    AbiParam::new(I32),
                    // timeout
                    AbiParam::new(I64),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_atomic_wait32_sig = Some(sig);
        sig
    }

    /// Return the memory.atomic.wait32 builtin to call for `index`, the index
    /// argument to pass to it, and its builtin-function index.
    fn get_memory_atomic_wait32_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_atomic_wait32_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
            )
        } else {
            (
                self.get_memory32_atomic_wait32_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
            )
        }
    }

    fn get_memory32_atomic_wait64_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_atomic_wait64_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // memory index
                    AbiParam::new(I32),
                    // addr
                    AbiParam::new(I32),
                    // expected value
                    AbiParam::new(I64),
                    // timeout
                    AbiParam::new(I64),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_atomic_wait64_sig = Some(sig);
        sig
    }

    /// Return the memory.atomic.wait64 builtin to call for `index`, the index
    /// argument to pass to it, and its builtin-function index.
    fn get_memory_atomic_wait64_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_atomic_wait64_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
            )
        } else {
            (
                self.get_memory32_atomic_wait64_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
            )
        }
    }

    fn get_memory32_atomic_notify_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_atomic_notify_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // memory index
                    AbiParam::new(I32),
                    // addr
                    AbiParam::new(I32),
                    // count
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_atomic_notify_sig = Some(sig);
        sig
    }

    /// Return the memory.atomic.notify builtin to call for `index`, the index
    /// argument to pass to it, and its builtin-function index.
    fn get_memory_atomic_notify_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_atomic_notify_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
            )
        } else {
            (
                self.get_memory32_atomic_notify_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
            )
        }
    }

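    /// Signature (two pointers in, a tag out) and builtin index of the
    /// personality-selector helper used during unwinding to recover the Wasm
    /// tag of a thrown exception.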
    fn get_personality2_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.personality2_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::new(self.pointer_type()));
            signature.params.push(AbiParam::new(self.pointer_type()));
            signature.returns.push(AbiParam::new(TAG_TYPE));
            let sig = func.import_signature(signature);
            self.personality2_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_personality2_index(),
        )
    }

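    /// Signature and builtin index of the `throw` builtin, which raises the
    /// given `exnref` and never returns normally (call sites emit a trap
    /// after it).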
    fn get_throw_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.throw_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::special(
                self.pointer_type(),
                ArgumentPurpose::VMContext,
            ));
            signature.params.push(AbiParam::new(EXN_REF_TYPE));
            let sig = func.import_signature(signature);
            self.throw_sig = Some(sig);
            sig
        });
        (sig, VMBuiltinFunctionIndex::get_imported_throw_index())
    }

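    /// Signature and builtin index of the builtin that allocates a fresh
    /// exception object for a tag, yielding an `exnref`.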
    fn get_alloc_exception_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.alloc_exception_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::special(
                self.pointer_type(),
                ArgumentPurpose::VMContext,
            ));
            signature.params.push(AbiParam::new(TAG_TYPE));
            signature.returns.push(AbiParam::new(EXN_REF_TYPE));
            let sig = func.import_signature(signature);
            self.alloc_exception_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_alloc_exception_index(),
        )
    }

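    /// Signature and builtin index of the builtin that resolves an `exnref`
    /// to a pointer to its payload buffer.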
    fn get_read_exnref_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.read_exnref_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::special(
                self.pointer_type(),
                ArgumentPurpose::VMContext,
            ));
            signature.params.push(AbiParam::new(EXN_REF_TYPE));
            signature.returns.push(AbiParam::new(self.pointer_type()));
            let sig = func.import_signature(signature);
            self.read_exnref_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_read_exnref_index(),
        )
    }

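    /// Signature and builtin index of the builtin that converts a raw thrown
    /// exception pointer into an `exnref`.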
    fn get_read_exception_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.read_exception_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::new(self.pointer_type()));
            signature.returns.push(AbiParam::new(EXN_REF_TYPE));
            let sig = func.import_signature(signature);
            self.read_exception_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_exception_into_exnref_index(),
        )
    }

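    /// Get the payload layout for `tag_index`, computing and caching it on
    /// first use.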
    fn exception_type_layout(&mut self, tag_index: TagIndex) -> WasmResult<&ExceptionTypeLayout> {
        let key = tag_index.as_u32();
        if !self.exception_type_layouts.contains_key(&key) {
            let layout = self.compute_exception_type_layout(tag_index)?;
            self.exception_type_layouts.insert(key, layout);
        }
        Ok(self.exception_type_layouts.get(&key).unwrap())
    }

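    /// Compute the in-memory layout of an exception payload: the tag's
    /// parameter types are laid out in declaration order, each field aligned
    /// to its natural (size-based) alignment.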
    fn compute_exception_type_layout(
        &self,
        tag_index: TagIndex,
    ) -> WasmResult<ExceptionTypeLayout> {
        let sig_index = self.module.tags[tag_index];
        let func_type = &self.module.signatures[sig_index];
        let mut offset = 0u32;
        let mut max_align = 1u32;
        let mut fields = SmallVec::<[ExceptionFieldLayout; 4]>::new();

        for wasm_ty in func_type.params() {
            let ir_ty = self.map_wasmer_type_to_ir(*wasm_ty)?;
            let field_size = ir_ty.bytes();
            let align = field_size.max(1);
            max_align = max_align.max(align);
            offset = offset.next_multiple_of(align);
            fields.push(ExceptionFieldLayout { offset, ty: ir_ty });
            offset = offset
                .checked_add(field_size)
                .ok_or_else(|| WasmError::Unsupported("exception payload too large".to_string()))?;
        }

        Ok(ExceptionTypeLayout { fields })
    }

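    /// Map a Wasmer value type to the Cranelift IR type used to represent it.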
    fn map_wasmer_type_to_ir(&self, ty: WasmerType) -> WasmResult<ir::Type> {
        Ok(match ty {
            WasmerType::I32 => ir::types::I32,
            WasmerType::I64 => ir::types::I64,
            WasmerType::F32 => ir::types::F32,
            WasmerType::F64 => ir::types::F64,
            WasmerType::V128 => ir::types::I8X16,
            WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
                self.reference_type()
            }
        })
    }

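    /// Emit a direct call to `callee`. Without a landing pad this is a plain
    /// `call`; with one, a `try_call` is emitted whose exception table routes
    /// matching tags to the landing-pad block and normal returns to a fresh
    /// continuation block.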
    fn call_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        callee: ir::FuncRef,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig_ref]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        if landing_pad.is_none() {
            let inst = builder.ins().call(callee, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        let continuation = builder.create_block();
        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }
        let etd = ExceptionTableData::new(sig_ref, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call(callee, args, et);
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }
        result_values
    }

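    /// Indirect-call analogue of `call_with_handlers`: emits `call_indirect`
    /// or `try_call_indirect` depending on whether a landing pad is present.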
    #[allow(clippy::too_many_arguments)]
    fn call_indirect_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        sig: ir::SigRef,
        func_addr: ir::Value,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        if landing_pad.is_none() {
            let inst = builder.ins().call_indirect(sig, func_addr, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        let continuation = builder.create_block();
        let current_block = builder.current_block().expect("current block");
        builder.insert_block_after(continuation, current_block);

        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }

        let etd = ExceptionTableData::new(sig, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call_indirect(func_addr, args, et);
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }

        result_values
    }

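    /// Load the address of builtin function `callee_func_idx` from the vmctx,
    /// returning both the vmctx address and the loaded function pointer.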
    fn translate_load_builtin_function_address(
        &mut self,
        pos: &mut FuncCursor<'_>,
        callee_func_idx: VMBuiltinFunctionIndex,
    ) -> (ir::Value, ir::Value) {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(pos.func);
        let base = pos.ins().global_value(pointer_type, vmctx);

        let mut mem_flags = ir::MemFlags::trusted();
        mem_flags.set_readonly();

        let body_offset =
            i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
        let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);

        (base, func_addr)
    }

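    /// Load the raw `funcref` stored at `index` in table `table_index`.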
    fn get_or_init_funcref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> ir::Value {
        let pointer_type = self.pointer_type();
        self.ensure_table_exists(builder.func, table_index);
        let table_data = self.tables[table_index].as_ref().unwrap();

        // Compute the bounds-checked address of the table entry, then load
        // the funcref pointer stored there.
        let (table_entry_addr, flags) =
            table_data.prepare_table_addr(builder, index, pointer_type, false);
        builder.ins().load(pointer_type, flags, table_entry_addr, 0)
    }
}

impl TargetEnvironment for FuncEnvironment<'_> {
    fn target_config(&self) -> TargetFrontendConfig {
        self.target_config
    }
}

impl BaseFuncEnvironment for FuncEnvironment<'_> {
    fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
        // The first parameter is the vmctx; every later parameter is a real
        // Wasm-level parameter.
        index >= 1
    }

    fn translate_table_grow(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        delta: ir::Value,
        init_value: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, index_arg, func_idx) = self.get_table_grow_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, init_value, delta, table_index],
        );
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn translate_table_get(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_get_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, table_index, index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn translate_table_set(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        value: ir::Value,
        index: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_set_func(pos.func, table_index);
        let n_table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, n_table_index, index, value]);
        Ok(())
    }

    fn translate_table_fill(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_fill_func(pos.func, table_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, dst, val, len],
        );

        Ok(())
    }

    fn translate_ref_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        ty: HeapType,
    ) -> WasmResult<ir::Value> {
        Ok(match ty {
            HeapType::Abstract { ty, .. } => match ty {
                wasmer_compiler::wasmparser::AbstractHeapType::Func
                | wasmer_compiler::wasmparser::AbstractHeapType::Extern => {
                    pos.ins().iconst(self.reference_type(), 0)
                }
                _ => {
                    return Err(WasmError::Unsupported(format!(
                        "`ref.null T` that is not a `funcref` or an `externref`: {ty:?}"
                    )));
                }
            },
            HeapType::Concrete(_) => {
                return Err(WasmError::Unsupported(
                    "`ref.null T` that is not a `funcref` or an `externref`".into(),
                ));
            }
            HeapType::Exact(_) => {
                return Err(WasmError::Unsupported(
                    "custom-descriptors not supported yet".into(),
                ));
            }
        })
    }

    fn translate_ref_is_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        value: ir::Value,
    ) -> WasmResult<ir::Value> {
        let bool_is_null =
            pos.ins()
                .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0);
        Ok(pos.ins().uextend(ir::types::I32, bool_is_null))
    }

    fn translate_ref_func(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        func_index: FunctionIndex,
    ) -> WasmResult<ir::Value> {
        let (func_sig, func_index_arg, func_idx) = self.get_func_ref_func(pos.func, func_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let func_index_arg = pos.ins().iconst(I32, func_index_arg as i64);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, func_index_arg]);

        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn translate_custom_global_get(
        &mut self,
        mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
        _index: GlobalIndex,
    ) -> WasmResult<ir::Value> {
        unreachable!("we don't make any custom globals")
    }

    fn translate_custom_global_set(
        &mut self,
        mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
        _index: GlobalIndex,
        _value: ir::Value,
    ) -> WasmResult<()> {
        unreachable!("we don't make any custom globals")
    }

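    /// Set up a `HeapData` describing memory `index`: the base pointer and
    /// current length are loaded from the vmctx (directly for local memories,
    /// through the import record for imported ones).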
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<Heap> {
        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_length_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_memory_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
                let current_length_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmmemory_definition_current_length(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_length_offset)
            } else {
                let from_offset = self.offsets.vmctx_vmmemory_import_definition(index);
                let memory = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset)
            }
        };

        // A dynamic memory re-loads its current length on each access, while
        // a static memory has a fixed bound and relies on its guard region.
        let (offset_guard_size, heap_style, readonly_base) = match self.memory_styles[index] {
            MemoryStyle::Dynamic { offset_guard_size } => {
                let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_length_offset),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted(),
                });
                (
                    Uimm64::new(offset_guard_size),
                    HeapStyle::Dynamic {
                        bound_gv: heap_bound,
                    },
                    false,
                )
            }
            MemoryStyle::Static {
                bound,
                offset_guard_size,
            } => (
                Uimm64::new(offset_guard_size),
                HeapStyle::Static {
                    bound: bound.bytes().0 as u64,
                },
                true,
            ),
        };

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if readonly_base {
                ir::MemFlags::trusted().with_readonly()
            } else {
                ir::MemFlags::trusted()
            },
        });
        Ok(self.heaps.push(HeapData {
            base: heap_base,
            min_size: 0,
            max_size: None,
            memory_type: None,
            offset_guard_size: offset_guard_size.into(),
            style: heap_style,
            index_type: I32,
            page_size_log2: self.target_config.page_size_align_log2,
        }))
    }

    fn make_global(
        &mut self,
        func: &mut ir::Function,
        index: GlobalIndex,
    ) -> WasmResult<GlobalVariable> {
        let pointer_type = self.pointer_type();

        let (ptr, offset) = {
            let vmctx = self.vmctx(func);

            let from_offset = if let Some(def_index) = self.module.local_global_index(index) {
                self.offsets.vmctx_vmglobal_definition(def_index)
            } else {
                self.offsets.vmctx_vmglobal_import_definition(index)
            };

            let global = func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                global_type: pointer_type,
                flags: MemFlags::trusted(),
            });

            (global, 0)
        };

        Ok(GlobalVariable::Memory {
            gv: ptr,
            offset: offset.into(),
            ty: match self.module.globals[index].ty {
                WasmerType::I32 => ir::types::I32,
                WasmerType::I64 => ir::types::I64,
                WasmerType::F32 => ir::types::F32,
                WasmerType::F64 => ir::types::F64,
                WasmerType::V128 => ir::types::I8X16,
                WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
                    self.reference_type()
                }
            },
        })
    }

    fn make_indirect_sig(
        &mut self,
        func: &mut ir::Function,
        index: SignatureIndex,
    ) -> WasmResult<ir::SigRef> {
        Ok(func.import_signature(self.signatures[index].clone()))
    }

    fn make_direct_func(
        &mut self,
        func: &mut ir::Function,
        index: FunctionIndex,
    ) -> WasmResult<ir::FuncRef> {
        let sigidx = self.module.functions[index];
        let signature = func.import_signature(self.signatures[sigidx].clone());
        let name = get_function_name(func, index);

        Ok(func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated: true,
        }))
    }

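    /// Translate `call_indirect`: load the `VMCallerCheckedAnyfunc` from the
    /// table, null-check it, verify the callee's signature id against the
    /// caller's expectation, then call through the loaded function pointer.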
    fn translate_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        sig_index: SignatureIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let pointer_type = self.pointer_type();

        // Get the anyfunc pointer (the funcref) from the table.
        let anyfunc_ptr = self.get_or_init_funcref_table_elem(builder, table_index, callee);

        let mem_flags = ir::MemFlags::trusted();

        // Check whether `anyfunc_ptr` is null, which would indicate an
        // uninitialized table element.
        builder
            .ins()
            .trapz(anyfunc_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);

        let func_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
        );

        // If necessary, check the signature.
        match self.table_styles[table_index] {
            TableStyle::CallerChecksSignature => {
                let sig_id_size = self.offsets.size_of_vmshared_signature_index();
                let sig_id_type = ir::Type::int(u16::from(sig_id_size) * 8).unwrap();
                let vmctx = self.vmctx(builder.func);
                let base = builder.ins().global_value(pointer_type, vmctx);
                let offset =
                    i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();

                // Load the caller's expected signature id.
                let mut mem_flags = ir::MemFlags::trusted();
                mem_flags.set_readonly();
                let caller_sig_id = builder.ins().load(sig_id_type, mem_flags, base, offset);

                // Load the callee's actual signature id.
                let mem_flags = ir::MemFlags::trusted();
                let callee_sig_id = builder.ins().load(
                    sig_id_type,
                    mem_flags,
                    anyfunc_ptr,
                    i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
                );

                // Trap if the signature ids don't match.
                let cmp = builder
                    .ins()
                    .icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
                builder.ins().trapz(cmp, crate::TRAP_BAD_SIGNATURE);
            }
        }

        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        // First append the callee vmctx address.
        let vmctx = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
        );
        real_call_args.push(vmctx);

        // Then append the regular call arguments.
        real_call_args.extend_from_slice(call_args);

        let results = self.call_indirect_with_handlers(
            builder,
            sig_ref,
            func_addr,
            &real_call_args,
            Some(vmctx),
            landing_pad,
            false,
        );
        Ok(results)
    }

    fn translate_call(
        &mut self,
        builder: &mut FunctionBuilder,
        callee_index: FunctionIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        if !self.module.is_imported_function(callee_index) {
            // The callee is defined in the current module: call it directly,
            // forwarding our own vmctx.
            let caller_vmctx = builder
                .func
                .special_param(ArgumentPurpose::VMContext)
                .unwrap();
            real_call_args.push(caller_vmctx);

            // Then append the regular call arguments.
            real_call_args.extend_from_slice(call_args);

            let results = self.call_with_handlers(
                builder,
                callee,
                &real_call_args,
                Some(caller_vmctx),
                landing_pad,
                false,
            );
            return Ok(results);
        }

        // The callee is an imported function: load its body address and vmctx
        // from the import record and call it indirectly.
        let pointer_type = self.pointer_type();
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let vmctx = self.vmctx(builder.func);
        let base = builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted();

        // Load the callee address.
        let body_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, base, body_offset);

        // First append the callee vmctx address.
        let vmctx_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let vmctx = builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(vmctx);

        // Then append the regular call arguments.
        real_call_args.extend_from_slice(call_args);

        let results = self.call_indirect_with_handlers(
            builder,
            sig_ref,
            func_addr,
            &real_call_args,
            Some(vmctx),
            landing_pad,
            false,
        );
        Ok(results)
    }

    fn tag_param_arity(&self, tag_index: TagIndex) -> usize {
        let sig_index = self.module.tags[tag_index];
        let signature = &self.module.signatures[sig_index];
        signature.params().len()
    }

    fn translate_exn_pointer_to_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        exn_ptr: ir::Value,
    ) -> ir::Value {
        let (read_sig, read_idx) = self.get_read_exception_func(builder.func);
        let mut pos = builder.cursor();
        let (_, read_addr) = self.translate_load_builtin_function_address(&mut pos, read_idx);
        let read_call = builder.ins().call_indirect(read_sig, read_addr, &[exn_ptr]);
        builder.inst_results(read_call)[0]
    }

    fn translate_exn_unbox(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: TagIndex,
        exnref: ir::Value,
    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
        let layout = self.exception_type_layout(tag_index)?.clone();

        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, read_exnref_addr) =
            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
        let read_exnref_call =
            builder
                .ins()
                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
        let payload_ptr = builder.inst_results(read_exnref_call)[0];

        let mut values = SmallVec::<[ir::Value; 4]>::with_capacity(layout.fields.len());
        let data_flags = ir::MemFlags::trusted();
        for field in &layout.fields {
            let value = builder.ins().load(
                field.ty,
                data_flags,
                payload_ptr,
                Offset32::new(field.offset as i32),
            );
            values.push(value);
        }

        Ok(values)
    }

    fn translate_exn_throw(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: TagIndex,
        args: &[ir::Value],
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<()> {
        let layout = self.exception_type_layout(tag_index)?.clone();
        if layout.fields.len() != args.len() {
            return Err(WasmError::Generic(format!(
                "exception payload arity mismatch: expected {}, got {}",
                layout.fields.len(),
                args.len()
            )));
        }

        let (alloc_sig, alloc_idx) = self.get_alloc_exception_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, alloc_addr) = self.translate_load_builtin_function_address(&mut pos, alloc_idx);
        let tag_value = builder
            .ins()
            .iconst(TAG_TYPE, i64::from(tag_index.as_u32()));
        let alloc_call = builder
            .ins()
            .call_indirect(alloc_sig, alloc_addr, &[vmctx, tag_value]);
        let exnref = builder.inst_results(alloc_call)[0];

        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx, read_exnref_addr) =
            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
        let read_exnref_call =
            builder
                .ins()
                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
        let payload_ptr = builder.inst_results(read_exnref_call)[0];

        let store_flags = ir::MemFlags::trusted();
        for (field, value) in layout.fields.iter().zip(args.iter()) {
            debug_assert_eq!(
                builder.func.dfg.value_type(*value),
                field.ty,
                "exception payload type mismatch"
            );
            builder.ins().store(
                store_flags,
                *value,
                payload_ptr,
                Offset32::new(field.offset as i32),
            );
        }

        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        let call_args = [vmctx_value, exnref];

        let _ = self.call_indirect_with_handlers(
            builder,
            throw_sig,
            throw_addr,
            &call_args,
            Some(vmctx_value),
            landing_pad,
            true,
        );

        Ok(())
    }

    fn translate_exn_throw_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        exnref: ir::Value,
        landing_pad: Option<LandingPad>,
    ) -> WasmResult<()> {
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        let call_args = [vmctx_value, exnref];

        let _ = self.call_indirect_with_handlers(
            builder,
            throw_sig,
            throw_addr,
            &call_args,
            Some(vmctx_value),
            landing_pad,
            true,
        );

        Ok(())
    }

    fn translate_exn_personality_selector(
        &mut self,
        builder: &mut FunctionBuilder,
        exn_ptr: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (sig, idx) = self.get_personality2_func(builder.func);
        let pointer_type = self.pointer_type();
        let exn_ty = builder.func.dfg.value_type(exn_ptr);
        let exn_arg = if exn_ty == pointer_type {
            exn_ptr
        } else {
            let mut flags = MemFlags::new();
            flags.set_endianness(Endianness::Little);
            builder.ins().bitcast(pointer_type, flags, exn_ptr)
        };

        let mut pos = builder.cursor();
        let (vmctx_value, func_addr) = self.translate_load_builtin_function_address(&mut pos, idx);
        let call = builder
            .ins()
            .call_indirect(sig, func_addr, &[vmctx_value, exn_arg]);
        Ok(builder.inst_results(call)[0])
    }

    fn translate_exn_reraise_unmatched(
        &mut self,
        builder: &mut FunctionBuilder,
        exnref: ir::Value,
    ) -> WasmResult<()> {
        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
        let mut pos = builder.cursor();
        let (vmctx_value, throw_addr) =
            self.translate_load_builtin_function_address(&mut pos, throw_idx);
        builder
            .ins()
            .call_indirect(throw_sig, throw_addr, &[vmctx_value, exnref]);
        builder.ins().trap(crate::TRAP_UNREACHABLE);
        Ok(())
    }

    fn translate_memory_grow(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
        val: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn translate_memory_size(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
        _heap: Heap,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_size_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn translate_memory_copy(
        &mut self,
        mut pos: FuncCursor,
        src_index: MemoryIndex,
        _src_heap: Heap,
        _dst_index: MemoryIndex,
        _dst_heap: Heap,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, src_index, func_idx) = self.get_memory_copy_func(pos.func, src_index);

        let src_index_arg = pos.ins().iconst(I32, src_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, src_index_arg, dst, src, len]);

        Ok(())
    }

    fn translate_memory_fill(
        &mut self,
        mut pos: FuncCursor,
        memory_index: MemoryIndex,
        _heap: Heap,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (func_sig, memory_index, func_idx) = self.get_memory_fill_func(pos.func, memory_index);

        let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index_arg, dst, val, len],
        );

        Ok(())
    }

2013 fn translate_memory_init(
2014 &mut self,
2015 mut pos: FuncCursor,
2016 memory_index: MemoryIndex,
2017 _heap: Heap,
2018 seg_index: u32,
2019 dst: ir::Value,
2020 src: ir::Value,
2021 len: ir::Value,
2022 ) -> WasmResult<()> {
2023 let (func_sig, func_idx) = self.get_memory_init_func(pos.func);
2024
2025 let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
2026 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
2027
2028 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2029
2030 pos.ins().call_indirect(
2031 func_sig,
2032 func_addr,
2033 &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
2034 );
2035
2036 Ok(())
2037 }
2038
2039 fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
2040 let (func_sig, func_idx) = self.get_data_drop_func(pos.func);
2041 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
2042 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2043 pos.ins()
2044 .call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
2045 Ok(())
2046 }
2047
2048 fn translate_table_size(
2049 &mut self,
2050 mut pos: FuncCursor,
2051 table_index: TableIndex,
2052 ) -> WasmResult<ir::Value> {
2053 self.ensure_table_exists(pos.func, table_index);
2054 let (func_sig, index_arg, func_idx) = self.get_table_size_func(pos.func, table_index);
2055 let table_index = pos.ins().iconst(I32, index_arg as i64);
2056 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2057 let call_inst = pos
2058 .ins()
2059 .call_indirect(func_sig, func_addr, &[vmctx, table_index]);
2060 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
2061 }
2062
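    // table.copy touches two tables, so both must be materialized in the
    // environment before their indices are passed to the builtin.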
    fn translate_table_copy(
        &mut self,
        mut pos: FuncCursor,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, src_table_index);
        self.ensure_table_exists(pos.func, dst_table_index);
        let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
            self.get_table_copy_func(pos.func, dst_table_index, src_table_index);

        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[
                vmctx,
                dst_table_index_arg,
                src_table_index_arg,
                dst,
                src,
                len,
            ],
        );

        Ok(())
    }

    fn translate_table_init(
        &mut self,
        mut pos: FuncCursor,
        seg_index: u32,
        table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_init_func(pos.func, table_index);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
        let (func_sig, func_idx) = self.get_elem_drop_func(pos.func);

        let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);

        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);

        Ok(())
    }

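    // memory.atomic.wait32 and memory.atomic.wait64 share this translation;
    // the builtin is selected from the type of `expected` (I64 picks the
    // 64-bit wait).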
    fn translate_atomic_wait(
        &mut self,
        mut pos: FuncCursor,
        index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        expected: ir::Value,
        timeout: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = if pos.func.dfg.value_type(expected) == I64 {
            self.get_memory_atomic_wait64_func(pos.func, index)
        } else {
            self.get_memory_atomic_wait32_func(pos.func, index)
        };
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, memory_index, addr, expected, timeout],
        );
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn translate_atomic_notify(
        &mut self,
        mut pos: FuncCursor,
        index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        count: ir::Value,
    ) -> WasmResult<ir::Value> {
        let (func_sig, index_arg, func_idx) = self.get_memory_atomic_notify_func(pos.func, index);
        let memory_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst =
            pos.ins()
                .call_indirect(func_sig, func_addr, &[vmctx, memory_index, addr, count]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }

    fn get_global_type(&self, global_index: GlobalIndex) -> Option<WasmerType> {
        Some(self.module.globals.get(global_index)?.ty)
    }

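    // Wasm local indices count the function's parameters first and its
    // declared locals second; the type stack mirrors that order so
    // get_local_type can answer by position.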
    fn push_local_decl_on_stack(&mut self, ty: WasmerType) {
        self.type_stack.push(ty);
    }

    fn push_params_on_stack(&mut self, function_index: LocalFunctionIndex) {
        let func_index = self.module.func_index(function_index);
        let sig_idx = self.module.functions[func_index];
        let signature = &self.module.signatures[sig_idx];
        for param in signature.params() {
            self.type_stack.push(*param);
        }
    }

    fn get_local_type(&self, local_index: u32) -> Option<WasmerType> {
        self.type_stack.get(local_index as usize).cloned()
    }

    fn get_local_types(&self) -> &[WasmerType] {
        &self.type_stack
    }

    fn get_function_type(&self, function_index: FunctionIndex) -> Option<&FunctionType> {
        let sig_idx = self.module.functions.get(function_index)?;
        Some(&self.module.signatures[*sig_idx])
    }

    fn get_function_sig(&self, sig_index: SignatureIndex) -> Option<&FunctionType> {
        self.module.signatures.get(sig_index)
    }

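    // Spectre mitigation for heap accesses and proof-carrying code are both
    // unsupported by this environment.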
    fn heap_access_spectre_mitigation(&self) -> bool {
        false
    }

    fn proof_carrying_code(&self) -> bool {
        false
    }

    fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
        &self.heaps
    }
}