1use crate::{
5 heap::{Heap, HeapData, HeapStyle},
6 table::{TableData, TableSize},
7 translator::{FuncEnvironment as BaseFuncEnvironment, GlobalVariable, TargetEnvironment},
8};
9use cranelift_codegen::{
10 cursor::FuncCursor,
11 ir::{
12 self, AbiParam, ArgumentPurpose, Function, InstBuilder, MemFlags, Signature,
13 condcodes::IntCC,
14 immediates::{Offset32, Uimm64},
15 types::*,
16 },
17 isa::TargetFrontendConfig,
18};
19use cranelift_frontend::FunctionBuilder;
20use std::convert::TryFrom;
21use wasmer_compiler::wasmparser::HeapType;
22use wasmer_types::{
23 FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryStyle,
24 ModuleInfo, SignatureIndex, TableIndex, TableStyle, Type as WasmerType, VMBuiltinFunctionIndex,
25 VMOffsets, WasmError, WasmResult,
26 entity::{EntityRef, PrimaryMap, SecondaryMap},
27};
28
29pub fn get_function_name(func_index: FunctionIndex) -> ir::ExternalName {
31 ir::ExternalName::user(ir::UserExternalNameRef::from_u32(func_index.as_u32()))
32}
33
34#[allow(unused)]
36pub fn type_of_vmtable_definition_current_elements(vmoffsets: &VMOffsets) -> ir::Type {
37 ir::Type::int(u16::from(vmoffsets.size_of_vmtable_definition_current_elements()) * 8).unwrap()
38}
39
/// Per-function compilation environment: resolves module-level entities
/// (memories, tables, globals, builtins) into Cranelift IR while a single
/// Wasm function body is being translated.
pub struct FuncEnvironment<'module_environment> {
    /// Target ISA configuration (pointer type, default calling convention, page size).
    target_config: TargetFrontendConfig,

    /// Compile-time module metadata (tables, memories, globals, function signatures).
    module: &'module_environment ModuleInfo,

    /// Wasm operand types tracked during translation.
    type_stack: Vec<WasmerType>,

    /// Cranelift signatures, one per module signature index.
    signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,

    /// Heaps created by `make_heap`, one per memory actually referenced by this function.
    heaps: PrimaryMap<Heap, HeapData>,

    /// Cached `VMContext` global value; created lazily by `vmctx()`.
    vmctx: Option<ir::GlobalValue>,

    // The fields below cache the imported IR signature of each VM builtin.
    // Each is populated on first use by the matching `get_*_sig` method.
    memory32_size_sig: Option<ir::SigRef>,

    table_size_sig: Option<ir::SigRef>,

    memory_grow_sig: Option<ir::SigRef>,

    table_grow_sig: Option<ir::SigRef>,

    table_copy_sig: Option<ir::SigRef>,

    table_init_sig: Option<ir::SigRef>,

    elem_drop_sig: Option<ir::SigRef>,

    memory_copy_sig: Option<ir::SigRef>,

    memory_fill_sig: Option<ir::SigRef>,

    memory_init_sig: Option<ir::SigRef>,

    data_drop_sig: Option<ir::SigRef>,

    table_get_sig: Option<ir::SigRef>,

    table_set_sig: Option<ir::SigRef>,

    func_ref_sig: Option<ir::SigRef>,

    table_fill_sig: Option<ir::SigRef>,

    memory32_atomic_wait32_sig: Option<ir::SigRef>,

    memory32_atomic_wait64_sig: Option<ir::SigRef>,

    memory32_atomic_notify_sig: Option<ir::SigRef>,

    /// Byte offsets of fields inside the `VMContext` structure for this module.
    offsets: VMOffsets,

    /// How each memory is implemented (static vs dynamic bounds, guard sizes).
    memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,

    /// Lazily-built table access metadata; populated by `ensure_table_exists`.
    tables: SecondaryMap<TableIndex, Option<TableData>>,

    /// How each table is implemented.
    table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
}
132
133impl<'module_environment> FuncEnvironment<'module_environment> {
    /// Creates a fresh environment for compiling one function of `module`.
    ///
    /// All builtin-signature caches start empty and are filled lazily; the
    /// `VMOffsets` are derived from the target pointer width and the module
    /// layout.
    pub fn new(
        target_config: TargetFrontendConfig,
        module: &'module_environment ModuleInfo,
        signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
        memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
        table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
    ) -> Self {
        Self {
            target_config,
            module,
            signatures,
            type_stack: vec![],
            heaps: PrimaryMap::new(),
            vmctx: None,
            memory32_size_sig: None,
            table_size_sig: None,
            memory_grow_sig: None,
            table_grow_sig: None,
            table_copy_sig: None,
            table_init_sig: None,
            elem_drop_sig: None,
            memory_copy_sig: None,
            memory_fill_sig: None,
            memory_init_sig: None,
            table_get_sig: None,
            table_set_sig: None,
            data_drop_sig: None,
            func_ref_sig: None,
            table_fill_sig: None,
            memory32_atomic_wait32_sig: None,
            memory32_atomic_wait64_sig: None,
            memory32_atomic_notify_sig: None,
            offsets: VMOffsets::new(target_config.pointer_bytes(), module),
            memory_styles,
            tables: Default::default(),
            table_styles,
        }
    }
172
    /// The IR type of a native pointer on the compilation target.
    fn pointer_type(&self) -> ir::Type {
        self.target_config.pointer_type()
    }
176
    /// Lazily builds the `TableData` (base pointer and bound) for `index`,
    /// caching it in `self.tables`. Subsequent calls are no-ops.
    fn ensure_table_exists(&mut self, func: &mut ir::Function, index: TableIndex) {
        if self.tables[index].is_some() {
            return;
        }

        let pointer_type = self.pointer_type();

        // Locate the VMTableDefinition: directly inside the vmctx for local
        // tables, or behind one extra pointer load for imported tables.
        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_table_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                // Imported table: the vmctx stores a pointer to the owning
                // instance's definition; that pointer never changes, so the
                // load is marked readonly.
                let from_offset = self.offsets.vmctx_vmtable_import(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = self.reference_type().bytes();

        // If minimum == maximum the table can never grow, so its base address
        // is immutable and the load may be marked readonly.
        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if Some(table.minimum) == table.maximum {
                MemFlags::trusted().with_readonly()
            } else {
                MemFlags::trusted()
            },
        });

        // Fixed-size tables get a compile-time bound; growable tables read
        // `current_elements` from the definition at run time.
        let bound = if Some(table.minimum) == table.maximum {
            TableSize::Static {
                bound: table.minimum,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        self.tables[index] = Some(TableData {
            base_gv,
            bound,
            element_size,
        });
    }
250
251 fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
252 self.vmctx.unwrap_or_else(|| {
253 let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
254 self.vmctx = Some(vmctx);
255 vmctx
256 })
257 }
258
259 fn get_table_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
260 let sig = self.table_fill_sig.unwrap_or_else(|| {
261 func.import_signature(Signature {
262 params: vec![
263 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
264 AbiParam::new(I32),
266 AbiParam::new(I32),
268 AbiParam::new(self.reference_type()),
270 AbiParam::new(I32),
272 ],
273 returns: vec![],
274 call_conv: self.target_config.default_call_conv,
275 })
276 });
277 self.table_fill_sig = Some(sig);
278 sig
279 }
280
281 fn get_table_fill_func(
282 &mut self,
283 func: &mut Function,
284 table_index: TableIndex,
285 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
286 (
287 self.get_table_fill_sig(func),
288 table_index.index(),
289 VMBuiltinFunctionIndex::get_table_fill_index(),
290 )
291 }
292
293 fn get_func_ref_sig(&mut self, func: &mut Function) -> ir::SigRef {
294 let sig = self.func_ref_sig.unwrap_or_else(|| {
295 func.import_signature(Signature {
296 params: vec![
297 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
298 AbiParam::new(I32),
299 ],
300 returns: vec![AbiParam::new(self.reference_type())],
301 call_conv: self.target_config.default_call_conv,
302 })
303 });
304 self.func_ref_sig = Some(sig);
305 sig
306 }
307
308 fn get_func_ref_func(
309 &mut self,
310 func: &mut Function,
311 function_index: FunctionIndex,
312 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
313 (
314 self.get_func_ref_sig(func),
315 function_index.index(),
316 VMBuiltinFunctionIndex::get_func_ref_index(),
317 )
318 }
319
320 fn get_table_get_sig(&mut self, func: &mut Function) -> ir::SigRef {
321 let sig = self.table_get_sig.unwrap_or_else(|| {
322 func.import_signature(Signature {
323 params: vec![
324 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
325 AbiParam::new(I32),
326 AbiParam::new(I32),
327 ],
328 returns: vec![AbiParam::new(self.reference_type())],
329 call_conv: self.target_config.default_call_conv,
330 })
331 });
332 self.table_get_sig = Some(sig);
333 sig
334 }
335
336 fn get_table_get_func(
337 &mut self,
338 func: &mut Function,
339 table_index: TableIndex,
340 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
341 if self.module.is_imported_table(table_index) {
342 (
343 self.get_table_get_sig(func),
344 table_index.index(),
345 VMBuiltinFunctionIndex::get_imported_table_get_index(),
346 )
347 } else {
348 (
349 self.get_table_get_sig(func),
350 self.module.local_table_index(table_index).unwrap().index(),
351 VMBuiltinFunctionIndex::get_table_get_index(),
352 )
353 }
354 }
355
356 fn get_table_set_sig(&mut self, func: &mut Function) -> ir::SigRef {
357 let sig = self.table_set_sig.unwrap_or_else(|| {
358 func.import_signature(Signature {
359 params: vec![
360 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
361 AbiParam::new(I32),
362 AbiParam::new(I32),
363 AbiParam::new(self.reference_type()),
364 ],
365 returns: vec![],
366 call_conv: self.target_config.default_call_conv,
367 })
368 });
369 self.table_set_sig = Some(sig);
370 sig
371 }
372
373 fn get_table_set_func(
374 &mut self,
375 func: &mut Function,
376 table_index: TableIndex,
377 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
378 if self.module.is_imported_table(table_index) {
379 (
380 self.get_table_set_sig(func),
381 table_index.index(),
382 VMBuiltinFunctionIndex::get_imported_table_set_index(),
383 )
384 } else {
385 (
386 self.get_table_set_sig(func),
387 self.module.local_table_index(table_index).unwrap().index(),
388 VMBuiltinFunctionIndex::get_table_set_index(),
389 )
390 }
391 }
392
393 fn get_table_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
394 let sig = self.table_grow_sig.unwrap_or_else(|| {
395 func.import_signature(Signature {
396 params: vec![
397 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
398 AbiParam::new(self.reference_type()),
400 AbiParam::new(I32),
401 AbiParam::new(I32),
402 ],
403 returns: vec![AbiParam::new(I32)],
404 call_conv: self.target_config.default_call_conv,
405 })
406 });
407 self.table_grow_sig = Some(sig);
408 sig
409 }
410
411 fn get_table_grow_func(
414 &mut self,
415 func: &mut Function,
416 index: TableIndex,
417 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
418 if self.module.is_imported_table(index) {
419 (
420 self.get_table_grow_sig(func),
421 index.index(),
422 VMBuiltinFunctionIndex::get_imported_table_grow_index(),
423 )
424 } else {
425 (
426 self.get_table_grow_sig(func),
427 self.module.local_table_index(index).unwrap().index(),
428 VMBuiltinFunctionIndex::get_table_grow_index(),
429 )
430 }
431 }
432
433 fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
434 let sig = self.memory_grow_sig.unwrap_or_else(|| {
435 func.import_signature(Signature {
436 params: vec![
437 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
438 AbiParam::new(I32),
439 AbiParam::new(I32),
440 ],
441 returns: vec![AbiParam::new(I32)],
442 call_conv: self.target_config.default_call_conv,
443 })
444 });
445 self.memory_grow_sig = Some(sig);
446 sig
447 }
448
449 fn get_memory_grow_func(
452 &mut self,
453 func: &mut Function,
454 index: MemoryIndex,
455 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
456 if self.module.is_imported_memory(index) {
457 (
458 self.get_memory_grow_sig(func),
459 index.index(),
460 VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
461 )
462 } else {
463 (
464 self.get_memory_grow_sig(func),
465 self.module.local_memory_index(index).unwrap().index(),
466 VMBuiltinFunctionIndex::get_memory32_grow_index(),
467 )
468 }
469 }
470
471 fn get_table_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
472 let sig = self.table_size_sig.unwrap_or_else(|| {
473 func.import_signature(Signature {
474 params: vec![
475 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
476 AbiParam::new(I32),
477 ],
478 returns: vec![AbiParam::new(I32)],
479 call_conv: self.target_config.default_call_conv,
480 })
481 });
482 self.table_size_sig = Some(sig);
483 sig
484 }
485
486 fn get_table_size_func(
489 &mut self,
490 func: &mut Function,
491 index: TableIndex,
492 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
493 if self.module.is_imported_table(index) {
494 (
495 self.get_table_size_sig(func),
496 index.index(),
497 VMBuiltinFunctionIndex::get_imported_table_size_index(),
498 )
499 } else {
500 (
501 self.get_table_size_sig(func),
502 self.module.local_table_index(index).unwrap().index(),
503 VMBuiltinFunctionIndex::get_table_size_index(),
504 )
505 }
506 }
507
508 fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
509 let sig = self.memory32_size_sig.unwrap_or_else(|| {
510 func.import_signature(Signature {
511 params: vec![
512 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
513 AbiParam::new(I32),
514 ],
515 returns: vec![AbiParam::new(I32)],
516 call_conv: self.target_config.default_call_conv,
517 })
518 });
519 self.memory32_size_sig = Some(sig);
520 sig
521 }
522
523 fn get_memory_size_func(
526 &mut self,
527 func: &mut Function,
528 index: MemoryIndex,
529 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
530 if self.module.is_imported_memory(index) {
531 (
532 self.get_memory32_size_sig(func),
533 index.index(),
534 VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
535 )
536 } else {
537 (
538 self.get_memory32_size_sig(func),
539 self.module.local_memory_index(index).unwrap().index(),
540 VMBuiltinFunctionIndex::get_memory32_size_index(),
541 )
542 }
543 }
544
545 fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
546 let sig = self.table_copy_sig.unwrap_or_else(|| {
547 func.import_signature(Signature {
548 params: vec![
549 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
550 AbiParam::new(I32),
552 AbiParam::new(I32),
554 AbiParam::new(I32),
556 AbiParam::new(I32),
558 AbiParam::new(I32),
560 ],
561 returns: vec![],
562 call_conv: self.target_config.default_call_conv,
563 })
564 });
565 self.table_copy_sig = Some(sig);
566 sig
567 }
568
569 fn get_table_copy_func(
570 &mut self,
571 func: &mut Function,
572 dst_table_index: TableIndex,
573 src_table_index: TableIndex,
574 ) -> (ir::SigRef, usize, usize, VMBuiltinFunctionIndex) {
575 let sig = self.get_table_copy_sig(func);
576 (
577 sig,
578 dst_table_index.as_u32() as usize,
579 src_table_index.as_u32() as usize,
580 VMBuiltinFunctionIndex::get_table_copy_index(),
581 )
582 }
583
584 fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
585 let sig = self.table_init_sig.unwrap_or_else(|| {
586 func.import_signature(Signature {
587 params: vec![
588 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
589 AbiParam::new(I32),
591 AbiParam::new(I32),
593 AbiParam::new(I32),
595 AbiParam::new(I32),
597 AbiParam::new(I32),
599 ],
600 returns: vec![],
601 call_conv: self.target_config.default_call_conv,
602 })
603 });
604 self.table_init_sig = Some(sig);
605 sig
606 }
607
608 fn get_table_init_func(
609 &mut self,
610 func: &mut Function,
611 table_index: TableIndex,
612 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
613 let sig = self.get_table_init_sig(func);
614 let table_index = table_index.as_u32() as usize;
615 (
616 sig,
617 table_index,
618 VMBuiltinFunctionIndex::get_table_init_index(),
619 )
620 }
621
622 fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
623 let sig = self.elem_drop_sig.unwrap_or_else(|| {
624 func.import_signature(Signature {
625 params: vec![
626 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
627 AbiParam::new(I32),
629 ],
630 returns: vec![],
631 call_conv: self.target_config.default_call_conv,
632 })
633 });
634 self.elem_drop_sig = Some(sig);
635 sig
636 }
637
638 fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
639 let sig = self.get_elem_drop_sig(func);
640 (sig, VMBuiltinFunctionIndex::get_elem_drop_index())
641 }
642
643 fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
644 let sig = self.memory_copy_sig.unwrap_or_else(|| {
645 func.import_signature(Signature {
646 params: vec![
647 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
648 AbiParam::new(I32),
650 AbiParam::new(I32),
652 AbiParam::new(I32),
654 AbiParam::new(I32),
656 ],
657 returns: vec![],
658 call_conv: self.target_config.default_call_conv,
659 })
660 });
661 self.memory_copy_sig = Some(sig);
662 sig
663 }
664
665 fn get_memory_copy_func(
666 &mut self,
667 func: &mut Function,
668 memory_index: MemoryIndex,
669 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
670 let sig = self.get_memory_copy_sig(func);
671 if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
672 (
673 sig,
674 local_memory_index.index(),
675 VMBuiltinFunctionIndex::get_memory_copy_index(),
676 )
677 } else {
678 (
679 sig,
680 memory_index.index(),
681 VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
682 )
683 }
684 }
685
686 fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
687 let sig = self.memory_fill_sig.unwrap_or_else(|| {
688 func.import_signature(Signature {
689 params: vec![
690 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
691 AbiParam::new(I32),
693 AbiParam::new(I32),
695 AbiParam::new(I32),
697 AbiParam::new(I32),
699 ],
700 returns: vec![],
701 call_conv: self.target_config.default_call_conv,
702 })
703 });
704 self.memory_fill_sig = Some(sig);
705 sig
706 }
707
708 fn get_memory_fill_func(
709 &mut self,
710 func: &mut Function,
711 memory_index: MemoryIndex,
712 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
713 let sig = self.get_memory_fill_sig(func);
714 if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
715 (
716 sig,
717 local_memory_index.index(),
718 VMBuiltinFunctionIndex::get_memory_fill_index(),
719 )
720 } else {
721 (
722 sig,
723 memory_index.index(),
724 VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
725 )
726 }
727 }
728
729 fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
730 let sig = self.memory_init_sig.unwrap_or_else(|| {
731 func.import_signature(Signature {
732 params: vec![
733 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
734 AbiParam::new(I32),
736 AbiParam::new(I32),
738 AbiParam::new(I32),
740 AbiParam::new(I32),
742 AbiParam::new(I32),
744 ],
745 returns: vec![],
746 call_conv: self.target_config.default_call_conv,
747 })
748 });
749 self.memory_init_sig = Some(sig);
750 sig
751 }
752
753 fn get_memory_init_func(
754 &mut self,
755 func: &mut Function,
756 ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
757 let sig = self.get_memory_init_sig(func);
758 (sig, VMBuiltinFunctionIndex::get_memory_init_index())
759 }
760
761 fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
762 let sig = self.data_drop_sig.unwrap_or_else(|| {
763 func.import_signature(Signature {
764 params: vec![
765 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
766 AbiParam::new(I32),
768 ],
769 returns: vec![],
770 call_conv: self.target_config.default_call_conv,
771 })
772 });
773 self.data_drop_sig = Some(sig);
774 sig
775 }
776
777 fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
778 let sig = self.get_data_drop_sig(func);
779 (sig, VMBuiltinFunctionIndex::get_data_drop_index())
780 }
781
782 fn get_memory32_atomic_wait32_sig(&mut self, func: &mut Function) -> ir::SigRef {
783 let sig = self.memory32_atomic_wait32_sig.unwrap_or_else(|| {
784 func.import_signature(Signature {
785 params: vec![
786 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
787 AbiParam::new(I32),
789 AbiParam::new(I32),
791 AbiParam::new(I32),
793 AbiParam::new(I64),
795 ],
796 returns: vec![AbiParam::new(I32)],
797 call_conv: self.target_config.default_call_conv,
798 })
799 });
800 self.memory32_atomic_wait32_sig = Some(sig);
801 sig
802 }
803
804 fn get_memory_atomic_wait32_func(
808 &mut self,
809 func: &mut Function,
810 index: MemoryIndex,
811 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
812 if self.module.is_imported_memory(index) {
813 (
814 self.get_memory32_atomic_wait32_sig(func),
815 index.index(),
816 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
817 )
818 } else {
819 (
820 self.get_memory32_atomic_wait32_sig(func),
821 self.module.local_memory_index(index).unwrap().index(),
822 VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
823 )
824 }
825 }
826
827 fn get_memory32_atomic_wait64_sig(&mut self, func: &mut Function) -> ir::SigRef {
828 let sig = self.memory32_atomic_wait64_sig.unwrap_or_else(|| {
829 func.import_signature(Signature {
830 params: vec![
831 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
832 AbiParam::new(I32),
834 AbiParam::new(I32),
836 AbiParam::new(I64),
838 AbiParam::new(I64),
840 ],
841 returns: vec![AbiParam::new(I32)],
842 call_conv: self.target_config.default_call_conv,
843 })
844 });
845 self.memory32_atomic_wait64_sig = Some(sig);
846 sig
847 }
848
849 fn get_memory_atomic_wait64_func(
853 &mut self,
854 func: &mut Function,
855 index: MemoryIndex,
856 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
857 if self.module.is_imported_memory(index) {
858 (
859 self.get_memory32_atomic_wait64_sig(func),
860 index.index(),
861 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
862 )
863 } else {
864 (
865 self.get_memory32_atomic_wait64_sig(func),
866 self.module.local_memory_index(index).unwrap().index(),
867 VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
868 )
869 }
870 }
871
872 fn get_memory32_atomic_notify_sig(&mut self, func: &mut Function) -> ir::SigRef {
873 let sig = self.memory32_atomic_notify_sig.unwrap_or_else(|| {
874 func.import_signature(Signature {
875 params: vec![
876 AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
877 AbiParam::new(I32),
879 AbiParam::new(I32),
881 AbiParam::new(I32),
883 ],
884 returns: vec![AbiParam::new(I32)],
885 call_conv: self.target_config.default_call_conv,
886 })
887 });
888 self.memory32_atomic_notify_sig = Some(sig);
889 sig
890 }
891
892 fn get_memory_atomic_notify_func(
896 &mut self,
897 func: &mut Function,
898 index: MemoryIndex,
899 ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
900 if self.module.is_imported_memory(index) {
901 (
902 self.get_memory32_atomic_notify_sig(func),
903 index.index(),
904 VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
905 )
906 } else {
907 (
908 self.get_memory32_atomic_notify_sig(func),
909 self.module.local_memory_index(index).unwrap().index(),
910 VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
911 )
912 }
913 }
914
    /// Emits IR that materializes the vmctx pointer and loads the address of
    /// builtin `callee_func_idx` out of the vmctx's builtin-function table.
    ///
    /// Returns `(vmctx_pointer, builtin_function_address)`, both as SSA values
    /// ready to feed a `call_indirect`.
    fn translate_load_builtin_function_address(
        &mut self,
        pos: &mut FuncCursor<'_>,
        callee_func_idx: VMBuiltinFunctionIndex,
    ) -> (ir::Value, ir::Value) {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(pos.func);
        let base = pos.ins().global_value(pointer_type, vmctx);

        // Builtin function pointers never change after instantiation, so the
        // load can be marked readonly for better optimization.
        let mut mem_flags = ir::MemFlags::trusted();
        mem_flags.set_readonly();

        let body_offset =
            i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
        let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);

        (base, func_addr)
    }
937
    /// Emits a bounds-checked load of the funcref stored at `index` in
    /// `table_index`.
    ///
    /// NOTE(review): despite the `get_or_init` name, this only loads the
    /// element — no lazy-initialization path is visible here; confirm whether
    /// initialization happens elsewhere (e.g. at instantiation time).
    fn get_or_init_funcref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> ir::Value {
        let pointer_type = self.pointer_type();
        self.ensure_table_exists(builder.func, table_index);
        let table_data = self.tables[table_index].as_ref().unwrap();

        // `prepare_table_addr` performs the bounds check and yields the
        // element's address plus the memory flags to use for the access.
        let (table_entry_addr, flags) =
            table_data.prepare_table_addr(builder, index, pointer_type, false);
        builder.ins().load(pointer_type, flags, table_entry_addr, 0)
    }
956}
957
impl TargetEnvironment for FuncEnvironment<'_> {
    /// Exposes the target ISA configuration to the shared translator code.
    fn target_config(&self) -> TargetFrontendConfig {
        self.target_config
    }
}
963
964impl BaseFuncEnvironment for FuncEnvironment<'_> {
965 fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
966 index >= 1
968 }
969
    /// Translates `table.grow` into a call to the table-grow builtin.
    ///
    /// Returns the builtin's i32 result (previous size, or a failure sentinel
    /// defined by the runtime — TODO confirm).
    fn translate_table_grow(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        delta: ir::Value,
        init_value: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, index_arg, func_idx) = self.get_table_grow_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        // Argument order matches `get_table_grow_sig`:
        // (vmctx, init_value, delta, table_index).
        let call_inst = pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, init_value, delta, table_index],
        );
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
988
    /// Translates `table.get` into a call to the table-get builtin, which
    /// performs the bounds check and returns the element as a reference.
    fn translate_table_get(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> WasmResult<ir::Value> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_get_func(pos.func, table_index);
        let table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, table_index, index]);
        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1006
    /// Translates `table.set` into a call to the table-set builtin, which
    /// performs the bounds check and stores `value` at `index`.
    fn translate_table_set(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        value: ir::Value,
        index: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(builder.func, table_index);
        let mut pos = builder.cursor();

        let (func_sig, table_index_arg, func_idx) = self.get_table_set_func(pos.func, table_index);
        let n_table_index = pos.ins().iconst(I32, table_index_arg as i64);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
        // Argument order matches `get_table_set_sig`:
        // (vmctx, table_index, elem_index, value).
        pos.ins()
            .call_indirect(func_sig, func_addr, &[vmctx, n_table_index, index, value]);
        Ok(())
    }
1024
    /// Translates `table.fill` into a call to the table-fill builtin:
    /// writes `val` into `len` consecutive slots starting at `dst`.
    fn translate_table_fill(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        table_index: TableIndex,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        self.ensure_table_exists(pos.func, table_index);
        let (func_sig, table_index_arg, func_idx) = self.get_table_fill_func(pos.func, table_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
        pos.ins().call_indirect(
            func_sig,
            func_addr,
            &[vmctx, table_index_arg, dst, val, len],
        );

        Ok(())
    }
1046
    /// Translates `ref.null T`.
    ///
    /// Null `funcref`/`externref` values are represented as the all-zero
    /// reference; all other heap types are rejected as unsupported.
    ///
    /// # Errors
    /// Returns `WasmError::Unsupported` for concrete heap types or abstract
    /// heap types other than `func`/`extern`.
    fn translate_ref_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        ty: HeapType,
    ) -> WasmResult<ir::Value> {
        Ok(match ty {
            HeapType::Abstract { ty, .. } => match ty {
                wasmer_compiler::wasmparser::AbstractHeapType::Func
                | wasmer_compiler::wasmparser::AbstractHeapType::Extern => {
                    pos.ins().iconst(self.reference_type(), 0)
                }
                _ => {
                    return Err(WasmError::Unsupported(
                        "`ref.null T` that is not a `funcref` or an `externref`".into(),
                    ));
                }
            },
            HeapType::Concrete(_) => {
                return Err(WasmError::Unsupported(
                    "`ref.null T` that is not a `funcref` or an `externref`".into(),
                ));
            }
        })
    }
1071
1072 fn translate_ref_is_null(
1073 &mut self,
1074 mut pos: cranelift_codegen::cursor::FuncCursor,
1075 value: ir::Value,
1076 ) -> WasmResult<ir::Value> {
1077 let bool_is_null =
1078 pos.ins()
1079 .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0);
1080 Ok(pos.ins().uextend(ir::types::I32, bool_is_null))
1081 }
1082
    /// Translates `ref.func` into a call to the func-ref builtin, which
    /// produces the reference value for `func_index`.
    fn translate_ref_func(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        func_index: FunctionIndex,
    ) -> WasmResult<ir::Value> {
        let (func_sig, func_index_arg, func_idx) = self.get_func_ref_func(pos.func, func_index);
        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);

        let func_index_arg = pos.ins().iconst(I32, func_index_arg as i64);
        let call_inst = pos
            .ins()
            .call_indirect(func_sig, func_addr, &[vmctx, func_index_arg]);

        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
    }
1098
    /// This backend never emits `GlobalVariable::Custom` (see `make_global`),
    /// so the translator can never reach this hook.
    fn translate_custom_global_get(
        &mut self,
        mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
        _index: GlobalIndex,
    ) -> WasmResult<ir::Value> {
        unreachable!("we don't make any custom globals")
    }
1106
    /// This backend never emits `GlobalVariable::Custom` (see `make_global`),
    /// so the translator can never reach this hook.
    fn translate_custom_global_set(
        &mut self,
        mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
        _index: GlobalIndex,
        _value: ir::Value,
    ) -> WasmResult<()> {
        unreachable!("we don't make any custom globals")
    }
1115
    /// Builds the `HeapData` describing linear memory `index` so loads/stores
    /// against it can be bounds-checked and addressed.
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<Heap> {
        let pointer_type = self.pointer_type();

        // Locate the VMMemoryDefinition: directly inside the vmctx for local
        // memories, or behind one extra pointer load for imported memories.
        let (ptr, base_offset, current_length_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_memory_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
                let current_length_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmmemory_definition_current_length(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_length_offset)
            } else {
                // Imported memory: the pointer to the owning instance's
                // definition is immutable, so the load is marked readonly.
                let from_offset = self.offsets.vmctx_vmmemory_import_definition(index);
                let memory = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset)
            }
        };

        // Dynamic memories read `current_length` at run time; static memories
        // have a compile-time bound and a base that never moves, so the base
        // load may be marked readonly.
        let (offset_guard_size, heap_style, readonly_base) = match self.memory_styles[index] {
            MemoryStyle::Dynamic { offset_guard_size } => {
                let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_length_offset),
                    global_type: pointer_type,
                    flags: ir::MemFlags::trusted(),
                });
                (
                    Uimm64::new(offset_guard_size),
                    HeapStyle::Dynamic {
                        bound_gv: heap_bound,
                    },
                    false,
                )
            }
            MemoryStyle::Static {
                bound,
                offset_guard_size,
            } => (
                Uimm64::new(offset_guard_size),
                HeapStyle::Static {
                    bound: bound.bytes().0 as u64,
                },
                true,
            ),
        };

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if readonly_base {
                ir::MemFlags::trusted().with_readonly()
            } else {
                ir::MemFlags::trusted()
            },
        });
        // 32-bit Wasm memory: indices are I32; min/max sizes are left unset
        // here because the bound check uses `heap_style` instead.
        Ok(self.heaps.push(HeapData {
            base: heap_base,
            min_size: 0,
            max_size: None,
            memory_type: None,
            offset_guard_size: offset_guard_size.into(),
            style: heap_style,
            index_type: I32,
            page_size_log2: self.target_config.page_size_align_log2,
        }))
    }
1196
1197 fn make_global(
1198 &mut self,
1199 func: &mut ir::Function,
1200 index: GlobalIndex,
1201 ) -> WasmResult<GlobalVariable> {
1202 let pointer_type = self.pointer_type();
1203
1204 let (ptr, offset) = {
1205 let vmctx = self.vmctx(func);
1206
1207 let from_offset = if let Some(def_index) = self.module.local_global_index(index) {
1208 self.offsets.vmctx_vmglobal_definition(def_index)
1209 } else {
1210 self.offsets.vmctx_vmglobal_import_definition(index)
1211 };
1212
1213 let global = func.create_global_value(ir::GlobalValueData::Load {
1214 base: vmctx,
1215 offset: Offset32::new(i32::try_from(from_offset).unwrap()),
1216 global_type: pointer_type,
1217 flags: MemFlags::trusted(),
1218 });
1219
1220 (global, 0)
1221 };
1222
1223 Ok(GlobalVariable::Memory {
1224 gv: ptr,
1225 offset: offset.into(),
1226 ty: match self.module.globals[index].ty {
1227 WasmerType::I32 => ir::types::I32,
1228 WasmerType::I64 => ir::types::I64,
1229 WasmerType::F32 => ir::types::F32,
1230 WasmerType::F64 => ir::types::F64,
1231 WasmerType::V128 => ir::types::I8X16,
1232 WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
1233 self.reference_type()
1234 }
1235 },
1236 })
1237 }
1238
    /// Import the precomputed Cranelift signature for `index` into `func` so
    /// it can be referenced by `call_indirect` instructions.
    fn make_indirect_sig(
        &mut self,
        func: &mut ir::Function,
        index: SignatureIndex,
    ) -> WasmResult<ir::SigRef> {
        Ok(func.import_signature(self.signatures[index].clone()))
    }
1246
1247 fn make_direct_func(
1248 &mut self,
1249 func: &mut ir::Function,
1250 index: FunctionIndex,
1251 ) -> WasmResult<ir::FuncRef> {
1252 let sigidx = self.module.functions[index];
1253 let signature = func.import_signature(self.signatures[sigidx].clone());
1254 let name = get_function_name(index);
1255 Ok(func.import_function(ir::ExtFuncData {
1256 name,
1257 signature,
1258 colocated: true,
1259 }))
1260 }
1261
    /// Translate a wasm `call_indirect`: fetch the anyfunc record from the
    /// table, trap on null, check the callee's signature id against the
    /// caller's expected one (for `CallerChecksSignature` tables), then call
    /// through the record's function pointer with the callee's vmctx
    /// prepended to the argument list.
    fn translate_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        sig_index: SignatureIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        let pointer_type = self.pointer_type();

        // Pointer to the table element's anyfunc record; null when the slot
        // was never initialized.
        let anyfunc_ptr = self.get_or_init_funcref_table_elem(builder, table_index, callee);

        let mem_flags = ir::MemFlags::trusted();

        // Calling through a null table entry traps before any load occurs.
        builder
            .ins()
            .trapz(anyfunc_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);

        // Raw function body pointer stored in the anyfunc record.
        let func_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
        );

        match self.table_styles[table_index] {
            TableStyle::CallerChecksSignature => {
                // Expected signature id, read from the caller's vmctx.
                let sig_id_size = self.offsets.size_of_vmshared_signature_index();
                let sig_id_type = ir::Type::int(u16::from(sig_id_size) * 8).unwrap();
                let vmctx = self.vmctx(builder.func);
                let base = builder.ins().global_value(pointer_type, vmctx);
                let offset =
                    i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();

                // The vmctx signature-id table is immutable at runtime, so the
                // load can be marked readonly.
                let mut mem_flags = ir::MemFlags::trusted();
                mem_flags.set_readonly();
                let caller_sig_id = builder.ins().load(sig_id_type, mem_flags, base, offset);

                // Actual signature id of the callee, from the anyfunc record.
                let mem_flags = ir::MemFlags::trusted();
                let callee_sig_id = builder.ins().load(
                    sig_id_type,
                    mem_flags,
                    anyfunc_ptr,
                    i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
                );

                // Mismatched signatures trap with a bad-signature code.
                let cmp = builder
                    .ins()
                    .icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
                builder.ins().trapz(cmp, crate::TRAP_BAD_SIGNATURE);
            }
        }

        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        // First real argument is always the callee's own vmctx, loaded from
        // the anyfunc record.
        let vmctx = builder.ins().load(
            pointer_type,
            mem_flags,
            anyfunc_ptr,
            i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
        );
        real_call_args.push(vmctx);

        real_call_args.extend_from_slice(call_args);

        Ok(builder
            .ins()
            .call_indirect(sig_ref, func_addr, &real_call_args))
    }
1341
    /// Translate a direct wasm `call`. Locally-defined callees share the
    /// caller's vmctx and get a plain direct call; imported callees require
    /// loading the import's body pointer and its own vmctx out of the
    /// caller's vmctx, then calling indirectly.
    fn translate_call(
        &mut self,
        builder: &mut FunctionBuilder,
        callee_index: FunctionIndex,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);

        if !self.module.is_imported_function(callee_index) {
            // Local function: forward our own vmctx special parameter.
            let caller_vmctx = builder
                .func
                .special_param(ArgumentPurpose::VMContext)
                .unwrap();
            real_call_args.push(caller_vmctx);

            real_call_args.extend_from_slice(call_args);

            return Ok(builder.ins().call(callee, &real_call_args));
        }

        // Imported function: locate the `VMFunctionImport` record in the
        // caller's vmctx.
        let pointer_type = self.pointer_type();
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let vmctx = self.vmctx(builder.func);
        let base = builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted();

        // Load the imported function's body address.
        let body_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, base, body_offset);

        // Load the callee's vmctx; it becomes the first call argument.
        let vmctx_offset =
            i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let vmctx = builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(vmctx);

        real_call_args.extend_from_slice(call_args);

        Ok(builder
            .ins()
            .call_indirect(sig_ref, func_addr, &real_call_args))
    }
1399
1400 fn translate_memory_grow(
1401 &mut self,
1402 mut pos: FuncCursor<'_>,
1403 index: MemoryIndex,
1404 _heap: Heap,
1405 val: ir::Value,
1406 ) -> WasmResult<ir::Value> {
1407 let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(pos.func, index);
1408 let memory_index = pos.ins().iconst(I32, index_arg as i64);
1409 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1410 let call_inst = pos
1411 .ins()
1412 .call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
1413 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1414 }
1415
1416 fn translate_memory_size(
1417 &mut self,
1418 mut pos: FuncCursor<'_>,
1419 index: MemoryIndex,
1420 _heap: Heap,
1421 ) -> WasmResult<ir::Value> {
1422 let (func_sig, index_arg, func_idx) = self.get_memory_size_func(pos.func, index);
1423 let memory_index = pos.ins().iconst(I32, index_arg as i64);
1424 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1425 let call_inst = pos
1426 .ins()
1427 .call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
1428 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1429 }
1430
1431 fn translate_memory_copy(
1432 &mut self,
1433 mut pos: FuncCursor,
1434 src_index: MemoryIndex,
1435 _src_heap: Heap,
1436 _dst_index: MemoryIndex,
1437 _dst_heap: Heap,
1438 dst: ir::Value,
1439 src: ir::Value,
1440 len: ir::Value,
1441 ) -> WasmResult<()> {
1442 let (func_sig, src_index, func_idx) = self.get_memory_copy_func(pos.func, src_index);
1443
1444 let src_index_arg = pos.ins().iconst(I32, src_index as i64);
1445
1446 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1447
1448 pos.ins()
1449 .call_indirect(func_sig, func_addr, &[vmctx, src_index_arg, dst, src, len]);
1450
1451 Ok(())
1452 }
1453
1454 fn translate_memory_fill(
1455 &mut self,
1456 mut pos: FuncCursor,
1457 memory_index: MemoryIndex,
1458 _heap: Heap,
1459 dst: ir::Value,
1460 val: ir::Value,
1461 len: ir::Value,
1462 ) -> WasmResult<()> {
1463 let (func_sig, memory_index, func_idx) = self.get_memory_fill_func(pos.func, memory_index);
1464
1465 let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);
1466
1467 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1468
1469 pos.ins().call_indirect(
1470 func_sig,
1471 func_addr,
1472 &[vmctx, memory_index_arg, dst, val, len],
1473 );
1474
1475 Ok(())
1476 }
1477
1478 fn translate_memory_init(
1479 &mut self,
1480 mut pos: FuncCursor,
1481 memory_index: MemoryIndex,
1482 _heap: Heap,
1483 seg_index: u32,
1484 dst: ir::Value,
1485 src: ir::Value,
1486 len: ir::Value,
1487 ) -> WasmResult<()> {
1488 let (func_sig, func_idx) = self.get_memory_init_func(pos.func);
1489
1490 let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
1491 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
1492
1493 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1494
1495 pos.ins().call_indirect(
1496 func_sig,
1497 func_addr,
1498 &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
1499 );
1500
1501 Ok(())
1502 }
1503
1504 fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
1505 let (func_sig, func_idx) = self.get_data_drop_func(pos.func);
1506 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
1507 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1508 pos.ins()
1509 .call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
1510 Ok(())
1511 }
1512
1513 fn translate_table_size(
1514 &mut self,
1515 mut pos: FuncCursor,
1516 table_index: TableIndex,
1517 ) -> WasmResult<ir::Value> {
1518 self.ensure_table_exists(pos.func, table_index);
1519 let (func_sig, index_arg, func_idx) = self.get_table_size_func(pos.func, table_index);
1520 let table_index = pos.ins().iconst(I32, index_arg as i64);
1521 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1522 let call_inst = pos
1523 .ins()
1524 .call_indirect(func_sig, func_addr, &[vmctx, table_index]);
1525 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1526 }
1527
1528 fn translate_table_copy(
1529 &mut self,
1530 mut pos: FuncCursor,
1531 dst_table_index: TableIndex,
1532 src_table_index: TableIndex,
1533 dst: ir::Value,
1534 src: ir::Value,
1535 len: ir::Value,
1536 ) -> WasmResult<()> {
1537 self.ensure_table_exists(pos.func, src_table_index);
1538 self.ensure_table_exists(pos.func, dst_table_index);
1539 let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
1540 self.get_table_copy_func(pos.func, dst_table_index, src_table_index);
1541
1542 let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
1543 let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
1544
1545 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1546
1547 pos.ins().call_indirect(
1548 func_sig,
1549 func_addr,
1550 &[
1551 vmctx,
1552 dst_table_index_arg,
1553 src_table_index_arg,
1554 dst,
1555 src,
1556 len,
1557 ],
1558 );
1559
1560 Ok(())
1561 }
1562
1563 fn translate_table_init(
1564 &mut self,
1565 mut pos: FuncCursor,
1566 seg_index: u32,
1567 table_index: TableIndex,
1568 dst: ir::Value,
1569 src: ir::Value,
1570 len: ir::Value,
1571 ) -> WasmResult<()> {
1572 self.ensure_table_exists(pos.func, table_index);
1573 let (func_sig, table_index_arg, func_idx) = self.get_table_init_func(pos.func, table_index);
1574
1575 let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
1576 let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
1577
1578 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1579
1580 pos.ins().call_indirect(
1581 func_sig,
1582 func_addr,
1583 &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
1584 );
1585
1586 Ok(())
1587 }
1588
1589 fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
1590 let (func_sig, func_idx) = self.get_elem_drop_func(pos.func);
1591
1592 let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
1593
1594 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1595
1596 pos.ins()
1597 .call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);
1598
1599 Ok(())
1600 }
1601
1602 fn translate_atomic_wait(
1603 &mut self,
1604 mut pos: FuncCursor,
1605 index: MemoryIndex,
1606 _heap: Heap,
1607 addr: ir::Value,
1608 expected: ir::Value,
1609 timeout: ir::Value,
1610 ) -> WasmResult<ir::Value> {
1611 let (func_sig, index_arg, func_idx) = if pos.func.dfg.value_type(expected) == I64 {
1612 self.get_memory_atomic_wait64_func(pos.func, index)
1613 } else {
1614 self.get_memory_atomic_wait32_func(pos.func, index)
1615 };
1616 let memory_index = pos.ins().iconst(I32, index_arg as i64);
1617 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1618 let call_inst = pos.ins().call_indirect(
1619 func_sig,
1620 func_addr,
1621 &[vmctx, memory_index, addr, expected, timeout],
1622 );
1623 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1624 }
1625
1626 fn translate_atomic_notify(
1627 &mut self,
1628 mut pos: FuncCursor,
1629 index: MemoryIndex,
1630 _heap: Heap,
1631 addr: ir::Value,
1632 count: ir::Value,
1633 ) -> WasmResult<ir::Value> {
1634 let (func_sig, index_arg, func_idx) = self.get_memory_atomic_notify_func(pos.func, index);
1635 let memory_index = pos.ins().iconst(I32, index_arg as i64);
1636 let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1637 let call_inst =
1638 pos.ins()
1639 .call_indirect(func_sig, func_addr, &[vmctx, memory_index, addr, count]);
1640 Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1641 }
1642
1643 fn get_global_type(&self, global_index: GlobalIndex) -> Option<WasmerType> {
1644 Some(self.module.globals.get(global_index)?.ty)
1645 }
1646
    /// Record the type of a declared local on the tracking stack, after the
    /// parameters pushed by `push_params_on_stack`.
    fn push_local_decl_on_stack(&mut self, ty: WasmerType) {
        self.type_stack.push(ty);
    }
1650
1651 fn push_params_on_stack(&mut self, function_index: LocalFunctionIndex) {
1652 let func_index = self.module.func_index(function_index);
1653 let sig_idx = self.module.functions[func_index];
1654 let signature = &self.module.signatures[sig_idx];
1655 for param in signature.params() {
1656 self.type_stack.push(*param);
1657 }
1658 }
1659
    /// Type of local `local_index` as previously recorded via
    /// `push_params_on_stack`/`push_local_decl_on_stack`, or `None` if the
    /// index was never pushed.
    fn get_local_type(&self, local_index: u32) -> Option<WasmerType> {
        self.type_stack.get(local_index as usize).cloned()
    }
1663
    /// All recorded local types (params first, then declared locals).
    fn get_local_types(&self) -> &[WasmerType] {
        &self.type_stack
    }
1667
1668 fn get_function_type(&self, function_index: FunctionIndex) -> Option<&FunctionType> {
1669 let sig_idx = self.module.functions.get(function_index)?;
1670 Some(&self.module.signatures[*sig_idx])
1671 }
1672
    /// Wasm function type behind signature `sig_index`, or `None` if the
    /// index is out of range.
    fn get_function_sig(&self, sig_index: SignatureIndex) -> Option<&FunctionType> {
        self.module.signatures.get(sig_index)
    }
1676
    /// Spectre mitigation for heap bounds checks is not enabled in this
    /// environment.
    fn heap_access_spectre_mitigation(&self) -> bool {
        false
    }
1680
    /// Cranelift proof-carrying-code facts are not emitted by this
    /// environment.
    fn proof_carrying_code(&self) -> bool {
        false
    }
1684
    /// All heaps created so far via `make_heap`.
    fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
        &self.heaps
    }
1688}