1#[cfg(feature = "unwind")]
2use crate::dwarf::WriterRelocate;
3
4use crate::{
5 address_map::get_function_address_map,
6 codegen_error,
7 common_decl::*,
8 config::Singlepass,
9 location::{Location, Reg},
10 machine::{Label, Machine, MachineStackOffset, NATIVE_PAGE_SIZE, UnsignedCondition},
11 unwind::UnwindFrame,
12};
13#[cfg(feature = "unwind")]
14use gimli::write::Address;
15use smallvec::{SmallVec, smallvec};
16use std::{cmp, iter};
17
18use wasmer_compiler::{
19 FunctionBodyData,
20 types::{
21 function::{CompiledFunction, CompiledFunctionFrameInfo, FunctionBody},
22 relocation::{Relocation, RelocationTarget},
23 section::SectionIndex,
24 },
25 wasmparser::{
26 BlockType as WpTypeOrFuncType, HeapType as WpHeapType, Operator, RefType as WpRefType,
27 ValType as WpType,
28 },
29};
30
31#[cfg(feature = "unwind")]
32use wasmer_compiler::types::unwind::CompiledFunctionUnwindInfo;
33
34use wasmer_types::target::CallingConvention;
35use wasmer_types::{
36 CompileError, FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, LocalMemoryIndex,
37 MemoryIndex, MemoryStyle, ModuleInfo, SignatureIndex, TableIndex, TableStyle, TrapCode, Type,
38 VMBuiltinFunctionIndex, VMOffsets,
39 entity::{EntityRef, PrimaryMap},
40};
41
/// A value's location plus any NaN canonicalization still pending on it.
#[allow(type_alias_bounds)]
type LocationWithCanonicalization<M: Machine> = (Location<M::GPR, M::SIMD>, CanonicalizeType);
44
/// Single-pass code generator for one WebAssembly function.
///
/// Translates wasm operators into machine code through the generic
/// [`Machine`] backend while tracking an abstract value stack and
/// control-frame stack.
pub struct FuncGen<'a, M: Machine> {
    /// Static module information (functions, signatures, globals, memories).
    module: &'a ModuleInfo,

    /// Compiler configuration (e.g. the NaN-canonicalization switch).
    config: &'a Singlepass,

    /// Offsets of fields inside the `VMContext` structure.
    vmoffsets: &'a VMOffsets,

    /// Memory style (static vs dynamic) per memory index.
    memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,

    /// Signature of the function being compiled.
    signature: FunctionType,

    /// Where each wasm local lives (register or stack slot).
    locals: Vec<Location<M::GPR, M::SIMD>>,

    /// Wasm types of the locals, parameters included.
    local_types: Vec<WpType>,

    /// Abstract wasm value stack, with pending NaN-canonicalization info.
    value_stack: Vec<LocationWithCanonicalization<M>>,

    /// Open control frames (function body, blocks, loops, ifs).
    control_stack: Vec<ControlFrame>,

    /// Current size, in bytes, of dynamically allocated stack slots.
    stack_offset: MachineStackOffset,

    /// Offset of the register save area, recorded by `init_locals`.
    save_area_offset: Option<MachineStackOffset>,

    /// Target-specific code-emission backend.
    machine: M,

    /// Nesting depth of dead (unreachable) code currently being skipped.
    unreachable_depth: usize,

    /// Index of this function among the module's local functions.
    local_func_index: LocalFunctionIndex,

    /// Relocations recorded while emitting code.
    relocations: Vec<Relocation>,

    /// Shared labels for the per-function trap stubs.
    special_labels: SpecialLabelSet,

    /// Calling convention in effect for this target.
    calling_convention: CallingConvention,
}
100
/// Labels of the shared trap stubs for this function; guarded operations
/// (division, memory/table access, indirect calls, atomics) branch to
/// these on failure.
struct SpecialLabelSet {
    integer_division_by_zero: Label,
    integer_overflow: Label,
    heap_access_oob: Label,
    table_access_oob: Label,
    indirect_call_null: Label,
    bad_signature: Label,
    unaligned_atomic: Label,
}
110
/// Whether a float value still needs NaN canonicalization before being
/// stored, and at which width.
#[derive(Copy, Clone, Debug)]
enum CanonicalizeType {
    /// No canonicalization pending (non-float or already canonical).
    None,
    /// Pending canonicalization of a 32-bit float.
    F32,
    /// Pending canonicalization of a 64-bit float.
    F64,
}
119
120impl CanonicalizeType {
121 fn to_size(self) -> Option<Size> {
122 match self {
123 CanonicalizeType::F32 => Some(Size::S32),
124 CanonicalizeType::F64 => Some(Size::S64),
125 CanonicalizeType::None => None,
126 }
127 }
128
129 fn promote(self) -> Result<Self, CompileError> {
130 match self {
131 CanonicalizeType::None => Ok(CanonicalizeType::None),
132 CanonicalizeType::F32 => Ok(CanonicalizeType::F64),
133 CanonicalizeType::F64 => codegen_error!("cannot promote F64"),
134 }
135 }
136
137 fn demote(self) -> Result<Self, CompileError> {
138 match self {
139 CanonicalizeType::None => Ok(CanonicalizeType::None),
140 CanonicalizeType::F32 => codegen_error!("cannot demote F64"),
141 CanonicalizeType::F64 => Ok(CanonicalizeType::F32),
142 }
143 }
144}
145
/// Convenience extension for `wasmparser` value types.
trait WpTypeExt {
    /// Returns `true` for the float types (`F32`/`F64`).
    fn is_float(&self) -> bool;
}
149
150impl WpTypeExt for WpType {
151 fn is_float(&self) -> bool {
152 matches!(self, WpType::F32 | WpType::F64)
153 }
154}
155
/// What kind of control construct a [`ControlFrame`] represents.
#[derive(Debug, Copy, Clone)]
pub enum ControlState {
    /// The implicit frame covering the whole function body.
    Function,
    /// A `block` construct.
    Block,
    /// A `loop` construct.
    Loop,
    /// An `if` arm; carries a label recorded at `if` time — presumably the
    /// target of the not-taken branch (TODO confirm in `feed_operator`).
    If(Label),
    /// An `else` arm.
    Else,
}
164
/// One entry on the control stack: an open function/block/loop/if frame.
#[derive(Debug, Clone)]
pub struct ControlFrame {
    /// Which construct this frame belongs to.
    pub state: ControlState,
    /// Label associated with this frame (branch target).
    pub label: Label,
    /// Result types this frame produces.
    pub returns: SmallVec<[WpType; 1]>,
    /// Height of `value_stack` when the frame was opened.
    pub value_stack_depth: usize,
}
172
/// Convert a wasmer [`Type`] to the corresponding `wasmparser` value type.
///
/// Reference types map to nullable `extern`/`func` references; exception
/// references are not supported yet (`todo!`).
fn type_to_wp_type(ty: Type) -> WpType {
    match ty {
        Type::I32 => WpType::I32,
        Type::I64 => WpType::I64,
        Type::F32 => WpType::F32,
        Type::F64 => WpType::F64,
        Type::V128 => WpType::V128,
        // `RefType::new` only fails for invalid heap-type combinations,
        // so these unwraps cannot trip for EXTERN/FUNC.
        Type::ExternRef => WpType::Ref(WpRefType::new(true, WpHeapType::EXTERN).unwrap()),
        Type::FuncRef => WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap()),
        Type::ExceptionRef => todo!(),
    }
}
185
/// Operand bundle for a binary operation: the two popped inputs and the
/// freshly acquired result location (see `i2o1_prepare`).
struct I2O1<R: Reg, S: Reg> {
    loc_a: Location<R, S>,
    loc_b: Location<R, S>,
    ret: Location<R, S>,
}
193
/// How `emit_call_native` should shape the call.
enum NativeCallType {
    /// Pass the `VMContext` pointer as the implicit first argument
    /// (wasm arguments shift up by one slot).
    IncludeVMCtxArgument,
    /// Call to an unreachable/trap stub: no vmctx argument, and the call
    /// site is marked with `TrapCode::UnreachableCodeReached`.
    Unreachable,
}
199
200impl<'a, M: Machine> FuncGen<'a, M> {
    /// Current size, in bytes, of the dynamically allocated stack area.
    fn get_stack_offset(&self) -> usize {
        self.stack_offset.0
    }
204
205 fn acquire_location(&mut self, ty: &WpType) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
210 let mut delta_stack_offset: usize = 0;
211
212 let loc = match *ty {
213 WpType::F32 | WpType::F64 => self.machine.pick_simd().map(Location::SIMD),
214 WpType::I32 | WpType::I64 => self.machine.pick_gpr().map(Location::GPR),
215 WpType::Ref(ty) if ty.is_extern_ref() || ty.is_func_ref() => {
216 self.machine.pick_gpr().map(Location::GPR)
217 }
218 _ => codegen_error!("can't acquire location for type {:?}", ty),
219 };
220
221 let loc = if let Some(x) = loc {
222 x
223 } else {
224 self.stack_offset.0 += 8;
225 delta_stack_offset += 8;
226 self.machine.local_on_stack(self.stack_offset.0 as i32)
227 };
228 if let Location::GPR(x) = loc {
229 self.machine.reserve_gpr(x);
230 } else if let Location::SIMD(x) = loc {
231 self.machine.reserve_simd(x);
232 }
233
234 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
235 if delta_stack_offset != 0 {
236 self.machine.adjust_stack(delta_stack_offset as u32)?;
237 }
238 Ok(loc)
239 }
240
    /// Release the registers and stack slots backing `locs`.
    ///
    /// `locs` must be the top of the value stack: stack slots are freed in
    /// reverse order, and each freed slot must sit exactly at the current
    /// `stack_offset`, otherwise slots would be released out of order.
    fn release_locations(
        &mut self,
        locs: &[LocationWithCanonicalization<M>],
    ) -> Result<(), CompileError> {
        let mut delta_stack_offset: usize = 0;

        for (loc, _) in locs.iter().rev() {
            match *loc {
                Location::GPR(ref x) => {
                    self.machine.release_gpr(*x);
                }
                Location::SIMD(ref x) => {
                    self.machine.release_simd(*x);
                }
                Location::Memory(y, x) => {
                    // Only slots addressed off the local pointer are ours.
                    if y == self.machine.local_pointer() {
                        if x >= 0 {
                            codegen_error!("Invalid memory offset {}", x);
                        }
                        let offset = (-x) as usize;
                        if offset != self.stack_offset.0 {
                            codegen_error!(
                                "Invalid memory offset {}!={}",
                                offset,
                                self.stack_offset.0
                            );
                        }
                        self.stack_offset.0 -= 8;
                        delta_stack_offset += 8;
                    }
                }
                _ => {}
            }
        }
        // NOTE(review): this calls `restore_stack` while the otherwise
        // near-identical `release_locations_value` calls `adjust_stack`
        // — confirm the asymmetry is intentional.
        let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
        if delta_stack_offset != 0 {
            self.machine.restore_stack(delta_stack_offset as u32)?;
        }
        Ok(())
    }
    /// Release every location on the value stack above `stack_depth`.
    ///
    /// Same bookkeeping as `release_locations`, but operating directly on
    /// a suffix of `self.value_stack` (the entries are NOT popped here).
    fn release_locations_value(&mut self, stack_depth: usize) -> Result<(), CompileError> {
        let mut delta_stack_offset: usize = 0;
        let locs = &self.value_stack[stack_depth..];

        for (loc, _) in locs.iter().rev() {
            match *loc {
                Location::GPR(ref x) => {
                    self.machine.release_gpr(*x);
                }
                Location::SIMD(ref x) => {
                    self.machine.release_simd(*x);
                }
                Location::Memory(y, x) => {
                    // Only slots addressed off the local pointer are ours;
                    // they must be freed exactly in LIFO order.
                    if y == self.machine.local_pointer() {
                        if x >= 0 {
                            codegen_error!("Invalid memory offset {}", x);
                        }
                        let offset = (-x) as usize;
                        if offset != self.stack_offset.0 {
                            codegen_error!(
                                "Invalid memory offset {}!={}",
                                offset,
                                self.stack_offset.0
                            );
                        }
                        self.stack_offset.0 -= 8;
                        delta_stack_offset += 8;
                    }
                }
                _ => {}
            }
        }

        // NOTE(review): uses `adjust_stack` here, whereas
        // `release_locations` uses `restore_stack` — confirm intentional.
        let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
        if delta_stack_offset != 0 {
            self.machine.adjust_stack(delta_stack_offset as u32)?;
        }
        Ok(())
    }
322
323 fn release_locations_only_regs(
324 &mut self,
325 locs: &[LocationWithCanonicalization<M>],
326 ) -> Result<(), CompileError> {
327 for (loc, _) in locs.iter().rev() {
328 match *loc {
329 Location::GPR(ref x) => {
330 self.machine.release_gpr(*x);
331 }
332 Location::SIMD(ref x) => {
333 self.machine.release_simd(*x);
334 }
335 _ => {}
336 }
337 }
338 Ok(())
339 }
340
    /// Free only the stack slots in `locs`, leaving register locations
    /// marked as used (counterpart of `release_locations_only_regs`).
    ///
    /// Slots are freed in reverse order and must match the current
    /// `stack_offset` exactly.
    fn release_locations_only_stack(
        &mut self,
        locs: &[LocationWithCanonicalization<M>],
    ) -> Result<(), CompileError> {
        let mut delta_stack_offset: usize = 0;

        for (loc, _) in locs.iter().rev() {
            if let Location::Memory(y, x) = *loc
                && y == self.machine.local_pointer()
            {
                // Stack slots live at negative offsets from the local pointer.
                if x >= 0 {
                    codegen_error!("Invalid memory offset {}", x);
                }
                let offset = (-x) as usize;
                if offset != self.stack_offset.0 {
                    codegen_error!("Invalid memory offset {}!={}", offset, self.stack_offset.0);
                }
                self.stack_offset.0 -= 8;
                delta_stack_offset += 8;
            }
        }

        let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
        if delta_stack_offset != 0 {
            self.machine.pop_stack_locals(delta_stack_offset as u32)?;
        }
        Ok(())
    }
369
370 fn release_locations_keep_state(&mut self, stack_depth: usize) -> Result<(), CompileError> {
371 let mut delta_stack_offset: usize = 0;
372 let mut stack_offset = self.stack_offset.0;
373 let locs = &self.value_stack[stack_depth..];
374
375 for (loc, _) in locs.iter().rev() {
376 if let Location::Memory(y, x) = *loc
377 && y == self.machine.local_pointer()
378 {
379 if x >= 0 {
380 codegen_error!("Invalid memory offset {}", x);
381 }
382 let offset = (-x) as usize;
383 if offset != stack_offset {
384 codegen_error!("Invalid memory offset {}!={}", offset, self.stack_offset.0);
385 }
386 stack_offset -= 8;
387 delta_stack_offset += 8;
388 }
389 }
390
391 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
392 if delta_stack_offset != 0 {
393 self.machine.pop_stack_locals(delta_stack_offset as u32)?;
394 }
395 Ok(())
396 }
397
    /// Lay out the function frame for its `n` locals: decide where each
    /// local lives, save callee-saved registers and the vmctx pointer,
    /// copy the incoming arguments into their slots, and zero-initialize
    /// the remaining locals.
    ///
    /// Returns the chosen location for every local and records
    /// `save_area_offset` for `finalize_locals` to unwind later.
    #[allow(clippy::type_complexity)]
    fn init_locals(
        &mut self,
        n: usize,
        sig: FunctionType,
        calling_convention: CallingConvention,
    ) -> Result<Vec<Location<M::GPR, M::SIMD>>, CompileError> {
        // Locals that will live in memory rather than in a register.
        let num_mem_slots = (0..n)
            .filter(|&x| self.machine.is_local_on_stack(x))
            .count();

        // Static area: one 8-byte save slot per register-resident local...
        let mut static_area_size: usize = 0;

        for i in 0..n {
            if !self.machine.is_local_on_stack(i) {
                static_area_size += 8;
            }
        }

        // ...plus one slot for the saved vmctx pointer...
        static_area_size += 8;

        // ...plus one slot per callee-saved register.
        static_area_size += 8 * self.machine.list_to_save(calling_convention).len();

        // Size of the save area alone; memory-resident locals go below it.
        let callee_saved_regs_size = static_area_size;

        let locations: Vec<Location<M::GPR, M::SIMD>> = (0..n)
            .map(|i| self.machine.get_local_location(i, callee_saved_regs_size))
            .collect();

        static_area_size += num_mem_slots * 8;

        static_area_size = self.machine.round_stack_adjust(static_area_size);

        // Zero one non-argument local per native page.
        // NOTE(review): this writes before `adjust_stack` reserves the
        // area — presumably a stack probe to commit pages; confirm the
        // backend guarantees this is safe.
        for i in (sig.params().len()..n)
            .step_by(NATIVE_PAGE_SIZE / 8)
            .skip(1)
        {
            self.machine.zero_location(Size::S64, locations[i])?;
        }

        // Reserve the whole static area at once.
        self.machine.adjust_stack(static_area_size as _)?;

        // Save register-resident locals into the save area.
        for loc in locations.iter() {
            if let Location::GPR(_) = *loc {
                self.stack_offset.0 += 8;
                self.machine.move_local(self.stack_offset.0 as i32, *loc)?;
            }
        }

        // Save the vmctx pointer.
        self.stack_offset.0 += 8;
        self.machine.move_local(
            self.stack_offset.0 as i32,
            Location::GPR(self.machine.get_vmctx_reg()),
        )?;

        // Save the callee-saved registers.
        let regs_to_save = self.machine.list_to_save(calling_convention);
        for loc in regs_to_save.iter() {
            self.stack_offset.0 += 8;
            self.machine.move_local(self.stack_offset.0 as i32, *loc)?;
        }

        // `finalize_locals` unwinds back to this point at function exit.
        self.save_area_offset = Some(MachineStackOffset(self.stack_offset.0));

        // Copy wasm arguments into their local slots. Native argument 0 is
        // the vmctx pointer, hence `i + 1`.
        let mut stack_offset: usize = 0;
        for (i, param) in sig.params().iter().enumerate() {
            let sz = match *param {
                Type::I32 | Type::F32 => Size::S32,
                Type::I64 | Type::F64 => Size::S64,
                Type::ExternRef | Type::FuncRef => Size::S64,
                _ => codegen_error!("singlepass init_local unimplemented"),
            };
            let loc = self.machine.get_call_param_location(
                i + 1,
                sz,
                &mut stack_offset,
                calling_convention,
            );
            self.machine
                .move_location_extend(sz, false, loc, Size::S64, locations[i])?;
        }

        // Load the vmctx pointer (native argument 0) into its register.
        self.machine.move_location(
            Size::S64,
            self.machine
                .get_simple_param_location(0, calling_convention),
            Location::GPR(self.machine.get_vmctx_reg()),
        )?;

        // Zero the non-argument locals: memory slots are zeroed in one
        // batched `init_stack_loc` starting from the lowest slot (found
        // via `cmp::min`); register locals are zeroed individually.
        let mut init_stack_loc_cnt = 0;
        let mut last_stack_loc = Location::Memory(self.machine.local_pointer(), i32::MAX);
        for location in locations.iter().take(n).skip(sig.params().len()) {
            match location {
                Location::Memory(_, _) => {
                    init_stack_loc_cnt += 1;
                    last_stack_loc = cmp::min(last_stack_loc, *location);
                }
                Location::GPR(_) => {
                    self.machine.zero_location(Size::S64, *location)?;
                }
                _ => codegen_error!("singlepass init_local unreachable"),
            }
        }
        if init_stack_loc_cnt > 0 {
            self.machine
                .init_stack_loc(init_stack_loc_cnt, last_stack_loc)?;
        }

        // Account for the memory-resident locals in the tracked offset.
        self.stack_offset.0 += static_area_size - callee_saved_regs_size;

        Ok(locations)
    }
535
536 fn finalize_locals(
537 &mut self,
538 calling_convention: CallingConvention,
539 ) -> Result<(), CompileError> {
540 self.machine
542 .restore_saved_area(self.save_area_offset.as_ref().unwrap().0 as i32)?;
543
544 let regs_to_save = self.machine.list_to_save(calling_convention);
545 for loc in regs_to_save.iter().rev() {
546 self.machine.pop_location(*loc)?;
547 }
548
549 self.machine
551 .pop_location(Location::GPR(self.machine.get_vmctx_reg()))?;
552
553 for loc in self.locals.iter().rev() {
555 if let Location::GPR(_) = *loc {
556 self.machine.pop_location(*loc)?;
557 }
558 }
559 Ok(())
560 }
561
    /// Record `offset` (wasm bytecode offset) as the source location for
    /// the instructions emitted next.
    pub fn set_srcloc(&mut self, offset: u32) {
        self.machine.set_srcloc(offset);
    }
566
    /// Release the resources backing `loc` and hand the location back to
    /// the caller for immediate use as an operand.
    fn get_location_released(
        &mut self,
        loc: (Location<M::GPR, M::SIMD>, CanonicalizeType),
    ) -> Result<LocationWithCanonicalization<M>, CompileError> {
        self.release_locations(&[loc])?;
        Ok(loc)
    }
574
    /// Pop the top of the value stack and release its backing resources.
    ///
    /// Fails with a codegen error if the value stack is empty (malformed
    /// input or a compiler bug).
    fn pop_value_released(&mut self) -> Result<LocationWithCanonicalization<M>, CompileError> {
        let loc = self.value_stack.pop().ok_or_else(|| {
            CompileError::Codegen("pop_value_released: value stack is empty".to_owned())
        })?;
        self.get_location_released(loc)?;
        Ok(loc)
    }
582
    /// Pop two operands and acquire a result location of type `ty`, which
    /// is pushed onto the value stack tagged with `canonicalize`.
    ///
    /// The inputs are released *before* the result is acquired, so `ret`
    /// may reuse one of the input registers.
    fn i2o1_prepare(
        &mut self,
        ty: WpType,
        canonicalize: CanonicalizeType,
    ) -> Result<I2O1<M::GPR, M::SIMD>, CompileError> {
        let loc_b = self.pop_value_released()?.0;
        let loc_a = self.pop_value_released()?.0;
        let ret = self.acquire_location(&ty)?;
        self.value_stack.push((ret, canonicalize));
        Ok(I2O1 { loc_a, loc_b, ret })
    }
595
    /// Emit a call following the native ABI.
    ///
    /// Spills live registers, marshals `params` into ABI argument
    /// locations, 16-byte-aligns the stack, lets `cb` emit the actual call
    /// instruction, then restores the stack and registers.
    fn emit_call_native<
        I: Iterator<Item = Location<M::GPR, M::SIMD>>,
        J: Iterator<Item = WpType>,
        F: FnOnce(&mut Self) -> Result<(), CompileError>,
    >(
        &mut self,
        cb: F,
        params: I,
        params_type: J,
        call_type: NativeCallType,
    ) -> Result<(), CompileError> {
        let params: Vec<_> = params.collect();
        let params_size: Vec<_> = params_type
            .map(|x| match x {
                WpType::F32 | WpType::I32 => Size::S32,
                WpType::V128 => unimplemented!(),
                _ => Size::S64,
            })
            .collect();

        // Spill every register currently in use across the call.
        let used_gprs = self.machine.get_used_gprs();
        let mut used_stack = self.machine.push_used_gpr(&used_gprs)?;

        let used_simds = self.machine.get_used_simd();
        if !used_simds.is_empty() {
            used_stack += self.machine.push_used_simd(&used_simds)?;
        }
        // Keep the register used for the call itself out of the arg moves.
        self.machine
            .reserve_unused_temp_gpr(self.machine.get_grp_for_call());

        let calling_convention = self.calling_convention;

        // Windows fastcall requires 32 bytes of shadow space for the callee.
        let stack_padding: usize = match calling_convention {
            CallingConvention::WindowsFastcall => 32,
            _ => 0,
        };

        // Compute the ABI location of every argument. With
        // `IncludeVMCtxArgument`, slot 0 is reserved for the vmctx pointer.
        let mut stack_offset: usize = 0;
        let mut args: Vec<Location<M::GPR, M::SIMD>> = vec![];
        for (i, _param) in params.iter().enumerate() {
            args.push(self.machine.get_param_location(
                match call_type {
                    NativeCallType::IncludeVMCtxArgument => 1,
                    NativeCallType::Unreachable => 0,
                } + i,
                params_size[i],
                &mut stack_offset,
                calling_convention,
            ));
        }

        // Keep the stack 16-byte aligned at the call instruction.
        let stack_unaligned =
            (self.machine.round_stack_adjust(self.get_stack_offset()) + used_stack + stack_offset)
                % 16;
        if stack_unaligned != 0 {
            stack_offset += 16 - stack_unaligned;
        }
        self.machine.adjust_stack(stack_offset as u32)?;

        // Stack-passed arguments are moved immediately; register-passed
        // ones are collected and sorted first so the register-to-register
        // moves do not clobber each other.
        #[allow(clippy::type_complexity)]
        let mut call_movs: Vec<(Location<M::GPR, M::SIMD>, M::GPR)> = vec![];
        for (i, param) in params.iter().enumerate().rev() {
            let loc = args[i];
            match loc {
                Location::GPR(x) => {
                    call_movs.push((*param, x));
                }
                Location::Memory(_, _) => {
                    self.machine
                        .move_location_for_native(params_size[i], *param, loc)?;
                }
                _ => {
                    return Err(CompileError::Codegen(
                        "emit_call_native loc: unreachable code".to_owned(),
                    ));
                }
            }
        }

        Self::sort_call_movs(&mut call_movs);

        for (loc, gpr) in call_movs {
            if loc != Location::GPR(gpr) {
                self.machine
                    .move_location(Size::S64, loc, Location::GPR(gpr))?;
            }
        }

        // The vmctx pointer always travels as native argument 0.
        if matches!(call_type, NativeCallType::IncludeVMCtxArgument) {
            self.machine.move_location(
                Size::S64,
                Location::GPR(self.machine.get_vmctx_reg()),
                self.machine
                    .get_simple_param_location(0, calling_convention),
            )?;
        }

        if stack_padding > 0 {
            self.machine.adjust_stack(stack_padding as u32)?;
        }
        // Release our hold on the call register before emitting the call.
        self.machine.release_gpr(self.machine.get_grp_for_call());

        let begin = self.machine.assembler_get_offset().0;
        cb(self)?;
        // Mark unreachable-stub call sites so a fault inside them maps
        // back to `UnreachableCodeReached`.
        if matches!(call_type, NativeCallType::Unreachable) {
            let end = self.machine.assembler_get_offset().0;
            self.machine.mark_address_range_with_trap_code(
                TrapCode::UnreachableCodeReached,
                begin,
                end,
            );
        }

        // Undo argument area + alignment + shadow space.
        if stack_offset + stack_padding > 0 {
            self.machine.restore_stack(
                self.machine
                    .round_stack_adjust(stack_offset + stack_padding) as u32,
            )?;
            if !stack_offset.is_multiple_of(8) {
                return Err(CompileError::Codegen(
                    "emit_call_native: Bad restoring stack alignement".to_owned(),
                ));
            }
        }

        // Restore spilled registers; SIMD were pushed last, so pop first.
        if !used_simds.is_empty() {
            self.machine.pop_used_simd(&used_simds)?;
        }

        self.machine.pop_used_gpr(&used_gprs)?;

        Ok(())
    }
746
747 fn _emit_call_native_label<
749 I: Iterator<Item = Location<M::GPR, M::SIMD>>,
750 J: Iterator<Item = WpType>,
751 >(
752 &mut self,
753 label: Label,
754 params: I,
755 params_type: J,
756 ) -> Result<(), CompileError> {
757 self.emit_call_native(
758 |this| this.machine.emit_call_label(label),
759 params,
760 params_type,
761 NativeCallType::IncludeVMCtxArgument,
762 )?;
763 Ok(())
764 }
765
    /// Run `cb` with the access context for memory 0: whether an explicit
    /// bounds check is needed, whether the memory is imported, the vmctx
    /// offset of its definition, and the OOB / unaligned-atomic trap labels.
    fn op_memory<
        F: FnOnce(&mut Self, bool, bool, i32, Label, Label) -> Result<(), CompileError>,
    >(
        &mut self,
        cb: F,
    ) -> Result<(), CompileError> {
        // Static memories skip the explicit bounds check (presumably
        // relying on guard pages); dynamic memories cannot.
        let need_check = match self.memory_styles[MemoryIndex::new(0)] {
            MemoryStyle::Static { .. } => false,
            MemoryStyle::Dynamic { .. } => true,
        };

        // Imported memories precede local ones, so if any memory is
        // imported, index 0 refers to an import.
        let offset = if self.module.num_imported_memories != 0 {
            self.vmoffsets
                .vmctx_vmmemory_import_definition(MemoryIndex::new(0))
        } else {
            self.vmoffsets
                .vmctx_vmmemory_definition(LocalMemoryIndex::new(0))
        };
        cb(
            self,
            need_check,
            self.module.num_imported_memories != 0,
            offset as i32,
            self.special_labels.heap_access_oob,
            self.special_labels.unaligned_atomic,
        )
    }
794
    /// Emit the function prologue: machine frame setup, local
    /// initialization, the implicit function-level control frame, and the
    /// stack-overflow check.
    fn emit_head(&mut self) -> Result<(), CompileError> {
        self.machine.emit_function_prolog()?;

        self.locals = self.init_locals(
            self.local_types.len(),
            self.signature.clone(),
            self.calling_convention,
        )?;

        // NOTE(review): fixed 32-byte reservation — purpose (scratch /
        // shadow space?) is not evident from this file; confirm against
        // the `Machine` backend.
        self.machine.adjust_stack(32)?;

        // The whole body runs inside an implicit control frame whose
        // results are the function's results.
        self.control_stack.push(ControlFrame {
            state: ControlState::Function,
            label: self.machine.get_label(),
            returns: self
                .signature
                .results()
                .iter()
                .map(|&x| type_to_wp_type(x))
                .collect(),
            value_stack_depth: 0,
        });

        self.machine.insert_stackoverflow();

        Ok(())
    }
828
    /// Create a code generator for local function `local_func_index` and
    /// immediately emit its prologue (`emit_head`).
    ///
    /// `local_types_excluding_arguments` lists only the declared locals;
    /// the full local list is the parameters followed by these.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        module: &'a ModuleInfo,
        config: &'a Singlepass,
        vmoffsets: &'a VMOffsets,
        memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
        _table_styles: &'a PrimaryMap<TableIndex, TableStyle>,
        local_func_index: LocalFunctionIndex,
        local_types_excluding_arguments: &[WpType],
        machine: M,
        calling_convention: CallingConvention,
    ) -> Result<FuncGen<'a, M>, CompileError> {
        let func_index = module.func_index(local_func_index);
        let sig_index = module.functions[func_index];
        let signature = module.signatures[sig_index].clone();

        // Full local list = parameters followed by declared locals.
        let mut local_types: Vec<_> = signature
            .params()
            .iter()
            .map(|&x| type_to_wp_type(x))
            .collect();
        local_types.extend_from_slice(local_types_excluding_arguments);

        let mut machine = machine;
        // One shared label per trap kind, allocated up front.
        let special_labels = SpecialLabelSet {
            integer_division_by_zero: machine.get_label(),
            integer_overflow: machine.get_label(),
            heap_access_oob: machine.get_label(),
            table_access_oob: machine.get_label(),
            indirect_call_null: machine.get_label(),
            bad_signature: machine.get_label(),
            unaligned_atomic: machine.get_label(),
        };

        let mut fg = FuncGen {
            module,
            config,
            vmoffsets,
            memory_styles,
            signature,
            locals: vec![], // populated by `init_locals` via `emit_head`
            local_types,
            value_stack: vec![],
            control_stack: vec![],
            stack_offset: MachineStackOffset(0),
            save_area_offset: None,
            machine,
            unreachable_depth: 0,
            local_func_index,
            relocations: vec![],
            special_labels,
            calling_convention,
        };
        fg.emit_head()?;
        Ok(fg)
    }
886
    /// True while at least one control frame (including the implicit
    /// function frame) is still open.
    pub fn has_control_frames(&self) -> bool {
        !self.control_stack.is_empty()
    }
890
891 pub fn feed_operator(&mut self, op: Operator) -> Result<(), CompileError> {
892 let was_unreachable;
893
894 if self.unreachable_depth > 0 {
895 was_unreachable = true;
896
897 match op {
898 Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } => {
899 self.unreachable_depth += 1;
900 }
901 Operator::End => {
902 self.unreachable_depth -= 1;
903 }
904 Operator::Else => {
905 if self.unreachable_depth == 1
907 && let Some(ControlState::If(_)) =
908 self.control_stack.last().map(|x| x.state)
909 {
910 self.unreachable_depth -= 1;
911 }
912 }
913 _ => {}
914 }
915 if self.unreachable_depth > 0 {
916 return Ok(());
917 }
918 } else {
919 was_unreachable = false;
920 }
921
922 match op {
923 Operator::GlobalGet { global_index } => {
924 let global_index = GlobalIndex::from_u32(global_index);
925
926 let ty = type_to_wp_type(self.module.globals[global_index].ty);
927 let loc = self.acquire_location(&ty)?;
928 self.value_stack.push((loc, CanonicalizeType::None));
929
930 let tmp = self.machine.acquire_temp_gpr().unwrap();
931
932 let src = if let Some(local_global_index) =
933 self.module.local_global_index(global_index)
934 {
935 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
936 self.machine.emit_relaxed_mov(
937 Size::S64,
938 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
939 Location::GPR(tmp),
940 )?;
941 Location::Memory(tmp, 0)
942 } else {
943 let offset = self
945 .vmoffsets
946 .vmctx_vmglobal_import_definition(global_index);
947 self.machine.emit_relaxed_mov(
948 Size::S64,
949 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
950 Location::GPR(tmp),
951 )?;
952 Location::Memory(tmp, 0)
953 };
954
955 self.machine.emit_relaxed_mov(Size::S64, src, loc)?;
956
957 self.machine.release_gpr(tmp);
958 }
959 Operator::GlobalSet { global_index } => {
960 let global_index = GlobalIndex::from_u32(global_index);
961 let tmp = self.machine.acquire_temp_gpr().unwrap();
962 let dst = if let Some(local_global_index) =
963 self.module.local_global_index(global_index)
964 {
965 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
966 self.machine.emit_relaxed_mov(
967 Size::S64,
968 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
969 Location::GPR(tmp),
970 )?;
971 Location::Memory(tmp, 0)
972 } else {
973 let offset = self
975 .vmoffsets
976 .vmctx_vmglobal_import_definition(global_index);
977 self.machine.emit_relaxed_mov(
978 Size::S64,
979 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
980 Location::GPR(tmp),
981 )?;
982 Location::Memory(tmp, 0)
983 };
984 let (loc, canonicalize) = self.pop_value_released()?;
985 if let Some(canonicalize_size) = canonicalize.to_size() {
986 if self.machine.arch_supports_canonicalize_nan()
987 && self.config.enable_nan_canonicalization
988 {
989 self.machine.canonicalize_nan(canonicalize_size, loc, dst)?;
990 } else {
991 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
992 }
993 } else {
994 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
995 }
996 self.machine.release_gpr(tmp);
997 }
998 Operator::LocalGet { local_index } => {
999 let local_index = local_index as usize;
1000 let ret = self.acquire_location(&WpType::I64)?;
1001 self.machine
1002 .emit_relaxed_mov(Size::S64, self.locals[local_index], ret)?;
1003 self.value_stack.push((ret, CanonicalizeType::None));
1004 }
1005 Operator::LocalSet { local_index } => {
1006 let local_index = local_index as usize;
1007 let (loc, canonicalize) = self.pop_value_released()?;
1008
1009 if self.local_types[local_index].is_float()
1010 && let Some(canonicalize_size) = canonicalize.to_size()
1011 {
1012 if self.machine.arch_supports_canonicalize_nan()
1013 && self.config.enable_nan_canonicalization
1014 {
1015 self.machine.canonicalize_nan(
1016 canonicalize_size,
1017 loc,
1018 self.locals[local_index],
1019 )
1020 } else {
1021 self.machine
1022 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1023 }
1024 } else {
1025 self.machine
1026 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1027 }?;
1028 }
1029 Operator::LocalTee { local_index } => {
1030 let local_index = local_index as usize;
1031 let (loc, canonicalize) = *self.value_stack.last().unwrap();
1032
1033 if self.local_types[local_index].is_float()
1034 && let Some(canonicalize_size) = canonicalize.to_size()
1035 {
1036 if self.machine.arch_supports_canonicalize_nan()
1037 && self.config.enable_nan_canonicalization
1038 {
1039 self.machine.canonicalize_nan(
1040 canonicalize_size,
1041 loc,
1042 self.locals[local_index],
1043 )
1044 } else {
1045 self.machine
1046 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1047 }
1048 } else {
1049 self.machine
1050 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1051 }?;
1052 }
1053 Operator::I32Const { value } => {
1054 self.value_stack
1055 .push((Location::Imm32(value as u32), CanonicalizeType::None));
1056 }
1057 Operator::I32Add => {
1058 let I2O1 { loc_a, loc_b, ret } =
1059 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1060 self.machine.emit_binop_add32(loc_a, loc_b, ret)?;
1061 }
1062 Operator::I32Sub => {
1063 let I2O1 { loc_a, loc_b, ret } =
1064 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1065 self.machine.emit_binop_sub32(loc_a, loc_b, ret)?;
1066 }
1067 Operator::I32Mul => {
1068 let I2O1 { loc_a, loc_b, ret } =
1069 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1070 self.machine.emit_binop_mul32(loc_a, loc_b, ret)?;
1071 }
1072 Operator::I32DivU => {
1073 let I2O1 { loc_a, loc_b, ret } =
1074 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1075 self.machine.emit_binop_udiv32(
1076 loc_a,
1077 loc_b,
1078 ret,
1079 self.special_labels.integer_division_by_zero,
1080 )?;
1081 }
1082 Operator::I32DivS => {
1083 let I2O1 { loc_a, loc_b, ret } =
1084 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1085 self.machine.emit_binop_sdiv32(
1086 loc_a,
1087 loc_b,
1088 ret,
1089 self.special_labels.integer_division_by_zero,
1090 self.special_labels.integer_overflow,
1091 )?;
1092 }
1093 Operator::I32RemU => {
1094 let I2O1 { loc_a, loc_b, ret } =
1095 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1096 self.machine.emit_binop_urem32(
1097 loc_a,
1098 loc_b,
1099 ret,
1100 self.special_labels.integer_division_by_zero,
1101 )?;
1102 }
1103 Operator::I32RemS => {
1104 let I2O1 { loc_a, loc_b, ret } =
1105 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1106 self.machine.emit_binop_srem32(
1107 loc_a,
1108 loc_b,
1109 ret,
1110 self.special_labels.integer_division_by_zero,
1111 )?;
1112 }
1113 Operator::I32And => {
1114 let I2O1 { loc_a, loc_b, ret } =
1115 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1116 self.machine.emit_binop_and32(loc_a, loc_b, ret)?;
1117 }
1118 Operator::I32Or => {
1119 let I2O1 { loc_a, loc_b, ret } =
1120 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1121 self.machine.emit_binop_or32(loc_a, loc_b, ret)?;
1122 }
1123 Operator::I32Xor => {
1124 let I2O1 { loc_a, loc_b, ret } =
1125 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1126 self.machine.emit_binop_xor32(loc_a, loc_b, ret)?;
1127 }
1128 Operator::I32Eq => {
1129 let I2O1 { loc_a, loc_b, ret } =
1130 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1131 self.machine.i32_cmp_eq(loc_a, loc_b, ret)?;
1132 }
1133 Operator::I32Ne => {
1134 let I2O1 { loc_a, loc_b, ret } =
1135 self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
1136 self.machine.i32_cmp_ne(loc_a, loc_b, ret)?;
1137 }
1138 Operator::I32Eqz => {
1139 let loc_a = self.pop_value_released()?.0;
1140 let ret = self.acquire_location(&WpType::I32)?;
1141 self.machine.i32_cmp_eq(loc_a, Location::Imm32(0), ret)?;
1142 self.value_stack.push((ret, CanonicalizeType::None));
1143 }
1144 Operator::I32Clz => {
1145 let loc = self.pop_value_released()?.0;
1146 let ret = self.acquire_location(&WpType::I32)?;
1147 self.value_stack.push((ret, CanonicalizeType::None));
1148 self.machine.i32_clz(loc, ret)?;
1149 }
1150 Operator::I32Ctz => {
1151 let loc = self.pop_value_released()?.0;
1152 let ret = self.acquire_location(&WpType::I32)?;
1153 self.value_stack.push((ret, CanonicalizeType::None));
1154 self.machine.i32_ctz(loc, ret)?;
1155 }
1156 Operator::I32Popcnt => {
1157 let loc = self.pop_value_released()?.0;
1158 let ret = self.acquire_location(&WpType::I32)?;
1159 self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_popcnt(loc, ret)?;
            }
            // --- i32 shifts and rotates ---
            // i2o1_prepare pops two i32 operands off the value stack and
            // allocates an i32 result slot (pushed back onto the stack by the
            // helper); CanonicalizeType::None because NaN canonicalization is
            // a float-only concern.
            Operator::I32Shl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_shl(loc_a, loc_b, ret)?;
            }
            Operator::I32ShrU => {
                // Logical (zero-filling) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_shr(loc_a, loc_b, ret)?;
            }
            Operator::I32ShrS => {
                // Arithmetic (sign-propagating) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_sar(loc_a, loc_b, ret)?;
            }
            Operator::I32Rotl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_rol(loc_a, loc_b, ret)?;
            }
            Operator::I32Rotr => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_ror(loc_a, loc_b, ret)?;
            }
            // --- i32 comparisons ---
            // Each comparison consumes two i32 operands and produces an i32
            // boolean result; _u variants use unsigned ordering, _s signed.
            Operator::I32LtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_lt_u(loc_a, loc_b, ret)?;
            }
            Operator::I32LeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_le_u(loc_a, loc_b, ret)?;
            }
            Operator::I32GtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_gt_u(loc_a, loc_b, ret)?;
            }
            Operator::I32GeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ge_u(loc_a, loc_b, ret)?;
            }
            Operator::I32LtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_lt_s(loc_a, loc_b, ret)?;
            }
            Operator::I32LeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_le_s(loc_a, loc_b, ret)?;
            }
            Operator::I32GtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_gt_s(loc_a, loc_b, ret)?;
            }
            Operator::I32GeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ge_s(loc_a, loc_b, ret)?;
            }
            Operator::I64Const { value } => {
                // Constants are pushed as immediates; no machine code is
                // emitted until the value is actually consumed.
                let value = value as u64;
                self.value_stack
                    .push((Location::Imm64(value), CanonicalizeType::None));
            }
            // --- i64 arithmetic / bitwise ---
            // Same two-in-one-out pattern as the i32 arms above.
            Operator::I64Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_add64(loc_a, loc_b, ret)?;
            }
            Operator::I64Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_sub64(loc_a, loc_b, ret)?;
            }
            Operator::I64Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_mul64(loc_a, loc_b, ret)?;
            }
            // Division and remainder receive the shared trap labels so the
            // emitted code can branch there on a zero divisor (and, for the
            // signed-division case, on overflow).
            Operator::I64DivU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_udiv64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64DivS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_sdiv64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                    // Extra overflow target (presumably the i64::MIN / -1
                    // case mandated to trap by the wasm spec).
                    self.special_labels.integer_overflow,
                )?;
            }
            Operator::I64RemU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_urem64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64RemS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_srem64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64And => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_and64(loc_a, loc_b, ret)?;
            }
            Operator::I64Or => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_or64(loc_a, loc_b, ret)?;
            }
            Operator::I64Xor => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_xor64(loc_a, loc_b, ret)?;
            }
            Operator::I64Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::I64Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::I64Eqz => {
                // Unary test implemented as an equality compare against an
                // immediate zero; the result is an i64-slot boolean.
                let loc_a = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
                self.value_stack.push((ret, CanonicalizeType::None));
            }
            // --- i64 bit counting (unary: pop one, allocate result, emit) ---
            Operator::I64Clz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_clz(loc, ret)?;
            }
            Operator::I64Ctz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_ctz(loc, ret)?;
            }
            Operator::I64Popcnt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_popcnt(loc, ret)?;
            }
            // --- i64 shifts and rotates (mirror of the i32 arms above) ---
            Operator::I64Shl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_shl(loc_a, loc_b, ret)?;
            }
            Operator::I64ShrU => {
                // Logical (zero-filling) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_shr(loc_a, loc_b, ret)?;
            }
            Operator::I64ShrS => {
                // Arithmetic (sign-propagating) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_sar(loc_a, loc_b, ret)?;
            }
            Operator::I64Rotl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_rol(loc_a, loc_b, ret)?;
            }
            Operator::I64Rotr => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_ror(loc_a, loc_b, ret)?;
            }
            // --- i64 comparisons: _u unsigned ordering, _s signed ---
            Operator::I64LtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_lt_u(loc_a, loc_b, ret)?;
            }
            Operator::I64LeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_le_u(loc_a, loc_b, ret)?;
            }
            Operator::I64GtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_gt_u(loc_a, loc_b, ret)?;
            }
            Operator::I64GeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ge_u(loc_a, loc_b, ret)?;
            }
            Operator::I64LtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_lt_s(loc_a, loc_b, ret)?;
            }
            Operator::I64LeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_le_s(loc_a, loc_b, ret)?;
            }
            Operator::I64GtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_gt_s(loc_a, loc_b, ret)?;
            }
            Operator::I64GeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ge_s(loc_a, loc_b, ret)?;
            }
            Operator::I64ExtendI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                // Copy the low 32 bits first.
                self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;

                // A 32-bit store into a stack/memory slot leaves the slot's
                // upper half stale, so explicitly zero it to complete the
                // zero-extension. `off + 4` addresses the upper 32 bits
                // (assumes little-endian slot layout — consistent with the
                // targets this backend supports).
                if let Location::Memory(base, off) = ret {
                    self.machine.emit_relaxed_mov(
                        Size::S32,
                        Location::Imm32(0),
                        Location::Memory(base, off + 4),
                    )?;
                }
            }
            Operator::I64ExtendI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine
                    .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
            }
            // --- sign-extension operators (wasm sign-extension proposal) ---
            // All follow the same pop / allocate / push / emit shape; only
            // the source and destination sizes differ.
            Operator::I32Extend8S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S8, loc, Size::S32, ret)?;
            }
            Operator::I32Extend16S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S16, loc, Size::S32, ret)?;
            }
            Operator::I64Extend8S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S8, loc, Size::S64, ret)?;
            }
            Operator::I64Extend16S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S16, loc, Size::S64, ret)?;
            }
            Operator::I64Extend32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
            }
            Operator::I32WrapI64 => {
                // Wrapping is just a 32-bit move that keeps the low half.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
            }
1471
            Operator::F32Const { value } => {
                // Push the raw IEEE-754 bit pattern as an integer immediate;
                // no code is emitted until the value is used.
                self.value_stack
                    .push((Location::Imm32(value.bits()), CanonicalizeType::None));
            }
            // --- f32 arithmetic ---
            // Locations are acquired as WpType::F64 (FP slots are handled at
            // one uniform width here, matching the F64 arms below). Results
            // are tagged CanonicalizeType::F32 so a pending NaN
            // canonicalization can be applied lazily where the value escapes
            // (see the reinterpret and call arms).
            Operator::F32Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_add(loc_a, loc_b, ret)?;
            }
            Operator::F32Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_sub(loc_a, loc_b, ret)?;
            }
            Operator::F32Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_mul(loc_a, loc_b, ret)?;
            }
            Operator::F32Div => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_div(loc_a, loc_b, ret)?;
            }
            // NOTE(review): min/max results carry no canonicalization tag —
            // presumably f32_max/f32_min produce already-canonical NaNs;
            // confirm against the machine implementations.
            Operator::F32Max => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f32_max(loc_a, loc_b, ret)?;
            }
            Operator::F32Min => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f32_min(loc_a, loc_b, ret)?;
            }
            // --- f32 comparisons: two f32 operands, i32 boolean result ---
            Operator::F32Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::F32Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::F32Lt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_lt(loc_a, loc_b, ret)?;
            }
            Operator::F32Le => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_le(loc_a, loc_b, ret)?;
            }
            Operator::F32Gt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_gt(loc_a, loc_b, ret)?;
            }
            Operator::F32Ge => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_ge(loc_a, loc_b, ret)?;
            }
            // --- f32 unary rounding / sqrt (results tagged F32) ---
            Operator::F32Nearest => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_nearest(loc, ret)?;
            }
            Operator::F32Floor => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_floor(loc, ret)?;
            }
            Operator::F32Ceil => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_ceil(loc, ret)?;
            }
            Operator::F32Trunc => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_trunc(loc, ret)?;
            }
            Operator::F32Sqrt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_sqrt(loc, ret)?;
            }

            Operator::F32Copysign => {
                // Copysign is done on the integer side: both operands are
                // moved into temporary GPRs, combined bitwise, then moved to
                // the result slot.
                let loc_b = self.pop_value_released()?;
                let loc_a = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let tmp1 = self.machine.acquire_temp_gpr().unwrap();
                let tmp2 = self.machine.acquire_temp_gpr().unwrap();

                if self.machine.arch_supports_canonicalize_nan()
                    && self.config.enable_nan_canonicalization
                {
                    // Materialize any pending NaN canonicalization while
                    // loading each operand into its temp GPR; operands with
                    // no pending tag are moved as-is.
                    for ((loc, fp), tmp) in [(loc_a, tmp1), (loc_b, tmp2)].iter() {
                        if fp.to_size().is_some() {
                            self.machine
                                .canonicalize_nan(Size::S32, *loc, Location::GPR(*tmp))?
                        } else {
                            self.machine
                                .move_location(Size::S32, *loc, Location::GPR(*tmp))?
                        }
                    }
                } else {
                    self.machine
                        .move_location(Size::S32, loc_a.0, Location::GPR(tmp1))?;
                    self.machine
                        .move_location(Size::S32, loc_b.0, Location::GPR(tmp2))?;
                }
                // tmp1 receives tmp1's magnitude with tmp2's sign bit.
                self.machine.emit_i32_copysign(tmp1, tmp2)?;
                self.machine
                    .move_location(Size::S32, Location::GPR(tmp1), ret)?;
                self.machine.release_gpr(tmp2);
                self.machine.release_gpr(tmp1);
            }

            Operator::F32Abs => {
                // NOTE(review): the incoming canonicalization tag is
                // discarded here (`.0` + push None), whereas the parallel
                // F64Abs arm below preserves it — confirm this asymmetry is
                // intentional.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.f32_abs(loc, ret)?;
            }

            Operator::F32Neg => {
                // NOTE(review): same tag-dropping asymmetry as F32Abs; see
                // F64Neg below for the tag-preserving counterpart.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.f32_neg(loc, ret)?;
            }
1620
            Operator::F64Const { value } => {
                // Push the raw IEEE-754 bit pattern as an integer immediate.
                self.value_stack
                    .push((Location::Imm64(value.bits()), CanonicalizeType::None));
            }
            // --- f64 arithmetic (results tagged F64 for lazy NaN
            // canonicalization; see the f32 section above) ---
            Operator::F64Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_add(loc_a, loc_b, ret)?;
            }
            Operator::F64Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_sub(loc_a, loc_b, ret)?;
            }
            Operator::F64Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_mul(loc_a, loc_b, ret)?;
            }
            Operator::F64Div => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_div(loc_a, loc_b, ret)?;
            }
            // min/max carry no tag — same convention as the f32 arms.
            Operator::F64Max => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f64_max(loc_a, loc_b, ret)?;
            }
            Operator::F64Min => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f64_min(loc_a, loc_b, ret)?;
            }
            // --- f64 comparisons: i32 boolean result ---
            Operator::F64Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::F64Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::F64Lt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_lt(loc_a, loc_b, ret)?;
            }
            Operator::F64Le => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_le(loc_a, loc_b, ret)?;
            }
            Operator::F64Gt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_gt(loc_a, loc_b, ret)?;
            }
            Operator::F64Ge => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_ge(loc_a, loc_b, ret)?;
            }
            // --- f64 unary rounding / sqrt (results tagged F64) ---
            Operator::F64Nearest => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_nearest(loc, ret)?;
            }
            Operator::F64Floor => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_floor(loc, ret)?;
            }
            Operator::F64Ceil => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_ceil(loc, ret)?;
            }
            Operator::F64Trunc => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_trunc(loc, ret)?;
            }
            Operator::F64Sqrt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_sqrt(loc, ret)?;
            }

            Operator::F64Copysign => {
                // Same integer-side bit manipulation as F32Copysign, at
                // 64-bit width.
                let loc_b = self.pop_value_released()?;
                let loc_a = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let tmp1 = self.machine.acquire_temp_gpr().unwrap();
                let tmp2 = self.machine.acquire_temp_gpr().unwrap();

                if self.machine.arch_supports_canonicalize_nan()
                    && self.config.enable_nan_canonicalization
                {
                    // Materialize pending NaN canonicalization while loading
                    // each operand into its temp GPR.
                    for ((loc, fp), tmp) in [(loc_a, tmp1), (loc_b, tmp2)].iter() {
                        if fp.to_size().is_some() {
                            self.machine
                                .canonicalize_nan(Size::S64, *loc, Location::GPR(*tmp))?
                        } else {
                            self.machine
                                .move_location(Size::S64, *loc, Location::GPR(*tmp))?
                        }
                    }
                } else {
                    self.machine
                        .move_location(Size::S64, loc_a.0, Location::GPR(tmp1))?;
                    self.machine
                        .move_location(Size::S64, loc_b.0, Location::GPR(tmp2))?;
                }
                // tmp1 receives tmp1's magnitude with tmp2's sign bit.
                self.machine.emit_i64_copysign(tmp1, tmp2)?;
                self.machine
                    .move_location(Size::S64, Location::GPR(tmp1), ret)?;

                self.machine.release_gpr(tmp2);
                self.machine.release_gpr(tmp1);
            }

            Operator::F64Abs => {
                // abs only clears the sign bit, so any pending NaN
                // canonicalization tag is carried through unchanged.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize));

                self.machine.f64_abs(loc, ret)?;
            }

            Operator::F64Neg => {
                // neg only flips the sign bit; the tag is carried through.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize));

                self.machine.f64_neg(loc, ret)?;
            }
1766
            Operator::F64PromoteF32 => {
                // The pending canonicalization tag is promoted alongside the
                // value (F32 tag becomes its F64 counterpart).
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize.promote()?));
                self.machine.convert_f64_f32(loc, ret)?;
            }
            Operator::F32DemoteF64 => {
                // Tag is demoted alongside the value; the result slot is
                // acquired at F64 width, consistent with the other f32 arms.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize.demote()?));
                self.machine.convert_f32_f64(loc, ret)?;
            }

            Operator::I32ReinterpretF32 => {
                // A canonicalization tag cannot survive reinterpretation to
                // integer bits, so any pending canonicalization must be
                // materialized here; otherwise this is a plain bit move.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if !self.machine.arch_supports_canonicalize_nan()
                    || !self.config.enable_nan_canonicalization
                    || matches!(canonicalize, CanonicalizeType::None)
                {
                    // No pending canonicalization: skip the move entirely if
                    // source and destination already coincide.
                    if loc != ret {
                        self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
                    }
                } else {
                    self.machine.canonicalize_nan(Size::S32, loc, ret)?;
                }
            }
            Operator::F32ReinterpretI32 => {
                // int -> float reinterpretation is always a plain bit move;
                // the integer side never carries a canonicalization tag.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if loc != ret {
                    self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
                }
            }

            Operator::I64ReinterpretF64 => {
                // 64-bit analogue of I32ReinterpretF32.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if !self.machine.arch_supports_canonicalize_nan()
                    || !self.config.enable_nan_canonicalization
                    || matches!(canonicalize, CanonicalizeType::None)
                {
                    if loc != ret {
                        self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
                    }
                } else {
                    self.machine.canonicalize_nan(Size::S64, loc, ret)?;
                }
            }
            Operator::F64ReinterpretI64 => {
                // 64-bit analogue of F32ReinterpretI32: plain bit move.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if loc != ret {
                    self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
                }
            }
1831
            // --- float -> int truncation ---
            // convert_iNN_fMM(loc, ret, signed, saturating): the third
            // argument selects signed vs. unsigned conversion, the fourth
            // selects the saturating (TruncSat*) variant over the trapping
            // one, matching the operator names below.
            Operator::I32TruncF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, false, false)?;
            }

            Operator::I32TruncSatF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, false, true)?;
            }

            Operator::I32TruncF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, true, false)?;
            }
            Operator::I32TruncSatF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, true, true)?;
            }

            Operator::I64TruncF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, true, false)?;
            }

            Operator::I64TruncSatF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, true, true)?;
            }

            Operator::I64TruncF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, false, false)?;
            }
            Operator::I64TruncSatF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, false, true)?;
            }

            Operator::I32TruncF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, false, false)?;
            }

            Operator::I32TruncSatF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, false, true)?;
            }

            Operator::I32TruncF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, true, false)?;
            }

            Operator::I32TruncSatF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, true, true)?;
            }

            Operator::I64TruncF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, true, false)?;
            }

            Operator::I64TruncSatF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, true, true)?;
            }

            Operator::I64TruncF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, false, false)?;
            }

            Operator::I64TruncSatF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, false, true)?;
            }
1957
            // --- int -> float conversion ---
            // convert_fNN_iMM(loc, signed, ret): the middle flag selects
            // signed vs. unsigned interpretation of the integer operand.
            // Results carry no canonicalization tag (an integer operand
            // cannot produce a NaN needing canonicalization).
            Operator::F32ConvertI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i32(loc, true, ret)?;
            }
            Operator::F32ConvertI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i32(loc, false, ret)?;
            }
            Operator::F32ConvertI64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i64(loc, true, ret)?;
            }
            Operator::F32ConvertI64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i64(loc, false, ret)?;
            }

            Operator::F64ConvertI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i32(loc, true, ret)?;
            }
            Operator::F64ConvertI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i32(loc, false, ret)?;
            }
            Operator::F64ConvertI64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i64(loc, true, ret)?;
            }
            Operator::F64ConvertI64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i64(loc, false, ret)?;
            }
2015
            Operator::Call { function_index } => {
                let function_index = function_index as usize;

                // Resolve the callee's signature from the module.
                let sig_index = *self
                    .module
                    .functions
                    .get(FunctionIndex::new(function_index))
                    .unwrap();
                let sig = self.module.signatures.get(sig_index).unwrap();
                let param_types: SmallVec<[WpType; 8]> =
                    sig.params().iter().cloned().map(type_to_wp_type).collect();
                let return_types: SmallVec<[WpType; 1]> =
                    sig.results().iter().cloned().map(type_to_wp_type).collect();

                // Drain the arguments off the value stack; register
                // locations are released now, stack locations only after the
                // call (they are still live as call arguments).
                let params: SmallVec<[_; 8]> = self
                    .value_stack
                    .drain(self.value_stack.len() - param_types.len()..)
                    .collect();
                self.release_locations_only_regs(&params)?;

                // Materialize any pending NaN canonicalization in place on
                // float arguments before they cross the call boundary.
                if self.machine.arch_supports_canonicalize_nan()
                    && self.config.enable_nan_canonicalization
                {
                    for (loc, canonicalize) in params.iter() {
                        if let Some(size) = canonicalize.to_size() {
                            self.machine.canonicalize_nan(size, *loc, *loc)?;
                        }
                    }
                }

                // Imported functions are called through a per-import custom
                // section (indexed by the function index — presumably a
                // generated trampoline; confirm against the section layout);
                // local functions get a direct relocation.
                let reloc_target = if function_index < self.module.num_imported_functions {
                    RelocationTarget::CustomSection(SectionIndex::new(function_index))
                } else {
                    RelocationTarget::LocalFunc(LocalFunctionIndex::new(
                        function_index - self.module.num_imported_functions,
                    ))
                };
                let calling_convention = self.calling_convention;

                self.emit_call_native(
                    |this| {
                        // The call site is marked so a fault here is
                        // reported as a stack-overflow trap.
                        let offset = this
                            .machine
                            .mark_instruction_with_trap_code(TrapCode::StackOverflow);
                        let mut relocations = this
                            .machine
                            .emit_call_with_reloc(calling_convention, reloc_target)?;
                        this.machine.mark_instruction_address_end(offset);
                        this.relocations.append(&mut relocations);
                        Ok(())
                    },
                    params.iter().map(|(loc, _)| *loc),
                    param_types.iter().copied(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;

                // Arguments' stack slots are no longer needed.
                self.release_locations_only_stack(&params)?;

                // Move the return value (only the first result is handled —
                // multi-value returns are not supported here) from the
                // conventional return register into a fresh slot.
                if !return_types.is_empty() {
                    let ret = self.acquire_location(&return_types[0])?;
                    self.value_stack.push((ret, CanonicalizeType::None));
                    if return_types[0].is_float() {
                        self.machine.move_location(
                            Size::S64,
                            Location::SIMD(self.machine.get_simd_for_ret()),
                            ret,
                        )?;
                    } else {
                        self.machine.move_location(
                            Size::S64,
                            Location::GPR(self.machine.get_gpr_for_ret()),
                            ret,
                        )?;
                    }
                }
            }
            // NOTE: this arm continues past the end of this view; only the
            // visible prefix is documented here.
            Operator::CallIndirect {
                type_index,
                table_index,
            } => {
                let table_index = TableIndex::new(table_index as _);
                let index = SignatureIndex::new(type_index as usize);
                let sig = self.module.signatures.get(index).unwrap();
                let param_types: SmallVec<[WpType; 8]> =
                    sig.params().iter().cloned().map(type_to_wp_type).collect();
                let return_types: SmallVec<[WpType; 1]> =
                    sig.results().iter().cloned().map(type_to_wp_type).collect();

                // The table element index sits on top of the value stack,
                // above the call arguments.
                let func_index = self.pop_value_released()?.0;

                let params: SmallVec<[_; 8]> = self
                    .value_stack
                    .drain(self.value_stack.len() - param_types.len()..)
                    .collect();
                self.release_locations_only_regs(&params)?;

                // Materialize pending NaN canonicalization on float
                // arguments before the call (same as the direct-call arm).
                if self.machine.arch_supports_canonicalize_nan()
                    && self.config.enable_nan_canonicalization
                {
                    for (loc, canonicalize) in params.iter() {
                        if let Some(size) = canonicalize.to_size() {
                            self.machine.canonicalize_nan(size, *loc, *loc)?;
                        }
                    }
                }

                let table_base = self.machine.acquire_temp_gpr().unwrap();
                let table_count = self.machine.acquire_temp_gpr().unwrap();
                let sigidx = self.machine.acquire_temp_gpr().unwrap();

                // Load the table's base pointer and current element count:
                // a local table's definition lives directly in the vmctx,
                // while an imported table requires an extra indirection
                // through its import record.
                if let Some(local_table_index) = self.module.local_table_index(table_index) {
                    let (vmctx_offset_base, vmctx_offset_len) = (
                        self.vmoffsets.vmctx_vmtable_definition(local_table_index),
                        self.vmoffsets
                            .vmctx_vmtable_definition_current_elements(local_table_index),
                    );
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_base as i32),
                        Location::GPR(table_base),
                    )?;
                    self.machine.move_location(
                        Size::S32,
                        Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_len as i32),
                        Location::GPR(table_count),
                    )?;
                } else {
                    // Imported table: fetch the pointer to the table
                    // definition, then read count and base through it.
                    let import_offset = self.vmoffsets.vmctx_vmtable_import(table_index);
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(self.machine.get_vmctx_reg(), import_offset as i32),
                        Location::GPR(table_base),
                    )?;

                    self.machine.move_location(
                        Size::S32,
                        Location::Memory(
                            table_base,
                            self.vmoffsets.vmtable_definition_current_elements() as _,
                        ),
                        Location::GPR(table_count),
                    )?;

                    // table_base now holds the element storage base
                    // (overwrites the definition pointer, which is no
                    // longer needed).
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(table_base, self.vmoffsets.vmtable_definition_base() as _),
                        Location::GPR(table_base),
                    )?;
                }

                // Bounds check: trap if count <= index (unsigned), i.e. the
                // element index is out of range.
                self.machine.jmp_on_condition(
                    UnsignedCondition::BelowEqual,
                    Size::S32,
                    Location::GPR(table_count),
                    func_index,
                    self.special_labels.table_access_oob,
                )?;
                // Compute the element address:
                // table_count = base + index * size_of_vm_funcref.
                self.machine
                    .move_location(Size::S32, func_index, Location::GPR(table_count))?;
                self.machine.emit_imul_imm32(
                    Size::S64,
                    self.vmoffsets.size_of_vm_funcref() as u32,
                    table_count,
                )?;
                self.machine.location_add(
                    Size::S64,
                    Location::GPR(table_base),
                    Location::GPR(table_count),
                    false,
                )?;

                // Load the anyfunc pointer out of the funcref and trap on a
                // null (uninitialized) table entry.
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(table_count, self.vmoffsets.vm_funcref_anyfunc_ptr() as i32),
                    Location::GPR(table_count),
                )?;
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S64,
                    Location::GPR(table_count),
                    Location::Imm32(0),
                    self.special_labels.indirect_call_null,
                )?;
                // Load the expected shared signature id for `type_index`
                // from the vmctx...
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_vmshared_signature_id(index) as i32,
                    ),
                    Location::GPR(sigidx),
                )?;

                // ...and trap if it differs from the callee's recorded
                // type index (call_indirect signature check).
                self.machine.jmp_on_condition(
                    UnsignedCondition::NotEqual,
                    Size::S32,
                    Location::GPR(sigidx),
                    Location::Memory(
                        table_count,
                        (self.vmoffsets.vmcaller_checked_anyfunc_type_index() as usize) as i32,
                    ),
                    self.special_labels.bad_signature,
                )?;
                self.machine.release_gpr(sigidx);
2236 self.machine.release_gpr(table_count);
2237 self.machine.release_gpr(table_base);
2238
2239 let gpr_for_call = self.machine.get_grp_for_call();
2240 if table_count != gpr_for_call {
2241 self.machine.move_location(
2242 Size::S64,
2243 Location::GPR(table_count),
2244 Location::GPR(gpr_for_call),
2245 )?;
2246 }
2247
2248 let vmcaller_checked_anyfunc_func_ptr =
2249 self.vmoffsets.vmcaller_checked_anyfunc_func_ptr() as usize;
2250 let vmcaller_checked_anyfunc_vmctx =
2251 self.vmoffsets.vmcaller_checked_anyfunc_vmctx() as usize;
2252 let calling_convention = self.calling_convention;
2253
2254 self.emit_call_native(
2255 |this| {
2256 if this.machine.arch_requires_indirect_call_trampoline() {
2257 this.machine
2258 .arch_emit_indirect_call_with_trampoline(Location::Memory(
2259 gpr_for_call,
2260 vmcaller_checked_anyfunc_func_ptr as i32,
2261 ))
2262 } else {
2263 let offset = this
2264 .machine
2265 .mark_instruction_with_trap_code(TrapCode::StackOverflow);
2266
2267 this.machine.move_location(
2269 Size::S64,
2270 Location::Memory(
2271 gpr_for_call,
2272 vmcaller_checked_anyfunc_vmctx as i32,
2273 ),
2274 this.machine
2275 .get_simple_param_location(0, calling_convention),
2276 )?;
2277
2278 this.machine.emit_call_location(Location::Memory(
2279 gpr_for_call,
2280 vmcaller_checked_anyfunc_func_ptr as i32,
2281 ))?;
2282 this.machine.mark_instruction_address_end(offset);
2283 Ok(())
2284 }
2285 },
2286 params.iter().map(|(loc, _)| *loc),
2287 param_types.iter().copied(),
2288 NativeCallType::IncludeVMCtxArgument,
2289 )?;
2290
2291 self.release_locations_only_stack(¶ms)?;
2292
2293 if !return_types.is_empty() {
2294 let ret = self.acquire_location(&return_types[0])?;
2295 self.value_stack.push((ret, CanonicalizeType::None));
2296 if return_types[0].is_float() {
2297 self.machine.move_location(
2298 Size::S64,
2299 Location::SIMD(self.machine.get_simd_for_ret()),
2300 ret,
2301 )?;
2302 } else {
2303 self.machine.move_location(
2304 Size::S64,
2305 Location::GPR(self.machine.get_gpr_for_ret()),
2306 ret,
2307 )?;
2308 }
2309 }
2310 }
            // `if`: open a conditional control frame. `label_else` is where
            // control goes when the condition is zero; `label_end` is the join
            // point at the construct's `end`.
            Operator::If { blockty } => {
                let label_end = self.machine.get_label();
                let label_else = self.machine.get_label();

                let cond = self.pop_value_released()?.0;

                let frame = ControlFrame {
                    state: ControlState::If(label_else),
                    label: label_end,
                    // Only zero- or single-value block types are supported.
                    returns: match blockty {
                        WpTypeOrFuncType::Empty => smallvec![],
                        WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
                        _ => {
                            return Err(CompileError::Codegen(
                                "If: multi-value returns not yet implemented".to_owned(),
                            ));
                        }
                    },
                    // Remember the stack depth so branches/`end` can unwind to it.
                    value_stack_depth: self.value_stack.len(),
                };
                self.control_stack.push(frame);
                // Fall through into the `then` arm; branch away when cond == 0.
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    label_else,
                )?;
            }
2340 Operator::Else => {
2341 let frame = self.control_stack.last_mut().unwrap();
2342
2343 if !was_unreachable && !frame.returns.is_empty() {
2344 let first_return = frame.returns[0];
2345 let (loc, canonicalize) = *self.value_stack.last().unwrap();
2346 let canonicalize = self.machine.arch_supports_canonicalize_nan()
2347 && self.config.enable_nan_canonicalization
2348 && !matches!(canonicalize, CanonicalizeType::None);
2349 self.machine
2350 .emit_function_return_value(first_return, canonicalize, loc)?;
2351 }
2352
2353 let frame = &self.control_stack.last_mut().unwrap();
2354 let stack_depth = frame.value_stack_depth;
2355 self.release_locations_value(stack_depth)?;
2356 self.value_stack.truncate(stack_depth);
2357 let frame = &mut self.control_stack.last_mut().unwrap();
2358
2359 match frame.state {
2360 ControlState::If(label) => {
2361 self.machine.jmp_unconditional(frame.label)?;
2362 self.machine.emit_label(label)?;
2363 frame.state = ControlState::Else;
2364 }
2365 _ => {
2366 return Err(CompileError::Codegen(
2367 "Else: frame.if_else unreachable code".to_owned(),
2368 ));
2369 }
2370 }
2371 }
            // `select` / typed `select`: pick between two operands based on a
            // condition, implemented with a branch diamond rather than a cmov.
            Operator::TypedSelect { .. } | Operator::Select => {
                let cond = self.pop_value_released()?.0;
                let (v_b, canonicalize_b) = self.pop_value_released()?;
                let (v_a, canonicalize_a) = self.pop_value_released()?;
                // The result slot is typed I64 regardless of operand type; the
                // 64-bit move below covers both 32- and 64-bit values.
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let end_label = self.machine.get_label();
                let zero_label = self.machine.get_label();

                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    zero_label,
                )?;
                // cond != 0: result is v_a, NaN-canonicalized on the way if the
                // value is flagged and the feature is enabled.
                if self.machine.arch_supports_canonicalize_nan()
                    && self.config.enable_nan_canonicalization
                    && let Some(size) = canonicalize_a.to_size()
                {
                    self.machine.canonicalize_nan(size, v_a, ret)?;
                } else if v_a != ret {
                    self.machine.emit_relaxed_mov(Size::S64, v_a, ret)?;
                }
                self.machine.jmp_unconditional(end_label)?;
                self.machine.emit_label(zero_label)?;
                // cond == 0: result is v_b, same canonicalization rule.
                if self.machine.arch_supports_canonicalize_nan()
                    && self.config.enable_nan_canonicalization
                    && let Some(size) = canonicalize_b.to_size()
                {
                    self.machine.canonicalize_nan(size, v_b, ret)?;
                } else if v_b != ret {
                    self.machine.emit_relaxed_mov(Size::S64, v_b, ret)?;
                }
                self.machine.emit_label(end_label)?;
            }
            // `block`: open a plain control frame. The join label is bound
            // later at the matching `end`; only the frame bookkeeping happens
            // here — no code is emitted.
            Operator::Block { blockty } => {
                let frame = ControlFrame {
                    state: ControlState::Block,
                    label: self.machine.get_label(),
                    returns: match blockty {
                        WpTypeOrFuncType::Empty => smallvec![],
                        WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
                        _ => {
                            return Err(CompileError::Codegen(
                                "Block: multi-value returns not yet implemented".to_owned(),
                            ));
                        }
                    },
                    value_stack_depth: self.value_stack.len(),
                };
                self.control_stack.push(frame);
            }
            // `loop`: like `block`, but the label is bound at the *top* so a
            // branch to this frame re-enters the loop body.
            Operator::Loop { blockty } => {
                self.machine.align_for_loop()?;
                let label = self.machine.get_label();

                self.control_stack.push(ControlFrame {
                    state: ControlState::Loop,
                    label,
                    returns: match blockty {
                        WpTypeOrFuncType::Empty => smallvec![],
                        WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
                        _ => {
                            return Err(CompileError::Codegen(
                                "Loop: multi-value returns not yet implemented".to_owned(),
                            ));
                        }
                    },
                    value_stack_depth: self.value_stack.len(),
                });
                self.machine.emit_label(label)?;
            }
            // `nop`: generates no code.
            Operator::Nop => {}
            // `memory.size`: call the memory32_size builtin (local or imported
            // variant) through its function pointer stored in the vmctx.
            Operator::MemorySize { mem } => {
                let memory_index = MemoryIndex::new(mem as usize);
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(
                            if self.module.local_memory_index(memory_index).is_some() {
                                VMBuiltinFunctionIndex::get_memory32_size_index()
                            } else {
                                VMBuiltinFunctionIndex::get_imported_memory32_size_index()
                            },
                        ) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;
                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    iter::once(Location::Imm32(memory_index.index() as u32)),
                    iter::once(WpType::I64),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
                // The builtin's result comes back in the ret GPR; stash it in a
                // freshly acquired location and push it on the value stack.
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.move_location(
                    Size::S64,
                    Location::GPR(self.machine.get_gpr_for_ret()),
                    ret,
                )?;
            }
            // `memory.init`: pop (dst, src, len) and invoke the memory_init
            // builtin with (mem, data_index, dst, src, len).
            Operator::MemoryInit { data_index, mem } => {
                let len = self.value_stack.pop().unwrap();
                let src = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();
                // Free register-resident operand locations before the call;
                // their stack slots are released after the call returns.
                self.release_locations_only_regs(&[len, src, dst])?;

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets
                            .vmctx_builtin_function(VMBuiltinFunctionIndex::get_memory_init_index())
                            as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [
                        Location::Imm32(mem),
                        Location::Imm32(data_index),
                        dst.0,
                        src.0,
                        len.0,
                    ]
                    .iter()
                    .cloned(),
                    [
                        WpType::I64,
                        WpType::I64,
                        WpType::I64,
                        WpType::I64,
                        WpType::I64,
                    ]
                    .iter()
                    .cloned(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
                self.release_locations_only_stack(&[dst, src, len])?;
            }
            // `data.drop`: invoke the data_drop builtin for the given passive
            // data segment. No value-stack traffic.
            Operator::DataDrop { data_index } => {
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets
                            .vmctx_builtin_function(VMBuiltinFunctionIndex::get_data_drop_index())
                            as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    iter::once(Location::Imm32(data_index)),
                    iter::once(WpType::I64),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `memory.copy`: pop (dst_pos, src_pos, len) and invoke the
            // (local or imported) memory_copy builtin.
            // NOTE(review): only `src_mem` is used to select the memory —
            // assumes src and dst memory are the same; confirm against the
            // multi-memory handling elsewhere.
            Operator::MemoryCopy { src_mem, .. } => {
                let len = self.value_stack.pop().unwrap();
                let src_pos = self.value_stack.pop().unwrap();
                let dst_pos = self.value_stack.pop().unwrap();
                self.release_locations_only_regs(&[len, src_pos, dst_pos])?;

                let memory_index = MemoryIndex::new(src_mem as usize);
                let (memory_copy_index, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_copy_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_copy_index) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [
                        Location::Imm32(memory_index.index() as u32),
                        dst_pos.0,
                        src_pos.0,
                        len.0,
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
                        .iter()
                        .cloned(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
                self.release_locations_only_stack(&[dst_pos, src_pos, len])?;
            }
            // `memory.fill`: pop (dst, val, len) and invoke the (local or
            // imported) memory_fill builtin.
            Operator::MemoryFill { mem } => {
                let len = self.value_stack.pop().unwrap();
                let val = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();
                self.release_locations_only_regs(&[len, val, dst])?;

                let memory_index = MemoryIndex::new(mem as usize);
                let (memory_fill_index, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_fill_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_fill_index) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [
                        Location::Imm32(memory_index.index() as u32),
                        dst.0,
                        val.0,
                        len.0,
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
                        .iter()
                        .cloned(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
                self.release_locations_only_stack(&[dst, val, len])?;
            }
            // `memory.grow`: pop the page delta, call the memory32_grow
            // builtin (local or imported variant), and push its result.
            Operator::MemoryGrow { mem } => {
                let memory_index = MemoryIndex::new(mem as usize);
                let param_pages = self.value_stack.pop().unwrap();

                self.release_locations_only_regs(&[param_pages])?;

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(
                            if self.module.local_memory_index(memory_index).is_some() {
                                VMBuiltinFunctionIndex::get_memory32_grow_index()
                            } else {
                                VMBuiltinFunctionIndex::get_imported_memory32_grow_index()
                            },
                        ) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                // Builtin signature: (delta_pages, memory_index).
                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    iter::once(param_pages.0)
                        .chain(iter::once(Location::Imm32(memory_index.index() as u32))),
                    [WpType::I64, WpType::I64].iter().cloned(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;

                self.release_locations_only_stack(&[param_pages])?;

                // Previous page count (or failure sentinel from the builtin)
                // comes back in the ret GPR.
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.move_location(
                    Size::S64,
                    Location::GPR(self.machine.get_gpr_for_ret()),
                    ret,
                )?;
            }
            // ---- 32-bit loads/stores -------------------------------------
            // Each arm pops the address (and for stores the value), acquires a
            // result slot for loads, and delegates to the corresponding
            // machine-level helper via op_memory, which supplies the bounds-
            // check parameters and trap labels.
            Operator::I32Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::F32Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.f32_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32.load8_u — delegates to machine.i32_load_8u.
            Operator::I32Load8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32.load8_s — delegates to machine.i32_load_8s.
            Operator::I32Load8S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_8s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32.load16_u — delegates to machine.i32_load_16u.
            Operator::I32Load16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32.load16_s — delegates to machine.i32_load_16s.
            Operator::I32Load16S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_16s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32.store — value is popped before the address.
            Operator::I32Store { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
2864 Operator::F32Store { ref memarg } => {
2865 let (target_value, canonicalize) = self.pop_value_released()?;
2866 let target_addr = self.pop_value_released()?.0;
2867 self.op_memory(
2868 |this,
2869 need_check,
2870 imported_memories,
2871 offset,
2872 heap_access_oob,
2873 unaligned_atomic| {
2874 this.machine.f32_save(
2875 target_value,
2876 memarg,
2877 target_addr,
2878 self.config.enable_nan_canonicalization
2879 && !matches!(canonicalize, CanonicalizeType::None),
2880 need_check,
2881 imported_memories,
2882 offset,
2883 heap_access_oob,
2884 unaligned_atomic,
2885 )
2886 },
2887 )?;
2888 }
            // i32.store8 — delegates to machine.i32_save_8.
            Operator::I32Store8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32.store16 — delegates to machine.i32_save_16.
            Operator::I32Store16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // ---- 64-bit loads/stores -------------------------------------
            // Same shape as the 32-bit family: pop address (and value for
            // stores), acquire a result slot for loads, delegate to the
            // machine helper through op_memory.
            Operator::I64Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::F64Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.f64_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.load8_u — delegates to machine.i64_load_8u.
            Operator::I64Load8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.load8_s — delegates to machine.i64_load_8s.
            Operator::I64Load8S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_8s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.load16_u — delegates to machine.i64_load_16u.
            Operator::I64Load16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.load16_s — delegates to machine.i64_load_16s.
            Operator::I64Load16S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_16s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.load32_u — delegates to machine.i64_load_32u.
            Operator::I64Load32U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_32u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.load32_s — delegates to machine.i64_load_32s.
            Operator::I64Load32S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_32s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.store — value is popped before the address.
            Operator::I64Store { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;

                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
3151 Operator::F64Store { ref memarg } => {
3152 let (target_value, canonicalize) = self.pop_value_released()?;
3153 let target_addr = self.pop_value_released()?.0;
3154 self.op_memory(
3155 |this,
3156 need_check,
3157 imported_memories,
3158 offset,
3159 heap_access_oob,
3160 unaligned_atomic| {
3161 this.machine.f64_save(
3162 target_value,
3163 memarg,
3164 target_addr,
3165 self.config.enable_nan_canonicalization
3166 && !matches!(canonicalize, CanonicalizeType::None),
3167 need_check,
3168 imported_memories,
3169 offset,
3170 heap_access_oob,
3171 unaligned_atomic,
3172 )
3173 },
3174 )?;
3175 }
            // i64.store8 — delegates to machine.i64_save_8.
            Operator::I64Store8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.store16 — delegates to machine.i64_save_16.
            Operator::I64Store16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64.store32 — delegates to machine.i64_save_32.
            Operator::I64Store32 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save_32(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // `unreachable`: call the raise_trap builtin with
            // UnreachableCodeReached, then mark subsequent operators as dead
            // code (unreachable_depth tracks nesting until the matching `end`).
            Operator::Unreachable => {
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets
                            .vmctx_builtin_function(VMBuiltinFunctionIndex::get_raise_trap_index())
                            as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [Location::Imm32(TrapCode::UnreachableCodeReached as u32)]
                        .iter()
                        .cloned(),
                    [WpType::I32].iter().cloned(),
                    NativeCallType::Unreachable,
                )?;
                self.unreachable_depth = 1;
            }
            // `return`: flush the (single) return value, unwind to the
            // function frame (control_stack[0]) and jump to its exit label.
            Operator::Return => {
                let frame = &self.control_stack[0];
                if !frame.returns.is_empty() {
                    if frame.returns.len() != 1 {
                        return Err(CompileError::Codegen(
                            "Return: incorrect frame.returns".to_owned(),
                        ));
                    }
                    let first_return = frame.returns[0];
                    let (loc, canonicalize) = *self.value_stack.last().unwrap();
                    let canonicalize = self.machine.arch_supports_canonicalize_nan()
                        && self.config.enable_nan_canonicalization
                        && !matches!(canonicalize, CanonicalizeType::None);
                    self.machine
                        .emit_function_return_value(first_return, canonicalize, loc)?;
                }
                let frame = &self.control_stack[0];
                let frame_depth = frame.value_stack_depth;
                let label = frame.label;
                // keep_state: the value stack itself is not truncated — this
                // path may be bypassed at runtime by other control flow.
                self.release_locations_keep_state(frame_depth)?;
                self.machine.jmp_unconditional(label)?;
                self.unreachable_depth = 1;
            }
            // `br`: unconditional branch to an enclosing frame. For non-loop
            // targets with a result, the branch carries the top-of-stack value
            // in the conventional return location.
            Operator::Br { relative_depth } => {
                let frame =
                    &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
                // Loop targets take no branch operands here (label is at the top).
                if !matches!(frame.state, ControlState::Loop) && !frame.returns.is_empty() {
                    if frame.returns.len() != 1 {
                        return Err(CompileError::Codegen(
                            "Br: incorrect frame.returns".to_owned(),
                        ));
                    }
                    let first_return = frame.returns[0];
                    let (loc, canonicalize) = *self.value_stack.last().unwrap();
                    let canonicalize = self.machine.arch_supports_canonicalize_nan()
                        && self.config.enable_nan_canonicalization
                        && !matches!(canonicalize, CanonicalizeType::None);
                    self.machine
                        .emit_function_return_value(first_return, canonicalize, loc)?;
                }
                let stack_len = self.control_stack.len();
                let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
                let frame_depth = frame.value_stack_depth;
                let label = frame.label;

                self.release_locations_keep_state(frame_depth)?;
                self.machine.jmp_unconditional(label)?;
                self.unreachable_depth = 1;
            }
            // `br_if`: conditional branch. When the condition is zero we skip
            // past the branch sequence via `after`; otherwise behave like `br`.
            Operator::BrIf { relative_depth } => {
                let after = self.machine.get_label();
                let cond = self.pop_value_released()?.0;
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    after,
                )?;

                let frame =
                    &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
                if !matches!(frame.state, ControlState::Loop) && !frame.returns.is_empty() {
                    if frame.returns.len() != 1 {
                        return Err(CompileError::Codegen(
                            "BrIf: incorrect frame.returns".to_owned(),
                        ));
                    }

                    let first_return = frame.returns[0];
                    let (loc, canonicalize) = *self.value_stack.last().unwrap();
                    let canonicalize = self.machine.arch_supports_canonicalize_nan()
                        && self.config.enable_nan_canonicalization
                        && !matches!(canonicalize, CanonicalizeType::None);
                    self.machine
                        .emit_function_return_value(first_return, canonicalize, loc)?;
                }
                let stack_len = self.control_stack.len();
                let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
                let stack_depth = frame.value_stack_depth;
                let label = frame.label;
                // keep_state: fall-through path still owns the value stack.
                self.release_locations_keep_state(stack_depth)?;
                self.machine.jmp_unconditional(label)?;

                self.machine.emit_label(after)?;
            }
            Operator::BrTable { ref targets } => {
                // Indexed branch. Emission layout:
                //   1. bounds check (index >= len -> default_br)
                //   2. indirect jump into the jump table at `table_label`
                //   3. one stub per target (result move + release + jump)
                //   4. the default-target stub
                //   5. the jump table itself: one unconditional jump per stub
                let default_target = targets.default();
                let targets = targets
                    .targets()
                    .collect::<Result<Vec<_>, _>>()
                    .map_err(|e| CompileError::Codegen(format!("BrTable read_table: {e:?}")))?;
                let cond = self.pop_value_released()?.0;
                let table_label = self.machine.get_label();
                let mut table: Vec<Label> = vec![];
                let default_br = self.machine.get_label();
                // Unsigned compare so a negative i32 index is also out of range.
                self.machine.jmp_on_condition(
                    UnsignedCondition::AboveEqual,
                    Size::S32,
                    cond,
                    Location::Imm32(targets.len() as u32),
                    default_br,
                )?;

                self.machine.emit_jmp_to_jumptable(table_label, cond)?;

                for target in targets.iter() {
                    let label = self.machine.get_label();
                    self.machine.emit_label(label)?;
                    table.push(label);
                    let frame =
                        &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
                    // Loop frames carry no result; otherwise move the single
                    // result into the return register before branching.
                    if !matches!(frame.state, ControlState::Loop) && !frame.returns.is_empty() {
                        if frame.returns.len() != 1 {
                            return Err(CompileError::Codegen(format!(
                                "BrTable: incorrect frame.returns for {target:?}",
                            )));
                        }

                        let first_return = frame.returns[0];
                        // Peek only: each stub reads the same stack top.
                        let (loc, canonicalize) = *self.value_stack.last().unwrap();
                        let canonicalize = self.machine.arch_supports_canonicalize_nan()
                            && self.config.enable_nan_canonicalization
                            && !matches!(canonicalize, CanonicalizeType::None);
                        self.machine
                            .emit_function_return_value(first_return, canonicalize, loc)?;
                    }
                    let frame =
                        &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
                    let stack_depth = frame.value_stack_depth;
                    let label = frame.label;
                    self.release_locations_keep_state(stack_depth)?;
                    self.machine.jmp_unconditional(label)?;
                }
                self.machine.emit_label(default_br)?;

                {
                    // Same result-move/release/jump sequence for the default target.
                    let frame = &self.control_stack
                        [self.control_stack.len() - 1 - (default_target as usize)];
                    if !matches!(frame.state, ControlState::Loop) && !frame.returns.is_empty() {
                        if frame.returns.len() != 1 {
                            return Err(CompileError::Codegen(
                                "BrTable: incorrect frame.returns".to_owned(),
                            ));
                        }

                        let first_return = frame.returns[0];
                        let (loc, canonicalize) = *self.value_stack.last().unwrap();
                        let canonicalize = self.machine.arch_supports_canonicalize_nan()
                            && self.config.enable_nan_canonicalization
                            && !matches!(canonicalize, CanonicalizeType::None);
                        self.machine
                            .emit_function_return_value(first_return, canonicalize, loc)?;
                    }
                    let frame = &self.control_stack
                        [self.control_stack.len() - 1 - (default_target as usize)];
                    let stack_depth = frame.value_stack_depth;
                    let label = frame.label;
                    self.release_locations_keep_state(stack_depth)?;
                    self.machine.jmp_unconditional(label)?;
                }

                self.machine.emit_label(table_label)?;
                for x in table {
                    self.machine.jmp_unconditional(x)?;
                }
                // Everything after a br_table is statically unreachable.
                self.unreachable_depth = 1;
            }
            Operator::Drop => {
                // Pop and discard the value-stack top, releasing its location.
                self.pop_value_released()?;
            }
3442 Operator::End => {
3443 let frame = self.control_stack.pop().unwrap();
3444
3445 if !was_unreachable && !frame.returns.is_empty() {
3446 let (loc, canonicalize) = *self.value_stack.last().unwrap();
3447 let canonicalize = self.machine.arch_supports_canonicalize_nan()
3448 && self.config.enable_nan_canonicalization
3449 && !matches!(canonicalize, CanonicalizeType::None);
3450 self.machine
3451 .emit_function_return_value(frame.returns[0], canonicalize, loc)?;
3452 }
3453
3454 if self.control_stack.is_empty() {
3455 self.machine.emit_label(frame.label)?;
3456 self.finalize_locals(self.calling_convention)?;
3457 self.machine.emit_function_epilog()?;
3458
3459 match self.signature.results() {
3461 [x] if *x == Type::F32 || *x == Type::F64 => {
3462 self.machine.emit_function_return_float()?;
3463 }
3464 _ => {}
3465 }
3466 self.machine.emit_ret()?;
3467 } else {
3468 let released = &self.value_stack.clone()[frame.value_stack_depth..];
3469 self.release_locations(released)?;
3470 self.value_stack.truncate(frame.value_stack_depth);
3471
3472 if !matches!(frame.state, ControlState::Loop) {
3473 self.machine.emit_label(frame.label)?;
3474 }
3475
3476 if let ControlState::If(label) = frame.state {
3477 self.machine.emit_label(label)?;
3478 }
3479
3480 if !frame.returns.is_empty() {
3481 if frame.returns.len() != 1 {
3482 return Err(CompileError::Codegen(
3483 "End: incorrect frame.returns".to_owned(),
3484 ));
3485 }
3486 let loc = self.acquire_location(&frame.returns[0])?;
3487 self.machine.move_location(
3488 Size::S64,
3489 Location::GPR(self.machine.get_gpr_for_ret()),
3490 loc,
3491 )?;
3492 self.value_stack.push((loc, CanonicalizeType::None));
3493 }
3495 }
3496 }
            Operator::AtomicFence => {
                // Full memory fence; no operands and no result.
                self.machine.emit_memory_fence()?;
            }
            // i32 atomic loads: pop the address, acquire a result location,
            // then emit the access via `op_memory`, which supplies the
            // bounds-check/trap parameters to the machine-level helper.
            Operator::I32AtomicLoad { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicLoad8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicLoad16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i32 atomic stores: pop the value then the address (wasm operand
            // order), no result is pushed.
            Operator::I32AtomicStore { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicStore8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicStore16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64 atomic loads (full-width plus 8/16/32-bit zero-extending).
            Operator::I64AtomicLoad { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicLoad8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicLoad16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicLoad32U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load_32u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // i64 atomic stores (full-width plus 8/16/32-bit truncating).
            Operator::I64AtomicStore { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicStore8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicStore16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicStore32 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save_32(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Atomic read-modify-write ADD family: pop the operand then the
            // address, acquire a result location for the old value, and emit
            // the machine-level RMW through `op_memory`.
            Operator::I32AtomicRmwAdd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwAdd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Atomic read-modify-write SUB family (same shape as the ADD arms).
            Operator::I32AtomicRmwSub { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_sub(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwSub { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_sub_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_sub_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Atomic read-modify-write AND family (same shape as the ADD arms).
            Operator::I32AtomicRmwAnd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_and(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwAnd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_and_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_and_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Atomic read-modify-write OR family (same shape as the ADD arms).
            Operator::I32AtomicRmwOr { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_or(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwOr { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_or_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_or_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Atomic read-modify-write XOR family (same shape as the ADD arms).
            Operator::I32AtomicRmwXor { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_xor(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwXor { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_xor_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_xor_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
4746 Operator::I32AtomicRmwXchg { ref memarg } => {
4747 let loc = self.pop_value_released()?.0;
4748 let target = self.pop_value_released()?.0;
4749 let ret = self.acquire_location(&WpType::I32)?;
4750 self.value_stack.push((ret, CanonicalizeType::None));
4751 self.op_memory(
4752 |this,
4753 need_check,
4754 imported_memories,
4755 offset,
4756 heap_access_oob,
4757 unaligned_atomic| {
4758 this.machine.i32_atomic_xchg(
4759 loc,
4760 target,
4761 memarg,
4762 ret,
4763 need_check,
4764 imported_memories,
4765 offset,
4766 heap_access_oob,
4767 unaligned_atomic,
4768 )
4769 },
4770 )?;
4771 }
4772 Operator::I64AtomicRmwXchg { ref memarg } => {
4773 let loc = self.pop_value_released()?.0;
4774 let target = self.pop_value_released()?.0;
4775 let ret = self.acquire_location(&WpType::I64)?;
4776 self.value_stack.push((ret, CanonicalizeType::None));
4777 self.op_memory(
4778 |this,
4779 need_check,
4780 imported_memories,
4781 offset,
4782 heap_access_oob,
4783 unaligned_atomic| {
4784 this.machine.i64_atomic_xchg(
4785 loc,
4786 target,
4787 memarg,
4788 ret,
4789 need_check,
4790 imported_memories,
4791 offset,
4792 heap_access_oob,
4793 unaligned_atomic,
4794 )
4795 },
4796 )?;
4797 }
4798 Operator::I32AtomicRmw8XchgU { ref memarg } => {
4799 let loc = self.pop_value_released()?.0;
4800 let target = self.pop_value_released()?.0;
4801 let ret = self.acquire_location(&WpType::I32)?;
4802 self.value_stack.push((ret, CanonicalizeType::None));
4803 self.op_memory(
4804 |this,
4805 need_check,
4806 imported_memories,
4807 offset,
4808 heap_access_oob,
4809 unaligned_atomic| {
4810 this.machine.i32_atomic_xchg_8u(
4811 loc,
4812 target,
4813 memarg,
4814 ret,
4815 need_check,
4816 imported_memories,
4817 offset,
4818 heap_access_oob,
4819 unaligned_atomic,
4820 )
4821 },
4822 )?;
4823 }
4824 Operator::I32AtomicRmw16XchgU { ref memarg } => {
4825 let loc = self.pop_value_released()?.0;
4826 let target = self.pop_value_released()?.0;
4827 let ret = self.acquire_location(&WpType::I32)?;
4828 self.value_stack.push((ret, CanonicalizeType::None));
4829 self.op_memory(
4830 |this,
4831 need_check,
4832 imported_memories,
4833 offset,
4834 heap_access_oob,
4835 unaligned_atomic| {
4836 this.machine.i32_atomic_xchg_16u(
4837 loc,
4838 target,
4839 memarg,
4840 ret,
4841 need_check,
4842 imported_memories,
4843 offset,
4844 heap_access_oob,
4845 unaligned_atomic,
4846 )
4847 },
4848 )?;
4849 }
4850 Operator::I64AtomicRmw8XchgU { ref memarg } => {
4851 let loc = self.pop_value_released()?.0;
4852 let target = self.pop_value_released()?.0;
4853 let ret = self.acquire_location(&WpType::I64)?;
4854 self.value_stack.push((ret, CanonicalizeType::None));
4855 self.op_memory(
4856 |this,
4857 need_check,
4858 imported_memories,
4859 offset,
4860 heap_access_oob,
4861 unaligned_atomic| {
4862 this.machine.i64_atomic_xchg_8u(
4863 loc,
4864 target,
4865 memarg,
4866 ret,
4867 need_check,
4868 imported_memories,
4869 offset,
4870 heap_access_oob,
4871 unaligned_atomic,
4872 )
4873 },
4874 )?;
4875 }
4876 Operator::I64AtomicRmw16XchgU { ref memarg } => {
4877 let loc = self.pop_value_released()?.0;
4878 let target = self.pop_value_released()?.0;
4879 let ret = self.acquire_location(&WpType::I64)?;
4880 self.value_stack.push((ret, CanonicalizeType::None));
4881 self.op_memory(
4882 |this,
4883 need_check,
4884 imported_memories,
4885 offset,
4886 heap_access_oob,
4887 unaligned_atomic| {
4888 this.machine.i64_atomic_xchg_16u(
4889 loc,
4890 target,
4891 memarg,
4892 ret,
4893 need_check,
4894 imported_memories,
4895 offset,
4896 heap_access_oob,
4897 unaligned_atomic,
4898 )
4899 },
4900 )?;
4901 }
4902 Operator::I64AtomicRmw32XchgU { ref memarg } => {
4903 let loc = self.pop_value_released()?.0;
4904 let target = self.pop_value_released()?.0;
4905 let ret = self.acquire_location(&WpType::I64)?;
4906 self.value_stack.push((ret, CanonicalizeType::None));
4907 self.op_memory(
4908 |this,
4909 need_check,
4910 imported_memories,
4911 offset,
4912 heap_access_oob,
4913 unaligned_atomic| {
4914 this.machine.i64_atomic_xchg_32u(
4915 loc,
4916 target,
4917 memarg,
4918 ret,
4919 need_check,
4920 imported_memories,
4921 offset,
4922 heap_access_oob,
4923 unaligned_atomic,
4924 )
4925 },
4926 )?;
4927 }
4928 Operator::I32AtomicRmwCmpxchg { ref memarg } => {
4929 let new = self.pop_value_released()?.0;
4930 let cmp = self.pop_value_released()?.0;
4931 let target = self.pop_value_released()?.0;
4932 let ret = self.acquire_location(&WpType::I32)?;
4933 self.value_stack.push((ret, CanonicalizeType::None));
4934 self.op_memory(
4935 |this,
4936 need_check,
4937 imported_memories,
4938 offset,
4939 heap_access_oob,
4940 unaligned_atomic| {
4941 this.machine.i32_atomic_cmpxchg(
4942 new,
4943 cmp,
4944 target,
4945 memarg,
4946 ret,
4947 need_check,
4948 imported_memories,
4949 offset,
4950 heap_access_oob,
4951 unaligned_atomic,
4952 )
4953 },
4954 )?;
4955 }
4956 Operator::I64AtomicRmwCmpxchg { ref memarg } => {
4957 let new = self.pop_value_released()?.0;
4958 let cmp = self.pop_value_released()?.0;
4959 let target = self.pop_value_released()?.0;
4960 let ret = self.acquire_location(&WpType::I64)?;
4961 self.value_stack.push((ret, CanonicalizeType::None));
4962 self.op_memory(
4963 |this,
4964 need_check,
4965 imported_memories,
4966 offset,
4967 heap_access_oob,
4968 unaligned_atomic| {
4969 this.machine.i64_atomic_cmpxchg(
4970 new,
4971 cmp,
4972 target,
4973 memarg,
4974 ret,
4975 need_check,
4976 imported_memories,
4977 offset,
4978 heap_access_oob,
4979 unaligned_atomic,
4980 )
4981 },
4982 )?;
4983 }
4984 Operator::I32AtomicRmw8CmpxchgU { ref memarg } => {
4985 let new = self.pop_value_released()?.0;
4986 let cmp = self.pop_value_released()?.0;
4987 let target = self.pop_value_released()?.0;
4988 let ret = self.acquire_location(&WpType::I32)?;
4989 self.value_stack.push((ret, CanonicalizeType::None));
4990 self.op_memory(
4991 |this,
4992 need_check,
4993 imported_memories,
4994 offset,
4995 heap_access_oob,
4996 unaligned_atomic| {
4997 this.machine.i32_atomic_cmpxchg_8u(
4998 new,
4999 cmp,
5000 target,
5001 memarg,
5002 ret,
5003 need_check,
5004 imported_memories,
5005 offset,
5006 heap_access_oob,
5007 unaligned_atomic,
5008 )
5009 },
5010 )?;
5011 }
5012 Operator::I32AtomicRmw16CmpxchgU { ref memarg } => {
5013 let new = self.pop_value_released()?.0;
5014 let cmp = self.pop_value_released()?.0;
5015 let target = self.pop_value_released()?.0;
5016 let ret = self.acquire_location(&WpType::I32)?;
5017 self.value_stack.push((ret, CanonicalizeType::None));
5018 self.op_memory(
5019 |this,
5020 need_check,
5021 imported_memories,
5022 offset,
5023 heap_access_oob,
5024 unaligned_atomic| {
5025 this.machine.i32_atomic_cmpxchg_16u(
5026 new,
5027 cmp,
5028 target,
5029 memarg,
5030 ret,
5031 need_check,
5032 imported_memories,
5033 offset,
5034 heap_access_oob,
5035 unaligned_atomic,
5036 )
5037 },
5038 )?;
5039 }
5040 Operator::I64AtomicRmw8CmpxchgU { ref memarg } => {
5041 let new = self.pop_value_released()?.0;
5042 let cmp = self.pop_value_released()?.0;
5043 let target = self.pop_value_released()?.0;
5044 let ret = self.acquire_location(&WpType::I64)?;
5045 self.value_stack.push((ret, CanonicalizeType::None));
5046 self.op_memory(
5047 |this,
5048 need_check,
5049 imported_memories,
5050 offset,
5051 heap_access_oob,
5052 unaligned_atomic| {
5053 this.machine.i64_atomic_cmpxchg_8u(
5054 new,
5055 cmp,
5056 target,
5057 memarg,
5058 ret,
5059 need_check,
5060 imported_memories,
5061 offset,
5062 heap_access_oob,
5063 unaligned_atomic,
5064 )
5065 },
5066 )?;
5067 }
5068 Operator::I64AtomicRmw16CmpxchgU { ref memarg } => {
5069 let new = self.pop_value_released()?.0;
5070 let cmp = self.pop_value_released()?.0;
5071 let target = self.pop_value_released()?.0;
5072 let ret = self.acquire_location(&WpType::I64)?;
5073 self.value_stack.push((ret, CanonicalizeType::None));
5074 self.op_memory(
5075 |this,
5076 need_check,
5077 imported_memories,
5078 offset,
5079 heap_access_oob,
5080 unaligned_atomic| {
5081 this.machine.i64_atomic_cmpxchg_16u(
5082 new,
5083 cmp,
5084 target,
5085 memarg,
5086 ret,
5087 need_check,
5088 imported_memories,
5089 offset,
5090 heap_access_oob,
5091 unaligned_atomic,
5092 )
5093 },
5094 )?;
5095 }
5096 Operator::I64AtomicRmw32CmpxchgU { ref memarg } => {
5097 let new = self.pop_value_released()?.0;
5098 let cmp = self.pop_value_released()?.0;
5099 let target = self.pop_value_released()?.0;
5100 let ret = self.acquire_location(&WpType::I64)?;
5101 self.value_stack.push((ret, CanonicalizeType::None));
5102 self.op_memory(
5103 |this,
5104 need_check,
5105 imported_memories,
5106 offset,
5107 heap_access_oob,
5108 unaligned_atomic| {
5109 this.machine.i64_atomic_cmpxchg_32u(
5110 new,
5111 cmp,
5112 target,
5113 memarg,
5114 ret,
5115 need_check,
5116 imported_memories,
5117 offset,
5118 heap_access_oob,
5119 unaligned_atomic,
5120 )
5121 },
5122 )?;
5123 }
5124
5125 Operator::RefNull { .. } => {
5126 self.value_stack
5127 .push((Location::Imm64(0), CanonicalizeType::None));
5128 }
5129 Operator::RefFunc { function_index } => {
5130 self.machine.move_location(
5131 Size::S64,
5132 Location::Memory(
5133 self.machine.get_vmctx_reg(),
5134 self.vmoffsets
5135 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_func_ref_index())
5136 as i32,
5137 ),
5138 Location::GPR(self.machine.get_grp_for_call()),
5139 )?;
5140
5141 self.emit_call_native(
5142 |this| {
5143 this.machine
5144 .emit_call_register(this.machine.get_grp_for_call())
5145 },
5146 iter::once(Location::Imm32(function_index as u32)),
5148 iter::once(WpType::I64),
5149 NativeCallType::IncludeVMCtxArgument,
5150 )?;
5151
5152 let ret = self.acquire_location(&WpType::Ref(
5153 WpRefType::new(true, WpHeapType::FUNC).unwrap(),
5154 ))?;
5155 self.value_stack.push((ret, CanonicalizeType::None));
5156 self.machine.move_location(
5157 Size::S64,
5158 Location::GPR(self.machine.get_gpr_for_ret()),
5159 ret,
5160 )?;
5161 }
5162 Operator::RefIsNull => {
5163 let loc_a = self.pop_value_released()?.0;
5164 let ret = self.acquire_location(&WpType::I32)?;
5165 self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
5166 self.value_stack.push((ret, CanonicalizeType::None));
5167 }
5168 Operator::TableSet { table: index } => {
5169 let table_index = TableIndex::new(index as _);
5170 let value = self.value_stack.pop().unwrap();
5171 let index = self.value_stack.pop().unwrap();
5172
5173 self.release_locations_only_regs(&[value, index])?;
5175
5176 self.machine.move_location(
5177 Size::S64,
5178 Location::Memory(
5179 self.machine.get_vmctx_reg(),
5180 self.vmoffsets.vmctx_builtin_function(
5181 if self.module.local_table_index(table_index).is_some() {
5182 VMBuiltinFunctionIndex::get_table_set_index()
5183 } else {
5184 VMBuiltinFunctionIndex::get_imported_table_set_index()
5185 },
5186 ) as i32,
5187 ),
5188 Location::GPR(self.machine.get_grp_for_call()),
5189 )?;
5190
5191 self.emit_call_native(
5192 |this| {
5193 this.machine
5194 .emit_call_register(this.machine.get_grp_for_call())
5195 },
5196 [
5198 Location::Imm32(table_index.index() as u32),
5199 index.0,
5200 value.0,
5201 ]
5202 .iter()
5203 .cloned(),
5204 [WpType::I32, WpType::I64, WpType::I64].iter().cloned(),
5205 NativeCallType::IncludeVMCtxArgument,
5206 )?;
5207
5208 self.release_locations_only_stack(&[index, value])?;
5209 }
5210 Operator::TableGet { table: index } => {
5211 let table_index = TableIndex::new(index as _);
5212 let index = self.value_stack.pop().unwrap();
5213
5214 self.release_locations_only_regs(&[index])?;
5215
5216 self.machine.move_location(
5217 Size::S64,
5218 Location::Memory(
5219 self.machine.get_vmctx_reg(),
5220 self.vmoffsets.vmctx_builtin_function(
5221 if self.module.local_table_index(table_index).is_some() {
5222 VMBuiltinFunctionIndex::get_table_get_index()
5223 } else {
5224 VMBuiltinFunctionIndex::get_imported_table_get_index()
5225 },
5226 ) as i32,
5227 ),
5228 Location::GPR(self.machine.get_grp_for_call()),
5229 )?;
5230
5231 self.emit_call_native(
5232 |this| {
5233 this.machine
5234 .emit_call_register(this.machine.get_grp_for_call())
5235 },
5236 [Location::Imm32(table_index.index() as u32), index.0]
5238 .iter()
5239 .cloned(),
5240 [WpType::I32, WpType::I64].iter().cloned(),
5241 NativeCallType::IncludeVMCtxArgument,
5242 )?;
5243
5244 self.release_locations_only_stack(&[index])?;
5245
5246 let ret = self.acquire_location(&WpType::Ref(
5247 WpRefType::new(true, WpHeapType::FUNC).unwrap(),
5248 ))?;
5249 self.value_stack.push((ret, CanonicalizeType::None));
5250 self.machine.move_location(
5251 Size::S64,
5252 Location::GPR(self.machine.get_gpr_for_ret()),
5253 ret,
5254 )?;
5255 }
5256 Operator::TableSize { table: index } => {
5257 let table_index = TableIndex::new(index as _);
5258
5259 self.machine.move_location(
5260 Size::S64,
5261 Location::Memory(
5262 self.machine.get_vmctx_reg(),
5263 self.vmoffsets.vmctx_builtin_function(
5264 if self.module.local_table_index(table_index).is_some() {
5265 VMBuiltinFunctionIndex::get_table_size_index()
5266 } else {
5267 VMBuiltinFunctionIndex::get_imported_table_size_index()
5268 },
5269 ) as i32,
5270 ),
5271 Location::GPR(self.machine.get_grp_for_call()),
5272 )?;
5273
5274 self.emit_call_native(
5275 |this| {
5276 this.machine
5277 .emit_call_register(this.machine.get_grp_for_call())
5278 },
5279 iter::once(Location::Imm32(table_index.index() as u32)),
5281 iter::once(WpType::I32),
5282 NativeCallType::IncludeVMCtxArgument,
5283 )?;
5284
5285 let ret = self.acquire_location(&WpType::I32)?;
5286 self.value_stack.push((ret, CanonicalizeType::None));
5287 self.machine.move_location(
5288 Size::S32,
5289 Location::GPR(self.machine.get_gpr_for_ret()),
5290 ret,
5291 )?;
5292 }
5293 Operator::TableGrow { table: index } => {
5294 let table_index = TableIndex::new(index as _);
5295 let delta = self.value_stack.pop().unwrap();
5296 let init_value = self.value_stack.pop().unwrap();
5297 self.release_locations_only_regs(&[delta, init_value])?;
5298
5299 self.machine.move_location(
5300 Size::S64,
5301 Location::Memory(
5302 self.machine.get_vmctx_reg(),
5303 self.vmoffsets.vmctx_builtin_function(
5304 if self.module.local_table_index(table_index).is_some() {
5305 VMBuiltinFunctionIndex::get_table_grow_index()
5306 } else {
5307 VMBuiltinFunctionIndex::get_imported_table_grow_index()
5308 },
5309 ) as i32,
5310 ),
5311 Location::GPR(self.machine.get_grp_for_call()),
5312 )?;
5313
5314 self.emit_call_native(
5315 |this| {
5316 this.machine
5317 .emit_call_register(this.machine.get_grp_for_call())
5318 },
5319 [
5321 init_value.0,
5322 delta.0,
5323 Location::Imm32(table_index.index() as u32),
5324 ]
5325 .iter()
5326 .cloned(),
5327 [WpType::I64, WpType::I64, WpType::I64].iter().cloned(),
5328 NativeCallType::IncludeVMCtxArgument,
5329 )?;
5330
5331 self.release_locations_only_stack(&[init_value, delta])?;
5332
5333 let ret = self.acquire_location(&WpType::I32)?;
5334 self.value_stack.push((ret, CanonicalizeType::None));
5335 self.machine.move_location(
5336 Size::S32,
5337 Location::GPR(self.machine.get_gpr_for_ret()),
5338 ret,
5339 )?;
5340 }
5341 Operator::TableCopy {
5342 dst_table,
5343 src_table,
5344 } => {
5345 let len = self.value_stack.pop().unwrap();
5346 let src = self.value_stack.pop().unwrap();
5347 let dest = self.value_stack.pop().unwrap();
5348 self.release_locations_only_regs(&[len, src, dest])?;
5349
5350 self.machine.move_location(
5351 Size::S64,
5352 Location::Memory(
5353 self.machine.get_vmctx_reg(),
5354 self.vmoffsets
5355 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_copy_index())
5356 as i32,
5357 ),
5358 Location::GPR(self.machine.get_grp_for_call()),
5359 )?;
5360
5361 self.emit_call_native(
5362 |this| {
5363 this.machine
5364 .emit_call_register(this.machine.get_grp_for_call())
5365 },
5366 [
5368 Location::Imm32(dst_table),
5369 Location::Imm32(src_table),
5370 dest.0,
5371 src.0,
5372 len.0,
5373 ]
5374 .iter()
5375 .cloned(),
5376 [
5377 WpType::I32,
5378 WpType::I32,
5379 WpType::I64,
5380 WpType::I64,
5381 WpType::I64,
5382 ]
5383 .iter()
5384 .cloned(),
5385 NativeCallType::IncludeVMCtxArgument,
5386 )?;
5387
5388 self.release_locations_only_stack(&[dest, src, len])?;
5389 }
5390
5391 Operator::TableFill { table } => {
5392 let len = self.value_stack.pop().unwrap();
5393 let val = self.value_stack.pop().unwrap();
5394 let dest = self.value_stack.pop().unwrap();
5395 self.release_locations_only_regs(&[len, val, dest])?;
5396
5397 self.machine.move_location(
5398 Size::S64,
5399 Location::Memory(
5400 self.machine.get_vmctx_reg(),
5401 self.vmoffsets
5402 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_fill_index())
5403 as i32,
5404 ),
5405 Location::GPR(self.machine.get_grp_for_call()),
5406 )?;
5407
5408 self.emit_call_native(
5409 |this| {
5410 this.machine
5411 .emit_call_register(this.machine.get_grp_for_call())
5412 },
5413 [Location::Imm32(table), dest.0, val.0, len.0]
5415 .iter()
5416 .cloned(),
5417 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
5418 .iter()
5419 .cloned(),
5420 NativeCallType::IncludeVMCtxArgument,
5421 )?;
5422
5423 self.release_locations_only_stack(&[dest, val, len])?;
5424 }
5425 Operator::TableInit { elem_index, table } => {
5426 let len = self.value_stack.pop().unwrap();
5427 let src = self.value_stack.pop().unwrap();
5428 let dest = self.value_stack.pop().unwrap();
5429 self.release_locations_only_regs(&[len, src, dest])?;
5430
5431 self.machine.move_location(
5432 Size::S64,
5433 Location::Memory(
5434 self.machine.get_vmctx_reg(),
5435 self.vmoffsets
5436 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_init_index())
5437 as i32,
5438 ),
5439 Location::GPR(self.machine.get_grp_for_call()),
5440 )?;
5441
5442 self.emit_call_native(
5443 |this| {
5444 this.machine
5445 .emit_call_register(this.machine.get_grp_for_call())
5446 },
5447 [
5449 Location::Imm32(table),
5450 Location::Imm32(elem_index),
5451 dest.0,
5452 src.0,
5453 len.0,
5454 ]
5455 .iter()
5456 .cloned(),
5457 [
5458 WpType::I32,
5459 WpType::I32,
5460 WpType::I64,
5461 WpType::I64,
5462 WpType::I64,
5463 ]
5464 .iter()
5465 .cloned(),
5466 NativeCallType::IncludeVMCtxArgument,
5467 )?;
5468
5469 self.release_locations_only_stack(&[dest, src, len])?;
5470 }
5471 Operator::ElemDrop { elem_index } => {
5472 self.machine.move_location(
5473 Size::S64,
5474 Location::Memory(
5475 self.machine.get_vmctx_reg(),
5476 self.vmoffsets
5477 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_elem_drop_index())
5478 as i32,
5479 ),
5480 Location::GPR(self.machine.get_grp_for_call()),
5481 )?;
5482
5483 self.emit_call_native(
5484 |this| {
5485 this.machine
5486 .emit_call_register(this.machine.get_grp_for_call())
5487 },
5488 [Location::Imm32(elem_index)].iter().cloned(),
5490 [WpType::I32].iter().cloned(),
5491 NativeCallType::IncludeVMCtxArgument,
5492 )?;
5493 }
5494 Operator::MemoryAtomicWait32 { ref memarg } => {
5495 let timeout = self.value_stack.pop().unwrap();
5496 let val = self.value_stack.pop().unwrap();
5497 let dst = self.value_stack.pop().unwrap();
5498 self.release_locations_only_regs(&[timeout, val, dst])?;
5499
5500 let memory_index = MemoryIndex::new(memarg.memory as usize);
5501 let (memory_atomic_wait32, memory_index) =
5502 if self.module.local_memory_index(memory_index).is_some() {
5503 (
5504 VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
5505 memory_index,
5506 )
5507 } else {
5508 (
5509 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
5510 memory_index,
5511 )
5512 };
5513
5514 self.machine.move_location(
5515 Size::S64,
5516 Location::Memory(
5517 self.machine.get_vmctx_reg(),
5518 self.vmoffsets.vmctx_builtin_function(memory_atomic_wait32) as i32,
5519 ),
5520 Location::GPR(self.machine.get_grp_for_call()),
5521 )?;
5522
5523 self.emit_call_native(
5524 |this| {
5525 this.machine
5526 .emit_call_register(this.machine.get_grp_for_call())
5527 },
5528 [
5530 Location::Imm32(memory_index.index() as u32),
5531 dst.0,
5532 val.0,
5533 timeout.0,
5534 ]
5535 .iter()
5536 .cloned(),
5537 [WpType::I32, WpType::I32, WpType::I32, WpType::I64]
5538 .iter()
5539 .cloned(),
5540 NativeCallType::IncludeVMCtxArgument,
5541 )?;
5542 self.release_locations_only_stack(&[dst, val, timeout])?;
5543 let ret = self.acquire_location(&WpType::I32)?;
5544 self.value_stack.push((ret, CanonicalizeType::None));
5545 self.machine.move_location(
5546 Size::S32,
5547 Location::GPR(self.machine.get_gpr_for_ret()),
5548 ret,
5549 )?;
5550 }
5551 Operator::MemoryAtomicWait64 { ref memarg } => {
5552 let timeout = self.value_stack.pop().unwrap();
5553 let val = self.value_stack.pop().unwrap();
5554 let dst = self.value_stack.pop().unwrap();
5555 self.release_locations_only_regs(&[timeout, val, dst])?;
5556
5557 let memory_index = MemoryIndex::new(memarg.memory as usize);
5558 let (memory_atomic_wait64, memory_index) =
5559 if self.module.local_memory_index(memory_index).is_some() {
5560 (
5561 VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
5562 memory_index,
5563 )
5564 } else {
5565 (
5566 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
5567 memory_index,
5568 )
5569 };
5570
5571 self.machine.move_location(
5572 Size::S64,
5573 Location::Memory(
5574 self.machine.get_vmctx_reg(),
5575 self.vmoffsets.vmctx_builtin_function(memory_atomic_wait64) as i32,
5576 ),
5577 Location::GPR(self.machine.get_grp_for_call()),
5578 )?;
5579
5580 self.emit_call_native(
5581 |this| {
5582 this.machine
5583 .emit_call_register(this.machine.get_grp_for_call())
5584 },
5585 [
5587 Location::Imm32(memory_index.index() as u32),
5588 dst.0,
5589 val.0,
5590 timeout.0,
5591 ]
5592 .iter()
5593 .cloned(),
5594 [WpType::I32, WpType::I32, WpType::I64, WpType::I64]
5595 .iter()
5596 .cloned(),
5597 NativeCallType::IncludeVMCtxArgument,
5598 )?;
5599 self.release_locations_only_stack(&[dst, val, timeout])?;
5600 let ret = self.acquire_location(&WpType::I32)?;
5601 self.value_stack.push((ret, CanonicalizeType::None));
5602 self.machine.move_location(
5603 Size::S32,
5604 Location::GPR(self.machine.get_gpr_for_ret()),
5605 ret,
5606 )?;
5607 }
5608 Operator::MemoryAtomicNotify { ref memarg } => {
5609 let cnt = self.value_stack.pop().unwrap();
5610 let dst = self.value_stack.pop().unwrap();
5611 self.release_locations_only_regs(&[cnt, dst])?;
5612
5613 let memory_index = MemoryIndex::new(memarg.memory as usize);
5614 let (memory_atomic_notify, memory_index) =
5615 if self.module.local_memory_index(memory_index).is_some() {
5616 (
5617 VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
5618 memory_index,
5619 )
5620 } else {
5621 (
5622 VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
5623 memory_index,
5624 )
5625 };
5626
5627 self.machine.move_location(
5628 Size::S64,
5629 Location::Memory(
5630 self.machine.get_vmctx_reg(),
5631 self.vmoffsets.vmctx_builtin_function(memory_atomic_notify) as i32,
5632 ),
5633 Location::GPR(self.machine.get_grp_for_call()),
5634 )?;
5635
5636 self.emit_call_native(
5637 |this| {
5638 this.machine
5639 .emit_call_register(this.machine.get_grp_for_call())
5640 },
5641 [Location::Imm32(memory_index.index() as u32), dst.0]
5643 .iter()
5644 .cloned(),
5645 [WpType::I32, WpType::I32].iter().cloned(),
5646 NativeCallType::IncludeVMCtxArgument,
5647 )?;
5648 self.release_locations_only_stack(&[dst, cnt])?;
5649 let ret = self.acquire_location(&WpType::I32)?;
5650 self.value_stack.push((ret, CanonicalizeType::None));
5651 self.machine.move_location(
5652 Size::S32,
5653 Location::GPR(self.machine.get_gpr_for_ret()),
5654 ret,
5655 )?;
5656 }
5657 _ => {
5658 return Err(CompileError::Codegen(format!(
5659 "not yet implemented: {op:?}"
5660 )));
5661 }
5662 }
5663
5664 Ok(())
5665 }
5666
    /// Finishes code generation for this function.
    ///
    /// Emits the shared trap landing pads for the special labels, finalizes
    /// the machine body, optionally gathers platform unwind information
    /// (only with the `unwind` feature), and packages everything into a
    /// [`CompiledFunction`] plus an optional DWARF FDE.
    ///
    /// # Errors
    /// Propagates any [`CompileError`] raised by the underlying machine while
    /// emitting labels/trap opcodes or finalizing the assembler.
    pub fn finalize(
        mut self,
        data: &FunctionBodyData,
    ) -> Result<(CompiledFunction, Option<UnwindFrame>), CompileError> {
        // Emit one trap stub per special label. Runtime checks generated in
        // the function body branch to these labels; each stub raises the
        // corresponding trap code via an illegal-op instruction.
        self.machine
            .emit_label(self.special_labels.integer_division_by_zero)?;
        self.machine
            .emit_illegal_op(TrapCode::IntegerDivisionByZero)?;

        self.machine
            .emit_label(self.special_labels.integer_overflow)?;
        self.machine.emit_illegal_op(TrapCode::IntegerOverflow)?;

        self.machine
            .emit_label(self.special_labels.heap_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::HeapAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.table_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::TableAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.indirect_call_null)?;
        self.machine.emit_illegal_op(TrapCode::IndirectCallToNull)?;

        self.machine.emit_label(self.special_labels.bad_signature)?;
        self.machine.emit_illegal_op(TrapCode::BadSignature)?;

        self.machine
            .emit_label(self.special_labels.unaligned_atomic)?;
        self.machine.emit_illegal_op(TrapCode::UnalignedAtomic)?;

        self.machine.finalize_function()?;

        // Total length of the emitted machine code, measured after the trap
        // stubs so they are covered by the unwind info and address map below.
        let body_len = self.machine.assembler_get_offset().0;

        // `mut` is only exercised inside the `#[cfg(feature = "unwind")]`
        // match below, hence the conditional allow(unused_mut).
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut unwind_info = None;
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut fde = None;
        // Generate unwind information appropriate for the target calling
        // convention: DWARF (SystemV / Apple AArch64) or Windows x64.
        #[cfg(feature = "unwind")]
        match self.calling_convention {
            CallingConvention::SystemV | CallingConvention::AppleAarch64 => {
                let unwind = self.machine.gen_dwarf_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    // The FDE is addressed relative to a placeholder function
                    // symbol; the addend carries this function's local index
                    // so the final address can be resolved at link time.
                    fde = Some(unwind.to_fde(Address::Symbol {
                        symbol: WriterRelocate::FUNCTION_SYMBOL,
                        addend: self.local_func_index.index() as _,
                    }));
                    unwind_info = Some(CompiledFunctionUnwindInfo::Dwarf);
                }
            }
            CallingConvention::WindowsFastcall => {
                let unwind = self.machine.gen_windows_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    unwind_info = Some(CompiledFunctionUnwindInfo::WindowsX64(unwind));
                }
            }
            // Other calling conventions: no unwind info is produced.
            _ => (),
        };

        // Map instruction offsets back to wasm source offsets for debugging
        // and trap reporting, then collect recorded trap sites.
        let address_map =
            get_function_address_map(self.machine.instructions_address_map(), data, body_len);
        let traps = self.machine.collect_trap_information();
        let mut body = self.machine.assembler_finalize()?;
        body.shrink_to_fit();

        Ok((
            CompiledFunction {
                body: FunctionBody { body, unwind_info },
                relocations: self.relocations.clone(),
                frame_info: CompiledFunctionFrameInfo { traps, address_map },
            },
            fde,
        ))
    }
5747 #[allow(clippy::type_complexity)]
5750 fn sort_call_movs(movs: &mut [(Location<M::GPR, M::SIMD>, M::GPR)]) {
5751 for i in 0..movs.len() {
5752 for j in (i + 1)..movs.len() {
5753 if let Location::GPR(src_gpr) = movs[j].0
5754 && src_gpr == movs[i].1
5755 {
5756 movs.swap(i, j);
5757 }
5758 }
5759 }
5760 }
5761
5762 }