1#[cfg(feature = "unwind")]
2use crate::dwarf::WriterRelocate;
3
4use crate::{
5 address_map::get_function_address_map,
6 codegen_error,
7 common_decl::*,
8 config::Singlepass,
9 location::{Location, Reg},
10 machine::{
11 AssemblyComment, FinalizedAssembly, Label, Machine, NATIVE_PAGE_SIZE, UnsignedCondition,
12 },
13 unwind::UnwindFrame,
14};
15#[cfg(feature = "unwind")]
16use gimli::write::Address;
17use itertools::Itertools;
18use smallvec::{SmallVec, smallvec};
19use std::{cmp, collections::HashMap, iter, ops::Neg};
20use target_lexicon::Architecture;
21
22use wasmer_compiler::{
23 FunctionBodyData,
24 misc::CompiledKind,
25 types::{
26 function::{CompiledFunction, CompiledFunctionFrameInfo, FunctionBody},
27 relocation::{Relocation, RelocationTarget},
28 section::SectionIndex,
29 },
30 wasmparser::{
31 BlockType as WpTypeOrFuncType, HeapType as WpHeapType, Operator, RefType as WpRefType,
32 ValType as WpType,
33 },
34};
35
36#[cfg(feature = "unwind")]
37use wasmer_compiler::types::unwind::CompiledFunctionUnwindInfo;
38
39use wasmer_types::target::CallingConvention;
40use wasmer_types::{
41 CompileError, FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, LocalMemoryIndex,
42 MemoryIndex, MemoryStyle, ModuleInfo, SignatureIndex, TableIndex, TableStyle, TrapCode, Type,
43 VMBuiltinFunctionIndex, VMOffsets,
44 entity::{EntityRef, PrimaryMap},
45};
46
/// A value's machine location paired with the NaN-canonicalization still
/// pending on it; this is the element type of the abstract value stack.
#[allow(type_alias_bounds)]
type LocationWithCanonicalization<M: Machine> = (Location<M::GPR, M::SIMD>, CanonicalizeType);
49
/// Single-pass code generator for one WebAssembly function.
///
/// Feeds wasm operators straight into the `Machine` backend while tracking
/// an abstract value stack, the structured-control frame stack, and the
/// register/stack allocation state.
pub struct FuncGen<'a, M: Machine> {
    /// Module-level metadata (signatures, globals, memories, names, ...).
    module: &'a ModuleInfo,
    /// Compiler configuration (e.g. `enable_nan_canonicalization`).
    config: &'a Singlepass,
    /// Offsets of fields inside the VM context structure.
    vmoffsets: &'a VMOffsets,
    /// Per-memory style (static vs. dynamic), used to decide bounds checks.
    memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
    /// Signature of the function being compiled.
    signature: FunctionType,
    /// Machine location of every wasm local (filled in by `init_locals`).
    locals: Vec<Location<M::GPR, M::SIMD>>,
    /// Types of all locals: function parameters first, then declared locals.
    local_types: Vec<WpType>,
    /// Abstract wasm value stack; each entry carries its pending
    /// NaN-canonicalization marker.
    value_stack: Vec<LocationWithCanonicalization<M>>,
    /// Stack of open structured-control frames (function/block/loop/if).
    control_stack: Vec<ControlFrame<M>>,
    /// Size in bytes of the dynamic stack area currently allocated
    /// (grows/shrinks in 8-byte slots).
    stack_offset: usize,
    /// `stack_offset` at which the callee-save area ends (set by
    /// `init_locals`, consumed by `finalize_locals`).
    save_area_offset: Option<usize>,
    /// Architecture-specific backend that actually emits instructions.
    machine: M,
    /// Nesting depth of control constructs entered while the code is
    /// statically unreachable; operators are skipped while it is > 0.
    unreachable_depth: usize,
    /// Index of this function among the module's locally-defined functions.
    local_func_index: LocalFunctionIndex,
    /// Relocations recorded while emitting this function.
    relocations: Vec<Relocation>,
    /// Labels of the per-function trap stubs (division by zero, OOB, ...).
    special_labels: SpecialLabelSet,
    /// Calling convention used for this function and its callees.
    calling_convention: CallingConvention,
    /// Human-readable function name (from the name section, or synthesized).
    function_name: String,
    /// Assembly annotations keyed by code offset — presumably consumed when
    /// rendering disassembly; `add_assembly_comment` is defined elsewhere.
    assembly_comments: HashMap<usize, AssemblyComment>,
}
112
/// Labels allocated once per function (in `FuncGen::new`) and used as branch
/// targets by inline runtime checks — e.g. the division emitters jump to
/// `integer_division_by_zero`, and `op_memory` hands out `heap_access_oob`
/// and `unaligned_atomic`.
struct SpecialLabelSet {
    integer_division_by_zero: Label,
    integer_overflow: Label,
    heap_access_oob: Label,
    table_access_oob: Label,
    indirect_call_null: Label,
    bad_signature: Label,
    unaligned_atomic: Label,
}
122
/// Pending NaN-canonicalization attached to a value-stack entry.
///
/// When `enable_nan_canonicalization` is set, float results are not
/// canonicalized eagerly; instead the marker travels with the value and the
/// canonicalization happens where the value becomes observable (global/local
/// stores, return values).
#[derive(Copy, Clone, Debug)]
pub(crate) enum CanonicalizeType {
    /// No canonicalization needed.
    None,
    /// f32 value that may hold a non-canonical NaN.
    F32,
    /// f64 value that may hold a non-canonical NaN.
    F64,
}
131
132impl CanonicalizeType {
133 fn to_size(self) -> Option<Size> {
134 match self {
135 CanonicalizeType::F32 => Some(Size::S32),
136 CanonicalizeType::F64 => Some(Size::S64),
137 CanonicalizeType::None => None,
138 }
139 }
140
141 fn promote(self) -> Result<Self, CompileError> {
142 match self {
143 CanonicalizeType::None => Ok(CanonicalizeType::None),
144 CanonicalizeType::F32 => Ok(CanonicalizeType::F64),
145 CanonicalizeType::F64 => codegen_error!("cannot promote F64"),
146 }
147 }
148
149 fn demote(self) -> Result<Self, CompileError> {
150 match self {
151 CanonicalizeType::None => Ok(CanonicalizeType::None),
152 CanonicalizeType::F32 => codegen_error!("cannot demote F64"),
153 CanonicalizeType::F64 => Ok(CanonicalizeType::F32),
154 }
155 }
156}
157
/// Small convenience extension over wasmparser's `ValType`.
trait WpTypeExt {
    /// True for the floating-point value types (F32/F64).
    fn is_float(&self) -> bool;
}
161
impl WpTypeExt for WpType {
    fn is_float(&self) -> bool {
        // V128 is intentionally not considered a float type here.
        matches!(self, WpType::F32 | WpType::F64)
    }
}
167
/// Kind of an open control-flow frame, mirroring wasm's structured-control
/// constructs.
#[derive(Clone)]
pub enum ControlState<M: Machine> {
    /// The implicit outermost frame of the function body.
    Function,
    /// A `block` construct.
    Block,
    /// A `loop` construct.
    Loop,
    /// An `if` whose `else`/`end` has not been reached yet.
    If {
        /// Label at which the else arm (or the synthesized empty else) starts.
        label_else: Label,
        /// The if's input values — presumably re-pushed when entering the
        /// else arm; confirm against the `Operator::Else` handling.
        inputs: SmallVec<[LocationWithCanonicalization<M>; 1]>,
    },
    /// The `else` arm of an `if`.
    Else,
}
181
/// One entry of the structured-control stack.
#[derive(Clone)]
struct ControlFrame<M: Machine> {
    /// Which construct this frame represents (plus per-kind payload).
    pub state: ControlState<M>,
    /// Branch-target label associated with this frame.
    pub label: Label,
    /// Parameter types of the block.
    pub param_types: SmallVec<[WpType; 8]>,
    /// Result types of the block.
    pub return_types: SmallVec<[WpType; 1]>,
    /// Value-stack depth at frame entry, parameters included.
    value_stack_depth: usize,
}
191
impl<M: Machine> ControlFrame<M> {
    /// Value-stack depth expected once this frame has ended: the entry depth
    /// minus the parameters the block consumed.
    fn value_stack_depth_after(&self) -> usize {
        let mut depth: usize = self.value_stack_depth - self.param_types.len();

        // NOTE(review): for `Loop` frames the parameter count is subtracted a
        // second time — presumably because loops keep an extra copy of their
        // parameters for the back-edge (see `emit_loop_params_store`);
        // confirm against the `Operator::Loop` handling.
        if matches!(self.state, ControlState::Loop) {
            depth -= self.param_types.len();
        }

        depth
    }

    /// Depth down to which value-stack entries must be released when this
    /// frame is abandoned (entry depth minus consumed parameters).
    fn value_stack_depth_for_release(&self) -> usize {
        self.value_stack_depth - self.param_types.len()
    }
}
211
/// Maps a wasmer `Type` to the equivalent wasmparser value type.
/// Reference types become nullable `extern`/`func` references.
fn type_to_wp_type(ty: &Type) -> WpType {
    match ty {
        Type::I32 => WpType::I32,
        Type::I64 => WpType::I64,
        Type::F32 => WpType::F32,
        Type::F64 => WpType::F64,
        Type::V128 => WpType::V128,
        Type::ExternRef => WpType::Ref(WpRefType::new(true, WpHeapType::EXTERN).unwrap()),
        Type::FuncRef => WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap()),
        // Exception references are not supported by this backend yet.
        Type::ExceptionRef => todo!(),
    }
}
224
/// Operand bundle for a two-input/one-output operation: the two popped
/// inputs (`loc_a`, `loc_b`) and the freshly acquired result location `ret`.
struct I2O1<R: Reg, S: Reg> {
    loc_a: Location<R, S>,
    loc_b: Location<R, S>,
    ret: Location<R, S>,
}
232
/// How a native (host) call should be emitted by `emit_call_native`.
enum NativeCallType {
    /// Pass the VM context pointer as the implicit first argument
    /// (subsequent parameters shift by one).
    IncludeVMCtxArgument,
    /// No vmctx argument; the call site's address range is marked with
    /// `TrapCode::UnreachableCodeReached`.
    Unreachable,
}
238
239impl<'a, M: Machine> FuncGen<'a, M> {
240 fn acquire_location(&mut self, ty: &WpType) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
245 let loc = match *ty {
246 WpType::F32 | WpType::F64 => self.machine.pick_simd().map(Location::SIMD),
247 WpType::I32 | WpType::I64 => self.machine.pick_gpr().map(Location::GPR),
248 WpType::Ref(ty) if ty.is_extern_ref() || ty.is_func_ref() => {
249 self.machine.pick_gpr().map(Location::GPR)
250 }
251 _ => codegen_error!("can't acquire location for type {:?}", ty),
252 };
253
254 let Some(loc) = loc else {
255 return self.acquire_location_on_stack();
256 };
257
258 if let Location::GPR(x) = loc {
259 self.machine.reserve_gpr(x);
260 } else if let Location::SIMD(x) = loc {
261 self.machine.reserve_simd(x);
262 }
263 Ok(loc)
264 }
265
    /// Allocates a fresh 8-byte slot in the dynamic stack area and returns
    /// its location relative to the local pointer. Bumps `stack_offset` and
    /// grows the machine stack by one (rounded) slot.
    fn acquire_location_on_stack(&mut self) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
        self.stack_offset += 8;
        let loc = self.machine.local_on_stack(self.stack_offset as i32);
        self.machine
            .extend_stack(self.machine.round_stack_adjust(8) as u32)?;

        Ok(loc)
    }
275
276 fn release_locations(
278 &mut self,
279 locs: &[LocationWithCanonicalization<M>],
280 ) -> Result<(), CompileError> {
281 self.release_stack_locations(locs)?;
282 self.release_reg_locations(locs)
283 }
284
285 fn release_reg_locations(
286 &mut self,
287 locs: &[LocationWithCanonicalization<M>],
288 ) -> Result<(), CompileError> {
289 for (loc, _) in locs.iter().rev() {
290 match *loc {
291 Location::GPR(ref x) => {
292 self.machine.release_gpr(*x);
293 }
294 Location::SIMD(ref x) => {
295 self.machine.release_simd(*x);
296 }
297 _ => {}
298 }
299 }
300 Ok(())
301 }
302
    /// Frees the dynamic stack slots referenced by `locs`, newest first.
    ///
    /// Slots must be released in strict LIFO order; each memory location is
    /// checked to sit exactly at the current top of the dynamic stack area
    /// before `stack_offset` shrinks and the machine stack is truncated.
    fn release_stack_locations(
        &mut self,
        locs: &[LocationWithCanonicalization<M>],
    ) -> Result<(), CompileError> {
        for (loc, _) in locs.iter().rev() {
            if let Location::Memory(..) = *loc {
                self.check_location_on_stack(loc, self.stack_offset)?;
                self.stack_offset -= 8;
                self.machine
                    .truncate_stack(self.machine.round_stack_adjust(8) as u32)?;
            }
        }

        Ok(())
    }
318
    /// Truncates the machine stack for every stack slot above `stack_depth`
    /// on the value stack WITHOUT updating `self.stack_offset` or popping the
    /// value stack.
    ///
    /// NOTE(review): callers appear to use this when emitting a branch out of
    /// a frame while the values must conceptually remain live for the
    /// fall-through path — confirm against the `br`/`br_if` handling.
    fn release_stack_locations_keep_stack_offset(
        &mut self,
        stack_depth: usize,
    ) -> Result<(), CompileError> {
        // Walk with a local copy so the generator's bookkeeping is untouched.
        let mut stack_offset = self.stack_offset;
        let locs = &self.value_stack[stack_depth..];

        for (loc, _) in locs.iter().rev() {
            if let Location::Memory(..) = *loc {
                self.check_location_on_stack(loc, stack_offset)?;
                stack_offset -= 8;
                self.machine
                    .truncate_stack(self.machine.round_stack_adjust(8) as u32)?;
            }
        }

        Ok(())
    }
337
338 fn check_location_on_stack(
339 &self,
340 loc: &Location<M::GPR, M::SIMD>,
341 expected_stack_offset: usize,
342 ) -> Result<(), CompileError> {
343 let Location::Memory(reg, offset) = loc else {
344 codegen_error!("Expected stack memory location");
345 };
346 if reg != &self.machine.local_pointer() {
347 codegen_error!("Expected location pointer for value on stack");
348 }
349 if *offset >= 0 {
350 codegen_error!("Invalid memory offset {offset}");
351 }
352 let offset = offset.neg() as usize;
353 if offset != expected_stack_offset {
354 codegen_error!("Invalid memory offset {offset}!={}", self.stack_offset);
355 }
356 Ok(())
357 }
358
    /// Inserts `return_slots` fresh stack slots *underneath* the top
    /// `stack_slots` entries of the value stack.
    ///
    /// The top `stack_slots` entries are drained, `return_slots` new memory
    /// slots are pushed first (so they end up below), and the drained values
    /// are then re-pushed on top. Values that live in memory are moved into
    /// the deepest memory slots available so the new return slots and the
    /// re-pushed values never alias; register values are kept as-is.
    fn allocate_return_slots_and_swap(
        &mut self,
        stack_slots: usize,
        return_slots: usize,
    ) -> Result<(), CompileError> {
        if return_slots == 0 {
            return Ok(());
        }

        // Take the top `stack_slots` entries off the value stack.
        let latest_slots = self
            .value_stack
            .drain(self.value_stack.len() - stack_slots..)
            .collect_vec();
        // Allocate the additional stack slots needed for the return values.
        let extra_slots = (0..return_slots)
            .map(|_| self.acquire_location_on_stack())
            .collect::<Result<Vec<_>, _>>()?;

        // Pool of memory slots, deepest first: the drained entries' memory
        // locations followed by the freshly allocated ones.
        let mut all_memory_slots = latest_slots
            .iter()
            .filter_map(|(loc, _)| {
                if let Location::Memory(..) = loc {
                    Some(loc)
                } else {
                    None
                }
            })
            .chain(extra_slots.iter())
            .collect_vec();

        // The deepest `return_slots` memory slots become the return slots.
        self.value_stack.extend(
            all_memory_slots
                .iter()
                .take(return_slots)
                .map(|loc| (**loc, CanonicalizeType::None)),
        );

        // Re-map the drained values: memory values move (deepest value last)
        // into the remaining pool slots, popped from the shallow end.
        let mut new_params_reversed = Vec::new();
        for (loc, canonicalize) in latest_slots.iter().rev() {
            let mapped_loc = if matches!(loc, Location::Memory(..)) {
                let dest = all_memory_slots.pop().unwrap();
                self.machine.emit_relaxed_mov(Size::S64, *loc, *dest)?;
                *dest
            } else {
                *loc
            };
            new_params_reversed.push((mapped_loc, *canonicalize));
        }
        self.value_stack
            .extend(new_params_reversed.into_iter().rev());

        Ok(())
    }
424
    /// Sets up the function frame: decides where each of the `n` wasm locals
    /// lives, saves callee-saved registers and the vmctx pointer, copies the
    /// incoming call parameters into their local slots, and zero-initializes
    /// the declared (non-parameter) locals.
    ///
    /// Returns the location assigned to each local, indexed by local number.
    #[allow(clippy::type_complexity)]
    fn init_locals(
        &mut self,
        n: usize,
        sig: FunctionType,
        calling_convention: CallingConvention,
    ) -> Result<Vec<Location<M::GPR, M::SIMD>>, CompileError> {
        self.add_assembly_comment(AssemblyComment::InitializeLocals);

        // Locals the backend cannot keep in registers.
        let num_mem_slots = (0..n)
            .filter(|&x| self.machine.is_local_on_stack(x))
            .count();

        // Static area layout: one save slot per register-allocated local...
        let mut static_area_size: usize = 0;

        for i in 0..n {
            if !self.machine.is_local_on_stack(i) {
                static_area_size += 8;
            }
        }

        // ...plus one slot for the vmctx register...
        static_area_size += 8;

        // ...plus one slot per callee-saved register.
        static_area_size += 8 * self.machine.list_to_save(calling_convention).len();

        let callee_saved_regs_size = static_area_size;

        // Assign every local its location, below the callee-save area.
        let locations: Vec<Location<M::GPR, M::SIMD>> = (0..n)
            .map(|i| self.machine.get_local_location(i, callee_saved_regs_size))
            .collect();

        // Account for the memory-resident locals and round to alignment.
        static_area_size += num_mem_slots * 8;

        static_area_size = self.machine.round_stack_adjust(static_area_size);

        // Touch one local per native page across the declared-locals range —
        // presumably a stack probe to commit the pages before the big
        // `extend_stack` below; confirm against the backend's stack handling.
        for i in (sig.params().len()..n)
            .step_by(NATIVE_PAGE_SIZE / 8)
            .skip(1)
        {
            self.machine.zero_location(Size::S64, locations[i])?;
        }

        self.machine.extend_stack(static_area_size as _)?;

        // Spill the current contents of register-allocated locals' registers
        // into their save slots.
        for loc in locations.iter() {
            if let Location::GPR(_) = *loc {
                self.stack_offset += 8;
                self.machine.move_local(self.stack_offset as i32, *loc)?;
            }
        }

        // Save the vmctx register.
        self.stack_offset += 8;
        self.machine.move_local(
            self.stack_offset as i32,
            Location::GPR(self.machine.get_vmctx_reg()),
        )?;

        // Save the calling convention's callee-saved registers.
        let regs_to_save = self.machine.list_to_save(calling_convention);
        for loc in regs_to_save.iter() {
            self.stack_offset += 8;
            self.machine.move_local(self.stack_offset as i32, *loc)?;
        }

        // Remember where the save area ends so `finalize_locals` can restore.
        self.save_area_offset = Some(self.stack_offset);

        // Copy incoming parameters into their local slots, sign/zero-extended
        // to 64 bits. Argument 0 is the vmctx pointer, hence `i + 1`.
        let mut stack_offset: usize = 0;
        for (i, param) in sig.params().iter().enumerate() {
            let sz = match *param {
                Type::I32 | Type::F32 => Size::S32,
                Type::I64 | Type::F64 => Size::S64,
                Type::ExternRef | Type::FuncRef => Size::S64,
                _ => {
                    codegen_error!("singlepass init_local unimplemented type: {param}")
                }
            };
            let loc = self.machine.get_call_param_location(
                sig.results().len(),
                i + 1,
                sz,
                &mut stack_offset,
                calling_convention,
            );
            self.machine
                .move_location_extend(sz, false, loc, Size::S64, locations[i])?;
        }

        // Load the vmctx register from the first (implicit) argument.
        self.machine.move_location(
            Size::S64,
            Location::GPR(
                self.machine
                    .get_simple_param_location(0, calling_convention),
            ),
            Location::GPR(self.machine.get_vmctx_reg()),
        )?;

        // Zero-initialize declared locals: register locals directly, memory
        // locals in one bulk `init_stack_loc` starting at the lowest slot.
        let mut init_stack_loc_cnt = 0;
        let mut last_stack_loc = Location::Memory(self.machine.local_pointer(), i32::MAX);
        for location in locations.iter().take(n).skip(sig.params().len()) {
            match location {
                Location::Memory(_, _) => {
                    init_stack_loc_cnt += 1;
                    last_stack_loc = cmp::min(last_stack_loc, *location);
                }
                Location::GPR(_) => {
                    self.machine.zero_location(Size::S64, *location)?;
                }
                _ => codegen_error!("singlepass init_local unreachable"),
            }
        }
        if init_stack_loc_cnt > 0 {
            self.machine
                .init_stack_loc(init_stack_loc_cnt, last_stack_loc)?;
        }

        // Account for the memory-local area in the running stack offset.
        self.stack_offset += static_area_size - callee_saved_regs_size;

        Ok(locations)
    }
569
    /// Tears down the frame built by `init_locals`: rewinds the stack pointer
    /// to the save area and pops everything back in reverse order of saving —
    /// callee-saved registers, the vmctx register, then the registers that
    /// back register-allocated locals.
    fn finalize_locals(
        &mut self,
        calling_convention: CallingConvention,
    ) -> Result<(), CompileError> {
        self.machine
            .restore_saved_area(self.save_area_offset.unwrap() as i32)?;

        let regs_to_save = self.machine.list_to_save(calling_convention);
        for loc in regs_to_save.iter().rev() {
            self.machine.pop_location(*loc)?;
        }

        self.machine
            .pop_location(Location::GPR(self.machine.get_vmctx_reg()))?;

        for loc in self.locals.iter().rev() {
            if let Location::GPR(_) = *loc {
                self.machine.pop_location(*loc)?;
            }
        }
        Ok(())
    }
595
    /// Records the current wasm source offset on the backend (used for
    /// address maps / trap locations of subsequently emitted code).
    pub fn set_srcloc(&mut self, offset: u32) {
        self.machine.set_srcloc(offset);
    }
600
601 fn get_location_released(
602 &mut self,
603 loc: (Location<M::GPR, M::SIMD>, CanonicalizeType),
604 ) -> Result<LocationWithCanonicalization<M>, CompileError> {
605 self.release_locations(&[loc])?;
606 Ok(loc)
607 }
608
609 fn pop_value_released(&mut self) -> Result<LocationWithCanonicalization<M>, CompileError> {
610 let loc = self.value_stack.pop().ok_or_else(|| {
611 CompileError::Codegen("pop_value_released: value stack is empty".to_owned())
612 })?;
613 self.get_location_released(loc)?;
614 Ok(loc)
615 }
616
    /// Prepares a binary operation: pops the two operands (b is on top),
    /// acquires a result location of type `ty`, and pushes it onto the value
    /// stack tagged with `canonicalize`.
    fn i2o1_prepare(
        &mut self,
        ty: WpType,
        canonicalize: CanonicalizeType,
    ) -> Result<I2O1<M::GPR, M::SIMD>, CompileError> {
        let loc_b = self.pop_value_released()?.0;
        let loc_a = self.pop_value_released()?.0;
        let ret = self.acquire_location(&ty)?;
        self.value_stack.push((ret, canonicalize));
        Ok(I2O1 { loc_a, loc_b, ret })
    }
629
    /// Emits a call following the native calling convention.
    ///
    /// `cb` emits the actual call instruction; `params` are the argument
    /// locations already on the value stack, `params_type`/`return_types`
    /// their wasm types, and `call_type` controls whether the vmctx pointer
    /// is passed as the implicit first argument.
    ///
    /// Stack slots that held parameters are recycled as return-value slots
    /// where possible; the return values are pushed onto the value stack.
    fn emit_call_native<
        I: Iterator<Item = (Location<M::GPR, M::SIMD>, CanonicalizeType)>,
        J: Iterator<Item = WpType>,
        K: Iterator<Item = WpType>,
        F: FnOnce(&mut Self) -> Result<(), CompileError>,
    >(
        &mut self,
        cb: F,
        params: I,
        params_type: J,
        return_types: K,
        call_type: NativeCallType,
    ) -> Result<(), CompileError> {
        let params = params.collect_vec();
        // Parameters living in dynamic stack slots, in stack order.
        let stack_params = params
            .iter()
            .copied()
            .filter(|(param, _)| {
                if let Location::Memory(reg, _) = param {
                    debug_assert_eq!(reg, &self.machine.local_pointer());
                    true
                } else {
                    false
                }
            })
            .collect_vec();
        let get_size = |param_type: WpType| match param_type {
            WpType::F32 | WpType::I32 => Size::S32,
            WpType::V128 => unimplemented!(),
            _ => Size::S64,
        };
        let param_sizes = params_type.map(get_size).collect_vec();
        let return_value_sizes = return_types.map(get_size).collect_vec();

        // Reuse parameter stack slots for return values; allocate extra
        // slots only if there are more return values than reusable slots.
        let used_stack_params = stack_params
            .iter()
            .take(return_value_sizes.len())
            .copied()
            .collect_vec();
        let mut return_values = used_stack_params.clone();
        let extra_return_values = (0..return_value_sizes.len().saturating_sub(stack_params.len()))
            .map(|_| -> Result<_, CompileError> {
                Ok((self.acquire_location_on_stack()?, CanonicalizeType::None))
            })
            .collect::<Result<Vec<_>, _>>()?;
        return_values.extend(extra_return_values);

        // Registers held by the arguments are no longer needed after the
        // values are moved into ABI positions below.
        self.release_reg_locations(&params)?;

        // Save caller-saved registers that are currently live.
        let used_gprs = self.machine.get_used_gprs();
        let mut used_stack = self.machine.push_used_gpr(&used_gprs)?;

        let used_simds = self.machine.get_used_simd();
        if !used_simds.is_empty() {
            used_stack += self.machine.push_used_simd(&used_simds)?;
        }
        // Keep the register used for the indirect call out of the allocator.
        self.machine
            .reserve_unused_temp_gpr(self.machine.get_gpr_for_call());

        let calling_convention = self.calling_convention;

        // Windows fastcall requires a 32-byte shadow space.
        let stack_padding: usize = match calling_convention {
            CallingConvention::WindowsFastcall => 32,
            _ => 0,
        };

        let mut stack_offset: usize = 0;
        let mut return_args = Vec::with_capacity(return_value_sizes.len());
        for i in 0..return_value_sizes.len() {
            return_args.push(self.machine.get_return_value_location(
                i,
                &mut stack_offset,
                self.calling_convention,
            ));
        }

        // ABI positions of the arguments; slot 0 is vmctx when included.
        let mut args = Vec::with_capacity(params.len());
        for (i, param_size) in param_sizes.iter().enumerate() {
            args.push(self.machine.get_param_location(
                match call_type {
                    NativeCallType::IncludeVMCtxArgument => 1,
                    NativeCallType::Unreachable => 0,
                } + i,
                *param_size,
                &mut stack_offset,
                calling_convention,
            ));
        }

        // Keep the stack 16-byte aligned at the call instruction.
        let stack_unaligned =
            (self.machine.round_stack_adjust(self.stack_offset) + used_stack + stack_offset) % 16;
        if stack_unaligned != 0 {
            stack_offset += 16 - stack_unaligned;
        }
        self.machine.extend_stack(stack_offset as u32)?;

        // Stage register arguments (deferred so they can be ordered safely);
        // memory arguments are moved into place immediately.
        #[allow(clippy::type_complexity)]
        let mut call_movs: Vec<(Location<M::GPR, M::SIMD>, M::GPR)> = vec![];
        for (i, (param, _)) in params.iter().enumerate().rev() {
            let loc = args[i];
            match loc {
                Location::GPR(x) => {
                    call_movs.push((*param, x));
                }
                Location::Memory(_, _) => {
                    self.machine
                        .move_location_for_native(param_sizes[i], *param, loc)?;
                }
                _ => {
                    return Err(CompileError::Codegen(
                        "emit_call_native loc: unreachable code".to_owned(),
                    ));
                }
            }
        }

        // Order the register moves so no destination is clobbered before it
        // is read (`sort_call_movs` is defined elsewhere in this file).
        Self::sort_call_movs(&mut call_movs);

        for (loc, gpr) in call_movs {
            if loc != Location::GPR(gpr) {
                self.machine
                    .move_location(Size::S64, loc, Location::GPR(gpr))?;
            }
        }

        // Pass the vmctx pointer as the implicit first argument.
        if matches!(call_type, NativeCallType::IncludeVMCtxArgument) {
            self.machine.move_location(
                Size::S64,
                Location::GPR(self.machine.get_vmctx_reg()),
                Location::GPR(
                    self.machine
                        .get_simple_param_location(0, calling_convention),
                ),
            )?;
        }

        if stack_padding > 0 {
            self.machine.extend_stack(stack_padding as u32)?;
        }
        self.machine.release_gpr(self.machine.get_gpr_for_call());

        // Emit the call itself; for `Unreachable` calls, tag the emitted
        // range so traps map to `UnreachableCodeReached`.
        let begin = self.machine.assembler_get_offset().0;
        cb(self)?;
        if matches!(call_type, NativeCallType::Unreachable) {
            let end = self.machine.assembler_get_offset().0;
            self.machine.mark_address_range_with_trap_code(
                TrapCode::UnreachableCodeReached,
                begin,
                end,
            );
        }

        // Copy the ABI return values into their value-stack slots.
        for (i, &return_type) in return_value_sizes.iter().enumerate() {
            self.machine.move_location_for_native(
                return_type,
                return_args[i],
                return_values[i].0,
            )?;
        }

        if stack_offset + stack_padding > 0 {
            self.machine
                .truncate_stack((stack_offset + stack_padding) as u32)?;
        }

        // Restore the caller-saved registers saved above.
        if !used_simds.is_empty() {
            self.machine.pop_used_simd(&used_simds)?;
        }

        self.machine.pop_used_gpr(&used_gprs)?;

        // Free the parameter stack slots that were NOT recycled as
        // return-value slots.
        let params_to_release =
            &stack_params[cmp::min(stack_params.len(), return_value_sizes.len())..];
        self.release_stack_locations(params_to_release)?;

        self.value_stack.extend(return_values);

        Ok(())
    }
832
    /// Resolves memory 0's metadata and invokes `cb` with:
    /// (need_bounds_check, is_imported, vmctx offset of the memory
    /// definition, heap-OOB trap label, unaligned-atomic trap label).
    fn op_memory<
        F: FnOnce(&mut Self, bool, bool, i32, Label, Label) -> Result<(), CompileError>,
    >(
        &mut self,
        cb: F,
    ) -> Result<(), CompileError> {
        // Static memories are guarded by guard pages; dynamic memories need
        // an explicit bounds check.
        let need_check = match self.memory_styles[MemoryIndex::new(0)] {
            MemoryStyle::Static { .. } => false,
            MemoryStyle::Dynamic { .. } => true,
        };

        // Imported memories are reached through an import record; local
        // memories have their definition inline in the vmctx.
        let offset = if self.module.num_imported_memories != 0 {
            self.vmoffsets
                .vmctx_vmmemory_import_definition(MemoryIndex::new(0))
        } else {
            self.vmoffsets
                .vmctx_vmmemory_definition(LocalMemoryIndex::new(0))
        };
        cb(
            self,
            need_check,
            self.module.num_imported_memories != 0,
            offset as i32,
            self.special_labels.heap_access_oob,
            self.special_labels.unaligned_atomic,
        )
    }
861
    /// Emits the function prologue: machine prolog, local initialization, a
    /// 32-byte red zone, placeholder value-stack entries for the function's
    /// results, the implicit function-level control frame, and the stack
    /// overflow check.
    fn emit_head(&mut self) -> Result<(), CompileError> {
        self.add_assembly_comment(AssemblyComment::FunctionPrologue);
        self.machine.emit_function_prolog()?;

        self.locals = self.init_locals(
            self.local_types.len(),
            self.signature.clone(),
            self.calling_convention,
        )?;

        // Reserve a small scratch area below the locals.
        self.add_assembly_comment(AssemblyComment::RedZone);
        self.machine.extend_stack(32)?;

        let return_types: SmallVec<_> = self
            .signature
            .results()
            .iter()
            .map(type_to_wp_type)
            .collect();

        // Pre-push one slot per result; the epilogue will find the return
        // values in these locations.
        self.value_stack.extend((0..return_types.len()).map(|i| {
            (
                self.machine
                    .get_call_return_value_location(i, self.calling_convention),
                CanonicalizeType::None,
            )
        }));

        // The whole body behaves like a block whose end label is the return.
        self.control_stack.push(ControlFrame {
            state: ControlState::Function,
            label: self.machine.get_label(),
            value_stack_depth: return_types.len(),
            param_types: smallvec![],
            return_types,
        });

        self.machine.insert_stackoverflow();
        self.add_assembly_comment(AssemblyComment::FunctionBody);

        Ok(())
    }
910
    /// Builds a generator for `local_func_index` and immediately emits the
    /// function prologue (`emit_head`), so the returned `FuncGen` is ready to
    /// receive operators via `feed_operator`.
    ///
    /// `local_types_excluding_arguments` lists only the declared locals; the
    /// parameter types are prepended from the function signature.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        module: &'a ModuleInfo,
        config: &'a Singlepass,
        vmoffsets: &'a VMOffsets,
        memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
        _table_styles: &'a PrimaryMap<TableIndex, TableStyle>,
        local_func_index: LocalFunctionIndex,
        local_types_excluding_arguments: &[WpType],
        machine: M,
        calling_convention: CallingConvention,
    ) -> Result<FuncGen<'a, M>, CompileError> {
        let func_index = module.func_index(local_func_index);
        let sig_index = module.functions[func_index];
        let signature = module.signatures[sig_index].clone();

        // Locals = parameters followed by declared locals.
        let mut local_types: Vec<_> = signature.params().iter().map(type_to_wp_type).collect();
        local_types.extend_from_slice(local_types_excluding_arguments);

        let mut machine = machine;
        // Allocate the shared trap-stub labels up front.
        let special_labels = SpecialLabelSet {
            integer_division_by_zero: machine.get_label(),
            integer_overflow: machine.get_label(),
            heap_access_oob: machine.get_label(),
            table_access_oob: machine.get_label(),
            indirect_call_null: machine.get_label(),
            bad_signature: machine.get_label(),
            unaligned_atomic: machine.get_label(),
        };
        // Prefer the name-section name; otherwise synthesize one.
        let function_name = module
            .function_names
            .get(&func_index)
            .map(|fname| fname.to_string())
            .unwrap_or_else(|| format!("function_{}", func_index.as_u32()));

        let mut fg = FuncGen {
            module,
            config,
            vmoffsets,
            memory_styles,
            signature,
            locals: vec![],
            local_types,
            value_stack: vec![],
            control_stack: vec![],
            stack_offset: 0,
            save_area_offset: None,
            machine,
            unreachable_depth: 0,
            local_func_index,
            relocations: vec![],
            special_labels,
            calling_convention,
            function_name,
            assembly_comments: HashMap::new(),
        };
        fg.emit_head()?;
        Ok(fg)
    }
971
972 pub fn has_control_frames(&self) -> bool {
973 !self.control_stack.is_empty()
974 }
975
    /// Copies the top `return_values` entries of the value stack into the
    /// result slots that sit just below depth `value_stack_depth_after`
    /// (topmost value goes to the highest result slot), applying NaN
    /// canonicalization where a value has a pending marker and the feature
    /// is enabled.
    fn emit_return_values(
        &mut self,
        value_stack_depth_after: usize,
        return_values: usize,
    ) -> Result<(), CompileError> {
        for (i, (stack_value, canonicalize)) in self
            .value_stack
            .iter()
            .rev()
            .take(return_values)
            .enumerate()
        {
            // i-th value from the top maps to the i-th result slot from the
            // high end of the result area.
            let dst = self.value_stack[value_stack_depth_after - i - 1].0;
            if let Some(canonicalize_size) = canonicalize.to_size()
                && self.config.enable_nan_canonicalization
            {
                self.machine
                    .canonicalize_nan(canonicalize_size, *stack_value, dst)?;
            } else {
                self.machine
                    .emit_relaxed_mov(Size::S64, *stack_value, dst)?;
            }
        }

        Ok(())
    }
1007
    /// Stores the top `param_count` values into the loop's parameter slots
    /// starting at `value_stack_depth_after`, preserving their order (the
    /// deepest of the top values goes to the first slot). Used when taking a
    /// loop back-edge so the loop header sees its parameters in fixed slots.
    fn emit_loop_params_store(
        &mut self,
        value_stack_depth_after: usize,
        param_count: usize,
    ) -> Result<(), CompileError> {
        // rev().take(param_count).rev() walks the top `param_count` entries
        // in their original (bottom-up) order.
        for (i, (stack_value, _)) in self
            .value_stack
            .iter()
            .rev()
            .take(param_count)
            .rev()
            .enumerate()
        {
            let dst = self.value_stack[value_stack_depth_after + i].0;
            self.machine
                .emit_relaxed_mov(Size::S64, *stack_value, dst)?;
        }

        Ok(())
    }
1030
1031 fn return_types_for_block(&self, block_type: WpTypeOrFuncType) -> SmallVec<[WpType; 1]> {
1032 match block_type {
1033 WpTypeOrFuncType::Empty => smallvec![],
1034 WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
1035 WpTypeOrFuncType::FuncType(sig_index) => SmallVec::from_iter(
1036 self.module.signatures[SignatureIndex::from_u32(sig_index)]
1037 .results()
1038 .iter()
1039 .map(type_to_wp_type),
1040 ),
1041 }
1042 }
1043
1044 fn param_types_for_block(&self, block_type: WpTypeOrFuncType) -> SmallVec<[WpType; 8]> {
1045 match block_type {
1046 WpTypeOrFuncType::Empty | WpTypeOrFuncType::Type(_) => smallvec![],
1047 WpTypeOrFuncType::FuncType(sig_index) => SmallVec::from_iter(
1048 self.module.signatures[SignatureIndex::from_u32(sig_index)]
1049 .params()
1050 .iter()
1051 .map(type_to_wp_type),
1052 ),
1053 }
1054 }
1055
1056 pub fn feed_operator(&mut self, op: Operator) -> Result<(), CompileError> {
1057 let was_unreachable;
1058
1059 if self.unreachable_depth > 0 {
1060 was_unreachable = true;
1061
1062 match op {
1063 Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } => {
1064 self.unreachable_depth += 1;
1065 }
1066 Operator::End => {
1067 self.unreachable_depth -= 1;
1068 }
1069 Operator::Else
1070 if self.unreachable_depth == 1
1071 && self.control_stack.last().is_some_and(|frame| {
1072 matches!(frame.state, ControlState::If { .. })
1073 }) =>
1074 {
1075 self.unreachable_depth -= 1;
1077 }
1078
1079 _ => {}
1080 }
1081 if self.unreachable_depth > 0 {
1082 return Ok(());
1083 }
1084 } else {
1085 was_unreachable = false;
1086 }
1087
1088 match op {
1089 Operator::GlobalGet { global_index } => {
1090 let global_index = GlobalIndex::from_u32(global_index);
1091
1092 let ty = type_to_wp_type(&self.module.globals[global_index].ty);
1093 let loc = self.acquire_location(&ty)?;
1094 self.value_stack.push((loc, CanonicalizeType::None));
1095
1096 let tmp = self.machine.acquire_temp_gpr().unwrap();
1097
1098 let src = if let Some(local_global_index) =
1099 self.module.local_global_index(global_index)
1100 {
1101 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
1102 self.machine.emit_relaxed_mov(
1103 Size::S64,
1104 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1105 Location::GPR(tmp),
1106 )?;
1107 Location::Memory(tmp, 0)
1108 } else {
1109 let offset = self
1111 .vmoffsets
1112 .vmctx_vmglobal_import_definition(global_index);
1113 self.machine.emit_relaxed_mov(
1114 Size::S64,
1115 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1116 Location::GPR(tmp),
1117 )?;
1118 Location::Memory(tmp, 0)
1119 };
1120
1121 self.machine.emit_relaxed_mov(Size::S64, src, loc)?;
1122
1123 self.machine.release_gpr(tmp);
1124 }
1125 Operator::GlobalSet { global_index } => {
1126 let global_index = GlobalIndex::from_u32(global_index);
1127 let tmp = self.machine.acquire_temp_gpr().unwrap();
1128 let dst = if let Some(local_global_index) =
1129 self.module.local_global_index(global_index)
1130 {
1131 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
1132 self.machine.emit_relaxed_mov(
1133 Size::S64,
1134 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1135 Location::GPR(tmp),
1136 )?;
1137 Location::Memory(tmp, 0)
1138 } else {
1139 let offset = self
1141 .vmoffsets
1142 .vmctx_vmglobal_import_definition(global_index);
1143 self.machine.emit_relaxed_mov(
1144 Size::S64,
1145 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1146 Location::GPR(tmp),
1147 )?;
1148 Location::Memory(tmp, 0)
1149 };
1150 let (loc, canonicalize) = self.pop_value_released()?;
1151 if let Some(canonicalize_size) = canonicalize.to_size() {
1152 if self.config.enable_nan_canonicalization {
1153 self.machine.canonicalize_nan(canonicalize_size, loc, dst)?;
1154 } else {
1155 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
1156 }
1157 } else {
1158 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
1159 }
1160 self.machine.release_gpr(tmp);
1161 }
1162 Operator::LocalGet { local_index } => {
1163 let local_index = local_index as usize;
1164 let ret = self.acquire_location(&WpType::I64)?;
1165 self.machine
1166 .emit_relaxed_mov(Size::S64, self.locals[local_index], ret)?;
1167 self.value_stack.push((ret, CanonicalizeType::None));
1168 }
1169 Operator::LocalSet { local_index } => {
1170 let local_index = local_index as usize;
1171 let (loc, canonicalize) = self.pop_value_released()?;
1172
1173 if self.local_types[local_index].is_float()
1174 && let Some(canonicalize_size) = canonicalize.to_size()
1175 {
1176 if self.config.enable_nan_canonicalization {
1177 self.machine.canonicalize_nan(
1178 canonicalize_size,
1179 loc,
1180 self.locals[local_index],
1181 )
1182 } else {
1183 self.machine
1184 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1185 }
1186 } else {
1187 self.machine
1188 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1189 }?;
1190 }
1191 Operator::LocalTee { local_index } => {
1192 let local_index = local_index as usize;
1193 let (loc, canonicalize) = *self.value_stack.last().unwrap();
1194
1195 if self.local_types[local_index].is_float()
1196 && let Some(canonicalize_size) = canonicalize.to_size()
1197 {
1198 if self.config.enable_nan_canonicalization {
1199 self.machine.canonicalize_nan(
1200 canonicalize_size,
1201 loc,
1202 self.locals[local_index],
1203 )
1204 } else {
1205 self.machine
1206 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1207 }
1208 } else {
1209 self.machine
1210 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1211 }?;
1212 }
            // === i32 operators ===

            // Constants emit no code: the immediate is pushed on the
            // compile-time value stack and only materialized when consumed.
            Operator::I32Const { value } => {
                self.value_stack
                    .push((Location::Imm32(value as u32), CanonicalizeType::None));
            }
            // Binary ops share one shape: `i2o1_prepare` pops both operands,
            // allocates a result location of the given type, pushes it on the
            // value stack, and the machine emits the actual instruction(s).
            Operator::I32Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_add32(loc_a, loc_b, ret)?;
            }
            Operator::I32Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_sub32(loc_a, loc_b, ret)?;
            }
            Operator::I32Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_mul32(loc_a, loc_b, ret)?;
            }
            // Division/remainder route their traps to the shared out-of-line
            // labels; signed division also gets the overflow label for the
            // INT_MIN / -1 case.
            Operator::I32DivU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_udiv32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I32DivS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_sdiv32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                    self.special_labels.integer_overflow,
                )?;
            }
            Operator::I32RemU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_urem32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I32RemS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_srem32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I32And => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_and32(loc_a, loc_b, ret)?;
            }
            Operator::I32Or => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_or32(loc_a, loc_b, ret)?;
            }
            Operator::I32Xor => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_xor32(loc_a, loc_b, ret)?;
            }
            Operator::I32Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::I32Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ne(loc_a, loc_b, ret)?;
            }
            // eqz is lowered as a comparison against the constant 0.
            Operator::I32Eqz => {
                let loc_a = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.machine.i32_cmp_eq(loc_a, Location::Imm32(0), ret)?;
                self.value_stack.push((ret, CanonicalizeType::None));
            }
            // Unary bit-counting ops: pop operand, allocate result, emit.
            Operator::I32Clz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_clz(loc, ret)?;
            }
            Operator::I32Ctz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_ctz(loc, ret)?;
            }
            Operator::I32Popcnt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_popcnt(loc, ret)?;
            }
            Operator::I32Shl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_shl(loc_a, loc_b, ret)?;
            }
            // ShrU is a logical shift, ShrS an arithmetic one.
            Operator::I32ShrU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_shr(loc_a, loc_b, ret)?;
            }
            Operator::I32ShrS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_sar(loc_a, loc_b, ret)?;
            }
            Operator::I32Rotl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_rol(loc_a, loc_b, ret)?;
            }
            Operator::I32Rotr => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_ror(loc_a, loc_b, ret)?;
            }
            // Comparisons: unsigned (U) / signed (S) variants map to the
            // matching machine predicates; the result is an i32 boolean.
            Operator::I32LtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_lt_u(loc_a, loc_b, ret)?;
            }
            Operator::I32LeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_le_u(loc_a, loc_b, ret)?;
            }
            Operator::I32GtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_gt_u(loc_a, loc_b, ret)?;
            }
            Operator::I32GeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ge_u(loc_a, loc_b, ret)?;
            }
            Operator::I32LtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_lt_s(loc_a, loc_b, ret)?;
            }
            Operator::I32LeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_le_s(loc_a, loc_b, ret)?;
            }
            Operator::I32GtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_gt_s(loc_a, loc_b, ret)?;
            }
            Operator::I32GeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ge_s(loc_a, loc_b, ret)?;
            }
            // === i64 operators ===
            // Mirror of the i32 arms above, using the 64-bit machine helpers.

            Operator::I64Const { value } => {
                let value = value as u64;
                self.value_stack
                    .push((Location::Imm64(value), CanonicalizeType::None));
            }
            Operator::I64Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_add64(loc_a, loc_b, ret)?;
            }
            Operator::I64Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_sub64(loc_a, loc_b, ret)?;
            }
            Operator::I64Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_mul64(loc_a, loc_b, ret)?;
            }
            // Division/remainder trap handling matches the 32-bit arms:
            // shared division-by-zero label, plus overflow for signed div.
            Operator::I64DivU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_udiv64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64DivS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_sdiv64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                    self.special_labels.integer_overflow,
                )?;
            }
            Operator::I64RemU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_urem64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64RemS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_srem64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64And => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_and64(loc_a, loc_b, ret)?;
            }
            Operator::I64Or => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_or64(loc_a, loc_b, ret)?;
            }
            Operator::I64Xor => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_xor64(loc_a, loc_b, ret)?;
            }
            Operator::I64Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::I64Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ne(loc_a, loc_b, ret)?;
            }
            // eqz compares against the 64-bit constant 0; the result slot is
            // allocated as I64 here (callers only read the low boolean bit).
            Operator::I64Eqz => {
                let loc_a = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
                self.value_stack.push((ret, CanonicalizeType::None));
            }
            Operator::I64Clz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_clz(loc, ret)?;
            }
            Operator::I64Ctz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_ctz(loc, ret)?;
            }
            Operator::I64Popcnt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_popcnt(loc, ret)?;
            }
            Operator::I64Shl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_shl(loc_a, loc_b, ret)?;
            }
            // ShrU is a logical shift, ShrS an arithmetic one.
            Operator::I64ShrU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_shr(loc_a, loc_b, ret)?;
            }
            Operator::I64ShrS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_sar(loc_a, loc_b, ret)?;
            }
            Operator::I64Rotl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_rol(loc_a, loc_b, ret)?;
            }
            Operator::I64Rotr => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_ror(loc_a, loc_b, ret)?;
            }
            Operator::I64LtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_lt_u(loc_a, loc_b, ret)?;
            }
            Operator::I64LeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_le_u(loc_a, loc_b, ret)?;
            }
            Operator::I64GtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_gt_u(loc_a, loc_b, ret)?;
            }
            Operator::I64GeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ge_u(loc_a, loc_b, ret)?;
            }
            Operator::I64LtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_lt_s(loc_a, loc_b, ret)?;
            }
            Operator::I64LeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_le_s(loc_a, loc_b, ret)?;
            }
            Operator::I64GtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_gt_s(loc_a, loc_b, ret)?;
            }
            Operator::I64GeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ge_s(loc_a, loc_b, ret)?;
            }
            // === integer width conversions ===

            // Unsigned i32 -> i64: copy the low 32 bits into the i64 slot.
            Operator::I64ExtendI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;

                // If the destination is a memory slot, the 32-bit move above
                // only wrote the low word; clear the upper word explicitly.
                // NOTE(review): `off + 4` addresses the high word, which
                // assumes a little-endian slot layout — confirm for any
                // big-endian target.
                if let Location::Memory(base, off) = ret {
                    self.machine.emit_relaxed_mov(
                        Size::S32,
                        Location::Imm32(0),
                        Location::Memory(base, off + 4),
                    )?;
                }
            }
            // Signed extensions delegate to the machine's relaxed
            // sign-extension helper with the source/destination widths.
            Operator::I64ExtendI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine
                    .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
            }
            Operator::I32Extend8S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S8, loc, Size::S32, ret)?;
            }
            Operator::I32Extend16S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S16, loc, Size::S32, ret)?;
            }
            Operator::I64Extend8S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S8, loc, Size::S64, ret)?;
            }
            Operator::I64Extend16S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S16, loc, Size::S64, ret)?;
            }
            Operator::I64Extend32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
            }
            // Wrap i64 -> i32: a 32-bit move keeps only the low word.
            Operator::I32WrapI64 => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
            }
1631
            // === f32 operators ===

            // The f32 constant is stored as its raw bit pattern; like integer
            // constants it stays virtual until consumed.
            Operator::F32Const { value } => {
                self.value_stack
                    .push((Location::Imm32(value.bits()), CanonicalizeType::None));
            }
            // Arithmetic results are allocated as F64-typed (wide) slots; the
            // `CanonicalizeType::F32` tag records the actual float width so a
            // later consumer can canonicalize NaN outputs at 32 bits.
            Operator::F32Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_add(loc_a, loc_b, ret)?;
            }
            Operator::F32Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_sub(loc_a, loc_b, ret)?;
            }
            Operator::F32Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_mul(loc_a, loc_b, ret)?;
            }
            Operator::F32Div => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_div(loc_a, loc_b, ret)?;
            }
            // min/max results are not tagged for canonicalization —
            // NOTE(review): presumably the machine-level f32_max/f32_min
            // already produce canonical NaNs; confirm in the Machine impls.
            Operator::F32Max => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f32_max(loc_a, loc_b, ret)?;
            }
            Operator::F32Min => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f32_min(loc_a, loc_b, ret)?;
            }
            // Comparisons produce an i32 boolean.
            Operator::F32Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::F32Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::F32Lt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_lt(loc_a, loc_b, ret)?;
            }
            Operator::F32Le => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_le(loc_a, loc_b, ret)?;
            }
            Operator::F32Gt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_gt(loc_a, loc_b, ret)?;
            }
            Operator::F32Ge => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_ge(loc_a, loc_b, ret)?;
            }
            // Unary rounding/sqrt ops: results can be NaN, so they carry the
            // F32 canonicalization tag like the arithmetic ops above.
            Operator::F32Nearest => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_nearest(loc, ret)?;
            }
            Operator::F32Floor => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_floor(loc, ret)?;
            }
            Operator::F32Ceil => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_ceil(loc, ret)?;
            }
            Operator::F32Trunc => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_trunc(loc, ret)?;
            }
            Operator::F32Sqrt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_sqrt(loc, ret)?;
            }

            // copysign is done bitwise in GPRs: both operands are moved (or
            // NaN-canonicalized, if tagged and canonicalization is enabled)
            // into temporary integer registers, combined, then stored.
            Operator::F32Copysign => {
                let loc_b = self.pop_value_released()?;
                let loc_a = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let tmp1 = self.machine.acquire_temp_gpr().unwrap();
                let tmp2 = self.machine.acquire_temp_gpr().unwrap();

                if self.config.enable_nan_canonicalization {
                    // Canonicalize only the operands whose tag says they may
                    // hold a non-canonical NaN; plain moves otherwise.
                    for ((loc, fp), tmp) in [(loc_a, tmp1), (loc_b, tmp2)] {
                        if fp.to_size().is_some() {
                            self.machine
                                .canonicalize_nan(Size::S32, loc, Location::GPR(tmp))?
                        } else {
                            self.machine
                                .move_location(Size::S32, loc, Location::GPR(tmp))?
                        }
                    }
                } else {
                    self.machine
                        .move_location(Size::S32, loc_a.0, Location::GPR(tmp1))?;
                    self.machine
                        .move_location(Size::S32, loc_b.0, Location::GPR(tmp2))?;
                }
                self.machine.emit_i32_copysign(tmp1, tmp2)?;
                self.machine
                    .move_location(Size::S32, Location::GPR(tmp1), ret)?;
                self.machine.release_gpr(tmp2);
                self.machine.release_gpr(tmp1);
            }
1758
1759 Operator::F32Abs => {
1760 let loc = self.pop_value_released()?.0;
1763 let ret = self.acquire_location(&WpType::F32)?;
1764 self.value_stack.push((ret, CanonicalizeType::None));
1765
1766 self.machine.f32_abs(loc, ret)?;
1767 }
1768
1769 Operator::F32Neg => {
1770 let loc = self.pop_value_released()?.0;
1773 let ret = self.acquire_location(&WpType::F32)?;
1774 self.value_stack.push((ret, CanonicalizeType::None));
1775
1776 self.machine.f32_neg(loc, ret)?;
1777 }
1778
            // === f64 operators ===
            // Mirror of the f32 arms, at 64-bit width.

            Operator::F64Const { value } => {
                self.value_stack
                    .push((Location::Imm64(value.bits()), CanonicalizeType::None));
            }
            // Arithmetic results carry the F64 canonicalization tag so NaN
            // outputs can be canonicalized when the value is consumed.
            Operator::F64Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_add(loc_a, loc_b, ret)?;
            }
            Operator::F64Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_sub(loc_a, loc_b, ret)?;
            }
            Operator::F64Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_mul(loc_a, loc_b, ret)?;
            }
            Operator::F64Div => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_div(loc_a, loc_b, ret)?;
            }
            // min/max results are untagged, matching the f32 arms —
            // NOTE(review): presumably f64_max/f64_min already emit canonical
            // NaNs; confirm in the Machine impls.
            Operator::F64Max => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f64_max(loc_a, loc_b, ret)?;
            }
            Operator::F64Min => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f64_min(loc_a, loc_b, ret)?;
            }
            // Comparisons produce an i32 boolean.
            Operator::F64Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::F64Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::F64Lt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_lt(loc_a, loc_b, ret)?;
            }
            Operator::F64Le => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_le(loc_a, loc_b, ret)?;
            }
            Operator::F64Gt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_gt(loc_a, loc_b, ret)?;
            }
            Operator::F64Ge => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_ge(loc_a, loc_b, ret)?;
            }
            // Unary rounding/sqrt ops: possible NaN outputs, tagged F64.
            Operator::F64Nearest => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_nearest(loc, ret)?;
            }
            Operator::F64Floor => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_floor(loc, ret)?;
            }
            Operator::F64Ceil => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_ceil(loc, ret)?;
            }
            Operator::F64Trunc => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_trunc(loc, ret)?;
            }
            Operator::F64Sqrt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_sqrt(loc, ret)?;
            }

            // Bitwise copysign through temporary GPRs, canonicalizing tagged
            // operands first when NaN canonicalization is enabled.
            Operator::F64Copysign => {
                let loc_b = self.pop_value_released()?;
                let loc_a = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let tmp1 = self.machine.acquire_temp_gpr().unwrap();
                let tmp2 = self.machine.acquire_temp_gpr().unwrap();

                if self.config.enable_nan_canonicalization {
                    for ((loc, fp), tmp) in [(loc_a, tmp1), (loc_b, tmp2)] {
                        if fp.to_size().is_some() {
                            self.machine
                                .canonicalize_nan(Size::S64, loc, Location::GPR(tmp))?
                        } else {
                            self.machine
                                .move_location(Size::S64, loc, Location::GPR(tmp))?
                        }
                    }
                } else {
                    self.machine
                        .move_location(Size::S64, loc_a.0, Location::GPR(tmp1))?;
                    self.machine
                        .move_location(Size::S64, loc_b.0, Location::GPR(tmp2))?;
                }
                self.machine.emit_i64_copysign(tmp1, tmp2)?;
                self.machine
                    .move_location(Size::S64, Location::GPR(tmp1), ret)?;

                self.machine.release_gpr(tmp2);
                self.machine.release_gpr(tmp1);
            }

            // abs/neg only touch the sign bit, so the operand's
            // canonicalization tag is propagated to the result unchanged.
            Operator::F64Abs => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize));

                self.machine.f64_abs(loc, ret)?;
            }

            Operator::F64Neg => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize));

                self.machine.f64_neg(loc, ret)?;
            }
1922
            // === float width changes and reinterprets ===

            // Promotion/demotion keep the pending-canonicalization tag, but
            // converted to the new width via `promote()` / `demote()`.
            Operator::F64PromoteF32 => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize.promote()?));
                self.machine.convert_f64_f32(loc, ret)?;
            }
            Operator::F32DemoteF64 => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize.demote()?));
                self.machine.convert_f32_f64(loc, ret)?;
            }

            // float -> int reinterpret: the bit pattern becomes
            // integer-observable here, so any pending NaN canonicalization
            // must be materialized now instead of staying deferred.
            Operator::I32ReinterpretF32 => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if !self.config.enable_nan_canonicalization
                    || matches!(canonicalize, CanonicalizeType::None)
                {
                    // No canonicalization needed: a plain move (skipped when
                    // source and destination already coincide).
                    if loc != ret {
                        self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
                    }
                } else {
                    self.machine.canonicalize_nan(Size::S32, loc, ret)?;
                }
            }
            // int -> float reinterpret: integer bits carry no pending
            // canonicalization, so this is just a move.
            Operator::F32ReinterpretI32 => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if loc != ret {
                    self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
                }
            }

            Operator::I64ReinterpretF64 => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if !self.config.enable_nan_canonicalization
                    || matches!(canonicalize, CanonicalizeType::None)
                {
                    if loc != ret {
                        self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
                    }
                } else {
                    self.machine.canonicalize_nan(Size::S64, loc, ret)?;
                }
            }
            Operator::F64ReinterpretI64 => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if loc != ret {
                    self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
                }
            }
1985
            // === float -> int truncations ===
            // The machine helpers take (src, dst, signed, saturating):
            // plain Trunc* arms pass saturating=false (out-of-range inputs
            // trap), the TruncSat* arms pass saturating=true.

            Operator::I32TruncF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, false, false)?;
            }

            Operator::I32TruncSatF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, false, true)?;
            }

            Operator::I32TruncF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, true, false)?;
            }
            Operator::I32TruncSatF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, true, true)?;
            }

            Operator::I64TruncF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, true, false)?;
            }

            Operator::I64TruncSatF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, true, true)?;
            }

            Operator::I64TruncF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, false, false)?;
            }
            Operator::I64TruncSatF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, false, true)?;
            }

            Operator::I32TruncF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, false, false)?;
            }

            Operator::I32TruncSatF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, false, true)?;
            }

            Operator::I32TruncF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, true, false)?;
            }

            Operator::I32TruncSatF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, true, true)?;
            }

            Operator::I64TruncF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, true, false)?;
            }

            Operator::I64TruncSatF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, true, true)?;
            }

            Operator::I64TruncF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, false, false)?;
            }

            Operator::I64TruncSatF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, false, true)?;
            }
2111
            // === int -> float conversions ===
            // The machine helpers take (src, signed, dst). These are exact or
            // correctly-rounded conversions, so no canonicalization tag is
            // needed on the result.

            Operator::F32ConvertI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i32(loc, true, ret)?;
            }
            Operator::F32ConvertI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i32(loc, false, ret)?;
            }
            Operator::F32ConvertI64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i64(loc, true, ret)?;
            }
            Operator::F32ConvertI64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i64(loc, false, ret)?;
            }

            Operator::F64ConvertI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i32(loc, true, ret)?;
            }
            Operator::F64ConvertI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i32(loc, false, ret)?;
            }
            Operator::F64ConvertI64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i64(loc, true, ret)?;
            }
            Operator::F64ConvertI64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i64(loc, false, ret)?;
            }
2169
            // Direct call: resolve the callee's signature, pop its arguments
            // off the value stack, canonicalize tagged float arguments, then
            // emit a relocatable native call.
            Operator::Call { function_index } => {
                let function_index = function_index as usize;

                // Look up the callee's signature to learn parameter and
                // result types.
                let sig_index = *self
                    .module
                    .functions
                    .get(FunctionIndex::new(function_index))
                    .unwrap();
                let sig = self.module.signatures.get(sig_index).unwrap();
                let param_types: SmallVec<[WpType; 8]> =
                    sig.params().iter().map(type_to_wp_type).collect();
                let return_types: SmallVec<[WpType; 1]> =
                    sig.results().iter().map(type_to_wp_type).collect();

                // Pop exactly the arguments (top of the value stack) in order.
                let params: SmallVec<[_; 8]> = self
                    .value_stack
                    .drain(self.value_stack.len() - param_types.len()..)
                    .collect();

                // NaN bit patterns become observable across the call
                // boundary, so canonicalize any argument still carrying a
                // pending-canonicalization tag, in place.
                if self.config.enable_nan_canonicalization {
                    for (loc, canonicalize) in params.iter() {
                        if let Some(size) = canonicalize.to_size() {
                            self.machine.canonicalize_nan(size, *loc, *loc)?;
                        }
                    }
                }

                // Imported functions are reached through a custom section
                // addressed by the function index; locally-defined functions
                // get a direct local-function relocation.
                // NOTE(review): this relies on one custom section per import,
                // with section indices lining up with import order — confirm
                // against the section-emission side.
                let reloc_target = if function_index < self.module.num_imported_functions {
                    RelocationTarget::CustomSection(SectionIndex::new(function_index))
                } else {
                    RelocationTarget::LocalFunc(LocalFunctionIndex::new(
                        function_index - self.module.num_imported_functions,
                    ))
                };
                let calling_convention = self.calling_convention;

                self.emit_call_native(
                    |this| {
                        // The call instruction itself is marked as a
                        // StackOverflow trap site; relocations produced by
                        // the call emission are collected for later fixup.
                        let offset = this
                            .machine
                            .mark_instruction_with_trap_code(TrapCode::StackOverflow);
                        let mut relocations = this
                            .machine
                            .emit_call_with_reloc(calling_convention, reloc_target)?;
                        this.machine.mark_instruction_address_end(offset);
                        this.relocations.append(&mut relocations);
                        Ok(())
                    },
                    params.iter().copied(),
                    param_types.iter().copied(),
                    return_types.iter().copied(),
                    // Wasm-to-wasm calls pass the VM context as the hidden
                    // first argument.
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            Operator::CallIndirect {
                type_index,
                table_index,
            } => {
                let table_index = TableIndex::new(table_index as _);
                let index = SignatureIndex::new(type_index as usize);
                let sig = self.module.signatures.get(index).unwrap();
                let param_types: SmallVec<[WpType; 8]> =
                    sig.params().iter().map(type_to_wp_type).collect();
                let return_types: SmallVec<[WpType; 1]> =
                    sig.results().iter().map(type_to_wp_type).collect();

                // Dynamic element index into the table (topmost stack value).
                let func_index = self.pop_value_released()?.0;

                let params: SmallVec<[_; 8]> = self
                    .value_stack
                    .drain(self.value_stack.len() - param_types.len()..)
                    .collect();

                if self.config.enable_nan_canonicalization {
                    // Canonicalize possibly-NaN float arguments in place.
                    for (loc, canonicalize) in params.iter() {
                        if let Some(size) = canonicalize.to_size() {
                            self.machine.canonicalize_nan(size, *loc, *loc)?;
                        }
                    }
                }

                // Scratch registers: table base pointer, element count (later
                // reused for the funcref pointer), and the expected signature id.
                let table_base = self.machine.acquire_temp_gpr().unwrap();
                let table_count = self.machine.acquire_temp_gpr().unwrap();
                let sigidx = self.machine.acquire_temp_gpr().unwrap();

                if let Some(local_table_index) = self.module.local_table_index(table_index) {
                    // Local table: its definition lives directly inside the vmctx.
                    let (vmctx_offset_base, vmctx_offset_len) = (
                        self.vmoffsets.vmctx_vmtable_definition(local_table_index),
                        self.vmoffsets
                            .vmctx_vmtable_definition_current_elements(local_table_index),
                    );
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_base as i32),
                        Location::GPR(table_base),
                    )?;
                    self.machine.move_location(
                        Size::S32,
                        Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_len as i32),
                        Location::GPR(table_count),
                    )?;
                } else {
                    // Imported table: follow the import pointer in vmctx, then
                    // read length and base through the imported definition.
                    let import_offset = self.vmoffsets.vmctx_vmtable_import(table_index);
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(self.machine.get_vmctx_reg(), import_offset as i32),
                        Location::GPR(table_base),
                    )?;

                    self.machine.move_location(
                        Size::S32,
                        Location::Memory(
                            table_base,
                            self.vmoffsets.vmtable_definition_current_elements() as _,
                        ),
                        Location::GPR(table_count),
                    )?;

                    // Overwrite table_base last: the length read above still
                    // needed the definition pointer.
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(table_base, self.vmoffsets.vmtable_definition_base() as _),
                        Location::GPR(table_base),
                    )?;
                }

                // Bounds check: trap if count <= index (unsigned).
                self.machine.jmp_on_condition(
                    UnsignedCondition::BelowEqual,
                    Size::S32,
                    Location::GPR(table_count),
                    func_index,
                    self.special_labels.table_access_oob,
                )?;
                // table_count now becomes the element address:
                // base + index * size_of_vm_funcref.
                self.machine
                    .move_location(Size::S32, func_index, Location::GPR(table_count))?;
                self.machine.emit_imul_imm32(
                    Size::S64,
                    self.vmoffsets.size_of_vm_funcref() as u32,
                    table_count,
                )?;
                self.machine.location_add(
                    Size::S64,
                    Location::GPR(table_base),
                    Location::GPR(table_count),
                    false,
                )?;

                // Load the anyfunc pointer out of the funcref slot and
                // null-check it (a null entry traps as indirect_call_null).
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(table_count, self.vmoffsets.vm_funcref_anyfunc_ptr() as i32),
                    Location::GPR(table_count),
                )?;
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S64,
                    Location::GPR(table_count),
                    Location::Imm32(0),
                    self.special_labels.indirect_call_null,
                )?;
                // Expected shared signature id for this type, from vmctx.
                self.machine.move_location(
                    Size::S32,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_vmshared_signature_id(index) as i32,
                    ),
                    Location::GPR(sigidx),
                )?;

                // Signature check: callee's type index must match, else trap.
                self.machine.jmp_on_condition(
                    UnsignedCondition::NotEqual,
                    Size::S32,
                    Location::GPR(sigidx),
                    Location::Memory(
                        table_count,
                        (self.vmoffsets.vmcaller_checked_anyfunc_type_index() as usize) as i32,
                    ),
                    self.special_labels.bad_signature,
                )?;
                // NOTE(review): the registers are released here but table_count is
                // still read below — this appears to rely on no intervening
                // acquisition clobbering them; confirm against the allocator.
                self.machine.release_gpr(sigidx);
                self.machine.release_gpr(table_count);
                self.machine.release_gpr(table_base);

                // Move the anyfunc pointer into the architecture's designated
                // call register, if it is not already there.
                let gpr_for_call = self.machine.get_gpr_for_call();
                if table_count != gpr_for_call {
                    self.machine.move_location(
                        Size::S64,
                        Location::GPR(table_count),
                        Location::GPR(gpr_for_call),
                    )?;
                }

                let vmcaller_checked_anyfunc_func_ptr =
                    self.vmoffsets.vmcaller_checked_anyfunc_func_ptr() as usize;
                let vmcaller_checked_anyfunc_vmctx =
                    self.vmoffsets.vmcaller_checked_anyfunc_vmctx() as usize;
                let calling_convention = self.calling_convention;

                self.emit_call_native(
                    |this| {
                        let offset = this
                            .machine
                            .mark_instruction_with_trap_code(TrapCode::StackOverflow);

                        // First argument is the callee's own vmctx, loaded from
                        // the anyfunc record.
                        this.machine.move_location(
                            Size::S64,
                            Location::Memory(gpr_for_call, vmcaller_checked_anyfunc_vmctx as i32),
                            Location::GPR(
                                this.machine
                                    .get_simple_param_location(0, calling_convention),
                            ),
                        )?;

                        // Indirect call through the anyfunc's function pointer.
                        this.machine.emit_call_location(Location::Memory(
                            gpr_for_call,
                            vmcaller_checked_anyfunc_func_ptr as i32,
                        ))?;
                        this.machine.mark_instruction_address_end(offset);
                        Ok(())
                    },
                    params.iter().copied(),
                    param_types.iter().copied(),
                    return_types.iter().copied(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            Operator::If { blockty } => {
                let label_end = self.machine.get_label();
                let label_else = self.machine.get_label();

                let return_types = self.return_types_for_block(blockty);
                let param_types = self.param_types_for_block(blockty);
                // +1 accounts for the condition value still sitting on the
                // stack above the block parameters.
                self.allocate_return_slots_and_swap(param_types.len() + 1, return_types.len())?;

                let cond = self.pop_value_released()?.0;

                if param_types.len() == return_types.len() {
                    // Pre-copy the block inputs into the return slots so a
                    // fall-through with untouched params yields correct results.
                    // Inputs are the top param_types.len() entries; the return
                    // slots sit immediately below them.
                    for (input, return_value) in self
                        .value_stack
                        .iter()
                        .rev()
                        .take(param_types.len())
                        .zip(self.value_stack.iter().rev().skip(param_types.len()))
                    {
                        self.machine
                            .emit_relaxed_mov(Size::S64, input.0, return_value.0)?;
                    }
                }

                let frame = ControlFrame {
                    state: ControlState::If {
                        label_else,
                        // Snapshot the input locations (in stack order) so the
                        // else arm can restore them when it begins.
                        inputs: SmallVec::from_iter(
                            self.value_stack
                                .iter()
                                .rev()
                                .take(param_types.len())
                                .rev()
                                .copied(),
                        ),
                    },
                    label: label_end,
                    param_types,
                    return_types,
                    value_stack_depth: self.value_stack.len(),
                };
                self.control_stack.push(frame);
                // Branch to the else arm when the condition is zero.
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    label_else,
                )?;
            }
            Operator::Else => {
                let frame = self.control_stack.last().unwrap();

                // Flush the then-arm's results into the return slots, unless the
                // then-arm ended unreachable (its stack contents are garbage).
                if !was_unreachable && !frame.return_types.is_empty() {
                    self.emit_return_values(
                        frame.value_stack_depth_after(),
                        frame.return_types.len(),
                    )?;
                }

                // Drop everything the then-arm left above the frame's base depth
                // and release those locations.
                let frame = &self.control_stack.last_mut().unwrap();
                let locs = self
                    .value_stack
                    .drain(frame.value_stack_depth_after()..)
                    .collect_vec();
                self.release_locations(&locs)?;
                let frame = &mut self.control_stack.last_mut().unwrap();

                let ControlState::If {
                    label_else,
                    ref inputs,
                } = frame.state
                else {
                    panic!("Operator::Else must be connected to Operator::If statement");
                };
                // Re-reserve the saved input locations so the else arm starts
                // with the same block parameters the then arm saw.
                for (input, _) in inputs {
                    match input {
                        Location::GPR(x) => {
                            self.machine.reserve_gpr(*x);
                        }
                        Location::SIMD(x) => {
                            self.machine.reserve_simd(*x);
                        }
                        Location::Memory(reg, _) => {
                            // Stack-based inputs must be relative to the local
                            // pointer; re-account their stack space.
                            debug_assert_eq!(reg, &self.machine.local_pointer());
                            self.stack_offset += 8;
                        }
                        _ => {}
                    }
                }
                self.value_stack.extend(inputs);

                // Then-arm falls through to the end label; else code follows.
                self.machine.jmp_unconditional(frame.label)?;
                self.machine.emit_label(label_else)?;
                frame.state = ControlState::Else;
            }
            Operator::TypedSelect { .. } | Operator::Select => {
                // Stack (top to bottom): cond, b, a. Result is a if cond != 0,
                // else b. Implemented with branches rather than cmov so NaN
                // canonicalization can be applied to the chosen value only.
                let cond = self.pop_value_released()?.0;
                let (v_b, canonicalize_b) = self.pop_value_released()?;
                let (v_a, canonicalize_a) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let end_label = self.machine.get_label();
                let zero_label = self.machine.get_label();

                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    zero_label,
                )?;
                // cond != 0: result is a (canonicalized if flagged).
                if self.config.enable_nan_canonicalization
                    && let Some(size) = canonicalize_a.to_size()
                {
                    self.machine.canonicalize_nan(size, v_a, ret)?;
                } else if v_a != ret {
                    self.machine.emit_relaxed_mov(Size::S64, v_a, ret)?;
                }
                self.machine.jmp_unconditional(end_label)?;
                self.machine.emit_label(zero_label)?;
                // cond == 0: result is b (canonicalized if flagged).
                if self.config.enable_nan_canonicalization
                    && let Some(size) = canonicalize_b.to_size()
                {
                    self.machine.canonicalize_nan(size, v_b, ret)?;
                } else if v_b != ret {
                    self.machine.emit_relaxed_mov(Size::S64, v_b, ret)?;
                }
                self.machine.emit_label(end_label)?;
            }
            Operator::Block { blockty } => {
                let return_types = self.return_types_for_block(blockty);
                let param_types = self.param_types_for_block(blockty);
                // Reserve return slots beneath the block's parameters so
                // branches targeting this block have somewhere to write results.
                self.allocate_return_slots_and_swap(param_types.len(), return_types.len())?;

                let frame = ControlFrame {
                    state: ControlState::Block,
                    // Branches to this block jump forward to its end label.
                    label: self.machine.get_label(),
                    param_types,
                    return_types,
                    value_stack_depth: self.value_stack.len(),
                };
                self.control_stack.push(frame);
            }
            Operator::Loop { blockty } => {
                self.machine.align_for_loop()?;
                // Branches to a loop jump backwards to its start label.
                let label = self.machine.get_label();

                let return_types = self.return_types_for_block(blockty);
                let param_types = self.param_types_for_block(blockty);
                let params_count = param_types.len();
                // Reserve slots for both the loop-carried parameters ("phi"
                // slots written on every back-edge) and the return values.
                self.allocate_return_slots_and_swap(
                    param_types.len(),
                    param_types.len() + return_types.len(),
                )?;

                self.control_stack.push(ControlFrame {
                    state: ControlState::Loop,
                    label,
                    param_types: param_types.clone(),
                    return_types: return_types.clone(),
                    value_stack_depth: self.value_stack.len(),
                });

                // Store the initial parameter values into the phi slots, then
                // release the original locations.
                let params = self
                    .value_stack
                    .drain((self.value_stack.len() - params_count)..)
                    .collect_vec();
                for (param, phi_param) in params.iter().rev().zip(self.value_stack.iter().rev()) {
                    self.machine
                        .emit_relaxed_mov(Size::S64, param.0, phi_param.0)?;
                }
                self.release_locations(&params)?;

                self.machine.emit_label(label)?;

                // After the label, reload each phi slot into a freshly acquired
                // location so the loop body operates on its own copies (the phi
                // slots stay reserved for the next back-edge).
                let phi_params = self
                    .value_stack
                    .iter()
                    .rev()
                    .take(params_count)
                    .rev()
                    .copied()
                    .collect_vec();
                for (i, phi_param) in phi_params.into_iter().enumerate() {
                    let loc = self.acquire_location(&param_types[i])?;
                    self.machine.emit_relaxed_mov(Size::S64, phi_param.0, loc)?;
                    self.value_stack.push((loc, phi_param.1));
                }

            }
2613 Operator::Nop => {}
2614 Operator::MemorySize { mem } => {
2615 let memory_index = MemoryIndex::new(mem as usize);
2616 self.machine.move_location(
2617 Size::S64,
2618 Location::Memory(
2619 self.machine.get_vmctx_reg(),
2620 self.vmoffsets.vmctx_builtin_function(
2621 if self.module.local_memory_index(memory_index).is_some() {
2622 VMBuiltinFunctionIndex::get_memory32_size_index()
2623 } else {
2624 VMBuiltinFunctionIndex::get_imported_memory32_size_index()
2625 },
2626 ) as i32,
2627 ),
2628 Location::GPR(self.machine.get_gpr_for_call()),
2629 )?;
2630 self.emit_call_native(
2631 |this| {
2632 this.machine
2633 .emit_call_register(this.machine.get_gpr_for_call())
2634 },
2635 iter::once((
2637 Location::Imm32(memory_index.index() as u32),
2638 CanonicalizeType::None,
2639 )),
2640 iter::once(WpType::I64),
2641 iter::once(WpType::I64),
2642 NativeCallType::IncludeVMCtxArgument,
2643 )?;
2644 }
2645 Operator::MemoryInit { data_index, mem } => {
2646 let len = self.value_stack.pop().unwrap();
2647 let src = self.value_stack.pop().unwrap();
2648 let dst = self.value_stack.pop().unwrap();
2649
2650 self.machine.move_location(
2651 Size::S64,
2652 Location::Memory(
2653 self.machine.get_vmctx_reg(),
2654 self.vmoffsets
2655 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_memory_init_index())
2656 as i32,
2657 ),
2658 Location::GPR(self.machine.get_gpr_for_call()),
2659 )?;
2660
2661 self.emit_call_native(
2662 |this| {
2663 this.machine
2664 .emit_call_register(this.machine.get_gpr_for_call())
2665 },
2666 [
2668 (Location::Imm32(mem), CanonicalizeType::None),
2669 (Location::Imm32(data_index), CanonicalizeType::None),
2670 dst,
2671 src,
2672 len,
2673 ]
2674 .iter()
2675 .cloned(),
2676 [
2677 WpType::I64,
2678 WpType::I64,
2679 WpType::I64,
2680 WpType::I64,
2681 WpType::I64,
2682 ]
2683 .iter()
2684 .cloned(),
2685 iter::empty(),
2686 NativeCallType::IncludeVMCtxArgument,
2687 )?;
2688 }
2689 Operator::DataDrop { data_index } => {
2690 self.machine.move_location(
2691 Size::S64,
2692 Location::Memory(
2693 self.machine.get_vmctx_reg(),
2694 self.vmoffsets
2695 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_data_drop_index())
2696 as i32,
2697 ),
2698 Location::GPR(self.machine.get_gpr_for_call()),
2699 )?;
2700
2701 self.emit_call_native(
2702 |this| {
2703 this.machine
2704 .emit_call_register(this.machine.get_gpr_for_call())
2705 },
2706 iter::once((Location::Imm32(data_index), CanonicalizeType::None)),
2708 iter::once(WpType::I64),
2709 iter::empty(),
2710 NativeCallType::IncludeVMCtxArgument,
2711 )?;
2712 }
2713 Operator::MemoryCopy { src_mem, .. } => {
2714 let len = self.value_stack.pop().unwrap();
2716 let src_pos = self.value_stack.pop().unwrap();
2717 let dst_pos = self.value_stack.pop().unwrap();
2718
2719 let memory_index = MemoryIndex::new(src_mem as usize);
2720 let (memory_copy_index, memory_index) =
2721 if self.module.local_memory_index(memory_index).is_some() {
2722 (
2723 VMBuiltinFunctionIndex::get_memory_copy_index(),
2724 memory_index,
2725 )
2726 } else {
2727 (
2728 VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
2729 memory_index,
2730 )
2731 };
2732
2733 self.machine.move_location(
2734 Size::S64,
2735 Location::Memory(
2736 self.machine.get_vmctx_reg(),
2737 self.vmoffsets.vmctx_builtin_function(memory_copy_index) as i32,
2738 ),
2739 Location::GPR(self.machine.get_gpr_for_call()),
2740 )?;
2741
2742 self.emit_call_native(
2743 |this| {
2744 this.machine
2745 .emit_call_register(this.machine.get_gpr_for_call())
2746 },
2747 [
2749 (
2750 Location::Imm32(memory_index.index() as u32),
2751 CanonicalizeType::None,
2752 ),
2753 dst_pos,
2754 src_pos,
2755 len,
2756 ]
2757 .iter()
2758 .cloned(),
2759 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
2760 .iter()
2761 .cloned(),
2762 iter::empty(),
2763 NativeCallType::IncludeVMCtxArgument,
2764 )?;
2765 }
2766 Operator::MemoryFill { mem } => {
2767 let len = self.value_stack.pop().unwrap();
2768 let val = self.value_stack.pop().unwrap();
2769 let dst = self.value_stack.pop().unwrap();
2770
2771 let memory_index = MemoryIndex::new(mem as usize);
2772 let (memory_fill_index, memory_index) =
2773 if self.module.local_memory_index(memory_index).is_some() {
2774 (
2775 VMBuiltinFunctionIndex::get_memory_fill_index(),
2776 memory_index,
2777 )
2778 } else {
2779 (
2780 VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
2781 memory_index,
2782 )
2783 };
2784
2785 self.machine.move_location(
2786 Size::S64,
2787 Location::Memory(
2788 self.machine.get_vmctx_reg(),
2789 self.vmoffsets.vmctx_builtin_function(memory_fill_index) as i32,
2790 ),
2791 Location::GPR(self.machine.get_gpr_for_call()),
2792 )?;
2793
2794 self.emit_call_native(
2795 |this| {
2796 this.machine
2797 .emit_call_register(this.machine.get_gpr_for_call())
2798 },
2799 [
2801 (
2802 Location::Imm32(memory_index.index() as u32),
2803 CanonicalizeType::None,
2804 ),
2805 dst,
2806 val,
2807 len,
2808 ]
2809 .iter()
2810 .cloned(),
2811 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
2812 .iter()
2813 .cloned(),
2814 iter::empty(),
2815 NativeCallType::IncludeVMCtxArgument,
2816 )?;
2817 }
2818 Operator::MemoryGrow { mem } => {
2819 let memory_index = MemoryIndex::new(mem as usize);
2820 let param_pages = self.value_stack.pop().unwrap();
2821
2822 self.machine.move_location(
2823 Size::S64,
2824 Location::Memory(
2825 self.machine.get_vmctx_reg(),
2826 self.vmoffsets.vmctx_builtin_function(
2827 if self.module.local_memory_index(memory_index).is_some() {
2828 VMBuiltinFunctionIndex::get_memory32_grow_index()
2829 } else {
2830 VMBuiltinFunctionIndex::get_imported_memory32_grow_index()
2831 },
2832 ) as i32,
2833 ),
2834 Location::GPR(self.machine.get_gpr_for_call()),
2835 )?;
2836
2837 self.emit_call_native(
2838 |this| {
2839 this.machine
2840 .emit_call_register(this.machine.get_gpr_for_call())
2841 },
2842 [
2844 param_pages,
2845 (
2846 Location::Imm32(memory_index.index() as u32),
2847 CanonicalizeType::None,
2848 ),
2849 ]
2850 .iter()
2851 .cloned(),
2852 [WpType::I64, WpType::I64].iter().cloned(),
2853 iter::once(WpType::I64),
2854 NativeCallType::IncludeVMCtxArgument,
2855 )?;
2856 }
2857 Operator::I32Load { ref memarg } => {
2858 let target = self.pop_value_released()?.0;
2859 let ret = self.acquire_location(&WpType::I32)?;
2860 self.value_stack.push((ret, CanonicalizeType::None));
2861 self.op_memory(
2862 |this,
2863 need_check,
2864 imported_memories,
2865 offset,
2866 heap_access_oob,
2867 unaligned_atomic| {
2868 this.machine.i32_load(
2869 target,
2870 memarg,
2871 ret,
2872 need_check,
2873 imported_memories,
2874 offset,
2875 heap_access_oob,
2876 unaligned_atomic,
2877 )
2878 },
2879 )?;
2880 }
2881 Operator::F32Load { ref memarg } => {
2882 let target = self.pop_value_released()?.0;
2883 let ret = self.acquire_location(&WpType::F32)?;
2884 self.value_stack.push((ret, CanonicalizeType::None));
2885 self.op_memory(
2886 |this,
2887 need_check,
2888 imported_memories,
2889 offset,
2890 heap_access_oob,
2891 unaligned_atomic| {
2892 this.machine.f32_load(
2893 target,
2894 memarg,
2895 ret,
2896 need_check,
2897 imported_memories,
2898 offset,
2899 heap_access_oob,
2900 unaligned_atomic,
2901 )
2902 },
2903 )?;
2904 }
2905 Operator::I32Load8U { ref memarg } => {
2906 let target = self.pop_value_released()?.0;
2907 let ret = self.acquire_location(&WpType::I32)?;
2908 self.value_stack.push((ret, CanonicalizeType::None));
2909 self.op_memory(
2910 |this,
2911 need_check,
2912 imported_memories,
2913 offset,
2914 heap_access_oob,
2915 unaligned_atomic| {
2916 this.machine.i32_load_8u(
2917 target,
2918 memarg,
2919 ret,
2920 need_check,
2921 imported_memories,
2922 offset,
2923 heap_access_oob,
2924 unaligned_atomic,
2925 )
2926 },
2927 )?;
2928 }
2929 Operator::I32Load8S { ref memarg } => {
2930 let target = self.pop_value_released()?.0;
2931 let ret = self.acquire_location(&WpType::I32)?;
2932 self.value_stack.push((ret, CanonicalizeType::None));
2933 self.op_memory(
2934 |this,
2935 need_check,
2936 imported_memories,
2937 offset,
2938 heap_access_oob,
2939 unaligned_atomic| {
2940 this.machine.i32_load_8s(
2941 target,
2942 memarg,
2943 ret,
2944 need_check,
2945 imported_memories,
2946 offset,
2947 heap_access_oob,
2948 unaligned_atomic,
2949 )
2950 },
2951 )?;
2952 }
2953 Operator::I32Load16U { ref memarg } => {
2954 let target = self.pop_value_released()?.0;
2955 let ret = self.acquire_location(&WpType::I32)?;
2956 self.value_stack.push((ret, CanonicalizeType::None));
2957 self.op_memory(
2958 |this,
2959 need_check,
2960 imported_memories,
2961 offset,
2962 heap_access_oob,
2963 unaligned_atomic| {
2964 this.machine.i32_load_16u(
2965 target,
2966 memarg,
2967 ret,
2968 need_check,
2969 imported_memories,
2970 offset,
2971 heap_access_oob,
2972 unaligned_atomic,
2973 )
2974 },
2975 )?;
2976 }
2977 Operator::I32Load16S { ref memarg } => {
2978 let target = self.pop_value_released()?.0;
2979 let ret = self.acquire_location(&WpType::I32)?;
2980 self.value_stack.push((ret, CanonicalizeType::None));
2981 self.op_memory(
2982 |this,
2983 need_check,
2984 imported_memories,
2985 offset,
2986 heap_access_oob,
2987 unaligned_atomic| {
2988 this.machine.i32_load_16s(
2989 target,
2990 memarg,
2991 ret,
2992 need_check,
2993 imported_memories,
2994 offset,
2995 heap_access_oob,
2996 unaligned_atomic,
2997 )
2998 },
2999 )?;
3000 }
3001 Operator::I32Store { ref memarg } => {
3002 let target_value = self.pop_value_released()?.0;
3003 let target_addr = self.pop_value_released()?.0;
3004 self.op_memory(
3005 |this,
3006 need_check,
3007 imported_memories,
3008 offset,
3009 heap_access_oob,
3010 unaligned_atomic| {
3011 this.machine.i32_save(
3012 target_value,
3013 memarg,
3014 target_addr,
3015 need_check,
3016 imported_memories,
3017 offset,
3018 heap_access_oob,
3019 unaligned_atomic,
3020 )
3021 },
3022 )?;
3023 }
3024 Operator::F32Store { ref memarg } => {
3025 let (target_value, canonicalize) = self.pop_value_released()?;
3026 let target_addr = self.pop_value_released()?.0;
3027 self.op_memory(
3028 |this,
3029 need_check,
3030 imported_memories,
3031 offset,
3032 heap_access_oob,
3033 unaligned_atomic| {
3034 this.machine.f32_save(
3035 target_value,
3036 memarg,
3037 target_addr,
3038 self.config.enable_nan_canonicalization
3039 && !matches!(canonicalize, CanonicalizeType::None),
3040 need_check,
3041 imported_memories,
3042 offset,
3043 heap_access_oob,
3044 unaligned_atomic,
3045 )
3046 },
3047 )?;
3048 }
3049 Operator::I32Store8 { ref memarg } => {
3050 let target_value = self.pop_value_released()?.0;
3051 let target_addr = self.pop_value_released()?.0;
3052 self.op_memory(
3053 |this,
3054 need_check,
3055 imported_memories,
3056 offset,
3057 heap_access_oob,
3058 unaligned_atomic| {
3059 this.machine.i32_save_8(
3060 target_value,
3061 memarg,
3062 target_addr,
3063 need_check,
3064 imported_memories,
3065 offset,
3066 heap_access_oob,
3067 unaligned_atomic,
3068 )
3069 },
3070 )?;
3071 }
3072 Operator::I32Store16 { ref memarg } => {
3073 let target_value = self.pop_value_released()?.0;
3074 let target_addr = self.pop_value_released()?.0;
3075 self.op_memory(
3076 |this,
3077 need_check,
3078 imported_memories,
3079 offset,
3080 heap_access_oob,
3081 unaligned_atomic| {
3082 this.machine.i32_save_16(
3083 target_value,
3084 memarg,
3085 target_addr,
3086 need_check,
3087 imported_memories,
3088 offset,
3089 heap_access_oob,
3090 unaligned_atomic,
3091 )
3092 },
3093 )?;
3094 }
3095 Operator::I64Load { ref memarg } => {
3096 let target = self.pop_value_released()?.0;
3097 let ret = self.acquire_location(&WpType::I64)?;
3098 self.value_stack.push((ret, CanonicalizeType::None));
3099 self.op_memory(
3100 |this,
3101 need_check,
3102 imported_memories,
3103 offset,
3104 heap_access_oob,
3105 unaligned_atomic| {
3106 this.machine.i64_load(
3107 target,
3108 memarg,
3109 ret,
3110 need_check,
3111 imported_memories,
3112 offset,
3113 heap_access_oob,
3114 unaligned_atomic,
3115 )
3116 },
3117 )?;
3118 }
3119 Operator::F64Load { ref memarg } => {
3120 let target = self.pop_value_released()?.0;
3121 let ret = self.acquire_location(&WpType::F64)?;
3122 self.value_stack.push((ret, CanonicalizeType::None));
3123 self.op_memory(
3124 |this,
3125 need_check,
3126 imported_memories,
3127 offset,
3128 heap_access_oob,
3129 unaligned_atomic| {
3130 this.machine.f64_load(
3131 target,
3132 memarg,
3133 ret,
3134 need_check,
3135 imported_memories,
3136 offset,
3137 heap_access_oob,
3138 unaligned_atomic,
3139 )
3140 },
3141 )?;
3142 }
3143 Operator::I64Load8U { ref memarg } => {
3144 let target = self.pop_value_released()?.0;
3145 let ret = self.acquire_location(&WpType::I64)?;
3146 self.value_stack.push((ret, CanonicalizeType::None));
3147 self.op_memory(
3148 |this,
3149 need_check,
3150 imported_memories,
3151 offset,
3152 heap_access_oob,
3153 unaligned_atomic| {
3154 this.machine.i64_load_8u(
3155 target,
3156 memarg,
3157 ret,
3158 need_check,
3159 imported_memories,
3160 offset,
3161 heap_access_oob,
3162 unaligned_atomic,
3163 )
3164 },
3165 )?;
3166 }
3167 Operator::I64Load8S { ref memarg } => {
3168 let target = self.pop_value_released()?.0;
3169 let ret = self.acquire_location(&WpType::I64)?;
3170 self.value_stack.push((ret, CanonicalizeType::None));
3171 self.op_memory(
3172 |this,
3173 need_check,
3174 imported_memories,
3175 offset,
3176 heap_access_oob,
3177 unaligned_atomic| {
3178 this.machine.i64_load_8s(
3179 target,
3180 memarg,
3181 ret,
3182 need_check,
3183 imported_memories,
3184 offset,
3185 heap_access_oob,
3186 unaligned_atomic,
3187 )
3188 },
3189 )?;
3190 }
3191 Operator::I64Load16U { ref memarg } => {
3192 let target = self.pop_value_released()?.0;
3193 let ret = self.acquire_location(&WpType::I64)?;
3194 self.value_stack.push((ret, CanonicalizeType::None));
3195 self.op_memory(
3196 |this,
3197 need_check,
3198 imported_memories,
3199 offset,
3200 heap_access_oob,
3201 unaligned_atomic| {
3202 this.machine.i64_load_16u(
3203 target,
3204 memarg,
3205 ret,
3206 need_check,
3207 imported_memories,
3208 offset,
3209 heap_access_oob,
3210 unaligned_atomic,
3211 )
3212 },
3213 )?;
3214 }
3215 Operator::I64Load16S { ref memarg } => {
3216 let target = self.pop_value_released()?.0;
3217 let ret = self.acquire_location(&WpType::I64)?;
3218 self.value_stack.push((ret, CanonicalizeType::None));
3219 self.op_memory(
3220 |this,
3221 need_check,
3222 imported_memories,
3223 offset,
3224 heap_access_oob,
3225 unaligned_atomic| {
3226 this.machine.i64_load_16s(
3227 target,
3228 memarg,
3229 ret,
3230 need_check,
3231 imported_memories,
3232 offset,
3233 heap_access_oob,
3234 unaligned_atomic,
3235 )
3236 },
3237 )?;
3238 }
3239 Operator::I64Load32U { ref memarg } => {
3240 let target = self.pop_value_released()?.0;
3241 let ret = self.acquire_location(&WpType::I64)?;
3242 self.value_stack.push((ret, CanonicalizeType::None));
3243 self.op_memory(
3244 |this,
3245 need_check,
3246 imported_memories,
3247 offset,
3248 heap_access_oob,
3249 unaligned_atomic| {
3250 this.machine.i64_load_32u(
3251 target,
3252 memarg,
3253 ret,
3254 need_check,
3255 imported_memories,
3256 offset,
3257 heap_access_oob,
3258 unaligned_atomic,
3259 )
3260 },
3261 )?;
3262 }
3263 Operator::I64Load32S { ref memarg } => {
3264 let target = self.pop_value_released()?.0;
3265 let ret = self.acquire_location(&WpType::I64)?;
3266 self.value_stack.push((ret, CanonicalizeType::None));
3267 self.op_memory(
3268 |this,
3269 need_check,
3270 imported_memories,
3271 offset,
3272 heap_access_oob,
3273 unaligned_atomic| {
3274 this.machine.i64_load_32s(
3275 target,
3276 memarg,
3277 ret,
3278 need_check,
3279 imported_memories,
3280 offset,
3281 heap_access_oob,
3282 unaligned_atomic,
3283 )
3284 },
3285 )?;
3286 }
3287 Operator::I64Store { ref memarg } => {
3288 let target_value = self.pop_value_released()?.0;
3289 let target_addr = self.pop_value_released()?.0;
3290
3291 self.op_memory(
3292 |this,
3293 need_check,
3294 imported_memories,
3295 offset,
3296 heap_access_oob,
3297 unaligned_atomic| {
3298 this.machine.i64_save(
3299 target_value,
3300 memarg,
3301 target_addr,
3302 need_check,
3303 imported_memories,
3304 offset,
3305 heap_access_oob,
3306 unaligned_atomic,
3307 )
3308 },
3309 )?;
3310 }
3311 Operator::F64Store { ref memarg } => {
3312 let (target_value, canonicalize) = self.pop_value_released()?;
3313 let target_addr = self.pop_value_released()?.0;
3314 self.op_memory(
3315 |this,
3316 need_check,
3317 imported_memories,
3318 offset,
3319 heap_access_oob,
3320 unaligned_atomic| {
3321 this.machine.f64_save(
3322 target_value,
3323 memarg,
3324 target_addr,
3325 self.config.enable_nan_canonicalization
3326 && !matches!(canonicalize, CanonicalizeType::None),
3327 need_check,
3328 imported_memories,
3329 offset,
3330 heap_access_oob,
3331 unaligned_atomic,
3332 )
3333 },
3334 )?;
3335 }
3336 Operator::I64Store8 { ref memarg } => {
3337 let target_value = self.pop_value_released()?.0;
3338 let target_addr = self.pop_value_released()?.0;
3339 self.op_memory(
3340 |this,
3341 need_check,
3342 imported_memories,
3343 offset,
3344 heap_access_oob,
3345 unaligned_atomic| {
3346 this.machine.i64_save_8(
3347 target_value,
3348 memarg,
3349 target_addr,
3350 need_check,
3351 imported_memories,
3352 offset,
3353 heap_access_oob,
3354 unaligned_atomic,
3355 )
3356 },
3357 )?;
3358 }
3359 Operator::I64Store16 { ref memarg } => {
3360 let target_value = self.pop_value_released()?.0;
3361 let target_addr = self.pop_value_released()?.0;
3362 self.op_memory(
3363 |this,
3364 need_check,
3365 imported_memories,
3366 offset,
3367 heap_access_oob,
3368 unaligned_atomic| {
3369 this.machine.i64_save_16(
3370 target_value,
3371 memarg,
3372 target_addr,
3373 need_check,
3374 imported_memories,
3375 offset,
3376 heap_access_oob,
3377 unaligned_atomic,
3378 )
3379 },
3380 )?;
3381 }
3382 Operator::I64Store32 { ref memarg } => {
3383 let target_value = self.pop_value_released()?.0;
3384 let target_addr = self.pop_value_released()?.0;
3385 self.op_memory(
3386 |this,
3387 need_check,
3388 imported_memories,
3389 offset,
3390 heap_access_oob,
3391 unaligned_atomic| {
3392 this.machine.i64_save_32(
3393 target_value,
3394 memarg,
3395 target_addr,
3396 need_check,
3397 imported_memories,
3398 offset,
3399 heap_access_oob,
3400 unaligned_atomic,
3401 )
3402 },
3403 )?;
3404 }
3405 Operator::Unreachable => {
3406 self.machine.move_location(
3407 Size::S64,
3408 Location::Memory(
3409 self.machine.get_vmctx_reg(),
3410 self.vmoffsets
3411 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_raise_trap_index())
3412 as i32,
3413 ),
3414 Location::GPR(self.machine.get_gpr_for_call()),
3415 )?;
3416
3417 self.emit_call_native(
3418 |this| {
3419 this.machine
3420 .emit_call_register(this.machine.get_gpr_for_call())
3421 },
3422 [(
3424 Location::Imm32(TrapCode::UnreachableCodeReached as u32),
3425 CanonicalizeType::None,
3426 )]
3427 .iter()
3428 .cloned(),
3429 [WpType::I32].iter().cloned(),
3430 iter::empty(),
3431 NativeCallType::Unreachable,
3432 )?;
3433 self.unreachable_depth = 1;
3434 }
3435 Operator::Return => {
3436 let frame = &self.control_stack[0];
3437 if !frame.return_types.is_empty() {
3438 self.emit_return_values(
3439 frame.value_stack_depth_after(),
3440 frame.return_types.len(),
3441 )?;
3442 }
3443 let frame = &self.control_stack[0];
3444 let frame_depth = frame.value_stack_depth_for_release();
3445 let label = frame.label;
3446 self.release_stack_locations_keep_stack_offset(frame_depth)?;
3447 self.machine.jmp_unconditional(label)?;
3448 self.unreachable_depth = 1;
3449 }
            Operator::Br { relative_depth } => {
                let frame =
                    &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
                if !frame.return_types.is_empty() {
                    if matches!(frame.state, ControlState::Loop) {
                        // A branch to a loop re-enters its header, so it carries
                        // the loop's parameters, not the block's results.
                        self.emit_loop_params_store(
                            frame.value_stack_depth_after(),
                            frame.param_types.len(),
                        )?;
                    } else {
                        self.emit_return_values(
                            frame.value_stack_depth_after(),
                            frame.return_types.len(),
                        )?;
                    }
                }
                // Re-borrow the target frame: the emit_* calls took &mut self.
                let stack_len = self.control_stack.len();
                let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
                let frame_depth = frame.value_stack_depth_for_release();
                let label = frame.label;

                // Free locations above the target frame while keeping the
                // tracked stack offset (other branches may still land here).
                self.release_stack_locations_keep_stack_offset(frame_depth)?;
                self.machine.jmp_unconditional(label)?;
                self.unreachable_depth = 1;
            }
            Operator::BrIf { relative_depth } => {
                let after = self.machine.get_label();
                let cond = self.pop_value_released()?.0;
                // Condition == 0 means "don't branch": jump over the whole
                // branch sequence to `after`.
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    after,
                )?;

                let frame =
                    &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
                if !frame.return_types.is_empty() {
                    if matches!(frame.state, ControlState::Loop) {
                        // Branch-to-loop carries the loop parameters.
                        self.emit_loop_params_store(
                            frame.value_stack_depth_after(),
                            frame.param_types.len(),
                        )?;
                    } else {
                        self.emit_return_values(
                            frame.value_stack_depth_after(),
                            frame.return_types.len(),
                        )?;
                    }
                }
                // Re-borrow after the &mut self calls above.
                let stack_len = self.control_stack.len();
                let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
                let stack_depth = frame.value_stack_depth_for_release();
                let label = frame.label;
                self.release_stack_locations_keep_stack_offset(stack_depth)?;
                self.machine.jmp_unconditional(label)?;

                // Fall-through path: branch not taken, stack/locations still live.
                self.machine.emit_label(after)?;
            }
            Operator::BrTable { ref targets } => {
                let default_target = targets.default();
                // Decode every branch target up front; a malformed table is
                // reported as a codegen error.
                let targets = targets
                    .targets()
                    .collect::<Result<Vec<_>, _>>()
                    .map_err(|e| CompileError::Codegen(format!("BrTable read_table: {e:?}")))?;
                let cond = self.pop_value_released()?.0;
                let table_label = self.machine.get_label();
                let mut table: Vec<Label> = vec![];
                let default_br = self.machine.get_label();
                // Out-of-range index (unsigned compare) goes to the default target.
                self.machine.jmp_on_condition(
                    UnsignedCondition::AboveEqual,
                    Size::S32,
                    cond,
                    Location::Imm32(targets.len() as u32),
                    default_br,
                )?;

                self.machine.emit_jmp_to_jumptable(table_label, cond)?;

                // One stub per target: place the branch operands, release stack
                // locations, then jump to the target frame's label.
                for target in targets.iter() {
                    let label = self.machine.get_label();
                    self.machine.emit_label(label)?;
                    table.push(label);
                    let frame =
                        &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
                    if !frame.return_types.is_empty() {
                        if matches!(frame.state, ControlState::Loop) {
                            // Branch-to-loop re-enters the header, so it carries
                            // the loop's parameters rather than block results.
                            self.emit_loop_params_store(
                                frame.value_stack_depth_after(),
                                frame.param_types.len(),
                            )?;
                        } else {
                            self.emit_return_values(
                                frame.value_stack_depth_after(),
                                frame.return_types.len(),
                            )?;
                        }
                    }
                    // Re-borrow after the &mut self calls above.
                    let frame =
                        &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
                    let stack_depth = frame.value_stack_depth_for_release();
                    let label = frame.label;
                    self.release_stack_locations_keep_stack_offset(stack_depth)?;
                    self.machine.jmp_unconditional(label)?;
                }
                self.machine.emit_label(default_br)?;

                // Default target: same operand/cleanup sequence as a stub.
                {
                    let frame = &self.control_stack
                        [self.control_stack.len() - 1 - (default_target as usize)];
                    if !frame.return_types.is_empty() {
                        if matches!(frame.state, ControlState::Loop) {
                            self.emit_loop_params_store(
                                frame.value_stack_depth_after(),
                                frame.param_types.len(),
                            )?;
                        } else {
                            self.emit_return_values(
                                frame.value_stack_depth_after(),
                                frame.return_types.len(),
                            )?;
                        }
                    }
                    let frame = &self.control_stack
                        [self.control_stack.len() - 1 - (default_target as usize)];
                    let stack_depth = frame.value_stack_depth_for_release();
                    let label = frame.label;
                    self.release_stack_locations_keep_stack_offset(stack_depth)?;
                    self.machine.jmp_unconditional(label)?;
                }

                // The jump table itself: a dense run of unconditional jumps
                // indexed by `cond` via emit_jmp_to_jumptable above.
                self.machine.emit_label(table_label)?;
                for x in table {
                    self.machine.jmp_unconditional(x)?;
                }
                self.unreachable_depth = 1;
            }
            Operator::Drop => {
                // Pop the top-of-stack value and free its machine location.
                self.pop_value_released()?;
            }
            Operator::End => {
                let frame = self.control_stack.pop().unwrap();

                // If the block end is still reachable, move its results into
                // the slots the enclosing frame expects.
                if !was_unreachable && !frame.return_types.is_empty() {
                    self.emit_return_values(
                        frame.value_stack_depth_after(),
                        frame.return_types.len(),
                    )?;
                }

                if self.control_stack.is_empty() {
                    // This `end` closes the function body: emit epilog + return.
                    self.machine.emit_label(frame.label)?;
                    self.finalize_locals(self.calling_convention)?;
                    self.machine.emit_function_epilog()?;

                    // A single float result must be moved into the FP return
                    // register per the calling convention.
                    #[allow(clippy::collapsible_if, reason = "hard to read otherwise")]
                    if let Ok(&return_type) = self.signature.results().iter().exactly_one()
                        && (return_type == Type::F32 || return_type == Type::F64)
                    {
                        self.machine.emit_function_return_float()?;
                    }
                    self.machine.emit_ret()?;
                } else {
                    // NOTE(review): clones the entire value stack just to borrow
                    // its tail past the borrow checker. `Vec::split_off` would
                    // avoid the copy and replace the truncate below, but only if
                    // `release_locations` never reads `self.value_stack` — verify
                    // before changing.
                    let released = &self.value_stack.clone()[frame.value_stack_depth_after()..];
                    self.release_locations(released)?;
                    self.value_stack.truncate(frame.value_stack_depth_after());

                    // A loop's label was bound at its header, so only non-loop
                    // frames get their label emitted here.
                    if !matches!(frame.state, ControlState::Loop) {
                        self.machine.emit_label(frame.label)?;
                    }

                    // An `if` without an `else` still needs the else-label bound
                    // so a false condition jumps past the then-body.
                    if let ControlState::If { label_else, .. } = frame.state {
                        self.machine.emit_label(label_else)?;
                    }

                }
            }
            Operator::AtomicFence => {
                // Full memory fence; the machine backend picks the strongest
                // ordering the target architecture provides.
                self.machine.emit_memory_fence()?;
            }
            Operator::I32AtomicLoad { ref memarg } => {
                // Pop the address operand and allocate a destination for the
                // loaded value. This shape is the template for every atomic
                // load arm (only the width/machine method differ).
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                // op_memory supplies the bounds-check configuration and the
                // OOB / misalignment trap labels for the current memory.
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
3668 Operator::I32AtomicLoad8U { ref memarg } => {
3669 let target = self.pop_value_released()?.0;
3670 let ret = self.acquire_location(&WpType::I32)?;
3671 self.value_stack.push((ret, CanonicalizeType::None));
3672 self.op_memory(
3673 |this,
3674 need_check,
3675 imported_memories,
3676 offset,
3677 heap_access_oob,
3678 unaligned_atomic| {
3679 this.machine.i32_atomic_load_8u(
3680 target,
3681 memarg,
3682 ret,
3683 need_check,
3684 imported_memories,
3685 offset,
3686 heap_access_oob,
3687 unaligned_atomic,
3688 )
3689 },
3690 )?;
3691 }
3692 Operator::I32AtomicLoad16U { ref memarg } => {
3693 let target = self.pop_value_released()?.0;
3694 let ret = self.acquire_location(&WpType::I32)?;
3695 self.value_stack.push((ret, CanonicalizeType::None));
3696 self.op_memory(
3697 |this,
3698 need_check,
3699 imported_memories,
3700 offset,
3701 heap_access_oob,
3702 unaligned_atomic| {
3703 this.machine.i32_atomic_load_16u(
3704 target,
3705 memarg,
3706 ret,
3707 need_check,
3708 imported_memories,
3709 offset,
3710 heap_access_oob,
3711 unaligned_atomic,
3712 )
3713 },
3714 )?;
3715 }
            Operator::I32AtomicStore { ref memarg } => {
                // Operand order: value on top of the stack, address beneath it.
                // This shape is the template for every atomic store arm.
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
3739 Operator::I32AtomicStore8 { ref memarg } => {
3740 let target_value = self.pop_value_released()?.0;
3741 let target_addr = self.pop_value_released()?.0;
3742 self.op_memory(
3743 |this,
3744 need_check,
3745 imported_memories,
3746 offset,
3747 heap_access_oob,
3748 unaligned_atomic| {
3749 this.machine.i32_atomic_save_8(
3750 target_value,
3751 memarg,
3752 target_addr,
3753 need_check,
3754 imported_memories,
3755 offset,
3756 heap_access_oob,
3757 unaligned_atomic,
3758 )
3759 },
3760 )?;
3761 }
3762 Operator::I32AtomicStore16 { ref memarg } => {
3763 let target_value = self.pop_value_released()?.0;
3764 let target_addr = self.pop_value_released()?.0;
3765 self.op_memory(
3766 |this,
3767 need_check,
3768 imported_memories,
3769 offset,
3770 heap_access_oob,
3771 unaligned_atomic| {
3772 this.machine.i32_atomic_save_16(
3773 target_value,
3774 memarg,
3775 target_addr,
3776 need_check,
3777 imported_memories,
3778 offset,
3779 heap_access_oob,
3780 unaligned_atomic,
3781 )
3782 },
3783 )?;
3784 }
3785 Operator::I64AtomicLoad { ref memarg } => {
3786 let target = self.pop_value_released()?.0;
3787 let ret = self.acquire_location(&WpType::I64)?;
3788 self.value_stack.push((ret, CanonicalizeType::None));
3789 self.op_memory(
3790 |this,
3791 need_check,
3792 imported_memories,
3793 offset,
3794 heap_access_oob,
3795 unaligned_atomic| {
3796 this.machine.i64_atomic_load(
3797 target,
3798 memarg,
3799 ret,
3800 need_check,
3801 imported_memories,
3802 offset,
3803 heap_access_oob,
3804 unaligned_atomic,
3805 )
3806 },
3807 )?;
3808 }
3809 Operator::I64AtomicLoad8U { ref memarg } => {
3810 let target = self.pop_value_released()?.0;
3811 let ret = self.acquire_location(&WpType::I64)?;
3812 self.value_stack.push((ret, CanonicalizeType::None));
3813 self.op_memory(
3814 |this,
3815 need_check,
3816 imported_memories,
3817 offset,
3818 heap_access_oob,
3819 unaligned_atomic| {
3820 this.machine.i64_atomic_load_8u(
3821 target,
3822 memarg,
3823 ret,
3824 need_check,
3825 imported_memories,
3826 offset,
3827 heap_access_oob,
3828 unaligned_atomic,
3829 )
3830 },
3831 )?;
3832 }
3833 Operator::I64AtomicLoad16U { ref memarg } => {
3834 let target = self.pop_value_released()?.0;
3835 let ret = self.acquire_location(&WpType::I64)?;
3836 self.value_stack.push((ret, CanonicalizeType::None));
3837 self.op_memory(
3838 |this,
3839 need_check,
3840 imported_memories,
3841 offset,
3842 heap_access_oob,
3843 unaligned_atomic| {
3844 this.machine.i64_atomic_load_16u(
3845 target,
3846 memarg,
3847 ret,
3848 need_check,
3849 imported_memories,
3850 offset,
3851 heap_access_oob,
3852 unaligned_atomic,
3853 )
3854 },
3855 )?;
3856 }
3857 Operator::I64AtomicLoad32U { ref memarg } => {
3858 let target = self.pop_value_released()?.0;
3859 let ret = self.acquire_location(&WpType::I64)?;
3860 self.value_stack.push((ret, CanonicalizeType::None));
3861 self.op_memory(
3862 |this,
3863 need_check,
3864 imported_memories,
3865 offset,
3866 heap_access_oob,
3867 unaligned_atomic| {
3868 this.machine.i64_atomic_load_32u(
3869 target,
3870 memarg,
3871 ret,
3872 need_check,
3873 imported_memories,
3874 offset,
3875 heap_access_oob,
3876 unaligned_atomic,
3877 )
3878 },
3879 )?;
3880 }
3881 Operator::I64AtomicStore { ref memarg } => {
3882 let target_value = self.pop_value_released()?.0;
3883 let target_addr = self.pop_value_released()?.0;
3884 self.op_memory(
3885 |this,
3886 need_check,
3887 imported_memories,
3888 offset,
3889 heap_access_oob,
3890 unaligned_atomic| {
3891 this.machine.i64_atomic_save(
3892 target_value,
3893 memarg,
3894 target_addr,
3895 need_check,
3896 imported_memories,
3897 offset,
3898 heap_access_oob,
3899 unaligned_atomic,
3900 )
3901 },
3902 )?;
3903 }
3904 Operator::I64AtomicStore8 { ref memarg } => {
3905 let target_value = self.pop_value_released()?.0;
3906 let target_addr = self.pop_value_released()?.0;
3907 self.op_memory(
3908 |this,
3909 need_check,
3910 imported_memories,
3911 offset,
3912 heap_access_oob,
3913 unaligned_atomic| {
3914 this.machine.i64_atomic_save_8(
3915 target_value,
3916 memarg,
3917 target_addr,
3918 need_check,
3919 imported_memories,
3920 offset,
3921 heap_access_oob,
3922 unaligned_atomic,
3923 )
3924 },
3925 )?;
3926 }
3927 Operator::I64AtomicStore16 { ref memarg } => {
3928 let target_value = self.pop_value_released()?.0;
3929 let target_addr = self.pop_value_released()?.0;
3930 self.op_memory(
3931 |this,
3932 need_check,
3933 imported_memories,
3934 offset,
3935 heap_access_oob,
3936 unaligned_atomic| {
3937 this.machine.i64_atomic_save_16(
3938 target_value,
3939 memarg,
3940 target_addr,
3941 need_check,
3942 imported_memories,
3943 offset,
3944 heap_access_oob,
3945 unaligned_atomic,
3946 )
3947 },
3948 )?;
3949 }
3950 Operator::I64AtomicStore32 { ref memarg } => {
3951 let target_value = self.pop_value_released()?.0;
3952 let target_addr = self.pop_value_released()?.0;
3953 self.op_memory(
3954 |this,
3955 need_check,
3956 imported_memories,
3957 offset,
3958 heap_access_oob,
3959 unaligned_atomic| {
3960 this.machine.i64_atomic_save_32(
3961 target_value,
3962 memarg,
3963 target_addr,
3964 need_check,
3965 imported_memories,
3966 offset,
3967 heap_access_oob,
3968 unaligned_atomic,
3969 )
3970 },
3971 )?;
3972 }
            Operator::I32AtomicRmwAdd { ref memarg } => {
                // RMW operand order: operand (addend) on top of the stack,
                // address beneath it; `ret` receives the value that was in
                // memory before the operation. This shape is the template for
                // every atomic read-modify-write arm (add/sub/and/or/xor/xchg).
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
3999 Operator::I64AtomicRmwAdd { ref memarg } => {
4000 let loc = self.pop_value_released()?.0;
4001 let target = self.pop_value_released()?.0;
4002 let ret = self.acquire_location(&WpType::I64)?;
4003 self.value_stack.push((ret, CanonicalizeType::None));
4004 self.op_memory(
4005 |this,
4006 need_check,
4007 imported_memories,
4008 offset,
4009 heap_access_oob,
4010 unaligned_atomic| {
4011 this.machine.i64_atomic_add(
4012 loc,
4013 target,
4014 memarg,
4015 ret,
4016 need_check,
4017 imported_memories,
4018 offset,
4019 heap_access_oob,
4020 unaligned_atomic,
4021 )
4022 },
4023 )?;
4024 }
4025 Operator::I32AtomicRmw8AddU { ref memarg } => {
4026 let loc = self.pop_value_released()?.0;
4027 let target = self.pop_value_released()?.0;
4028 let ret = self.acquire_location(&WpType::I32)?;
4029 self.value_stack.push((ret, CanonicalizeType::None));
4030 self.op_memory(
4031 |this,
4032 need_check,
4033 imported_memories,
4034 offset,
4035 heap_access_oob,
4036 unaligned_atomic| {
4037 this.machine.i32_atomic_add_8u(
4038 loc,
4039 target,
4040 memarg,
4041 ret,
4042 need_check,
4043 imported_memories,
4044 offset,
4045 heap_access_oob,
4046 unaligned_atomic,
4047 )
4048 },
4049 )?;
4050 }
4051 Operator::I32AtomicRmw16AddU { ref memarg } => {
4052 let loc = self.pop_value_released()?.0;
4053 let target = self.pop_value_released()?.0;
4054 let ret = self.acquire_location(&WpType::I32)?;
4055 self.value_stack.push((ret, CanonicalizeType::None));
4056 self.op_memory(
4057 |this,
4058 need_check,
4059 imported_memories,
4060 offset,
4061 heap_access_oob,
4062 unaligned_atomic| {
4063 this.machine.i32_atomic_add_16u(
4064 loc,
4065 target,
4066 memarg,
4067 ret,
4068 need_check,
4069 imported_memories,
4070 offset,
4071 heap_access_oob,
4072 unaligned_atomic,
4073 )
4074 },
4075 )?;
4076 }
4077 Operator::I64AtomicRmw8AddU { ref memarg } => {
4078 let loc = self.pop_value_released()?.0;
4079 let target = self.pop_value_released()?.0;
4080 let ret = self.acquire_location(&WpType::I64)?;
4081 self.value_stack.push((ret, CanonicalizeType::None));
4082 self.op_memory(
4083 |this,
4084 need_check,
4085 imported_memories,
4086 offset,
4087 heap_access_oob,
4088 unaligned_atomic| {
4089 this.machine.i64_atomic_add_8u(
4090 loc,
4091 target,
4092 memarg,
4093 ret,
4094 need_check,
4095 imported_memories,
4096 offset,
4097 heap_access_oob,
4098 unaligned_atomic,
4099 )
4100 },
4101 )?;
4102 }
4103 Operator::I64AtomicRmw16AddU { ref memarg } => {
4104 let loc = self.pop_value_released()?.0;
4105 let target = self.pop_value_released()?.0;
4106 let ret = self.acquire_location(&WpType::I64)?;
4107 self.value_stack.push((ret, CanonicalizeType::None));
4108 self.op_memory(
4109 |this,
4110 need_check,
4111 imported_memories,
4112 offset,
4113 heap_access_oob,
4114 unaligned_atomic| {
4115 this.machine.i64_atomic_add_16u(
4116 loc,
4117 target,
4118 memarg,
4119 ret,
4120 need_check,
4121 imported_memories,
4122 offset,
4123 heap_access_oob,
4124 unaligned_atomic,
4125 )
4126 },
4127 )?;
4128 }
4129 Operator::I64AtomicRmw32AddU { ref memarg } => {
4130 let loc = self.pop_value_released()?.0;
4131 let target = self.pop_value_released()?.0;
4132 let ret = self.acquire_location(&WpType::I64)?;
4133 self.value_stack.push((ret, CanonicalizeType::None));
4134 self.op_memory(
4135 |this,
4136 need_check,
4137 imported_memories,
4138 offset,
4139 heap_access_oob,
4140 unaligned_atomic| {
4141 this.machine.i64_atomic_add_32u(
4142 loc,
4143 target,
4144 memarg,
4145 ret,
4146 need_check,
4147 imported_memories,
4148 offset,
4149 heap_access_oob,
4150 unaligned_atomic,
4151 )
4152 },
4153 )?;
4154 }
4155 Operator::I32AtomicRmwSub { ref memarg } => {
4156 let loc = self.pop_value_released()?.0;
4157 let target = self.pop_value_released()?.0;
4158 let ret = self.acquire_location(&WpType::I32)?;
4159 self.value_stack.push((ret, CanonicalizeType::None));
4160 self.op_memory(
4161 |this,
4162 need_check,
4163 imported_memories,
4164 offset,
4165 heap_access_oob,
4166 unaligned_atomic| {
4167 this.machine.i32_atomic_sub(
4168 loc,
4169 target,
4170 memarg,
4171 ret,
4172 need_check,
4173 imported_memories,
4174 offset,
4175 heap_access_oob,
4176 unaligned_atomic,
4177 )
4178 },
4179 )?;
4180 }
4181 Operator::I64AtomicRmwSub { ref memarg } => {
4182 let loc = self.pop_value_released()?.0;
4183 let target = self.pop_value_released()?.0;
4184 let ret = self.acquire_location(&WpType::I64)?;
4185 self.value_stack.push((ret, CanonicalizeType::None));
4186 self.op_memory(
4187 |this,
4188 need_check,
4189 imported_memories,
4190 offset,
4191 heap_access_oob,
4192 unaligned_atomic| {
4193 this.machine.i64_atomic_sub(
4194 loc,
4195 target,
4196 memarg,
4197 ret,
4198 need_check,
4199 imported_memories,
4200 offset,
4201 heap_access_oob,
4202 unaligned_atomic,
4203 )
4204 },
4205 )?;
4206 }
4207 Operator::I32AtomicRmw8SubU { ref memarg } => {
4208 let loc = self.pop_value_released()?.0;
4209 let target = self.pop_value_released()?.0;
4210 let ret = self.acquire_location(&WpType::I32)?;
4211 self.value_stack.push((ret, CanonicalizeType::None));
4212 self.op_memory(
4213 |this,
4214 need_check,
4215 imported_memories,
4216 offset,
4217 heap_access_oob,
4218 unaligned_atomic| {
4219 this.machine.i32_atomic_sub_8u(
4220 loc,
4221 target,
4222 memarg,
4223 ret,
4224 need_check,
4225 imported_memories,
4226 offset,
4227 heap_access_oob,
4228 unaligned_atomic,
4229 )
4230 },
4231 )?;
4232 }
4233 Operator::I32AtomicRmw16SubU { ref memarg } => {
4234 let loc = self.pop_value_released()?.0;
4235 let target = self.pop_value_released()?.0;
4236 let ret = self.acquire_location(&WpType::I32)?;
4237 self.value_stack.push((ret, CanonicalizeType::None));
4238 self.op_memory(
4239 |this,
4240 need_check,
4241 imported_memories,
4242 offset,
4243 heap_access_oob,
4244 unaligned_atomic| {
4245 this.machine.i32_atomic_sub_16u(
4246 loc,
4247 target,
4248 memarg,
4249 ret,
4250 need_check,
4251 imported_memories,
4252 offset,
4253 heap_access_oob,
4254 unaligned_atomic,
4255 )
4256 },
4257 )?;
4258 }
4259 Operator::I64AtomicRmw8SubU { ref memarg } => {
4260 let loc = self.pop_value_released()?.0;
4261 let target = self.pop_value_released()?.0;
4262 let ret = self.acquire_location(&WpType::I64)?;
4263 self.value_stack.push((ret, CanonicalizeType::None));
4264 self.op_memory(
4265 |this,
4266 need_check,
4267 imported_memories,
4268 offset,
4269 heap_access_oob,
4270 unaligned_atomic| {
4271 this.machine.i64_atomic_sub_8u(
4272 loc,
4273 target,
4274 memarg,
4275 ret,
4276 need_check,
4277 imported_memories,
4278 offset,
4279 heap_access_oob,
4280 unaligned_atomic,
4281 )
4282 },
4283 )?;
4284 }
4285 Operator::I64AtomicRmw16SubU { ref memarg } => {
4286 let loc = self.pop_value_released()?.0;
4287 let target = self.pop_value_released()?.0;
4288 let ret = self.acquire_location(&WpType::I64)?;
4289 self.value_stack.push((ret, CanonicalizeType::None));
4290 self.op_memory(
4291 |this,
4292 need_check,
4293 imported_memories,
4294 offset,
4295 heap_access_oob,
4296 unaligned_atomic| {
4297 this.machine.i64_atomic_sub_16u(
4298 loc,
4299 target,
4300 memarg,
4301 ret,
4302 need_check,
4303 imported_memories,
4304 offset,
4305 heap_access_oob,
4306 unaligned_atomic,
4307 )
4308 },
4309 )?;
4310 }
4311 Operator::I64AtomicRmw32SubU { ref memarg } => {
4312 let loc = self.pop_value_released()?.0;
4313 let target = self.pop_value_released()?.0;
4314 let ret = self.acquire_location(&WpType::I64)?;
4315 self.value_stack.push((ret, CanonicalizeType::None));
4316 self.op_memory(
4317 |this,
4318 need_check,
4319 imported_memories,
4320 offset,
4321 heap_access_oob,
4322 unaligned_atomic| {
4323 this.machine.i64_atomic_sub_32u(
4324 loc,
4325 target,
4326 memarg,
4327 ret,
4328 need_check,
4329 imported_memories,
4330 offset,
4331 heap_access_oob,
4332 unaligned_atomic,
4333 )
4334 },
4335 )?;
4336 }
4337 Operator::I32AtomicRmwAnd { ref memarg } => {
4338 let loc = self.pop_value_released()?.0;
4339 let target = self.pop_value_released()?.0;
4340 let ret = self.acquire_location(&WpType::I32)?;
4341 self.value_stack.push((ret, CanonicalizeType::None));
4342 self.op_memory(
4343 |this,
4344 need_check,
4345 imported_memories,
4346 offset,
4347 heap_access_oob,
4348 unaligned_atomic| {
4349 this.machine.i32_atomic_and(
4350 loc,
4351 target,
4352 memarg,
4353 ret,
4354 need_check,
4355 imported_memories,
4356 offset,
4357 heap_access_oob,
4358 unaligned_atomic,
4359 )
4360 },
4361 )?;
4362 }
4363 Operator::I64AtomicRmwAnd { ref memarg } => {
4364 let loc = self.pop_value_released()?.0;
4365 let target = self.pop_value_released()?.0;
4366 let ret = self.acquire_location(&WpType::I64)?;
4367 self.value_stack.push((ret, CanonicalizeType::None));
4368 self.op_memory(
4369 |this,
4370 need_check,
4371 imported_memories,
4372 offset,
4373 heap_access_oob,
4374 unaligned_atomic| {
4375 this.machine.i64_atomic_and(
4376 loc,
4377 target,
4378 memarg,
4379 ret,
4380 need_check,
4381 imported_memories,
4382 offset,
4383 heap_access_oob,
4384 unaligned_atomic,
4385 )
4386 },
4387 )?;
4388 }
4389 Operator::I32AtomicRmw8AndU { ref memarg } => {
4390 let loc = self.pop_value_released()?.0;
4391 let target = self.pop_value_released()?.0;
4392 let ret = self.acquire_location(&WpType::I32)?;
4393 self.value_stack.push((ret, CanonicalizeType::None));
4394 self.op_memory(
4395 |this,
4396 need_check,
4397 imported_memories,
4398 offset,
4399 heap_access_oob,
4400 unaligned_atomic| {
4401 this.machine.i32_atomic_and_8u(
4402 loc,
4403 target,
4404 memarg,
4405 ret,
4406 need_check,
4407 imported_memories,
4408 offset,
4409 heap_access_oob,
4410 unaligned_atomic,
4411 )
4412 },
4413 )?;
4414 }
4415 Operator::I32AtomicRmw16AndU { ref memarg } => {
4416 let loc = self.pop_value_released()?.0;
4417 let target = self.pop_value_released()?.0;
4418 let ret = self.acquire_location(&WpType::I32)?;
4419 self.value_stack.push((ret, CanonicalizeType::None));
4420 self.op_memory(
4421 |this,
4422 need_check,
4423 imported_memories,
4424 offset,
4425 heap_access_oob,
4426 unaligned_atomic| {
4427 this.machine.i32_atomic_and_16u(
4428 loc,
4429 target,
4430 memarg,
4431 ret,
4432 need_check,
4433 imported_memories,
4434 offset,
4435 heap_access_oob,
4436 unaligned_atomic,
4437 )
4438 },
4439 )?;
4440 }
4441 Operator::I64AtomicRmw8AndU { ref memarg } => {
4442 let loc = self.pop_value_released()?.0;
4443 let target = self.pop_value_released()?.0;
4444 let ret = self.acquire_location(&WpType::I64)?;
4445 self.value_stack.push((ret, CanonicalizeType::None));
4446 self.op_memory(
4447 |this,
4448 need_check,
4449 imported_memories,
4450 offset,
4451 heap_access_oob,
4452 unaligned_atomic| {
4453 this.machine.i64_atomic_and_8u(
4454 loc,
4455 target,
4456 memarg,
4457 ret,
4458 need_check,
4459 imported_memories,
4460 offset,
4461 heap_access_oob,
4462 unaligned_atomic,
4463 )
4464 },
4465 )?;
4466 }
4467 Operator::I64AtomicRmw16AndU { ref memarg } => {
4468 let loc = self.pop_value_released()?.0;
4469 let target = self.pop_value_released()?.0;
4470 let ret = self.acquire_location(&WpType::I64)?;
4471 self.value_stack.push((ret, CanonicalizeType::None));
4472 self.op_memory(
4473 |this,
4474 need_check,
4475 imported_memories,
4476 offset,
4477 heap_access_oob,
4478 unaligned_atomic| {
4479 this.machine.i64_atomic_and_16u(
4480 loc,
4481 target,
4482 memarg,
4483 ret,
4484 need_check,
4485 imported_memories,
4486 offset,
4487 heap_access_oob,
4488 unaligned_atomic,
4489 )
4490 },
4491 )?;
4492 }
4493 Operator::I64AtomicRmw32AndU { ref memarg } => {
4494 let loc = self.pop_value_released()?.0;
4495 let target = self.pop_value_released()?.0;
4496 let ret = self.acquire_location(&WpType::I64)?;
4497 self.value_stack.push((ret, CanonicalizeType::None));
4498 self.op_memory(
4499 |this,
4500 need_check,
4501 imported_memories,
4502 offset,
4503 heap_access_oob,
4504 unaligned_atomic| {
4505 this.machine.i64_atomic_and_32u(
4506 loc,
4507 target,
4508 memarg,
4509 ret,
4510 need_check,
4511 imported_memories,
4512 offset,
4513 heap_access_oob,
4514 unaligned_atomic,
4515 )
4516 },
4517 )?;
4518 }
4519 Operator::I32AtomicRmwOr { ref memarg } => {
4520 let loc = self.pop_value_released()?.0;
4521 let target = self.pop_value_released()?.0;
4522 let ret = self.acquire_location(&WpType::I32)?;
4523 self.value_stack.push((ret, CanonicalizeType::None));
4524 self.op_memory(
4525 |this,
4526 need_check,
4527 imported_memories,
4528 offset,
4529 heap_access_oob,
4530 unaligned_atomic| {
4531 this.machine.i32_atomic_or(
4532 loc,
4533 target,
4534 memarg,
4535 ret,
4536 need_check,
4537 imported_memories,
4538 offset,
4539 heap_access_oob,
4540 unaligned_atomic,
4541 )
4542 },
4543 )?;
4544 }
4545 Operator::I64AtomicRmwOr { ref memarg } => {
4546 let loc = self.pop_value_released()?.0;
4547 let target = self.pop_value_released()?.0;
4548 let ret = self.acquire_location(&WpType::I64)?;
4549 self.value_stack.push((ret, CanonicalizeType::None));
4550 self.op_memory(
4551 |this,
4552 need_check,
4553 imported_memories,
4554 offset,
4555 heap_access_oob,
4556 unaligned_atomic| {
4557 this.machine.i64_atomic_or(
4558 loc,
4559 target,
4560 memarg,
4561 ret,
4562 need_check,
4563 imported_memories,
4564 offset,
4565 heap_access_oob,
4566 unaligned_atomic,
4567 )
4568 },
4569 )?;
4570 }
4571 Operator::I32AtomicRmw8OrU { ref memarg } => {
4572 let loc = self.pop_value_released()?.0;
4573 let target = self.pop_value_released()?.0;
4574 let ret = self.acquire_location(&WpType::I32)?;
4575 self.value_stack.push((ret, CanonicalizeType::None));
4576 self.op_memory(
4577 |this,
4578 need_check,
4579 imported_memories,
4580 offset,
4581 heap_access_oob,
4582 unaligned_atomic| {
4583 this.machine.i32_atomic_or_8u(
4584 loc,
4585 target,
4586 memarg,
4587 ret,
4588 need_check,
4589 imported_memories,
4590 offset,
4591 heap_access_oob,
4592 unaligned_atomic,
4593 )
4594 },
4595 )?;
4596 }
4597 Operator::I32AtomicRmw16OrU { ref memarg } => {
4598 let loc = self.pop_value_released()?.0;
4599 let target = self.pop_value_released()?.0;
4600 let ret = self.acquire_location(&WpType::I32)?;
4601 self.value_stack.push((ret, CanonicalizeType::None));
4602 self.op_memory(
4603 |this,
4604 need_check,
4605 imported_memories,
4606 offset,
4607 heap_access_oob,
4608 unaligned_atomic| {
4609 this.machine.i32_atomic_or_16u(
4610 loc,
4611 target,
4612 memarg,
4613 ret,
4614 need_check,
4615 imported_memories,
4616 offset,
4617 heap_access_oob,
4618 unaligned_atomic,
4619 )
4620 },
4621 )?;
4622 }
4623 Operator::I64AtomicRmw8OrU { ref memarg } => {
4624 let loc = self.pop_value_released()?.0;
4625 let target = self.pop_value_released()?.0;
4626 let ret = self.acquire_location(&WpType::I64)?;
4627 self.value_stack.push((ret, CanonicalizeType::None));
4628 self.op_memory(
4629 |this,
4630 need_check,
4631 imported_memories,
4632 offset,
4633 heap_access_oob,
4634 unaligned_atomic| {
4635 this.machine.i64_atomic_or_8u(
4636 loc,
4637 target,
4638 memarg,
4639 ret,
4640 need_check,
4641 imported_memories,
4642 offset,
4643 heap_access_oob,
4644 unaligned_atomic,
4645 )
4646 },
4647 )?;
4648 }
4649 Operator::I64AtomicRmw16OrU { ref memarg } => {
4650 let loc = self.pop_value_released()?.0;
4651 let target = self.pop_value_released()?.0;
4652 let ret = self.acquire_location(&WpType::I64)?;
4653 self.value_stack.push((ret, CanonicalizeType::None));
4654 self.op_memory(
4655 |this,
4656 need_check,
4657 imported_memories,
4658 offset,
4659 heap_access_oob,
4660 unaligned_atomic| {
4661 this.machine.i64_atomic_or_16u(
4662 loc,
4663 target,
4664 memarg,
4665 ret,
4666 need_check,
4667 imported_memories,
4668 offset,
4669 heap_access_oob,
4670 unaligned_atomic,
4671 )
4672 },
4673 )?;
4674 }
4675 Operator::I64AtomicRmw32OrU { ref memarg } => {
4676 let loc = self.pop_value_released()?.0;
4677 let target = self.pop_value_released()?.0;
4678 let ret = self.acquire_location(&WpType::I64)?;
4679 self.value_stack.push((ret, CanonicalizeType::None));
4680 self.op_memory(
4681 |this,
4682 need_check,
4683 imported_memories,
4684 offset,
4685 heap_access_oob,
4686 unaligned_atomic| {
4687 this.machine.i64_atomic_or_32u(
4688 loc,
4689 target,
4690 memarg,
4691 ret,
4692 need_check,
4693 imported_memories,
4694 offset,
4695 heap_access_oob,
4696 unaligned_atomic,
4697 )
4698 },
4699 )?;
4700 }
4701 Operator::I32AtomicRmwXor { ref memarg } => {
4702 let loc = self.pop_value_released()?.0;
4703 let target = self.pop_value_released()?.0;
4704 let ret = self.acquire_location(&WpType::I32)?;
4705 self.value_stack.push((ret, CanonicalizeType::None));
4706 self.op_memory(
4707 |this,
4708 need_check,
4709 imported_memories,
4710 offset,
4711 heap_access_oob,
4712 unaligned_atomic| {
4713 this.machine.i32_atomic_xor(
4714 loc,
4715 target,
4716 memarg,
4717 ret,
4718 need_check,
4719 imported_memories,
4720 offset,
4721 heap_access_oob,
4722 unaligned_atomic,
4723 )
4724 },
4725 )?;
4726 }
4727 Operator::I64AtomicRmwXor { ref memarg } => {
4728 let loc = self.pop_value_released()?.0;
4729 let target = self.pop_value_released()?.0;
4730 let ret = self.acquire_location(&WpType::I64)?;
4731 self.value_stack.push((ret, CanonicalizeType::None));
4732 self.op_memory(
4733 |this,
4734 need_check,
4735 imported_memories,
4736 offset,
4737 heap_access_oob,
4738 unaligned_atomic| {
4739 this.machine.i64_atomic_xor(
4740 loc,
4741 target,
4742 memarg,
4743 ret,
4744 need_check,
4745 imported_memories,
4746 offset,
4747 heap_access_oob,
4748 unaligned_atomic,
4749 )
4750 },
4751 )?;
4752 }
4753 Operator::I32AtomicRmw8XorU { ref memarg } => {
4754 let loc = self.pop_value_released()?.0;
4755 let target = self.pop_value_released()?.0;
4756 let ret = self.acquire_location(&WpType::I32)?;
4757 self.value_stack.push((ret, CanonicalizeType::None));
4758 self.op_memory(
4759 |this,
4760 need_check,
4761 imported_memories,
4762 offset,
4763 heap_access_oob,
4764 unaligned_atomic| {
4765 this.machine.i32_atomic_xor_8u(
4766 loc,
4767 target,
4768 memarg,
4769 ret,
4770 need_check,
4771 imported_memories,
4772 offset,
4773 heap_access_oob,
4774 unaligned_atomic,
4775 )
4776 },
4777 )?;
4778 }
4779 Operator::I32AtomicRmw16XorU { ref memarg } => {
4780 let loc = self.pop_value_released()?.0;
4781 let target = self.pop_value_released()?.0;
4782 let ret = self.acquire_location(&WpType::I32)?;
4783 self.value_stack.push((ret, CanonicalizeType::None));
4784 self.op_memory(
4785 |this,
4786 need_check,
4787 imported_memories,
4788 offset,
4789 heap_access_oob,
4790 unaligned_atomic| {
4791 this.machine.i32_atomic_xor_16u(
4792 loc,
4793 target,
4794 memarg,
4795 ret,
4796 need_check,
4797 imported_memories,
4798 offset,
4799 heap_access_oob,
4800 unaligned_atomic,
4801 )
4802 },
4803 )?;
4804 }
4805 Operator::I64AtomicRmw8XorU { ref memarg } => {
4806 let loc = self.pop_value_released()?.0;
4807 let target = self.pop_value_released()?.0;
4808 let ret = self.acquire_location(&WpType::I64)?;
4809 self.value_stack.push((ret, CanonicalizeType::None));
4810 self.op_memory(
4811 |this,
4812 need_check,
4813 imported_memories,
4814 offset,
4815 heap_access_oob,
4816 unaligned_atomic| {
4817 this.machine.i64_atomic_xor_8u(
4818 loc,
4819 target,
4820 memarg,
4821 ret,
4822 need_check,
4823 imported_memories,
4824 offset,
4825 heap_access_oob,
4826 unaligned_atomic,
4827 )
4828 },
4829 )?;
4830 }
4831 Operator::I64AtomicRmw16XorU { ref memarg } => {
4832 let loc = self.pop_value_released()?.0;
4833 let target = self.pop_value_released()?.0;
4834 let ret = self.acquire_location(&WpType::I64)?;
4835 self.value_stack.push((ret, CanonicalizeType::None));
4836 self.op_memory(
4837 |this,
4838 need_check,
4839 imported_memories,
4840 offset,
4841 heap_access_oob,
4842 unaligned_atomic| {
4843 this.machine.i64_atomic_xor_16u(
4844 loc,
4845 target,
4846 memarg,
4847 ret,
4848 need_check,
4849 imported_memories,
4850 offset,
4851 heap_access_oob,
4852 unaligned_atomic,
4853 )
4854 },
4855 )?;
4856 }
4857 Operator::I64AtomicRmw32XorU { ref memarg } => {
4858 let loc = self.pop_value_released()?.0;
4859 let target = self.pop_value_released()?.0;
4860 let ret = self.acquire_location(&WpType::I64)?;
4861 self.value_stack.push((ret, CanonicalizeType::None));
4862 self.op_memory(
4863 |this,
4864 need_check,
4865 imported_memories,
4866 offset,
4867 heap_access_oob,
4868 unaligned_atomic| {
4869 this.machine.i64_atomic_xor_32u(
4870 loc,
4871 target,
4872 memarg,
4873 ret,
4874 need_check,
4875 imported_memories,
4876 offset,
4877 heap_access_oob,
4878 unaligned_atomic,
4879 )
4880 },
4881 )?;
4882 }
4883 Operator::I32AtomicRmwXchg { ref memarg } => {
4884 let loc = self.pop_value_released()?.0;
4885 let target = self.pop_value_released()?.0;
4886 let ret = self.acquire_location(&WpType::I32)?;
4887 self.value_stack.push((ret, CanonicalizeType::None));
4888 self.op_memory(
4889 |this,
4890 need_check,
4891 imported_memories,
4892 offset,
4893 heap_access_oob,
4894 unaligned_atomic| {
4895 this.machine.i32_atomic_xchg(
4896 loc,
4897 target,
4898 memarg,
4899 ret,
4900 need_check,
4901 imported_memories,
4902 offset,
4903 heap_access_oob,
4904 unaligned_atomic,
4905 )
4906 },
4907 )?;
4908 }
4909 Operator::I64AtomicRmwXchg { ref memarg } => {
4910 let loc = self.pop_value_released()?.0;
4911 let target = self.pop_value_released()?.0;
4912 let ret = self.acquire_location(&WpType::I64)?;
4913 self.value_stack.push((ret, CanonicalizeType::None));
4914 self.op_memory(
4915 |this,
4916 need_check,
4917 imported_memories,
4918 offset,
4919 heap_access_oob,
4920 unaligned_atomic| {
4921 this.machine.i64_atomic_xchg(
4922 loc,
4923 target,
4924 memarg,
4925 ret,
4926 need_check,
4927 imported_memories,
4928 offset,
4929 heap_access_oob,
4930 unaligned_atomic,
4931 )
4932 },
4933 )?;
4934 }
4935 Operator::I32AtomicRmw8XchgU { ref memarg } => {
4936 let loc = self.pop_value_released()?.0;
4937 let target = self.pop_value_released()?.0;
4938 let ret = self.acquire_location(&WpType::I32)?;
4939 self.value_stack.push((ret, CanonicalizeType::None));
4940 self.op_memory(
4941 |this,
4942 need_check,
4943 imported_memories,
4944 offset,
4945 heap_access_oob,
4946 unaligned_atomic| {
4947 this.machine.i32_atomic_xchg_8u(
4948 loc,
4949 target,
4950 memarg,
4951 ret,
4952 need_check,
4953 imported_memories,
4954 offset,
4955 heap_access_oob,
4956 unaligned_atomic,
4957 )
4958 },
4959 )?;
4960 }
4961 Operator::I32AtomicRmw16XchgU { ref memarg } => {
4962 let loc = self.pop_value_released()?.0;
4963 let target = self.pop_value_released()?.0;
4964 let ret = self.acquire_location(&WpType::I32)?;
4965 self.value_stack.push((ret, CanonicalizeType::None));
4966 self.op_memory(
4967 |this,
4968 need_check,
4969 imported_memories,
4970 offset,
4971 heap_access_oob,
4972 unaligned_atomic| {
4973 this.machine.i32_atomic_xchg_16u(
4974 loc,
4975 target,
4976 memarg,
4977 ret,
4978 need_check,
4979 imported_memories,
4980 offset,
4981 heap_access_oob,
4982 unaligned_atomic,
4983 )
4984 },
4985 )?;
4986 }
4987 Operator::I64AtomicRmw8XchgU { ref memarg } => {
4988 let loc = self.pop_value_released()?.0;
4989 let target = self.pop_value_released()?.0;
4990 let ret = self.acquire_location(&WpType::I64)?;
4991 self.value_stack.push((ret, CanonicalizeType::None));
4992 self.op_memory(
4993 |this,
4994 need_check,
4995 imported_memories,
4996 offset,
4997 heap_access_oob,
4998 unaligned_atomic| {
4999 this.machine.i64_atomic_xchg_8u(
5000 loc,
5001 target,
5002 memarg,
5003 ret,
5004 need_check,
5005 imported_memories,
5006 offset,
5007 heap_access_oob,
5008 unaligned_atomic,
5009 )
5010 },
5011 )?;
5012 }
5013 Operator::I64AtomicRmw16XchgU { ref memarg } => {
5014 let loc = self.pop_value_released()?.0;
5015 let target = self.pop_value_released()?.0;
5016 let ret = self.acquire_location(&WpType::I64)?;
5017 self.value_stack.push((ret, CanonicalizeType::None));
5018 self.op_memory(
5019 |this,
5020 need_check,
5021 imported_memories,
5022 offset,
5023 heap_access_oob,
5024 unaligned_atomic| {
5025 this.machine.i64_atomic_xchg_16u(
5026 loc,
5027 target,
5028 memarg,
5029 ret,
5030 need_check,
5031 imported_memories,
5032 offset,
5033 heap_access_oob,
5034 unaligned_atomic,
5035 )
5036 },
5037 )?;
5038 }
5039 Operator::I64AtomicRmw32XchgU { ref memarg } => {
5040 let loc = self.pop_value_released()?.0;
5041 let target = self.pop_value_released()?.0;
5042 let ret = self.acquire_location(&WpType::I64)?;
5043 self.value_stack.push((ret, CanonicalizeType::None));
5044 self.op_memory(
5045 |this,
5046 need_check,
5047 imported_memories,
5048 offset,
5049 heap_access_oob,
5050 unaligned_atomic| {
5051 this.machine.i64_atomic_xchg_32u(
5052 loc,
5053 target,
5054 memarg,
5055 ret,
5056 need_check,
5057 imported_memories,
5058 offset,
5059 heap_access_oob,
5060 unaligned_atomic,
5061 )
5062 },
5063 )?;
5064 }
5065 Operator::I32AtomicRmwCmpxchg { ref memarg } => {
5066 let new = self.pop_value_released()?.0;
5067 let cmp = self.pop_value_released()?.0;
5068 let target = self.pop_value_released()?.0;
5069 let ret = self.acquire_location(&WpType::I32)?;
5070 self.value_stack.push((ret, CanonicalizeType::None));
5071 self.op_memory(
5072 |this,
5073 need_check,
5074 imported_memories,
5075 offset,
5076 heap_access_oob,
5077 unaligned_atomic| {
5078 this.machine.i32_atomic_cmpxchg(
5079 new,
5080 cmp,
5081 target,
5082 memarg,
5083 ret,
5084 need_check,
5085 imported_memories,
5086 offset,
5087 heap_access_oob,
5088 unaligned_atomic,
5089 )
5090 },
5091 )?;
5092 }
5093 Operator::I64AtomicRmwCmpxchg { ref memarg } => {
5094 let new = self.pop_value_released()?.0;
5095 let cmp = self.pop_value_released()?.0;
5096 let target = self.pop_value_released()?.0;
5097 let ret = self.acquire_location(&WpType::I64)?;
5098 self.value_stack.push((ret, CanonicalizeType::None));
5099 self.op_memory(
5100 |this,
5101 need_check,
5102 imported_memories,
5103 offset,
5104 heap_access_oob,
5105 unaligned_atomic| {
5106 this.machine.i64_atomic_cmpxchg(
5107 new,
5108 cmp,
5109 target,
5110 memarg,
5111 ret,
5112 need_check,
5113 imported_memories,
5114 offset,
5115 heap_access_oob,
5116 unaligned_atomic,
5117 )
5118 },
5119 )?;
5120 }
5121 Operator::I32AtomicRmw8CmpxchgU { ref memarg } => {
5122 let new = self.pop_value_released()?.0;
5123 let cmp = self.pop_value_released()?.0;
5124 let target = self.pop_value_released()?.0;
5125 let ret = self.acquire_location(&WpType::I32)?;
5126 self.value_stack.push((ret, CanonicalizeType::None));
5127 self.op_memory(
5128 |this,
5129 need_check,
5130 imported_memories,
5131 offset,
5132 heap_access_oob,
5133 unaligned_atomic| {
5134 this.machine.i32_atomic_cmpxchg_8u(
5135 new,
5136 cmp,
5137 target,
5138 memarg,
5139 ret,
5140 need_check,
5141 imported_memories,
5142 offset,
5143 heap_access_oob,
5144 unaligned_atomic,
5145 )
5146 },
5147 )?;
5148 }
5149 Operator::I32AtomicRmw16CmpxchgU { ref memarg } => {
5150 let new = self.pop_value_released()?.0;
5151 let cmp = self.pop_value_released()?.0;
5152 let target = self.pop_value_released()?.0;
5153 let ret = self.acquire_location(&WpType::I32)?;
5154 self.value_stack.push((ret, CanonicalizeType::None));
5155 self.op_memory(
5156 |this,
5157 need_check,
5158 imported_memories,
5159 offset,
5160 heap_access_oob,
5161 unaligned_atomic| {
5162 this.machine.i32_atomic_cmpxchg_16u(
5163 new,
5164 cmp,
5165 target,
5166 memarg,
5167 ret,
5168 need_check,
5169 imported_memories,
5170 offset,
5171 heap_access_oob,
5172 unaligned_atomic,
5173 )
5174 },
5175 )?;
5176 }
5177 Operator::I64AtomicRmw8CmpxchgU { ref memarg } => {
5178 let new = self.pop_value_released()?.0;
5179 let cmp = self.pop_value_released()?.0;
5180 let target = self.pop_value_released()?.0;
5181 let ret = self.acquire_location(&WpType::I64)?;
5182 self.value_stack.push((ret, CanonicalizeType::None));
5183 self.op_memory(
5184 |this,
5185 need_check,
5186 imported_memories,
5187 offset,
5188 heap_access_oob,
5189 unaligned_atomic| {
5190 this.machine.i64_atomic_cmpxchg_8u(
5191 new,
5192 cmp,
5193 target,
5194 memarg,
5195 ret,
5196 need_check,
5197 imported_memories,
5198 offset,
5199 heap_access_oob,
5200 unaligned_atomic,
5201 )
5202 },
5203 )?;
5204 }
5205 Operator::I64AtomicRmw16CmpxchgU { ref memarg } => {
5206 let new = self.pop_value_released()?.0;
5207 let cmp = self.pop_value_released()?.0;
5208 let target = self.pop_value_released()?.0;
5209 let ret = self.acquire_location(&WpType::I64)?;
5210 self.value_stack.push((ret, CanonicalizeType::None));
5211 self.op_memory(
5212 |this,
5213 need_check,
5214 imported_memories,
5215 offset,
5216 heap_access_oob,
5217 unaligned_atomic| {
5218 this.machine.i64_atomic_cmpxchg_16u(
5219 new,
5220 cmp,
5221 target,
5222 memarg,
5223 ret,
5224 need_check,
5225 imported_memories,
5226 offset,
5227 heap_access_oob,
5228 unaligned_atomic,
5229 )
5230 },
5231 )?;
5232 }
5233 Operator::I64AtomicRmw32CmpxchgU { ref memarg } => {
5234 let new = self.pop_value_released()?.0;
5235 let cmp = self.pop_value_released()?.0;
5236 let target = self.pop_value_released()?.0;
5237 let ret = self.acquire_location(&WpType::I64)?;
5238 self.value_stack.push((ret, CanonicalizeType::None));
5239 self.op_memory(
5240 |this,
5241 need_check,
5242 imported_memories,
5243 offset,
5244 heap_access_oob,
5245 unaligned_atomic| {
5246 this.machine.i64_atomic_cmpxchg_32u(
5247 new,
5248 cmp,
5249 target,
5250 memarg,
5251 ret,
5252 need_check,
5253 imported_memories,
5254 offset,
5255 heap_access_oob,
5256 unaligned_atomic,
5257 )
5258 },
5259 )?;
5260 }
5261
5262 Operator::RefNull { .. } => {
5263 self.value_stack
5264 .push((Location::Imm64(0), CanonicalizeType::None));
5265 }
5266 Operator::RefFunc { function_index } => {
5267 self.machine.move_location(
5268 Size::S64,
5269 Location::Memory(
5270 self.machine.get_vmctx_reg(),
5271 self.vmoffsets
5272 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_func_ref_index())
5273 as i32,
5274 ),
5275 Location::GPR(self.machine.get_gpr_for_call()),
5276 )?;
5277
5278 self.emit_call_native(
5279 |this| {
5280 this.machine
5281 .emit_call_register(this.machine.get_gpr_for_call())
5282 },
5283 iter::once((
5285 Location::Imm32(function_index as u32),
5286 CanonicalizeType::None,
5287 )),
5288 iter::once(WpType::I64),
5289 iter::once(WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap())),
5290 NativeCallType::IncludeVMCtxArgument,
5291 )?;
5292 }
5293 Operator::RefIsNull => {
5294 let loc_a = self.pop_value_released()?.0;
5295 let ret = self.acquire_location(&WpType::I32)?;
5296 self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
5297 self.value_stack.push((ret, CanonicalizeType::None));
5298 }
5299 Operator::TableSet { table: index } => {
5300 let table_index = TableIndex::new(index as _);
5301 let value = self.value_stack.pop().unwrap();
5302 let index = self.value_stack.pop().unwrap();
5303
5304 self.machine.move_location(
5305 Size::S64,
5306 Location::Memory(
5307 self.machine.get_vmctx_reg(),
5308 self.vmoffsets.vmctx_builtin_function(
5309 if self.module.local_table_index(table_index).is_some() {
5310 VMBuiltinFunctionIndex::get_table_set_index()
5311 } else {
5312 VMBuiltinFunctionIndex::get_imported_table_set_index()
5313 },
5314 ) as i32,
5315 ),
5316 Location::GPR(self.machine.get_gpr_for_call()),
5317 )?;
5318
5319 self.emit_call_native(
5320 |this| {
5321 this.machine
5322 .emit_call_register(this.machine.get_gpr_for_call())
5323 },
5324 [
5326 (
5327 Location::Imm32(table_index.index() as u32),
5328 CanonicalizeType::None,
5329 ),
5330 index,
5331 value,
5332 ]
5333 .iter()
5334 .cloned(),
5335 [WpType::I32, WpType::I64, WpType::I64].iter().cloned(),
5336 iter::empty(),
5337 NativeCallType::IncludeVMCtxArgument,
5338 )?;
5339 }
5340 Operator::TableGet { table: index } => {
5341 let table_index = TableIndex::new(index as _);
5342 let index = self.value_stack.pop().unwrap();
5343
5344 self.machine.move_location(
5345 Size::S64,
5346 Location::Memory(
5347 self.machine.get_vmctx_reg(),
5348 self.vmoffsets.vmctx_builtin_function(
5349 if self.module.local_table_index(table_index).is_some() {
5350 VMBuiltinFunctionIndex::get_table_get_index()
5351 } else {
5352 VMBuiltinFunctionIndex::get_imported_table_get_index()
5353 },
5354 ) as i32,
5355 ),
5356 Location::GPR(self.machine.get_gpr_for_call()),
5357 )?;
5358
5359 self.emit_call_native(
5360 |this| {
5361 this.machine
5362 .emit_call_register(this.machine.get_gpr_for_call())
5363 },
5364 [
5366 (
5367 Location::Imm32(table_index.index() as u32),
5368 CanonicalizeType::None,
5369 ),
5370 index,
5371 ]
5372 .iter()
5373 .cloned(),
5374 [WpType::I32, WpType::I64].iter().cloned(),
5375 iter::once(WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap())),
5376 NativeCallType::IncludeVMCtxArgument,
5377 )?;
5378 }
5379 Operator::TableSize { table: index } => {
5380 let table_index = TableIndex::new(index as _);
5381
5382 self.machine.move_location(
5383 Size::S64,
5384 Location::Memory(
5385 self.machine.get_vmctx_reg(),
5386 self.vmoffsets.vmctx_builtin_function(
5387 if self.module.local_table_index(table_index).is_some() {
5388 VMBuiltinFunctionIndex::get_table_size_index()
5389 } else {
5390 VMBuiltinFunctionIndex::get_imported_table_size_index()
5391 },
5392 ) as i32,
5393 ),
5394 Location::GPR(self.machine.get_gpr_for_call()),
5395 )?;
5396
5397 self.emit_call_native(
5398 |this| {
5399 this.machine
5400 .emit_call_register(this.machine.get_gpr_for_call())
5401 },
5402 iter::once((
5404 Location::Imm32(table_index.index() as u32),
5405 CanonicalizeType::None,
5406 )),
5407 iter::once(WpType::I32),
5408 iter::once(WpType::I32),
5409 NativeCallType::IncludeVMCtxArgument,
5410 )?;
5411 }
5412 Operator::TableGrow { table: index } => {
5413 let table_index = TableIndex::new(index as _);
5414 let delta = self.value_stack.pop().unwrap();
5415 let init_value = self.value_stack.pop().unwrap();
5416
5417 self.machine.move_location(
5418 Size::S64,
5419 Location::Memory(
5420 self.machine.get_vmctx_reg(),
5421 self.vmoffsets.vmctx_builtin_function(
5422 if self.module.local_table_index(table_index).is_some() {
5423 VMBuiltinFunctionIndex::get_table_grow_index()
5424 } else {
5425 VMBuiltinFunctionIndex::get_imported_table_grow_index()
5426 },
5427 ) as i32,
5428 ),
5429 Location::GPR(self.machine.get_gpr_for_call()),
5430 )?;
5431
5432 self.emit_call_native(
5433 |this| {
5434 this.machine
5435 .emit_call_register(this.machine.get_gpr_for_call())
5436 },
5437 [
5439 init_value,
5440 delta,
5441 (
5442 Location::Imm32(table_index.index() as u32),
5443 CanonicalizeType::None,
5444 ),
5445 ]
5446 .iter()
5447 .cloned(),
5448 [WpType::I64, WpType::I64, WpType::I64].iter().cloned(),
5449 iter::once(WpType::I32),
5450 NativeCallType::IncludeVMCtxArgument,
5451 )?;
5452 }
5453 Operator::TableCopy {
5454 dst_table,
5455 src_table,
5456 } => {
5457 let len = self.value_stack.pop().unwrap();
5458 let src = self.value_stack.pop().unwrap();
5459 let dest = self.value_stack.pop().unwrap();
5460
5461 self.machine.move_location(
5462 Size::S64,
5463 Location::Memory(
5464 self.machine.get_vmctx_reg(),
5465 self.vmoffsets
5466 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_copy_index())
5467 as i32,
5468 ),
5469 Location::GPR(self.machine.get_gpr_for_call()),
5470 )?;
5471
5472 self.emit_call_native(
5473 |this| {
5474 this.machine
5475 .emit_call_register(this.machine.get_gpr_for_call())
5476 },
5477 [
5479 (Location::Imm32(dst_table), CanonicalizeType::None),
5480 (Location::Imm32(src_table), CanonicalizeType::None),
5481 dest,
5482 src,
5483 len,
5484 ]
5485 .iter()
5486 .cloned(),
5487 [
5488 WpType::I32,
5489 WpType::I32,
5490 WpType::I64,
5491 WpType::I64,
5492 WpType::I64,
5493 ]
5494 .iter()
5495 .cloned(),
5496 iter::empty(),
5497 NativeCallType::IncludeVMCtxArgument,
5498 )?;
5499 }
5500
5501 Operator::TableFill { table } => {
5502 let len = self.value_stack.pop().unwrap();
5503 let val = self.value_stack.pop().unwrap();
5504 let dest = self.value_stack.pop().unwrap();
5505
5506 self.machine.move_location(
5507 Size::S64,
5508 Location::Memory(
5509 self.machine.get_vmctx_reg(),
5510 self.vmoffsets
5511 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_fill_index())
5512 as i32,
5513 ),
5514 Location::GPR(self.machine.get_gpr_for_call()),
5515 )?;
5516
5517 self.emit_call_native(
5518 |this| {
5519 this.machine
5520 .emit_call_register(this.machine.get_gpr_for_call())
5521 },
5522 [
5524 (Location::Imm32(table), CanonicalizeType::None),
5525 dest,
5526 val,
5527 len,
5528 ]
5529 .iter()
5530 .cloned(),
5531 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
5532 .iter()
5533 .cloned(),
5534 iter::empty(),
5535 NativeCallType::IncludeVMCtxArgument,
5536 )?;
5537 }
5538 Operator::TableInit { elem_index, table } => {
5539 let len = self.value_stack.pop().unwrap();
5540 let src = self.value_stack.pop().unwrap();
5541 let dest = self.value_stack.pop().unwrap();
5542
5543 self.machine.move_location(
5544 Size::S64,
5545 Location::Memory(
5546 self.machine.get_vmctx_reg(),
5547 self.vmoffsets
5548 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_init_index())
5549 as i32,
5550 ),
5551 Location::GPR(self.machine.get_gpr_for_call()),
5552 )?;
5553
5554 self.emit_call_native(
5555 |this| {
5556 this.machine
5557 .emit_call_register(this.machine.get_gpr_for_call())
5558 },
5559 [
5561 (Location::Imm32(table), CanonicalizeType::None),
5562 (Location::Imm32(elem_index), CanonicalizeType::None),
5563 dest,
5564 src,
5565 len,
5566 ]
5567 .iter()
5568 .cloned(),
5569 [
5570 WpType::I32,
5571 WpType::I32,
5572 WpType::I64,
5573 WpType::I64,
5574 WpType::I64,
5575 ]
5576 .iter()
5577 .cloned(),
5578 iter::empty(),
5579 NativeCallType::IncludeVMCtxArgument,
5580 )?;
5581 }
5582 Operator::ElemDrop { elem_index } => {
5583 self.machine.move_location(
5584 Size::S64,
5585 Location::Memory(
5586 self.machine.get_vmctx_reg(),
5587 self.vmoffsets
5588 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_elem_drop_index())
5589 as i32,
5590 ),
5591 Location::GPR(self.machine.get_gpr_for_call()),
5592 )?;
5593
5594 self.emit_call_native(
5595 |this| {
5596 this.machine
5597 .emit_call_register(this.machine.get_gpr_for_call())
5598 },
5599 iter::once((Location::Imm32(elem_index), CanonicalizeType::None)),
5601 [WpType::I32].iter().cloned(),
5602 iter::empty(),
5603 NativeCallType::IncludeVMCtxArgument,
5604 )?;
5605 }
5606 Operator::MemoryAtomicWait32 { ref memarg } => {
5607 let timeout = self.value_stack.pop().unwrap();
5608 let val = self.value_stack.pop().unwrap();
5609 let dst = self.value_stack.pop().unwrap();
5610
5611 let memory_index = MemoryIndex::new(memarg.memory as usize);
5612 let (memory_atomic_wait32, memory_index) =
5613 if self.module.local_memory_index(memory_index).is_some() {
5614 (
5615 VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
5616 memory_index,
5617 )
5618 } else {
5619 (
5620 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
5621 memory_index,
5622 )
5623 };
5624
5625 self.machine.move_location(
5626 Size::S64,
5627 Location::Memory(
5628 self.machine.get_vmctx_reg(),
5629 self.vmoffsets.vmctx_builtin_function(memory_atomic_wait32) as i32,
5630 ),
5631 Location::GPR(self.machine.get_gpr_for_call()),
5632 )?;
5633
5634 self.emit_call_native(
5635 |this| {
5636 this.machine
5637 .emit_call_register(this.machine.get_gpr_for_call())
5638 },
5639 [
5641 (
5642 Location::Imm32(memory_index.index() as u32),
5643 CanonicalizeType::None,
5644 ),
5645 dst,
5646 val,
5647 timeout,
5648 ]
5649 .iter()
5650 .cloned(),
5651 [WpType::I32, WpType::I32, WpType::I32, WpType::I64]
5652 .iter()
5653 .cloned(),
5654 iter::once(WpType::I32),
5655 NativeCallType::IncludeVMCtxArgument,
5656 )?;
5657 }
5658 Operator::MemoryAtomicWait64 { ref memarg } => {
5659 let timeout = self.value_stack.pop().unwrap();
5660 let val = self.value_stack.pop().unwrap();
5661 let dst = self.value_stack.pop().unwrap();
5662
5663 let memory_index = MemoryIndex::new(memarg.memory as usize);
5664 let (memory_atomic_wait64, memory_index) =
5665 if self.module.local_memory_index(memory_index).is_some() {
5666 (
5667 VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
5668 memory_index,
5669 )
5670 } else {
5671 (
5672 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
5673 memory_index,
5674 )
5675 };
5676
5677 self.machine.move_location(
5678 Size::S64,
5679 Location::Memory(
5680 self.machine.get_vmctx_reg(),
5681 self.vmoffsets.vmctx_builtin_function(memory_atomic_wait64) as i32,
5682 ),
5683 Location::GPR(self.machine.get_gpr_for_call()),
5684 )?;
5685
5686 self.emit_call_native(
5687 |this| {
5688 this.machine
5689 .emit_call_register(this.machine.get_gpr_for_call())
5690 },
5691 [
5693 (
5694 Location::Imm32(memory_index.index() as u32),
5695 CanonicalizeType::None,
5696 ),
5697 dst,
5698 val,
5699 timeout,
5700 ]
5701 .iter()
5702 .cloned(),
5703 [WpType::I32, WpType::I32, WpType::I64, WpType::I64]
5704 .iter()
5705 .cloned(),
5706 iter::once(WpType::I32),
5707 NativeCallType::IncludeVMCtxArgument,
5708 )?;
5709 }
5710 Operator::MemoryAtomicNotify { ref memarg } => {
5711 let cnt = self.value_stack.pop().unwrap();
5712 let dst = self.value_stack.pop().unwrap();
5713
5714 let memory_index = MemoryIndex::new(memarg.memory as usize);
5715 let (memory_atomic_notify, memory_index) =
5716 if self.module.local_memory_index(memory_index).is_some() {
5717 (
5718 VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
5719 memory_index,
5720 )
5721 } else {
5722 (
5723 VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
5724 memory_index,
5725 )
5726 };
5727
5728 self.machine.move_location(
5729 Size::S64,
5730 Location::Memory(
5731 self.machine.get_vmctx_reg(),
5732 self.vmoffsets.vmctx_builtin_function(memory_atomic_notify) as i32,
5733 ),
5734 Location::GPR(self.machine.get_gpr_for_call()),
5735 )?;
5736
5737 self.emit_call_native(
5738 |this| {
5739 this.machine
5740 .emit_call_register(this.machine.get_gpr_for_call())
5741 },
5742 [
5744 (
5745 Location::Imm32(memory_index.index() as u32),
5746 CanonicalizeType::None,
5747 ),
5748 dst,
5749 cnt,
5750 ]
5751 .iter()
5752 .cloned(),
5753 [WpType::I32, WpType::I32, WpType::I32].iter().cloned(),
5754 iter::once(WpType::I32),
5755 NativeCallType::IncludeVMCtxArgument,
5756 )?;
5757 }
5758 _ => {
5759 return Err(CompileError::Codegen(format!(
5760 "not yet implemented: {op:?}"
5761 )));
5762 }
5763 }
5764
5765 Ok(())
5766 }
5767
5768 fn add_assembly_comment(&mut self, comment: AssemblyComment) {
5769 if self.config.callbacks.is_some() {
5771 self.assembly_comments
5772 .insert(self.machine.get_offset().0, comment);
5773 }
5774 }
5775
    /// Emits the shared trap-handler stubs, finalizes the generated machine
    /// code, and packages it into a `CompiledFunction`.
    ///
    /// `data` is the raw wasm function body (used to build the instruction
    /// address map) and `arch` is forwarded to the assembly-dump callback.
    ///
    /// Returns the compiled function plus an optional unwind frame, which is
    /// `Some` only when the `unwind` feature is enabled and a DWARF-style
    /// calling convention produced unwind info.
    pub fn finalize(
        mut self,
        data: &FunctionBodyData,
        arch: Architecture,
    ) -> Result<(CompiledFunction, Option<UnwindFrame>), CompileError> {
        // Out-of-line trap table: each special label gets a single trapping
        // instruction; fault-check branches emitted throughout the function
        // body jump here.
        self.add_assembly_comment(AssemblyComment::TrapHandlersTable);
        self.machine
            .emit_label(self.special_labels.integer_division_by_zero)?;
        self.machine
            .emit_illegal_op(TrapCode::IntegerDivisionByZero)?;

        self.machine
            .emit_label(self.special_labels.integer_overflow)?;
        self.machine.emit_illegal_op(TrapCode::IntegerOverflow)?;

        self.machine
            .emit_label(self.special_labels.heap_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::HeapAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.table_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::TableAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.indirect_call_null)?;
        self.machine.emit_illegal_op(TrapCode::IndirectCallToNull)?;

        self.machine.emit_label(self.special_labels.bad_signature)?;
        self.machine.emit_illegal_op(TrapCode::BadSignature)?;

        self.machine
            .emit_label(self.special_labels.unaligned_atomic)?;
        self.machine.emit_illegal_op(TrapCode::UnalignedAtomic)?;

        self.machine.finalize_function()?;

        let body_len = self.machine.assembler_get_offset().0;

        // Unwind info is only computed under the `unwind` feature; without it
        // both locals stay `None` (hence the conditional `allow(unused_mut)`).
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut unwind_info = None;
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut fde = None;
        #[cfg(feature = "unwind")]
        match self.calling_convention {
            CallingConvention::SystemV | CallingConvention::AppleAarch64 => {
                // DWARF-based unwinding: build an FDE addressed relative to a
                // per-function symbol; the addend carries the function index
                // so the address can be resolved later.
                let unwind = self.machine.gen_dwarf_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    fde = Some(unwind.to_fde(Address::Symbol {
                        symbol: WriterRelocate::FUNCTION_SYMBOL,
                        addend: self.local_func_index.index() as _,
                    }));
                    unwind_info = Some(CompiledFunctionUnwindInfo::Dwarf);
                }
            }
            CallingConvention::WindowsFastcall => {
                let unwind = self.machine.gen_windows_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    unwind_info = Some(CompiledFunctionUnwindInfo::WindowsX64(unwind));
                }
            }
            // Other calling conventions: no unwind info emitted.
            _ => (),
        };

        // Map generated instruction offsets back to wasm byte offsets, and
        // collect the trap sites recorded during emission.
        let address_map =
            get_function_address_map(self.machine.instructions_address_map(), data, body_len);
        let traps = self.machine.collect_trap_information();
        let FinalizedAssembly {
            mut body,
            assembly_comments,
        } = self.machine.assembler_finalize(self.assembly_comments)?;
        body.shrink_to_fit();

        // Hand the finished machine code to the debugging callbacks, if any
        // are configured (raw object bytes, then a disassembly with the
        // comments gathered via `add_assembly_comment`).
        if let Some(callbacks) = self.config.callbacks.as_ref() {
            callbacks.obj_memory_buffer(
                &CompiledKind::Local(self.local_func_index, self.function_name.clone()),
                &self.module.hash_string(),
                &body,
            );
            callbacks.asm_memory_buffer(
                &CompiledKind::Local(self.local_func_index, self.function_name.clone()),
                &self.module.hash_string(),
                arch,
                &body,
                assembly_comments,
            )?;
        }

        Ok((
            CompiledFunction {
                body: FunctionBody { body, unwind_info },
                relocations: self.relocations.clone(),
                frame_info: CompiledFunctionFrameInfo { traps, address_map },
            },
            fde,
        ))
    }
5876 #[allow(clippy::type_complexity)]
5879 fn sort_call_movs(movs: &mut [(Location<M::GPR, M::SIMD>, M::GPR)]) {
5880 for i in 0..movs.len() {
5881 for j in (i + 1)..movs.len() {
5882 if let Location::GPR(src_gpr) = movs[j].0
5883 && src_gpr == movs[i].1
5884 {
5885 movs.swap(i, j);
5886 }
5887 }
5888 }
5889 }
5890
5891 }