1#[cfg(feature = "unwind")]
2use crate::dwarf::WriterRelocate;
3
4use crate::{
5 address_map::get_function_address_map,
6 codegen_error,
7 common_decl::*,
8 config::Singlepass,
9 location::{Location, Reg},
10 machine::{
11 AssemblyComment, FinalizedAssembly, Label, Machine, NATIVE_PAGE_SIZE, UnsignedCondition,
12 },
13 unwind::UnwindFrame,
14};
15#[cfg(feature = "unwind")]
16use gimli::write::Address;
17use itertools::Itertools;
18use smallvec::{SmallVec, smallvec};
19use std::{cmp, collections::HashMap, iter, ops::Neg};
20use target_lexicon::Architecture;
21
22use wasmer_compiler::{
23 FunctionBodyData,
24 misc::CompiledKind,
25 types::{
26 function::{CompiledFunction, CompiledFunctionFrameInfo, FunctionBody},
27 relocation::{Relocation, RelocationTarget},
28 section::SectionIndex,
29 },
30 wasmparser::{
31 BlockType as WpTypeOrFuncType, HeapType as WpHeapType, Operator, RefType as WpRefType,
32 ValType as WpType,
33 },
34};
35
36#[cfg(feature = "unwind")]
37use wasmer_compiler::types::unwind::CompiledFunctionUnwindInfo;
38
39use wasmer_types::target::CallingConvention;
40use wasmer_types::{
41 CompileError, FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, LocalMemoryIndex,
42 MemoryIndex, MemoryStyle, ModuleInfo, SignatureIndex, TableIndex, TableStyle, TrapCode, Type,
43 VMBuiltinFunctionIndex, VMOffsets,
44 entity::{EntityRef, PrimaryMap},
45};
46
/// A value's machine location paired with its pending NaN-canonicalization
/// state (one entry of the compile-time value stack).
#[allow(type_alias_bounds)]
type LocationWithCanonicalization<M: Machine> = (Location<M::GPR, M::SIMD>, CanonicalizeType);
49
/// Single-pass code generator for one Wasm function.
///
/// Maintains a compile-time mirror of the Wasm value and control stacks and
/// drives the architecture-specific [`Machine`] backend to emit native code
/// in one forward pass over the operators.
pub struct FuncGen<'a, M: Machine> {
    // Immutable description of the module being compiled.
    module: &'a ModuleInfo,
    // Compiler configuration (e.g. `enable_nan_canonicalization`).
    config: &'a Singlepass,
    // Field offsets into the VM context structure.
    vmoffsets: &'a VMOffsets,
    // Bounds-checking style (static/dynamic) for each memory.
    memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
    // Type signature of the function being compiled.
    signature: FunctionType,
    // Machine locations of the Wasm locals (parameters first).
    locals: Vec<Location<M::GPR, M::SIMD>>,
    // Wasm types of all locals, parameters first.
    local_types: Vec<WpType>,
    // Compile-time mirror of the Wasm value stack; each entry carries a
    // pending NaN-canonicalization tag.
    value_stack: Vec<LocationWithCanonicalization<M>>,
    // Open control frames (function body, blocks, loops, ifs).
    control_stack: Vec<ControlFrame<M>>,
    // Bytes currently reserved on the native stack for spills/saves.
    stack_offset: usize,
    // Offset of the register save area; set by `init_locals`.
    save_area_offset: Option<usize>,
    // Architecture-specific assembler / register allocator backend.
    machine: M,
    // Nesting depth while skipping statically-unreachable code (0 = live).
    unreachable_depth: usize,
    // Index of this function among the module's locally-defined functions.
    local_func_index: LocalFunctionIndex,
    // Relocations accumulated while emitting code.
    relocations: Vec<Relocation>,
    // Shared labels for the trap-condition stubs.
    special_labels: SpecialLabelSet,
    // Calling convention used for calls emitted in this function.
    calling_convention: CallingConvention,
    // Human-readable name, used for diagnostics/symbols.
    function_name: String,
    // Assembly annotations keyed by offset (presumably assembler offsets —
    // see `add_assembly_comment`, defined elsewhere in this impl).
    assembly_comments: HashMap<usize, AssemblyComment>,
}
112
/// Labels of the shared out-of-line stubs that runtime checks jump to when
/// they fail; one label per trap condition.
struct SpecialLabelSet {
    integer_division_by_zero: Label,
    integer_overflow: Label,
    heap_access_oob: Label,
    table_access_oob: Label,
    indirect_call_null: Label,
    bad_signature: Label,
    unaligned_atomic: Label,
}
122
/// Pending NaN-canonicalization state carried with each value-stack entry:
/// either nothing to do, or the float width at which the value must be
/// canonicalized when it is stored.
#[derive(Copy, Clone, Debug)]
pub(crate) enum CanonicalizeType {
    None,
    F32,
    F64,
}
131
132impl CanonicalizeType {
133 fn to_size(self) -> Option<Size> {
134 match self {
135 CanonicalizeType::F32 => Some(Size::S32),
136 CanonicalizeType::F64 => Some(Size::S64),
137 CanonicalizeType::None => None,
138 }
139 }
140
141 fn promote(self) -> Result<Self, CompileError> {
142 match self {
143 CanonicalizeType::None => Ok(CanonicalizeType::None),
144 CanonicalizeType::F32 => Ok(CanonicalizeType::F64),
145 CanonicalizeType::F64 => codegen_error!("cannot promote F64"),
146 }
147 }
148
149 fn demote(self) -> Result<Self, CompileError> {
150 match self {
151 CanonicalizeType::None => Ok(CanonicalizeType::None),
152 CanonicalizeType::F32 => codegen_error!("cannot demote F64"),
153 CanonicalizeType::F64 => Ok(CanonicalizeType::F32),
154 }
155 }
156}
157
/// Convenience predicate over `wasmparser` value types.
trait WpTypeExt {
    /// True for the scalar float types (`F32`/`F64`); `V128` is excluded.
    fn is_float(&self) -> bool;
}
161
162impl WpTypeExt for WpType {
163 fn is_float(&self) -> bool {
164 matches!(self, WpType::F32 | WpType::F64)
165 }
166}
167
/// Kind of an open control construct, plus any per-kind state.
#[derive(Clone)]
pub enum ControlState<M: Machine> {
    /// The implicit frame wrapping the whole function body.
    Function,
    Block,
    Loop,
    If {
        /// Label of the `else` arm.
        label_else: Label,
        /// Block inputs captured when entering the `if` — presumably
        /// replayed for the `else` arm; confirm in the `Else` handler.
        inputs: SmallVec<[LocationWithCanonicalization<M>; 1]>,
    },
    Else,
}
181
/// One open control construct on the compile-time control stack.
#[derive(Clone)]
struct ControlFrame<M: Machine> {
    pub state: ControlState<M>,
    /// Label that branches targeting this frame jump to.
    pub label: Label,
    pub param_types: SmallVec<[WpType; 8]>,
    pub return_types: SmallVec<[WpType; 1]>,
    // Value-stack depth at frame entry (block parameters included).
    value_stack_depth: usize,
}
191
192impl<M: Machine> ControlFrame<M> {
193 fn value_stack_depth_after(&self) -> usize {
195 let mut depth: usize = self.value_stack_depth - self.param_types.len();
196
197 if matches!(self.state, ControlState::Loop) {
199 depth -= self.param_types.len();
200 }
201
202 depth
203 }
204
205 fn value_stack_depth_for_release(&self) -> usize {
208 self.value_stack_depth - self.param_types.len()
209 }
210}
211
212fn type_to_wp_type(ty: &Type) -> WpType {
213 match ty {
214 Type::I32 => WpType::I32,
215 Type::I64 => WpType::I64,
216 Type::F32 => WpType::F32,
217 Type::F64 => WpType::F64,
218 Type::V128 => WpType::V128,
219 Type::ExternRef => WpType::Ref(WpRefType::new(true, WpHeapType::EXTERN).unwrap()),
220 Type::FuncRef => WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap()),
221 Type::ExceptionRef => todo!(),
222 }
223}
224
/// Operand pair and result slot for a two-input / one-output operation.
struct I2O1<R: Reg, S: Reg> {
    loc_a: Location<R, S>,
    loc_b: Location<R, S>,
    ret: Location<R, S>,
}
232
/// How `emit_call_native` should shape the call.
enum NativeCallType {
    /// Pass the VM context pointer as the implicit first argument.
    IncludeVMCtxArgument,
    /// Call that does not return; the emitted call range is marked with
    /// `TrapCode::UnreachableCodeReached`.
    Unreachable,
}
238
239impl<'a, M: Machine> FuncGen<'a, M> {
240 fn acquire_location(&mut self, ty: &WpType) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
245 let loc = match *ty {
246 WpType::F32 | WpType::F64 => self.machine.pick_simd().map(Location::SIMD),
247 WpType::I32 | WpType::I64 => self.machine.pick_gpr().map(Location::GPR),
248 WpType::Ref(ty) if ty.is_extern_ref() || ty.is_func_ref() => {
249 self.machine.pick_gpr().map(Location::GPR)
250 }
251 _ => codegen_error!("can't acquire location for type {:?}", ty),
252 };
253
254 let Some(loc) = loc else {
255 return self.acquire_location_on_stack();
256 };
257
258 if let Location::GPR(x) = loc {
259 self.machine.reserve_gpr(x);
260 } else if let Location::SIMD(x) = loc {
261 self.machine.reserve_simd(x);
262 }
263 Ok(loc)
264 }
265
266 fn acquire_location_on_stack(&mut self) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
268 self.stack_offset += 8;
269 let loc = self.machine.local_on_stack(self.stack_offset as i32);
270 self.machine
271 .extend_stack(self.machine.round_stack_adjust(8) as u32)?;
272
273 Ok(loc)
274 }
275
276 fn release_locations(
278 &mut self,
279 locs: &[LocationWithCanonicalization<M>],
280 ) -> Result<(), CompileError> {
281 self.release_stack_locations(locs)?;
282 self.release_reg_locations(locs)
283 }
284
285 fn release_reg_locations(
286 &mut self,
287 locs: &[LocationWithCanonicalization<M>],
288 ) -> Result<(), CompileError> {
289 for (loc, _) in locs.iter().rev() {
290 match *loc {
291 Location::GPR(ref x) => {
292 self.machine.release_gpr(*x);
293 }
294 Location::SIMD(ref x) => {
295 self.machine.release_simd(*x);
296 }
297 _ => {}
298 }
299 }
300 Ok(())
301 }
302
303 fn release_stack_locations(
304 &mut self,
305 locs: &[LocationWithCanonicalization<M>],
306 ) -> Result<(), CompileError> {
307 for (loc, _) in locs.iter().rev() {
308 if let Location::Memory(..) = *loc {
309 self.check_location_on_stack(loc, self.stack_offset)?;
310 self.stack_offset -= 8;
311 self.machine
312 .truncate_stack(self.machine.round_stack_adjust(8) as u32)?;
313 }
314 }
315
316 Ok(())
317 }
318
319 fn release_stack_locations_keep_stack_offset(
320 &mut self,
321 stack_depth: usize,
322 ) -> Result<(), CompileError> {
323 let mut stack_offset = self.stack_offset;
324 let locs = &self.value_stack[stack_depth..];
325
326 for (loc, _) in locs.iter().rev() {
327 if let Location::Memory(..) = *loc {
328 self.check_location_on_stack(loc, stack_offset)?;
329 stack_offset -= 8;
330 self.machine
331 .truncate_stack(self.machine.round_stack_adjust(8) as u32)?;
332 }
333 }
334
335 Ok(())
336 }
337
338 fn check_location_on_stack(
339 &self,
340 loc: &Location<M::GPR, M::SIMD>,
341 expected_stack_offset: usize,
342 ) -> Result<(), CompileError> {
343 let Location::Memory(reg, offset) = loc else {
344 codegen_error!("Expected stack memory location");
345 };
346 if reg != &self.machine.local_pointer() {
347 codegen_error!("Expected location pointer for value on stack");
348 }
349 if *offset >= 0 {
350 codegen_error!("Invalid memory offset {offset}");
351 }
352 let offset = offset.neg() as usize;
353 if offset != expected_stack_offset {
354 codegen_error!("Invalid memory offset {offset}!={}", self.stack_offset);
355 }
356 Ok(())
357 }
358
    /// Rearrange the top of the value stack so that `return_slots` stack
    /// slots sit *below* the top `stack_slots` values, pushing the return
    /// slots first and then the (possibly relocated) original values back on
    /// top. Memory-resident values are shuffled so every value again lives
    /// in a slot consistent with stack ordering.
    fn allocate_return_slots_and_swap(
        &mut self,
        stack_slots: usize,
        return_slots: usize,
    ) -> Result<(), CompileError> {
        if return_slots == 0 {
            return Ok(());
        }

        // Temporarily take the top `stack_slots` values off the stack.
        let latest_slots = self
            .value_stack
            .drain(self.value_stack.len() - stack_slots..)
            .collect_vec();
        // Fresh stack slots so that, combined with the memory slots already
        // held by `latest_slots`, there are enough for all returns.
        let extra_slots = (0..return_slots)
            .map(|_| self.acquire_location_on_stack())
            .collect::<Result<Vec<_>, _>>()?;

        // All memory slots available for reuse, lowest-allocated first.
        let mut all_memory_slots = latest_slots
            .iter()
            .filter_map(|(loc, _)| {
                if let Location::Memory(..) = loc {
                    Some(loc)
                } else {
                    None
                }
            })
            .chain(extra_slots.iter())
            .collect_vec();

        // The first `return_slots` memory slots become the return slots.
        self.value_stack.extend(
            all_memory_slots
                .iter()
                .take(return_slots)
                .map(|loc| (**loc, CanonicalizeType::None)),
        );

        // Re-push the original values; memory-resident ones are moved into
        // the highest remaining memory slots (popped back-to-front so
        // ordering stays consistent).
        let mut new_params_reversed = Vec::new();
        for (loc, canonicalize) in latest_slots.iter().rev() {
            let mapped_loc = if matches!(loc, Location::Memory(..)) {
                let dest = all_memory_slots.pop().unwrap();
                self.machine.emit_relaxed_mov(Size::S64, *loc, *dest)?;
                *dest
            } else {
                *loc
            };
            new_params_reversed.push((mapped_loc, *canonicalize));
        }
        self.value_stack
            .extend(new_params_reversed.into_iter().rev());

        Ok(())
    }
424
    /// Allocate locations for the `n` Wasm locals, set up the save area
    /// (clobbered GPRs, vmctx, callee-saved registers), marshal the incoming
    /// call arguments into their local locations, and zero the remaining
    /// locals. Returns the per-local locations.
    #[allow(clippy::type_complexity)]
    fn init_locals(
        &mut self,
        n: usize,
        sig: FunctionType,
        calling_convention: CallingConvention,
    ) -> Result<Vec<Location<M::GPR, M::SIMD>>, CompileError> {
        self.add_assembly_comment(AssemblyComment::InitializeLocals);

        // Locals that the backend places in memory rather than registers.
        let num_mem_slots = (0..n)
            .filter(|&x| self.machine.is_local_on_stack(x))
            .count();

        // Size of the fixed (non-local) part of the frame, accumulated below.
        let mut static_area_size: usize = 0;

        // 8 bytes to save the previous contents of each register-allocated
        // local's register.
        for i in 0..n {
            if !self.machine.is_local_on_stack(i) {
                static_area_size += 8;
            }
        }

        // 8 bytes for saving the vmctx register.
        static_area_size += 8;

        // Room for the calling-convention's callee-saved registers.
        static_area_size += 8 * self.machine.list_to_save(calling_convention).len();

        // Locals in memory live below this save area.
        let callee_saved_regs_size = static_area_size;

        let locations: Vec<Location<M::GPR, M::SIMD>> = (0..n)
            .map(|i| self.machine.get_local_location(i, callee_saved_regs_size))
            .collect();

        // Memory-resident locals follow the save area.
        static_area_size += num_mem_slots * 8;

        static_area_size = self.machine.round_stack_adjust(static_area_size);

        // Touch one local per native page before extending the stack —
        // presumably a stack probe so guard pages are hit in order; confirm
        // against the backend's stack-overflow handling.
        for i in (sig.params().len()..n)
            .step_by(NATIVE_PAGE_SIZE / 8)
            .skip(1)
        {
            self.machine.zero_location(Size::S64, locations[i])?;
        }

        self.machine.extend_stack(static_area_size as _)?;

        // Save the current contents of every register used for a local.
        for loc in locations.iter() {
            if let Location::GPR(_) = *loc {
                self.stack_offset += 8;
                self.machine.move_local(self.stack_offset as i32, *loc)?;
            }
        }

        // Save the vmctx register.
        self.stack_offset += 8;
        self.machine.move_local(
            self.stack_offset as i32,
            Location::GPR(self.machine.get_vmctx_reg()),
        )?;

        // Save callee-saved registers required by the calling convention.
        let regs_to_save = self.machine.list_to_save(calling_convention);
        for loc in regs_to_save.iter() {
            self.stack_offset += 8;
            self.machine.move_local(self.stack_offset as i32, *loc)?;
        }

        // Remember where the save area ends so `finalize_locals` can
        // restore from it.
        self.save_area_offset = Some(self.stack_offset);

        // Marshal incoming arguments into the locals' locations, extending
        // everything to 64-bit. Argument 0 is the vmctx pointer, so Wasm
        // parameter `i` is native argument `i + 1`.
        let mut stack_offset: usize = 0;
        for (i, param) in sig.params().iter().enumerate() {
            let sz = match *param {
                Type::I32 | Type::F32 => Size::S32,
                Type::I64 | Type::F64 => Size::S64,
                Type::ExternRef | Type::FuncRef => Size::S64,
                _ => codegen_error!("singlepass init_local unimplemented"),
            };
            let loc = self.machine.get_call_param_location(
                sig.results().len(),
                i + 1,
                sz,
                &mut stack_offset,
                calling_convention,
            );
            self.machine
                .move_location_extend(sz, false, loc, Size::S64, locations[i])?;
        }

        // Load the vmctx pointer (native argument 0) into the vmctx register.
        self.machine.move_location(
            Size::S64,
            Location::GPR(
                self.machine
                    .get_simple_param_location(0, calling_convention),
            ),
            Location::GPR(self.machine.get_vmctx_reg()),
        )?;

        // Zero the non-parameter locals: register ones directly, memory ones
        // batched via `init_stack_loc` starting at the lowest location.
        let mut init_stack_loc_cnt = 0;
        let mut last_stack_loc = Location::Memory(self.machine.local_pointer(), i32::MAX);
        for location in locations.iter().take(n).skip(sig.params().len()) {
            match location {
                Location::Memory(_, _) => {
                    init_stack_loc_cnt += 1;
                    last_stack_loc = cmp::min(last_stack_loc, *location);
                }
                Location::GPR(_) => {
                    self.machine.zero_location(Size::S64, *location)?;
                }
                _ => codegen_error!("singlepass init_local unreachable"),
            }
        }
        if init_stack_loc_cnt > 0 {
            self.machine
                .init_stack_loc(init_stack_loc_cnt, last_stack_loc)?;
        }

        // Account for the memory-resident locals in the shadow offset.
        self.stack_offset += static_area_size - callee_saved_regs_size;

        Ok(locations)
    }
567
    /// Undo `init_locals` at function exit: rewind to the save area, then
    /// pop (in reverse save order) the callee-saved registers, the vmctx
    /// register, and the registers used for locals.
    fn finalize_locals(
        &mut self,
        calling_convention: CallingConvention,
    ) -> Result<(), CompileError> {
        // `save_area_offset` is set by `init_locals`; unwrap is an invariant.
        self.machine
            .restore_saved_area(self.save_area_offset.unwrap() as i32)?;

        let regs_to_save = self.machine.list_to_save(calling_convention);
        for loc in regs_to_save.iter().rev() {
            self.machine.pop_location(*loc)?;
        }

        self.machine
            .pop_location(Location::GPR(self.machine.get_vmctx_reg()))?;

        for loc in self.locals.iter().rev() {
            if let Location::GPR(_) = *loc {
                self.machine.pop_location(*loc)?;
            }
        }
        Ok(())
    }
593
    /// Record the current Wasm source offset on the backend (used for trap
    /// and address-map information).
    pub fn set_srcloc(&mut self, offset: u32) {
        self.machine.set_srcloc(offset);
    }
598
599 fn get_location_released(
600 &mut self,
601 loc: (Location<M::GPR, M::SIMD>, CanonicalizeType),
602 ) -> Result<LocationWithCanonicalization<M>, CompileError> {
603 self.release_locations(&[loc])?;
604 Ok(loc)
605 }
606
607 fn pop_value_released(&mut self) -> Result<LocationWithCanonicalization<M>, CompileError> {
608 let loc = self.value_stack.pop().ok_or_else(|| {
609 CompileError::Codegen("pop_value_released: value stack is empty".to_owned())
610 })?;
611 self.get_location_released(loc)?;
612 Ok(loc)
613 }
614
615 fn i2o1_prepare(
617 &mut self,
618 ty: WpType,
619 canonicalize: CanonicalizeType,
620 ) -> Result<I2O1<M::GPR, M::SIMD>, CompileError> {
621 let loc_b = self.pop_value_released()?.0;
622 let loc_a = self.pop_value_released()?.0;
623 let ret = self.acquire_location(&ty)?;
624 self.value_stack.push((ret, canonicalize));
625 Ok(I2O1 { loc_a, loc_b, ret })
626 }
627
    /// Emit a call to a native function following the host calling
    /// convention.
    ///
    /// `cb` emits the actual call instruction once everything is in place.
    /// `params` are the argument locations (already popped off the value
    /// stack by the caller); `params_type`/`return_types` give their Wasm
    /// types. Caller-saved registers are spilled around the call, stack
    /// arguments are marshalled into the native argument area, and return
    /// values land in (reused or freshly allocated) stack slots that are
    /// pushed onto the value stack.
    fn emit_call_native<
        I: Iterator<Item = (Location<M::GPR, M::SIMD>, CanonicalizeType)>,
        J: Iterator<Item = WpType>,
        K: Iterator<Item = WpType>,
        F: FnOnce(&mut Self) -> Result<(), CompileError>,
    >(
        &mut self,
        cb: F,
        params: I,
        params_type: J,
        return_types: K,
        call_type: NativeCallType,
    ) -> Result<(), CompileError> {
        let params = params.collect_vec();
        // Params living in stack memory; they must all be addressed off the
        // local pointer.
        let stack_params = params
            .iter()
            .copied()
            .filter(|(param, _)| {
                if let Location::Memory(reg, _) = param {
                    debug_assert_eq!(reg, &self.machine.local_pointer());
                    true
                } else {
                    false
                }
            })
            .collect_vec();
        let get_size = |param_type: WpType| match param_type {
            WpType::F32 | WpType::I32 => Size::S32,
            WpType::V128 => unimplemented!(),
            _ => Size::S64,
        };
        let param_sizes = params_type.map(get_size).collect_vec();
        let return_value_sizes = return_types.map(get_size).collect_vec();

        // Reuse the params' stack slots as return-value slots where
        // possible; allocate fresh slots for the remainder.
        let used_stack_params = stack_params
            .iter()
            .take(return_value_sizes.len())
            .copied()
            .collect_vec();
        let mut return_values = used_stack_params.clone();
        let extra_return_values = (0..return_value_sizes.len().saturating_sub(stack_params.len()))
            .map(|_| -> Result<_, CompileError> {
                Ok((self.acquire_location_on_stack()?, CanonicalizeType::None))
            })
            .collect::<Result<Vec<_>, _>>()?;
        return_values.extend(extra_return_values);

        // Registers held by params can be freed now; the values themselves
        // are read below when arguments are marshalled.
        self.release_reg_locations(&params)?;

        // Spill caller-saved registers currently in use around the call.
        let used_gprs = self.machine.get_used_gprs();
        let mut used_stack = self.machine.push_used_gpr(&used_gprs)?;

        let used_simds = self.machine.get_used_simd();
        if !used_simds.is_empty() {
            used_stack += self.machine.push_used_simd(&used_simds)?;
        }
        // Keep the call-target scratch register out of argument allocation.
        self.machine
            .reserve_unused_temp_gpr(self.machine.get_gpr_for_call());

        let calling_convention = self.calling_convention;

        // Windows fastcall requires a 32-byte shadow space for the callee.
        let stack_padding: usize = match calling_convention {
            CallingConvention::WindowsFastcall => 32,
            _ => 0,
        };

        let mut stack_offset: usize = 0;
        // Locations in which the callee hands back each return value.
        let mut return_args = Vec::with_capacity(return_value_sizes.len());
        for i in 0..return_value_sizes.len() {
            return_args.push(self.machine.get_return_value_location(
                i,
                &mut stack_offset,
                self.calling_convention,
            ));
        }

        // Native argument locations; slot 0 is the vmctx pointer when the
        // call includes it.
        let mut args = Vec::with_capacity(params.len());
        for (i, param_size) in param_sizes.iter().enumerate() {
            args.push(self.machine.get_param_location(
                match call_type {
                    NativeCallType::IncludeVMCtxArgument => 1,
                    NativeCallType::Unreachable => 0,
                } + i,
                *param_size,
                &mut stack_offset,
                calling_convention,
            ));
        }

        // Keep the native stack 16-byte aligned at the call instruction.
        let stack_unaligned =
            (self.machine.round_stack_adjust(self.stack_offset) + used_stack + stack_offset) % 16;
        if stack_unaligned != 0 {
            stack_offset += 16 - stack_unaligned;
        }
        self.machine.extend_stack(stack_offset as u32)?;

        // Memory-destined arguments are stored immediately; register-destined
        // ones are collected and emitted after sorting (presumably so no
        // destination register is clobbered before being read — see
        // `sort_call_movs`).
        #[allow(clippy::type_complexity)]
        let mut call_movs: Vec<(Location<M::GPR, M::SIMD>, M::GPR)> = vec![];
        for (i, (param, _)) in params.iter().enumerate().rev() {
            let loc = args[i];
            match loc {
                Location::GPR(x) => {
                    call_movs.push((*param, x));
                }
                Location::Memory(_, _) => {
                    self.machine
                        .move_location_for_native(param_sizes[i], *param, loc)?;
                }
                _ => {
                    return Err(CompileError::Codegen(
                        "emit_call_native loc: unreachable code".to_owned(),
                    ));
                }
            }
        }

        Self::sort_call_movs(&mut call_movs);

        for (loc, gpr) in call_movs {
            if loc != Location::GPR(gpr) {
                self.machine
                    .move_location(Size::S64, loc, Location::GPR(gpr))?;
            }
        }

        // The vmctx pointer is always native argument 0 when included.
        if matches!(call_type, NativeCallType::IncludeVMCtxArgument) {
            self.machine.move_location(
                Size::S64,
                Location::GPR(self.machine.get_vmctx_reg()),
                Location::GPR(
                    self.machine
                        .get_simple_param_location(0, calling_convention),
                ),
            )?;
        }

        if stack_padding > 0 {
            self.machine.extend_stack(stack_padding as u32)?;
        }
        // Release the scratch register so `cb` may use it for the call.
        self.machine.release_gpr(self.machine.get_gpr_for_call());

        let begin = self.machine.assembler_get_offset().0;
        cb(self)?;
        if matches!(call_type, NativeCallType::Unreachable) {
            let end = self.machine.assembler_get_offset().0;
            self.machine.mark_address_range_with_trap_code(
                TrapCode::UnreachableCodeReached,
                begin,
                end,
            );
        }

        // Copy each returned value into its reserved stack slot.
        for (i, &return_type) in return_value_sizes.iter().enumerate() {
            self.machine.move_location_for_native(
                return_type,
                return_args[i],
                return_values[i].0,
            )?;
        }

        if stack_offset + stack_padding > 0 {
            self.machine
                .truncate_stack((stack_offset + stack_padding) as u32)?;
        }

        if !used_simds.is_empty() {
            self.machine.pop_used_simd(&used_simds)?;
        }

        self.machine.pop_used_gpr(&used_gprs)?;

        // Stack params that were not recycled as return slots are done.
        let params_to_release =
            &stack_params[cmp::min(stack_params.len(), return_value_sizes.len())..];
        self.release_stack_locations(params_to_release)?;

        self.value_stack.extend(return_values);

        Ok(())
    }
830
831 fn op_memory<
833 F: FnOnce(&mut Self, bool, bool, i32, Label, Label) -> Result<(), CompileError>,
834 >(
835 &mut self,
836 cb: F,
837 ) -> Result<(), CompileError> {
838 let need_check = match self.memory_styles[MemoryIndex::new(0)] {
839 MemoryStyle::Static { .. } => false,
840 MemoryStyle::Dynamic { .. } => true,
841 };
842
843 let offset = if self.module.num_imported_memories != 0 {
844 self.vmoffsets
845 .vmctx_vmmemory_import_definition(MemoryIndex::new(0))
846 } else {
847 self.vmoffsets
848 .vmctx_vmmemory_definition(LocalMemoryIndex::new(0))
849 };
850 cb(
851 self,
852 need_check,
853 self.module.num_imported_memories != 0,
854 offset as i32,
855 self.special_labels.heap_access_oob,
856 self.special_labels.unaligned_atomic,
857 )
858 }
859
    /// Emit the function entry sequence: prologue, local setup, a scratch
    /// reservation, preallocated return-value slots, the implicit
    /// function-level control frame, and the stack-overflow check.
    fn emit_head(&mut self) -> Result<(), CompileError> {
        self.add_assembly_comment(AssemblyComment::FunctionPrologue);
        self.machine.emit_function_prolog()?;

        self.locals = self.init_locals(
            self.local_types.len(),
            self.signature.clone(),
            self.calling_convention,
        )?;

        // Reserve a 32-byte area labelled as red zone — size presumably
        // matches backend assumptions; confirm against `Machine` impls.
        self.add_assembly_comment(AssemblyComment::RedZone);
        self.machine.extend_stack(32)?;

        let return_types: SmallVec<_> = self
            .signature
            .results()
            .iter()
            .map(type_to_wp_type)
            .collect();

        // Seed the value stack with the locations where the function's
        // return values will be assembled.
        self.value_stack.extend((0..return_types.len()).map(|i| {
            (
                self.machine
                    .get_call_return_value_location(i, self.calling_convention),
                CanonicalizeType::None,
            )
        }));

        // The implicit frame for the whole function body; its depth covers
        // exactly the return slots pushed above.
        self.control_stack.push(ControlFrame {
            state: ControlState::Function,
            label: self.machine.get_label(),
            value_stack_depth: return_types.len(),
            param_types: smallvec![],
            return_types,
        });

        self.machine.insert_stackoverflow();
        self.add_assembly_comment(AssemblyComment::FunctionBody);

        Ok(())
    }
908
    /// Create a code generator for one local function and immediately emit
    /// its entry code (`emit_head`).
    ///
    /// `local_types_excluding_arguments` lists only the declared locals;
    /// parameter types are prepended from the signature.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        module: &'a ModuleInfo,
        config: &'a Singlepass,
        vmoffsets: &'a VMOffsets,
        memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
        _table_styles: &'a PrimaryMap<TableIndex, TableStyle>,
        local_func_index: LocalFunctionIndex,
        local_types_excluding_arguments: &[WpType],
        machine: M,
        calling_convention: CallingConvention,
    ) -> Result<FuncGen<'a, M>, CompileError> {
        let func_index = module.func_index(local_func_index);
        let sig_index = module.functions[func_index];
        let signature = module.signatures[sig_index].clone();

        // Full local list: parameters first, then declared locals.
        let mut local_types: Vec<_> = signature.params().iter().map(type_to_wp_type).collect();
        local_types.extend_from_slice(local_types_excluding_arguments);

        let mut machine = machine;
        // One shared label per trap condition.
        let special_labels = SpecialLabelSet {
            integer_division_by_zero: machine.get_label(),
            integer_overflow: machine.get_label(),
            heap_access_oob: machine.get_label(),
            table_access_oob: machine.get_label(),
            indirect_call_null: machine.get_label(),
            bad_signature: machine.get_label(),
            unaligned_atomic: machine.get_label(),
        };
        // Prefer the module's name map; fall back to an index-based name.
        let function_name = module
            .function_names
            .get(&func_index)
            .map(|fname| fname.to_string())
            .unwrap_or_else(|| format!("function_{}", func_index.as_u32()));

        let mut fg = FuncGen {
            module,
            config,
            vmoffsets,
            memory_styles,
            signature,
            locals: vec![],
            local_types,
            value_stack: vec![],
            control_stack: vec![],
            stack_offset: 0,
            save_area_offset: None,
            machine,
            unreachable_depth: 0,
            local_func_index,
            relocations: vec![],
            special_labels,
            calling_convention,
            function_name,
            assembly_comments: HashMap::new(),
        };
        fg.emit_head()?;
        Ok(fg)
    }
969
    /// True while at least one control frame — including the implicit
    /// function-level frame pushed by `emit_head` — is still open.
    pub fn has_control_frames(&self) -> bool {
        !self.control_stack.is_empty()
    }
973
974 fn emit_return_values(
980 &mut self,
981 value_stack_depth_after: usize,
982 return_values: usize,
983 ) -> Result<(), CompileError> {
984 for (i, (stack_value, canonicalize)) in self
985 .value_stack
986 .iter()
987 .rev()
988 .take(return_values)
989 .enumerate()
990 {
991 let dst = self.value_stack[value_stack_depth_after - i - 1].0;
992 if let Some(canonicalize_size) = canonicalize.to_size()
993 && self.config.enable_nan_canonicalization
994 {
995 self.machine
996 .canonicalize_nan(canonicalize_size, *stack_value, dst)?;
997 } else {
998 self.machine
999 .emit_relaxed_mov(Size::S64, *stack_value, dst)?;
1000 }
1001 }
1002
1003 Ok(())
1004 }
1005
1006 fn emit_loop_params_store(
1009 &mut self,
1010 value_stack_depth_after: usize,
1011 param_count: usize,
1012 ) -> Result<(), CompileError> {
1013 for (i, (stack_value, _)) in self
1014 .value_stack
1015 .iter()
1016 .rev()
1017 .take(param_count)
1018 .rev()
1019 .enumerate()
1020 {
1021 let dst = self.value_stack[value_stack_depth_after + i].0;
1022 self.machine
1023 .emit_relaxed_mov(Size::S64, *stack_value, dst)?;
1024 }
1025
1026 Ok(())
1027 }
1028
1029 fn return_types_for_block(&self, block_type: WpTypeOrFuncType) -> SmallVec<[WpType; 1]> {
1030 match block_type {
1031 WpTypeOrFuncType::Empty => smallvec![],
1032 WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
1033 WpTypeOrFuncType::FuncType(sig_index) => SmallVec::from_iter(
1034 self.module.signatures[SignatureIndex::from_u32(sig_index)]
1035 .results()
1036 .iter()
1037 .map(type_to_wp_type),
1038 ),
1039 }
1040 }
1041
1042 fn param_types_for_block(&self, block_type: WpTypeOrFuncType) -> SmallVec<[WpType; 8]> {
1043 match block_type {
1044 WpTypeOrFuncType::Empty | WpTypeOrFuncType::Type(_) => smallvec![],
1045 WpTypeOrFuncType::FuncType(sig_index) => SmallVec::from_iter(
1046 self.module.signatures[SignatureIndex::from_u32(sig_index)]
1047 .params()
1048 .iter()
1049 .map(type_to_wp_type),
1050 ),
1051 }
1052 }
1053
1054 pub fn feed_operator(&mut self, op: Operator) -> Result<(), CompileError> {
1055 let was_unreachable;
1056
1057 if self.unreachable_depth > 0 {
1058 was_unreachable = true;
1059
1060 match op {
1061 Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } => {
1062 self.unreachable_depth += 1;
1063 }
1064 Operator::End => {
1065 self.unreachable_depth -= 1;
1066 }
1067 Operator::Else => {
1068 if self.unreachable_depth == 1
1070 && self
1071 .control_stack
1072 .last()
1073 .is_some_and(|frame| matches!(frame.state, ControlState::If { .. }))
1074 {
1075 self.unreachable_depth -= 1;
1076 }
1077 }
1078 _ => {}
1079 }
1080 if self.unreachable_depth > 0 {
1081 return Ok(());
1082 }
1083 } else {
1084 was_unreachable = false;
1085 }
1086
1087 match op {
1088 Operator::GlobalGet { global_index } => {
1089 let global_index = GlobalIndex::from_u32(global_index);
1090
1091 let ty = type_to_wp_type(&self.module.globals[global_index].ty);
1092 let loc = self.acquire_location(&ty)?;
1093 self.value_stack.push((loc, CanonicalizeType::None));
1094
1095 let tmp = self.machine.acquire_temp_gpr().unwrap();
1096
1097 let src = if let Some(local_global_index) =
1098 self.module.local_global_index(global_index)
1099 {
1100 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
1101 self.machine.emit_relaxed_mov(
1102 Size::S64,
1103 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1104 Location::GPR(tmp),
1105 )?;
1106 Location::Memory(tmp, 0)
1107 } else {
1108 let offset = self
1110 .vmoffsets
1111 .vmctx_vmglobal_import_definition(global_index);
1112 self.machine.emit_relaxed_mov(
1113 Size::S64,
1114 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1115 Location::GPR(tmp),
1116 )?;
1117 Location::Memory(tmp, 0)
1118 };
1119
1120 self.machine.emit_relaxed_mov(Size::S64, src, loc)?;
1121
1122 self.machine.release_gpr(tmp);
1123 }
1124 Operator::GlobalSet { global_index } => {
1125 let global_index = GlobalIndex::from_u32(global_index);
1126 let tmp = self.machine.acquire_temp_gpr().unwrap();
1127 let dst = if let Some(local_global_index) =
1128 self.module.local_global_index(global_index)
1129 {
1130 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
1131 self.machine.emit_relaxed_mov(
1132 Size::S64,
1133 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1134 Location::GPR(tmp),
1135 )?;
1136 Location::Memory(tmp, 0)
1137 } else {
1138 let offset = self
1140 .vmoffsets
1141 .vmctx_vmglobal_import_definition(global_index);
1142 self.machine.emit_relaxed_mov(
1143 Size::S64,
1144 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1145 Location::GPR(tmp),
1146 )?;
1147 Location::Memory(tmp, 0)
1148 };
1149 let (loc, canonicalize) = self.pop_value_released()?;
1150 if let Some(canonicalize_size) = canonicalize.to_size() {
1151 if self.config.enable_nan_canonicalization {
1152 self.machine.canonicalize_nan(canonicalize_size, loc, dst)?;
1153 } else {
1154 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
1155 }
1156 } else {
1157 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
1158 }
1159 self.machine.release_gpr(tmp);
1160 }
1161 Operator::LocalGet { local_index } => {
1162 let local_index = local_index as usize;
1163 let ret = self.acquire_location(&WpType::I64)?;
1164 self.machine
1165 .emit_relaxed_mov(Size::S64, self.locals[local_index], ret)?;
1166 self.value_stack.push((ret, CanonicalizeType::None));
1167 }
1168 Operator::LocalSet { local_index } => {
1169 let local_index = local_index as usize;
1170 let (loc, canonicalize) = self.pop_value_released()?;
1171
1172 if self.local_types[local_index].is_float()
1173 && let Some(canonicalize_size) = canonicalize.to_size()
1174 {
1175 if self.config.enable_nan_canonicalization {
1176 self.machine.canonicalize_nan(
1177 canonicalize_size,
1178 loc,
1179 self.locals[local_index],
1180 )
1181 } else {
1182 self.machine
1183 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1184 }
1185 } else {
1186 self.machine
1187 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1188 }?;
1189 }
1190 Operator::LocalTee { local_index } => {
1191 let local_index = local_index as usize;
1192 let (loc, canonicalize) = *self.value_stack.last().unwrap();
1193
1194 if self.local_types[local_index].is_float()
1195 && let Some(canonicalize_size) = canonicalize.to_size()
1196 {
1197 if self.config.enable_nan_canonicalization {
1198 self.machine.canonicalize_nan(
1199 canonicalize_size,
1200 loc,
1201 self.locals[local_index],
1202 )
1203 } else {
1204 self.machine
1205 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1206 }
1207 } else {
1208 self.machine
1209 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1210 }?;
1211 }
            // --- i32 constants and integer ALU / comparison operators ---
            //
            // Binary operators go through `i2o1_prepare(WpType::I32,
            // CanonicalizeType::None)`, which pops the two operands from the
            // value stack and acquires a result location; the result is then
            // pushed tagged `CanonicalizeType::None` (NaN canonicalization
            // never applies to integer results).
            Operator::I32Const { value } => {
                // Constants stay as immediates on the value stack; no code is
                // emitted until the value is actually consumed.
                self.value_stack
                    .push((Location::Imm32(value as u32), CanonicalizeType::None));
            }
            Operator::I32Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_add32(loc_a, loc_b, ret)?;
            }
            Operator::I32Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_sub32(loc_a, loc_b, ret)?;
            }
            Operator::I32Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_mul32(loc_a, loc_b, ret)?;
            }
            Operator::I32DivU => {
                // Unsigned division can only trap on a zero divisor; the
                // emitter branches to the shared division-by-zero trap label.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_udiv32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I32DivS => {
                // Signed division additionally traps on overflow
                // (i32::MIN / -1), hence the extra overflow label.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_sdiv32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                    self.special_labels.integer_overflow,
                )?;
            }
            Operator::I32RemU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_urem32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I32RemS => {
                // NOTE(review): only the division-by-zero label is passed, so
                // the i32.rem_s corner case (i32::MIN % -1 == 0, no trap) is
                // presumably handled inside emit_binop_srem32 — confirm.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_srem32(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I32And => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_and32(loc_a, loc_b, ret)?;
            }
            Operator::I32Or => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_or32(loc_a, loc_b, ret)?;
            }
            Operator::I32Xor => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.emit_binop_xor32(loc_a, loc_b, ret)?;
            }
            Operator::I32Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::I32Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::I32Eqz => {
                // eqz is lowered as a comparison against the immediate 0.
                let loc_a = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.machine.i32_cmp_eq(loc_a, Location::Imm32(0), ret)?;
                self.value_stack.push((ret, CanonicalizeType::None));
            }
            Operator::I32Clz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_clz(loc, ret)?;
            }
            Operator::I32Ctz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_ctz(loc, ret)?;
            }
            Operator::I32Popcnt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i32_popcnt(loc, ret)?;
            }
            Operator::I32Shl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_shl(loc_a, loc_b, ret)?;
            }
            Operator::I32ShrU => {
                // Logical (zero-filling) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_shr(loc_a, loc_b, ret)?;
            }
            Operator::I32ShrS => {
                // Arithmetic (sign-preserving) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_sar(loc_a, loc_b, ret)?;
            }
            Operator::I32Rotl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_rol(loc_a, loc_b, ret)?;
            }
            Operator::I32Rotr => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_ror(loc_a, loc_b, ret)?;
            }
            Operator::I32LtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_lt_u(loc_a, loc_b, ret)?;
            }
            Operator::I32LeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_le_u(loc_a, loc_b, ret)?;
            }
            Operator::I32GtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_gt_u(loc_a, loc_b, ret)?;
            }
            Operator::I32GeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ge_u(loc_a, loc_b, ret)?;
            }
            Operator::I32LtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_lt_s(loc_a, loc_b, ret)?;
            }
            Operator::I32LeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_le_s(loc_a, loc_b, ret)?;
            }
            Operator::I32GtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_gt_s(loc_a, loc_b, ret)?;
            }
            Operator::I32GeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.i32_cmp_ge_s(loc_a, loc_b, ret)?;
            }
            // --- i64 constants, ALU / comparison operators, and the
            // --- integer width-conversion family ---
            //
            // These mirror the i32 arms above, operating on 64-bit locations.
            Operator::I64Const { value } => {
                let value = value as u64;
                self.value_stack
                    .push((Location::Imm64(value), CanonicalizeType::None));
            }
            Operator::I64Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_add64(loc_a, loc_b, ret)?;
            }
            Operator::I64Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_sub64(loc_a, loc_b, ret)?;
            }
            Operator::I64Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_mul64(loc_a, loc_b, ret)?;
            }
            Operator::I64DivU => {
                // Traps only on a zero divisor.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_udiv64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64DivS => {
                // Signed division also traps on overflow (i64::MIN / -1).
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_sdiv64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                    self.special_labels.integer_overflow,
                )?;
            }
            Operator::I64RemU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_urem64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64RemS => {
                // NOTE(review): as with I32RemS, no overflow label — the
                // i64::MIN % -1 case is presumably handled by the emitter.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_srem64(
                    loc_a,
                    loc_b,
                    ret,
                    self.special_labels.integer_division_by_zero,
                )?;
            }
            Operator::I64And => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_and64(loc_a, loc_b, ret)?;
            }
            Operator::I64Or => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_or64(loc_a, loc_b, ret)?;
            }
            Operator::I64Xor => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.emit_binop_xor64(loc_a, loc_b, ret)?;
            }
            Operator::I64Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::I64Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::I64Eqz => {
                // Lowered as a 64-bit comparison against the immediate 0.
                let loc_a = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
                self.value_stack.push((ret, CanonicalizeType::None));
            }
            Operator::I64Clz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_clz(loc, ret)?;
            }
            Operator::I64Ctz => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_ctz(loc, ret)?;
            }
            Operator::I64Popcnt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.i64_popcnt(loc, ret)?;
            }
            Operator::I64Shl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_shl(loc_a, loc_b, ret)?;
            }
            Operator::I64ShrU => {
                // Logical (zero-filling) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_shr(loc_a, loc_b, ret)?;
            }
            Operator::I64ShrS => {
                // Arithmetic (sign-preserving) right shift.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_sar(loc_a, loc_b, ret)?;
            }
            Operator::I64Rotl => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_rol(loc_a, loc_b, ret)?;
            }
            Operator::I64Rotr => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_ror(loc_a, loc_b, ret)?;
            }
            Operator::I64LtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_lt_u(loc_a, loc_b, ret)?;
            }
            Operator::I64LeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_le_u(loc_a, loc_b, ret)?;
            }
            Operator::I64GtU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_gt_u(loc_a, loc_b, ret)?;
            }
            Operator::I64GeU => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ge_u(loc_a, loc_b, ret)?;
            }
            Operator::I64LtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_lt_s(loc_a, loc_b, ret)?;
            }
            Operator::I64LeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_le_s(loc_a, loc_b, ret)?;
            }
            Operator::I64GtS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_gt_s(loc_a, loc_b, ret)?;
            }
            Operator::I64GeS => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I64, CanonicalizeType::None)?;
                self.machine.i64_cmp_ge_s(loc_a, loc_b, ret)?;
            }
            Operator::I64ExtendI32U => {
                // Zero-extension: copy the low 32 bits, then, if the result
                // slot is in memory, explicitly store a 32-bit zero into the
                // upper half at off + 4 (a plain 32-bit store would leave the
                // high word untouched).
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;

                if let Location::Memory(base, off) = ret {
                    self.machine.emit_relaxed_mov(
                        Size::S32,
                        Location::Imm32(0),
                        Location::Memory(base, off + 4),
                    )?;
                }
            }
            Operator::I64ExtendI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine
                    .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
            }
            Operator::I32Extend8S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S8, loc, Size::S32, ret)?;
            }
            Operator::I32Extend16S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S16, loc, Size::S32, ret)?;
            }
            Operator::I64Extend8S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S8, loc, Size::S64, ret)?;
            }
            Operator::I64Extend16S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S16, loc, Size::S64, ret)?;
            }
            Operator::I64Extend32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine
                    .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
            }
            Operator::I32WrapI64 => {
                // Wrapping is just a 32-bit move of the low half.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
            }
1630
            // --- f32 operators ---
            //
            // NOTE(review): arithmetic arms acquire their result slot as
            // `WpType::F64` — presumably because both float widths share the
            // same (SIMD) location class; confirm against acquire_location.
            // Results of add/sub/mul/div are tagged `CanonicalizeType::F32`
            // so that later consumers (e.g. calls) can canonicalize NaN bits
            // when nan canonicalization is enabled.
            Operator::F32Const { value } => {
                // f32 constants are carried as their raw bit pattern in an
                // i32 immediate.
                self.value_stack
                    .push((Location::Imm32(value.bits()), CanonicalizeType::None));
            }
            Operator::F32Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_add(loc_a, loc_b, ret)?;
            }
            Operator::F32Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_sub(loc_a, loc_b, ret)?;
            }
            Operator::F32Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_mul(loc_a, loc_b, ret)?;
            }
            Operator::F32Div => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F32)?;
                self.machine.f32_div(loc_a, loc_b, ret)?;
            }
            Operator::F32Max => {
                // NOTE(review): unlike add/sub/mul/div, min/max results are
                // not tagged for NaN canonicalization — confirm this is
                // intentional (e.g. the emitter canonicalizes internally).
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f32_max(loc_a, loc_b, ret)?;
            }
            Operator::F32Min => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f32_min(loc_a, loc_b, ret)?;
            }
            // Comparisons produce an i32 boolean result.
            Operator::F32Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::F32Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::F32Lt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_lt(loc_a, loc_b, ret)?;
            }
            Operator::F32Le => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_le(loc_a, loc_b, ret)?;
            }
            Operator::F32Gt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_gt(loc_a, loc_b, ret)?;
            }
            Operator::F32Ge => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f32_cmp_ge(loc_a, loc_b, ret)?;
            }
            // Unary rounding / sqrt: result tagged F32 for later NaN
            // canonicalization.
            Operator::F32Nearest => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_nearest(loc, ret)?;
            }
            Operator::F32Floor => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_floor(loc, ret)?;
            }
            Operator::F32Ceil => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_ceil(loc, ret)?;
            }
            Operator::F32Trunc => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_trunc(loc, ret)?;
            }
            Operator::F32Sqrt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F32));
                self.machine.f32_sqrt(loc, ret)?;
            }

            Operator::F32Copysign => {
                // Copysign is done bitwise in GPRs: both operands are moved
                // (or NaN-canonicalized, if enabled and the operand carries a
                // canonicalization tag) into temporary GPRs, the sign bit is
                // transferred, and the result is moved back out.
                let loc_b = self.pop_value_released()?;
                let loc_a = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let tmp1 = self.machine.acquire_temp_gpr().unwrap();
                let tmp2 = self.machine.acquire_temp_gpr().unwrap();

                if self.config.enable_nan_canonicalization {
                    for ((loc, fp), tmp) in [(loc_a, tmp1), (loc_b, tmp2)] {
                        if fp.to_size().is_some() {
                            self.machine
                                .canonicalize_nan(Size::S32, loc, Location::GPR(tmp))?
                        } else {
                            self.machine
                                .move_location(Size::S32, loc, Location::GPR(tmp))?
                        }
                    }
                } else {
                    self.machine
                        .move_location(Size::S32, loc_a.0, Location::GPR(tmp1))?;
                    self.machine
                        .move_location(Size::S32, loc_b.0, Location::GPR(tmp2))?;
                }
                self.machine.emit_i32_copysign(tmp1, tmp2)?;
                self.machine
                    .move_location(Size::S32, Location::GPR(tmp1), ret)?;
                self.machine.release_gpr(tmp2);
                self.machine.release_gpr(tmp1);
            }

            Operator::F32Abs => {
                // NOTE(review): abs/neg push CanonicalizeType::None here,
                // whereas the F64 counterparts propagate the popped tag —
                // confirm the asymmetry is intentional.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.f32_abs(loc, ret)?;
            }

            Operator::F32Neg => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.f32_neg(loc, ret)?;
            }
1777
            // --- f64 operators ---
            //
            // Structured identically to the f32 arms above, with 64-bit
            // sizes and `CanonicalizeType::F64` result tags on arithmetic.
            Operator::F64Const { value } => {
                // f64 constants are carried as their raw bit pattern in an
                // i64 immediate.
                self.value_stack
                    .push((Location::Imm64(value.bits()), CanonicalizeType::None));
            }
            Operator::F64Add => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_add(loc_a, loc_b, ret)?;
            }
            Operator::F64Sub => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_sub(loc_a, loc_b, ret)?;
            }
            Operator::F64Mul => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_mul(loc_a, loc_b, ret)?;
            }
            Operator::F64Div => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::F64)?;
                self.machine.f64_div(loc_a, loc_b, ret)?;
            }
            Operator::F64Max => {
                // NOTE(review): min/max results are untagged, matching the
                // f32 arms — confirm the emitter canonicalizes internally.
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f64_max(loc_a, loc_b, ret)?;
            }
            Operator::F64Min => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::F64, CanonicalizeType::None)?;
                self.machine.f64_min(loc_a, loc_b, ret)?;
            }
            // Comparisons produce an i32 boolean result.
            Operator::F64Eq => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_eq(loc_a, loc_b, ret)?;
            }
            Operator::F64Ne => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_ne(loc_a, loc_b, ret)?;
            }
            Operator::F64Lt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_lt(loc_a, loc_b, ret)?;
            }
            Operator::F64Le => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_le(loc_a, loc_b, ret)?;
            }
            Operator::F64Gt => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_gt(loc_a, loc_b, ret)?;
            }
            Operator::F64Ge => {
                let I2O1 { loc_a, loc_b, ret } =
                    self.i2o1_prepare(WpType::I32, CanonicalizeType::None)?;
                self.machine.f64_cmp_ge(loc_a, loc_b, ret)?;
            }
            // Unary rounding / sqrt: result tagged F64 for later NaN
            // canonicalization.
            Operator::F64Nearest => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_nearest(loc, ret)?;
            }
            Operator::F64Floor => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_floor(loc, ret)?;
            }
            Operator::F64Ceil => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_ceil(loc, ret)?;
            }
            Operator::F64Trunc => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_trunc(loc, ret)?;
            }
            Operator::F64Sqrt => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::F64));
                self.machine.f64_sqrt(loc, ret)?;
            }

            Operator::F64Copysign => {
                // Bitwise copysign via temporary GPRs; see the F32Copysign
                // arm for the same shape at 32 bits.
                let loc_b = self.pop_value_released()?;
                let loc_a = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let tmp1 = self.machine.acquire_temp_gpr().unwrap();
                let tmp2 = self.machine.acquire_temp_gpr().unwrap();

                if self.config.enable_nan_canonicalization {
                    for ((loc, fp), tmp) in [(loc_a, tmp1), (loc_b, tmp2)] {
                        if fp.to_size().is_some() {
                            self.machine
                                .canonicalize_nan(Size::S64, loc, Location::GPR(tmp))?
                        } else {
                            self.machine
                                .move_location(Size::S64, loc, Location::GPR(tmp))?
                        }
                    }
                } else {
                    self.machine
                        .move_location(Size::S64, loc_a.0, Location::GPR(tmp1))?;
                    self.machine
                        .move_location(Size::S64, loc_b.0, Location::GPR(tmp2))?;
                }
                self.machine.emit_i64_copysign(tmp1, tmp2)?;
                self.machine
                    .move_location(Size::S64, Location::GPR(tmp1), ret)?;

                self.machine.release_gpr(tmp2);
                self.machine.release_gpr(tmp1);
            }

            Operator::F64Abs => {
                // The popped canonicalization tag is propagated to the
                // result (abs/neg only touch the sign bit, so the NaN
                // payload status is unchanged).
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize));

                self.machine.f64_abs(loc, ret)?;
            }

            Operator::F64Neg => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize));

                self.machine.f64_neg(loc, ret)?;
            }
1921
            // --- float width conversions and bit reinterprets ---
            Operator::F64PromoteF32 => {
                // The canonicalization tag is converted along with the value
                // (F32 -> F64) via CanonicalizeType::promote.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize.promote()?));
                self.machine.convert_f64_f32(loc, ret)?;
            }
            Operator::F32DemoteF64 => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, canonicalize.demote()?));
                self.machine.convert_f32_f64(loc, ret)?;
            }

            Operator::I32ReinterpretF32 => {
                // Reinterpreting a float as an int makes its bit pattern
                // observable, so a pending NaN canonicalization must be
                // applied now (when enabled); otherwise it is a plain move.
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if !self.config.enable_nan_canonicalization
                    || matches!(canonicalize, CanonicalizeType::None)
                {
                    if loc != ret {
                        self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
                    }
                } else {
                    self.machine.canonicalize_nan(Size::S32, loc, ret)?;
                }
            }
            Operator::F32ReinterpretI32 => {
                // Int -> float reinterpret is a plain move; no
                // canonicalization can be pending on an integer value.
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if loc != ret {
                    self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
                }
            }

            Operator::I64ReinterpretF64 => {
                let (loc, canonicalize) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if !self.config.enable_nan_canonicalization
                    || matches!(canonicalize, CanonicalizeType::None)
                {
                    if loc != ret {
                        self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
                    }
                } else {
                    self.machine.canonicalize_nan(Size::S64, loc, ret)?;
                }
            }
            Operator::F64ReinterpretI64 => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                if loc != ret {
                    self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
                }
            }
1984
            // --- float -> int truncation and int -> float conversion ---
            //
            // The two boolean arguments to convert_iNN_fNN follow the
            // pattern (signed, saturating): plain `trunc` variants pass
            // saturating = false (they trap on NaN / out-of-range), the
            // `trunc_sat` variants pass saturating = true.
            // TODO(review): confirm the parameter order against the Machine
            // trait definition.
            Operator::I32TruncF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, false, false)?;
            }

            Operator::I32TruncSatF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, false, true)?;
            }

            Operator::I32TruncF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, true, false)?;
            }
            Operator::I32TruncSatF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f32(loc, ret, true, true)?;
            }

            Operator::I64TruncF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, true, false)?;
            }

            Operator::I64TruncSatF32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, true, true)?;
            }

            Operator::I64TruncF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, false, false)?;
            }
            Operator::I64TruncSatF32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f32(loc, ret, false, true)?;
            }

            Operator::I32TruncF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, false, false)?;
            }

            Operator::I32TruncSatF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, false, true)?;
            }

            Operator::I32TruncF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, true, false)?;
            }

            Operator::I32TruncSatF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i32_f64(loc, ret, true, true)?;
            }

            Operator::I64TruncF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, true, false)?;
            }

            Operator::I64TruncSatF64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, true, true)?;
            }

            Operator::I64TruncF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, false, false)?;
            }

            Operator::I64TruncSatF64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_i64_f64(loc, ret, false, true)?;
            }

            // int -> float conversions: convert_fNN_iNN takes a single
            // signedness flag (true = signed source); these cannot trap.
            Operator::F32ConvertI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i32(loc, true, ret)?;
            }
            Operator::F32ConvertI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i32(loc, false, ret)?;
            }
            Operator::F32ConvertI64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i64(loc, true, ret)?;
            }
            Operator::F32ConvertI64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f32_i64(loc, false, ret)?;
            }

            Operator::F64ConvertI32S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i32(loc, true, ret)?;
            }
            Operator::F64ConvertI32U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i32(loc, false, ret)?;
            }
            Operator::F64ConvertI64S => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i64(loc, true, ret)?;
            }
            Operator::F64ConvertI64U => {
                let loc = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                self.machine.convert_f64_i64(loc, false, ret)?;
            }
2168
            // --- direct function call ---
            Operator::Call { function_index } => {
                let function_index = function_index as usize;

                // Resolve the callee's signature to know how many stack
                // values are consumed and what comes back.
                let sig_index = *self
                    .module
                    .functions
                    .get(FunctionIndex::new(function_index))
                    .unwrap();
                let sig = self.module.signatures.get(sig_index).unwrap();
                let param_types: SmallVec<[WpType; 8]> =
                    sig.params().iter().map(type_to_wp_type).collect();
                let return_types: SmallVec<[WpType; 1]> =
                    sig.results().iter().map(type_to_wp_type).collect();

                // Pop the arguments (in stack order) off the value stack.
                let params: SmallVec<[_; 8]> = self
                    .value_stack
                    .drain(self.value_stack.len() - param_types.len()..)
                    .collect();

                if self.config.enable_nan_canonicalization {
                    // Float arguments whose tag carries a size are
                    // canonicalized in place before being passed across the
                    // call boundary.
                    for (loc, canonicalize) in params.iter() {
                        if let Some(size) = canonicalize.to_size() {
                            self.machine.canonicalize_nan(size, *loc, *loc)?;
                        }
                    }
                }

                // Imported functions are relocated against a custom section
                // keyed by the function index; local functions against their
                // LocalFunctionIndex (imports occupy the low indices).
                let reloc_target = if function_index < self.module.num_imported_functions {
                    RelocationTarget::CustomSection(SectionIndex::new(function_index))
                } else {
                    RelocationTarget::LocalFunc(LocalFunctionIndex::new(
                        function_index - self.module.num_imported_functions,
                    ))
                };
                let calling_convention = self.calling_convention;

                self.emit_call_native(
                    |this| {
                        // The call site is marked so a fault here is reported
                        // as a stack-overflow trap; the emitted relocations
                        // are collected for the final artifact.
                        let offset = this
                            .machine
                            .mark_instruction_with_trap_code(TrapCode::StackOverflow);
                        let mut relocations = this
                            .machine
                            .emit_call_with_reloc(calling_convention, reloc_target)?;
                        this.machine.mark_instruction_address_end(offset);
                        this.relocations.append(&mut relocations);
                        Ok(())
                    },
                    params.iter().copied(),
                    param_types.iter().copied(),
                    return_types.iter().copied(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // Indirect call through a table entry: bounds-check the index,
            // null-check the funcref, verify the runtime signature id, then
            // call through the anyfunc's function pointer with the callee's
            // own vmctx as first argument.
            Operator::CallIndirect {
                type_index,
                table_index,
            } => {
                let table_index = TableIndex::new(table_index as _);
                let index = SignatureIndex::new(type_index as usize);
                let sig = self.module.signatures.get(index).unwrap();
                let param_types: SmallVec<[WpType; 8]> =
                    sig.params().iter().map(type_to_wp_type).collect();
                let return_types: SmallVec<[WpType; 1]> =
                    sig.results().iter().map(type_to_wp_type).collect();

                // Dynamic table index (pushed last, above the arguments).
                let func_index = self.pop_value_released()?.0;

                let params: SmallVec<[_; 8]> = self
                    .value_stack
                    .drain(self.value_stack.len() - param_types.len()..)
                    .collect();

                if self.config.enable_nan_canonicalization {
                    for (loc, canonicalize) in params.iter() {
                        if let Some(size) = canonicalize.to_size() {
                            self.machine.canonicalize_nan(size, *loc, *loc)?;
                        }
                    }
                }

                // Scratch registers: table data pointer, element count
                // (later reused for the computed entry address), and the
                // expected signature id.
                let table_base = self.machine.acquire_temp_gpr().unwrap();
                let table_count = self.machine.acquire_temp_gpr().unwrap();
                let sigidx = self.machine.acquire_temp_gpr().unwrap();

                if let Some(local_table_index) = self.module.local_table_index(table_index) {
                    // Local table: base pointer and current length live
                    // directly in this instance's vmctx.
                    let (vmctx_offset_base, vmctx_offset_len) = (
                        self.vmoffsets.vmctx_vmtable_definition(local_table_index),
                        self.vmoffsets
                            .vmctx_vmtable_definition_current_elements(local_table_index),
                    );
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_base as i32),
                        Location::GPR(table_base),
                    )?;
                    self.machine.move_location(
                        Size::S32,
                        Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_len as i32),
                        Location::GPR(table_count),
                    )?;
                } else {
                    // Imported table: one extra indirection through the
                    // VMTableImport record to reach the owner's definition.
                    let import_offset = self.vmoffsets.vmctx_vmtable_import(table_index);
                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(self.machine.get_vmctx_reg(), import_offset as i32),
                        Location::GPR(table_base),
                    )?;

                    // Load the length first: the base load below overwrites
                    // table_base (the definition pointer) in place.
                    self.machine.move_location(
                        Size::S32,
                        Location::Memory(
                            table_base,
                            self.vmoffsets.vmtable_definition_current_elements() as _,
                        ),
                        Location::GPR(table_count),
                    )?;

                    self.machine.move_location(
                        Size::S64,
                        Location::Memory(table_base, self.vmoffsets.vmtable_definition_base() as _),
                        Location::GPR(table_base),
                    )?;
                }

                // Trap if count <= index, i.e. index is out of bounds
                // (unsigned compare).
                self.machine.jmp_on_condition(
                    UnsignedCondition::BelowEqual,
                    Size::S32,
                    Location::GPR(table_count),
                    func_index,
                    self.special_labels.table_access_oob,
                )?;
                // entry_addr = table_base + index * sizeof(VMFuncRef);
                // table_count is repurposed to hold the scaled index, then
                // the entry address.
                self.machine
                    .move_location(Size::S32, func_index, Location::GPR(table_count))?;
                self.machine.emit_imul_imm32(
                    Size::S64,
                    self.vmoffsets.size_of_vm_funcref() as u32,
                    table_count,
                )?;
                self.machine.location_add(
                    Size::S64,
                    Location::GPR(table_base),
                    Location::GPR(table_count),
                    false,
                )?;

                // Dereference the funcref to its anyfunc pointer and trap on
                // a null (uninitialized) entry.
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(table_count, self.vmoffsets.vm_funcref_anyfunc_ptr() as i32),
                    Location::GPR(table_count),
                )?;
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S64,
                    Location::GPR(table_count),
                    Location::Imm32(0),
                    self.special_labels.indirect_call_null,
                )?;
                // Expected signature id for this call's declared type, from
                // the caller's vmctx.
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_vmshared_signature_id(index) as i32,
                    ),
                    Location::GPR(sigidx),
                )?;

                // Trap on signature mismatch against the anyfunc's recorded
                // type index.
                self.machine.jmp_on_condition(
                    UnsignedCondition::NotEqual,
                    Size::S32,
                    Location::GPR(sigidx),
                    Location::Memory(
                        table_count,
                        (self.vmoffsets.vmcaller_checked_anyfunc_type_index() as usize) as i32,
                    ),
                    self.special_labels.bad_signature,
                )?;
                // Release the scratch registers before argument setup; the
                // anyfunc pointer still held in table_count is moved into
                // the dedicated call register immediately below.
                self.machine.release_gpr(sigidx);
                self.machine.release_gpr(table_count);
                self.machine.release_gpr(table_base);

                let gpr_for_call = self.machine.get_gpr_for_call();
                if table_count != gpr_for_call {
                    self.machine.move_location(
                        Size::S64,
                        Location::GPR(table_count),
                        Location::GPR(gpr_for_call),
                    )?;
                }

                let vmcaller_checked_anyfunc_func_ptr =
                    self.vmoffsets.vmcaller_checked_anyfunc_func_ptr() as usize;
                let vmcaller_checked_anyfunc_vmctx =
                    self.vmoffsets.vmcaller_checked_anyfunc_vmctx() as usize;
                let calling_convention = self.calling_convention;

                self.emit_call_native(
                    |this| {
                        let offset = this
                            .machine
                            .mark_instruction_with_trap_code(TrapCode::StackOverflow);

                        // First argument slot receives the callee's vmctx
                        // (loaded from the anyfunc), not the caller's.
                        this.machine.move_location(
                            Size::S64,
                            Location::Memory(gpr_for_call, vmcaller_checked_anyfunc_vmctx as i32),
                            Location::GPR(
                                this.machine
                                    .get_simple_param_location(0, calling_convention),
                            ),
                        )?;

                        this.machine.emit_call_location(Location::Memory(
                            gpr_for_call,
                            vmcaller_checked_anyfunc_func_ptr as i32,
                        ))?;
                        this.machine.mark_instruction_address_end(offset);
                        Ok(())
                    },
                    params.iter().copied(),
                    param_types.iter().copied(),
                    return_types.iter().copied(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `if` block: allocate result slots, pop the condition, record a
            // control frame carrying the then/else labels and the block
            // inputs, and branch to the else label when the condition is 0.
            Operator::If { blockty } => {
                let label_end = self.machine.get_label();
                let label_else = self.machine.get_label();

                let return_types = self.return_types_for_block(blockty);
                let param_types = self.param_types_for_block(blockty);
                // `+ 1` accounts for the condition value still sitting on
                // the value stack above the block parameters.
                self.allocate_return_slots_and_swap(param_types.len() + 1, return_types.len())?;

                let cond = self.pop_value_released()?.0;

                // When the block has as many results as parameters, copy the
                // inputs into the return slots up front (NOTE(review):
                // presumably so an empty/else-less body yields its inputs —
                // confirm against the `End` handling).
                if param_types.len() == return_types.len() {
                    for (input, return_value) in self
                        .value_stack
                        .iter()
                        .rev()
                        .take(param_types.len())
                        .zip(self.value_stack.iter().rev().skip(param_types.len()))
                    {
                        self.machine
                            .emit_relaxed_mov(Size::S64, input.0, return_value.0)?;
                    }
                }

                let frame = ControlFrame {
                    state: ControlState::If {
                        label_else,
                        // Snapshot the input locations so the else branch can
                        // re-materialize them (see Operator::Else).
                        inputs: SmallVec::from_iter(
                            self.value_stack
                                .iter()
                                .rev()
                                .take(param_types.len())
                                .rev()
                                .copied(),
                        ),
                    },
                    label: label_end,
                    param_types,
                    return_types,
                    value_stack_depth: self.value_stack.len(),
                };
                self.control_stack.push(frame);
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    label_else,
                )?;
            }
            // `else`: finish the then-branch (store its results, drop its
            // stack entries), re-materialize the `if` inputs for the else
            // body, jump over it to the end label, and bind the else label.
            Operator::Else => {
                let frame = self.control_stack.last().unwrap();

                // Only emit result stores if the then-branch was reachable.
                if !was_unreachable && !frame.return_types.is_empty() {
                    self.emit_return_values(
                        frame.value_stack_depth_after(),
                        frame.return_types.len(),
                    )?;
                }

                // Drop everything the then-branch left on the value stack.
                let frame = &self.control_stack.last_mut().unwrap();
                let locs = self
                    .value_stack
                    .drain(frame.value_stack_depth_after()..)
                    .collect_vec();
                self.release_locations(&locs)?;
                let frame = &mut self.control_stack.last_mut().unwrap();

                let ControlState::If {
                    label_else,
                    ref inputs,
                } = frame.state
                else {
                    panic!("Operator::Else must be connected to Operator::If statement");
                };
                // Re-reserve the resources backing the saved input
                // locations: the release above freed them, but the else body
                // starts from the same inputs as the then body.
                for (input, _) in inputs {
                    match input {
                        Location::GPR(x) => {
                            self.machine.reserve_gpr(*x);
                        }
                        Location::SIMD(x) => {
                            self.machine.reserve_simd(*x);
                        }
                        Location::Memory(reg, _) => {
                            // Stack slots are always relative to the local
                            // pointer; re-account for the 8-byte slot.
                            debug_assert_eq!(reg, &self.machine.local_pointer());
                            self.stack_offset += 8;
                        }
                        _ => {}
                    }
                }
                self.value_stack.extend(inputs);

                // Fall-through of the then-branch skips the else body.
                self.machine.jmp_unconditional(frame.label)?;
                self.machine.emit_label(label_else)?;
                frame.state = ControlState::Else;
            }
            // `select` / typed `select`: branchy implementation. Pops
            // cond, then the two candidates (b on top of a), and moves the
            // chosen one into a fresh I64-sized slot. NaN canonicalization
            // is folded into the move when the source needs it.
            Operator::TypedSelect { .. } | Operator::Select => {
                let cond = self.pop_value_released()?.0;
                let (v_b, canonicalize_b) = self.pop_value_released()?;
                let (v_a, canonicalize_a) = self.pop_value_released()?;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));

                let end_label = self.machine.get_label();
                let zero_label = self.machine.get_label();

                // cond == 0 selects v_b (the WASM semantics: non-zero picks
                // the first operand, v_a).
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    zero_label,
                )?;
                if self.config.enable_nan_canonicalization
                    && let Some(size) = canonicalize_a.to_size()
                {
                    self.machine.canonicalize_nan(size, v_a, ret)?;
                } else if v_a != ret {
                    self.machine.emit_relaxed_mov(Size::S64, v_a, ret)?;
                }
                self.machine.jmp_unconditional(end_label)?;
                self.machine.emit_label(zero_label)?;
                if self.config.enable_nan_canonicalization
                    && let Some(size) = canonicalize_b.to_size()
                {
                    self.machine.canonicalize_nan(size, v_b, ret)?;
                } else if v_b != ret {
                    self.machine.emit_relaxed_mov(Size::S64, v_b, ret)?;
                }
                self.machine.emit_label(end_label)?;
            }
            // Plain `block`: allocate result slots and push a control frame
            // whose label is the (not yet emitted) end-of-block target.
            Operator::Block { blockty } => {
                let return_types = self.return_types_for_block(blockty);
                let param_types = self.param_types_for_block(blockty);
                self.allocate_return_slots_and_swap(param_types.len(), return_types.len())?;

                let frame = ControlFrame {
                    state: ControlState::Block,
                    label: self.machine.get_label(),
                    param_types,
                    return_types,
                    value_stack_depth: self.value_stack.len(),
                };
                self.control_stack.push(frame);
            }
            // `loop`: branches to a loop target re-enter at the top, so loop
            // parameters are modeled as phi slots allocated below the loop
            // header. Entry stores the current params into those slots;
            // after the label the body gets fresh copies so the phi slots
            // stay stable across back-edges.
            Operator::Loop { blockty } => {
                // Align the loop header for the target architecture.
                self.machine.align_for_loop()?;
                let label = self.machine.get_label();

                let return_types = self.return_types_for_block(blockty);
                let param_types = self.param_types_for_block(blockty);
                let params_count = param_types.len();
                // Reserve params + results worth of stable slots beneath the
                // current params (phi slots + return slots).
                self.allocate_return_slots_and_swap(
                    param_types.len(),
                    param_types.len() + return_types.len(),
                )?;

                self.control_stack.push(ControlFrame {
                    state: ControlState::Loop,
                    label,
                    param_types: param_types.clone(),
                    return_types: return_types.clone(),
                    value_stack_depth: self.value_stack.len(),
                });

                // Store the incoming parameter values into the phi slots,
                // then release the originals.
                let params = self
                    .value_stack
                    .drain((self.value_stack.len() - params_count)..)
                    .collect_vec();
                for (param, phi_param) in params.iter().rev().zip(self.value_stack.iter().rev()) {
                    self.machine
                        .emit_relaxed_mov(Size::S64, param.0, phi_param.0)?;
                }
                self.release_locations(&params)?;

                // Back-edge target: every `br` to this frame re-enters here.
                self.machine.emit_label(label)?;

                // Give the loop body working copies of the phi values so the
                // phi slots themselves are only written at branch points.
                let phi_params = self
                    .value_stack
                    .iter()
                    .rev()
                    .take(params_count)
                    .rev()
                    .copied()
                    .collect_vec();
                for (i, phi_param) in phi_params.into_iter().enumerate() {
                    let loc = self.acquire_location(&param_types[i])?;
                    self.machine.emit_relaxed_mov(Size::S64, phi_param.0, loc)?;
                    self.value_stack.push((loc, phi_param.1));
                }

            }
            // `nop` emits nothing.
            Operator::Nop => {}
            // `memory.size`: call the appropriate runtime builtin (local vs
            // imported memory variant) with the memory index as argument.
            Operator::MemorySize { mem } => {
                let memory_index = MemoryIndex::new(mem as usize);
                // Load the builtin's function pointer out of the vmctx
                // builtin table into the call register.
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(
                            if self.module.local_memory_index(memory_index).is_some() {
                                VMBuiltinFunctionIndex::get_memory32_size_index()
                            } else {
                                VMBuiltinFunctionIndex::get_imported_memory32_size_index()
                            },
                        ) as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;
                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    iter::once((
                        Location::Imm32(memory_index.index() as u32),
                        CanonicalizeType::None,
                    )),
                    iter::once(WpType::I64),
                    iter::once(WpType::I64),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `memory.init`: pops (dst, src, len) and forwards them, plus
            // the memory and data-segment indices, to the runtime builtin.
            Operator::MemoryInit { data_index, mem } => {
                let len = self.value_stack.pop().unwrap();
                let src = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets
                            .vmctx_builtin_function(VMBuiltinFunctionIndex::get_memory_init_index())
                            as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    // Builtin signature: (mem, data_index, dst, src, len).
                    [
                        (Location::Imm32(mem), CanonicalizeType::None),
                        (Location::Imm32(data_index), CanonicalizeType::None),
                        dst,
                        src,
                        len,
                    ]
                    .iter()
                    .cloned(),
                    [
                        WpType::I64,
                        WpType::I64,
                        WpType::I64,
                        WpType::I64,
                        WpType::I64,
                    ]
                    .iter()
                    .cloned(),
                    iter::empty(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `data.drop`: call the runtime builtin with the data-segment
            // index; no results.
            Operator::DataDrop { data_index } => {
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets
                            .vmctx_builtin_function(VMBuiltinFunctionIndex::get_data_drop_index())
                            as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    iter::once((Location::Imm32(data_index), CanonicalizeType::None)),
                    iter::once(WpType::I64),
                    iter::empty(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `memory.copy`: pops (dst, src, len) and dispatches to the
            // local- or imported-memory builtin. NOTE(review): only
            // `src_mem` is used to select the memory; the destination memory
            // field is ignored — presumably single-memory semantics.
            Operator::MemoryCopy { src_mem, .. } => {
                let len = self.value_stack.pop().unwrap();
                let src_pos = self.value_stack.pop().unwrap();
                let dst_pos = self.value_stack.pop().unwrap();

                let memory_index = MemoryIndex::new(src_mem as usize);
                let (memory_copy_index, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_copy_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_copy_index) as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    [
                        (
                            Location::Imm32(memory_index.index() as u32),
                            CanonicalizeType::None,
                        ),
                        dst_pos,
                        src_pos,
                        len,
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
                    .iter()
                    .cloned(),
                    iter::empty(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `memory.fill`: pops (dst, val, len) and dispatches to the
            // local- or imported-memory fill builtin.
            Operator::MemoryFill { mem } => {
                let len = self.value_stack.pop().unwrap();
                let val = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();

                let memory_index = MemoryIndex::new(mem as usize);
                let (memory_fill_index, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_fill_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_fill_index) as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    [
                        (
                            Location::Imm32(memory_index.index() as u32),
                            CanonicalizeType::None,
                        ),
                        dst,
                        val,
                        len,
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
                    .iter()
                    .cloned(),
                    iter::empty(),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // `memory.grow`: pops the page delta and calls the grow builtin
            // (local vs imported variant); returns the previous size.
            Operator::MemoryGrow { mem } => {
                let memory_index = MemoryIndex::new(mem as usize);
                let param_pages = self.value_stack.pop().unwrap();

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(
                            if self.module.local_memory_index(memory_index).is_some() {
                                VMBuiltinFunctionIndex::get_memory32_grow_index()
                            } else {
                                VMBuiltinFunctionIndex::get_imported_memory32_grow_index()
                            },
                        ) as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    // Argument order: (delta_pages, memory_index).
                    [
                        param_pages,
                        (
                            Location::Imm32(memory_index.index() as u32),
                            CanonicalizeType::None,
                        ),
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I64, WpType::I64].iter().cloned(),
                    iter::once(WpType::I64),
                    NativeCallType::IncludeVMCtxArgument,
                )?;
            }
            // 32-bit load family. Each arm pops the address, acquires a
            // result slot, and delegates to `op_memory`, which supplies the
            // bounds-check mode, memory locality, vmctx offset, and the trap
            // labels to the machine-level load emitter.
            Operator::I32Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::F32Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.f32_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Narrow loads: zero-extending (8u/16u) vs sign-extending
            // (8s/16s) into an i32.
            Operator::I32Load8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32Load8S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_8s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32Load16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32Load16S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_load_16s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 32-bit store family: pop value then address and delegate to
            // `op_memory` with the matching machine-level save emitter.
            Operator::I32Store { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Float store additionally tells the emitter whether to
            // canonicalize a pending NaN before the value hits memory.
            Operator::F32Store { ref memarg } => {
                let (target_value, canonicalize) = self.pop_value_released()?;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.f32_save(
                            target_value,
                            memarg,
                            target_addr,
                            self.config.enable_nan_canonicalization
                                && !matches!(canonicalize, CanonicalizeType::None),
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // Truncating narrow stores (low 8/16 bits of the i32 value).
            Operator::I32Store8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32Store16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 64-bit load family; same shape as the 32-bit arms, with
            // zero-extending (8u/16u/32u) and sign-extending (8s/16s/32s)
            // narrow variants into an i64.
            Operator::I64Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::F64Load { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::F64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.f64_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Load8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Load8S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_8s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Load16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Load16S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_16s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Load32U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_32u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Load32S { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_load_32s(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 64-bit store family: pop value then address; narrow variants
            // truncate to the low 8/16/32 bits. The f64 store folds NaN
            // canonicalization into the emitter, as in the f32 case.
            Operator::I64Store { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;

                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::F64Store { ref memarg } => {
                let (target_value, canonicalize) = self.pop_value_released()?;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.f64_save(
                            target_value,
                            memarg,
                            target_addr,
                            self.config.enable_nan_canonicalization
                                && !matches!(canonicalize, CanonicalizeType::None),
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Store8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Store16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64Store32 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_save_32(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // `unreachable`: call the runtime raise-trap builtin with
            // TrapCode::UnreachableCodeReached (never returns), then mark
            // the following code as dead for the translator.
            Operator::Unreachable => {
                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets
                            .vmctx_builtin_function(VMBuiltinFunctionIndex::get_raise_trap_index())
                            as i32,
                    ),
                    Location::GPR(self.machine.get_gpr_for_call()),
                )?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_gpr_for_call())
                    },
                    [(
                        Location::Imm32(TrapCode::UnreachableCodeReached as u32),
                        CanonicalizeType::None,
                    )]
                    .iter()
                    .cloned(),
                    [WpType::I32].iter().cloned(),
                    iter::empty(),
                    NativeCallType::Unreachable,
                )?;
                // Skip translation until the enclosing block ends.
                self.unreachable_depth = 1;
            }
            // `return`: branch to the function frame's (outermost) end
            // label. Store result values into the return slots first, then
            // release stack locations without shrinking the tracked stack
            // offset (other paths may still reach this depth).
            Operator::Return => {
                let frame = &self.control_stack[0];
                if !frame.return_types.is_empty() {
                    self.emit_return_values(
                        frame.value_stack_depth_after(),
                        frame.return_types.len(),
                    )?;
                }
                let frame = &self.control_stack[0];
                let frame_depth = frame.value_stack_depth_for_release();
                let label = frame.label;
                self.release_stack_locations_keep_stack_offset(frame_depth)?;
                self.machine.jmp_unconditional(label)?;
                self.unreachable_depth = 1;
            }
            // Unconditional branch to an enclosing frame. Loop targets get
            // the branch values stored into the loop's phi/param slots;
            // block/if targets get them stored into the return slots.
            Operator::Br { relative_depth } => {
                let frame =
                    &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
                if !frame.return_types.is_empty() {
                    if matches!(frame.state, ControlState::Loop) {
                        self.emit_loop_params_store(
                            frame.value_stack_depth_after(),
                            frame.param_types.len(),
                        )?;
                    } else {
                        self.emit_return_values(
                            frame.value_stack_depth_after(),
                            frame.return_types.len(),
                        )?;
                    }
                }
                let stack_len = self.control_stack.len();
                let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
                let frame_depth = frame.value_stack_depth_for_release();
                let label = frame.label;

                // Keep the stack offset: the fall-through path (now dead
                // here, but live for other branches) still owns those slots.
                self.release_stack_locations_keep_stack_offset(frame_depth)?;
                self.machine.jmp_unconditional(label)?;
                self.unreachable_depth = 1;
            }
            Operator::BrIf { relative_depth } => {
                // Fall through to `after` when the popped condition is zero;
                // otherwise perform the same sequence as `Br`.
                let after = self.machine.get_label();
                let cond = self.pop_value_released()?.0;
                self.machine.jmp_on_condition(
                    UnsignedCondition::Equal,
                    Size::S32,
                    cond,
                    Location::Imm32(0),
                    after,
                )?;

                let frame =
                    &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
                if !frame.return_types.is_empty() {
                    // Loops take their params at the header; other frames take
                    // branch results (same rule as in `Br`).
                    if matches!(frame.state, ControlState::Loop) {
                        self.emit_loop_params_store(
                            frame.value_stack_depth_after(),
                            frame.param_types.len(),
                        )?;
                    } else {
                        self.emit_return_values(
                            frame.value_stack_depth_after(),
                            frame.return_types.len(),
                        )?;
                    }
                }
                let stack_len = self.control_stack.len();
                let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
                let stack_depth = frame.value_stack_depth_for_release();
                let label = frame.label;
                // Release without moving the stack offset: the not-taken path
                // continues at `after` with the value stack intact.
                self.release_stack_locations_keep_stack_offset(stack_depth)?;
                self.machine.jmp_unconditional(label)?;

                self.machine.emit_label(after)?;
            }
            Operator::BrTable { ref targets } => {
                // Decode the table's branch targets up front; a malformed
                // table surfaces as a codegen error.
                let default_target = targets.default();
                let targets = targets
                    .targets()
                    .collect::<Result<Vec<_>, _>>()
                    .map_err(|e| CompileError::Codegen(format!("BrTable read_table: {e:?}")))?;
                let cond = self.pop_value_released()?.0;
                let table_label = self.machine.get_label();
                let mut table: Vec<Label> = vec![];
                let default_br = self.machine.get_label();
                // Out-of-range index takes the default branch.
                self.machine.jmp_on_condition(
                    UnsignedCondition::AboveEqual,
                    Size::S32,
                    cond,
                    Location::Imm32(targets.len() as u32),
                    default_br,
                )?;

                // Indexed jump into the jump table emitted at `table_label`.
                self.machine.emit_jmp_to_jumptable(table_label, cond)?;

                // One trampoline per target: move branch values, release the
                // frame's stack slots, then jump to the frame label. Each
                // trampoline's entry label is collected into `table`.
                for target in targets.iter() {
                    let label = self.machine.get_label();
                    self.machine.emit_label(label)?;
                    table.push(label);
                    let frame =
                        &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
                    if !frame.return_types.is_empty() {
                        // Same loop-params vs. results rule as `Br`.
                        if matches!(frame.state, ControlState::Loop) {
                            self.emit_loop_params_store(
                                frame.value_stack_depth_after(),
                                frame.param_types.len(),
                            )?;
                        } else {
                            self.emit_return_values(
                                frame.value_stack_depth_after(),
                                frame.return_types.len(),
                            )?;
                        }
                    }
                    let frame =
                        &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
                    let stack_depth = frame.value_stack_depth_for_release();
                    let label = frame.label;
                    self.release_stack_locations_keep_stack_offset(stack_depth)?;
                    self.machine.jmp_unconditional(label)?;
                }
                self.machine.emit_label(default_br)?;

                // Default branch: identical sequence for the default target.
                {
                    let frame = &self.control_stack
                        [self.control_stack.len() - 1 - (default_target as usize)];
                    if !frame.return_types.is_empty() {
                        if matches!(frame.state, ControlState::Loop) {
                            self.emit_loop_params_store(
                                frame.value_stack_depth_after(),
                                frame.param_types.len(),
                            )?;
                        } else {
                            self.emit_return_values(
                                frame.value_stack_depth_after(),
                                frame.return_types.len(),
                            )?;
                        }
                    }
                    let frame = &self.control_stack
                        [self.control_stack.len() - 1 - (default_target as usize)];
                    let stack_depth = frame.value_stack_depth_for_release();
                    let label = frame.label;
                    self.release_stack_locations_keep_stack_offset(stack_depth)?;
                    self.machine.jmp_unconditional(label)?;
                }

                // The jump table proper: one unconditional jump per entry,
                // targeted by the indexed jump above.
                self.machine.emit_label(table_label)?;
                for x in table {
                    self.machine.jmp_unconditional(x)?;
                }
                self.unreachable_depth = 1;
            }
            Operator::Drop => {
                // Pop the top of the value stack and free its location.
                self.pop_value_released()?;
            }
            Operator::End => {
                let frame = self.control_stack.pop().unwrap();

                // Only materialize results when this `end` is actually
                // reachable; dead code leaves nothing meaningful on the stack.
                if !was_unreachable && !frame.return_types.is_empty() {
                    self.emit_return_values(
                        frame.value_stack_depth_after(),
                        frame.return_types.len(),
                    )?;
                }

                if self.control_stack.is_empty() {
                    // Function-level `end`: bind the exit label, restore
                    // locals/callee-saved state, and emit the epilogue.
                    self.machine.emit_label(frame.label)?;
                    self.finalize_locals(self.calling_convention)?;
                    self.machine.emit_function_epilog()?;

                    // A single float result needs the extra float-return move
                    // (ABI-specific; only when the sole result is F32/F64).
                    if let Ok(&return_type) = self.signature.results().iter().exactly_one()
                        && (return_type == Type::F32 || return_type == Type::F64)
                    {
                        self.machine.emit_function_return_float()?;
                    }
                    self.machine.emit_ret()?;
                } else {
                    // Inner block `end`: release and truncate the value stack
                    // back to the frame's entry depth.
                    let released = &self.value_stack.clone()[frame.value_stack_depth_after()..];
                    self.release_locations(released)?;
                    self.value_stack.truncate(frame.value_stack_depth_after());

                    // A loop's label sits at its header, not at `end`, so it
                    // is not re-emitted here.
                    if !matches!(frame.state, ControlState::Loop) {
                        self.machine.emit_label(frame.label)?;
                    }

                    // An `if` without a taken `else` still needs its else
                    // label bound so the false branch lands here.
                    if let ControlState::If { label_else, .. } = frame.state {
                        self.machine.emit_label(label_else)?;
                    }

                }
            }
            Operator::AtomicFence => {
                // Full memory fence; ordering semantics are delegated to the
                // target machine implementation.
                self.machine.emit_memory_fence()?;
            }
            // --- i32 atomic loads ---
            // Common shape: pop the address, acquire an i32 result slot, push
            // it, then emit the access through `op_memory`, which supplies the
            // memory-instance parameters (bounds-check mode, imported-memory
            // flag, base offset, and the OOB/unaligned trap codes).
            Operator::I32AtomicLoad { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 8-bit load, zero-extended to i32.
            Operator::I32AtomicLoad8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 16-bit load, zero-extended to i32.
            Operator::I32AtomicLoad16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- i32 atomic stores ---
            // Pop order matters: the value is on top of the stack, the address
            // beneath it. No result is pushed.
            Operator::I32AtomicStore { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 8-bit store (low byte of the i32 value).
            Operator::I32AtomicStore8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // 16-bit store (low half of the i32 value).
            Operator::I32AtomicStore16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- i64 atomic loads ---
            // Same shape as the i32 loads, with an i64 result slot; the
            // narrow variants (8/16/32) zero-extend into it.
            Operator::I64AtomicLoad { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicLoad8U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load_8u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicLoad16U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load_16u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicLoad32U { ref memarg } => {
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_load_32u(
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- i64 atomic stores ---
            // Value popped first (stack top), then address; narrow variants
            // store only the low 8/16/32 bits. No result is pushed.
            Operator::I64AtomicStore { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicStore8 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save_8(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicStore16 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save_16(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicStore32 { ref memarg } => {
                let target_value = self.pop_value_released()?.0;
                let target_addr = self.pop_value_released()?.0;
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_save_32(
                            target_value,
                            memarg,
                            target_addr,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- atomic read-modify-write: add ---
            // Common RMW shape: pop the operand (`loc`), then the address
            // (`target`); acquire a result slot of the operator's width, push
            // it, and emit through `op_memory`. The result is the value read
            // from memory *before* the modification; narrow (`8u`/`16u`/`32u`)
            // variants zero-extend it.
            Operator::I32AtomicRmwAdd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwAdd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_add_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32AddU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_add_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- atomic read-modify-write: sub ---
            // Same pop/acquire/push/op_memory shape as the `add` group above.
            Operator::I32AtomicRmwSub { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_sub(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwSub { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_sub_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_sub_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32SubU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_sub_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- atomic read-modify-write: and ---
            // Same pop/acquire/push/op_memory shape as the `add` group above.
            Operator::I32AtomicRmwAnd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_and(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwAnd { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_and_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_and_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32AndU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_and_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- atomic read-modify-write: or ---
            // Same pop/acquire/push/op_memory shape as the `add` group above.
            Operator::I32AtomicRmwOr { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_or(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwOr { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_or_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_or_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32OrU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_or_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            // --- atomic read-modify-write: xor ---
            // Same pop/acquire/push/op_memory shape as the `add` group above.
            Operator::I32AtomicRmwXor { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_xor(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmwXor { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw8XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_xor_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I32AtomicRmw16XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I32)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i32_atomic_xor_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw8XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor_8u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw16XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor_16u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
            Operator::I64AtomicRmw32XorU { ref memarg } => {
                let loc = self.pop_value_released()?.0;
                let target = self.pop_value_released()?.0;
                let ret = self.acquire_location(&WpType::I64)?;
                self.value_stack.push((ret, CanonicalizeType::None));
                self.op_memory(
                    |this,
                     need_check,
                     imported_memories,
                     offset,
                     heap_access_oob,
                     unaligned_atomic| {
                        this.machine.i64_atomic_xor_32u(
                            loc,
                            target,
                            memarg,
                            ret,
                            need_check,
                            imported_memories,
                            offset,
                            heap_access_oob,
                            unaligned_atomic,
                        )
                    },
                )?;
            }
4881 Operator::I32AtomicRmwXchg { ref memarg } => {
4882 let loc = self.pop_value_released()?.0;
4883 let target = self.pop_value_released()?.0;
4884 let ret = self.acquire_location(&WpType::I32)?;
4885 self.value_stack.push((ret, CanonicalizeType::None));
4886 self.op_memory(
4887 |this,
4888 need_check,
4889 imported_memories,
4890 offset,
4891 heap_access_oob,
4892 unaligned_atomic| {
4893 this.machine.i32_atomic_xchg(
4894 loc,
4895 target,
4896 memarg,
4897 ret,
4898 need_check,
4899 imported_memories,
4900 offset,
4901 heap_access_oob,
4902 unaligned_atomic,
4903 )
4904 },
4905 )?;
4906 }
4907 Operator::I64AtomicRmwXchg { ref memarg } => {
4908 let loc = self.pop_value_released()?.0;
4909 let target = self.pop_value_released()?.0;
4910 let ret = self.acquire_location(&WpType::I64)?;
4911 self.value_stack.push((ret, CanonicalizeType::None));
4912 self.op_memory(
4913 |this,
4914 need_check,
4915 imported_memories,
4916 offset,
4917 heap_access_oob,
4918 unaligned_atomic| {
4919 this.machine.i64_atomic_xchg(
4920 loc,
4921 target,
4922 memarg,
4923 ret,
4924 need_check,
4925 imported_memories,
4926 offset,
4927 heap_access_oob,
4928 unaligned_atomic,
4929 )
4930 },
4931 )?;
4932 }
4933 Operator::I32AtomicRmw8XchgU { ref memarg } => {
4934 let loc = self.pop_value_released()?.0;
4935 let target = self.pop_value_released()?.0;
4936 let ret = self.acquire_location(&WpType::I32)?;
4937 self.value_stack.push((ret, CanonicalizeType::None));
4938 self.op_memory(
4939 |this,
4940 need_check,
4941 imported_memories,
4942 offset,
4943 heap_access_oob,
4944 unaligned_atomic| {
4945 this.machine.i32_atomic_xchg_8u(
4946 loc,
4947 target,
4948 memarg,
4949 ret,
4950 need_check,
4951 imported_memories,
4952 offset,
4953 heap_access_oob,
4954 unaligned_atomic,
4955 )
4956 },
4957 )?;
4958 }
4959 Operator::I32AtomicRmw16XchgU { ref memarg } => {
4960 let loc = self.pop_value_released()?.0;
4961 let target = self.pop_value_released()?.0;
4962 let ret = self.acquire_location(&WpType::I32)?;
4963 self.value_stack.push((ret, CanonicalizeType::None));
4964 self.op_memory(
4965 |this,
4966 need_check,
4967 imported_memories,
4968 offset,
4969 heap_access_oob,
4970 unaligned_atomic| {
4971 this.machine.i32_atomic_xchg_16u(
4972 loc,
4973 target,
4974 memarg,
4975 ret,
4976 need_check,
4977 imported_memories,
4978 offset,
4979 heap_access_oob,
4980 unaligned_atomic,
4981 )
4982 },
4983 )?;
4984 }
4985 Operator::I64AtomicRmw8XchgU { ref memarg } => {
4986 let loc = self.pop_value_released()?.0;
4987 let target = self.pop_value_released()?.0;
4988 let ret = self.acquire_location(&WpType::I64)?;
4989 self.value_stack.push((ret, CanonicalizeType::None));
4990 self.op_memory(
4991 |this,
4992 need_check,
4993 imported_memories,
4994 offset,
4995 heap_access_oob,
4996 unaligned_atomic| {
4997 this.machine.i64_atomic_xchg_8u(
4998 loc,
4999 target,
5000 memarg,
5001 ret,
5002 need_check,
5003 imported_memories,
5004 offset,
5005 heap_access_oob,
5006 unaligned_atomic,
5007 )
5008 },
5009 )?;
5010 }
5011 Operator::I64AtomicRmw16XchgU { ref memarg } => {
5012 let loc = self.pop_value_released()?.0;
5013 let target = self.pop_value_released()?.0;
5014 let ret = self.acquire_location(&WpType::I64)?;
5015 self.value_stack.push((ret, CanonicalizeType::None));
5016 self.op_memory(
5017 |this,
5018 need_check,
5019 imported_memories,
5020 offset,
5021 heap_access_oob,
5022 unaligned_atomic| {
5023 this.machine.i64_atomic_xchg_16u(
5024 loc,
5025 target,
5026 memarg,
5027 ret,
5028 need_check,
5029 imported_memories,
5030 offset,
5031 heap_access_oob,
5032 unaligned_atomic,
5033 )
5034 },
5035 )?;
5036 }
5037 Operator::I64AtomicRmw32XchgU { ref memarg } => {
5038 let loc = self.pop_value_released()?.0;
5039 let target = self.pop_value_released()?.0;
5040 let ret = self.acquire_location(&WpType::I64)?;
5041 self.value_stack.push((ret, CanonicalizeType::None));
5042 self.op_memory(
5043 |this,
5044 need_check,
5045 imported_memories,
5046 offset,
5047 heap_access_oob,
5048 unaligned_atomic| {
5049 this.machine.i64_atomic_xchg_32u(
5050 loc,
5051 target,
5052 memarg,
5053 ret,
5054 need_check,
5055 imported_memories,
5056 offset,
5057 heap_access_oob,
5058 unaligned_atomic,
5059 )
5060 },
5061 )?;
5062 }
5063 Operator::I32AtomicRmwCmpxchg { ref memarg } => {
5064 let new = self.pop_value_released()?.0;
5065 let cmp = self.pop_value_released()?.0;
5066 let target = self.pop_value_released()?.0;
5067 let ret = self.acquire_location(&WpType::I32)?;
5068 self.value_stack.push((ret, CanonicalizeType::None));
5069 self.op_memory(
5070 |this,
5071 need_check,
5072 imported_memories,
5073 offset,
5074 heap_access_oob,
5075 unaligned_atomic| {
5076 this.machine.i32_atomic_cmpxchg(
5077 new,
5078 cmp,
5079 target,
5080 memarg,
5081 ret,
5082 need_check,
5083 imported_memories,
5084 offset,
5085 heap_access_oob,
5086 unaligned_atomic,
5087 )
5088 },
5089 )?;
5090 }
5091 Operator::I64AtomicRmwCmpxchg { ref memarg } => {
5092 let new = self.pop_value_released()?.0;
5093 let cmp = self.pop_value_released()?.0;
5094 let target = self.pop_value_released()?.0;
5095 let ret = self.acquire_location(&WpType::I64)?;
5096 self.value_stack.push((ret, CanonicalizeType::None));
5097 self.op_memory(
5098 |this,
5099 need_check,
5100 imported_memories,
5101 offset,
5102 heap_access_oob,
5103 unaligned_atomic| {
5104 this.machine.i64_atomic_cmpxchg(
5105 new,
5106 cmp,
5107 target,
5108 memarg,
5109 ret,
5110 need_check,
5111 imported_memories,
5112 offset,
5113 heap_access_oob,
5114 unaligned_atomic,
5115 )
5116 },
5117 )?;
5118 }
5119 Operator::I32AtomicRmw8CmpxchgU { ref memarg } => {
5120 let new = self.pop_value_released()?.0;
5121 let cmp = self.pop_value_released()?.0;
5122 let target = self.pop_value_released()?.0;
5123 let ret = self.acquire_location(&WpType::I32)?;
5124 self.value_stack.push((ret, CanonicalizeType::None));
5125 self.op_memory(
5126 |this,
5127 need_check,
5128 imported_memories,
5129 offset,
5130 heap_access_oob,
5131 unaligned_atomic| {
5132 this.machine.i32_atomic_cmpxchg_8u(
5133 new,
5134 cmp,
5135 target,
5136 memarg,
5137 ret,
5138 need_check,
5139 imported_memories,
5140 offset,
5141 heap_access_oob,
5142 unaligned_atomic,
5143 )
5144 },
5145 )?;
5146 }
5147 Operator::I32AtomicRmw16CmpxchgU { ref memarg } => {
5148 let new = self.pop_value_released()?.0;
5149 let cmp = self.pop_value_released()?.0;
5150 let target = self.pop_value_released()?.0;
5151 let ret = self.acquire_location(&WpType::I32)?;
5152 self.value_stack.push((ret, CanonicalizeType::None));
5153 self.op_memory(
5154 |this,
5155 need_check,
5156 imported_memories,
5157 offset,
5158 heap_access_oob,
5159 unaligned_atomic| {
5160 this.machine.i32_atomic_cmpxchg_16u(
5161 new,
5162 cmp,
5163 target,
5164 memarg,
5165 ret,
5166 need_check,
5167 imported_memories,
5168 offset,
5169 heap_access_oob,
5170 unaligned_atomic,
5171 )
5172 },
5173 )?;
5174 }
5175 Operator::I64AtomicRmw8CmpxchgU { ref memarg } => {
5176 let new = self.pop_value_released()?.0;
5177 let cmp = self.pop_value_released()?.0;
5178 let target = self.pop_value_released()?.0;
5179 let ret = self.acquire_location(&WpType::I64)?;
5180 self.value_stack.push((ret, CanonicalizeType::None));
5181 self.op_memory(
5182 |this,
5183 need_check,
5184 imported_memories,
5185 offset,
5186 heap_access_oob,
5187 unaligned_atomic| {
5188 this.machine.i64_atomic_cmpxchg_8u(
5189 new,
5190 cmp,
5191 target,
5192 memarg,
5193 ret,
5194 need_check,
5195 imported_memories,
5196 offset,
5197 heap_access_oob,
5198 unaligned_atomic,
5199 )
5200 },
5201 )?;
5202 }
5203 Operator::I64AtomicRmw16CmpxchgU { ref memarg } => {
5204 let new = self.pop_value_released()?.0;
5205 let cmp = self.pop_value_released()?.0;
5206 let target = self.pop_value_released()?.0;
5207 let ret = self.acquire_location(&WpType::I64)?;
5208 self.value_stack.push((ret, CanonicalizeType::None));
5209 self.op_memory(
5210 |this,
5211 need_check,
5212 imported_memories,
5213 offset,
5214 heap_access_oob,
5215 unaligned_atomic| {
5216 this.machine.i64_atomic_cmpxchg_16u(
5217 new,
5218 cmp,
5219 target,
5220 memarg,
5221 ret,
5222 need_check,
5223 imported_memories,
5224 offset,
5225 heap_access_oob,
5226 unaligned_atomic,
5227 )
5228 },
5229 )?;
5230 }
5231 Operator::I64AtomicRmw32CmpxchgU { ref memarg } => {
5232 let new = self.pop_value_released()?.0;
5233 let cmp = self.pop_value_released()?.0;
5234 let target = self.pop_value_released()?.0;
5235 let ret = self.acquire_location(&WpType::I64)?;
5236 self.value_stack.push((ret, CanonicalizeType::None));
5237 self.op_memory(
5238 |this,
5239 need_check,
5240 imported_memories,
5241 offset,
5242 heap_access_oob,
5243 unaligned_atomic| {
5244 this.machine.i64_atomic_cmpxchg_32u(
5245 new,
5246 cmp,
5247 target,
5248 memarg,
5249 ret,
5250 need_check,
5251 imported_memories,
5252 offset,
5253 heap_access_oob,
5254 unaligned_atomic,
5255 )
5256 },
5257 )?;
5258 }
5259
5260 Operator::RefNull { .. } => {
5261 self.value_stack
5262 .push((Location::Imm64(0), CanonicalizeType::None));
5263 }
5264 Operator::RefFunc { function_index } => {
5265 self.machine.move_location(
5266 Size::S64,
5267 Location::Memory(
5268 self.machine.get_vmctx_reg(),
5269 self.vmoffsets
5270 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_func_ref_index())
5271 as i32,
5272 ),
5273 Location::GPR(self.machine.get_gpr_for_call()),
5274 )?;
5275
5276 self.emit_call_native(
5277 |this| {
5278 this.machine
5279 .emit_call_register(this.machine.get_gpr_for_call())
5280 },
5281 iter::once((
5283 Location::Imm32(function_index as u32),
5284 CanonicalizeType::None,
5285 )),
5286 iter::once(WpType::I64),
5287 iter::once(WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap())),
5288 NativeCallType::IncludeVMCtxArgument,
5289 )?;
5290 }
5291 Operator::RefIsNull => {
5292 let loc_a = self.pop_value_released()?.0;
5293 let ret = self.acquire_location(&WpType::I32)?;
5294 self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
5295 self.value_stack.push((ret, CanonicalizeType::None));
5296 }
5297 Operator::TableSet { table: index } => {
5298 let table_index = TableIndex::new(index as _);
5299 let value = self.value_stack.pop().unwrap();
5300 let index = self.value_stack.pop().unwrap();
5301
5302 self.machine.move_location(
5303 Size::S64,
5304 Location::Memory(
5305 self.machine.get_vmctx_reg(),
5306 self.vmoffsets.vmctx_builtin_function(
5307 if self.module.local_table_index(table_index).is_some() {
5308 VMBuiltinFunctionIndex::get_table_set_index()
5309 } else {
5310 VMBuiltinFunctionIndex::get_imported_table_set_index()
5311 },
5312 ) as i32,
5313 ),
5314 Location::GPR(self.machine.get_gpr_for_call()),
5315 )?;
5316
5317 self.emit_call_native(
5318 |this| {
5319 this.machine
5320 .emit_call_register(this.machine.get_gpr_for_call())
5321 },
5322 [
5324 (
5325 Location::Imm32(table_index.index() as u32),
5326 CanonicalizeType::None,
5327 ),
5328 index,
5329 value,
5330 ]
5331 .iter()
5332 .cloned(),
5333 [WpType::I32, WpType::I64, WpType::I64].iter().cloned(),
5334 iter::empty(),
5335 NativeCallType::IncludeVMCtxArgument,
5336 )?;
5337 }
5338 Operator::TableGet { table: index } => {
5339 let table_index = TableIndex::new(index as _);
5340 let index = self.value_stack.pop().unwrap();
5341
5342 self.machine.move_location(
5343 Size::S64,
5344 Location::Memory(
5345 self.machine.get_vmctx_reg(),
5346 self.vmoffsets.vmctx_builtin_function(
5347 if self.module.local_table_index(table_index).is_some() {
5348 VMBuiltinFunctionIndex::get_table_get_index()
5349 } else {
5350 VMBuiltinFunctionIndex::get_imported_table_get_index()
5351 },
5352 ) as i32,
5353 ),
5354 Location::GPR(self.machine.get_gpr_for_call()),
5355 )?;
5356
5357 self.emit_call_native(
5358 |this| {
5359 this.machine
5360 .emit_call_register(this.machine.get_gpr_for_call())
5361 },
5362 [
5364 (
5365 Location::Imm32(table_index.index() as u32),
5366 CanonicalizeType::None,
5367 ),
5368 index,
5369 ]
5370 .iter()
5371 .cloned(),
5372 [WpType::I32, WpType::I64].iter().cloned(),
5373 iter::once(WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap())),
5374 NativeCallType::IncludeVMCtxArgument,
5375 )?;
5376 }
5377 Operator::TableSize { table: index } => {
5378 let table_index = TableIndex::new(index as _);
5379
5380 self.machine.move_location(
5381 Size::S64,
5382 Location::Memory(
5383 self.machine.get_vmctx_reg(),
5384 self.vmoffsets.vmctx_builtin_function(
5385 if self.module.local_table_index(table_index).is_some() {
5386 VMBuiltinFunctionIndex::get_table_size_index()
5387 } else {
5388 VMBuiltinFunctionIndex::get_imported_table_size_index()
5389 },
5390 ) as i32,
5391 ),
5392 Location::GPR(self.machine.get_gpr_for_call()),
5393 )?;
5394
5395 self.emit_call_native(
5396 |this| {
5397 this.machine
5398 .emit_call_register(this.machine.get_gpr_for_call())
5399 },
5400 iter::once((
5402 Location::Imm32(table_index.index() as u32),
5403 CanonicalizeType::None,
5404 )),
5405 iter::once(WpType::I32),
5406 iter::once(WpType::I32),
5407 NativeCallType::IncludeVMCtxArgument,
5408 )?;
5409 }
5410 Operator::TableGrow { table: index } => {
5411 let table_index = TableIndex::new(index as _);
5412 let delta = self.value_stack.pop().unwrap();
5413 let init_value = self.value_stack.pop().unwrap();
5414
5415 self.machine.move_location(
5416 Size::S64,
5417 Location::Memory(
5418 self.machine.get_vmctx_reg(),
5419 self.vmoffsets.vmctx_builtin_function(
5420 if self.module.local_table_index(table_index).is_some() {
5421 VMBuiltinFunctionIndex::get_table_grow_index()
5422 } else {
5423 VMBuiltinFunctionIndex::get_imported_table_grow_index()
5424 },
5425 ) as i32,
5426 ),
5427 Location::GPR(self.machine.get_gpr_for_call()),
5428 )?;
5429
5430 self.emit_call_native(
5431 |this| {
5432 this.machine
5433 .emit_call_register(this.machine.get_gpr_for_call())
5434 },
5435 [
5437 init_value,
5438 delta,
5439 (
5440 Location::Imm32(table_index.index() as u32),
5441 CanonicalizeType::None,
5442 ),
5443 ]
5444 .iter()
5445 .cloned(),
5446 [WpType::I64, WpType::I64, WpType::I64].iter().cloned(),
5447 iter::once(WpType::I32),
5448 NativeCallType::IncludeVMCtxArgument,
5449 )?;
5450 }
5451 Operator::TableCopy {
5452 dst_table,
5453 src_table,
5454 } => {
5455 let len = self.value_stack.pop().unwrap();
5456 let src = self.value_stack.pop().unwrap();
5457 let dest = self.value_stack.pop().unwrap();
5458
5459 self.machine.move_location(
5460 Size::S64,
5461 Location::Memory(
5462 self.machine.get_vmctx_reg(),
5463 self.vmoffsets
5464 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_copy_index())
5465 as i32,
5466 ),
5467 Location::GPR(self.machine.get_gpr_for_call()),
5468 )?;
5469
5470 self.emit_call_native(
5471 |this| {
5472 this.machine
5473 .emit_call_register(this.machine.get_gpr_for_call())
5474 },
5475 [
5477 (Location::Imm32(dst_table), CanonicalizeType::None),
5478 (Location::Imm32(src_table), CanonicalizeType::None),
5479 dest,
5480 src,
5481 len,
5482 ]
5483 .iter()
5484 .cloned(),
5485 [
5486 WpType::I32,
5487 WpType::I32,
5488 WpType::I64,
5489 WpType::I64,
5490 WpType::I64,
5491 ]
5492 .iter()
5493 .cloned(),
5494 iter::empty(),
5495 NativeCallType::IncludeVMCtxArgument,
5496 )?;
5497 }
5498
5499 Operator::TableFill { table } => {
5500 let len = self.value_stack.pop().unwrap();
5501 let val = self.value_stack.pop().unwrap();
5502 let dest = self.value_stack.pop().unwrap();
5503
5504 self.machine.move_location(
5505 Size::S64,
5506 Location::Memory(
5507 self.machine.get_vmctx_reg(),
5508 self.vmoffsets
5509 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_fill_index())
5510 as i32,
5511 ),
5512 Location::GPR(self.machine.get_gpr_for_call()),
5513 )?;
5514
5515 self.emit_call_native(
5516 |this| {
5517 this.machine
5518 .emit_call_register(this.machine.get_gpr_for_call())
5519 },
5520 [
5522 (Location::Imm32(table), CanonicalizeType::None),
5523 dest,
5524 val,
5525 len,
5526 ]
5527 .iter()
5528 .cloned(),
5529 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
5530 .iter()
5531 .cloned(),
5532 iter::empty(),
5533 NativeCallType::IncludeVMCtxArgument,
5534 )?;
5535 }
5536 Operator::TableInit { elem_index, table } => {
5537 let len = self.value_stack.pop().unwrap();
5538 let src = self.value_stack.pop().unwrap();
5539 let dest = self.value_stack.pop().unwrap();
5540
5541 self.machine.move_location(
5542 Size::S64,
5543 Location::Memory(
5544 self.machine.get_vmctx_reg(),
5545 self.vmoffsets
5546 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_init_index())
5547 as i32,
5548 ),
5549 Location::GPR(self.machine.get_gpr_for_call()),
5550 )?;
5551
5552 self.emit_call_native(
5553 |this| {
5554 this.machine
5555 .emit_call_register(this.machine.get_gpr_for_call())
5556 },
5557 [
5559 (Location::Imm32(table), CanonicalizeType::None),
5560 (Location::Imm32(elem_index), CanonicalizeType::None),
5561 dest,
5562 src,
5563 len,
5564 ]
5565 .iter()
5566 .cloned(),
5567 [
5568 WpType::I32,
5569 WpType::I32,
5570 WpType::I64,
5571 WpType::I64,
5572 WpType::I64,
5573 ]
5574 .iter()
5575 .cloned(),
5576 iter::empty(),
5577 NativeCallType::IncludeVMCtxArgument,
5578 )?;
5579 }
5580 Operator::ElemDrop { elem_index } => {
5581 self.machine.move_location(
5582 Size::S64,
5583 Location::Memory(
5584 self.machine.get_vmctx_reg(),
5585 self.vmoffsets
5586 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_elem_drop_index())
5587 as i32,
5588 ),
5589 Location::GPR(self.machine.get_gpr_for_call()),
5590 )?;
5591
5592 self.emit_call_native(
5593 |this| {
5594 this.machine
5595 .emit_call_register(this.machine.get_gpr_for_call())
5596 },
5597 iter::once((Location::Imm32(elem_index), CanonicalizeType::None)),
5599 [WpType::I32].iter().cloned(),
5600 iter::empty(),
5601 NativeCallType::IncludeVMCtxArgument,
5602 )?;
5603 }
5604 Operator::MemoryAtomicWait32 { ref memarg } => {
5605 let timeout = self.value_stack.pop().unwrap();
5606 let val = self.value_stack.pop().unwrap();
5607 let dst = self.value_stack.pop().unwrap();
5608
5609 let memory_index = MemoryIndex::new(memarg.memory as usize);
5610 let (memory_atomic_wait32, memory_index) =
5611 if self.module.local_memory_index(memory_index).is_some() {
5612 (
5613 VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
5614 memory_index,
5615 )
5616 } else {
5617 (
5618 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
5619 memory_index,
5620 )
5621 };
5622
5623 self.machine.move_location(
5624 Size::S64,
5625 Location::Memory(
5626 self.machine.get_vmctx_reg(),
5627 self.vmoffsets.vmctx_builtin_function(memory_atomic_wait32) as i32,
5628 ),
5629 Location::GPR(self.machine.get_gpr_for_call()),
5630 )?;
5631
5632 self.emit_call_native(
5633 |this| {
5634 this.machine
5635 .emit_call_register(this.machine.get_gpr_for_call())
5636 },
5637 [
5639 (
5640 Location::Imm32(memory_index.index() as u32),
5641 CanonicalizeType::None,
5642 ),
5643 dst,
5644 val,
5645 timeout,
5646 ]
5647 .iter()
5648 .cloned(),
5649 [WpType::I32, WpType::I32, WpType::I32, WpType::I64]
5650 .iter()
5651 .cloned(),
5652 iter::once(WpType::I32),
5653 NativeCallType::IncludeVMCtxArgument,
5654 )?;
5655 }
5656 Operator::MemoryAtomicWait64 { ref memarg } => {
5657 let timeout = self.value_stack.pop().unwrap();
5658 let val = self.value_stack.pop().unwrap();
5659 let dst = self.value_stack.pop().unwrap();
5660
5661 let memory_index = MemoryIndex::new(memarg.memory as usize);
5662 let (memory_atomic_wait64, memory_index) =
5663 if self.module.local_memory_index(memory_index).is_some() {
5664 (
5665 VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
5666 memory_index,
5667 )
5668 } else {
5669 (
5670 VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
5671 memory_index,
5672 )
5673 };
5674
5675 self.machine.move_location(
5676 Size::S64,
5677 Location::Memory(
5678 self.machine.get_vmctx_reg(),
5679 self.vmoffsets.vmctx_builtin_function(memory_atomic_wait64) as i32,
5680 ),
5681 Location::GPR(self.machine.get_gpr_for_call()),
5682 )?;
5683
5684 self.emit_call_native(
5685 |this| {
5686 this.machine
5687 .emit_call_register(this.machine.get_gpr_for_call())
5688 },
5689 [
5691 (
5692 Location::Imm32(memory_index.index() as u32),
5693 CanonicalizeType::None,
5694 ),
5695 dst,
5696 val,
5697 timeout,
5698 ]
5699 .iter()
5700 .cloned(),
5701 [WpType::I32, WpType::I32, WpType::I64, WpType::I64]
5702 .iter()
5703 .cloned(),
5704 iter::once(WpType::I32),
5705 NativeCallType::IncludeVMCtxArgument,
5706 )?;
5707 }
5708 Operator::MemoryAtomicNotify { ref memarg } => {
5709 let _cnt = self.value_stack.pop().unwrap();
5710 let dst = self.value_stack.pop().unwrap();
5711
5712 let memory_index = MemoryIndex::new(memarg.memory as usize);
5713 let (memory_atomic_notify, memory_index) =
5714 if self.module.local_memory_index(memory_index).is_some() {
5715 (
5716 VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
5717 memory_index,
5718 )
5719 } else {
5720 (
5721 VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
5722 memory_index,
5723 )
5724 };
5725
5726 self.machine.move_location(
5727 Size::S64,
5728 Location::Memory(
5729 self.machine.get_vmctx_reg(),
5730 self.vmoffsets.vmctx_builtin_function(memory_atomic_notify) as i32,
5731 ),
5732 Location::GPR(self.machine.get_gpr_for_call()),
5733 )?;
5734
5735 self.emit_call_native(
5736 |this| {
5737 this.machine
5738 .emit_call_register(this.machine.get_gpr_for_call())
5739 },
5740 [
5742 (
5743 Location::Imm32(memory_index.index() as u32),
5744 CanonicalizeType::None,
5745 ),
5746 dst,
5747 ]
5748 .iter()
5749 .cloned(),
5750 [WpType::I32, WpType::I32].iter().cloned(),
5751 iter::once(WpType::I32),
5752 NativeCallType::IncludeVMCtxArgument,
5753 )?;
5754 }
5755 _ => {
5756 return Err(CompileError::Codegen(format!(
5757 "not yet implemented: {op:?}"
5758 )));
5759 }
5760 }
5761
5762 Ok(())
5763 }
5764
5765 fn add_assembly_comment(&mut self, comment: AssemblyComment) {
5766 if self.config.callbacks.is_some() {
5768 self.assembly_comments
5769 .insert(self.machine.get_offset().0, comment);
5770 }
5771 }
5772
    /// Finish code generation for this function.
    ///
    /// Emits the shared trap-handler stubs (one label + illegal-op pair per
    /// special trap), finalizes the machine's assembler, gathers unwind
    /// information (when the `unwind` feature is enabled), the address map and
    /// trap records, invokes any user-supplied callbacks with the generated
    /// bytes, and packages everything into a [`CompiledFunction`].
    ///
    /// Returns the compiled function together with an optional DWARF FDE
    /// (`Some` only for SystemV/AppleAarch64 when unwind info was generated).
    pub fn finalize(
        mut self,
        data: &FunctionBodyData,
        arch: Architecture,
    ) -> Result<(CompiledFunction, Option<UnwindFrame>), CompileError> {
        // Trap-handler table: every special label jumps here; each stub is a
        // single "illegal op" carrying the corresponding trap code. These must
        // be emitted before `finalize_function` so they are part of the body.
        self.add_assembly_comment(AssemblyComment::TrapHandlersTable);
        self.machine
            .emit_label(self.special_labels.integer_division_by_zero)?;
        self.machine
            .emit_illegal_op(TrapCode::IntegerDivisionByZero)?;

        self.machine
            .emit_label(self.special_labels.integer_overflow)?;
        self.machine.emit_illegal_op(TrapCode::IntegerOverflow)?;

        self.machine
            .emit_label(self.special_labels.heap_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::HeapAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.table_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::TableAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.indirect_call_null)?;
        self.machine.emit_illegal_op(TrapCode::IndirectCallToNull)?;

        self.machine.emit_label(self.special_labels.bad_signature)?;
        self.machine.emit_illegal_op(TrapCode::BadSignature)?;

        self.machine
            .emit_label(self.special_labels.unaligned_atomic)?;
        self.machine.emit_illegal_op(TrapCode::UnalignedAtomic)?;

        // No more instructions may be emitted after this point.
        self.machine.finalize_function()?;

        // Total length (in bytes) of the emitted body, including the stubs.
        let body_len = self.machine.assembler_get_offset().0;

        // Without the `unwind` feature these stay `None` and are never
        // mutated, hence the conditional `allow(unused_mut)`.
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut unwind_info = None;
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut fde = None;
        #[cfg(feature = "unwind")]
        match self.calling_convention {
            CallingConvention::SystemV | CallingConvention::AppleAarch64 => {
                // DWARF CFI: build an FDE addressed relative to this
                // function's symbol; the addend is the local function index.
                let unwind = self.machine.gen_dwarf_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    fde = Some(unwind.to_fde(Address::Symbol {
                        symbol: WriterRelocate::FUNCTION_SYMBOL,
                        addend: self.local_func_index.index() as _,
                    }));
                    unwind_info = Some(CompiledFunctionUnwindInfo::Dwarf);
                }
            }
            CallingConvention::WindowsFastcall => {
                // Windows x64 SEH-style unwind data, embedded directly.
                let unwind = self.machine.gen_windows_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    unwind_info = Some(CompiledFunctionUnwindInfo::WindowsX64(unwind));
                }
            }
            // Other calling conventions: no unwind info is produced.
            _ => (),
        };

        // Map emitted instruction offsets back to wasm source offsets.
        let address_map =
            get_function_address_map(self.machine.instructions_address_map(), data, body_len);
        let traps = self.machine.collect_trap_information();
        // Finalizing the assembler yields the raw machine-code bytes plus the
        // assembly comments resolved to their final offsets.
        let FinalizedAssembly {
            mut body,
            assembly_comments,
        } = self.machine.assembler_finalize(self.assembly_comments)?;
        body.shrink_to_fit();

        // Hand the generated object/asm bytes to user callbacks, if any.
        if let Some(callbacks) = self.config.callbacks.as_ref() {
            callbacks.obj_memory_buffer(
                &CompiledKind::Local(self.local_func_index, self.function_name.clone()),
                &body,
            );
            callbacks.asm_memory_buffer(
                &CompiledKind::Local(self.local_func_index, self.function_name.clone()),
                arch,
                &body,
                assembly_comments,
            )?;
        }

        Ok((
            CompiledFunction {
                body: FunctionBody { body, unwind_info },
                relocations: self.relocations.clone(),
                frame_info: CompiledFunctionFrameInfo { traps, address_map },
            },
            fde,
        ))
    }
5871 #[allow(clippy::type_complexity)]
5874 fn sort_call_movs(movs: &mut [(Location<M::GPR, M::SIMD>, M::GPR)]) {
5875 for i in 0..movs.len() {
5876 for j in (i + 1)..movs.len() {
5877 if let Location::GPR(src_gpr) = movs[j].0
5878 && src_gpr == movs[i].1
5879 {
5880 movs.swap(i, j);
5881 }
5882 }
5883 }
5884 }
5885
5886 }