1#[cfg(feature = "unwind")]
2use crate::dwarf::WriterRelocate;
3
4use crate::{
5 address_map::get_function_address_map,
6 codegen_error,
7 common_decl::*,
8 config::Singlepass,
9 location::{Location, Reg},
10 machine::{Label, Machine, MachineStackOffset, NATIVE_PAGE_SIZE, UnsignedCondition},
11 unwind::UnwindFrame,
12};
13#[cfg(feature = "unwind")]
14use gimli::write::Address;
15use smallvec::{SmallVec, smallvec};
16use std::{cmp, iter};
17
18use wasmer_compiler::{
19 FunctionBodyData,
20 types::{
21 function::{CompiledFunction, CompiledFunctionFrameInfo, FunctionBody},
22 relocation::{Relocation, RelocationTarget},
23 section::SectionIndex,
24 },
25 wasmparser::{
26 BlockType as WpTypeOrFuncType, HeapType as WpHeapType, Operator, RefType as WpRefType,
27 ValType as WpType,
28 },
29};
30
31#[cfg(feature = "unwind")]
32use wasmer_compiler::types::unwind::CompiledFunctionUnwindInfo;
33
34use wasmer_types::target::CallingConvention;
35use wasmer_types::{
36 CompileError, FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, LocalMemoryIndex,
37 MemoryIndex, MemoryStyle, ModuleInfo, SignatureIndex, TableIndex, TableStyle, TrapCode, Type,
38 VMBuiltinFunctionIndex, VMOffsets,
39 entity::{EntityRef, PrimaryMap},
40};
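
/// Single-pass code generator for one WebAssembly function body.
/// Drives a `Machine` backend while tracking the value, floating-point and control stacks.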
41pub struct FuncGen<'a, M: Machine> {
43 module: &'a ModuleInfo,
46
47 config: &'a Singlepass,
49
50 vmoffsets: &'a VMOffsets,
52
53 memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
55
56 signature: FunctionType,
60
61 locals: Vec<Location<M::GPR, M::SIMD>>,
64
65 local_types: Vec<WpType>,
67
68 value_stack: Vec<Location<M::GPR, M::SIMD>>,
70
71 fp_stack: Vec<FloatValue>,
73
74 control_stack: Vec<ControlFrame>,
76
77 stack_offset: MachineStackOffset,
78
79 save_area_offset: Option<MachineStackOffset>,
80
81 state: MachineState,
82
83 track_state: bool,
84
85 machine: M,
87
88 unreachable_depth: usize,
90
91 fsm: FunctionStateMap,
93
94 relocations: Vec<Relocation>,
96
97 special_labels: SpecialLabelSet,
99
100 calling_convention: CallingConvention,
102}
103
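/// Per-function labels for the shared trap-handling code paths.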
104struct SpecialLabelSet {
105 integer_division_by_zero: Label,
106 integer_overflow: Label,
107 heap_access_oob: Label,
108 table_access_oob: Label,
109 indirect_call_null: Label,
110 bad_signature: Label,
111 unaligned_atomic: Label,
112}
113
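/// A floating-point value on the value stack, tracking whether a NaN canonicalization is still pending for it.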
114#[derive(Copy, Clone, Debug)]
116struct FloatValue {
117 canonicalization: Option<CanonicalizeType>,
119
120 depth: usize,
122}
123
124impl FloatValue {
125 fn new(depth: usize) -> Self {
126 FloatValue {
127 canonicalization: None,
128 depth,
129 }
130 }
131
132 fn cncl_f32(depth: usize) -> Self {
133 FloatValue {
134 canonicalization: Some(CanonicalizeType::F32),
135 depth,
136 }
137 }
138
139 fn cncl_f64(depth: usize) -> Self {
140 FloatValue {
141 canonicalization: Some(CanonicalizeType::F64),
142 depth,
143 }
144 }
145
146 fn promote(self, depth: usize) -> Result<FloatValue, CompileError> {
147 let ret = FloatValue {
148 canonicalization: match self.canonicalization {
149 Some(CanonicalizeType::F32) => Some(CanonicalizeType::F64),
150 Some(CanonicalizeType::F64) => codegen_error!("cannot promote F64"),
151 None => None,
152 },
153 depth,
154 };
155 Ok(ret)
156 }
157
158 fn demote(self, depth: usize) -> Result<FloatValue, CompileError> {
159 let ret = FloatValue {
160 canonicalization: match self.canonicalization {
161 Some(CanonicalizeType::F64) => Some(CanonicalizeType::F32),
162 Some(CanonicalizeType::F32) => codegen_error!("cannot demote F32"),
163 None => None,
164 },
165 depth,
166 };
167 Ok(ret)
168 }
169}
170
171#[derive(Copy, Clone, Debug)]
174enum CanonicalizeType {
175 F32,
176 F64,
177}
178
179impl CanonicalizeType {
180 fn to_size(self) -> Size {
181 match self {
182 CanonicalizeType::F32 => Size::S32,
183 CanonicalizeType::F64 => Size::S64,
184 }
185 }
186}
187
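/// Helpers for peeking and popping stack-like vectors with descriptive errors.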
188trait PopMany<T> {
189 fn peek1(&self) -> Result<&T, CompileError>;
190 fn pop1(&mut self) -> Result<T, CompileError>;
191 fn pop2(&mut self) -> Result<(T, T), CompileError>;
192}
193
194impl<T> PopMany<T> for Vec<T> {
195 fn peek1(&self) -> Result<&T, CompileError> {
196 self.last()
197 .ok_or_else(|| CompileError::Codegen("peek1() expects at least 1 element".to_owned()))
198 }
199 fn pop1(&mut self) -> Result<T, CompileError> {
200 self.pop()
201 .ok_or_else(|| CompileError::Codegen("pop1() expects at least 1 element".to_owned()))
202 }
203 fn pop2(&mut self) -> Result<(T, T), CompileError> {
204 if self.len() < 2 {
205 return Err(CompileError::Codegen(
206 "pop2() expects at least 2 elements".to_owned(),
207 ));
208 }
209
210 let right = self.pop().unwrap();
211 let left = self.pop().unwrap();
212 Ok((left, right))
213 }
214}
215
216trait WpTypeExt {
217 fn is_float(&self) -> bool;
218}
219
220impl WpTypeExt for WpType {
221 fn is_float(&self) -> bool {
222 matches!(self, WpType::F32 | WpType::F64)
223 }
224}
225
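/// Per-frame compilation state for `block`, `loop` and `if`/`else` constructs.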
226#[derive(Debug, Clone)]
227pub struct ControlFrame {
228 pub label: Label,
229 pub loop_like: bool,
230 pub if_else: IfElseState,
231 pub returns: SmallVec<[WpType; 1]>,
232 pub value_stack_depth: usize,
233 pub fp_stack_depth: usize,
234 pub state: MachineState,
235 pub state_diff_id: usize,
236}
237
238#[derive(Debug, Copy, Clone)]
239pub enum IfElseState {
240 None,
241 If(Label),
242 Else,
243}
244
245fn type_to_wp_type(ty: Type) -> WpType {
246 match ty {
247 Type::I32 => WpType::I32,
248 Type::I64 => WpType::I64,
249 Type::F32 => WpType::F32,
250 Type::F64 => WpType::F64,
251 Type::V128 => WpType::V128,
252 Type::ExternRef => WpType::Ref(WpRefType::new(true, WpHeapType::EXTERN).unwrap()),
253 Type::FuncRef => WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap()),
254 Type::ExceptionRef => todo!(),
255 }
256}
257
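/// Input and output locations for a two-operand, one-result operation.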
258struct I2O1<R: Reg, S: Reg> {
261 loc_a: Location<R, S>,
262 loc_b: Location<R, S>,
263 ret: Location<R, S>,
264}
265
266impl<'a, M: Machine> FuncGen<'a, M> {
267 fn get_stack_offset(&self) -> usize {
268 self.stack_offset.0
269 }
270
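/// Acquires a location for each requested type, preferring free registers and falling back to fresh stack slots; optionally zeroes the acquired locations.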
271 #[allow(clippy::type_complexity)]
276 fn acquire_locations(
277 &mut self,
278 tys: &[(WpType, MachineValue)],
279 zeroed: bool,
280 ) -> Result<SmallVec<[Location<M::GPR, M::SIMD>; 1]>, CompileError> {
281 let mut ret = smallvec![];
282 let mut delta_stack_offset: usize = 0;
283
284 for (ty, mv) in tys {
285 let loc = match *ty {
286 WpType::F32 | WpType::F64 => self.machine.pick_simd().map(Location::SIMD),
287 WpType::I32 | WpType::I64 => self.machine.pick_gpr().map(Location::GPR),
288 WpType::Ref(ty) if ty.is_extern_ref() || ty.is_func_ref() => {
289 self.machine.pick_gpr().map(Location::GPR)
290 }
291 _ => codegen_error!("can't acquire location for type {:?}", ty),
292 };
293
294 let loc = if let Some(x) = loc {
295 x
296 } else {
297 self.stack_offset.0 += 8;
298 delta_stack_offset += 8;
299 self.machine.local_on_stack(self.stack_offset.0 as i32)
300 };
301 if let Location::GPR(x) = loc {
302 self.machine.reserve_gpr(x);
303 self.state.register_values[self.machine.index_from_gpr(x).0] = mv.clone();
304 } else if let Location::SIMD(x) = loc {
305 self.machine.reserve_simd(x);
306 self.state.register_values[self.machine.index_from_simd(x).0] = mv.clone();
307 } else {
308 self.state.stack_values.push(mv.clone());
309 }
310 self.state.wasm_stack.push(WasmAbstractValue::Runtime);
311 ret.push(loc);
312 }
313
314 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
315 if delta_stack_offset != 0 {
316 self.machine.adjust_stack(delta_stack_offset as u32)?;
317 }
318 if zeroed {
319 for i in 0..tys.len() {
320 self.machine.zero_location(Size::S64, ret[i])?;
321 }
322 }
323 Ok(ret)
324 }
325
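/// Releases locations in reverse order, freeing registers and restoring the stack space used by spilled slots.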
326 fn release_locations(
328 &mut self,
329 locs: &[Location<M::GPR, M::SIMD>],
330 ) -> Result<(), CompileError> {
331 let mut delta_stack_offset: usize = 0;
332
333 for loc in locs.iter().rev() {
334 match *loc {
335 Location::GPR(ref x) => {
336 self.machine.release_gpr(*x);
337 self.state.register_values[self.machine.index_from_gpr(*x).0] =
338 MachineValue::Undefined;
339 }
340 Location::SIMD(ref x) => {
341 self.machine.release_simd(*x);
342 self.state.register_values[self.machine.index_from_simd(*x).0] =
343 MachineValue::Undefined;
344 }
345 Location::Memory(y, x) => {
346 if y == self.machine.local_pointer() {
347 if x >= 0 {
348 codegen_error!("Invalid memory offset {}", x);
349 }
350 let offset = (-x) as usize;
351 if offset != self.stack_offset.0 {
352 codegen_error!(
353 "Invalid memory offset {}!={}",
354 offset,
355 self.stack_offset.0
356 );
357 }
358 self.stack_offset.0 -= 8;
359 delta_stack_offset += 8;
360 self.state
361 .stack_values
362 .pop()
363 .ok_or_else(|| CompileError::Codegen("Empty stack_value".to_owned()))?;
364 }
365 }
366 _ => {}
367 }
368 self.state
369 .wasm_stack
370 .pop()
371 .ok_or_else(|| CompileError::Codegen("Pop with wasm stack empty".to_owned()))?;
372 }
373 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
374 if delta_stack_offset != 0 {
375 self.machine.restore_stack(delta_stack_offset as u32)?;
376 }
377 Ok(())
378 }
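
/// Releases every location at or above `stack_depth` on the value stack, adjusting the machine stack accordingly.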
379 fn release_locations_value(&mut self, stack_depth: usize) -> Result<(), CompileError> {
381 let mut delta_stack_offset: usize = 0;
382 let locs: &[Location<M::GPR, M::SIMD>] = &self.value_stack[stack_depth..];
383
384 for loc in locs.iter().rev() {
385 match *loc {
386 Location::GPR(ref x) => {
387 self.machine.release_gpr(*x);
388 self.state.register_values[self.machine.index_from_gpr(*x).0] =
389 MachineValue::Undefined;
390 }
391 Location::SIMD(ref x) => {
392 self.machine.release_simd(*x);
393 self.state.register_values[self.machine.index_from_simd(*x).0] =
394 MachineValue::Undefined;
395 }
396 Location::Memory(y, x) => {
397 if y == self.machine.local_pointer() {
398 if x >= 0 {
399 codegen_error!("Invalid memory offset {}", x);
400 }
401 let offset = (-x) as usize;
402 if offset != self.stack_offset.0 {
403 codegen_error!(
404 "Invalid memory offset {}!={}",
405 offset,
406 self.stack_offset.0
407 );
408 }
409 self.stack_offset.0 -= 8;
410 delta_stack_offset += 8;
411 self.state.stack_values.pop().ok_or_else(|| {
412 CompileError::Codegen("Pop with values stack empty".to_owned())
413 })?;
414 }
415 }
416 _ => {}
417 }
418 self.state
419 .wasm_stack
420 .pop()
421 .ok_or_else(|| CompileError::Codegen("Pop with wasm stack empty".to_owned()))?;
422 }
423
424 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
425 if delta_stack_offset != 0 {
426 self.machine.adjust_stack(delta_stack_offset as u32)?;
427 }
428 Ok(())
429 }
430
431 fn release_locations_only_regs(
432 &mut self,
433 locs: &[Location<M::GPR, M::SIMD>],
434 ) -> Result<(), CompileError> {
435 for loc in locs.iter().rev() {
436 match *loc {
437 Location::GPR(ref x) => {
438 self.machine.release_gpr(*x);
439 self.state.register_values[self.machine.index_from_gpr(*x).0] =
440 MachineValue::Undefined;
441 }
442 Location::SIMD(ref x) => {
443 self.machine.release_simd(*x);
444 self.state.register_values[self.machine.index_from_simd(*x).0] =
445 MachineValue::Undefined;
446 }
447 _ => {}
448 }
449 }
451 Ok(())
452 }
453
454 fn release_locations_only_stack(
455 &mut self,
456 locs: &[Location<M::GPR, M::SIMD>],
457 ) -> Result<(), CompileError> {
458 let mut delta_stack_offset: usize = 0;
459
460 for loc in locs.iter().rev() {
461 if let Location::Memory(y, x) = *loc {
462 if y == self.machine.local_pointer() {
463 if x >= 0 {
464 codegen_error!("Invalid memory offset {}", x);
465 }
466 let offset = (-x) as usize;
467 if offset != self.stack_offset.0 {
468 codegen_error!("Invalid memory offset {}!={}", offset, self.stack_offset.0);
469 }
470 self.stack_offset.0 -= 8;
471 delta_stack_offset += 8;
472 self.state.stack_values.pop().ok_or_else(|| {
473 CompileError::Codegen("Pop on empty value stack".to_owned())
474 })?;
475 }
476 }
477 }
479
480 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
481 if delta_stack_offset != 0 {
482 self.machine.pop_stack_locals(delta_stack_offset as u32)?;
483 }
484 Ok(())
485 }
486
487 fn release_locations_only_osr_state(&mut self, n: usize) -> Result<(), CompileError> {
488 let new_length = self
489 .state
490 .wasm_stack
491 .len()
492 .checked_sub(n)
493 .expect("release_locations_only_osr_state: length underflow");
494 self.state.wasm_stack.truncate(new_length);
495 Ok(())
496 }
497
498 fn release_locations_keep_state(&mut self, stack_depth: usize) -> Result<(), CompileError> {
499 let mut delta_stack_offset: usize = 0;
500 let mut stack_offset = self.stack_offset.0;
501 let locs = &self.value_stack[stack_depth..];
502
503 for loc in locs.iter().rev() {
504 if let Location::Memory(y, x) = *loc {
505 if y == self.machine.local_pointer() {
506 if x >= 0 {
507 codegen_error!("Invalid memory offset {}", x);
508 }
509 let offset = (-x) as usize;
510 if offset != stack_offset {
511 codegen_error!("Invalid memory offset {}!={}", offset, self.stack_offset.0);
512 }
513 stack_offset -= 8;
514 delta_stack_offset += 8;
515 }
516 }
517 }
518
519 let delta_stack_offset = self.machine.round_stack_adjust(delta_stack_offset);
520 if delta_stack_offset != 0 {
521 self.machine.pop_stack_locals(delta_stack_offset as u32)?;
522 }
523 Ok(())
524 }
525
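/// Builds the function frame: reserves space for callee-saved registers, the vmctx pointer and stack-allocated locals, then copies the arguments into their local slots.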
526 #[allow(clippy::type_complexity)]
527 fn init_locals(
528 &mut self,
529 n: usize,
530 sig: FunctionType,
531 calling_convention: CallingConvention,
532 ) -> Result<Vec<Location<M::GPR, M::SIMD>>, CompileError> {
533 let num_mem_slots = (0..n)
535 .filter(|&x| self.machine.is_local_on_stack(x))
536 .count();
537
538 let mut static_area_size: usize = 0;
541
542 for i in 0..n {
545 if !self.machine.is_local_on_stack(i) {
547 static_area_size += 8;
548 }
549 }
550
551 static_area_size += 8;
553
554 static_area_size += 8 * self.machine.list_to_save(calling_convention).len();
556
557 let callee_saved_regs_size = static_area_size;
559
560 let locations: Vec<Location<M::GPR, M::SIMD>> = (0..n)
562 .map(|i| self.machine.get_local_location(i, callee_saved_regs_size))
563 .collect();
564
565 static_area_size += num_mem_slots * 8;
567
568 static_area_size = self.machine.round_stack_adjust(static_area_size);
570
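// Touch one local per native page before the bulk stack adjustment below,
// likely as a stack probe so a guard page cannot be skipped.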
571 for i in (sig.params().len()..n)
576 .step_by(NATIVE_PAGE_SIZE / 8)
577 .skip(1)
578 {
579 self.machine.zero_location(Size::S64, locations[i])?;
580 }
581
582 self.machine.adjust_stack(static_area_size as _)?;
583
584 for loc in locations.iter() {
586 if let Location::GPR(x) = *loc {
587 self.stack_offset.0 += 8;
588 self.machine.move_local(self.stack_offset.0 as i32, *loc)?;
589 self.state.stack_values.push(MachineValue::PreserveRegister(
590 self.machine.index_from_gpr(x),
591 ));
592 }
593 }
594
595 self.stack_offset.0 += 8;
597 self.machine.move_local(
598 self.stack_offset.0 as i32,
599 Location::GPR(self.machine.get_vmctx_reg()),
600 )?;
601 self.state.stack_values.push(MachineValue::PreserveRegister(
602 self.machine.index_from_gpr(self.machine.get_vmctx_reg()),
603 ));
604
605 let regs_to_save = self.machine.list_to_save(calling_convention);
607 for loc in regs_to_save.iter() {
608 self.stack_offset.0 += 8;
609 self.machine.move_local(self.stack_offset.0 as i32, *loc)?;
610 }
611
612 self.save_area_offset = Some(MachineStackOffset(self.stack_offset.0));
614
615 for (i, loc) in locations.iter().enumerate() {
617 match *loc {
618 Location::GPR(x) => {
619 self.state.register_values[self.machine.index_from_gpr(x).0] =
620 MachineValue::WasmLocal(i);
621 }
622 Location::Memory(_, _) => {
623 self.state.stack_values.push(MachineValue::WasmLocal(i));
624 }
625 _ => codegen_error!("singlpass init_local unreachable"),
626 }
627 }
628
629 let mut stack_offset: usize = 0;
633 for (i, param) in sig.params().iter().enumerate() {
634 let sz = match *param {
635 Type::I32 | Type::F32 => Size::S32,
636 Type::I64 | Type::F64 => Size::S64,
637 Type::ExternRef | Type::FuncRef => Size::S64,
638 _ => codegen_error!("singlepass init_local unimplemented"),
639 };
640 let loc = self.machine.get_call_param_location(
641 i + 1,
642 sz,
643 &mut stack_offset,
644 calling_convention,
645 );
646 self.machine
647 .move_location_extend(sz, false, loc, Size::S64, locations[i])?;
648 }
649
650 self.machine.move_location(
652 Size::S64,
653 self.machine
654 .get_simple_param_location(0, calling_convention),
655 Location::GPR(self.machine.get_vmctx_reg()),
656 )?;
657
658 let mut init_stack_loc_cnt = 0;
660 let mut last_stack_loc = Location::Memory(self.machine.local_pointer(), i32::MAX);
661 for location in locations.iter().take(n).skip(sig.params().len()) {
662 match location {
663 Location::Memory(_, _) => {
664 init_stack_loc_cnt += 1;
665 last_stack_loc = cmp::min(last_stack_loc, *location);
666 }
667 Location::GPR(_) => {
668 self.machine.zero_location(Size::S64, *location)?;
669 }
670 _ => codegen_error!("singlepass init_local unreachable"),
671 }
672 }
673 if init_stack_loc_cnt > 0 {
674 self.machine
675 .init_stack_loc(init_stack_loc_cnt, last_stack_loc)?;
676 }
677
678 self.stack_offset.0 += static_area_size - callee_saved_regs_size;
680
681 Ok(locations)
682 }
683
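/// Emits the epilogue bookkeeping: restores the save area, callee-saved registers, the vmctx register and any locals held in registers.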
684 fn finalize_locals(
685 &mut self,
686 calling_convention: CallingConvention,
687 ) -> Result<(), CompileError> {
688 self.machine
690 .restore_saved_area(self.save_area_offset.as_ref().unwrap().0 as i32)?;
691
692 let regs_to_save = self.machine.list_to_save(calling_convention);
693 for loc in regs_to_save.iter().rev() {
694 self.machine.pop_location(*loc)?;
695 }
696
697 self.machine
699 .pop_location(Location::GPR(self.machine.get_vmctx_reg()))?;
700
701 for loc in self.locals.iter().rev() {
703 if let Location::GPR(_) = *loc {
704 self.machine.pop_location(*loc)?;
705 }
706 }
707 Ok(())
708 }
709
710 pub fn set_srcloc(&mut self, offset: u32) {
712 self.machine.set_srcloc(offset);
713 }
714
715 fn get_location_released(
716 &mut self,
717 loc: Location<M::GPR, M::SIMD>,
718 ) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
719 self.release_locations(&[loc])?;
720 Ok(loc)
721 }
722
723 fn pop_value_released(&mut self) -> Result<Location<M::GPR, M::SIMD>, CompileError> {
724 let loc = self.value_stack.pop().ok_or_else(|| {
725 CompileError::Codegen("pop_value_released: value stack is empty".to_owned())
726 })?;
727 self.get_location_released(loc)
728 }
729
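/// Pops the two operands of a binary operator and acquires a location for its result.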
730 fn i2o1_prepare(&mut self, ty: WpType) -> Result<I2O1<M::GPR, M::SIMD>, CompileError> {
732 let loc_b = self.pop_value_released()?;
733 let loc_a = self.pop_value_released()?;
734 let ret = self.acquire_locations(
735 &[(ty, MachineValue::WasmStack(self.value_stack.len()))],
736 false,
737 )?[0];
738 self.value_stack.push(ret);
739 Ok(I2O1 { loc_a, loc_b, ret })
740 }
741
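/// Records the current assembler offset as a trappable point in the function state map.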
742 fn mark_trappable(&mut self) {
743 let state_diff_id = self.get_state_diff();
744 let offset = self.machine.assembler_get_offset().0;
745 self.fsm.trappable_offsets.insert(
746 offset,
747 OffsetInfo {
748 end_offset: offset + 1,
749 activate_offset: offset,
750 diff_id: state_diff_id,
751 },
752 );
753 self.fsm.wasm_offset_to_target_offset.insert(
754 self.state.wasm_inst_offset,
755 SuspendOffset::Trappable(offset),
756 );
757 }
758 fn mark_offset_trappable(&mut self, offset: usize) {
759 let state_diff_id = self.get_state_diff();
760 self.fsm.trappable_offsets.insert(
761 offset,
762 OffsetInfo {
763 end_offset: offset + 1,
764 activate_offset: offset,
765 diff_id: state_diff_id,
766 },
767 );
768 self.fsm.wasm_offset_to_target_offset.insert(
769 self.state.wasm_inst_offset,
770 SuspendOffset::Trappable(offset),
771 );
772 }
773
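/// Emits a native call sequence: spills live registers, lays out arguments per the calling convention, keeps the stack aligned, lets `cb` emit the call itself, then restores state.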
774 fn emit_call_native<
779 I: Iterator<Item = Location<M::GPR, M::SIMD>>,
780 J: Iterator<Item = WpType>,
781 F: FnOnce(&mut Self) -> Result<(), CompileError>,
782 >(
783 &mut self,
784 cb: F,
785 params: I,
786 params_type: J,
787 ) -> Result<(), CompileError> {
788 self.state.stack_values.push(MachineValue::ExplicitShadow);
790
791 let params: Vec<_> = params.collect();
792 let params_size: Vec<_> = params_type
793 .map(|x| match x {
794 WpType::F32 | WpType::I32 => Size::S32,
795 WpType::V128 => unimplemented!(),
796 _ => Size::S64,
797 })
798 .collect();
799
800 let used_gprs = self.machine.get_used_gprs();
802 let mut used_stack = self.machine.push_used_gpr(&used_gprs)?;
803 for r in used_gprs.iter() {
804 let content = self.state.register_values[self.machine.index_from_gpr(*r).0].clone();
805 if content == MachineValue::Undefined {
806 return Err(CompileError::Codegen(
807 "emit_call_native: Undefined used_gprs content".to_owned(),
808 ));
809 }
810 self.state.stack_values.push(content);
811 }
812
813 let used_simds = self.machine.get_used_simd();
815 if !used_simds.is_empty() {
816 used_stack += self.machine.push_used_simd(&used_simds)?;
817
818 for r in used_simds.iter().rev() {
819 let content =
820 self.state.register_values[self.machine.index_from_simd(*r).0].clone();
821 if content == MachineValue::Undefined {
822 return Err(CompileError::Codegen(
823 "emit_call_native: Undefined used_simds content".to_owned(),
824 ));
825 }
826 self.state.stack_values.push(content);
827 }
828 }
829 self.machine
831 .reserve_unused_temp_gpr(self.machine.get_grp_for_call());
832
833 let calling_convention = self.calling_convention;
834
835 let stack_padding: usize = match calling_convention {
836 CallingConvention::WindowsFastcall => 32,
837 _ => 0,
838 };
839
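// Compute where each argument must live for the callee; slot 0 is reserved for the
// vmctx pointer, so explicit arguments start at index 1.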
840 let mut stack_offset: usize = 0;
841 let mut args: Vec<Location<M::GPR, M::SIMD>> = vec![];
842 let mut pushed_args: usize = 0;
843 for (i, _param) in params.iter().enumerate() {
845 args.push(self.machine.get_param_location(
846 1 + i,
847 params_size[i],
848 &mut stack_offset,
849 calling_convention,
850 ));
851 }
852
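// Keep the native stack 16-byte aligned at the call site.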
853 let stack_unaligned =
855 (self.machine.round_stack_adjust(self.get_stack_offset()) + used_stack + stack_offset)
856 % 16;
857 if stack_unaligned != 0 {
858 stack_offset += 16 - stack_unaligned;
859 }
860 self.machine.adjust_stack(stack_offset as u32)?;
861
862 #[allow(clippy::type_complexity)]
863 let mut call_movs: Vec<(Location<M::GPR, M::SIMD>, M::GPR)> = vec![];
864 for (i, param) in params.iter().enumerate().rev() {
866 let loc = args[i];
867 match loc {
868 Location::GPR(x) => {
869 call_movs.push((*param, x));
870 }
871 Location::Memory(_, _) => {
872 pushed_args += 1;
873 match *param {
874 Location::GPR(x) => {
875 let content = self.state.register_values
876 [self.machine.index_from_gpr(x).0]
877 .clone();
878 self.state.stack_values.push(content);
883 }
884 Location::SIMD(x) => {
885 let content = self.state.register_values
886 [self.machine.index_from_simd(x).0]
887 .clone();
888 self.state.stack_values.push(content);
890 }
891 Location::Memory(reg, offset) => {
892 if reg != self.machine.local_pointer() {
893 return Err(CompileError::Codegen(
894 "emit_call_native loc param: unreachable code".to_owned(),
895 ));
896 }
897 self.state
898 .stack_values
899 .push(MachineValue::CopyStackBPRelative(offset));
900 }
902 _ => {
903 self.state.stack_values.push(MachineValue::Undefined);
904 }
905 }
906 self.machine
907 .move_location_for_native(params_size[i], *param, loc)?;
908 }
909 _ => {
910 return Err(CompileError::Codegen(
911 "emit_call_native loc: unreachable code".to_owned(),
912 ));
913 }
914 }
915 }
916
917 Self::sort_call_movs(&mut call_movs);
919
920 for (loc, gpr) in call_movs {
922 if loc != Location::GPR(gpr) {
923 self.machine
924 .move_location(Size::S64, loc, Location::GPR(gpr))?;
925 }
926 }
927
928 self.machine.move_location(
930 Size::S64,
931 Location::GPR(self.machine.get_vmctx_reg()),
932 self.machine
933 .get_simple_param_location(0, calling_convention),
934 )?;

if stack_padding > 0 {
937 self.machine.adjust_stack(stack_padding as u32)?;
938 }
939 self.machine.release_gpr(self.machine.get_grp_for_call());
941 cb(self)?;
942
943 {
946 let state_diff_id = self.get_state_diff();
947 let offset = self.machine.assembler_get_offset().0;
948 self.fsm.call_offsets.insert(
949 offset,
950 OffsetInfo {
951 end_offset: offset + 1,
952 activate_offset: offset,
953 diff_id: state_diff_id,
954 },
955 );
956 self.fsm
957 .wasm_offset_to_target_offset
958 .insert(self.state.wasm_inst_offset, SuspendOffset::Call(offset));
959 }
960
961 if stack_offset + stack_padding > 0 {
963 self.machine.restore_stack(
964 self.machine
965 .round_stack_adjust(stack_offset + stack_padding) as u32,
966 )?;
967 if (stack_offset % 8) != 0 {
968 return Err(CompileError::Codegen(
969 "emit_call_native: Bad restoring stack alignement".to_owned(),
970 ));
971 }
972 for _ in 0..pushed_args {
973 self.state
974 .stack_values
975 .pop()
976 .ok_or_else(|| CompileError::Codegen("Pop an empty value stack".to_owned()))?;
977 }
978 }
979
980 if !used_simds.is_empty() {
982 self.machine.pop_used_simd(&used_simds)?;
983 for _ in 0..used_simds.len() {
984 self.state
985 .stack_values
986 .pop()
987 .ok_or_else(|| CompileError::Codegen("Pop an empty value stack".to_owned()))?;
988 }
989 }
990
991 self.machine.pop_used_gpr(&used_gprs)?;
993 for _ in used_gprs.iter().rev() {
994 self.state
995 .stack_values
996 .pop()
997 .ok_or_else(|| CompileError::Codegen("Pop an empty value stack".to_owned()))?;
998 }
999
1000 if self
1001 .state
1002 .stack_values
1003 .pop()
1004 .ok_or_else(|| CompileError::Codegen("Pop an empty value stack".to_owned()))?
1005 != MachineValue::ExplicitShadow
1006 {
1007 return Err(CompileError::Codegen(
1008 "emit_call_native: Popped value is not ExplicitShadow".to_owned(),
1009 ));
1010 }
1011 Ok(())
1012 }
1013
1014 fn _emit_call_native_label<
1016 I: Iterator<Item = Location<M::GPR, M::SIMD>>,
1017 J: Iterator<Item = WpType>,
1018 >(
1019 &mut self,
1020 label: Label,
1021 params: I,
1022 params_type: J,
1023 ) -> Result<(), CompileError> {
1024 self.emit_call_native(
1025 |this| this.machine.emit_call_label(label),
1026 params,
1027 params_type,
1028 )?;
1029 Ok(())
1030 }
1031
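/// Resolves memory 0 (local or imported) and hands `cb` the bounds-check flag, whether the memory is imported, the definition offset, and the out-of-bounds / unaligned-atomic trap labels.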
1032 fn op_memory<
1034 F: FnOnce(&mut Self, bool, bool, i32, Label, Label) -> Result<(), CompileError>,
1035 >(
1036 &mut self,
1037 cb: F,
1038 ) -> Result<(), CompileError> {
1039 let need_check = match self.memory_styles[MemoryIndex::new(0)] {
1040 MemoryStyle::Static { .. } => false,
1041 MemoryStyle::Dynamic { .. } => true,
1042 };
1043
1044 let offset = if self.module.num_imported_memories != 0 {
1045 self.vmoffsets
1046 .vmctx_vmmemory_import_definition(MemoryIndex::new(0))
1047 } else {
1048 self.vmoffsets
1049 .vmctx_vmmemory_definition(LocalMemoryIndex::new(0))
1050 };
1051 cb(
1052 self,
1053 need_check,
1054 self.module.num_imported_memories != 0,
1055 offset as i32,
1056 self.special_labels.heap_access_oob,
1057 self.special_labels.unaligned_atomic,
1058 )
1059 }
1060
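/// Diffs the current machine state against the innermost control frame, records the diff in the function state map and returns its id (or `usize::MAX` when state tracking is disabled).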
1061 pub fn get_state_diff(&mut self) -> usize {
1062 if !self.track_state {
1063 return usize::MAX;
1064 }
1065 let last_frame = self.control_stack.last_mut().unwrap();
1066 let mut diff = self.state.diff(&last_frame.state);
1067 diff.last = Some(last_frame.state_diff_id);
1068 let id = self.fsm.diffs.len();
1069 last_frame.state = self.state.clone();
1070 last_frame.state_diff_id = id;
1071 self.fsm.diffs.push(diff);
1072 id
1073 }
1074
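/// Emits the function prologue, sets up locals and pushes the implicit outermost control frame.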
1075 fn emit_head(&mut self) -> Result<(), CompileError> {
1076 self.machine.emit_function_prolog()?;
1077
1078 self.locals = self.init_locals(
1080 self.local_types.len(),
1081 self.signature.clone(),
1082 self.calling_convention,
1083 )?;
1084
1085 self.state.register_values[self.machine.index_from_gpr(self.machine.get_vmctx_reg()).0] =
1087 MachineValue::Vmctx;
1088
1089 let diff = self.state.diff(&self.machine.new_machine_state());
1091 let state_diff_id = self.fsm.diffs.len();
1092 self.fsm.diffs.push(diff);
1093
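// Reserve a small scratch area below the frame (a simulated "red zone").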
1094 self.machine.adjust_stack(32)?;
1096
1097 self.control_stack.push(ControlFrame {
1098 label: self.machine.get_label(),
1099 loop_like: false,
1100 if_else: IfElseState::None,
1101 returns: self
1102 .signature
1103 .results()
1104 .iter()
1105 .map(|&x| type_to_wp_type(x))
1106 .collect(),
1107 value_stack_depth: 0,
1108 fp_stack_depth: 0,
1109 state: self.state.clone(),
1110 state_diff_id,
1111 });
1112
1113 self.machine.insert_stackoverflow();
1118
1119 if self.state.wasm_inst_offset != usize::MAX {
1120 return Err(CompileError::Codegen(
1121 "emit_head: wasm_inst_offset not usize::MAX".to_owned(),
1122 ));
1123 }
1124 Ok(())
1125 }
1126
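/// Creates the code generator for one local function and emits its head.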
1127 #[allow(clippy::too_many_arguments)]
1128 pub fn new(
1129 module: &'a ModuleInfo,
1130 config: &'a Singlepass,
1131 vmoffsets: &'a VMOffsets,
1132 memory_styles: &'a PrimaryMap<MemoryIndex, MemoryStyle>,
1133 _table_styles: &'a PrimaryMap<TableIndex, TableStyle>,
1134 local_func_index: LocalFunctionIndex,
1135 local_types_excluding_arguments: &[WpType],
1136 machine: M,
1137 calling_convention: CallingConvention,
1138 ) -> Result<FuncGen<'a, M>, CompileError> {
1139 let func_index = module.func_index(local_func_index);
1140 let sig_index = module.functions[func_index];
1141 let signature = module.signatures[sig_index].clone();
1142
1143 let mut local_types: Vec<_> = signature
1144 .params()
1145 .iter()
1146 .map(|&x| type_to_wp_type(x))
1147 .collect();
1148 local_types.extend_from_slice(local_types_excluding_arguments);
1149
1150 let mut machine = machine;
1151 let special_labels = SpecialLabelSet {
1152 integer_division_by_zero: machine.get_label(),
1153 integer_overflow: machine.get_label(),
1154 heap_access_oob: machine.get_label(),
1155 table_access_oob: machine.get_label(),
1156 indirect_call_null: machine.get_label(),
1157 bad_signature: machine.get_label(),
1158 unaligned_atomic: machine.get_label(),
1159 };
1160
1161 let fsm = FunctionStateMap::new(
1162 machine.new_machine_state(),
1163 local_func_index.index() as usize,
1164 32,
1165 (0..local_types.len())
1166 .map(|_| WasmAbstractValue::Runtime)
1167 .collect(),
1168 );
1169
1170 let mut fg = FuncGen {
1171 module,
1172 config,
1173 vmoffsets,
1174 memory_styles,
1175 signature,
1177 locals: vec![],
local_types,
1179 value_stack: vec![],
1180 fp_stack: vec![],
1181 control_stack: vec![],
1182 stack_offset: MachineStackOffset(0),
1183 save_area_offset: None,
1184 state: machine.new_machine_state(),
1185 track_state: true,
1186 machine,
1187 unreachable_depth: 0,
1188 fsm,
1189 relocations: vec![],
1190 special_labels,
1191 calling_convention,
1192 };
1193 fg.emit_head()?;
1194 Ok(fg)
1195 }
1196
1197 pub fn has_control_frames(&self) -> bool {
1198 !self.control_stack.is_empty()
1199 }
1200
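/// Translates a single WebAssembly operator. Operators inside unreachable code are skipped until the enclosing frame ends.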
1201 pub fn feed_operator(&mut self, op: Operator) -> Result<(), CompileError> {
1202 assert!(self.fp_stack.len() <= self.value_stack.len());
1203
1204 self.state.wasm_inst_offset = self.state.wasm_inst_offset.wrapping_add(1);
1205
1206 let was_unreachable;
1208
1209 if self.unreachable_depth > 0 {
1210 was_unreachable = true;
1211
1212 match op {
1213 Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } => {
1214 self.unreachable_depth += 1;
1215 }
1216 Operator::End => {
1217 self.unreachable_depth -= 1;
1218 }
1219 Operator::Else => {
1220 if self.unreachable_depth == 1 {
1222 if let Some(IfElseState::If(_)) =
1223 self.control_stack.last().map(|x| x.if_else)
1224 {
1225 self.unreachable_depth -= 1;
1226 }
1227 }
1228 }
1229 _ => {}
1230 }
1231 if self.unreachable_depth > 0 {
1232 return Ok(());
1233 }
1234 } else {
1235 was_unreachable = false;
1236 }
1237
1238 match op {
1239 Operator::GlobalGet { global_index } => {
1240 let global_index = GlobalIndex::from_u32(global_index);
1241
1242 let ty = type_to_wp_type(self.module.globals[global_index].ty);
1243 if ty.is_float() {
1244 self.fp_stack.push(FloatValue::new(self.value_stack.len()));
1245 }
1246 let loc = self.acquire_locations(
1247 &[(ty, MachineValue::WasmStack(self.value_stack.len()))],
1248 false,
1249 )?[0];
1250 self.value_stack.push(loc);
1251
1252 let tmp = self.machine.acquire_temp_gpr().unwrap();
1253
1254 let src = if let Some(local_global_index) =
1255 self.module.local_global_index(global_index)
1256 {
1257 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
1258 self.machine.emit_relaxed_mov(
1259 Size::S64,
1260 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1261 Location::GPR(tmp),
1262 )?;
1263 Location::Memory(tmp, 0)
1264 } else {
1265 let offset = self
1267 .vmoffsets
1268 .vmctx_vmglobal_import_definition(global_index);
1269 self.machine.emit_relaxed_mov(
1270 Size::S64,
1271 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1272 Location::GPR(tmp),
1273 )?;
1274 Location::Memory(tmp, 0)
1275 };
1276
1277 self.machine.emit_relaxed_mov(Size::S64, src, loc)?;
1278
1279 self.machine.release_gpr(tmp);
1280 }
1281 Operator::GlobalSet { global_index } => {
1282 let global_index = GlobalIndex::from_u32(global_index);
1283 let tmp = self.machine.acquire_temp_gpr().unwrap();
1284 let dst = if let Some(local_global_index) =
1285 self.module.local_global_index(global_index)
1286 {
1287 let offset = self.vmoffsets.vmctx_vmglobal_definition(local_global_index);
1288 self.machine.emit_relaxed_mov(
1289 Size::S64,
1290 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1291 Location::GPR(tmp),
1292 )?;
1293 Location::Memory(tmp, 0)
1294 } else {
1295 let offset = self
1297 .vmoffsets
1298 .vmctx_vmglobal_import_definition(global_index);
1299 self.machine.emit_relaxed_mov(
1300 Size::S64,
1301 Location::Memory(self.machine.get_vmctx_reg(), offset as i32),
1302 Location::GPR(tmp),
1303 )?;
1304 Location::Memory(tmp, 0)
1305 };
1306 let ty = type_to_wp_type(self.module.globals[global_index].ty);
1307 let loc = self.pop_value_released()?;
1308 if ty.is_float() {
1309 let fp = self.fp_stack.pop1()?;
1310 if self.machine.arch_supports_canonicalize_nan()
1311 && self.config.enable_nan_canonicalization
1312 && fp.canonicalization.is_some()
1313 {
1314 self.machine.canonicalize_nan(
1315 match ty {
1316 WpType::F32 => Size::S32,
1317 WpType::F64 => Size::S64,
1318 _ => codegen_error!("singlepass Operator::GlobalSet unreachable"),
1319 },
1320 loc,
1321 dst,
1322 )?;
1323 } else {
1324 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
1325 }
1326 } else {
1327 self.machine.emit_relaxed_mov(Size::S64, loc, dst)?;
1328 }
1329 self.machine.release_gpr(tmp);
1330 }
1331 Operator::LocalGet { local_index } => {
1332 let local_index = local_index as usize;
1333 let ret = self.acquire_locations(
1334 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1335 false,
1336 )?[0];
1337 self.machine
1338 .emit_relaxed_mov(Size::S64, self.locals[local_index], ret)?;
1339 self.value_stack.push(ret);
1340 if self.local_types[local_index].is_float() {
1341 self.fp_stack
1342 .push(FloatValue::new(self.value_stack.len() - 1));
1343 }
1344 }
1345 Operator::LocalSet { local_index } => {
1346 let local_index = local_index as usize;
1347 let loc = self.pop_value_released()?;
1348
1349 if self.local_types[local_index].is_float() {
1350 let fp = self.fp_stack.pop1()?;
1351 if self.machine.arch_supports_canonicalize_nan()
1352 && self.config.enable_nan_canonicalization
1353 && fp.canonicalization.is_some()
1354 {
1355 self.machine.canonicalize_nan(
1356 match self.local_types[local_index] {
1357 WpType::F32 => Size::S32,
1358 WpType::F64 => Size::S64,
1359 _ => codegen_error!("singlepass Operator::LocalSet unreachable"),
1360 },
1361 loc,
1362 self.locals[local_index],
1363 )
1364 } else {
1365 self.machine
1366 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1367 }
1368 } else {
1369 self.machine
1370 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1371 }?;
1372 }
1373 Operator::LocalTee { local_index } => {
1374 let local_index = local_index as usize;
1375 let loc = *self.value_stack.last().unwrap();
1376
1377 if self.local_types[local_index].is_float() {
1378 let fp = self.fp_stack.peek1()?;
1379 if self.machine.arch_supports_canonicalize_nan()
1380 && self.config.enable_nan_canonicalization
1381 && fp.canonicalization.is_some()
1382 {
1383 self.machine.canonicalize_nan(
1384 match self.local_types[local_index] {
1385 WpType::F32 => Size::S32,
1386 WpType::F64 => Size::S64,
1387 _ => codegen_error!("singlepass Operator::LocalTee unreachable"),
1388 },
1389 loc,
1390 self.locals[local_index],
1391 )
1392 } else {
1393 self.machine
1394 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1395 }
1396 } else {
1397 self.machine
1398 .emit_relaxed_mov(Size::S64, loc, self.locals[local_index])
1399 }?;
1400 }
1401 Operator::I32Const { value } => {
1402 self.value_stack.push(Location::Imm32(value as u32));
1403 self.state
1404 .wasm_stack
1405 .push(WasmAbstractValue::Const(value as u32 as u64));
1406 }
1407 Operator::I32Add => {
1408 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1409 self.machine.emit_binop_add32(loc_a, loc_b, ret)?;
1410 }
1411 Operator::I32Sub => {
1412 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1413 self.machine.emit_binop_sub32(loc_a, loc_b, ret)?;
1414 }
1415 Operator::I32Mul => {
1416 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1417 self.machine.emit_binop_mul32(loc_a, loc_b, ret)?;
1418 }
1419 Operator::I32DivU => {
1420 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1421 let offset = self.machine.emit_binop_udiv32(
1422 loc_a,
1423 loc_b,
1424 ret,
1425 self.special_labels.integer_division_by_zero,
1426 self.special_labels.integer_overflow,
1427 )?;
1428 self.mark_offset_trappable(offset);
1429 }
1430 Operator::I32DivS => {
1431 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1432 let offset = self.machine.emit_binop_sdiv32(
1433 loc_a,
1434 loc_b,
1435 ret,
1436 self.special_labels.integer_division_by_zero,
1437 self.special_labels.integer_overflow,
1438 )?;
1439 self.mark_offset_trappable(offset);
1440 }
1441 Operator::I32RemU => {
1442 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1443 let offset = self.machine.emit_binop_urem32(
1444 loc_a,
1445 loc_b,
1446 ret,
1447 self.special_labels.integer_division_by_zero,
1448 self.special_labels.integer_overflow,
1449 )?;
1450 self.mark_offset_trappable(offset);
1451 }
1452 Operator::I32RemS => {
1453 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1454 let offset = self.machine.emit_binop_srem32(
1455 loc_a,
1456 loc_b,
1457 ret,
1458 self.special_labels.integer_division_by_zero,
1459 self.special_labels.integer_overflow,
1460 )?;
1461 self.mark_offset_trappable(offset);
1462 }
1463 Operator::I32And => {
1464 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1465 self.machine.emit_binop_and32(loc_a, loc_b, ret)?;
1466 }
1467 Operator::I32Or => {
1468 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1469 self.machine.emit_binop_or32(loc_a, loc_b, ret)?;
1470 }
1471 Operator::I32Xor => {
1472 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1473 self.machine.emit_binop_xor32(loc_a, loc_b, ret)?;
1474 }
1475 Operator::I32Eq => {
1476 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1477 self.machine.i32_cmp_eq(loc_a, loc_b, ret)?;
1478 }
1479 Operator::I32Ne => {
1480 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1481 self.machine.i32_cmp_ne(loc_a, loc_b, ret)?;
1482 }
1483 Operator::I32Eqz => {
1484 let loc_a = self.pop_value_released()?;
1485 let ret = self.acquire_locations(
1486 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1487 false,
1488 )?[0];
1489 self.machine.i32_cmp_eq(loc_a, Location::Imm32(0), ret)?;
1490 self.value_stack.push(ret);
1491 }
1492 Operator::I32Clz => {
1493 let loc = self.pop_value_released()?;
1494 let ret = self.acquire_locations(
1495 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1496 false,
1497 )?[0];
1498 self.value_stack.push(ret);
1499 self.machine.i32_clz(loc, ret)?;
1500 }
1501 Operator::I32Ctz => {
1502 let loc = self.pop_value_released()?;
1503 let ret = self.acquire_locations(
1504 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1505 false,
1506 )?[0];
1507 self.value_stack.push(ret);
1508 self.machine.i32_ctz(loc, ret)?;
1509 }
1510 Operator::I32Popcnt => {
1511 let loc = self.pop_value_released()?;
1512 let ret = self.acquire_locations(
1513 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1514 false,
1515 )?[0];
1516 self.value_stack.push(ret);
1517 self.machine.i32_popcnt(loc, ret)?;
1518 }
1519 Operator::I32Shl => {
1520 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1521 self.machine.i32_shl(loc_a, loc_b, ret)?;
1522 }
1523 Operator::I32ShrU => {
1524 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1525 self.machine.i32_shr(loc_a, loc_b, ret)?;
1526 }
1527 Operator::I32ShrS => {
1528 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1529 self.machine.i32_sar(loc_a, loc_b, ret)?;
1530 }
1531 Operator::I32Rotl => {
1532 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1533 self.machine.i32_rol(loc_a, loc_b, ret)?;
1534 }
1535 Operator::I32Rotr => {
1536 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1537 self.machine.i32_ror(loc_a, loc_b, ret)?;
1538 }
1539 Operator::I32LtU => {
1540 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1541 self.machine.i32_cmp_lt_u(loc_a, loc_b, ret)?;
1542 }
1543 Operator::I32LeU => {
1544 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1545 self.machine.i32_cmp_le_u(loc_a, loc_b, ret)?;
1546 }
1547 Operator::I32GtU => {
1548 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1549 self.machine.i32_cmp_gt_u(loc_a, loc_b, ret)?;
1550 }
1551 Operator::I32GeU => {
1552 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1553 self.machine.i32_cmp_ge_u(loc_a, loc_b, ret)?;
1554 }
1555 Operator::I32LtS => {
1556 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1557 self.machine.i32_cmp_lt_s(loc_a, loc_b, ret)?;
1558 }
1559 Operator::I32LeS => {
1560 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1561 self.machine.i32_cmp_le_s(loc_a, loc_b, ret)?;
1562 }
1563 Operator::I32GtS => {
1564 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1565 self.machine.i32_cmp_gt_s(loc_a, loc_b, ret)?;
1566 }
1567 Operator::I32GeS => {
1568 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1569 self.machine.i32_cmp_ge_s(loc_a, loc_b, ret)?;
1570 }
1571 Operator::I64Const { value } => {
1572 let value = value as u64;
1573 self.value_stack.push(Location::Imm64(value));
1574 self.state.wasm_stack.push(WasmAbstractValue::Const(value));
1575 }
1576 Operator::I64Add => {
1577 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1578 self.machine.emit_binop_add64(loc_a, loc_b, ret)?;
1579 }
1580 Operator::I64Sub => {
1581 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1582 self.machine.emit_binop_sub64(loc_a, loc_b, ret)?;
1583 }
1584 Operator::I64Mul => {
1585 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1586 self.machine.emit_binop_mul64(loc_a, loc_b, ret)?;
1587 }
1588 Operator::I64DivU => {
1589 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1590 let offset = self.machine.emit_binop_udiv64(
1591 loc_a,
1592 loc_b,
1593 ret,
1594 self.special_labels.integer_division_by_zero,
1595 self.special_labels.integer_overflow,
1596 )?;
1597 self.mark_offset_trappable(offset);
1598 }
1599 Operator::I64DivS => {
1600 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1601 let offset = self.machine.emit_binop_sdiv64(
1602 loc_a,
1603 loc_b,
1604 ret,
1605 self.special_labels.integer_division_by_zero,
1606 self.special_labels.integer_overflow,
1607 )?;
1608 self.mark_offset_trappable(offset);
1609 }
1610 Operator::I64RemU => {
1611 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1612 let offset = self.machine.emit_binop_urem64(
1613 loc_a,
1614 loc_b,
1615 ret,
1616 self.special_labels.integer_division_by_zero,
1617 self.special_labels.integer_overflow,
1618 )?;
1619 self.mark_offset_trappable(offset);
1620 }
1621 Operator::I64RemS => {
1622 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1623 let offset = self.machine.emit_binop_srem64(
1624 loc_a,
1625 loc_b,
1626 ret,
1627 self.special_labels.integer_division_by_zero,
1628 self.special_labels.integer_overflow,
1629 )?;
1630 self.mark_offset_trappable(offset);
1631 }
1632 Operator::I64And => {
1633 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1634 self.machine.emit_binop_and64(loc_a, loc_b, ret)?;
1635 }
1636 Operator::I64Or => {
1637 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1638 self.machine.emit_binop_or64(loc_a, loc_b, ret)?;
1639 }
1640 Operator::I64Xor => {
1641 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1642 self.machine.emit_binop_xor64(loc_a, loc_b, ret)?;
1643 }
1644 Operator::I64Eq => {
1645 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1646 self.machine.i64_cmp_eq(loc_a, loc_b, ret)?;
1647 }
1648 Operator::I64Ne => {
1649 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1650 self.machine.i64_cmp_ne(loc_a, loc_b, ret)?;
1651 }
1652 Operator::I64Eqz => {
1653 let loc_a = self.pop_value_released()?;
1654 let ret = self.acquire_locations(
1655 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1656 false,
1657 )?[0];
1658 self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
1659 self.value_stack.push(ret);
1660 }
1661 Operator::I64Clz => {
1662 let loc = self.pop_value_released()?;
1663 let ret = self.acquire_locations(
1664 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1665 false,
1666 )?[0];
1667 self.value_stack.push(ret);
1668 self.machine.i64_clz(loc, ret)?;
1669 }
1670 Operator::I64Ctz => {
1671 let loc = self.pop_value_released()?;
1672 let ret = self.acquire_locations(
1673 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1674 false,
1675 )?[0];
1676 self.value_stack.push(ret);
1677 self.machine.i64_ctz(loc, ret)?;
1678 }
1679 Operator::I64Popcnt => {
1680 let loc = self.pop_value_released()?;
1681 let ret = self.acquire_locations(
1682 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1683 false,
1684 )?[0];
1685 self.value_stack.push(ret);
1686 self.machine.i64_popcnt(loc, ret)?;
1687 }
1688 Operator::I64Shl => {
1689 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1690 self.machine.i64_shl(loc_a, loc_b, ret)?;
1691 }
1692 Operator::I64ShrU => {
1693 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1694 self.machine.i64_shr(loc_a, loc_b, ret)?;
1695 }
1696 Operator::I64ShrS => {
1697 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1698 self.machine.i64_sar(loc_a, loc_b, ret)?;
1699 }
1700 Operator::I64Rotl => {
1701 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1702 self.machine.i64_rol(loc_a, loc_b, ret)?;
1703 }
1704 Operator::I64Rotr => {
1705 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1706 self.machine.i64_ror(loc_a, loc_b, ret)?;
1707 }
1708 Operator::I64LtU => {
1709 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1710 self.machine.i64_cmp_lt_u(loc_a, loc_b, ret)?;
1711 }
1712 Operator::I64LeU => {
1713 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1714 self.machine.i64_cmp_le_u(loc_a, loc_b, ret)?;
1715 }
1716 Operator::I64GtU => {
1717 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1718 self.machine.i64_cmp_gt_u(loc_a, loc_b, ret)?;
1719 }
1720 Operator::I64GeU => {
1721 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1722 self.machine.i64_cmp_ge_u(loc_a, loc_b, ret)?;
1723 }
1724 Operator::I64LtS => {
1725 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1726 self.machine.i64_cmp_lt_s(loc_a, loc_b, ret)?;
1727 }
1728 Operator::I64LeS => {
1729 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1730 self.machine.i64_cmp_le_s(loc_a, loc_b, ret)?;
1731 }
1732 Operator::I64GtS => {
1733 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1734 self.machine.i64_cmp_gt_s(loc_a, loc_b, ret)?;
1735 }
1736 Operator::I64GeS => {
1737 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I64)?;
1738 self.machine.i64_cmp_ge_s(loc_a, loc_b, ret)?;
1739 }
1740 Operator::I64ExtendI32U => {
1741 let loc = self.pop_value_released()?;
1742 let ret = self.acquire_locations(
1743 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1744 false,
1745 )?[0];
1746 self.value_stack.push(ret);
1747 self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
1748
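// A 32-bit store does not clear the upper half of the 64-bit slot, so zero the
// high 32 bits explicitly when the result lives in memory.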
1749 if let Location::Memory(base, off) = ret {
1752 self.machine.emit_relaxed_mov(
1753 Size::S32,
1754 Location::Imm32(0),
1755 Location::Memory(base, off + 4),
1756 )?;
1757 }
1758 }
1759 Operator::I64ExtendI32S => {
1760 let loc = self.pop_value_released()?;
1761 let ret = self.acquire_locations(
1762 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1763 false,
1764 )?[0];
1765 self.value_stack.push(ret);
1766 self.machine
1767 .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
1768 }
1769 Operator::I32Extend8S => {
1770 let loc = self.pop_value_released()?;
1771 let ret = self.acquire_locations(
1772 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1773 false,
1774 )?[0];
1775 self.value_stack.push(ret);
1776
1777 self.machine
1778 .emit_relaxed_sign_extension(Size::S8, loc, Size::S32, ret)?;
1779 }
1780 Operator::I32Extend16S => {
1781 let loc = self.pop_value_released()?;
1782 let ret = self.acquire_locations(
1783 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1784 false,
1785 )?[0];
1786 self.value_stack.push(ret);
1787
1788 self.machine
1789 .emit_relaxed_sign_extension(Size::S16, loc, Size::S32, ret)?;
1790 }
1791 Operator::I64Extend8S => {
1792 let loc = self.pop_value_released()?;
1793 let ret = self.acquire_locations(
1794 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1795 false,
1796 )?[0];
1797 self.value_stack.push(ret);
1798
1799 self.machine
1800 .emit_relaxed_sign_extension(Size::S8, loc, Size::S64, ret)?;
1801 }
1802 Operator::I64Extend16S => {
1803 let loc = self.pop_value_released()?;
1804 let ret = self.acquire_locations(
1805 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1806 false,
1807 )?[0];
1808 self.value_stack.push(ret);
1809
1810 self.machine
1811 .emit_relaxed_sign_extension(Size::S16, loc, Size::S64, ret)?;
1812 }
1813 Operator::I64Extend32S => {
1814 let loc = self.pop_value_released()?;
1815 let ret = self.acquire_locations(
1816 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
1817 false,
1818 )?[0];
1819 self.value_stack.push(ret);
1820
1821 self.machine
1822 .emit_relaxed_sign_extension(Size::S32, loc, Size::S64, ret)?;
1823 }
1824 Operator::I32WrapI64 => {
1825 let loc = self.pop_value_released()?;
1826 let ret = self.acquire_locations(
1827 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
1828 false,
1829 )?[0];
1830 self.value_stack.push(ret);
1831 self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
1832 }
1833
1834 Operator::F32Const { value } => {
1835 self.value_stack.push(Location::Imm32(value.bits()));
1836 self.fp_stack
1837 .push(FloatValue::new(self.value_stack.len() - 1));
1838 self.state
1839 .wasm_stack
1840 .push(WasmAbstractValue::Const(value.bits() as u64));
1841 }
1842 Operator::F32Add => {
1843 self.fp_stack.pop2()?;
1844 self.fp_stack
1845 .push(FloatValue::cncl_f32(self.value_stack.len() - 2));
1846 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
1847
1848 self.machine.f32_add(loc_a, loc_b, ret)?;
1849 }
1850 Operator::F32Sub => {
1851 self.fp_stack.pop2()?;
1852 self.fp_stack
1853 .push(FloatValue::cncl_f32(self.value_stack.len() - 2));
1854 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
1855
1856 self.machine.f32_sub(loc_a, loc_b, ret)?;
1857 }
1858 Operator::F32Mul => {
1859 self.fp_stack.pop2()?;
1860 self.fp_stack
1861 .push(FloatValue::cncl_f32(self.value_stack.len() - 2));
1862 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
1863
1864 self.machine.f32_mul(loc_a, loc_b, ret)?;
1865 }
1866 Operator::F32Div => {
1867 self.fp_stack.pop2()?;
1868 self.fp_stack
1869 .push(FloatValue::cncl_f32(self.value_stack.len() - 2));
1870 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
1871
1872 self.machine.f32_div(loc_a, loc_b, ret)?;
1873 }
1874 Operator::F32Max => {
1875 self.fp_stack.pop2()?;
1876 self.fp_stack
1877 .push(FloatValue::new(self.value_stack.len() - 2));
1878 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
1879 self.machine.f32_max(loc_a, loc_b, ret)?;
1880 }
1881 Operator::F32Min => {
1882 self.fp_stack.pop2()?;
1883 self.fp_stack
1884 .push(FloatValue::new(self.value_stack.len() - 2));
1885 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
1886 self.machine.f32_min(loc_a, loc_b, ret)?;
1887 }
1888 Operator::F32Eq => {
1889 self.fp_stack.pop2()?;
1890 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1891 self.machine.f32_cmp_eq(loc_a, loc_b, ret)?;
1892 }
1893 Operator::F32Ne => {
1894 self.fp_stack.pop2()?;
1895 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1896 self.machine.f32_cmp_ne(loc_a, loc_b, ret)?;
1897 }
1898 Operator::F32Lt => {
1899 self.fp_stack.pop2()?;
1900 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1901 self.machine.f32_cmp_lt(loc_a, loc_b, ret)?;
1902 }
1903 Operator::F32Le => {
1904 self.fp_stack.pop2()?;
1905 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1906 self.machine.f32_cmp_le(loc_a, loc_b, ret)?;
1907 }
1908 Operator::F32Gt => {
1909 self.fp_stack.pop2()?;
1910 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1911 self.machine.f32_cmp_gt(loc_a, loc_b, ret)?;
1912 }
1913 Operator::F32Ge => {
1914 self.fp_stack.pop2()?;
1915 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
1916 self.machine.f32_cmp_ge(loc_a, loc_b, ret)?;
1917 }
1918 Operator::F32Nearest => {
1919 self.fp_stack.pop1()?;
1920 self.fp_stack
1921 .push(FloatValue::cncl_f32(self.value_stack.len() - 1));
1922 let loc = self.pop_value_released()?;
1923 let ret = self.acquire_locations(
1924 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
1925 false,
1926 )?[0];
1927 self.value_stack.push(ret);
1928 self.machine.f32_nearest(loc, ret)?;
1929 }
1930 Operator::F32Floor => {
1931 self.fp_stack.pop1()?;
1932 self.fp_stack
1933 .push(FloatValue::cncl_f32(self.value_stack.len() - 1));
1934 let loc = self.pop_value_released()?;
1935 let ret = self.acquire_locations(
1936 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
1937 false,
1938 )?[0];
1939 self.value_stack.push(ret);
1940 self.machine.f32_floor(loc, ret)?;
1941 }
1942 Operator::F32Ceil => {
1943 self.fp_stack.pop1()?;
1944 self.fp_stack
1945 .push(FloatValue::cncl_f32(self.value_stack.len() - 1));
1946 let loc = self.pop_value_released()?;
1947 let ret = self.acquire_locations(
1948 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
1949 false,
1950 )?[0];
1951 self.value_stack.push(ret);
1952 self.machine.f32_ceil(loc, ret)?;
1953 }
1954 Operator::F32Trunc => {
1955 self.fp_stack.pop1()?;
1956 self.fp_stack
1957 .push(FloatValue::cncl_f32(self.value_stack.len() - 1));
1958 let loc = self.pop_value_released()?;
1959 let ret = self.acquire_locations(
1960 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
1961 false,
1962 )?[0];
1963 self.value_stack.push(ret);
1964 self.machine.f32_trunc(loc, ret)?;
1965 }
1966 Operator::F32Sqrt => {
1967 self.fp_stack.pop1()?;
1968 self.fp_stack
1969 .push(FloatValue::cncl_f32(self.value_stack.len() - 1));
1970 let loc = self.pop_value_released()?;
1971 let ret = self.acquire_locations(
1972 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
1973 false,
1974 )?[0];
1975 self.value_stack.push(ret);
1976 self.machine.f32_sqrt(loc, ret)?;
1977 }
1978
1979 Operator::F32Copysign => {
1980 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F32)?;
1981
1982 let (fp_src1, fp_src2) = self.fp_stack.pop2()?;
1983 self.fp_stack
1984 .push(FloatValue::new(self.value_stack.len() - 1));
1985
1986 let tmp1 = self.machine.acquire_temp_gpr().unwrap();
1987 let tmp2 = self.machine.acquire_temp_gpr().unwrap();
1988
1989 if self.machine.arch_supports_canonicalize_nan()
1990 && self.config.enable_nan_canonicalization
1991 {
1992 for (fp, loc, tmp) in [(fp_src1, loc_a, tmp1), (fp_src2, loc_b, tmp2)].iter() {
1993 match fp.canonicalization {
1994 Some(_) => {
1995 self.machine
1996 .canonicalize_nan(Size::S32, *loc, Location::GPR(*tmp))
1997 }
1998 None => {
1999 self.machine
2000 .move_location(Size::S32, *loc, Location::GPR(*tmp))
2001 }
2002 }?;
2003 }
2004 } else {
2005 self.machine
2006 .move_location(Size::S32, loc_a, Location::GPR(tmp1))?;
2007 self.machine
2008 .move_location(Size::S32, loc_b, Location::GPR(tmp2))?;
2009 }
2010 self.machine.emit_i32_copysign(tmp1, tmp2)?;
2011 self.machine
2012 .move_location(Size::S32, Location::GPR(tmp1), ret)?;
2013 self.machine.release_gpr(tmp2);
2014 self.machine.release_gpr(tmp1);
2015 }
2016
2017 Operator::F32Abs => {
2018 let loc = self.pop_value_released()?;
2021 let ret = self.acquire_locations(
2022 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2023 false,
2024 )?[0];
2025 self.value_stack.push(ret);
2026
2027 self.machine.f32_abs(loc, ret)?;
2028 }
2029
2030 Operator::F32Neg => {
2031 let loc = self.pop_value_released()?;
2034 let ret = self.acquire_locations(
2035 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2036 false,
2037 )?[0];
2038 self.value_stack.push(ret);
2039
2040 self.machine.f32_neg(loc, ret)?;
2041 }
2042
2043 Operator::F64Const { value } => {
2044 self.value_stack.push(Location::Imm64(value.bits()));
2045 self.fp_stack
2046 .push(FloatValue::new(self.value_stack.len() - 1));
2047 self.state
2048 .wasm_stack
2049 .push(WasmAbstractValue::Const(value.bits()));
2050 }
2051 Operator::F64Add => {
2052 self.fp_stack.pop2()?;
2053 self.fp_stack
2054 .push(FloatValue::cncl_f64(self.value_stack.len() - 2));
2055 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2056
2057 self.machine.f64_add(loc_a, loc_b, ret)?;
2058 }
2059 Operator::F64Sub => {
2060 self.fp_stack.pop2()?;
2061 self.fp_stack
2062 .push(FloatValue::cncl_f64(self.value_stack.len() - 2));
2063 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2064
2065 self.machine.f64_sub(loc_a, loc_b, ret)?;
2066 }
2067 Operator::F64Mul => {
2068 self.fp_stack.pop2()?;
2069 self.fp_stack
2070 .push(FloatValue::cncl_f64(self.value_stack.len() - 2));
2071 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2072
2073 self.machine.f64_mul(loc_a, loc_b, ret)?;
2074 }
2075 Operator::F64Div => {
2076 self.fp_stack.pop2()?;
2077 self.fp_stack
2078 .push(FloatValue::cncl_f64(self.value_stack.len() - 2));
2079 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2080
2081 self.machine.f64_div(loc_a, loc_b, ret)?;
2082 }
2083 Operator::F64Max => {
2084 self.fp_stack.pop2()?;
2085 self.fp_stack
2086 .push(FloatValue::new(self.value_stack.len() - 2));
2087 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2088 self.machine.f64_max(loc_a, loc_b, ret)?;
2089 }
2090 Operator::F64Min => {
2091 self.fp_stack.pop2()?;
2092 self.fp_stack
2093 .push(FloatValue::new(self.value_stack.len() - 2));
2094 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2095 self.machine.f64_min(loc_a, loc_b, ret)?;
2096 }
2097 Operator::F64Eq => {
2098 self.fp_stack.pop2()?;
2099 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
2100 self.machine.f64_cmp_eq(loc_a, loc_b, ret)?;
2101 }
2102 Operator::F64Ne => {
2103 self.fp_stack.pop2()?;
2104 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
2105 self.machine.f64_cmp_ne(loc_a, loc_b, ret)?;
2106 }
2107 Operator::F64Lt => {
2108 self.fp_stack.pop2()?;
2109 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
2110 self.machine.f64_cmp_lt(loc_a, loc_b, ret)?;
2111 }
2112 Operator::F64Le => {
2113 self.fp_stack.pop2()?;
2114 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
2115 self.machine.f64_cmp_le(loc_a, loc_b, ret)?;
2116 }
2117 Operator::F64Gt => {
2118 self.fp_stack.pop2()?;
2119 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
2120 self.machine.f64_cmp_gt(loc_a, loc_b, ret)?;
2121 }
2122 Operator::F64Ge => {
2123 self.fp_stack.pop2()?;
2124 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::I32)?;
2125 self.machine.f64_cmp_ge(loc_a, loc_b, ret)?;
2126 }
2127 Operator::F64Nearest => {
2128 self.fp_stack.pop1()?;
2129 self.fp_stack
2130 .push(FloatValue::cncl_f64(self.value_stack.len() - 1));
2131 let loc = self.pop_value_released()?;
2132 let ret = self.acquire_locations(
2133 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2134 false,
2135 )?[0];
2136 self.value_stack.push(ret);
2137 self.machine.f64_nearest(loc, ret)?;
2138 }
2139 Operator::F64Floor => {
2140 self.fp_stack.pop1()?;
2141 self.fp_stack
2142 .push(FloatValue::cncl_f64(self.value_stack.len() - 1));
2143 let loc = self.pop_value_released()?;
2144 let ret = self.acquire_locations(
2145 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2146 false,
2147 )?[0];
2148 self.value_stack.push(ret);
2149 self.machine.f64_floor(loc, ret)?;
2150 }
2151 Operator::F64Ceil => {
2152 self.fp_stack.pop1()?;
2153 self.fp_stack
2154 .push(FloatValue::cncl_f64(self.value_stack.len() - 1));
2155 let loc = self.pop_value_released()?;
2156 let ret = self.acquire_locations(
2157 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2158 false,
2159 )?[0];
2160 self.value_stack.push(ret);
2161 self.machine.f64_ceil(loc, ret)?;
2162 }
2163 Operator::F64Trunc => {
2164 self.fp_stack.pop1()?;
2165 self.fp_stack
2166 .push(FloatValue::cncl_f64(self.value_stack.len() - 1));
2167 let loc = self.pop_value_released()?;
2168 let ret = self.acquire_locations(
2169 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2170 false,
2171 )?[0];
2172 self.value_stack.push(ret);
2173 self.machine.f64_trunc(loc, ret)?;
2174 }
2175 Operator::F64Sqrt => {
2176 self.fp_stack.pop1()?;
2177 self.fp_stack
2178 .push(FloatValue::cncl_f64(self.value_stack.len() - 1));
2179 let loc = self.pop_value_released()?;
2180 let ret = self.acquire_locations(
2181 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2182 false,
2183 )?[0];
2184 self.value_stack.push(ret);
2185 self.machine.f64_sqrt(loc, ret)?;
2186 }
2187
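// Same lowering as `f32.copysign`, but on 64-bit bit patterns and
// `emit_i64_copysign`.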
2188 Operator::F64Copysign => {
2189 let I2O1 { loc_a, loc_b, ret } = self.i2o1_prepare(WpType::F64)?;
2190
2191 let (fp_src1, fp_src2) = self.fp_stack.pop2()?;
2192 self.fp_stack
2193 .push(FloatValue::new(self.value_stack.len() - 1));
2194
2195 let tmp1 = self.machine.acquire_temp_gpr().unwrap();
2196 let tmp2 = self.machine.acquire_temp_gpr().unwrap();
2197
2198 if self.machine.arch_supports_canonicalize_nan()
2199 && self.config.enable_nan_canonicalization
2200 {
2201 for (fp, loc, tmp) in [(fp_src1, loc_a, tmp1), (fp_src2, loc_b, tmp2)].iter() {
2202 match fp.canonicalization {
2203 Some(_) => {
2204 self.machine
2205 .canonicalize_nan(Size::S64, *loc, Location::GPR(*tmp))
2206 }
2207 None => {
2208 self.machine
2209 .move_location(Size::S64, *loc, Location::GPR(*tmp))
2210 }
2211 }?;
2212 }
2213 } else {
2214 self.machine
2215 .move_location(Size::S64, loc_a, Location::GPR(tmp1))?;
2216 self.machine
2217 .move_location(Size::S64, loc_b, Location::GPR(tmp2))?;
2218 }
2219 self.machine.emit_i64_copysign(tmp1, tmp2)?;
2220 self.machine
2221 .move_location(Size::S64, Location::GPR(tmp1), ret)?;
2222
2223 self.machine.release_gpr(tmp2);
2224 self.machine.release_gpr(tmp1);
2225 }
2226
2227 Operator::F64Abs => {
2228 let loc = self.pop_value_released()?;
2231 let ret = self.acquire_locations(
2232 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2233 false,
2234 )?[0];
2235 self.value_stack.push(ret);
2236
2237 self.machine.f64_abs(loc, ret)?;
2238 }
2239
2240 Operator::F64Neg => {
2241 let loc = self.pop_value_released()?;
2244 let ret = self.acquire_locations(
2245 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2246 false,
2247 )?[0];
2248 self.value_stack.push(ret);
2249
2250 self.machine.f64_neg(loc, ret)?;
2251 }
2252
2253 Operator::F64PromoteF32 => {
2254 let fp = self.fp_stack.pop1()?;
2255 self.fp_stack.push(fp.promote(self.value_stack.len() - 1)?);
2256 let loc = self.pop_value_released()?;
2257 let ret = self.acquire_locations(
2258 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2259 false,
2260 )?[0];
2261 self.value_stack.push(ret);
2262 self.machine.convert_f64_f32(loc, ret)?;
2263 }
2264 Operator::F32DemoteF64 => {
2265 let fp = self.fp_stack.pop1()?;
2266 self.fp_stack.push(fp.demote(self.value_stack.len() - 1)?);
2267 let loc = self.pop_value_released()?;
2268 let ret = self.acquire_locations(
2269 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2270 false,
2271 )?[0];
2272 self.value_stack.push(ret);
2273 self.machine.convert_f32_f64(loc, ret)?;
2274 }
2275
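// Reinterpret casts are plain bit moves; the only subtlety is that a pending
// NaN canonicalization on a float source must be applied before its bits
// become observable as an integer.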
2276 Operator::I32ReinterpretF32 => {
2277 let loc = self.pop_value_released()?;
2278 let ret = self.acquire_locations(
2279 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2280 false,
2281 )?[0];
2282 self.value_stack.push(ret);
2283 let fp = self.fp_stack.pop1()?;
2284
2285 if !self.machine.arch_supports_canonicalize_nan()
2286 || !self.config.enable_nan_canonicalization
2287 || fp.canonicalization.is_none()
2288 {
2289 if loc != ret {
2290 self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
2291 }
2292 } else {
2293 self.machine.canonicalize_nan(Size::S32, loc, ret)?;
2294 }
2295 }
2296 Operator::F32ReinterpretI32 => {
2297 let loc = self.pop_value_released()?;
2298 let ret = self.acquire_locations(
2299 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2300 false,
2301 )?[0];
2302 self.value_stack.push(ret);
2303 self.fp_stack
2304 .push(FloatValue::new(self.value_stack.len() - 1));
2305
2306 if loc != ret {
2307 self.machine.emit_relaxed_mov(Size::S32, loc, ret)?;
2308 }
2309 }
2310
2311 Operator::I64ReinterpretF64 => {
2312 let loc = self.pop_value_released()?;
2313 let ret = self.acquire_locations(
2314 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2315 false,
2316 )?[0];
2317 self.value_stack.push(ret);
2318 let fp = self.fp_stack.pop1()?;
2319
2320 if !self.machine.arch_supports_canonicalize_nan()
2321 || !self.config.enable_nan_canonicalization
2322 || fp.canonicalization.is_none()
2323 {
2324 if loc != ret {
2325 self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
2326 }
2327 } else {
2328 self.machine.canonicalize_nan(Size::S64, loc, ret)?;
2329 }
2330 }
2331 Operator::F64ReinterpretI64 => {
2332 let loc = self.pop_value_released()?;
2333 let ret = self.acquire_locations(
2334 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2335 false,
2336 )?[0];
2337 self.value_stack.push(ret);
2338 self.fp_stack
2339 .push(FloatValue::new(self.value_stack.len() - 1));
2340
2341 if loc != ret {
2342 self.machine.emit_relaxed_mov(Size::S64, loc, ret)?;
2343 }
2344 }
2345
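// Float-to-integer truncations: the two trailing booleans of `convert_i32_f32`
// / `convert_i64_f32` (and the f64 variants) select a signed conversion and
// saturating (`trunc_sat`) behaviour respectively.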
2346 Operator::I32TruncF32U => {
2347 let loc = self.pop_value_released()?;
2348 let ret = self.acquire_locations(
2349 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2350 false,
2351 )?[0];
2352 self.value_stack.push(ret);
2353 self.fp_stack.pop1()?;
2354
2355 self.machine.convert_i32_f32(loc, ret, false, false)?;
2356 }
2357
2358 Operator::I32TruncSatF32U => {
2359 let loc = self.pop_value_released()?;
2360 let ret = self.acquire_locations(
2361 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2362 false,
2363 )?[0];
2364 self.value_stack.push(ret);
2365 self.fp_stack.pop1()?;
2366
2367 self.machine.convert_i32_f32(loc, ret, false, true)?;
2368 }
2369
2370 Operator::I32TruncF32S => {
2371 let loc = self.pop_value_released()?;
2372 let ret = self.acquire_locations(
2373 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2374 false,
2375 )?[0];
2376 self.value_stack.push(ret);
2377 self.fp_stack.pop1()?;
2378
2379 self.machine.convert_i32_f32(loc, ret, true, false)?;
2380 }
2381 Operator::I32TruncSatF32S => {
2382 let loc = self.pop_value_released()?;
2383 let ret = self.acquire_locations(
2384 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2385 false,
2386 )?[0];
2387 self.value_stack.push(ret);
2388 self.fp_stack.pop1()?;
2389
2390 self.machine.convert_i32_f32(loc, ret, true, true)?;
2391 }
2392
2393 Operator::I64TruncF32S => {
2394 let loc = self.pop_value_released()?;
2395 let ret = self.acquire_locations(
2396 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2397 false,
2398 )?[0];
2399 self.value_stack.push(ret);
2400 self.fp_stack.pop1()?;
2401
2402 self.machine.convert_i64_f32(loc, ret, true, false)?;
2403 }
2404
2405 Operator::I64TruncSatF32S => {
2406 let loc = self.pop_value_released()?;
2407 let ret = self.acquire_locations(
2408 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2409 false,
2410 )?[0];
2411 self.value_stack.push(ret);
2412 self.fp_stack.pop1()?;
2413
2414 self.machine.convert_i64_f32(loc, ret, true, true)?;
2415 }
2416
2417 Operator::I64TruncF32U => {
2418 let loc = self.pop_value_released()?;
2419 let ret = self.acquire_locations(
2420 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2421 false,
2422 )?[0];
2423 self.value_stack.push(ret);
2424 self.fp_stack.pop1()?;
2425
2426 self.machine.convert_i64_f32(loc, ret, false, false)?;
2427 }
2428 Operator::I64TruncSatF32U => {
2429 let loc = self.pop_value_released()?;
2430 let ret = self.acquire_locations(
2431 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2432 false,
2433 )?[0];
2434 self.value_stack.push(ret);
2435 self.fp_stack.pop1()?;
2436
2437 self.machine.convert_i64_f32(loc, ret, false, true)?;
2438 }
2439
2440 Operator::I32TruncF64U => {
2441 let loc = self.pop_value_released()?;
2442 let ret = self.acquire_locations(
2443 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2444 false,
2445 )?[0];
2446 self.value_stack.push(ret);
2447 self.fp_stack.pop1()?;
2448
2449 self.machine.convert_i32_f64(loc, ret, false, false)?;
2450 }
2451
2452 Operator::I32TruncSatF64U => {
2453 let loc = self.pop_value_released()?;
2454 let ret = self.acquire_locations(
2455 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2456 false,
2457 )?[0];
2458 self.value_stack.push(ret);
2459 self.fp_stack.pop1()?;
2460
2461 self.machine.convert_i32_f64(loc, ret, false, true)?;
2462 }
2463
2464 Operator::I32TruncF64S => {
2465 let loc = self.pop_value_released()?;
2466 let ret = self.acquire_locations(
2467 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2468 false,
2469 )?[0];
2470 self.value_stack.push(ret);
2471 self.fp_stack.pop1()?;
2472
2473 self.machine.convert_i32_f64(loc, ret, true, false)?;
2474 }
2475
2476 Operator::I32TruncSatF64S => {
2477 let loc = self.pop_value_released()?;
2478 let ret = self.acquire_locations(
2479 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
2480 false,
2481 )?[0];
2482 self.value_stack.push(ret);
2483 self.fp_stack.pop1()?;
2484
2485 self.machine.convert_i32_f64(loc, ret, true, true)?;
2486 }
2487
2488 Operator::I64TruncF64S => {
2489 let loc = self.pop_value_released()?;
2490 let ret = self.acquire_locations(
2491 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2492 false,
2493 )?[0];
2494 self.value_stack.push(ret);
2495 self.fp_stack.pop1()?;
2496
2497 self.machine.convert_i64_f64(loc, ret, true, false)?;
2498 }
2499
2500 Operator::I64TruncSatF64S => {
2501 let loc = self.pop_value_released()?;
2502 let ret = self.acquire_locations(
2503 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2504 false,
2505 )?[0];
2506 self.value_stack.push(ret);
2507 self.fp_stack.pop1()?;
2508
2509 self.machine.convert_i64_f64(loc, ret, true, true)?;
2510 }
2511
2512 Operator::I64TruncF64U => {
2513 let loc = self.pop_value_released()?;
2514 let ret = self.acquire_locations(
2515 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2516 false,
2517 )?[0];
2518 self.value_stack.push(ret);
2519 self.fp_stack.pop1()?;
2520
2521 self.machine.convert_i64_f64(loc, ret, false, false)?;
2522 }
2523
2524 Operator::I64TruncSatF64U => {
2525 let loc = self.pop_value_released()?;
2526 let ret = self.acquire_locations(
2527 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
2528 false,
2529 )?[0];
2530 self.value_stack.push(ret);
2531 self.fp_stack.pop1()?;
2532
2533 self.machine.convert_i64_f64(loc, ret, false, true)?;
2534 }
2535
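// Integer-to-float conversions mirror the truncations: the boolean selects a
// signed or unsigned source, and the result is tracked on the FP stack.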
2536 Operator::F32ConvertI32S => {
2537 let loc = self.pop_value_released()?;
2538 let ret = self.acquire_locations(
2539 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2540 false,
2541 )?[0];
2542 self.value_stack.push(ret);
2543 self.fp_stack
2544 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f32_i32(loc, true, ret)?;
2547 }
2548 Operator::F32ConvertI32U => {
2549 let loc = self.pop_value_released()?;
2550 let ret = self.acquire_locations(
2551 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2552 false,
2553 )?[0];
2554 self.value_stack.push(ret);
2555 self.fp_stack
2556 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f32_i32(loc, false, ret)?;
2559 }
2560 Operator::F32ConvertI64S => {
2561 let loc = self.pop_value_released()?;
2562 let ret = self.acquire_locations(
2563 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2564 false,
2565 )?[0];
2566 self.value_stack.push(ret);
2567 self.fp_stack
2568 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f32_i64(loc, true, ret)?;
2571 }
2572 Operator::F32ConvertI64U => {
2573 let loc = self.pop_value_released()?;
2574 let ret = self.acquire_locations(
2575 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
2576 false,
2577 )?[0];
2578 self.value_stack.push(ret);
2579 self.fp_stack
2580 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f32_i64(loc, false, ret)?;
2583 }
2584
2585 Operator::F64ConvertI32S => {
2586 let loc = self.pop_value_released()?;
2587 let ret = self.acquire_locations(
2588 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2589 false,
2590 )?[0];
2591 self.value_stack.push(ret);
2592 self.fp_stack
2593 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f64_i32(loc, true, ret)?;
2596 }
2597 Operator::F64ConvertI32U => {
2598 let loc = self.pop_value_released()?;
2599 let ret = self.acquire_locations(
2600 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2601 false,
2602 )?[0];
2603 self.value_stack.push(ret);
2604 self.fp_stack
2605 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f64_i32(loc, false, ret)?;
2608 }
2609 Operator::F64ConvertI64S => {
2610 let loc = self.pop_value_released()?;
2611 let ret = self.acquire_locations(
2612 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2613 false,
2614 )?[0];
2615 self.value_stack.push(ret);
2616 self.fp_stack
2617 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f64_i64(loc, true, ret)?;
2620 }
2621 Operator::F64ConvertI64U => {
2622 let loc = self.pop_value_released()?;
2623 let ret = self.acquire_locations(
2624 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
2625 false,
2626 )?[0];
2627 self.value_stack.push(ret);
2628 self.fp_stack
2629 .push(FloatValue::new(self.value_stack.len() - 1));
self.machine.convert_f64_i64(loc, false, ret)?;
2632 }
2633
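// Direct call: look up the callee signature, drain its arguments from the
// value stack, emit a relocatable call, and move the single return value (if
// any) out of the ABI return register.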
2634 Operator::Call { function_index } => {
2635 let function_index = function_index as usize;
2636
2637 let sig_index = *self
2638 .module
2639 .functions
2640 .get(FunctionIndex::new(function_index))
2641 .unwrap();
2642 let sig = self.module.signatures.get(sig_index).unwrap();
2643 let param_types: SmallVec<[WpType; 8]> =
2644 sig.params().iter().cloned().map(type_to_wp_type).collect();
2645 let return_types: SmallVec<[WpType; 1]> =
2646 sig.results().iter().cloned().map(type_to_wp_type).collect();
2647
2648 let params: SmallVec<[_; 8]> = self
2649 .value_stack
2650 .drain(self.value_stack.len() - param_types.len()..)
2651 .collect();
2652 self.release_locations_only_regs(&params)?;
2653
2654 self.release_locations_only_osr_state(params.len())?;
2655
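// FP-stack entries belonging to the drained arguments are no longer on the
// value stack; canonicalize any pending NaNs in place and drop those entries.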
2656 while let Some(fp) = self.fp_stack.last() {
2661 if fp.depth >= self.value_stack.len() {
2662 let index = fp.depth - self.value_stack.len();
2663 if self.machine.arch_supports_canonicalize_nan()
2664 && self.config.enable_nan_canonicalization
2665 && fp.canonicalization.is_some()
2666 {
2667 let size = fp.canonicalization.unwrap().to_size();
2668 self.machine
2669 .canonicalize_nan(size, params[index], params[index])?;
2670 }
2671 self.fp_stack.pop().unwrap();
2672 } else {
2673 break;
2674 }
2675 }
2676
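// Pick the relocation target: imported functions are relocated against the
// custom section with the same index (their call stub), while local functions
// get a direct `LocalFunc` relocation.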
2677 let reloc_target = if function_index < self.module.num_imported_functions {
2679 RelocationTarget::CustomSection(SectionIndex::new(function_index))
2680 } else {
2681 RelocationTarget::LocalFunc(LocalFunctionIndex::new(
2682 function_index - self.module.num_imported_functions,
2683 ))
2684 };
2685 let calling_convention = self.calling_convention;
2686
2687 self.emit_call_native(
2688 |this| {
2689 let offset = this
2690 .machine
2691 .mark_instruction_with_trap_code(TrapCode::StackOverflow);
2692 let mut relocations = this
2693 .machine
2694 .emit_call_with_reloc(calling_convention, reloc_target)?;
2695 this.machine.mark_instruction_address_end(offset);
2696 this.relocations.append(&mut relocations);
2697 Ok(())
2698 },
2699 params.iter().copied(),
2700 param_types.iter().copied(),
2701 )?;
2702
2703 self.release_locations_only_stack(&params)?;
2704
2705 if !return_types.is_empty() {
2706 let ret = self.acquire_locations(
2707 &[(
2708 return_types[0],
2709 MachineValue::WasmStack(self.value_stack.len()),
2710 )],
2711 false,
2712 )?[0];
2713 self.value_stack.push(ret);
2714 if return_types[0].is_float() {
2715 self.machine.move_location(
2716 Size::S64,
2717 Location::SIMD(self.machine.get_simd_for_ret()),
2718 ret,
2719 )?;
2720 self.fp_stack
2721 .push(FloatValue::new(self.value_stack.len() - 1));
2722 } else {
2723 self.machine.move_location(
2724 Size::S64,
2725 Location::GPR(self.machine.get_gpr_for_ret()),
2726 ret,
2727 )?;
2728 }
2729 }
2730 }
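// `call_indirect`: resolve the callee through the table, verify its signature,
// then call through the `anyfunc`'s function pointer, passing the callee's
// vmctx as the implicit first argument.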
2731 Operator::CallIndirect {
2732 type_index,
2733 table_index,
2734 } => {
2735 let table_index = TableIndex::new(table_index as _);
2738 let index = SignatureIndex::new(type_index as usize);
2739 let sig = self.module.signatures.get(index).unwrap();
2740 let param_types: SmallVec<[WpType; 8]> =
2741 sig.params().iter().cloned().map(type_to_wp_type).collect();
2742 let return_types: SmallVec<[WpType; 1]> =
2743 sig.results().iter().cloned().map(type_to_wp_type).collect();
2744
2745 let func_index = self.pop_value_released()?;
2746
2747 let params: SmallVec<[_; 8]> = self
2748 .value_stack
2749 .drain(self.value_stack.len() - param_types.len()..)
2750 .collect();
2751 self.release_locations_only_regs(&params)?;
2752
2753 while let Some(fp) = self.fp_stack.last() {
2758 if fp.depth >= self.value_stack.len() {
2759 let index = fp.depth - self.value_stack.len();
2760 if self.machine.arch_supports_canonicalize_nan()
2761 && self.config.enable_nan_canonicalization
2762 && fp.canonicalization.is_some()
2763 {
2764 let size = fp.canonicalization.unwrap().to_size();
2765 self.machine
2766 .canonicalize_nan(size, params[index], params[index])?;
2767 }
2768 self.fp_stack.pop().unwrap();
2769 } else {
2770 break;
2771 }
2772 }
2773
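// Load the table base pointer and current element count, either straight from
// the local table definition in the VMContext or through the imported table's
// import record.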
2774 let table_base = self.machine.acquire_temp_gpr().unwrap();
2775 let table_count = self.machine.acquire_temp_gpr().unwrap();
2776 let sigidx = self.machine.acquire_temp_gpr().unwrap();
2777
2778 if let Some(local_table_index) = self.module.local_table_index(table_index) {
2779 let (vmctx_offset_base, vmctx_offset_len) = (
2780 self.vmoffsets.vmctx_vmtable_definition(local_table_index),
2781 self.vmoffsets
2782 .vmctx_vmtable_definition_current_elements(local_table_index),
2783 );
2784 self.machine.move_location(
2785 Size::S64,
2786 Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_base as i32),
2787 Location::GPR(table_base),
2788 )?;
2789 self.machine.move_location(
2790 Size::S32,
2791 Location::Memory(self.machine.get_vmctx_reg(), vmctx_offset_len as i32),
2792 Location::GPR(table_count),
2793 )?;
2794 } else {
2795 let import_offset = self.vmoffsets.vmctx_vmtable_import(table_index);
2797 self.machine.move_location(
2798 Size::S64,
2799 Location::Memory(self.machine.get_vmctx_reg(), import_offset as i32),
2800 Location::GPR(table_base),
2801 )?;
2802
2803 self.machine.move_location(
2805 Size::S32,
2806 Location::Memory(
2807 table_base,
2808 self.vmoffsets.vmtable_definition_current_elements() as _,
2809 ),
2810 Location::GPR(table_count),
2811 )?;
2812
2813 self.machine.move_location(
2815 Size::S64,
2816 Location::Memory(table_base, self.vmoffsets.vmtable_definition_base() as _),
2817 Location::GPR(table_base),
2818 )?;
2819 }
2820
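// Validate the indirect call: bounds-check the index against the table length,
// compute the element address, load the `anyfunc` pointer (trapping on null),
// and compare its signature id with the expected one.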
2821 self.machine.jmp_on_condition(
2822 UnsignedCondition::BelowEqual,
2823 Size::S32,
2824 func_index,
2825 Location::GPR(table_count),
2826 self.special_labels.table_access_oob,
2827 )?;
2828 self.machine
2829 .move_location(Size::S32, func_index, Location::GPR(table_count))?;
2830 self.machine.emit_imul_imm32(
2831 Size::S64,
2832 self.vmoffsets.size_of_vm_funcref() as u32,
2833 table_count,
2834 )?;
2835 self.machine.location_add(
2836 Size::S64,
2837 Location::GPR(table_base),
2838 Location::GPR(table_count),
2839 false,
2840 )?;
2841
2842 self.machine.move_location(
2844 Size::S64,
2845 Location::Memory(table_count, self.vmoffsets.vm_funcref_anyfunc_ptr() as i32),
2846 Location::GPR(table_count),
2847 )?;
2848 self.machine.jmp_on_condition(
2850 UnsignedCondition::Equal,
2851 Size::S64,
2852 Location::Imm32(0),
2853 Location::GPR(table_count),
2854 self.special_labels.indirect_call_null,
2855 )?;
2856 self.machine.move_location(
2857 Size::S64,
2858 Location::Memory(
2859 self.machine.get_vmctx_reg(),
2860 self.vmoffsets.vmctx_vmshared_signature_id(index) as i32,
2861 ),
2862 Location::GPR(sigidx),
2863 )?;
2864
2865 self.machine.jmp_on_condition(
2867 UnsignedCondition::NotEqual,
2868 Size::S32,
2869 Location::GPR(sigidx),
2870 Location::Memory(
2871 table_count,
2872 (self.vmoffsets.vmcaller_checked_anyfunc_type_index() as usize) as i32,
2873 ),
2874 self.special_labels.bad_signature,
2875 )?;
2876 self.machine.release_gpr(sigidx);
2877 self.machine.release_gpr(table_count);
2878 self.machine.release_gpr(table_base);
2879
2880 let gpr_for_call = self.machine.get_grp_for_call();
2881 if table_count != gpr_for_call {
2882 self.machine.move_location(
2883 Size::S64,
2884 Location::GPR(table_count),
2885 Location::GPR(gpr_for_call),
2886 )?;
2887 }
2888
2889 self.release_locations_only_osr_state(params.len())?;
2890
2891 let vmcaller_checked_anyfunc_func_ptr =
2892 self.vmoffsets.vmcaller_checked_anyfunc_func_ptr() as usize;
2893 let vmcaller_checked_anyfunc_vmctx =
2894 self.vmoffsets.vmcaller_checked_anyfunc_vmctx() as usize;
2895 let calling_convention = self.calling_convention;
2896
2897 self.emit_call_native(
2898 |this| {
2899 if this.machine.arch_requires_indirect_call_trampoline() {
2900 this.machine
2901 .arch_emit_indirect_call_with_trampoline(Location::Memory(
2902 gpr_for_call,
2903 vmcaller_checked_anyfunc_func_ptr as i32,
2904 ))
2905 } else {
2906 let offset = this
2907 .machine
2908 .mark_instruction_with_trap_code(TrapCode::StackOverflow);
2909
2910 this.machine.move_location(
2912 Size::S64,
2913 Location::Memory(
2914 gpr_for_call,
2915 vmcaller_checked_anyfunc_vmctx as i32,
2916 ),
2917 this.machine
2918 .get_simple_param_location(0, calling_convention),
2919 )?;
2920
2921 this.machine.emit_call_location(Location::Memory(
2922 gpr_for_call,
2923 vmcaller_checked_anyfunc_func_ptr as i32,
2924 ))?;
2925 this.machine.mark_instruction_address_end(offset);
2926 Ok(())
2927 }
2928 },
2929 params.iter().copied(),
2930 param_types.iter().copied(),
2931 )?;
2932
2933 self.release_locations_only_stack(&params)?;
2934
2935 if !return_types.is_empty() {
2936 let ret = self.acquire_locations(
2937 &[(
2938 return_types[0],
2939 MachineValue::WasmStack(self.value_stack.len()),
2940 )],
2941 false,
2942 )?[0];
2943 self.value_stack.push(ret);
2944 if return_types[0].is_float() {
2945 self.machine.move_location(
2946 Size::S64,
2947 Location::SIMD(self.machine.get_simd_for_ret()),
2948 ret,
2949 )?;
2950 self.fp_stack
2951 .push(FloatValue::new(self.value_stack.len() - 1));
2952 } else {
2953 self.machine.move_location(
2954 Size::S64,
2955 Location::GPR(self.machine.get_gpr_for_ret()),
2956 ret,
2957 )?;
2958 }
2959 }
2960 }
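// `if`: push a control frame remembering the else/end labels and the current
// stack depths, then branch to the else label when the condition is zero.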
2961 Operator::If { blockty } => {
2962 let label_end = self.machine.get_label();
2963 let label_else = self.machine.get_label();
2964
2965 let cond = self.pop_value_released()?;
2966
2967 let frame = ControlFrame {
2968 label: label_end,
2969 loop_like: false,
2970 if_else: IfElseState::If(label_else),
2971 returns: match blockty {
2972 WpTypeOrFuncType::Empty => smallvec![],
2973 WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
2974 _ => {
2975 return Err(CompileError::Codegen(
2976 "If: multi-value returns not yet implemented".to_owned(),
2977 ));
2978 }
2979 },
2980 value_stack_depth: self.value_stack.len(),
2981 fp_stack_depth: self.fp_stack.len(),
2982 state: self.state.clone(),
2983 state_diff_id: self.get_state_diff(),
2984 };
2985 self.control_stack.push(frame);
2986 self.machine.jmp_on_condition(
2987 UnsignedCondition::Equal,
2988 Size::S32,
2989 Location::Imm32(0),
2990 cond,
2991 label_else,
2992 )?;
2993 }
2994 Operator::Else => {
2995 let frame = self.control_stack.last_mut().unwrap();
2996
2997 if !was_unreachable && !frame.returns.is_empty() {
2998 let first_return = frame.returns[0];
2999 let loc = *self.value_stack.last().unwrap();
3000 let canonicalize = if first_return.is_float() {
3001 let fp = self.fp_stack.peek1()?;
3002 self.machine.arch_supports_canonicalize_nan()
3003 && self.config.enable_nan_canonicalization
3004 && fp.canonicalization.is_some()
3005 } else {
3006 false
3007 };
3008 self.machine
3009 .emit_function_return_value(first_return, canonicalize, loc)?;
3010 }
3011
3012 let frame = &self.control_stack.last_mut().unwrap();
3013 let stack_depth = frame.value_stack_depth;
3014 let fp_depth = frame.fp_stack_depth;
3015 self.release_locations_value(stack_depth)?;
3016 self.value_stack.truncate(stack_depth);
3017 self.fp_stack.truncate(fp_depth);
3018 let frame = &mut self.control_stack.last_mut().unwrap();
3019
3020 match frame.if_else {
3021 IfElseState::If(label) => {
3022 self.machine.jmp_unconditionnal(frame.label)?;
3023 self.machine.emit_label(label)?;
3024 frame.if_else = IfElseState::Else;
3025 }
3026 _ => {
3027 return Err(CompileError::Codegen(
3028 "Else: frame.if_else unreachable code".to_owned(),
3029 ));
3030 }
3031 }
3032 }
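// `select` is lowered as a compare-and-branch: each arm copies its own source
// into `ret`, applying NaN canonicalization when that source has one pending.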
3033 Operator::TypedSelect { .. } | Operator::Select => {
3036 let cond = self.pop_value_released()?;
3037 let v_b = self.pop_value_released()?;
3038 let v_a = self.pop_value_released()?;
3039 let cncl: Option<(Option<CanonicalizeType>, Option<CanonicalizeType>)> =
3040 if self.fp_stack.len() >= 2
3041 && self.fp_stack[self.fp_stack.len() - 2].depth == self.value_stack.len()
3042 && self.fp_stack[self.fp_stack.len() - 1].depth
3043 == self.value_stack.len() + 1
3044 {
3045 let (left, right) = self.fp_stack.pop2()?;
3046 self.fp_stack.push(FloatValue::new(self.value_stack.len()));
3047 Some((left.canonicalization, right.canonicalization))
3048 } else {
3049 None
3050 };
3051 let ret = self.acquire_locations(
3052 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3053 false,
3054 )?[0];
3055 self.value_stack.push(ret);
3056
3057 let end_label = self.machine.get_label();
3058 let zero_label = self.machine.get_label();
3059
3060 self.machine.jmp_on_condition(
3061 UnsignedCondition::Equal,
3062 Size::S32,
3063 Location::Imm32(0),
3064 cond,
3065 zero_label,
3066 )?;
3067 match cncl {
3068 Some((Some(fp), _))
3069 if self.machine.arch_supports_canonicalize_nan()
3070 && self.config.enable_nan_canonicalization =>
3071 {
3072 self.machine.canonicalize_nan(fp.to_size(), v_a, ret)?;
3073 }
3074 _ => {
3075 if v_a != ret {
3076 self.machine.emit_relaxed_mov(Size::S64, v_a, ret)?;
3077 }
3078 }
3079 }
3080 self.machine.jmp_unconditionnal(end_label)?;
3081 self.machine.emit_label(zero_label)?;
3082 match cncl {
3083 Some((_, Some(fp)))
3084 if self.machine.arch_supports_canonicalize_nan()
3085 && self.config.enable_nan_canonicalization =>
3086 {
3087 self.machine.canonicalize_nan(fp.to_size(), v_b, ret)?;
3088 }
3089 _ => {
3090 if v_b != ret {
3091 self.machine.emit_relaxed_mov(Size::S64, v_b, ret)?;
3092 }
3093 }
3094 }
3095 self.machine.emit_label(end_label)?;
3096 }
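// `block` and `loop` only push a control frame; `loop` additionally aligns the
// loop header and binds its label immediately so that branches to the frame
// jump backwards.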
3097 Operator::Block { blockty } => {
3098 let frame = ControlFrame {
3099 label: self.machine.get_label(),
3100 loop_like: false,
3101 if_else: IfElseState::None,
3102 returns: match blockty {
3103 WpTypeOrFuncType::Empty => smallvec![],
3104 WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
3105 _ => {
3106 return Err(CompileError::Codegen(
3107 "Block: multi-value returns not yet implemented".to_owned(),
3108 ));
3109 }
3110 },
3111 value_stack_depth: self.value_stack.len(),
3112 fp_stack_depth: self.fp_stack.len(),
3113 state: self.state.clone(),
3114 state_diff_id: self.get_state_diff(),
3115 };
3116 self.control_stack.push(frame);
3117 }
3118 Operator::Loop { blockty } => {
3119 self.machine.align_for_loop()?;
3120 let label = self.machine.get_label();
3121 let state_diff_id = self.get_state_diff();
3122 let _activate_offset = self.machine.assembler_get_offset().0;
3123
3124 self.control_stack.push(ControlFrame {
3125 label,
3126 loop_like: true,
3127 if_else: IfElseState::None,
3128 returns: match blockty {
3129 WpTypeOrFuncType::Empty => smallvec![],
3130 WpTypeOrFuncType::Type(inner_ty) => smallvec![inner_ty],
3131 _ => {
3132 return Err(CompileError::Codegen(
3133 "Loop: multi-value returns not yet implemented".to_owned(),
3134 ));
3135 }
3136 },
3137 value_stack_depth: self.value_stack.len(),
3138 fp_stack_depth: self.fp_stack.len(),
3139 state: self.state.clone(),
3140 state_diff_id,
3141 });
3142 self.machine.emit_label(label)?;
3143
3144 }
3146 Operator::Nop => {}
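// Memory-management and bulk-memory operators are lowered as calls to runtime
// builtins: the entry point is loaded from the VMContext's builtin-function
// table, choosing the local or imported variant from the module layout.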
3147 Operator::MemorySize { mem } => {
3148 let memory_index = MemoryIndex::new(mem as usize);
3149 self.machine.move_location(
3150 Size::S64,
3151 Location::Memory(
3152 self.machine.get_vmctx_reg(),
3153 self.vmoffsets.vmctx_builtin_function(
3154 if self.module.local_memory_index(memory_index).is_some() {
3155 VMBuiltinFunctionIndex::get_memory32_size_index()
3156 } else {
3157 VMBuiltinFunctionIndex::get_imported_memory32_size_index()
3158 },
3159 ) as i32,
3160 ),
3161 Location::GPR(self.machine.get_grp_for_call()),
3162 )?;
3163 self.emit_call_native(
3164 |this| {
3165 this.machine
3166 .emit_call_register(this.machine.get_grp_for_call())
3167 },
3168 iter::once(Location::Imm32(memory_index.index() as u32)),
3170 iter::once(WpType::I64),
3171 )?;
3172 let ret = self.acquire_locations(
3173 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3174 false,
3175 )?[0];
3176 self.value_stack.push(ret);
3177 self.machine.move_location(
3178 Size::S64,
3179 Location::GPR(self.machine.get_gpr_for_ret()),
3180 ret,
3181 )?;
3182 }
3183 Operator::MemoryInit { data_index, mem } => {
3184 let len = self.value_stack.pop().unwrap();
3185 let src = self.value_stack.pop().unwrap();
3186 let dst = self.value_stack.pop().unwrap();
3187 self.release_locations_only_regs(&[len, src, dst])?;
3188
3189 self.machine.move_location(
3190 Size::S64,
3191 Location::Memory(
3192 self.machine.get_vmctx_reg(),
3193 self.vmoffsets
3194 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_memory_init_index())
3195 as i32,
3196 ),
3197 Location::GPR(self.machine.get_grp_for_call()),
3198 )?;
3199
3200 self.release_locations_only_osr_state(1)?;
3202
3203 self.emit_call_native(
3204 |this| {
3205 this.machine
3206 .emit_call_register(this.machine.get_grp_for_call())
3207 },
3208 [
3210 Location::Imm32(mem),
3211 Location::Imm32(data_index),
3212 dst,
3213 src,
3214 len,
3215 ]
3216 .iter()
3217 .cloned(),
3218 [
3219 WpType::I64,
3220 WpType::I64,
3221 WpType::I64,
3222 WpType::I64,
3223 WpType::I64,
3224 ]
3225 .iter()
3226 .cloned(),
3227 )?;
3228 self.release_locations_only_stack(&[dst, src, len])?;
3229 }
3230 Operator::DataDrop { data_index } => {
3231 self.machine.move_location(
3232 Size::S64,
3233 Location::Memory(
3234 self.machine.get_vmctx_reg(),
3235 self.vmoffsets
3236 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_data_drop_index())
3237 as i32,
3238 ),
3239 Location::GPR(self.machine.get_grp_for_call()),
3240 )?;
3241
3242 self.emit_call_native(
3243 |this| {
3244 this.machine
3245 .emit_call_register(this.machine.get_grp_for_call())
3246 },
3247 iter::once(Location::Imm32(data_index)),
3249 iter::once(WpType::I64),
3250 )?;
3251 }
3252 Operator::MemoryCopy { dst_mem, src_mem } => {
3253 let _dst = dst_mem;
3255 let len = self.value_stack.pop().unwrap();
3256 let src_pos = self.value_stack.pop().unwrap();
3257 let dst_pos = self.value_stack.pop().unwrap();
3258 self.release_locations_only_regs(&[len, src_pos, dst_pos])?;
3259
3260 let memory_index = MemoryIndex::new(src_mem as usize);
3261 let (memory_copy_index, memory_index) =
3262 if self.module.local_memory_index(memory_index).is_some() {
3263 (
3264 VMBuiltinFunctionIndex::get_memory_copy_index(),
3265 memory_index,
3266 )
3267 } else {
3268 (
3269 VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
3270 memory_index,
3271 )
3272 };
3273
3274 self.machine.move_location(
3275 Size::S64,
3276 Location::Memory(
3277 self.machine.get_vmctx_reg(),
3278 self.vmoffsets.vmctx_builtin_function(memory_copy_index) as i32,
3279 ),
3280 Location::GPR(self.machine.get_grp_for_call()),
3281 )?;
3282
3283 self.release_locations_only_osr_state(1)?;
3285
3286 self.emit_call_native(
3287 |this| {
3288 this.machine
3289 .emit_call_register(this.machine.get_grp_for_call())
3290 },
3291 [
3293 Location::Imm32(memory_index.index() as u32),
3294 dst_pos,
3295 src_pos,
3296 len,
3297 ]
3298 .iter()
3299 .cloned(),
3300 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
3301 .iter()
3302 .cloned(),
3303 )?;
3304 self.release_locations_only_stack(&[dst_pos, src_pos, len])?;
3305 }
3306 Operator::MemoryFill { mem } => {
3307 let len = self.value_stack.pop().unwrap();
3308 let val = self.value_stack.pop().unwrap();
3309 let dst = self.value_stack.pop().unwrap();
3310 self.release_locations_only_regs(&[len, val, dst])?;
3311
3312 let memory_index = MemoryIndex::new(mem as usize);
3313 let (memory_fill_index, memory_index) =
3314 if self.module.local_memory_index(memory_index).is_some() {
3315 (
3316 VMBuiltinFunctionIndex::get_memory_fill_index(),
3317 memory_index,
3318 )
3319 } else {
3320 (
3321 VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
3322 memory_index,
3323 )
3324 };
3325
3326 self.machine.move_location(
3327 Size::S64,
3328 Location::Memory(
3329 self.machine.get_vmctx_reg(),
3330 self.vmoffsets.vmctx_builtin_function(memory_fill_index) as i32,
3331 ),
3332 Location::GPR(self.machine.get_grp_for_call()),
3333 )?;
3334
3335 self.release_locations_only_osr_state(1)?;
3337
3338 self.emit_call_native(
3339 |this| {
3340 this.machine
3341 .emit_call_register(this.machine.get_grp_for_call())
3342 },
3343 [Location::Imm32(memory_index.index() as u32), dst, val, len]
3345 .iter()
3346 .cloned(),
3347 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
3348 .iter()
3349 .cloned(),
3350 )?;
3351 self.release_locations_only_stack(&[dst, val, len])?;
3352 }
3353 Operator::MemoryGrow { mem } => {
3354 let memory_index = MemoryIndex::new(mem as usize);
3355 let param_pages = self.value_stack.pop().unwrap();
3356
3357 self.release_locations_only_regs(&[param_pages])?;
3358
3359 self.machine.move_location(
3360 Size::S64,
3361 Location::Memory(
3362 self.machine.get_vmctx_reg(),
3363 self.vmoffsets.vmctx_builtin_function(
3364 if self.module.local_memory_index(memory_index).is_some() {
3365 VMBuiltinFunctionIndex::get_memory32_grow_index()
3366 } else {
3367 VMBuiltinFunctionIndex::get_imported_memory32_grow_index()
3368 },
3369 ) as i32,
3370 ),
3371 Location::GPR(self.machine.get_grp_for_call()),
3372 )?;
3373
3374 self.release_locations_only_osr_state(1)?;
3375
3376 self.emit_call_native(
3377 |this| {
3378 this.machine
3379 .emit_call_register(this.machine.get_grp_for_call())
3380 },
3381 iter::once(param_pages)
3383 .chain(iter::once(Location::Imm32(memory_index.index() as u32))),
3384 [WpType::I64, WpType::I64].iter().cloned(),
3385 )?;
3386
3387 self.release_locations_only_stack(&[param_pages])?;
3388
3389 let ret = self.acquire_locations(
3390 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3391 false,
3392 )?[0];
3393 self.value_stack.push(ret);
3394 self.machine.move_location(
3395 Size::S64,
3396 Location::GPR(self.machine.get_gpr_for_ret()),
3397 ret,
3398 )?;
3399 }
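// Loads and stores all go through `op_memory`, which supplies the bounds-check
// mode, the memory's local/imported status, the relevant VMContext offset, and
// the trap labels needed by the per-type machine emitters.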
3400 Operator::I32Load { ref memarg } => {
3401 let target = self.pop_value_released()?;
3402 let ret = self.acquire_locations(
3403 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
3404 false,
3405 )?[0];
3406 self.value_stack.push(ret);
3407 self.op_memory(
3408 |this,
3409 need_check,
3410 imported_memories,
3411 offset,
3412 heap_access_oob,
3413 unaligned_atomic| {
3414 this.machine.i32_load(
3415 target,
3416 memarg,
3417 ret,
3418 need_check,
3419 imported_memories,
3420 offset,
3421 heap_access_oob,
3422 unaligned_atomic,
3423 )
3424 },
3425 )?;
3426 }
3427 Operator::F32Load { ref memarg } => {
3428 let target = self.pop_value_released()?;
3429 let ret = self.acquire_locations(
3430 &[(WpType::F32, MachineValue::WasmStack(self.value_stack.len()))],
3431 false,
3432 )?[0];
3433 self.value_stack.push(ret);
3434 self.fp_stack
3435 .push(FloatValue::new(self.value_stack.len() - 1));
3436 self.op_memory(
3437 |this,
3438 need_check,
3439 imported_memories,
3440 offset,
3441 heap_access_oob,
3442 unaligned_atomic| {
3443 this.machine.f32_load(
3444 target,
3445 memarg,
3446 ret,
3447 need_check,
3448 imported_memories,
3449 offset,
3450 heap_access_oob,
3451 unaligned_atomic,
3452 )
3453 },
3454 )?;
3455 }
3456 Operator::I32Load8U { ref memarg } => {
3457 let target = self.pop_value_released()?;
3458 let ret = self.acquire_locations(
3459 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
3460 false,
3461 )?[0];
3462 self.value_stack.push(ret);
3463 self.op_memory(
3464 |this,
3465 need_check,
3466 imported_memories,
3467 offset,
3468 heap_access_oob,
3469 unaligned_atomic| {
3470 this.machine.i32_load_8u(
3471 target,
3472 memarg,
3473 ret,
3474 need_check,
3475 imported_memories,
3476 offset,
3477 heap_access_oob,
3478 unaligned_atomic,
3479 )
3480 },
3481 )?;
3482 }
3483 Operator::I32Load8S { ref memarg } => {
3484 let target = self.pop_value_released()?;
3485 let ret = self.acquire_locations(
3486 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
3487 false,
3488 )?[0];
3489 self.value_stack.push(ret);
3490 self.op_memory(
3491 |this,
3492 need_check,
3493 imported_memories,
3494 offset,
3495 heap_access_oob,
3496 unaligned_atomic| {
3497 this.machine.i32_load_8s(
3498 target,
3499 memarg,
3500 ret,
3501 need_check,
3502 imported_memories,
3503 offset,
3504 heap_access_oob,
3505 unaligned_atomic,
3506 )
3507 },
3508 )?;
3509 }
3510 Operator::I32Load16U { ref memarg } => {
3511 let target = self.pop_value_released()?;
3512 let ret = self.acquire_locations(
3513 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
3514 false,
3515 )?[0];
3516 self.value_stack.push(ret);
3517 self.op_memory(
3518 |this,
3519 need_check,
3520 imported_memories,
3521 offset,
3522 heap_access_oob,
3523 unaligned_atomic| {
3524 this.machine.i32_load_16u(
3525 target,
3526 memarg,
3527 ret,
3528 need_check,
3529 imported_memories,
3530 offset,
3531 heap_access_oob,
3532 unaligned_atomic,
3533 )
3534 },
3535 )?;
3536 }
3537 Operator::I32Load16S { ref memarg } => {
3538 let target = self.pop_value_released()?;
3539 let ret = self.acquire_locations(
3540 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
3541 false,
3542 )?[0];
3543 self.value_stack.push(ret);
3544 self.op_memory(
3545 |this,
3546 need_check,
3547 imported_memories,
3548 offset,
3549 heap_access_oob,
3550 unaligned_atomic| {
3551 this.machine.i32_load_16s(
3552 target,
3553 memarg,
3554 ret,
3555 need_check,
3556 imported_memories,
3557 offset,
3558 heap_access_oob,
3559 unaligned_atomic,
3560 )
3561 },
3562 )?;
3563 }
3564 Operator::I32Store { ref memarg } => {
3565 let target_value = self.pop_value_released()?;
3566 let target_addr = self.pop_value_released()?;
3567 self.op_memory(
3568 |this,
3569 need_check,
3570 imported_memories,
3571 offset,
3572 heap_access_oob,
3573 unaligned_atomic| {
3574 this.machine.i32_save(
3575 target_value,
3576 memarg,
3577 target_addr,
3578 need_check,
3579 imported_memories,
3580 offset,
3581 heap_access_oob,
3582 unaligned_atomic,
3583 )
3584 },
3585 )?;
3586 }
3587 Operator::F32Store { ref memarg } => {
3588 let target_value = self.pop_value_released()?;
3589 let target_addr = self.pop_value_released()?;
3590 let fp = self.fp_stack.pop1()?;
3591 let config_nan_canonicalization = self.config.enable_nan_canonicalization;
3592 self.op_memory(
3593 |this,
3594 need_check,
3595 imported_memories,
3596 offset,
3597 heap_access_oob,
3598 unaligned_atomic| {
3599 this.machine.f32_save(
3600 target_value,
3601 memarg,
3602 target_addr,
3603 config_nan_canonicalization && fp.canonicalization.is_some(),
3604 need_check,
3605 imported_memories,
3606 offset,
3607 heap_access_oob,
3608 unaligned_atomic,
3609 )
3610 },
3611 )?;
3612 }
3613 Operator::I32Store8 { ref memarg } => {
3614 let target_value = self.pop_value_released()?;
3615 let target_addr = self.pop_value_released()?;
3616 self.op_memory(
3617 |this,
3618 need_check,
3619 imported_memories,
3620 offset,
3621 heap_access_oob,
3622 unaligned_atomic| {
3623 this.machine.i32_save_8(
3624 target_value,
3625 memarg,
3626 target_addr,
3627 need_check,
3628 imported_memories,
3629 offset,
3630 heap_access_oob,
3631 unaligned_atomic,
3632 )
3633 },
3634 )?;
3635 }
3636 Operator::I32Store16 { ref memarg } => {
3637 let target_value = self.pop_value_released()?;
3638 let target_addr = self.pop_value_released()?;
3639 self.op_memory(
3640 |this,
3641 need_check,
3642 imported_memories,
3643 offset,
3644 heap_access_oob,
3645 unaligned_atomic| {
3646 this.machine.i32_save_16(
3647 target_value,
3648 memarg,
3649 target_addr,
3650 need_check,
3651 imported_memories,
3652 offset,
3653 heap_access_oob,
3654 unaligned_atomic,
3655 )
3656 },
3657 )?;
3658 }
3659 Operator::I64Load { ref memarg } => {
3660 let target = self.pop_value_released()?;
3661 let ret = self.acquire_locations(
3662 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3663 false,
3664 )?[0];
3665 self.value_stack.push(ret);
3666 self.op_memory(
3667 |this,
3668 need_check,
3669 imported_memories,
3670 offset,
3671 heap_access_oob,
3672 unaligned_atomic| {
3673 this.machine.i64_load(
3674 target,
3675 memarg,
3676 ret,
3677 need_check,
3678 imported_memories,
3679 offset,
3680 heap_access_oob,
3681 unaligned_atomic,
3682 )
3683 },
3684 )?;
3685 }
3686 Operator::F64Load { ref memarg } => {
3687 let target = self.pop_value_released()?;
3688 let ret = self.acquire_locations(
3689 &[(WpType::F64, MachineValue::WasmStack(self.value_stack.len()))],
3690 false,
3691 )?[0];
3692 self.value_stack.push(ret);
3693 self.fp_stack
3694 .push(FloatValue::new(self.value_stack.len() - 1));
3695 self.op_memory(
3696 |this,
3697 need_check,
3698 imported_memories,
3699 offset,
3700 heap_access_oob,
3701 unaligned_atomic| {
3702 this.machine.f64_load(
3703 target,
3704 memarg,
3705 ret,
3706 need_check,
3707 imported_memories,
3708 offset,
3709 heap_access_oob,
3710 unaligned_atomic,
3711 )
3712 },
3713 )?;
3714 }
3715 Operator::I64Load8U { ref memarg } => {
3716 let target = self.pop_value_released()?;
3717 let ret = self.acquire_locations(
3718 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3719 false,
3720 )?[0];
3721 self.value_stack.push(ret);
3722 self.op_memory(
3723 |this,
3724 need_check,
3725 imported_memories,
3726 offset,
3727 heap_access_oob,
3728 unaligned_atomic| {
3729 this.machine.i64_load_8u(
3730 target,
3731 memarg,
3732 ret,
3733 need_check,
3734 imported_memories,
3735 offset,
3736 heap_access_oob,
3737 unaligned_atomic,
3738 )
3739 },
3740 )?;
3741 }
3742 Operator::I64Load8S { ref memarg } => {
3743 let target = self.pop_value_released()?;
3744 let ret = self.acquire_locations(
3745 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3746 false,
3747 )?[0];
3748 self.value_stack.push(ret);
3749 self.op_memory(
3750 |this,
3751 need_check,
3752 imported_memories,
3753 offset,
3754 heap_access_oob,
3755 unaligned_atomic| {
3756 this.machine.i64_load_8s(
3757 target,
3758 memarg,
3759 ret,
3760 need_check,
3761 imported_memories,
3762 offset,
3763 heap_access_oob,
3764 unaligned_atomic,
3765 )
3766 },
3767 )?;
3768 }
3769 Operator::I64Load16U { ref memarg } => {
3770 let target = self.pop_value_released()?;
3771 let ret = self.acquire_locations(
3772 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3773 false,
3774 )?[0];
3775 self.value_stack.push(ret);
3776 self.op_memory(
3777 |this,
3778 need_check,
3779 imported_memories,
3780 offset,
3781 heap_access_oob,
3782 unaligned_atomic| {
3783 this.machine.i64_load_16u(
3784 target,
3785 memarg,
3786 ret,
3787 need_check,
3788 imported_memories,
3789 offset,
3790 heap_access_oob,
3791 unaligned_atomic,
3792 )
3793 },
3794 )?;
3795 }
3796 Operator::I64Load16S { ref memarg } => {
3797 let target = self.pop_value_released()?;
3798 let ret = self.acquire_locations(
3799 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3800 false,
3801 )?[0];
3802 self.value_stack.push(ret);
3803 self.op_memory(
3804 |this,
3805 need_check,
3806 imported_memories,
3807 offset,
3808 heap_access_oob,
3809 unaligned_atomic| {
3810 this.machine.i64_load_16s(
3811 target,
3812 memarg,
3813 ret,
3814 need_check,
3815 imported_memories,
3816 offset,
3817 heap_access_oob,
3818 unaligned_atomic,
3819 )
3820 },
3821 )?;
3822 }
3823 Operator::I64Load32U { ref memarg } => {
3824 let target = self.pop_value_released()?;
3825 let ret = self.acquire_locations(
3826 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3827 false,
3828 )?[0];
3829 self.value_stack.push(ret);
3830 self.op_memory(
3831 |this,
3832 need_check,
3833 imported_memories,
3834 offset,
3835 heap_access_oob,
3836 unaligned_atomic| {
3837 this.machine.i64_load_32u(
3838 target,
3839 memarg,
3840 ret,
3841 need_check,
3842 imported_memories,
3843 offset,
3844 heap_access_oob,
3845 unaligned_atomic,
3846 )
3847 },
3848 )?;
3849 }
3850 Operator::I64Load32S { ref memarg } => {
3851 let target = self.pop_value_released()?;
3852 let ret = self.acquire_locations(
3853 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
3854 false,
3855 )?[0];
3856 self.value_stack.push(ret);
3857 self.op_memory(
3858 |this,
3859 need_check,
3860 imported_memories,
3861 offset,
3862 heap_access_oob,
3863 unaligned_atomic| {
3864 this.machine.i64_load_32s(
3865 target,
3866 memarg,
3867 ret,
3868 need_check,
3869 imported_memories,
3870 offset,
3871 heap_access_oob,
3872 unaligned_atomic,
3873 )
3874 },
3875 )?;
3876 }
3877 Operator::I64Store { ref memarg } => {
3878 let target_value = self.pop_value_released()?;
3879 let target_addr = self.pop_value_released()?;
3880
3881 self.op_memory(
3882 |this,
3883 need_check,
3884 imported_memories,
3885 offset,
3886 heap_access_oob,
3887 unaligned_atomic| {
3888 this.machine.i64_save(
3889 target_value,
3890 memarg,
3891 target_addr,
3892 need_check,
3893 imported_memories,
3894 offset,
3895 heap_access_oob,
3896 unaligned_atomic,
3897 )
3898 },
3899 )?;
3900 }
3901 Operator::F64Store { ref memarg } => {
3902 let target_value = self.pop_value_released()?;
3903 let target_addr = self.pop_value_released()?;
3904 let fp = self.fp_stack.pop1()?;
3905 let config_nan_canonicalization = self.config.enable_nan_canonicalization;
3906 self.op_memory(
3907 |this,
3908 need_check,
3909 imported_memories,
3910 offset,
3911 heap_access_oob,
3912 unaligned_atomic| {
3913 this.machine.f64_save(
3914 target_value,
3915 memarg,
3916 target_addr,
3917 config_nan_canonicalization && fp.canonicalization.is_some(),
3918 need_check,
3919 imported_memories,
3920 offset,
3921 heap_access_oob,
3922 unaligned_atomic,
3923 )
3924 },
3925 )?;
3926 }
3927 Operator::I64Store8 { ref memarg } => {
3928 let target_value = self.pop_value_released()?;
3929 let target_addr = self.pop_value_released()?;
3930 self.op_memory(
3931 |this,
3932 need_check,
3933 imported_memories,
3934 offset,
3935 heap_access_oob,
3936 unaligned_atomic| {
3937 this.machine.i64_save_8(
3938 target_value,
3939 memarg,
3940 target_addr,
3941 need_check,
3942 imported_memories,
3943 offset,
3944 heap_access_oob,
3945 unaligned_atomic,
3946 )
3947 },
3948 )?;
3949 }
3950 Operator::I64Store16 { ref memarg } => {
3951 let target_value = self.pop_value_released()?;
3952 let target_addr = self.pop_value_released()?;
3953 self.op_memory(
3954 |this,
3955 need_check,
3956 imported_memories,
3957 offset,
3958 heap_access_oob,
3959 unaligned_atomic| {
3960 this.machine.i64_save_16(
3961 target_value,
3962 memarg,
3963 target_addr,
3964 need_check,
3965 imported_memories,
3966 offset,
3967 heap_access_oob,
3968 unaligned_atomic,
3969 )
3970 },
3971 )?;
3972 }
3973 Operator::I64Store32 { ref memarg } => {
3974 let target_value = self.pop_value_released()?;
3975 let target_addr = self.pop_value_released()?;
3976 self.op_memory(
3977 |this,
3978 need_check,
3979 imported_memories,
3980 offset,
3981 heap_access_oob,
3982 unaligned_atomic| {
3983 this.machine.i64_save_32(
3984 target_value,
3985 memarg,
3986 target_addr,
3987 need_check,
3988 imported_memories,
3989 offset,
3990 heap_access_oob,
3991 unaligned_atomic,
3992 )
3993 },
3994 )?;
3995 }
3996 Operator::Unreachable => {
3997 self.mark_trappable();
3998 self.machine
3999 .emit_illegal_op(TrapCode::UnreachableCodeReached)?;
4000 self.unreachable_depth = 1;
4001 }
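// `return`: materialize the single return value (canonicalizing NaNs when
// needed), release value-stack locations, and jump to the function's outermost
// frame label; the rest of the block is unreachable.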
4002 Operator::Return => {
4003 let frame = &self.control_stack[0];
4004 if !frame.returns.is_empty() {
4005 if frame.returns.len() != 1 {
4006 return Err(CompileError::Codegen(
4007 "Return: incorrect frame.returns".to_owned(),
4008 ));
4009 }
4010 let first_return = frame.returns[0];
4011 let loc = *self.value_stack.last().unwrap();
4012 let canonicalize = if first_return.is_float() {
4013 let fp = self.fp_stack.peek1()?;
4014 self.machine.arch_supports_canonicalize_nan()
4015 && self.config.enable_nan_canonicalization
4016 && fp.canonicalization.is_some()
4017 } else {
4018 false
4019 };
4020 self.machine
4021 .emit_function_return_value(first_return, canonicalize, loc)?;
4022 }
4023 let frame = &self.control_stack[0];
4024 let frame_depth = frame.value_stack_depth;
4025 let label = frame.label;
4026 self.release_locations_keep_state(frame_depth)?;
4027 self.machine.jmp_unconditionnal(label)?;
4028 self.unreachable_depth = 1;
4029 }
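// `br`/`br_if`: for non-loop targets that carry a value, materialize that
// value first, then release locations down to the target frame's depth and
// jump to its label.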
4030 Operator::Br { relative_depth } => {
4031 let frame =
4032 &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
4033 if !frame.loop_like && !frame.returns.is_empty() {
4034 if frame.returns.len() != 1 {
4035 return Err(CompileError::Codegen(
4036 "Br: incorrect frame.returns".to_owned(),
4037 ));
4038 }
4039 let first_return = frame.returns[0];
4040 let loc = *self.value_stack.last().unwrap();
4041 let canonicalize = if first_return.is_float() {
4042 let fp = self.fp_stack.peek1()?;
4043 self.machine.arch_supports_canonicalize_nan()
4044 && self.config.enable_nan_canonicalization
4045 && fp.canonicalization.is_some()
4046 } else {
4047 false
4048 };
4049 self.machine
4050 .emit_function_return_value(first_return, canonicalize, loc)?;
4051 }
4052 let stack_len = self.control_stack.len();
4053 let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
4054 let frame_depth = frame.value_stack_depth;
4055 let label = frame.label;
4056
4057 self.release_locations_keep_state(frame_depth)?;
4058 self.machine.jmp_unconditionnal(label)?;
4059 self.unreachable_depth = 1;
4060 }
4061 Operator::BrIf { relative_depth } => {
4062 let after = self.machine.get_label();
4063 let cond = self.pop_value_released()?;
4064 self.machine.jmp_on_condition(
4065 UnsignedCondition::Equal,
4066 Size::S32,
4067 Location::Imm32(0),
4068 cond,
4069 after,
4070 )?;
4071
4072 let frame =
4073 &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
4074 if !frame.loop_like && !frame.returns.is_empty() {
4075 if frame.returns.len() != 1 {
4076 return Err(CompileError::Codegen(
4077 "BrIf: incorrect frame.returns".to_owned(),
4078 ));
4079 }
4080
4081 let first_return = frame.returns[0];
4082 let loc = *self.value_stack.last().unwrap();
4083 let canonicalize = if first_return.is_float() {
4084 let fp = self.fp_stack.peek1()?;
4085 self.machine.arch_supports_canonicalize_nan()
4086 && self.config.enable_nan_canonicalization
4087 && fp.canonicalization.is_some()
4088 } else {
4089 false
4090 };
4091 self.machine
4092 .emit_function_return_value(first_return, canonicalize, loc)?;
4093 }
4094 let stack_len = self.control_stack.len();
4095 let frame = &mut self.control_stack[stack_len - 1 - (relative_depth as usize)];
4096 let stack_depth = frame.value_stack_depth;
4097 let label = frame.label;
4098 self.release_locations_keep_state(stack_depth)?;
4099 self.machine.jmp_unconditionnal(label)?;
4100
4101 self.machine.emit_label(after)?;
4102 }
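// `br_table`: bounds-check the index against the number of explicit targets,
// then jump indirectly through a jump table of per-target trampolines, each
// of which forwards the result and branches to its frame; out-of-range
// indices take the default target.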
4103 Operator::BrTable { ref targets } => {
4104 let default_target = targets.default();
4105 let targets = targets
4106 .targets()
4107 .collect::<Result<Vec<_>, _>>()
4108 .map_err(|e| CompileError::Codegen(format!("BrTable read_table: {e:?}")))?;
4109 let cond = self.pop_value_released()?;
4110 let table_label = self.machine.get_label();
4111 let mut table: Vec<Label> = vec![];
4112 let default_br = self.machine.get_label();
4113 self.machine.jmp_on_condition(
4114 UnsignedCondition::AboveEqual,
4115 Size::S32,
4116 Location::Imm32(targets.len() as u32),
4117 cond,
4118 default_br,
4119 )?;
4120
4121 self.machine.emit_jmp_to_jumptable(table_label, cond)?;
4122
4123 for target in targets.iter() {
4124 let label = self.machine.get_label();
4125 self.machine.emit_label(label)?;
4126 table.push(label);
4127 let frame =
4128 &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
4129 if !frame.loop_like && !frame.returns.is_empty() {
4130 if frame.returns.len() != 1 {
4131 return Err(CompileError::Codegen(format!(
4132 "BrTable: incorrect frame.returns for {target:?}",
4133 )));
4134 }
4135
4136 let first_return = frame.returns[0];
4137 let loc = *self.value_stack.last().unwrap();
4138 let canonicalize = if first_return.is_float() {
4139 let fp = self.fp_stack.peek1()?;
4140 self.machine.arch_supports_canonicalize_nan()
4141 && self.config.enable_nan_canonicalization
4142 && fp.canonicalization.is_some()
4143 } else {
4144 false
4145 };
4146 self.machine
4147 .emit_function_return_value(first_return, canonicalize, loc)?;
4148 }
4149 let frame =
4150 &self.control_stack[self.control_stack.len() - 1 - (*target as usize)];
4151 let stack_depth = frame.value_stack_depth;
4152 let label = frame.label;
4153 self.release_locations_keep_state(stack_depth)?;
4154 self.machine.jmp_unconditionnal(label)?;
4155 }
4156 self.machine.emit_label(default_br)?;
4157
4158 {
4159 let frame = &self.control_stack
4160 [self.control_stack.len() - 1 - (default_target as usize)];
4161 if !frame.loop_like && !frame.returns.is_empty() {
4162 if frame.returns.len() != 1 {
4163 return Err(CompileError::Codegen(
4164 "BrTable: incorrect frame.returns".to_owned(),
4165 ));
4166 }
4167
4168 let first_return = frame.returns[0];
4169 let loc = *self.value_stack.last().unwrap();
4170 let canonicalize = if first_return.is_float() {
4171 let fp = self.fp_stack.peek1()?;
4172 self.machine.arch_supports_canonicalize_nan()
4173 && self.config.enable_nan_canonicalization
4174 && fp.canonicalization.is_some()
4175 } else {
4176 false
4177 };
4178 self.machine
4179 .emit_function_return_value(first_return, canonicalize, loc)?;
4180 }
4181 let frame = &self.control_stack
4182 [self.control_stack.len() - 1 - (default_target as usize)];
4183 let stack_depth = frame.value_stack_depth;
4184 let label = frame.label;
4185 self.release_locations_keep_state(stack_depth)?;
4186 self.machine.jmp_unconditionnal(label)?;
4187 }
4188
4189 self.machine.emit_label(table_label)?;
4190 for x in table {
4191 self.machine.jmp_unconditionnal(x)?;
4192 }
4193 self.unreachable_depth = 1;
4194 }
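// `drop`: discard the top of the value stack and, if it was a float, the
// matching float-stack entry.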
4195 Operator::Drop => {
4196 self.pop_value_released()?;
4197 if let Some(x) = self.fp_stack.last() {
4198 if x.depth == self.value_stack.len() {
4199 self.fp_stack.pop1()?;
4200 }
4201 }
4202 }
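// `end`: close the current frame. For the outermost frame this emits the
// function epilogue and `ret`; for inner frames it truncates the value/float
// stacks and re-materializes the single frame result from the return
// register.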
4203 Operator::End => {
4204 let frame = self.control_stack.pop().unwrap();
4205
4206 if !was_unreachable && !frame.returns.is_empty() {
4207 let loc = *self.value_stack.last().unwrap();
4208 let canonicalize = if frame.returns[0].is_float() {
4209 let fp = self.fp_stack.peek1()?;
4210 self.machine.arch_supports_canonicalize_nan()
4211 && self.config.enable_nan_canonicalization
4212 && fp.canonicalization.is_some()
4213 } else {
4214 false
4215 };
4216 self.machine
4217 .emit_function_return_value(frame.returns[0], canonicalize, loc)?;
4218 }
4219
4220 if self.control_stack.is_empty() {
4221 self.machine.emit_label(frame.label)?;
4222 self.finalize_locals(self.calling_convention)?;
4223 self.machine.emit_function_epilog()?;
4224
4225 match self.signature.results() {
4227 [x] if *x == Type::F32 || *x == Type::F64 => {
4228 self.machine.emit_function_return_float()?;
4229 }
4230 _ => {}
4231 }
4232 self.machine.emit_ret()?;
4233 } else {
4234 let released = &self.value_stack.clone()[frame.value_stack_depth..];
4235 self.release_locations(released)?;
4236 self.value_stack.truncate(frame.value_stack_depth);
4237 self.fp_stack.truncate(frame.fp_stack_depth);
4238
4239 if !frame.loop_like {
4240 self.machine.emit_label(frame.label)?;
4241 }
4242
4243 if let IfElseState::If(label) = frame.if_else {
4244 self.machine.emit_label(label)?;
4245 }
4246
4247 if !frame.returns.is_empty() {
4248 if frame.returns.len() != 1 {
4249 return Err(CompileError::Codegen(
4250 "End: incorrect frame.returns".to_owned(),
4251 ));
4252 }
4253 let loc = self.acquire_locations(
4254 &[(
4255 frame.returns[0],
4256 MachineValue::WasmStack(self.value_stack.len()),
4257 )],
4258 false,
4259 )?[0];
4260 self.machine.move_location(
4261 Size::S64,
4262 Location::GPR(self.machine.get_gpr_for_ret()),
4263 loc,
4264 )?;
4265 self.value_stack.push(loc);
4266 if frame.returns[0].is_float() {
4267 self.fp_stack
4268 .push(FloatValue::new(self.value_stack.len() - 1));
4269 }
4271 }
4272 }
4273 }
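// `atomic.fence`: lowered to a full memory fence.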
4274 Operator::AtomicFence => {
4275 self.machine.emit_memory_fence()?;
4283 }
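// Atomic loads: pop the address, reserve a stack slot for the result, and
// let `op_memory` provide the bounds-check flag, memory offsets and trap
// labels before delegating to the machine-specific atomic load.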
4284 Operator::I32AtomicLoad { ref memarg } => {
4285 let target = self.pop_value_released()?;
4286 let ret = self.acquire_locations(
4287 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4288 false,
4289 )?[0];
4290 self.value_stack.push(ret);
4291 self.op_memory(
4292 |this,
4293 need_check,
4294 imported_memories,
4295 offset,
4296 heap_access_oob,
4297 unaligned_atomic| {
4298 this.machine.i32_atomic_load(
4299 target,
4300 memarg,
4301 ret,
4302 need_check,
4303 imported_memories,
4304 offset,
4305 heap_access_oob,
4306 unaligned_atomic,
4307 )
4308 },
4309 )?;
4310 }
4311 Operator::I32AtomicLoad8U { ref memarg } => {
4312 let target = self.pop_value_released()?;
4313 let ret = self.acquire_locations(
4314 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4315 false,
4316 )?[0];
4317 self.value_stack.push(ret);
4318 self.op_memory(
4319 |this,
4320 need_check,
4321 imported_memories,
4322 offset,
4323 heap_access_oob,
4324 unaligned_atomic| {
4325 this.machine.i32_atomic_load_8u(
4326 target,
4327 memarg,
4328 ret,
4329 need_check,
4330 imported_memories,
4331 offset,
4332 heap_access_oob,
4333 unaligned_atomic,
4334 )
4335 },
4336 )?;
4337 }
4338 Operator::I32AtomicLoad16U { ref memarg } => {
4339 let target = self.pop_value_released()?;
4340 let ret = self.acquire_locations(
4341 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4342 false,
4343 )?[0];
4344 self.value_stack.push(ret);
4345 self.op_memory(
4346 |this,
4347 need_check,
4348 imported_memories,
4349 offset,
4350 heap_access_oob,
4351 unaligned_atomic| {
4352 this.machine.i32_atomic_load_16u(
4353 target,
4354 memarg,
4355 ret,
4356 need_check,
4357 imported_memories,
4358 offset,
4359 heap_access_oob,
4360 unaligned_atomic,
4361 )
4362 },
4363 )?;
4364 }
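// Atomic stores: the value sits on top of the stack with the address below
// it; both are popped and passed to the machine-specific atomic store
// through `op_memory`.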
4365 Operator::I32AtomicStore { ref memarg } => {
4366 let target_value = self.pop_value_released()?;
4367 let target_addr = self.pop_value_released()?;
4368 self.op_memory(
4369 |this,
4370 need_check,
4371 imported_memories,
4372 offset,
4373 heap_access_oob,
4374 unaligned_atomic| {
4375 this.machine.i32_atomic_save(
4376 target_value,
4377 memarg,
4378 target_addr,
4379 need_check,
4380 imported_memories,
4381 offset,
4382 heap_access_oob,
4383 unaligned_atomic,
4384 )
4385 },
4386 )?;
4387 }
4388 Operator::I32AtomicStore8 { ref memarg } => {
4389 let target_value = self.pop_value_released()?;
4390 let target_addr = self.pop_value_released()?;
4391 self.op_memory(
4392 |this,
4393 need_check,
4394 imported_memories,
4395 offset,
4396 heap_access_oob,
4397 unaligned_atomic| {
4398 this.machine.i32_atomic_save_8(
4399 target_value,
4400 memarg,
4401 target_addr,
4402 need_check,
4403 imported_memories,
4404 offset,
4405 heap_access_oob,
4406 unaligned_atomic,
4407 )
4408 },
4409 )?;
4410 }
4411 Operator::I32AtomicStore16 { ref memarg } => {
4412 let target_value = self.pop_value_released()?;
4413 let target_addr = self.pop_value_released()?;
4414 self.op_memory(
4415 |this,
4416 need_check,
4417 imported_memories,
4418 offset,
4419 heap_access_oob,
4420 unaligned_atomic| {
4421 this.machine.i32_atomic_save_16(
4422 target_value,
4423 memarg,
4424 target_addr,
4425 need_check,
4426 imported_memories,
4427 offset,
4428 heap_access_oob,
4429 unaligned_atomic,
4430 )
4431 },
4432 )?;
4433 }
4434 Operator::I64AtomicLoad { ref memarg } => {
4435 let target = self.pop_value_released()?;
4436 let ret = self.acquire_locations(
4437 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4438 false,
4439 )?[0];
4440 self.value_stack.push(ret);
4441 self.op_memory(
4442 |this,
4443 need_check,
4444 imported_memories,
4445 offset,
4446 heap_access_oob,
4447 unaligned_atomic| {
4448 this.machine.i64_atomic_load(
4449 target,
4450 memarg,
4451 ret,
4452 need_check,
4453 imported_memories,
4454 offset,
4455 heap_access_oob,
4456 unaligned_atomic,
4457 )
4458 },
4459 )?;
4460 }
4461 Operator::I64AtomicLoad8U { ref memarg } => {
4462 let target = self.pop_value_released()?;
4463 let ret = self.acquire_locations(
4464 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4465 false,
4466 )?[0];
4467 self.value_stack.push(ret);
4468 self.op_memory(
4469 |this,
4470 need_check,
4471 imported_memories,
4472 offset,
4473 heap_access_oob,
4474 unaligned_atomic| {
4475 this.machine.i64_atomic_load_8u(
4476 target,
4477 memarg,
4478 ret,
4479 need_check,
4480 imported_memories,
4481 offset,
4482 heap_access_oob,
4483 unaligned_atomic,
4484 )
4485 },
4486 )?;
4487 }
4488 Operator::I64AtomicLoad16U { ref memarg } => {
4489 let target = self.pop_value_released()?;
4490 let ret = self.acquire_locations(
4491 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4492 false,
4493 )?[0];
4494 self.value_stack.push(ret);
4495 self.op_memory(
4496 |this,
4497 need_check,
4498 imported_memories,
4499 offset,
4500 heap_access_oob,
4501 unaligned_atomic| {
4502 this.machine.i64_atomic_load_16u(
4503 target,
4504 memarg,
4505 ret,
4506 need_check,
4507 imported_memories,
4508 offset,
4509 heap_access_oob,
4510 unaligned_atomic,
4511 )
4512 },
4513 )?;
4514 }
4515 Operator::I64AtomicLoad32U { ref memarg } => {
4516 let target = self.pop_value_released()?;
4517 let ret = self.acquire_locations(
4518 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4519 false,
4520 )?[0];
4521 self.value_stack.push(ret);
4522 self.op_memory(
4523 |this,
4524 need_check,
4525 imported_memories,
4526 offset,
4527 heap_access_oob,
4528 unaligned_atomic| {
4529 this.machine.i64_atomic_load_32u(
4530 target,
4531 memarg,
4532 ret,
4533 need_check,
4534 imported_memories,
4535 offset,
4536 heap_access_oob,
4537 unaligned_atomic,
4538 )
4539 },
4540 )?;
4541 }
4542 Operator::I64AtomicStore { ref memarg } => {
4543 let target_value = self.pop_value_released()?;
4544 let target_addr = self.pop_value_released()?;
4545 self.op_memory(
4546 |this,
4547 need_check,
4548 imported_memories,
4549 offset,
4550 heap_access_oob,
4551 unaligned_atomic| {
4552 this.machine.i64_atomic_save(
4553 target_value,
4554 memarg,
4555 target_addr,
4556 need_check,
4557 imported_memories,
4558 offset,
4559 heap_access_oob,
4560 unaligned_atomic,
4561 )
4562 },
4563 )?;
4564 }
4565 Operator::I64AtomicStore8 { ref memarg } => {
4566 let target_value = self.pop_value_released()?;
4567 let target_addr = self.pop_value_released()?;
4568 self.op_memory(
4569 |this,
4570 need_check,
4571 imported_memories,
4572 offset,
4573 heap_access_oob,
4574 unaligned_atomic| {
4575 this.machine.i64_atomic_save_8(
4576 target_value,
4577 memarg,
4578 target_addr,
4579 need_check,
4580 imported_memories,
4581 offset,
4582 heap_access_oob,
4583 unaligned_atomic,
4584 )
4585 },
4586 )?;
4587 }
4588 Operator::I64AtomicStore16 { ref memarg } => {
4589 let target_value = self.pop_value_released()?;
4590 let target_addr = self.pop_value_released()?;
4591 self.op_memory(
4592 |this,
4593 need_check,
4594 imported_memories,
4595 offset,
4596 heap_access_oob,
4597 unaligned_atomic| {
4598 this.machine.i64_atomic_save_16(
4599 target_value,
4600 memarg,
4601 target_addr,
4602 need_check,
4603 imported_memories,
4604 offset,
4605 heap_access_oob,
4606 unaligned_atomic,
4607 )
4608 },
4609 )?;
4610 }
4611 Operator::I64AtomicStore32 { ref memarg } => {
4612 let target_value = self.pop_value_released()?;
4613 let target_addr = self.pop_value_released()?;
4614 self.op_memory(
4615 |this,
4616 need_check,
4617 imported_memories,
4618 offset,
4619 heap_access_oob,
4620 unaligned_atomic| {
4621 this.machine.i64_atomic_save_32(
4622 target_value,
4623 memarg,
4624 target_addr,
4625 need_check,
4626 imported_memories,
4627 offset,
4628 heap_access_oob,
4629 unaligned_atomic,
4630 )
4631 },
4632 )?;
4633 }
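// Atomic read-modify-write operators (add/sub/and/or/xor, full-width and
// zero-extended narrow variants): pop the operand and the address, reserve a
// slot for the old value and delegate to the matching machine primitive via
// `op_memory`.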
4634 Operator::I32AtomicRmwAdd { ref memarg } => {
4635 let loc = self.pop_value_released()?;
4636 let target = self.pop_value_released()?;
4637 let ret = self.acquire_locations(
4638 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4639 false,
4640 )?[0];
4641 self.value_stack.push(ret);
4642 self.op_memory(
4643 |this,
4644 need_check,
4645 imported_memories,
4646 offset,
4647 heap_access_oob,
4648 unaligned_atomic| {
4649 this.machine.i32_atomic_add(
4650 loc,
4651 target,
4652 memarg,
4653 ret,
4654 need_check,
4655 imported_memories,
4656 offset,
4657 heap_access_oob,
4658 unaligned_atomic,
4659 )
4660 },
4661 )?;
4662 }
4663 Operator::I64AtomicRmwAdd { ref memarg } => {
4664 let loc = self.pop_value_released()?;
4665 let target = self.pop_value_released()?;
4666 let ret = self.acquire_locations(
4667 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4668 false,
4669 )?[0];
4670 self.value_stack.push(ret);
4671 self.op_memory(
4672 |this,
4673 need_check,
4674 imported_memories,
4675 offset,
4676 heap_access_oob,
4677 unaligned_atomic| {
4678 this.machine.i64_atomic_add(
4679 loc,
4680 target,
4681 memarg,
4682 ret,
4683 need_check,
4684 imported_memories,
4685 offset,
4686 heap_access_oob,
4687 unaligned_atomic,
4688 )
4689 },
4690 )?;
4691 }
4692 Operator::I32AtomicRmw8AddU { ref memarg } => {
4693 let loc = self.pop_value_released()?;
4694 let target = self.pop_value_released()?;
4695 let ret = self.acquire_locations(
4696 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4697 false,
4698 )?[0];
4699 self.value_stack.push(ret);
4700 self.op_memory(
4701 |this,
4702 need_check,
4703 imported_memories,
4704 offset,
4705 heap_access_oob,
4706 unaligned_atomic| {
4707 this.machine.i32_atomic_add_8u(
4708 loc,
4709 target,
4710 memarg,
4711 ret,
4712 need_check,
4713 imported_memories,
4714 offset,
4715 heap_access_oob,
4716 unaligned_atomic,
4717 )
4718 },
4719 )?;
4720 }
4721 Operator::I32AtomicRmw16AddU { ref memarg } => {
4722 let loc = self.pop_value_released()?;
4723 let target = self.pop_value_released()?;
4724 let ret = self.acquire_locations(
4725 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4726 false,
4727 )?[0];
4728 self.value_stack.push(ret);
4729 self.op_memory(
4730 |this,
4731 need_check,
4732 imported_memories,
4733 offset,
4734 heap_access_oob,
4735 unaligned_atomic| {
4736 this.machine.i32_atomic_add_16u(
4737 loc,
4738 target,
4739 memarg,
4740 ret,
4741 need_check,
4742 imported_memories,
4743 offset,
4744 heap_access_oob,
4745 unaligned_atomic,
4746 )
4747 },
4748 )?;
4749 }
4750 Operator::I64AtomicRmw8AddU { ref memarg } => {
4751 let loc = self.pop_value_released()?;
4752 let target = self.pop_value_released()?;
4753 let ret = self.acquire_locations(
4754 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4755 false,
4756 )?[0];
4757 self.value_stack.push(ret);
4758 self.op_memory(
4759 |this,
4760 need_check,
4761 imported_memories,
4762 offset,
4763 heap_access_oob,
4764 unaligned_atomic| {
4765 this.machine.i64_atomic_add_8u(
4766 loc,
4767 target,
4768 memarg,
4769 ret,
4770 need_check,
4771 imported_memories,
4772 offset,
4773 heap_access_oob,
4774 unaligned_atomic,
4775 )
4776 },
4777 )?;
4778 }
4779 Operator::I64AtomicRmw16AddU { ref memarg } => {
4780 let loc = self.pop_value_released()?;
4781 let target = self.pop_value_released()?;
4782 let ret = self.acquire_locations(
4783 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4784 false,
4785 )?[0];
4786 self.value_stack.push(ret);
4787 self.op_memory(
4788 |this,
4789 need_check,
4790 imported_memories,
4791 offset,
4792 heap_access_oob,
4793 unaligned_atomic| {
4794 this.machine.i64_atomic_add_16u(
4795 loc,
4796 target,
4797 memarg,
4798 ret,
4799 need_check,
4800 imported_memories,
4801 offset,
4802 heap_access_oob,
4803 unaligned_atomic,
4804 )
4805 },
4806 )?;
4807 }
4808 Operator::I64AtomicRmw32AddU { ref memarg } => {
4809 let loc = self.pop_value_released()?;
4810 let target = self.pop_value_released()?;
4811 let ret = self.acquire_locations(
4812 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4813 false,
4814 )?[0];
4815 self.value_stack.push(ret);
4816 self.op_memory(
4817 |this,
4818 need_check,
4819 imported_memories,
4820 offset,
4821 heap_access_oob,
4822 unaligned_atomic| {
4823 this.machine.i64_atomic_add_32u(
4824 loc,
4825 target,
4826 memarg,
4827 ret,
4828 need_check,
4829 imported_memories,
4830 offset,
4831 heap_access_oob,
4832 unaligned_atomic,
4833 )
4834 },
4835 )?;
4836 }
4837 Operator::I32AtomicRmwSub { ref memarg } => {
4838 let loc = self.pop_value_released()?;
4839 let target = self.pop_value_released()?;
4840 let ret = self.acquire_locations(
4841 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4842 false,
4843 )?[0];
4844 self.value_stack.push(ret);
4845 self.op_memory(
4846 |this,
4847 need_check,
4848 imported_memories,
4849 offset,
4850 heap_access_oob,
4851 unaligned_atomic| {
4852 this.machine.i32_atomic_sub(
4853 loc,
4854 target,
4855 memarg,
4856 ret,
4857 need_check,
4858 imported_memories,
4859 offset,
4860 heap_access_oob,
4861 unaligned_atomic,
4862 )
4863 },
4864 )?;
4865 }
4866 Operator::I64AtomicRmwSub { ref memarg } => {
4867 let loc = self.pop_value_released()?;
4868 let target = self.pop_value_released()?;
4869 let ret = self.acquire_locations(
4870 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4871 false,
4872 )?[0];
4873 self.value_stack.push(ret);
4874 self.op_memory(
4875 |this,
4876 need_check,
4877 imported_memories,
4878 offset,
4879 heap_access_oob,
4880 unaligned_atomic| {
4881 this.machine.i64_atomic_sub(
4882 loc,
4883 target,
4884 memarg,
4885 ret,
4886 need_check,
4887 imported_memories,
4888 offset,
4889 heap_access_oob,
4890 unaligned_atomic,
4891 )
4892 },
4893 )?;
4894 }
4895 Operator::I32AtomicRmw8SubU { ref memarg } => {
4896 let loc = self.pop_value_released()?;
4897 let target = self.pop_value_released()?;
4898 let ret = self.acquire_locations(
4899 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4900 false,
4901 )?[0];
4902 self.value_stack.push(ret);
4903 self.op_memory(
4904 |this,
4905 need_check,
4906 imported_memories,
4907 offset,
4908 heap_access_oob,
4909 unaligned_atomic| {
4910 this.machine.i32_atomic_sub_8u(
4911 loc,
4912 target,
4913 memarg,
4914 ret,
4915 need_check,
4916 imported_memories,
4917 offset,
4918 heap_access_oob,
4919 unaligned_atomic,
4920 )
4921 },
4922 )?;
4923 }
4924 Operator::I32AtomicRmw16SubU { ref memarg } => {
4925 let loc = self.pop_value_released()?;
4926 let target = self.pop_value_released()?;
4927 let ret = self.acquire_locations(
4928 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
4929 false,
4930 )?[0];
4931 self.value_stack.push(ret);
4932 self.op_memory(
4933 |this,
4934 need_check,
4935 imported_memories,
4936 offset,
4937 heap_access_oob,
4938 unaligned_atomic| {
4939 this.machine.i32_atomic_sub_16u(
4940 loc,
4941 target,
4942 memarg,
4943 ret,
4944 need_check,
4945 imported_memories,
4946 offset,
4947 heap_access_oob,
4948 unaligned_atomic,
4949 )
4950 },
4951 )?;
4952 }
4953 Operator::I64AtomicRmw8SubU { ref memarg } => {
4954 let loc = self.pop_value_released()?;
4955 let target = self.pop_value_released()?;
4956 let ret = self.acquire_locations(
4957 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4958 false,
4959 )?[0];
4960 self.value_stack.push(ret);
4961 self.op_memory(
4962 |this,
4963 need_check,
4964 imported_memories,
4965 offset,
4966 heap_access_oob,
4967 unaligned_atomic| {
4968 this.machine.i64_atomic_sub_8u(
4969 loc,
4970 target,
4971 memarg,
4972 ret,
4973 need_check,
4974 imported_memories,
4975 offset,
4976 heap_access_oob,
4977 unaligned_atomic,
4978 )
4979 },
4980 )?;
4981 }
4982 Operator::I64AtomicRmw16SubU { ref memarg } => {
4983 let loc = self.pop_value_released()?;
4984 let target = self.pop_value_released()?;
4985 let ret = self.acquire_locations(
4986 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
4987 false,
4988 )?[0];
4989 self.value_stack.push(ret);
4990 self.op_memory(
4991 |this,
4992 need_check,
4993 imported_memories,
4994 offset,
4995 heap_access_oob,
4996 unaligned_atomic| {
4997 this.machine.i64_atomic_sub_16u(
4998 loc,
4999 target,
5000 memarg,
5001 ret,
5002 need_check,
5003 imported_memories,
5004 offset,
5005 heap_access_oob,
5006 unaligned_atomic,
5007 )
5008 },
5009 )?;
5010 }
5011 Operator::I64AtomicRmw32SubU { ref memarg } => {
5012 let loc = self.pop_value_released()?;
5013 let target = self.pop_value_released()?;
5014 let ret = self.acquire_locations(
5015 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5016 false,
5017 )?[0];
5018 self.value_stack.push(ret);
5019 self.op_memory(
5020 |this,
5021 need_check,
5022 imported_memories,
5023 offset,
5024 heap_access_oob,
5025 unaligned_atomic| {
5026 this.machine.i64_atomic_sub_32u(
5027 loc,
5028 target,
5029 memarg,
5030 ret,
5031 need_check,
5032 imported_memories,
5033 offset,
5034 heap_access_oob,
5035 unaligned_atomic,
5036 )
5037 },
5038 )?;
5039 }
5040 Operator::I32AtomicRmwAnd { ref memarg } => {
5041 let loc = self.pop_value_released()?;
5042 let target = self.pop_value_released()?;
5043 let ret = self.acquire_locations(
5044 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5045 false,
5046 )?[0];
5047 self.value_stack.push(ret);
5048 self.op_memory(
5049 |this,
5050 need_check,
5051 imported_memories,
5052 offset,
5053 heap_access_oob,
5054 unaligned_atomic| {
5055 this.machine.i32_atomic_and(
5056 loc,
5057 target,
5058 memarg,
5059 ret,
5060 need_check,
5061 imported_memories,
5062 offset,
5063 heap_access_oob,
5064 unaligned_atomic,
5065 )
5066 },
5067 )?;
5068 }
5069 Operator::I64AtomicRmwAnd { ref memarg } => {
5070 let loc = self.pop_value_released()?;
5071 let target = self.pop_value_released()?;
5072 let ret = self.acquire_locations(
5073 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5074 false,
5075 )?[0];
5076 self.value_stack.push(ret);
5077 self.op_memory(
5078 |this,
5079 need_check,
5080 imported_memories,
5081 offset,
5082 heap_access_oob,
5083 unaligned_atomic| {
5084 this.machine.i64_atomic_and(
5085 loc,
5086 target,
5087 memarg,
5088 ret,
5089 need_check,
5090 imported_memories,
5091 offset,
5092 heap_access_oob,
5093 unaligned_atomic,
5094 )
5095 },
5096 )?;
5097 }
5098 Operator::I32AtomicRmw8AndU { ref memarg } => {
5099 let loc = self.pop_value_released()?;
5100 let target = self.pop_value_released()?;
5101 let ret = self.acquire_locations(
5102 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5103 false,
5104 )?[0];
5105 self.value_stack.push(ret);
5106 self.op_memory(
5107 |this,
5108 need_check,
5109 imported_memories,
5110 offset,
5111 heap_access_oob,
5112 unaligned_atomic| {
5113 this.machine.i32_atomic_and_8u(
5114 loc,
5115 target,
5116 memarg,
5117 ret,
5118 need_check,
5119 imported_memories,
5120 offset,
5121 heap_access_oob,
5122 unaligned_atomic,
5123 )
5124 },
5125 )?;
5126 }
5127 Operator::I32AtomicRmw16AndU { ref memarg } => {
5128 let loc = self.pop_value_released()?;
5129 let target = self.pop_value_released()?;
5130 let ret = self.acquire_locations(
5131 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5132 false,
5133 )?[0];
5134 self.value_stack.push(ret);
5135 self.op_memory(
5136 |this,
5137 need_check,
5138 imported_memories,
5139 offset,
5140 heap_access_oob,
5141 unaligned_atomic| {
5142 this.machine.i32_atomic_and_16u(
5143 loc,
5144 target,
5145 memarg,
5146 ret,
5147 need_check,
5148 imported_memories,
5149 offset,
5150 heap_access_oob,
5151 unaligned_atomic,
5152 )
5153 },
5154 )?;
5155 }
5156 Operator::I64AtomicRmw8AndU { ref memarg } => {
5157 let loc = self.pop_value_released()?;
5158 let target = self.pop_value_released()?;
5159 let ret = self.acquire_locations(
5160 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5161 false,
5162 )?[0];
5163 self.value_stack.push(ret);
5164 self.op_memory(
5165 |this,
5166 need_check,
5167 imported_memories,
5168 offset,
5169 heap_access_oob,
5170 unaligned_atomic| {
5171 this.machine.i64_atomic_and_8u(
5172 loc,
5173 target,
5174 memarg,
5175 ret,
5176 need_check,
5177 imported_memories,
5178 offset,
5179 heap_access_oob,
5180 unaligned_atomic,
5181 )
5182 },
5183 )?;
5184 }
5185 Operator::I64AtomicRmw16AndU { ref memarg } => {
5186 let loc = self.pop_value_released()?;
5187 let target = self.pop_value_released()?;
5188 let ret = self.acquire_locations(
5189 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5190 false,
5191 )?[0];
5192 self.value_stack.push(ret);
5193 self.op_memory(
5194 |this,
5195 need_check,
5196 imported_memories,
5197 offset,
5198 heap_access_oob,
5199 unaligned_atomic| {
5200 this.machine.i64_atomic_and_16u(
5201 loc,
5202 target,
5203 memarg,
5204 ret,
5205 need_check,
5206 imported_memories,
5207 offset,
5208 heap_access_oob,
5209 unaligned_atomic,
5210 )
5211 },
5212 )?;
5213 }
5214 Operator::I64AtomicRmw32AndU { ref memarg } => {
5215 let loc = self.pop_value_released()?;
5216 let target = self.pop_value_released()?;
5217 let ret = self.acquire_locations(
5218 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5219 false,
5220 )?[0];
5221 self.value_stack.push(ret);
5222 self.op_memory(
5223 |this,
5224 need_check,
5225 imported_memories,
5226 offset,
5227 heap_access_oob,
5228 unaligned_atomic| {
5229 this.machine.i64_atomic_and_32u(
5230 loc,
5231 target,
5232 memarg,
5233 ret,
5234 need_check,
5235 imported_memories,
5236 offset,
5237 heap_access_oob,
5238 unaligned_atomic,
5239 )
5240 },
5241 )?;
5242 }
5243 Operator::I32AtomicRmwOr { ref memarg } => {
5244 let loc = self.pop_value_released()?;
5245 let target = self.pop_value_released()?;
5246 let ret = self.acquire_locations(
5247 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5248 false,
5249 )?[0];
5250 self.value_stack.push(ret);
5251 self.op_memory(
5252 |this,
5253 need_check,
5254 imported_memories,
5255 offset,
5256 heap_access_oob,
5257 unaligned_atomic| {
5258 this.machine.i32_atomic_or(
5259 loc,
5260 target,
5261 memarg,
5262 ret,
5263 need_check,
5264 imported_memories,
5265 offset,
5266 heap_access_oob,
5267 unaligned_atomic,
5268 )
5269 },
5270 )?;
5271 }
5272 Operator::I64AtomicRmwOr { ref memarg } => {
5273 let loc = self.pop_value_released()?;
5274 let target = self.pop_value_released()?;
5275 let ret = self.acquire_locations(
5276 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5277 false,
5278 )?[0];
5279 self.value_stack.push(ret);
5280 self.op_memory(
5281 |this,
5282 need_check,
5283 imported_memories,
5284 offset,
5285 heap_access_oob,
5286 unaligned_atomic| {
5287 this.machine.i64_atomic_or(
5288 loc,
5289 target,
5290 memarg,
5291 ret,
5292 need_check,
5293 imported_memories,
5294 offset,
5295 heap_access_oob,
5296 unaligned_atomic,
5297 )
5298 },
5299 )?;
5300 }
5301 Operator::I32AtomicRmw8OrU { ref memarg } => {
5302 let loc = self.pop_value_released()?;
5303 let target = self.pop_value_released()?;
5304 let ret = self.acquire_locations(
5305 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5306 false,
5307 )?[0];
5308 self.value_stack.push(ret);
5309 self.op_memory(
5310 |this,
5311 need_check,
5312 imported_memories,
5313 offset,
5314 heap_access_oob,
5315 unaligned_atomic| {
5316 this.machine.i32_atomic_or_8u(
5317 loc,
5318 target,
5319 memarg,
5320 ret,
5321 need_check,
5322 imported_memories,
5323 offset,
5324 heap_access_oob,
5325 unaligned_atomic,
5326 )
5327 },
5328 )?;
5329 }
5330 Operator::I32AtomicRmw16OrU { ref memarg } => {
5331 let loc = self.pop_value_released()?;
5332 let target = self.pop_value_released()?;
5333 let ret = self.acquire_locations(
5334 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5335 false,
5336 )?[0];
5337 self.value_stack.push(ret);
5338 self.op_memory(
5339 |this,
5340 need_check,
5341 imported_memories,
5342 offset,
5343 heap_access_oob,
5344 unaligned_atomic| {
5345 this.machine.i32_atomic_or_16u(
5346 loc,
5347 target,
5348 memarg,
5349 ret,
5350 need_check,
5351 imported_memories,
5352 offset,
5353 heap_access_oob,
5354 unaligned_atomic,
5355 )
5356 },
5357 )?;
5358 }
5359 Operator::I64AtomicRmw8OrU { ref memarg } => {
5360 let loc = self.pop_value_released()?;
5361 let target = self.pop_value_released()?;
5362 let ret = self.acquire_locations(
5363 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5364 false,
5365 )?[0];
5366 self.value_stack.push(ret);
5367 self.op_memory(
5368 |this,
5369 need_check,
5370 imported_memories,
5371 offset,
5372 heap_access_oob,
5373 unaligned_atomic| {
5374 this.machine.i64_atomic_or_8u(
5375 loc,
5376 target,
5377 memarg,
5378 ret,
5379 need_check,
5380 imported_memories,
5381 offset,
5382 heap_access_oob,
5383 unaligned_atomic,
5384 )
5385 },
5386 )?;
5387 }
5388 Operator::I64AtomicRmw16OrU { ref memarg } => {
5389 let loc = self.pop_value_released()?;
5390 let target = self.pop_value_released()?;
5391 let ret = self.acquire_locations(
5392 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5393 false,
5394 )?[0];
5395 self.value_stack.push(ret);
5396 self.op_memory(
5397 |this,
5398 need_check,
5399 imported_memories,
5400 offset,
5401 heap_access_oob,
5402 unaligned_atomic| {
5403 this.machine.i64_atomic_or_16u(
5404 loc,
5405 target,
5406 memarg,
5407 ret,
5408 need_check,
5409 imported_memories,
5410 offset,
5411 heap_access_oob,
5412 unaligned_atomic,
5413 )
5414 },
5415 )?;
5416 }
5417 Operator::I64AtomicRmw32OrU { ref memarg } => {
5418 let loc = self.pop_value_released()?;
5419 let target = self.pop_value_released()?;
5420 let ret = self.acquire_locations(
5421 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5422 false,
5423 )?[0];
5424 self.value_stack.push(ret);
5425 self.op_memory(
5426 |this,
5427 need_check,
5428 imported_memories,
5429 offset,
5430 heap_access_oob,
5431 unaligned_atomic| {
5432 this.machine.i64_atomic_or_32u(
5433 loc,
5434 target,
5435 memarg,
5436 ret,
5437 need_check,
5438 imported_memories,
5439 offset,
5440 heap_access_oob,
5441 unaligned_atomic,
5442 )
5443 },
5444 )?;
5445 }
5446 Operator::I32AtomicRmwXor { ref memarg } => {
5447 let loc = self.pop_value_released()?;
5448 let target = self.pop_value_released()?;
5449 let ret = self.acquire_locations(
5450 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5451 false,
5452 )?[0];
5453 self.value_stack.push(ret);
5454 self.op_memory(
5455 |this,
5456 need_check,
5457 imported_memories,
5458 offset,
5459 heap_access_oob,
5460 unaligned_atomic| {
5461 this.machine.i32_atomic_xor(
5462 loc,
5463 target,
5464 memarg,
5465 ret,
5466 need_check,
5467 imported_memories,
5468 offset,
5469 heap_access_oob,
5470 unaligned_atomic,
5471 )
5472 },
5473 )?;
5474 }
5475 Operator::I64AtomicRmwXor { ref memarg } => {
5476 let loc = self.pop_value_released()?;
5477 let target = self.pop_value_released()?;
5478 let ret = self.acquire_locations(
5479 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5480 false,
5481 )?[0];
5482 self.value_stack.push(ret);
5483 self.op_memory(
5484 |this,
5485 need_check,
5486 imported_memories,
5487 offset,
5488 heap_access_oob,
5489 unaligned_atomic| {
5490 this.machine.i64_atomic_xor(
5491 loc,
5492 target,
5493 memarg,
5494 ret,
5495 need_check,
5496 imported_memories,
5497 offset,
5498 heap_access_oob,
5499 unaligned_atomic,
5500 )
5501 },
5502 )?;
5503 }
5504 Operator::I32AtomicRmw8XorU { ref memarg } => {
5505 let loc = self.pop_value_released()?;
5506 let target = self.pop_value_released()?;
5507 let ret = self.acquire_locations(
5508 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5509 false,
5510 )?[0];
5511 self.value_stack.push(ret);
5512 self.op_memory(
5513 |this,
5514 need_check,
5515 imported_memories,
5516 offset,
5517 heap_access_oob,
5518 unaligned_atomic| {
5519 this.machine.i32_atomic_xor_8u(
5520 loc,
5521 target,
5522 memarg,
5523 ret,
5524 need_check,
5525 imported_memories,
5526 offset,
5527 heap_access_oob,
5528 unaligned_atomic,
5529 )
5530 },
5531 )?;
5532 }
5533 Operator::I32AtomicRmw16XorU { ref memarg } => {
5534 let loc = self.pop_value_released()?;
5535 let target = self.pop_value_released()?;
5536 let ret = self.acquire_locations(
5537 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5538 false,
5539 )?[0];
5540 self.value_stack.push(ret);
5541 self.op_memory(
5542 |this,
5543 need_check,
5544 imported_memories,
5545 offset,
5546 heap_access_oob,
5547 unaligned_atomic| {
5548 this.machine.i32_atomic_xor_16u(
5549 loc,
5550 target,
5551 memarg,
5552 ret,
5553 need_check,
5554 imported_memories,
5555 offset,
5556 heap_access_oob,
5557 unaligned_atomic,
5558 )
5559 },
5560 )?;
5561 }
5562 Operator::I64AtomicRmw8XorU { ref memarg } => {
5563 let loc = self.pop_value_released()?;
5564 let target = self.pop_value_released()?;
5565 let ret = self.acquire_locations(
5566 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5567 false,
5568 )?[0];
5569 self.value_stack.push(ret);
5570 self.op_memory(
5571 |this,
5572 need_check,
5573 imported_memories,
5574 offset,
5575 heap_access_oob,
5576 unaligned_atomic| {
5577 this.machine.i64_atomic_xor_8u(
5578 loc,
5579 target,
5580 memarg,
5581 ret,
5582 need_check,
5583 imported_memories,
5584 offset,
5585 heap_access_oob,
5586 unaligned_atomic,
5587 )
5588 },
5589 )?;
5590 }
5591 Operator::I64AtomicRmw16XorU { ref memarg } => {
5592 let loc = self.pop_value_released()?;
5593 let target = self.pop_value_released()?;
5594 let ret = self.acquire_locations(
5595 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5596 false,
5597 )?[0];
5598 self.value_stack.push(ret);
5599 self.op_memory(
5600 |this,
5601 need_check,
5602 imported_memories,
5603 offset,
5604 heap_access_oob,
5605 unaligned_atomic| {
5606 this.machine.i64_atomic_xor_16u(
5607 loc,
5608 target,
5609 memarg,
5610 ret,
5611 need_check,
5612 imported_memories,
5613 offset,
5614 heap_access_oob,
5615 unaligned_atomic,
5616 )
5617 },
5618 )?;
5619 }
5620 Operator::I64AtomicRmw32XorU { ref memarg } => {
5621 let loc = self.pop_value_released()?;
5622 let target = self.pop_value_released()?;
5623 let ret = self.acquire_locations(
5624 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5625 false,
5626 )?[0];
5627 self.value_stack.push(ret);
5628 self.op_memory(
5629 |this,
5630 need_check,
5631 imported_memories,
5632 offset,
5633 heap_access_oob,
5634 unaligned_atomic| {
5635 this.machine.i64_atomic_xor_32u(
5636 loc,
5637 target,
5638 memarg,
5639 ret,
5640 need_check,
5641 imported_memories,
5642 offset,
5643 heap_access_oob,
5644 unaligned_atomic,
5645 )
5646 },
5647 )?;
5648 }
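// Atomic exchange: same shape as the other RMW operators; the result is the
// value previously held in memory.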
5649 Operator::I32AtomicRmwXchg { ref memarg } => {
5650 let loc = self.pop_value_released()?;
5651 let target = self.pop_value_released()?;
5652 let ret = self.acquire_locations(
5653 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5654 false,
5655 )?[0];
5656 self.value_stack.push(ret);
5657 self.op_memory(
5658 |this,
5659 need_check,
5660 imported_memories,
5661 offset,
5662 heap_access_oob,
5663 unaligned_atomic| {
5664 this.machine.i32_atomic_xchg(
5665 loc,
5666 target,
5667 memarg,
5668 ret,
5669 need_check,
5670 imported_memories,
5671 offset,
5672 heap_access_oob,
5673 unaligned_atomic,
5674 )
5675 },
5676 )?;
5677 }
5678 Operator::I64AtomicRmwXchg { ref memarg } => {
5679 let loc = self.pop_value_released()?;
5680 let target = self.pop_value_released()?;
5681 let ret = self.acquire_locations(
5682 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5683 false,
5684 )?[0];
5685 self.value_stack.push(ret);
5686 self.op_memory(
5687 |this,
5688 need_check,
5689 imported_memories,
5690 offset,
5691 heap_access_oob,
5692 unaligned_atomic| {
5693 this.machine.i64_atomic_xchg(
5694 loc,
5695 target,
5696 memarg,
5697 ret,
5698 need_check,
5699 imported_memories,
5700 offset,
5701 heap_access_oob,
5702 unaligned_atomic,
5703 )
5704 },
5705 )?;
5706 }
5707 Operator::I32AtomicRmw8XchgU { ref memarg } => {
5708 let loc = self.pop_value_released()?;
5709 let target = self.pop_value_released()?;
5710 let ret = self.acquire_locations(
5711 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5712 false,
5713 )?[0];
5714 self.value_stack.push(ret);
5715 self.op_memory(
5716 |this,
5717 need_check,
5718 imported_memories,
5719 offset,
5720 heap_access_oob,
5721 unaligned_atomic| {
5722 this.machine.i32_atomic_xchg_8u(
5723 loc,
5724 target,
5725 memarg,
5726 ret,
5727 need_check,
5728 imported_memories,
5729 offset,
5730 heap_access_oob,
5731 unaligned_atomic,
5732 )
5733 },
5734 )?;
5735 }
5736 Operator::I32AtomicRmw16XchgU { ref memarg } => {
5737 let loc = self.pop_value_released()?;
5738 let target = self.pop_value_released()?;
5739 let ret = self.acquire_locations(
5740 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5741 false,
5742 )?[0];
5743 self.value_stack.push(ret);
5744 self.op_memory(
5745 |this,
5746 need_check,
5747 imported_memories,
5748 offset,
5749 heap_access_oob,
5750 unaligned_atomic| {
5751 this.machine.i32_atomic_xchg_16u(
5752 loc,
5753 target,
5754 memarg,
5755 ret,
5756 need_check,
5757 imported_memories,
5758 offset,
5759 heap_access_oob,
5760 unaligned_atomic,
5761 )
5762 },
5763 )?;
5764 }
5765 Operator::I64AtomicRmw8XchgU { ref memarg } => {
5766 let loc = self.pop_value_released()?;
5767 let target = self.pop_value_released()?;
5768 let ret = self.acquire_locations(
5769 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5770 false,
5771 )?[0];
5772 self.value_stack.push(ret);
5773 self.op_memory(
5774 |this,
5775 need_check,
5776 imported_memories,
5777 offset,
5778 heap_access_oob,
5779 unaligned_atomic| {
5780 this.machine.i64_atomic_xchg_8u(
5781 loc,
5782 target,
5783 memarg,
5784 ret,
5785 need_check,
5786 imported_memories,
5787 offset,
5788 heap_access_oob,
5789 unaligned_atomic,
5790 )
5791 },
5792 )?;
5793 }
5794 Operator::I64AtomicRmw16XchgU { ref memarg } => {
5795 let loc = self.pop_value_released()?;
5796 let target = self.pop_value_released()?;
5797 let ret = self.acquire_locations(
5798 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5799 false,
5800 )?[0];
5801 self.value_stack.push(ret);
5802 self.op_memory(
5803 |this,
5804 need_check,
5805 imported_memories,
5806 offset,
5807 heap_access_oob,
5808 unaligned_atomic| {
5809 this.machine.i64_atomic_xchg_16u(
5810 loc,
5811 target,
5812 memarg,
5813 ret,
5814 need_check,
5815 imported_memories,
5816 offset,
5817 heap_access_oob,
5818 unaligned_atomic,
5819 )
5820 },
5821 )?;
5822 }
5823 Operator::I64AtomicRmw32XchgU { ref memarg } => {
5824 let loc = self.pop_value_released()?;
5825 let target = self.pop_value_released()?;
5826 let ret = self.acquire_locations(
5827 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5828 false,
5829 )?[0];
5830 self.value_stack.push(ret);
5831 self.op_memory(
5832 |this,
5833 need_check,
5834 imported_memories,
5835 offset,
5836 heap_access_oob,
5837 unaligned_atomic| {
5838 this.machine.i64_atomic_xchg_32u(
5839 loc,
5840 target,
5841 memarg,
5842 ret,
5843 need_check,
5844 imported_memories,
5845 offset,
5846 heap_access_oob,
5847 unaligned_atomic,
5848 )
5849 },
5850 )?;
5851 }
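// Atomic compare-and-exchange: pop the replacement value, the expected value
// and the address; the result is the value observed in memory.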
5852 Operator::I32AtomicRmwCmpxchg { ref memarg } => {
5853 let new = self.pop_value_released()?;
5854 let cmp = self.pop_value_released()?;
5855 let target = self.pop_value_released()?;
5856 let ret = self.acquire_locations(
5857 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5858 false,
5859 )?[0];
5860 self.value_stack.push(ret);
5861 self.op_memory(
5862 |this,
5863 need_check,
5864 imported_memories,
5865 offset,
5866 heap_access_oob,
5867 unaligned_atomic| {
5868 this.machine.i32_atomic_cmpxchg(
5869 new,
5870 cmp,
5871 target,
5872 memarg,
5873 ret,
5874 need_check,
5875 imported_memories,
5876 offset,
5877 heap_access_oob,
5878 unaligned_atomic,
5879 )
5880 },
5881 )?;
5882 }
5883 Operator::I64AtomicRmwCmpxchg { ref memarg } => {
5884 let new = self.pop_value_released()?;
5885 let cmp = self.pop_value_released()?;
5886 let target = self.pop_value_released()?;
5887 let ret = self.acquire_locations(
5888 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5889 false,
5890 )?[0];
5891 self.value_stack.push(ret);
5892 self.op_memory(
5893 |this,
5894 need_check,
5895 imported_memories,
5896 offset,
5897 heap_access_oob,
5898 unaligned_atomic| {
5899 this.machine.i64_atomic_cmpxchg(
5900 new,
5901 cmp,
5902 target,
5903 memarg,
5904 ret,
5905 need_check,
5906 imported_memories,
5907 offset,
5908 heap_access_oob,
5909 unaligned_atomic,
5910 )
5911 },
5912 )?;
5913 }
5914 Operator::I32AtomicRmw8CmpxchgU { ref memarg } => {
5915 let new = self.pop_value_released()?;
5916 let cmp = self.pop_value_released()?;
5917 let target = self.pop_value_released()?;
5918 let ret = self.acquire_locations(
5919 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5920 false,
5921 )?[0];
5922 self.value_stack.push(ret);
5923 self.op_memory(
5924 |this,
5925 need_check,
5926 imported_memories,
5927 offset,
5928 heap_access_oob,
5929 unaligned_atomic| {
5930 this.machine.i32_atomic_cmpxchg_8u(
5931 new,
5932 cmp,
5933 target,
5934 memarg,
5935 ret,
5936 need_check,
5937 imported_memories,
5938 offset,
5939 heap_access_oob,
5940 unaligned_atomic,
5941 )
5942 },
5943 )?;
5944 }
5945 Operator::I32AtomicRmw16CmpxchgU { ref memarg } => {
5946 let new = self.pop_value_released()?;
5947 let cmp = self.pop_value_released()?;
5948 let target = self.pop_value_released()?;
5949 let ret = self.acquire_locations(
5950 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
5951 false,
5952 )?[0];
5953 self.value_stack.push(ret);
5954 self.op_memory(
5955 |this,
5956 need_check,
5957 imported_memories,
5958 offset,
5959 heap_access_oob,
5960 unaligned_atomic| {
5961 this.machine.i32_atomic_cmpxchg_16u(
5962 new,
5963 cmp,
5964 target,
5965 memarg,
5966 ret,
5967 need_check,
5968 imported_memories,
5969 offset,
5970 heap_access_oob,
5971 unaligned_atomic,
5972 )
5973 },
5974 )?;
5975 }
5976 Operator::I64AtomicRmw8CmpxchgU { ref memarg } => {
5977 let new = self.pop_value_released()?;
5978 let cmp = self.pop_value_released()?;
5979 let target = self.pop_value_released()?;
5980 let ret = self.acquire_locations(
5981 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
5982 false,
5983 )?[0];
5984 self.value_stack.push(ret);
5985 self.op_memory(
5986 |this,
5987 need_check,
5988 imported_memories,
5989 offset,
5990 heap_access_oob,
5991 unaligned_atomic| {
5992 this.machine.i64_atomic_cmpxchg_8u(
5993 new,
5994 cmp,
5995 target,
5996 memarg,
5997 ret,
5998 need_check,
5999 imported_memories,
6000 offset,
6001 heap_access_oob,
6002 unaligned_atomic,
6003 )
6004 },
6005 )?;
6006 }
6007 Operator::I64AtomicRmw16CmpxchgU { ref memarg } => {
6008 let new = self.pop_value_released()?;
6009 let cmp = self.pop_value_released()?;
6010 let target = self.pop_value_released()?;
6011 let ret = self.acquire_locations(
6012 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
6013 false,
6014 )?[0];
6015 self.value_stack.push(ret);
6016 self.op_memory(
6017 |this,
6018 need_check,
6019 imported_memories,
6020 offset,
6021 heap_access_oob,
6022 unaligned_atomic| {
6023 this.machine.i64_atomic_cmpxchg_16u(
6024 new,
6025 cmp,
6026 target,
6027 memarg,
6028 ret,
6029 need_check,
6030 imported_memories,
6031 offset,
6032 heap_access_oob,
6033 unaligned_atomic,
6034 )
6035 },
6036 )?;
6037 }
6038 Operator::I64AtomicRmw32CmpxchgU { ref memarg } => {
6039 let new = self.pop_value_released()?;
6040 let cmp = self.pop_value_released()?;
6041 let target = self.pop_value_released()?;
6042 let ret = self.acquire_locations(
6043 &[(WpType::I64, MachineValue::WasmStack(self.value_stack.len()))],
6044 false,
6045 )?[0];
6046 self.value_stack.push(ret);
6047 self.op_memory(
6048 |this,
6049 need_check,
6050 imported_memories,
6051 offset,
6052 heap_access_oob,
6053 unaligned_atomic| {
6054 this.machine.i64_atomic_cmpxchg_32u(
6055 new,
6056 cmp,
6057 target,
6058 memarg,
6059 ret,
6060 need_check,
6061 imported_memories,
6062 offset,
6063 heap_access_oob,
6064 unaligned_atomic,
6065 )
6066 },
6067 )?;
6068 }
6069
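// Reference types: `ref.null` is materialized as the constant 0, while
// `ref.func` and the table operators below are lowered to calls into VM
// builtin functions reached through the vmctx builtin-function table.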
6070 Operator::RefNull { .. } => {
6071 self.value_stack.push(Location::Imm64(0));
6072 self.state.wasm_stack.push(WasmAbstractValue::Const(0));
6073 }
6074 Operator::RefFunc { function_index } => {
6075 self.machine.move_location(
6076 Size::S64,
6077 Location::Memory(
6078 self.machine.get_vmctx_reg(),
6079 self.vmoffsets
6080 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_func_ref_index())
6081 as i32,
6082 ),
6083 Location::GPR(self.machine.get_grp_for_call()),
6084 )?;
6085
6086 self.emit_call_native(
6089 |this| {
6090 this.machine
6091 .emit_call_register(this.machine.get_grp_for_call())
6092 },
6093 iter::once(Location::Imm32(function_index as u32)),
6095 iter::once(WpType::I64),
6096 )?;
6097
6098 let ret = self.acquire_locations(
6099 &[(
6100 WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap()),
6101 MachineValue::WasmStack(self.value_stack.len()),
6102 )],
6103 false,
6104 )?[0];
6105 self.value_stack.push(ret);
6106 self.machine.move_location(
6107 Size::S64,
6108 Location::GPR(self.machine.get_gpr_for_ret()),
6109 ret,
6110 )?;
6111 }
6112 Operator::RefIsNull => {
6113 let loc_a = self.pop_value_released()?;
6114 let ret = self.acquire_locations(
6115 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
6116 false,
6117 )?[0];
6118 self.machine.i64_cmp_eq(loc_a, Location::Imm64(0), ret)?;
6119 self.value_stack.push(ret);
6120 }
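// `table.set`, `table.get` and `table.size` call the local or imported table
// builtin, chosen by whether the table is defined in this module.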
6121 Operator::TableSet { table: index } => {
6122 let table_index = TableIndex::new(index as _);
6123 let value = self.value_stack.pop().unwrap();
6124 let index = self.value_stack.pop().unwrap();
6125
6126 self.release_locations_only_regs(&[value, index])?;
6128
6129 self.machine.move_location(
6130 Size::S64,
6131 Location::Memory(
6132 self.machine.get_vmctx_reg(),
6133 self.vmoffsets.vmctx_builtin_function(
6134 if self.module.local_table_index(table_index).is_some() {
6135 VMBuiltinFunctionIndex::get_table_set_index()
6136 } else {
6137 VMBuiltinFunctionIndex::get_imported_table_set_index()
6138 },
6139 ) as i32,
6140 ),
6141 Location::GPR(self.machine.get_grp_for_call()),
6142 )?;
6143
6144 self.release_locations_only_osr_state(1)?;
6146 self.emit_call_native(
6147 |this| {
6148 this.machine
6149 .emit_call_register(this.machine.get_grp_for_call())
6150 },
6151 [Location::Imm32(table_index.index() as u32), index, value]
6153 .iter()
6154 .cloned(),
6155 [WpType::I32, WpType::I64, WpType::I64].iter().cloned(),
6156 )?;
6157
6158 self.release_locations_only_stack(&[index, value])?;
6159 }
6160 Operator::TableGet { table: index } => {
6161 let table_index = TableIndex::new(index as _);
6162 let index = self.value_stack.pop().unwrap();
6163
6164 self.release_locations_only_regs(&[index])?;
6165
6166 self.machine.move_location(
6167 Size::S64,
6168 Location::Memory(
6169 self.machine.get_vmctx_reg(),
6170 self.vmoffsets.vmctx_builtin_function(
6171 if self.module.local_table_index(table_index).is_some() {
6172 VMBuiltinFunctionIndex::get_table_get_index()
6173 } else {
6174 VMBuiltinFunctionIndex::get_imported_table_get_index()
6175 },
6176 ) as i32,
6177 ),
6178 Location::GPR(self.machine.get_grp_for_call()),
6179 )?;
6180
6181 self.release_locations_only_osr_state(1)?;
6182 self.emit_call_native(
6183 |this| {
6184 this.machine
6185 .emit_call_register(this.machine.get_grp_for_call())
6186 },
6187 [Location::Imm32(table_index.index() as u32), index]
6189 .iter()
6190 .cloned(),
6191 [WpType::I32, WpType::I64].iter().cloned(),
6192 )?;
6193
6194 self.release_locations_only_stack(&[index])?;
6195
6196 let ret = self.acquire_locations(
6197 &[(
6198 WpType::Ref(WpRefType::new(true, WpHeapType::FUNC).unwrap()),
6199 MachineValue::WasmStack(self.value_stack.len()),
6200 )],
6201 false,
6202 )?[0];
6203 self.value_stack.push(ret);
6204 self.machine.move_location(
6205 Size::S64,
6206 Location::GPR(self.machine.get_gpr_for_ret()),
6207 ret,
6208 )?;
6209 }
6210 Operator::TableSize { table: index } => {
6211 let table_index = TableIndex::new(index as _);
6212
6213 self.machine.move_location(
6214 Size::S64,
6215 Location::Memory(
6216 self.machine.get_vmctx_reg(),
6217 self.vmoffsets.vmctx_builtin_function(
6218 if self.module.local_table_index(table_index).is_some() {
6219 VMBuiltinFunctionIndex::get_table_size_index()
6220 } else {
6221 VMBuiltinFunctionIndex::get_imported_table_size_index()
6222 },
6223 ) as i32,
6224 ),
6225 Location::GPR(self.machine.get_grp_for_call()),
6226 )?;
6227
6228 self.emit_call_native(
6229 |this| {
6230 this.machine
6231 .emit_call_register(this.machine.get_grp_for_call())
6232 },
6233 iter::once(Location::Imm32(table_index.index() as u32)),
6235 iter::once(WpType::I32),
6236 )?;
6237
6238 let ret = self.acquire_locations(
6239 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
6240 false,
6241 )?[0];
6242 self.value_stack.push(ret);
6243 self.machine.move_location(
6244 Size::S32,
6245 Location::GPR(self.machine.get_gpr_for_ret()),
6246 ret,
6247 )?;
6248 }
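// Bulk table operators (`table.grow`, `table.copy`, `table.fill`,
// `table.init`, `elem.drop`) follow the same pattern: a builtin call with the
// table/segment indices passed as immediates.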
6249 Operator::TableGrow { table: index } => {
6250 let table_index = TableIndex::new(index as _);
6251 let delta = self.value_stack.pop().unwrap();
6252 let init_value = self.value_stack.pop().unwrap();
6253 self.release_locations_only_regs(&[delta, init_value])?;
6254
6255 self.machine.move_location(
6256 Size::S64,
6257 Location::Memory(
6258 self.machine.get_vmctx_reg(),
6259 self.vmoffsets.vmctx_builtin_function(
6260 if self.module.local_table_index(table_index).is_some() {
6261 VMBuiltinFunctionIndex::get_table_grow_index()
6262 } else {
6263 VMBuiltinFunctionIndex::get_imported_table_grow_index()
6264 },
6265 ) as i32,
6266 ),
6267 Location::GPR(self.machine.get_grp_for_call()),
6268 )?;
6269
6270 self.release_locations_only_osr_state(1)?;
6272 self.emit_call_native(
6273 |this| {
6274 this.machine
6275 .emit_call_register(this.machine.get_grp_for_call())
6276 },
6277 [
6279 init_value,
6280 delta,
6281 Location::Imm32(table_index.index() as u32),
6282 ]
6283 .iter()
6284 .cloned(),
6285 [WpType::I64, WpType::I64, WpType::I64].iter().cloned(),
6286 )?;
6287
6288 self.release_locations_only_stack(&[init_value, delta])?;
6289
6290 let ret = self.acquire_locations(
6291 &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
6292 false,
6293 )?[0];
6294 self.value_stack.push(ret);
6295 self.machine.move_location(
6296 Size::S32,
6297 Location::GPR(self.machine.get_gpr_for_ret()),
6298 ret,
6299 )?;
6300 }
6301 Operator::TableCopy {
6302 dst_table,
6303 src_table,
6304 } => {
6305 let len = self.value_stack.pop().unwrap();
6306 let src = self.value_stack.pop().unwrap();
6307 let dest = self.value_stack.pop().unwrap();
6308 self.release_locations_only_regs(&[len, src, dest])?;
6309
6310 self.machine.move_location(
6311 Size::S64,
6312 Location::Memory(
6313 self.machine.get_vmctx_reg(),
6314 self.vmoffsets
6315 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_copy_index())
6316 as i32,
6317 ),
6318 Location::GPR(self.machine.get_grp_for_call()),
6319 )?;
6320
6321 self.release_locations_only_osr_state(1)?;
6323 self.emit_call_native(
6324 |this| {
6325 this.machine
6326 .emit_call_register(this.machine.get_grp_for_call())
6327 },
6328 [
6330 Location::Imm32(dst_table),
6331 Location::Imm32(src_table),
6332 dest,
6333 src,
6334 len,
6335 ]
6336 .iter()
6337 .cloned(),
6338 [
6339 WpType::I32,
6340 WpType::I32,
6341 WpType::I64,
6342 WpType::I64,
6343 WpType::I64,
6344 ]
6345 .iter()
6346 .cloned(),
6347 )?;
6348
6349 self.release_locations_only_stack(&[dest, src, len])?;
6350 }
6351
6352 Operator::TableFill { table } => {
6353 let len = self.value_stack.pop().unwrap();
6354 let val = self.value_stack.pop().unwrap();
6355 let dest = self.value_stack.pop().unwrap();
6356 self.release_locations_only_regs(&[len, val, dest])?;
6357
6358 self.machine.move_location(
6359 Size::S64,
6360 Location::Memory(
6361 self.machine.get_vmctx_reg(),
6362 self.vmoffsets
6363 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_fill_index())
6364 as i32,
6365 ),
6366 Location::GPR(self.machine.get_grp_for_call()),
6367 )?;
6368
6369 self.release_locations_only_osr_state(1)?;
6371 self.emit_call_native(
6372 |this| {
6373 this.machine
6374 .emit_call_register(this.machine.get_grp_for_call())
6375 },
6376 [Location::Imm32(table), dest, val, len].iter().cloned(),
6378 [WpType::I32, WpType::I64, WpType::I64, WpType::I64]
6379 .iter()
6380 .cloned(),
6381 )?;
6382
6383 self.release_locations_only_stack(&[dest, val, len])?;
6384 }
6385 Operator::TableInit { elem_index, table } => {
6386 let len = self.value_stack.pop().unwrap();
6387 let src = self.value_stack.pop().unwrap();
6388 let dest = self.value_stack.pop().unwrap();
6389 self.release_locations_only_regs(&[len, src, dest])?;
6390
6391 self.machine.move_location(
6392 Size::S64,
6393 Location::Memory(
6394 self.machine.get_vmctx_reg(),
6395 self.vmoffsets
6396 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_table_init_index())
6397 as i32,
6398 ),
6399 Location::GPR(self.machine.get_grp_for_call()),
6400 )?;
6401
6402 self.release_locations_only_osr_state(1)?;
6404 self.emit_call_native(
6405 |this| {
6406 this.machine
6407 .emit_call_register(this.machine.get_grp_for_call())
6408 },
6409 [
6411 Location::Imm32(table),
6412 Location::Imm32(elem_index),
6413 dest,
6414 src,
6415 len,
6416 ]
6417 .iter()
6418 .cloned(),
6419 [
6420 WpType::I32,
6421 WpType::I32,
6422 WpType::I64,
6423 WpType::I64,
6424 WpType::I64,
6425 ]
6426 .iter()
6427 .cloned(),
6428 )?;
6429
6430 self.release_locations_only_stack(&[dest, src, len])?;
6431 }
6432 Operator::ElemDrop { elem_index } => {
6433 self.machine.move_location(
6434 Size::S64,
6435 Location::Memory(
6436 self.machine.get_vmctx_reg(),
6437 self.vmoffsets
6438 .vmctx_builtin_function(VMBuiltinFunctionIndex::get_elem_drop_index())
6439 as i32,
6440 ),
6441 Location::GPR(self.machine.get_grp_for_call()),
6442 )?;
6443
6444 self.emit_call_native(
6447 |this| {
6448 this.machine
6449 .emit_call_register(this.machine.get_grp_for_call())
6450 },
6451 [Location::Imm32(elem_index)].iter().cloned(),
6453 [WpType::I32].iter().cloned(),
6454 )?;
6455 }
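// `memory.atomic.wait32`/`wait64`: call the local or imported wait builtin
// with (memory index, address, expected value, timeout) and push its i32
// result.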
            Operator::MemoryAtomicWait32 { ref memarg } => {
                let timeout = self.value_stack.pop().unwrap();
                let val = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();
                self.release_locations_only_regs(&[timeout, val, dst])?;

                let memory_index = MemoryIndex::new(memarg.memory as usize);
                let (memory_atomic_wait32, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_atomic_wait32) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.release_locations_only_osr_state(1)?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [
                        Location::Imm32(memory_index.index() as u32),
                        dst,
                        val,
                        timeout,
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I32, WpType::I32, WpType::I32, WpType::I64]
                        .iter()
                        .cloned(),
                )?;
                self.release_locations_only_stack(&[dst, val, timeout])?;
                let ret = self.acquire_locations(
                    &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
                    false,
                )?[0];
                self.value_stack.push(ret);
                self.machine.move_location(
                    Size::S32,
                    Location::GPR(self.machine.get_gpr_for_ret()),
                    ret,
                )?;
            }
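            // memory.atomic.wait64: same shape as the 32-bit variant, except the
            // expected value is an i64.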
            Operator::MemoryAtomicWait64 { ref memarg } => {
                let timeout = self.value_stack.pop().unwrap();
                let val = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();
                self.release_locations_only_regs(&[timeout, val, dst])?;

                let memory_index = MemoryIndex::new(memarg.memory as usize);
                let (memory_atomic_wait64, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_atomic_wait64) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.release_locations_only_osr_state(1)?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [
                        Location::Imm32(memory_index.index() as u32),
                        dst,
                        val,
                        timeout,
                    ]
                    .iter()
                    .cloned(),
                    [WpType::I32, WpType::I32, WpType::I64, WpType::I64]
                        .iter()
                        .cloned(),
                )?;
                self.release_locations_only_stack(&[dst, val, timeout])?;
                let ret = self.acquire_locations(
                    &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
                    false,
                )?[0];
                self.value_stack.push(ret);
                self.machine.move_location(
                    Size::S32,
                    Location::GPR(self.machine.get_gpr_for_ret()),
                    ret,
                )?;
            }
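            // memory.atomic.notify: selects the local or imported notify builtin
            // and pushes the i32 result (number of woken waiters) onto the value
            // stack. Only the memory index and destination address are forwarded
            // as call arguments in this lowering; `cnt` is popped and released
            // but not passed.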
            Operator::MemoryAtomicNotify { ref memarg } => {
                let cnt = self.value_stack.pop().unwrap();
                let dst = self.value_stack.pop().unwrap();
                self.release_locations_only_regs(&[cnt, dst])?;

                let memory_index = MemoryIndex::new(memarg.memory as usize);
                let (memory_atomic_notify, memory_index) =
                    if self.module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
                            memory_index,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
                            memory_index,
                        )
                    };

                self.machine.move_location(
                    Size::S64,
                    Location::Memory(
                        self.machine.get_vmctx_reg(),
                        self.vmoffsets.vmctx_builtin_function(memory_atomic_notify) as i32,
                    ),
                    Location::GPR(self.machine.get_grp_for_call()),
                )?;

                self.release_locations_only_osr_state(1)?;

                self.emit_call_native(
                    |this| {
                        this.machine
                            .emit_call_register(this.machine.get_grp_for_call())
                    },
                    [Location::Imm32(memory_index.index() as u32), dst]
                        .iter()
                        .cloned(),
                    [WpType::I32, WpType::I32].iter().cloned(),
                )?;
                self.release_locations_only_stack(&[dst, cnt])?;
                let ret = self.acquire_locations(
                    &[(WpType::I32, MachineValue::WasmStack(self.value_stack.len()))],
                    false,
                )?[0];
                self.value_stack.push(ret);
                self.machine.move_location(
                    Size::S32,
                    Location::GPR(self.machine.get_gpr_for_ret()),
                    ret,
                )?;
            }
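            // Any operator not handled above is rejected with a codegen error
            // instead of being silently miscompiled.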
            _ => {
                return Err(CompileError::Codegen(format!(
                    "not yet implemented: {op:?}"
                )));
            }
        }

        Ok(())
    }

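    /// Finishes code generation for the function: emits the shared trap stubs
    /// referenced by `special_labels`, finalizes the machine-level function,
    /// optionally generates unwind information (a DWARF FDE or Windows x64
    /// unwind data, depending on the calling convention and the `unwind`
    /// feature), and packages the body, relocations and frame info into a
    /// `CompiledFunction`.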
    pub fn finalize(
        mut self,
        data: &FunctionBodyData,
    ) -> Result<(CompiledFunction, Option<UnwindFrame>), CompileError> {
        self.machine
            .emit_label(self.special_labels.integer_division_by_zero)?;
        self.machine
            .emit_illegal_op(TrapCode::IntegerDivisionByZero)?;

        self.machine
            .emit_label(self.special_labels.integer_overflow)?;
        self.machine.emit_illegal_op(TrapCode::IntegerOverflow)?;

        self.machine
            .emit_label(self.special_labels.heap_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::HeapAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.table_access_oob)?;
        self.machine
            .emit_illegal_op(TrapCode::TableAccessOutOfBounds)?;

        self.machine
            .emit_label(self.special_labels.indirect_call_null)?;
        self.machine.emit_illegal_op(TrapCode::IndirectCallToNull)?;

        self.machine.emit_label(self.special_labels.bad_signature)?;
        self.machine.emit_illegal_op(TrapCode::BadSignature)?;

        self.machine
            .emit_label(self.special_labels.unaligned_atomic)?;
        self.machine.emit_illegal_op(TrapCode::UnalignedAtomic)?;

        self.machine.finalize_function()?;

        let body_len = self.machine.assembler_get_offset().0;

        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut unwind_info = None;
        #[cfg_attr(not(feature = "unwind"), allow(unused_mut))]
        let mut fde = None;
        #[cfg(feature = "unwind")]
        match self.calling_convention {
            CallingConvention::SystemV | CallingConvention::AppleAarch64 => {
                let unwind = self.machine.gen_dwarf_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    fde = Some(unwind.to_fde(Address::Symbol {
                        symbol: WriterRelocate::FUNCTION_SYMBOL,
                        addend: self.fsm.local_function_id as _,
                    }));
                    unwind_info = Some(CompiledFunctionUnwindInfo::Dwarf);
                }
            }
            CallingConvention::WindowsFastcall => {
                let unwind = self.machine.gen_windows_unwind_info(body_len);
                if let Some(unwind) = unwind {
                    unwind_info = Some(CompiledFunctionUnwindInfo::WindowsX64(unwind));
                }
            }
            _ => (),
        };

        let address_map =
            get_function_address_map(self.machine.instructions_address_map(), data, body_len);
        let traps = self.machine.collect_trap_information();
        let mut body = self.machine.assembler_finalize()?;
        body.shrink_to_fit();

        Ok((
            CompiledFunction {
                body: FunctionBody { body, unwind_info },
                relocations: self.relocations.clone(),
                frame_info: CompiledFunctionFrameInfo { traps, address_map },
            },
            fde,
        ))
    }
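
    /// Best-effort reordering of argument moves before a call: if a later move
    /// reads from a register that an earlier move is about to overwrite, the
    /// two moves are swapped so the read happens first. Cyclic dependencies
    /// between moves are not resolved by this pass.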
    #[allow(clippy::type_complexity)]
    fn sort_call_movs(movs: &mut [(Location<M::GPR, M::SIMD>, M::GPR)]) {
        for i in 0..movs.len() {
            for j in (i + 1)..movs.len() {
                if let Location::GPR(src_gpr) = movs[j].0 {
                    if src_gpr == movs[i].1 {
                        movs.swap(i, j);
                    }
                }
            }
        }
    }

}