// NOTE(review): replaced a non-Rust extraction artifact (stray language-detection
// metadata line) that would not compile as Rust.
use super::{
CompositeInnerType, Elements, FuncType, Instruction, InstructionKind::*, InstructionKinds,
Module, ValType,
};
use crate::{unique_string, MemoryOffsetChoices};
use arbitrary::{Result, Unstructured};
use std::collections::{BTreeMap, BTreeSet};
use std::rc::Rc;
use wasm_encoder::{
AbstractHeapType, ArrayType, BlockType, Catch, ConstExpr, ExportKind, FieldType, GlobalType,
HeapType, MemArg, RefType, StorageType, StructType,
};
mod no_traps;
// Expands the static list of `(predicate, generator, kind[, cost])` tuples
// (see the invocation below) into:
//
// * `NUM_OPTIONS`: the total number of declared instruction choices, and
// * `choose_instruction`: a function that picks one weighted-random
//   instruction generator that is valid in the current context.
macro_rules! instructions {
    (
        $(
            ($predicate:expr, $generator_fn:ident, $instruction_kind:ident $(, $cost:tt)?),
        )*
    ) => {
        // Total number of instruction entries declared in the invocation.
        static NUM_OPTIONS: usize = instructions!(
            @count;
            $( $generator_fn )*
        );

        // Select a generator function for the next instruction, weighted by
        // each instruction's cost, or return `None` when no instruction is
        // currently valid (or none is allowed by `allowed_instructions`).
        fn choose_instruction(
            u: &mut Unstructured<'_>,
            module: &Module,
            allowed_instructions: InstructionKinds,
            builder: &mut CodeBuilder,
        ) -> Option<
            fn(&mut Unstructured<'_>, &Module, &mut CodeBuilder, &mut Vec<Instruction>) -> Result<()>
        > {
            builder.allocs.options.clear();
            let mut cost = 0;
            // Unroll the loop that checks whether each instruction is valid in
            // the current context and, if it is valid, pushes it onto our
            // options. Unrolling this loops lets us avoid dynamic calls through
            // function pointers and, furthermore, each call site can be branch
            // predicted and even inlined. This saved us about 30% of time in
            // the `corpus` benchmark.
            $(
                let predicate: Option<fn(&Module, &mut CodeBuilder) -> bool> = $predicate;
                if predicate.map_or(true, |f| f(module, builder))
                    && allowed_instructions.contains($instruction_kind) {
                    // Each entry records the cumulative cost *before* it was
                    // pushed; the entry's own weight is `1000 - $cost` (or
                    // 1000 when no cost was given), so larger costs make an
                    // instruction less likely to be chosen.
                    builder.allocs.options.push(($generator_fn, cost));
                    cost += 1000 $(- $cost)?;
                }
            )*

            // If there aren't actually any candidate instructions due to
            // various filters in place then return `None` to indicate the
            // situation.
            if cost == 0 {
                return None;
            }

            // Pick a point in the cumulative-weight space, then map it back to
            // the entry whose weight range covers that point. An `Err` from
            // the binary search means the point fell strictly inside the
            // previous entry's range, hence `i - 1`.
            let i = u.int_in_range(0..=cost).ok()?;
            let idx = builder
                .allocs
                .options
                .binary_search_by_key(&i,|p| p.1)
                .unwrap_or_else(|i| i - 1);
            Some(builder.allocs.options[idx].0)
        }
    };

    ( @count; ) => {
        0
    };

    ( @count; $x:ident $( $xs:ident )* ) => {
        1 + instructions!( @count; $( $xs )* )
    };
}
// The static set of options of instruction to generate that could be valid at
// some given time. One entry per Wasm instruction.
//
// Each entry is made up of up to four parts:
//
// 1. A predicate for whether this is a valid choice, if any. `None` means that
// the choice is always applicable.
//
// 2. The function to generate the instruction, given that we've made this
// choice.
//
// 3. The `InstructionKind` the instruction belongs to; this allows filtering
// out instructions by category.
//
// 4. An optional number used to weight how often this instruction is chosen.
// Higher numbers are less likely to be chosen, and the number specified must
// be less than 1000.
instructions! {
    // Control instructions.
    (Some(unreachable_valid), unreachable, Control, 990),
    (None, nop, Control, 800),
    (None, block, Control),
    (None, r#loop, Control),
    (Some(try_table_valid), try_table, Control),
    (Some(if_valid), r#if, Control),
    (Some(else_valid), r#else, Control),
    (Some(end_valid), end, Control),
    (Some(br_valid), br, Control),
    (Some(br_if_valid), br_if, Control),
    (Some(br_table_valid), br_table, Control),
    (Some(return_valid), r#return, Control, 900),
    (Some(call_valid), call, Control),
    (Some(call_ref_valid), call_ref, Control),
    (Some(call_indirect_valid), call_indirect, Control),
    (Some(return_call_valid), return_call, Control),
    (Some(return_call_ref_valid), return_call_ref, Control),
    (Some(return_call_indirect_valid), return_call_indirect, Control),
    (Some(throw_valid), throw, Control, 850),
    (Some(throw_ref_valid), throw_ref, Control, 850),
    (Some(br_on_null_valid), br_on_null, Control),
    (Some(br_on_non_null_valid), br_on_non_null, Control),
    (Some(br_on_cast_valid), br_on_cast, Control),
    (Some(br_on_cast_fail_valid), br_on_cast_fail, Control),
    // Parametric instructions.
    (Some(drop_valid), drop, Parametric, 990),
    (Some(select_valid), select, Parametric),
    // Variable instructions.
    (Some(local_get_valid), local_get, Variable),
    (Some(local_set_valid), local_set, Variable),
    // NOTE: `local.tee` reuses the `local.set` predicate since both require a
    // settable local plus a matching operand on the stack.
    (Some(local_set_valid), local_tee, Variable),
    (Some(global_get_valid), global_get, Variable),
    (Some(global_set_valid), global_set, Variable),
    // Memory instructions.
    (Some(have_memory_and_offset), i32_load, MemoryInt),
    (Some(have_memory_and_offset), i64_load, MemoryInt),
    (Some(have_memory_and_offset), f32_load, Memory),
    (Some(have_memory_and_offset), f64_load, Memory),
    (Some(have_memory_and_offset), i32_load_8_s, MemoryInt),
    (Some(have_memory_and_offset), i32_load_8_u, MemoryInt),
    (Some(have_memory_and_offset), i32_load_16_s, MemoryInt),
    (Some(have_memory_and_offset), i32_load_16_u, MemoryInt),
    (Some(have_memory_and_offset), i64_load_8_s, MemoryInt),
    (Some(have_memory_and_offset), i64_load_16_s, MemoryInt),
    (Some(have_memory_and_offset), i64_load_32_s, MemoryInt),
    (Some(have_memory_and_offset), i64_load_8_u, MemoryInt),
    (Some(have_memory_and_offset), i64_load_16_u, MemoryInt),
    (Some(have_memory_and_offset), i64_load_32_u, MemoryInt),
    (Some(i32_store_valid), i32_store, MemoryInt),
    (Some(i64_store_valid), i64_store, MemoryInt),
    (Some(f32_store_valid), f32_store, Memory),
    (Some(f64_store_valid), f64_store, Memory),
    (Some(i32_store_valid), i32_store_8, MemoryInt),
    (Some(i32_store_valid), i32_store_16, MemoryInt),
    (Some(i64_store_valid), i64_store_8, MemoryInt),
    (Some(i64_store_valid), i64_store_16, MemoryInt),
    (Some(i64_store_valid), i64_store_32, MemoryInt),
    (Some(have_memory), memory_size, MemoryInt),
    (Some(memory_grow_valid), memory_grow, MemoryInt),
    (Some(memory_init_valid), memory_init, MemoryInt),
    (Some(data_drop_valid), data_drop, MemoryInt),
    (Some(memory_copy_valid), memory_copy, MemoryInt),
    (Some(memory_fill_valid), memory_fill, MemoryInt),
    // Numeric instructions.
    (None, i32_const, NumericInt),
    (None, i64_const, NumericInt),
    (None, f32_const, Numeric),
    (None, f64_const, Numeric),
    (Some(i32_on_stack), i32_eqz, NumericInt),
    (Some(i32_i32_on_stack), i32_eq, NumericInt),
    (Some(i32_i32_on_stack), i32_ne, NumericInt),
    (Some(i32_i32_on_stack), i32_lt_s, NumericInt),
    (Some(i32_i32_on_stack), i32_lt_u, NumericInt),
    (Some(i32_i32_on_stack), i32_gt_s, NumericInt),
    (Some(i32_i32_on_stack), i32_gt_u, NumericInt),
    (Some(i32_i32_on_stack), i32_le_s, NumericInt),
    (Some(i32_i32_on_stack), i32_le_u, NumericInt),
    (Some(i32_i32_on_stack), i32_ge_s, NumericInt),
    (Some(i32_i32_on_stack), i32_ge_u, NumericInt),
    (Some(i64_on_stack), i64_eqz, NumericInt),
    (Some(i64_i64_on_stack), i64_eq, NumericInt),
    (Some(i64_i64_on_stack), i64_ne, NumericInt),
    (Some(i64_i64_on_stack), i64_lt_s, NumericInt),
    (Some(i64_i64_on_stack), i64_lt_u, NumericInt),
    (Some(i64_i64_on_stack), i64_gt_s, NumericInt),
    (Some(i64_i64_on_stack), i64_gt_u, NumericInt),
    (Some(i64_i64_on_stack), i64_le_s, NumericInt),
    (Some(i64_i64_on_stack), i64_le_u, NumericInt),
    (Some(i64_i64_on_stack), i64_ge_s, NumericInt),
    (Some(i64_i64_on_stack), i64_ge_u, NumericInt),
    (Some(f32_f32_on_stack), f32_eq, Numeric),
    (Some(f32_f32_on_stack), f32_ne, Numeric),
    (Some(f32_f32_on_stack), f32_lt, Numeric),
    (Some(f32_f32_on_stack), f32_gt, Numeric),
    (Some(f32_f32_on_stack), f32_le, Numeric),
    (Some(f32_f32_on_stack), f32_ge, Numeric),
    (Some(f64_f64_on_stack), f64_eq, Numeric),
    (Some(f64_f64_on_stack), f64_ne, Numeric),
    (Some(f64_f64_on_stack), f64_lt, Numeric),
    (Some(f64_f64_on_stack), f64_gt, Numeric),
    (Some(f64_f64_on_stack), f64_le, Numeric),
    (Some(f64_f64_on_stack), f64_ge, Numeric),
    (Some(i32_on_stack), i32_clz, NumericInt),
    (Some(i32_on_stack), i32_ctz, NumericInt),
    (Some(i32_on_stack), i32_popcnt, NumericInt),
    (Some(i32_i32_on_stack), i32_add, NumericInt),
    (Some(i32_i32_on_stack), i32_sub, NumericInt),
    (Some(i32_i32_on_stack), i32_mul, NumericInt),
    (Some(i32_i32_on_stack), i32_div_s, NumericInt),
    (Some(i32_i32_on_stack), i32_div_u, NumericInt),
    (Some(i32_i32_on_stack), i32_rem_s, NumericInt),
    (Some(i32_i32_on_stack), i32_rem_u, NumericInt),
    (Some(i32_i32_on_stack), i32_and, NumericInt),
    (Some(i32_i32_on_stack), i32_or, NumericInt),
    (Some(i32_i32_on_stack), i32_xor, NumericInt),
    (Some(i32_i32_on_stack), i32_shl, NumericInt),
    (Some(i32_i32_on_stack), i32_shr_s, NumericInt),
    (Some(i32_i32_on_stack), i32_shr_u, NumericInt),
    (Some(i32_i32_on_stack), i32_rotl, NumericInt),
    (Some(i32_i32_on_stack), i32_rotr, NumericInt),
    (Some(i64_on_stack), i64_clz, NumericInt),
    (Some(i64_on_stack), i64_ctz, NumericInt),
    (Some(i64_on_stack), i64_popcnt, NumericInt),
    (Some(i64_i64_on_stack), i64_add, NumericInt),
    (Some(i64_i64_on_stack), i64_sub, NumericInt),
    (Some(i64_i64_on_stack), i64_mul, NumericInt),
    (Some(i64_i64_on_stack), i64_div_s, NumericInt),
    (Some(i64_i64_on_stack), i64_div_u, NumericInt),
    (Some(i64_i64_on_stack), i64_rem_s, NumericInt),
    (Some(i64_i64_on_stack), i64_rem_u, NumericInt),
    (Some(i64_i64_on_stack), i64_and, NumericInt),
    (Some(i64_i64_on_stack), i64_or, NumericInt),
    (Some(i64_i64_on_stack), i64_xor, NumericInt),
    (Some(i64_i64_on_stack), i64_shl, NumericInt),
    (Some(i64_i64_on_stack), i64_shr_s, NumericInt),
    (Some(i64_i64_on_stack), i64_shr_u, NumericInt),
    (Some(i64_i64_on_stack), i64_rotl, NumericInt),
    (Some(i64_i64_on_stack), i64_rotr, NumericInt),
    (Some(f32_on_stack), f32_abs, Numeric),
    (Some(f32_on_stack), f32_neg, Numeric),
    (Some(f32_on_stack), f32_ceil, Numeric),
    (Some(f32_on_stack), f32_floor, Numeric),
    (Some(f32_on_stack), f32_trunc, Numeric),
    (Some(f32_on_stack), f32_nearest, Numeric),
    (Some(f32_on_stack), f32_sqrt, Numeric),
    (Some(f32_f32_on_stack), f32_add, Numeric),
    (Some(f32_f32_on_stack), f32_sub, Numeric),
    (Some(f32_f32_on_stack), f32_mul, Numeric),
    (Some(f32_f32_on_stack), f32_div, Numeric),
    (Some(f32_f32_on_stack), f32_min, Numeric),
    (Some(f32_f32_on_stack), f32_max, Numeric),
    (Some(f32_f32_on_stack), f32_copysign, Numeric),
    (Some(f64_on_stack), f64_abs, Numeric),
    (Some(f64_on_stack), f64_neg, Numeric),
    (Some(f64_on_stack), f64_ceil, Numeric),
    (Some(f64_on_stack), f64_floor, Numeric),
    (Some(f64_on_stack), f64_trunc, Numeric),
    (Some(f64_on_stack), f64_nearest, Numeric),
    (Some(f64_on_stack), f64_sqrt, Numeric),
    (Some(f64_f64_on_stack), f64_add, Numeric),
    (Some(f64_f64_on_stack), f64_sub, Numeric),
    (Some(f64_f64_on_stack), f64_mul, Numeric),
    (Some(f64_f64_on_stack), f64_div, Numeric),
    (Some(f64_f64_on_stack), f64_min, Numeric),
    (Some(f64_f64_on_stack), f64_max, Numeric),
    (Some(f64_f64_on_stack), f64_copysign, Numeric),
    // Conversions.
    (Some(i64_on_stack), i32_wrap_i64, NumericInt),
    (Some(f32_on_stack), i32_trunc_f32_s, Numeric),
    (Some(f32_on_stack), i32_trunc_f32_u, Numeric),
    (Some(f64_on_stack), i32_trunc_f64_s, Numeric),
    (Some(f64_on_stack), i32_trunc_f64_u, Numeric),
    (Some(i32_on_stack), i64_extend_i32_s, NumericInt),
    (Some(i32_on_stack), i64_extend_i32_u, NumericInt),
    (Some(f32_on_stack), i64_trunc_f32_s, Numeric),
    (Some(f32_on_stack), i64_trunc_f32_u, Numeric),
    (Some(f64_on_stack), i64_trunc_f64_s, Numeric),
    (Some(f64_on_stack), i64_trunc_f64_u, Numeric),
    (Some(i32_on_stack), f32_convert_i32_s, Numeric),
    (Some(i32_on_stack), f32_convert_i32_u, Numeric),
    (Some(i64_on_stack), f32_convert_i64_s, Numeric),
    (Some(i64_on_stack), f32_convert_i64_u, Numeric),
    (Some(f64_on_stack), f32_demote_f64, Numeric),
    (Some(i32_on_stack), f64_convert_i32_s, Numeric),
    (Some(i32_on_stack), f64_convert_i32_u, Numeric),
    (Some(i64_on_stack), f64_convert_i64_s, Numeric),
    (Some(i64_on_stack), f64_convert_i64_u, Numeric),
    (Some(f32_on_stack), f64_promote_f32, Numeric),
    (Some(f32_on_stack), i32_reinterpret_f32, Numeric),
    (Some(f64_on_stack), i64_reinterpret_f64, Numeric),
    (Some(i32_on_stack), f32_reinterpret_i32, Numeric),
    (Some(i64_on_stack), f64_reinterpret_i64, Numeric),
    (Some(extendable_i32_on_stack), i32_extend_8_s, NumericInt),
    (Some(extendable_i32_on_stack), i32_extend_16_s, NumericInt),
    (Some(extendable_i64_on_stack), i64_extend_8_s, NumericInt),
    (Some(extendable_i64_on_stack), i64_extend_16_s, NumericInt),
    (Some(extendable_i64_on_stack), i64_extend_32_s, NumericInt),
    (Some(nontrapping_f32_on_stack), i32_trunc_sat_f32_s, Numeric),
    (Some(nontrapping_f32_on_stack), i32_trunc_sat_f32_u, Numeric),
    (Some(nontrapping_f64_on_stack), i32_trunc_sat_f64_s, Numeric),
    (Some(nontrapping_f64_on_stack), i32_trunc_sat_f64_u, Numeric),
    (Some(nontrapping_f32_on_stack), i64_trunc_sat_f32_s, Numeric),
    (Some(nontrapping_f32_on_stack), i64_trunc_sat_f32_u, Numeric),
    (Some(nontrapping_f64_on_stack), i64_trunc_sat_f64_s, Numeric),
    (Some(nontrapping_f64_on_stack), i64_trunc_sat_f64_u, Numeric),
    // Reference instructions.
    (Some(ref_null_valid), ref_null, Reference),
    (Some(ref_func_valid), ref_func, Reference),
    (Some(ref_as_non_null_valid), ref_as_non_null, Reference),
    (Some(ref_eq_valid), ref_eq, Reference),
    (Some(ref_test_valid), ref_test, Reference),
    (Some(ref_cast_valid), ref_cast, Reference),
    (Some(ref_is_null_valid), ref_is_null, Reference),
    (Some(table_fill_valid), table_fill, Reference),
    (Some(table_set_valid), table_set, Reference),
    (Some(table_get_valid), table_get, Reference),
    (Some(table_size_valid), table_size, Reference),
    (Some(table_grow_valid), table_grow, Reference),
    (Some(table_copy_valid), table_copy, Reference),
    (Some(table_init_valid), table_init, Reference),
    (Some(elem_drop_valid), elem_drop, Reference),
    // Aggregate (GC struct/array/i31) instructions.
    (Some(struct_new_valid), struct_new, Aggregate),
    (Some(struct_new_default_valid), struct_new_default, Aggregate),
    (Some(struct_get_valid), struct_get, Aggregate),
    (Some(struct_set_valid), struct_set, Aggregate),
    (Some(array_new_valid), array_new, Aggregate),
    (Some(array_new_fixed_valid), array_new_fixed, Aggregate),
    (Some(array_new_default_valid), array_new_default, Aggregate),
    (Some(array_new_data_valid), array_new_data, Aggregate),
    (Some(array_new_elem_valid), array_new_elem, Aggregate),
    (Some(array_get_valid), array_get, Aggregate),
    (Some(array_set_valid), array_set, Aggregate),
    (Some(array_len_valid), array_len, Aggregate),
    (Some(array_fill_valid), array_fill, Aggregate),
    (Some(array_copy_valid), array_copy, Aggregate),
    (Some(array_init_data_valid), array_init_data, Aggregate),
    (Some(array_init_elem_valid), array_init_elem, Aggregate),
    (Some(ref_i31_valid), ref_i31, Aggregate),
    (Some(i31_get_valid), i31_get, Aggregate),
    (Some(any_convert_extern_valid), any_convert_extern, Aggregate),
    (Some(extern_convert_any_valid), extern_convert_any, Aggregate),
    // SIMD instructions.
    (Some(simd_have_memory_and_offset), v128_load, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load8x8s, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load8x8u, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load16x4s, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load16x4u, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load32x2s, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load32x2u, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load8_splat, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load16_splat, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load32_splat, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load64_splat, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load32_zero, VectorInt),
    (Some(simd_have_memory_and_offset), v128_load64_zero, VectorInt),
    (Some(simd_v128_store_valid), v128_store, VectorInt),
    (Some(simd_load_lane_valid), v128_load8_lane, VectorInt),
    (Some(simd_load_lane_valid), v128_load16_lane, VectorInt),
    (Some(simd_load_lane_valid), v128_load32_lane, VectorInt),
    (Some(simd_load_lane_valid), v128_load64_lane, VectorInt),
    (Some(simd_store_lane_valid), v128_store8_lane, VectorInt),
    (Some(simd_store_lane_valid), v128_store16_lane, VectorInt),
    (Some(simd_store_lane_valid), v128_store32_lane, VectorInt),
    (Some(simd_store_lane_valid), v128_store64_lane, VectorInt),
    (Some(simd_enabled), v128_const, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_shuffle, VectorInt),
    (Some(simd_v128_on_stack), i8x16_extract_lane_s, VectorInt),
    (Some(simd_v128_on_stack), i8x16_extract_lane_u, VectorInt),
    (Some(simd_v128_i32_on_stack), i8x16_replace_lane, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extract_lane_s, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extract_lane_u, VectorInt),
    (Some(simd_v128_i32_on_stack), i16x8_replace_lane, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extract_lane, VectorInt),
    (Some(simd_v128_i32_on_stack), i32x4_replace_lane, VectorInt),
    (Some(simd_v128_on_stack), i64x2_extract_lane, VectorInt),
    (Some(simd_v128_i64_on_stack), i64x2_replace_lane, VectorInt),
    (Some(simd_v128_on_stack), f32x4_extract_lane, Vector),
    (Some(simd_v128_f32_on_stack), f32x4_replace_lane, Vector),
    (Some(simd_v128_on_stack), f64x2_extract_lane, Vector),
    (Some(simd_v128_f64_on_stack), f64x2_replace_lane, Vector),
    (Some(simd_i32_on_stack), i8x16_splat, VectorInt),
    (Some(simd_i32_on_stack), i16x8_splat, VectorInt),
    (Some(simd_i32_on_stack), i32x4_splat, VectorInt),
    (Some(simd_i64_on_stack), i64x2_splat, VectorInt),
    (Some(simd_f32_on_stack), f32x4_splat, Vector),
    (Some(simd_f64_on_stack), f64x2_splat, Vector),
    (Some(simd_v128_v128_on_stack), i8x16_swizzle, VectorInt),
    (Some(simd_v128_v128_on_stack_relaxed), i8x16_relaxed_swizzle, VectorInt),
    (Some(simd_v128_v128_v128_on_stack), v128_bitselect, VectorInt),
    (Some(simd_v128_v128_v128_on_stack_relaxed), i8x16_relaxed_laneselect, VectorInt),
    (Some(simd_v128_v128_v128_on_stack_relaxed), i16x8_relaxed_laneselect, VectorInt),
    (Some(simd_v128_v128_v128_on_stack_relaxed), i32x4_relaxed_laneselect, VectorInt),
    (Some(simd_v128_v128_v128_on_stack_relaxed), i64x2_relaxed_laneselect, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_eq, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_ne, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_lt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_lt_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_gt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_gt_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_le_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_le_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_ge_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_ge_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_eq, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_ne, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_lt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_lt_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_gt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_gt_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_le_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_le_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_ge_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_ge_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_eq, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_ne, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_lt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_lt_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_gt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_gt_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_le_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_le_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_ge_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_ge_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_eq, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_ne, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_lt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_gt_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_le_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_ge_s, VectorInt),
    (Some(simd_v128_v128_on_stack), f32x4_eq, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_ne, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_lt, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_gt, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_le, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_ge, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_eq, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_ne, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_lt, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_gt, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_le, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_ge, Vector),
    (Some(simd_v128_on_stack), v128_not, VectorInt),
    (Some(simd_v128_v128_on_stack), v128_and, VectorInt),
    (Some(simd_v128_v128_on_stack), v128_and_not, VectorInt),
    (Some(simd_v128_v128_on_stack), v128_or, VectorInt),
    (Some(simd_v128_v128_on_stack), v128_xor, VectorInt),
    (Some(simd_v128_v128_on_stack), v128_any_true, VectorInt),
    (Some(simd_v128_on_stack), i8x16_abs, VectorInt),
    (Some(simd_v128_on_stack), i8x16_neg, VectorInt),
    (Some(simd_v128_on_stack), i8x16_popcnt, VectorInt),
    (Some(simd_v128_on_stack), i8x16_all_true, VectorInt),
    (Some(simd_v128_on_stack), i8x16_bitmask, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_narrow_i16x8s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_narrow_i16x8u, VectorInt),
    (Some(simd_v128_i32_on_stack), i8x16_shl, VectorInt),
    (Some(simd_v128_i32_on_stack), i8x16_shr_s, VectorInt),
    (Some(simd_v128_i32_on_stack), i8x16_shr_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_add, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_add_sat_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_add_sat_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_sub, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_sub_sat_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_sub_sat_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_min_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_min_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_max_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_max_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i8x16_avgr_u, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extadd_pairwise_i8x16s, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extadd_pairwise_i8x16u, VectorInt),
    (Some(simd_v128_on_stack), i16x8_abs, VectorInt),
    (Some(simd_v128_on_stack), i16x8_neg, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8q15_mulr_sat_s, VectorInt),
    (Some(simd_v128_on_stack), i16x8_all_true, VectorInt),
    (Some(simd_v128_on_stack), i16x8_bitmask, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_narrow_i32x4s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_narrow_i32x4u, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extend_low_i8x16s, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extend_high_i8x16s, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extend_low_i8x16u, VectorInt),
    (Some(simd_v128_on_stack), i16x8_extend_high_i8x16u, VectorInt),
    (Some(simd_v128_i32_on_stack), i16x8_shl, VectorInt),
    (Some(simd_v128_i32_on_stack), i16x8_shr_s, VectorInt),
    (Some(simd_v128_i32_on_stack), i16x8_shr_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_add, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_add_sat_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_add_sat_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_sub, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_sub_sat_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_sub_sat_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_mul, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_min_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_min_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_max_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_max_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_avgr_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_extmul_low_i8x16s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_extmul_high_i8x16s, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_extmul_low_i8x16u, VectorInt),
    (Some(simd_v128_v128_on_stack), i16x8_extmul_high_i8x16u, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extadd_pairwise_i16x8s, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extadd_pairwise_i16x8u, VectorInt),
    (Some(simd_v128_on_stack), i32x4_abs, VectorInt),
    (Some(simd_v128_on_stack), i32x4_neg, VectorInt),
    (Some(simd_v128_on_stack), i32x4_all_true, VectorInt),
    (Some(simd_v128_on_stack), i32x4_bitmask, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extend_low_i16x8s, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extend_high_i16x8s, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extend_low_i16x8u, VectorInt),
    (Some(simd_v128_on_stack), i32x4_extend_high_i16x8u, VectorInt),
    (Some(simd_v128_i32_on_stack), i32x4_shl, VectorInt),
    (Some(simd_v128_i32_on_stack), i32x4_shr_s, VectorInt),
    (Some(simd_v128_i32_on_stack), i32x4_shr_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_add, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_sub, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_mul, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_min_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_min_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_max_s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_max_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_dot_i16x8s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_extmul_low_i16x8s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_extmul_high_i16x8s, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_extmul_low_i16x8u, VectorInt),
    (Some(simd_v128_v128_on_stack), i32x4_extmul_high_i16x8u, VectorInt),
    (Some(simd_v128_on_stack), i64x2_abs, VectorInt),
    (Some(simd_v128_on_stack), i64x2_neg, VectorInt),
    (Some(simd_v128_on_stack), i64x2_all_true, VectorInt),
    (Some(simd_v128_on_stack), i64x2_bitmask, VectorInt),
    (Some(simd_v128_on_stack), i64x2_extend_low_i32x4s, VectorInt),
    (Some(simd_v128_on_stack), i64x2_extend_high_i32x4s, VectorInt),
    (Some(simd_v128_on_stack), i64x2_extend_low_i32x4u, VectorInt),
    (Some(simd_v128_on_stack), i64x2_extend_high_i32x4u, VectorInt),
    (Some(simd_v128_i32_on_stack), i64x2_shl, VectorInt),
    (Some(simd_v128_i32_on_stack), i64x2_shr_s, VectorInt),
    (Some(simd_v128_i32_on_stack), i64x2_shr_u, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_add, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_sub, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_mul, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_extmul_low_i32x4s, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_extmul_high_i32x4s, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_extmul_low_i32x4u, VectorInt),
    (Some(simd_v128_v128_on_stack), i64x2_extmul_high_i32x4u, VectorInt),
    (Some(simd_v128_on_stack), f32x4_ceil, Vector),
    (Some(simd_v128_on_stack), f32x4_floor, Vector),
    (Some(simd_v128_on_stack), f32x4_trunc, Vector),
    (Some(simd_v128_on_stack), f32x4_nearest, Vector),
    (Some(simd_v128_on_stack), f32x4_abs, Vector),
    (Some(simd_v128_on_stack), f32x4_neg, Vector),
    (Some(simd_v128_on_stack), f32x4_sqrt, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_add, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_sub, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_mul, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_div, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_min, Vector),
    (Some(simd_v128_v128_on_stack), f32x4_max, Vector),
    (Some(simd_v128_v128_on_stack), f32x4p_min, Vector),
    (Some(simd_v128_v128_on_stack), f32x4p_max, Vector),
    (Some(simd_v128_on_stack), f64x2_ceil, Vector),
    (Some(simd_v128_on_stack), f64x2_floor, Vector),
    (Some(simd_v128_on_stack), f64x2_trunc, Vector),
    (Some(simd_v128_on_stack), f64x2_nearest, Vector),
    (Some(simd_v128_on_stack), f64x2_abs, Vector),
    (Some(simd_v128_on_stack), f64x2_neg, Vector),
    (Some(simd_v128_on_stack), f64x2_sqrt, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_add, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_sub, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_mul, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_div, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_min, Vector),
    (Some(simd_v128_v128_on_stack), f64x2_max, Vector),
    (Some(simd_v128_v128_on_stack), f64x2p_min, Vector),
    (Some(simd_v128_v128_on_stack), f64x2p_max, Vector),
    (Some(simd_v128_on_stack), i32x4_trunc_sat_f32x4s, Vector),
    (Some(simd_v128_on_stack), i32x4_trunc_sat_f32x4u, Vector),
    (Some(simd_v128_on_stack), f32x4_convert_i32x4s, Vector),
    (Some(simd_v128_on_stack), f32x4_convert_i32x4u, Vector),
    (Some(simd_v128_on_stack), i32x4_trunc_sat_f64x2s_zero, Vector),
    (Some(simd_v128_on_stack), i32x4_trunc_sat_f64x2u_zero, Vector),
    (Some(simd_v128_on_stack), f64x2_convert_low_i32x4s, Vector),
    (Some(simd_v128_on_stack), f64x2_convert_low_i32x4u, Vector),
    (Some(simd_v128_on_stack), f32x4_demote_f64x2_zero, Vector),
    (Some(simd_v128_on_stack), f64x2_promote_low_f32x4, Vector),
    // Relaxed SIMD instructions (gated behind the `_relaxed` predicates).
    (Some(simd_v128_on_stack_relaxed), i32x4_relaxed_trunc_f32x4s, Vector),
    (Some(simd_v128_on_stack_relaxed), i32x4_relaxed_trunc_f32x4u, Vector),
    (Some(simd_v128_on_stack_relaxed), i32x4_relaxed_trunc_f64x2s_zero, Vector),
    (Some(simd_v128_on_stack_relaxed), i32x4_relaxed_trunc_f64x2u_zero, Vector),
    (Some(simd_v128_v128_v128_on_stack_relaxed), f32x4_relaxed_madd, Vector),
    (Some(simd_v128_v128_v128_on_stack_relaxed), f32x4_relaxed_nmadd, Vector),
    (Some(simd_v128_v128_v128_on_stack_relaxed), f64x2_relaxed_madd, Vector),
    (Some(simd_v128_v128_v128_on_stack_relaxed), f64x2_relaxed_nmadd, Vector),
    (Some(simd_v128_v128_on_stack_relaxed), f32x4_relaxed_min, Vector),
    (Some(simd_v128_v128_on_stack_relaxed), f32x4_relaxed_max, Vector),
    (Some(simd_v128_v128_on_stack_relaxed), f64x2_relaxed_min, Vector),
    (Some(simd_v128_v128_on_stack_relaxed), f64x2_relaxed_max, Vector),
    (Some(simd_v128_v128_on_stack_relaxed), i16x8_relaxed_q15mulr_s, VectorInt),
    (Some(simd_v128_v128_on_stack_relaxed), i16x8_relaxed_dot_i8x16_i7x16_s, VectorInt),
    (Some(simd_v128_v128_v128_on_stack_relaxed), i32x4_relaxed_dot_i8x16_i7x16_add_s, VectorInt),
    // Wide-arithmetic instructions.
    (Some(wide_arithmetic_binop128_on_stack), i64_add128, NumericInt),
    (Some(wide_arithmetic_binop128_on_stack), i64_sub128, NumericInt),
    (Some(wide_arithmetic_mul_wide_on_stack), i64_mul_wide_s, NumericInt),
    (Some(wide_arithmetic_mul_wide_on_stack), i64_mul_wide_u, NumericInt),
}
/// Scratch storage that is reused while building every function body, plus
/// cached lookup tables derived from the module that speed up the
/// per-instruction validity checks in `choose_instruction`.
pub(crate) struct CodeBuilderAllocations {
    // The control labels in scope right now.
    controls: Vec<Control>,

    // The types on the operand stack right now. NOTE(review): `None`
    // presumably encodes a value of statically-unknown type (e.g. produced in
    // unreachable code) — confirm against the stack-manipulation code.
    operands: Vec<Option<ValType>>,

    // Dynamic set of options of instruction we can generate that are known to
    // be valid right now. Each entry pairs a generator function with its
    // cumulative weight key (see `choose_instruction` in the macro above).
    options: Vec<(
        fn(&mut Unstructured, &Module, &mut CodeBuilder, &mut Vec<Instruction>) -> Result<()>,
        u32,
    )>,

    // Cached information about the module that we're generating functions for,
    // used to speed up validity checks. The mutable globals map is a map of the
    // type of global to the global indices which have that type (and they're
    // all mutable).
    mutable_globals: BTreeMap<ValType, Vec<u32>>,

    // Like mutable globals above this is a map from function types to the list
    // of functions that have that function type.
    functions: BTreeMap<Rc<FuncType>, Vec<u32>>,

    // Like functions above this is a map from tag types (keyed by the tag's
    // parameter list) to the list of tags that have that tag type.
    tags: BTreeMap<Vec<ValType>, Vec<u32>>,

    // Tables in this module which have a funcref element type, split by
    // 32-bit vs. 64-bit table indexing.
    table32_with_funcref: Vec<u32>,
    table64_with_funcref: Vec<u32>,

    // Functions that are referenced in the module through globals and segments.
    referenced_functions: Vec<u32>,

    // Precomputed tables/element segments that can be used for `table.init`,
    // stored as (segment, table).
    table32_init: Vec<(u32, u32)>,
    table64_init: Vec<(u32, u32)>,

    // Precomputed valid tables to copy between, stored in (src, dst) order.
    table_copy_32_to_32: Vec<(u32, u32)>,
    table_copy_32_to_64: Vec<(u32, u32)>,
    table_copy_64_to_32: Vec<(u32, u32)>,
    table_copy_64_to_64: Vec<(u32, u32)>,

    // Lists of table/memory indices which are either 32-bit or 64-bit. This is
    // used for faster lookup in validating instructions to know which memories
    // have which types. For example if there are no 64-bit memories then we
    // shouldn't ever look for i64 on the stack for `i32.load`.
    memory32: Vec<u32>,
    memory64: Vec<u32>,
    table32: Vec<u32>,
    table64: Vec<u32>,

    // State used when dropping operands to avoid dropping them into the ether
    // but instead folding their final values into module state, at this time
    // chosen to be exported globals.
    globals_cnt: u32,
    new_globals: Vec<(ValType, ConstExpr)>,
    // Per-type indices of the global a dropped value of that type is folded
    // into; `None` until one is created.
    global_dropped_i32: Option<u32>,
    global_dropped_i64: Option<u32>,
    global_dropped_f32: Option<u32>,
    global_dropped_f64: Option<u32>,
    global_dropped_v128: Option<u32>,

    // Indicates that additional exports cannot be generated. This will be true
    // if the `Config` specifies exactly which exports should be present.
    disallow_exporting: bool,
}
/// Per-function state used while generating one function body.
pub(crate) struct CodeBuilder<'a> {
    // The type of the function whose body is being generated.
    func_ty: &'a FuncType,
    // The declared locals of the function being generated (appended to at the
    // end of generation with `extra_locals`).
    locals: &'a mut Vec<ValType>,
    // Shared, reusable allocations plus cached per-module lookup tables.
    allocs: &'a mut CodeBuilderAllocations,
    // Temporary locals injected and used by nan canonicalization. Note that
    // this list of extra locals is appended to `self.locals` at the end of code
    // generation, and it's kept separate here to avoid using these locals in
    // `local.get` and similar instructions.
    extra_locals: Vec<ValType>,
    // Scratch-local indices for NaN canonicalization, one per scratch value
    // type (see `canonicalize_nan`).
    f32_scratch: Option<usize>,
    f64_scratch: Option<usize>,
    v128_scratch: Option<usize>,
}
/// A control frame.
#[derive(Debug, Clone)]
struct Control {
    /// Which structured control construct this frame represents.
    kind: ControlKind,
    /// Value types that must be on the stack when entering this control frame.
    params: Vec<ValType>,
    /// Value types that are left on the stack when exiting this control frame.
    results: Vec<ValType>,
    /// How far down the operand stack instructions inside this control frame
    /// can reach.
    height: usize,
}
impl Control {
    /// The value types a branch to this frame's label expects on the operand
    /// stack: a `loop` label targets the loop header (its parameters), while
    /// every other frame's label targets its end (its results).
    fn label_types(&self) -> &[ValType] {
        match self.kind {
            ControlKind::Loop => &self.params,
            _ => &self.results,
        }
    }
}
/// The kind of structured control construct a `Control` frame represents.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ControlKind {
    /// A `block ... end` frame (also used for the implicit function-body
    /// frame pushed in `CodeBuilderAllocations::builder`).
    Block,
    /// An `if ... else ... end` frame.
    If,
    /// A `loop ... end` frame; branches to its label target the loop header.
    Loop,
    /// A `try_table ... end` frame.
    TryTable,
}
/// Which floating-point shape NaN canonicalization is operating on.
enum Float {
    /// Scalar `f32`.
    F32,
    /// Scalar `f64`.
    F64,
    /// A v128 vector treated as four `f32` lanes.
    F32x4,
    /// A v128 vector treated as two `f64` lanes.
    F64x2,
}
impl CodeBuilderAllocations {
    /// Precompute the per-module lookup tables used to quickly decide which
    /// instructions are valid while generating function bodies.
    pub(crate) fn new(module: &Module, disallow_exporting: bool) -> Self {
        // Map each value type to the indices of the mutable globals of that
        // type.
        let mut mutable_globals = BTreeMap::new();
        for (i, global) in module.globals.iter().enumerate() {
            if global.mutable {
                mutable_globals
                    .entry(global.val_type)
                    .or_default()
                    .push(i as u32);
            }
        }

        // Map each tag signature (parameter list) to the tags of that type.
        let mut tags = BTreeMap::new();
        for (idx, tag_type) in module.tags() {
            tags.entry(tag_type.func_type.params.to_vec())
                .or_default()
                .push(idx);
        }

        // Map each function type to the functions of that type.
        let mut functions = BTreeMap::new();
        for (idx, func) in module.funcs() {
            functions.entry(func.clone()).or_default().push(idx);
        }

        // Partition the tables holding `funcref`s by index width.
        let mut table32_with_funcref = Vec::new();
        let mut table64_with_funcref = Vec::new();
        for (i, table) in module.tables.iter().enumerate() {
            if table.element_type == RefType::FUNCREF {
                if table.table64 {
                    table64_with_funcref.push(i as u32);
                } else {
                    table32_with_funcref.push(i as u32);
                }
            }
        }

        // Collect all functions referenced by global initializers and element
        // segments; a `BTreeSet` deduplicates and keeps the list sorted.
        let mut referenced_functions = BTreeSet::new();
        for (_, expr) in module.defined_globals.iter() {
            if let Some(i) = expr.get_ref_func() {
                referenced_functions.insert(i);
            }
        }

        // Precompute the (segment, table) pairs valid for `table.init`, split
        // by the destination table's index width.
        let mut table32_init = Vec::new();
        let mut table64_init = Vec::new();
        for (i, g) in module.elems.iter().enumerate() {
            match &g.items {
                Elements::Expressions(e) => {
                    let iter = e.iter().filter_map(|e| e.get_ref_func());
                    referenced_functions.extend(iter);
                }
                Elements::Functions(e) => {
                    referenced_functions.extend(e.iter().copied());
                }
            }
            for (j, table) in module.tables.iter().enumerate() {
                if module.ref_type_is_sub_type(g.ty, table.element_type) {
                    let dst = if table.table64 {
                        &mut table64_init
                    } else {
                        &mut table32_init
                    };
                    dst.push((i as u32, j as u32));
                }
            }
        }

        // Partition memories by index width.
        let mut memory32 = Vec::new();
        let mut memory64 = Vec::new();
        for (i, mem) in module.memories.iter().enumerate() {
            if mem.memory64 {
                memory64.push(i as u32);
            } else {
                memory32.push(i as u32);
            }
        }

        // Partition tables by index width, and precompute every valid
        // (src, dst) pair for `table.copy`, keyed by the index widths of the
        // two tables involved.
        let mut table32 = Vec::new();
        let mut table64 = Vec::new();
        let mut table_copy_32_to_32 = Vec::new();
        let mut table_copy_32_to_64 = Vec::new();
        let mut table_copy_64_to_32 = Vec::new();
        let mut table_copy_64_to_64 = Vec::new();
        for (i, t) in module.tables.iter().enumerate() {
            if t.table64 {
                table64.push(i as u32);
            } else {
                table32.push(i as u32);
            }
            for (j, t2) in module.tables.iter().enumerate() {
                if module.val_type_is_sub_type(t.element_type.into(), t2.element_type.into()) {
                    let dst = match (t.table64, t2.table64) {
                        (false, false) => &mut table_copy_32_to_32,
                        (false, true) => &mut table_copy_32_to_64,
                        (true, false) => &mut table_copy_64_to_32,
                        (true, true) => &mut table_copy_64_to_64,
                    };
                    dst.push((i as u32, j as u32));
                }
            }
        }

        let mut global_dropped_i32 = None;
        let mut global_dropped_i64 = None;
        let mut global_dropped_f32 = None;
        let mut global_dropped_f64 = None;
        let mut global_dropped_v128 = None;

        // If we can't export additional globals, try to use existing exported
        // mutable globals for dropped values.
        if disallow_exporting {
            for (_, kind, index) in module.exports.iter() {
                if *kind == ExportKind::Global {
                    let ty = module.globals[*index as usize];
                    if ty.mutable {
                        match ty.val_type {
                            ValType::I32 => {
                                if global_dropped_i32.is_none() {
                                    global_dropped_i32 = Some(*index)
                                } else {
                                    global_dropped_f32 = Some(*index)
                                }
                            }
                            ValType::I64 => {
                                if global_dropped_i64.is_none() {
                                    global_dropped_i64 = Some(*index)
                                } else {
                                    global_dropped_f64 = Some(*index)
                                }
                            }
                            ValType::V128 => global_dropped_v128 = Some(*index),
                            _ => {}
                        }
                    }
                }
            }
        }

        CodeBuilderAllocations {
            controls: Vec::with_capacity(4),
            operands: Vec::with_capacity(16),
            options: Vec::with_capacity(NUM_OPTIONS),
            functions,
            tags,
            mutable_globals,
            table32_with_funcref,
            table64_with_funcref,
            referenced_functions: referenced_functions.into_iter().collect(),
            table32_init,
            table64_init,
            table_copy_32_to_32,
            table_copy_32_to_64,
            table_copy_64_to_32,
            table_copy_64_to_64,
            memory32,
            memory64,
            table32,
            table64,
            global_dropped_i32,
            global_dropped_i64,
            global_dropped_f32,
            global_dropped_f64,
            global_dropped_v128,
            globals_cnt: module.globals.len() as u32,
            new_globals: Vec::new(),
            disallow_exporting,
        }
    }

    /// Create a `CodeBuilder` for generating the body of one function of the
    /// given type, resetting all per-function state.
    pub(crate) fn builder<'a>(
        &'a mut self,
        func_ty: &'a FuncType,
        locals: &'a mut Vec<ValType>,
    ) -> CodeBuilder<'a> {
        self.controls.clear();
        // The implicit outermost frame of the function body: no parameters,
        // and the function's results.
        self.controls.push(Control {
            kind: ControlKind::Block,
            params: vec![],
            results: func_ty.results.to_vec(),
            height: 0,
        });
        self.operands.clear();
        self.options.clear();
        CodeBuilder {
            func_ty,
            locals,
            allocs: self,
            extra_locals: Vec::new(),
            f32_scratch: None,
            f64_scratch: None,
            v128_scratch: None,
        }
    }

    /// Inject any globals created while dropping operands into `module`.
    ///
    /// Each global is then exported, most of the time (if additional exports
    /// are allowed), to ensure it's part of the "image" of this module
    /// available for differential execution for example.
    pub fn finish(self, u: &mut Unstructured<'_>, module: &mut Module) -> arbitrary::Result<()> {
        for (ty, init) in self.new_globals {
            let global_idx = module.globals.len() as u32;
            module.globals.push(GlobalType {
                val_type: ty,
                mutable: true,
                shared: false,
            });
            module.defined_globals.push((global_idx, init));

            // Skip the export entirely when exports are disallowed, and
            // occasionally (1 in 100) otherwise.
            if self.disallow_exporting || u.ratio(1, 100).unwrap_or(false) {
                continue;
            }

            let name = unique_string(1_000, &mut module.export_names, u)?;
            module.add_arbitrary_export(name, ExportKind::Global, global_idx)?;
        }
        Ok(())
    }
}
impl CodeBuilder<'_> {
/// Pop the innermost control frame, erasing any operand subtype info.
///
/// The actual types on the stack may be subtypes of the frame's declared
/// result types; we pop them and push the declared types instead so later
/// generation can't accidentally rely on the erased subtypes.
fn pop_control(&mut self) -> Control {
    let frame = self.allocs.controls.pop().unwrap();
    for _ in 0..frame.results.len() {
        self.pop_operand();
    }
    for &result_ty in frame.results.iter() {
        self.push_operand(Some(result_ty));
    }
    frame
}
/// Push a new control frame of the given kind and signature.
///
/// As in `pop_control`, the actual argument types on the stack (which may
/// be subtypes of the declared parameter types) are popped and replaced
/// with the declared parameter types, erasing subtype information so
/// generated code can't depend on it.
fn push_control(
    &mut self,
    kind: ControlKind,
    params: impl Into<Vec<ValType>>,
    results: impl Into<Vec<ValType>>,
) {
    let params = params.into();
    let results = results.into();
    for _ in 0..params.len() {
        self.pop_operand();
    }
    self.push_operands(&params);
    // Operands below this height belong to outer frames and are off-limits
    // to instructions inside this frame.
    let height = self.allocs.operands.len() - params.len();
    let frame = Control {
        kind,
        params,
        results,
        height,
    };
    self.allocs.controls.push(frame);
}
/// Get the operands that are in-scope within the current control frame.
#[inline]
fn operands(&self) -> &[Option<ValType>] {
    match self.allocs.controls.last() {
        Some(frame) => &self.allocs.operands[frame.height..],
        None => &self.allocs.operands[..],
    }
}
/// Pop a single operand from the stack, regardless of expected type.
///
/// Returns `None` when the popped slot has a polymorphic (unknown) type.
/// Panics if the operand stack is empty.
#[inline]
fn pop_operand(&mut self) -> Option<ValType> {
    self.allocs.operands.pop().unwrap()
}
/// Pop `to_pop.len()` operands off the stack; debug-asserts that those
/// types are actually present first.
#[inline]
fn pop_operands(&mut self, module: &Module, to_pop: &[ValType]) {
    debug_assert!(self.types_on_stack(module, to_pop));
    let remaining = self.allocs.operands.len() - to_pop.len();
    self.allocs.operands.truncate(remaining);
}
/// Push each of the given value types onto the operand stack.
#[inline]
fn push_operands(&mut self, to_push: &[ValType]) {
    for &ty in to_push {
        self.allocs.operands.push(Some(ty));
    }
}
/// Push a single operand (`None` meaning a polymorphic/unknown type) onto
/// the stack.
#[inline]
fn push_operand(&mut self, ty: Option<ValType>) {
    self.allocs.operands.push(ty);
}
/// Pop the label types of the control frame `target` frames up from the
/// innermost frame; debug-asserts they are on the stack first.
fn pop_label_types(&mut self, module: &Module, target: u32) {
    let depth = usize::try_from(target).unwrap();
    let idx = self.allocs.controls.len() - 1 - depth;
    let frame = &self.allocs.controls[idx];
    debug_assert!(self.label_types_on_stack(module, frame));
    let remaining = self.allocs.operands.len() - frame.label_types().len();
    self.allocs.operands.truncate(remaining);
}
/// Push the label types of the control frame `target` frames up from the
/// innermost frame onto the operand stack.
fn push_label_types(&mut self, target: u32) {
    let depth = usize::try_from(target).unwrap();
    let idx = self.allocs.controls.len() - 1 - depth;
    let frame = &self.allocs.controls[idx];
    self.allocs
        .operands
        .extend(frame.label_types().iter().map(|&ty| Some(ty)));
}
/// Pop the target label types, and then push them again.
///
/// This is not a no-op due to subtyping: if we have a `T <: U` on the
/// stack, and the target label's type is `[U]`, then this will erase the
/// information about `T` and subsequent operations may only operate on `U`.
///
/// `target` is the relative branch depth (0 = innermost frame).
fn pop_push_label_types(&mut self, module: &Module, target: u32) {
    self.pop_label_types(module, target);
    self.push_label_types(target)
}
/// Are the label types of the given control frame currently on top of the
/// operand stack?
fn label_types_on_stack(&self, module: &Module, to_check: &Control) -> bool {
    self.types_on_stack(module, to_check.label_types())
}
/// Is the given type on top of the stack?
///
/// Shorthand for `type_on_stack_at(module, 0, ty)`.
#[inline]
fn type_on_stack(&self, module: &Module, ty: ValType) -> bool {
    self.type_on_stack_at(module, 0, ty)
}
/// Is the given type on the stack at the given index (indexing from the top
/// of the stack towards the bottom)?
///
/// Returns `false` when fewer than `at + 1` operands are in scope. A
/// polymorphic (`None`) slot matches any expected type.
#[inline]
fn type_on_stack_at(&self, module: &Module, at: usize, expected: ValType) -> bool {
    match self.operands().iter().rev().nth(at) {
        None => false,
        Some(None) => true,
        Some(Some(actual)) => module.val_type_is_sub_type(*actual, expected),
    }
}
/// Are the given types on top of the stack?
///
/// `types` is given bottom-to-top; the last element is compared against the
/// topmost operand.
#[inline]
fn types_on_stack(&self, module: &Module, types: &[ValType]) -> bool {
    if self.operands().len() < types.len() {
        return false;
    }
    types
        .iter()
        .rev()
        .enumerate()
        .all(|(depth, &ty)| self.type_on_stack_at(module, depth, ty))
}
/// Are the given field types on top of the stack?
///
/// Each field is compared via its unpacked value type
/// (`StorageType::unpack`).
#[inline]
fn field_types_on_stack(&self, module: &Module, types: &[FieldType]) -> bool {
    if self.operands().len() < types.len() {
        return false;
    }
    types
        .iter()
        .rev()
        .enumerate()
        .all(|(depth, field)| self.type_on_stack_at(module, depth, field.element_type.unpack()))
}
/// Is the given field type on top of the stack?
///
/// The field is compared via its unpacked value type
/// (`StorageType::unpack`).
#[inline]
fn field_type_on_stack(&self, module: &Module, ty: FieldType) -> bool {
    self.type_on_stack(module, ty.element_type.unpack())
}
/// Is the given field type on the stack at the given position (indexed from
/// the top of the stack)?
///
/// The field is compared via its unpacked value type
/// (`StorageType::unpack`).
#[inline]
fn field_type_on_stack_at(&self, module: &Module, at: usize, ty: FieldType) -> bool {
    self.type_on_stack_at(module, at, ty.element_type.unpack())
}
/// Get the ref type on the top of the operand stack, if any.
///
/// * `None` means the stack is empty or the top is not a ref type.
/// * `Some(None)` means that the stack is polymorphic.
/// * `Some(Some(r))` means that `r` is the ref type on top of the stack.
fn ref_type_on_stack(&self) -> Option<Option<RefType>> {
    let top = self.operands().last().copied()?;
    match top {
        None => Some(None),
        Some(ValType::Ref(r)) => Some(Some(r)),
        Some(_) => None,
    }
}
/// Is there a `(ref null? <index>)` on the stack at the given position? If
/// so return its nullability and type index.
fn concrete_ref_type_on_stack_at(&self, at: usize) -> Option<(bool, u32)> {
    let ty = self.operands().iter().rev().nth(at).copied()??;
    if let ValType::Ref(RefType {
        nullable,
        heap_type: HeapType::Concrete(idx),
    }) = ty
    {
        Some((nullable, idx))
    } else {
        None
    }
}
/// Is there a `(ref null? <index>)` at the given stack position that
/// references a concrete array type?
fn concrete_array_ref_type_on_stack_at(
    &self,
    module: &Module,
    at: usize,
) -> Option<(bool, u32, ArrayType)> {
    let (nullable, idx) = self.concrete_ref_type_on_stack_at(at)?;
    if let CompositeInnerType::Array(a) = &module.ty(idx).composite_type.inner {
        Some((nullable, idx, *a))
    } else {
        None
    }
}
/// Is there a `(ref null? <index>)` at the given stack position that
/// references a concrete struct type?
fn concrete_struct_ref_type_on_stack_at<'a>(
    &self,
    module: &'a Module,
    at: usize,
) -> Option<(bool, u32, &'a StructType)> {
    let (nullable, idx) = self.concrete_ref_type_on_stack_at(at)?;
    if let CompositeInnerType::Struct(s) = &module.ty(idx).composite_type.inner {
        Some((nullable, idx, s))
    } else {
        None
    }
}
/// Pop a reference type from the stack and return it.
///
/// When in unreachable code and the stack is polymorphic, returns `None`.
///
/// Panics if the stack is empty or the top operand is not a reference
/// type.
fn pop_ref_type(&mut self) -> Option<RefType> {
    let ref_ty = self.ref_type_on_stack().unwrap();
    self.pop_operand();
    ref_ty
}
/// Pops a `(ref null? <index>)` from the stack and return its nullability
/// and type index.
///
/// Panics when the popped operand is not a concrete (indexed) ref type.
fn pop_concrete_ref_type(&mut self) -> (bool, u32) {
    let r = self.pop_ref_type().unwrap();
    if let HeapType::Concrete(idx) = r.heap_type {
        (r.nullable, idx)
    } else {
        panic!("not a concrete ref type")
    }
}
/// Get the `(ref null? <index>)` type on the top of the stack that
/// references a function type, if any.
fn concrete_funcref_on_stack(&self, module: &Module) -> Option<RefType> {
    let ty = self.operands().last().copied()??;
    let r = match ty {
        ValType::Ref(r) => r,
        _ => return None,
    };
    let idx = match r.heap_type {
        HeapType::Concrete(idx) => idx,
        _ => return None,
    };
    match &module.ty(idx).composite_type.inner {
        CompositeInnerType::Func(_) => Some(r),
        CompositeInnerType::Struct(_) | CompositeInnerType::Array(_) => None,
    }
}
/// Is there a `(ref null? <index>)` on the top of the stack that references
/// a struct type with at least one field?
///
/// Nullable refs only qualify when `allow_null_refs` is set.
fn non_empty_struct_ref_on_stack(&self, module: &Module, allow_null_refs: bool) -> bool {
    let (nullable, idx) = match self.operands().last() {
        Some(Some(ValType::Ref(RefType {
            nullable,
            heap_type: HeapType::Concrete(idx),
        }))) => (*nullable, *idx),
        _ => return false,
    };
    match &module.ty(idx).composite_type.inner {
        CompositeInnerType::Struct(s) => !s.fields.is_empty() && (!nullable || allow_null_refs),
        _ => false,
    }
}
/// Choose an arbitrary block type (for `block`/`loop`/`if`/`try_table`)
/// that is valid given the operands currently on the stack.
#[inline(never)]
fn arbitrary_block_type(&self, u: &mut Unstructured, module: &Module) -> Result<BlockType> {
    // An empty block type and a single arbitrary result type are always
    // valid candidates.
    let mut options: Vec<Box<dyn Fn(&mut Unstructured) -> Result<BlockType>>> = vec![
        Box::new(|_| Ok(BlockType::Empty)),
        Box::new(|u| Ok(BlockType::Result(module.arbitrary_valtype(u)?))),
    ];
    // With multi-value enabled, any function type whose parameters are
    // currently on the stack can also serve as a block type.
    if module.config.multi_value_enabled {
        for (i, ty) in module.func_types() {
            if self.types_on_stack(module, &ty.params) {
                options.push(Box::new(move |_| Ok(BlockType::FunctionType(i as u32))));
            }
        }
    }
    let f = u.choose(&options)?;
    f(u)
}
/// Generate an arbitrary-but-valid instruction sequence for this function's
/// body, consuming the builder.
///
/// Instructions are chosen one at a time from the set that is valid in the
/// current stack/control context. Generation stops at the configured
/// instruction limit, when the fuzz input is exhausted, or when a
/// terminating byte is drawn, after which all open control frames are
/// closed.
pub(crate) fn arbitrary(
    mut self,
    u: &mut Unstructured,
    module: &Module,
) -> Result<Vec<Instruction>> {
    let max_instructions = module.config.max_instructions;
    // Strip float instructions from the allowed set up front when the
    // config disallows floats.
    let allowed_instructions = if module.config.allow_floats {
        module.config.allowed_instructions
    } else {
        module.config.allowed_instructions.without_floats()
    };
    let mut instructions = vec![];

    while !self.allocs.controls.is_empty() {
        // Keep going until the instruction limit is hit or a zero byte is
        // drawn from the input.
        let keep_going = instructions.len() < max_instructions && u.arbitrary::<u8>()? != 0;
        if !keep_going {
            self.end_active_control_frames(
                u,
                module,
                &mut instructions,
                module.config.disallow_traps,
            )?;
            break;
        }

        match choose_instruction(u, module, allowed_instructions, &mut self) {
            Some(f) => {
                f(u, module, &mut self, &mut instructions)?;
            }
            // Choosing an instruction can fail because there is not enough
            // underlying data, so we really cannot generate any more
            // instructions. In this case we swallow that error and instead
            // just terminate our wasm function's frames.
            None => {
                self.end_active_control_frames(
                    u,
                    module,
                    &mut instructions,
                    module.config.disallow_traps,
                )?;
                break;
            }
        }

        // If the configuration for this module requests nan
        // canonicalization then perform that here based on whether or not
        // the previous instruction needs canonicalization. Note that this
        // is based off Cranelift's pass for nan canonicalization for which
        // instructions to canonicalize, but the general idea is most
        // floating-point operations.
        if module.config.canonicalize_nans {
            match instructions.last().unwrap() {
                Instruction::F32Ceil
                | Instruction::F32Floor
                | Instruction::F32Nearest
                | Instruction::F32Sqrt
                | Instruction::F32Trunc
                | Instruction::F32Div
                | Instruction::F32Max
                | Instruction::F32Min
                | Instruction::F32Mul
                | Instruction::F32Sub
                | Instruction::F32Add => self.canonicalize_nan(Float::F32, &mut instructions),
                Instruction::F64Ceil
                | Instruction::F64Floor
                | Instruction::F64Nearest
                | Instruction::F64Sqrt
                | Instruction::F64Trunc
                | Instruction::F64Div
                | Instruction::F64Max
                | Instruction::F64Min
                | Instruction::F64Mul
                | Instruction::F64Sub
                | Instruction::F64Add => self.canonicalize_nan(Float::F64, &mut instructions),
                Instruction::F32x4Ceil
                | Instruction::F32x4Floor
                | Instruction::F32x4Nearest
                | Instruction::F32x4Sqrt
                | Instruction::F32x4Trunc
                | Instruction::F32x4Div
                | Instruction::F32x4Max
                | Instruction::F32x4Min
                | Instruction::F32x4Mul
                | Instruction::F32x4Sub
                | Instruction::F32x4Add => {
                    self.canonicalize_nan(Float::F32x4, &mut instructions)
                }
                Instruction::F64x2Ceil
                | Instruction::F64x2Floor
                | Instruction::F64x2Nearest
                | Instruction::F64x2Sqrt
                | Instruction::F64x2Trunc
                | Instruction::F64x2Div
                | Instruction::F64x2Max
                | Instruction::F64x2Min
                | Instruction::F64x2Mul
                | Instruction::F64x2Sub
                | Instruction::F64x2Add => {
                    self.canonicalize_nan(Float::F64x2, &mut instructions)
                }
                _ => {}
            }
        }
    }

    // Append the NaN-canonicalization scratch locals to the function's real
    // local list now that generation is complete.
    self.locals.extend(self.extra_locals.drain(..));

    Ok(instructions)
}
fn canonicalize_nan(&mut self, ty: Float, ins: &mut Vec<Instruction>) {
// We'll need to temporarily save the top of the stack into a local, so
// figure out that local here. Note that this tries to use the same
// local if canonicalization happens more than once in a function.
let (local, val_ty) = match ty {
Float::F32 => (&mut self.f32_scratch, ValType::F32),
Float::F64 => (&mut self.f64_scratch, ValType::F64),
Float::F32x4 | Float::F64x2 => (&mut self.v128_scratch, ValType::V128),
};
let local = match *local {
Some(i) => i as u32,
None => self.alloc_local(val_ty),
};
// Save the previous instruction's result into a local. This also leaves
// a value on the stack as `val1` for the `select` instruction.
ins.push(Instruction::LocalTee(local));
// The `val2` value input to the `select` below, our nan pattern.
//
// The nan patterns here are chosen to be a canonical representation
// which is still NaN but the wasm will always produce the same bits of
// a nan so if the wasm takes a look at the nan inside it'll always see
// the same representation.
const CANON_32BIT_NAN: u32 = 0b01111111110000000000000000000000;
const CANON_64BIT_NAN: u64 =
0b0111111111111000000000000000000000000000000000000000000000000000;
ins.push(match ty {
Float::F32 => Instruction::F32Const(f32::from_bits(CANON_32BIT_NAN)),
Float::F64 => Instruction::F64Const(f64::from_bits(CANON_64BIT_NAN)),
Float::F32x4 => {
let nan = CANON_32BIT_NAN as i128;
let nan = nan | (nan << 32) | (nan << 64) | (nan << 96);
Instruction::V128Const(nan)
}
Float::F64x2 => {
let nan = CANON_64BIT_NAN as i128;
let nan = nan | (nan << 64);
Instruction::V128Const(nan)
}
});
// the condition of the `select`, which is the float's equality test
// with itself.
ins.push(Instruction::LocalGet(local));
ins.push(Instruction::LocalGet(local));
ins.push(match ty {
Float::F32 => Instruction::F32Eq,
Float::F64 => Instruction::F64Eq,
Float::F32x4 => Instruction::F32x4Eq,
Float::F64x2 => Instruction::F64x2Eq,
});
// Select the result. If the condition is nonzero (aka the float is
// equal to itself) it picks `val1`, otherwise if zero (aka the float
// is nan) it picks `val2`.
ins.push(match ty {
Float::F32 | Float::F64 => Instruction::Select,
Float::F32x4 | Float::F64x2 => Instruction::V128Bitselect,
});
}
/// Reserve a fresh temporary local of the given type and return its index.
///
/// The local is appended to `extra_locals` (merged into the real local list
/// at the end of code generation); the returned index accounts for the
/// function's parameters and regular locals, which precede the extras.
fn alloc_local(&mut self, ty: ValType) -> u32 {
    let index = self.func_ty.params.len() + self.locals.len() + self.extra_locals.len();
    self.extra_locals.push(ty);
    u32::try_from(index).unwrap()
}
/// Close every control frame that is still open, emitting the `else`/`end`
/// instructions (and synthesizing any operands) needed for each label's
/// result types to validate.
fn end_active_control_frames(
    &mut self,
    u: &mut Unstructured<'_>,
    module: &Module,
    instructions: &mut Vec<Instruction>,
    disallow_traps: bool,
) -> Result<()> {
    while !self.allocs.controls.is_empty() {
        // Ensure that this label is valid by placing the right types onto
        // the operand stack for the end of the label.
        self.guarantee_label_results(u, module, instructions, disallow_traps)?;

        // Remove the label and clear the operand stack since the label has
        // been removed.
        let label = self.allocs.controls.pop().unwrap();
        self.allocs.operands.truncate(label.height);

        // If this is an `if` that is not stack neutral, then it
        // must have an `else`. Generate synthetic results here in the same
        // manner we did above.
        if label.kind == ControlKind::If && label.params != label.results {
            instructions.push(Instruction::Else);
            // Re-enter the frame with its parameters on the stack so the
            // `else` arm can be filled in and checked the same way.
            self.allocs.controls.push(label.clone());
            self.allocs
                .operands
                .extend(label.params.into_iter().map(Some));
            self.guarantee_label_results(u, module, instructions, disallow_traps)?;
            self.allocs.controls.pop();
            self.allocs.operands.truncate(label.height);
        }

        // The last control frame for the function return does not
        // need an `end` instruction.
        if !self.allocs.controls.is_empty() {
            instructions.push(Instruction::End);
        }

        // Place the results of the label onto the operand stack for use
        // after the label.
        self.allocs
            .operands
            .extend(label.results.into_iter().map(Some));
    }
    Ok(())
}
/// Modifies the instruction stream to guarantee that the current control
/// label's results are on the stack and ready for the control label to return.
fn guarantee_label_results(
&mut self,
u: &mut Unstructured<'_>,
module: &Module,
instructions: &mut Vec<Instruction>,
disallow_traps: bool,
) -> Result<()> {
let operands = self.operands();
let label = self.allocs.controls.last().unwrap();
// Already done, yay!
if label.results.len() == operands.len() && self.types_on_stack(module, &label.results) {
return Ok(());
}
// Generating an unreachable instruction is always a valid way to
// generate any types for a label, but it's not too interesting, so
// don't favor it.
if !disallow_traps && u.ratio(1, u16::MAX)? {
instructions.push(Instruction::Unreachable);
--> --------------------
--> maximum size reached
--> --------------------