/*
 * Copyright (c) 2021, Ali Mohammad Pur
 * Copyright (c) 2023, Sam Atkins
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include

using namespace AK::SIMD;

namespace Wasm {

template<typename T>
struct ConvertToRaw {
    T operator()(T value)
    {
        return LittleEndian<T>(value);
    }
};

template<>
struct ConvertToRaw<float> {
    u32 operator()(float value) const
    {
        return bit_cast<LittleEndian<u32>>(value);
    }
};

template<>
struct ConvertToRaw<double> {
    u64 operator()(double value) const
    {
        return bit_cast<LittleEndian<u64>>(value);
    }
};

#define TRAP_IF_NOT(x, ...)                                                                    \
    do {                                                                                       \
        if (trap_if_not(x, #x##sv __VA_OPT__(, ) __VA_ARGS__)) {                               \
            dbgln_if(WASM_TRACE_DEBUG, "Trapped because {} failed, at line {}", #x, __LINE__); \
            return true;                                                                       \
        }                                                                                      \
    } while (false)

#define TRAP_IN_LOOP_IF_NOT(x, ...)                                                            \
    do {                                                                                       \
        if (trap_if_not(x, #x##sv __VA_OPT__(, ) __VA_ARGS__)) {                               \
            dbgln_if(WASM_TRACE_DEBUG, "Trapped because {} failed, at line {}", #x, __LINE__); \
            return;                                                                            \
        }                                                                                      \
    } while (false)

void BytecodeInterpreter::interpret(Configuration& configuration)
{
    m_trap = Empty {};
    auto& expression = configuration.frame().expression();
    auto const should_limit_instruction_count = configuration.should_limit_instruction_count();

    // Dispatch to one of four interpret_impl instantiations:
    // {with, without} an instruction budget x {with, without} a precompiled dispatch list.
    // (The <HasDynamicInsnLimit, HasCompiledList> argument order used here is an assumption.)
    if (!expression.compiled_instructions.dispatches.is_empty()) {
        if (should_limit_instruction_count)
            return interpret_impl<true, true>(configuration, expression);
        return interpret_impl<false, true>(configuration, expression);
    }

    if (should_limit_instruction_count)
        return interpret_impl<true, false>(configuration, expression);
    return interpret_impl<false, false>(configuration, expression);
}

template<bool HasDynamicInsnLimit, bool HasCompiledList>
void BytecodeInterpreter::interpret_impl(Configuration& configuration, Expression const& expression)
{
    auto& instructions = expression.instructions();
    u64 max_ip_value = HasCompiledList ? expression.compiled_instructions.dispatches.size() : instructions.size();
    auto& current_ip_value = configuration.ip();
    u64 executed_instructions = 0;

    configuration.sources[0] = Dispatch::RegisterOrStack::Stack;
    configuration.sources[1] = Dispatch::RegisterOrStack::Stack;
    configuration.sources[2] = Dispatch::RegisterOrStack::Stack;
    configuration.destination = Dispatch::RegisterOrStack::Stack;

    constexpr static u32 default_sources_and_destination = (to_underlying(Dispatch::RegisterOrStack::Stack)
        | (to_underlying(Dispatch::RegisterOrStack::Stack) << 2)
        | (to_underlying(Dispatch::RegisterOrStack::Stack) << 4));

    enum class CouldHaveChangedIP {
        No,
        Yes,
    };

    auto const cc = expression.compiled_instructions.dispatches.data();

    while (current_ip_value < max_ip_value) {
        if constexpr (HasDynamicInsnLimit) {
            if (executed_instructions++ >= Constants::max_allowed_executed_instructions_per_call) [[unlikely]] {
                m_trap = Trap::from_string("Exceeded maximum allowed number of instructions");
                return;
            }
        }
        // bounds checked by loop condition.
        auto old_ip = current_ip_value;
        {
            configuration.sources_and_destination = HasCompiledList ? cc[current_ip_value].sources_and_destination : default_sources_and_destination;
            auto const instruction = HasCompiledList ?
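            // The instruction is fetched either from the precompiled dispatch list (cc) or straight
            // from the raw instruction stream, depending on HasCompiledList. Every handler in the
            // switch below ends in RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::...): for ordinary
            // instructions the macro just bumps current_ip_value, while for control instructions
            // (block/loop/if/br/call/...) it only advances the ip if the handler left it untouched,
            // so an installed branch or call target is preserved.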
cc[current_ip_value].instruction : &instructions.data()[current_ip_value]; auto const opcode = instruction->opcode().value(); #define RUN_NEXT_INSTRUCTION(ip_changed) \ { \ if constexpr (ip_changed == CouldHaveChangedIP::No) { \ ++current_ip_value; \ } else { \ if (current_ip_value == old_ip) \ ++current_ip_value; \ } \ break; \ } dbgln_if(WASM_TRACE_DEBUG, "Executing instruction {} at current_ip_value {}", instruction_name(instruction->opcode()), current_ip_value); if ((opcode & Instructions::SyntheticInstructionBase.value()) != Instructions::SyntheticInstructionBase.value()) __builtin_prefetch(&instruction->arguments(), /* read */ 0, /* low temporal locality */ 1); switch (opcode) { case Instructions::local_get.value(): configuration.push_to_destination(configuration.local(instruction->local_index())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_const.value(): configuration.push_to_destination(Value(instruction->arguments().get())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::synthetic_i32_add2local.value(): configuration.push_to_destination(Value(static_cast(Operators::Add {}(configuration.local(instruction->local_index()).to(), configuration.local(instruction->arguments().get()).to())))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::synthetic_i32_addconstlocal.value(): configuration.push_to_destination(Value(static_cast(Operators::Add {}(configuration.local(instruction->local_index()).to(), instruction->arguments().get())))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::synthetic_i32_andconstlocal.value(): configuration.push_to_destination(Value(Operators::BitAnd {}(configuration.local(instruction->local_index()).to(), instruction->arguments().get()))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::synthetic_i32_storelocal.value(): if (store_value(configuration, *instruction, ConvertToRaw {}(configuration.local(instruction->local_index()).to()), 0)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::synthetic_i64_storelocal.value(): if (store_value(configuration, *instruction, ConvertToRaw {}(configuration.local(instruction->local_index()).to()), 0)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::synthetic_local_seti32_const.value(): configuration.local(instruction->local_index()) = Value(instruction->arguments().get()); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::unreachable.value(): m_trap = Trap::from_string("Unreachable"); return; case Instructions::nop.value(): RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::local_set.value(): { // bounds checked by verifier. 
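// take_source(n), source_value(n) and push_to_destination() all go through the
// sources_and_destination mask prepared above: each operand slot is served either by the plain
// value stack (the default) or, when a precompiled Dispatch entry says so, by a register slot,
// so the same handler body works for both interpretation modes.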
configuration.local(instruction->local_index()) = configuration.take_source(0); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::i64_const.value(): configuration.push_to_destination(Value(instruction->arguments().get())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_const.value(): configuration.push_to_destination(Value(instruction->arguments().get())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_const.value(): configuration.push_to_destination(Value(instruction->arguments().get())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::block.value(): { size_t arity = 0; size_t param_arity = 0; auto& args = instruction->arguments().get(); if (args.block_type.kind() != BlockType::Empty) [[unlikely]] { switch (args.block_type.kind()) { case BlockType::Type: arity = 1; break; case BlockType::Index: { auto& type = configuration.frame().module().types()[args.block_type.type_index().value()]; arity = type.results().size(); param_arity = type.parameters().size(); break; } case BlockType::Empty: VERIFY_NOT_REACHED(); } } configuration.label_stack().append(Label(arity, args.end_ip, configuration.value_stack().size() - param_arity)); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::loop.value(): { auto& args = instruction->arguments().get(); size_t arity = 0; if (args.block_type.kind() == BlockType::Index) { auto& type = configuration.frame().module().types()[args.block_type.type_index().value()]; arity = type.parameters().size(); } configuration.label_stack().append(Label(arity, current_ip_value + 1, configuration.value_stack().size() - arity)); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::if_.value(): { size_t arity = 0; size_t param_arity = 0; auto& args = instruction->arguments().get(); switch (args.block_type.kind()) { case BlockType::Empty: break; case BlockType::Type: arity = 1; break; case BlockType::Index: { auto& type = configuration.frame().module().types()[args.block_type.type_index().value()]; arity = type.results().size(); param_arity = type.parameters().size(); } } auto value = configuration.take_source(0).to(); auto end_label = Label(arity, args.end_ip.value(), configuration.value_stack().size() - param_arity); if (value == 0) { if (args.else_ip.has_value()) { configuration.ip() = args.else_ip->value(); configuration.label_stack().append(end_label); } else { configuration.ip() = args.end_ip.value() + 1; } } else { configuration.label_stack().append(end_label); } RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::structured_end.value(): configuration.label_stack().take_last(); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::structured_else.value(): { auto label = configuration.label_stack().take_last(); // Jump to the end label configuration.ip() = label.continuation().value(); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::return_.value(): { while (configuration.label_stack().size() - 1 != configuration.frame().label_index()) configuration.label_stack().take_last(); configuration.ip() = max_ip_value; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::br.value(): branch_to_label(configuration, instruction->arguments().get()); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); case Instructions::br_if.value(): { // bounds checked by verifier. 
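// br, br_if and br_table all funnel into branch_to_label(): the label index counts outward from
// the top of the label stack, and the chosen Label carries the arity and continuation ip recorded
// when the enclosing block/loop/if was entered, so branching unwinds the label stack and moves
// the ip in one step.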
auto cond = configuration.take_source(0).to(); if (cond == 0) RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); branch_to_label(configuration, instruction->arguments().get()); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::br_table.value(): { auto& arguments = instruction->arguments().get(); auto i = configuration.take_source(0).to(); if (i >= arguments.labels.size()) { branch_to_label(configuration, arguments.default_); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } branch_to_label(configuration, arguments.labels[i]); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::call.value(): { auto index = instruction->arguments().get(); auto address = configuration.frame().module().functions()[index.value()]; dbgln_if(WASM_TRACE_DEBUG, "call({})", address.value()); if (call_address(configuration, address)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::call_indirect.value(): { auto& args = instruction->arguments().get(); auto table_address = configuration.frame().module().tables()[args.table.value()]; auto table_instance = configuration.store().get(table_address); // bounds checked by verifier. auto index = configuration.take_source(0).to(); TRAP_IN_LOOP_IF_NOT(index >= 0); TRAP_IN_LOOP_IF_NOT(static_cast(index) < table_instance->elements().size()); auto& element = table_instance->elements()[index]; TRAP_IN_LOOP_IF_NOT(element.ref().has()); auto address = element.ref().get().address; auto const& type_actual = configuration.store().get(address)->visit([](auto& f) -> decltype(auto) { return f.type(); }); auto const& type_expected = configuration.frame().module().types()[args.type.value()]; TRAP_IN_LOOP_IF_NOT(type_actual.parameters().size() == type_expected.parameters().size()); TRAP_IN_LOOP_IF_NOT(type_actual.results().size() == type_expected.results().size()); TRAP_IN_LOOP_IF_NOT(type_actual.parameters() == type_expected.parameters()); TRAP_IN_LOOP_IF_NOT(type_actual.results() == type_expected.results()); dbgln_if(WASM_TRACE_DEBUG, "call_indirect({} -> {})", index, address.value()); if (call_address(configuration, address, CallAddressSource::IndirectCall)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::Yes); } case Instructions::i32_load.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_load.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_load.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_load8_s.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_load8_u.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_load16_s.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_load16_u.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load8_s.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load8_u.value(): if (load_and_push(configuration, 
*instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load16_s.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load16_u.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load32_s.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_load32_u.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_store.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_store.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_store.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_store.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_store8.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_store16.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_store8.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_store16.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_store32.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::local_tee.value(): { auto value = configuration.source_value(0); // bounds checked by verifier. auto local_index = instruction->local_index(); dbgln_if(WASM_TRACE_DEBUG, "stack:peek -> locals({})", local_index.value()); configuration.frame().locals()[local_index.value()] = value; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::global_get.value(): { auto global_index = instruction->arguments().get(); // This check here is for const expressions. In non-const expressions, // a validation error would have been thrown. TRAP_IN_LOOP_IF_NOT(global_index < configuration.frame().module().globals().size()); auto address = configuration.frame().module().globals()[global_index.value()]; dbgln_if(WASM_TRACE_DEBUG, "global({}) -> stack", address.value()); auto global = configuration.store().get(address); configuration.push_to_destination(global->value()); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::global_set.value(): { auto global_index = instruction->arguments().get(); auto address = configuration.frame().module().globals()[global_index.value()]; // bounds checked by verifier. 
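// global.set writes the taken value into the GlobalInstance fetched from the store. The memory.*
// handlers that follow work in units of 64 KiB pages (Constants::page_size): memory.size reports
// the current page count, and memory.grow pushes the old page count on success or -1 if the
// instance cannot grow.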
auto value = configuration.take_source(0); dbgln_if(WASM_TRACE_DEBUG, "stack -> global({})", address.value()); auto global = configuration.store().get(address); global->set_value(value); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::memory_size.value(): { auto& args = instruction->arguments().get(); auto address = configuration.frame().module().memories()[args.memory_index.value()]; auto instance = configuration.store().get(address); auto pages = instance->size() / Constants::page_size; dbgln_if(WASM_TRACE_DEBUG, "memory.size -> stack({})", pages); configuration.push_to_destination(Value((i32)pages)); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::memory_grow.value(): { auto& args = instruction->arguments().get(); auto address = configuration.frame().module().memories()[args.memory_index.value()]; auto instance = configuration.store().get(address); i32 old_pages = instance->size() / Constants::page_size; auto& entry = configuration.source_value(0); // bounds checked by verifier. auto new_pages = entry.to(); dbgln_if(WASM_TRACE_DEBUG, "memory.grow({}), previously {} pages...", new_pages, old_pages); if (instance->grow(new_pages * Constants::page_size)) entry = Value((i32)old_pages); else entry = Value((i32)-1); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } // https://webassembly.github.io/spec/core/bikeshed/#exec-memory-fill case Instructions::memory_fill.value(): { auto& args = instruction->arguments().get(); auto address = configuration.frame().module().memories()[args.memory_index.value()]; auto instance = configuration.store().get(address); // bounds checked by verifier. auto count = configuration.take_source(0).to(); u8 value = static_cast(configuration.take_source(1).to()); auto destination_offset = configuration.take_source(2).to(); TRAP_IN_LOOP_IF_NOT(static_cast(destination_offset + count) <= instance->data().size()); if (count == 0) RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); for (u32 i = 0; i < count; ++i) { if (store_to_memory(configuration, Instruction::MemoryArgument { 0, 0 }, { &value, sizeof(value) }, destination_offset + i)) return; } RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } // https://webassembly.github.io/spec/core/bikeshed/#exec-memory-copy case Instructions::memory_copy.value(): { auto& args = instruction->arguments().get(); auto source_address = configuration.frame().module().memories()[args.src_index.value()]; auto destination_address = configuration.frame().module().memories()[args.dst_index.value()]; auto source_instance = configuration.store().get(source_address); auto destination_instance = configuration.store().get(destination_address); // bounds checked by verifier. 
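// memory.copy behaves like memmove: source and destination ranges may overlap, so the loop below
// copies front-to-back when the destination starts at or before the source and back-to-front
// otherwise. The saturating Checked additions beforehand reject any range that would run past
// either memory instance.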
auto count = configuration.take_source(0).to(); auto source_offset = configuration.take_source(1).to(); auto destination_offset = configuration.take_source(2).to(); Checked source_position = source_offset; source_position.saturating_add(count); Checked destination_position = destination_offset; destination_position.saturating_add(count); TRAP_IN_LOOP_IF_NOT(source_position <= source_instance->data().size()); TRAP_IN_LOOP_IF_NOT(destination_position <= destination_instance->data().size()); if (count == 0) RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); Instruction::MemoryArgument memarg { 0, 0, args.dst_index }; if (destination_offset <= source_offset) { for (auto i = 0; i < count; ++i) { auto value = source_instance->data()[source_offset + i]; if (store_to_memory(configuration, memarg, { &value, sizeof(value) }, destination_offset + i)) return; } } else { for (auto i = count - 1; i >= 0; --i) { auto value = source_instance->data()[source_offset + i]; if (store_to_memory(configuration, memarg, { &value, sizeof(value) }, destination_offset + i)) return; } } RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } // https://webassembly.github.io/spec/core/bikeshed/#exec-memory-init case Instructions::memory_init.value(): { auto& args = instruction->arguments().get(); auto& data_address = configuration.frame().module().datas()[args.data_index.value()]; auto& data = *configuration.store().get(data_address); auto memory_address = configuration.frame().module().memories()[args.memory_index.value()]; auto memory = configuration.store().get(memory_address); // bounds checked by verifier. auto count = configuration.take_source(0).to(); auto source_offset = configuration.take_source(1).to(); auto destination_offset = configuration.take_source(2).to(); Checked source_position = source_offset; source_position.saturating_add(count); Checked destination_position = destination_offset; destination_position.saturating_add(count); TRAP_IN_LOOP_IF_NOT(source_position <= data.data().size()); TRAP_IN_LOOP_IF_NOT(destination_position <= memory->data().size()); if (count == 0) RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); Instruction::MemoryArgument memarg { 0, 0, args.memory_index }; for (size_t i = 0; i < (size_t)count; ++i) { auto value = data.data()[source_offset + i]; if (store_to_memory(configuration, memarg, { &value, sizeof(value) }, destination_offset + i)) return; } RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } // https://webassembly.github.io/spec/core/bikeshed/#exec-data-drop case Instructions::data_drop.value(): { auto data_index = instruction->arguments().get(); auto data_address = configuration.frame().module().datas()[data_index.value()]; *configuration.store().get(data_address) = DataInstance({}); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::elem_drop.value(): { auto elem_index = instruction->arguments().get(); auto address = configuration.frame().module().elements()[elem_index.value()]; auto elem = configuration.store().get(address); *configuration.store().get(address) = ElementInstance(elem->type(), {}); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_init.value(): { auto& args = instruction->arguments().get(); auto table_address = configuration.frame().module().tables()[args.table_index.value()]; auto table = configuration.store().get(table_address); auto element_address = configuration.frame().module().elements()[args.element_index.value()]; auto element = configuration.store().get(element_address); // bounds checked by verifier. 
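// table.init (and table.copy/table.fill below) mirror the bulk memory instructions for reference
// tables: offsets are validated with overflow-checked additions against the element counts and
// trap via TRAP_IN_LOOP_IF_NOT before any element is written.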
auto count = configuration.take_source(0).to(); auto source_offset = configuration.take_source(1).to(); auto destination_offset = configuration.take_source(2).to(); Checked checked_source_offset = source_offset; Checked checked_destination_offset = destination_offset; checked_source_offset += count; checked_destination_offset += count; TRAP_IN_LOOP_IF_NOT(!checked_source_offset.has_overflow() && checked_source_offset <= (u32)element->references().size()); TRAP_IN_LOOP_IF_NOT(!checked_destination_offset.has_overflow() && checked_destination_offset <= (u32)table->elements().size()); for (u32 i = 0; i < count; ++i) table->elements()[destination_offset + i] = element->references()[source_offset + i]; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_copy.value(): { auto& args = instruction->arguments().get(); auto source_address = configuration.frame().module().tables()[args.rhs.value()]; auto destination_address = configuration.frame().module().tables()[args.lhs.value()]; auto source_instance = configuration.store().get(source_address); auto destination_instance = configuration.store().get(destination_address); // bounds checked by verifier. auto count = configuration.take_source(0).to(); auto source_offset = configuration.take_source(1).to(); auto destination_offset = configuration.take_source(2).to(); Checked source_position = source_offset; source_position.saturating_add(count); Checked destination_position = destination_offset; destination_position.saturating_add(count); TRAP_IN_LOOP_IF_NOT(source_position <= source_instance->elements().size()); TRAP_IN_LOOP_IF_NOT(destination_position <= destination_instance->elements().size()); if (count == 0) RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); if (destination_offset <= source_offset) { for (u32 i = 0; i < count; ++i) { auto value = source_instance->elements()[source_offset + i]; destination_instance->elements()[destination_offset + i] = value; } } else { for (u32 i = count - 1; i != NumericLimits::max(); --i) { auto value = source_instance->elements()[source_offset + i]; destination_instance->elements()[destination_offset + i] = value; } } RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_fill.value(): { auto table_index = instruction->arguments().get(); auto address = configuration.frame().module().tables()[table_index.value()]; auto table = configuration.store().get(address); // bounds checked by verifier. auto count = configuration.take_source(0).to(); auto value = configuration.take_source(1); auto start = configuration.take_source(2).to(); Checked checked_offset = start; checked_offset += count; TRAP_IN_LOOP_IF_NOT(!checked_offset.has_overflow() && checked_offset <= (u32)table->elements().size()); for (u32 i = 0; i < count; ++i) table->elements()[start + i] = value.to(); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_set.value(): { // bounds checked by verifier. auto ref = configuration.take_source(0); auto index = (size_t)(configuration.take_source(1).to()); auto table_index = instruction->arguments().get(); auto address = configuration.frame().module().tables()[table_index.value()]; auto table = configuration.store().get(address); TRAP_IN_LOOP_IF_NOT(index < table->elements().size()); table->elements()[index] = ref.to(); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_get.value(): { // bounds checked by verifier. 
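// table.get reuses its operand slot: source_value(0) returns a reference to the index value,
// which is simply overwritten with the fetched element instead of being popped and re-pushed
// (memory.grow above uses the same in-place pattern).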
auto& index_value = configuration.source_value(0); auto index = static_cast(index_value.to()); auto table_index = instruction->arguments().get(); auto address = configuration.frame().module().tables()[table_index.value()]; auto table = configuration.store().get(address); TRAP_IN_LOOP_IF_NOT(index < table->elements().size()); index_value = Value(table->elements()[index]); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_grow.value(): { // bounds checked by verifier. auto size = configuration.take_source(0).to(); auto fill_value = configuration.take_source(1); auto table_index = instruction->arguments().get(); auto address = configuration.frame().module().tables()[table_index.value()]; auto table = configuration.store().get(address); auto previous_size = table->elements().size(); auto did_grow = table->grow(size, fill_value.to()); if (!did_grow) { configuration.push_to_destination(Value((i32)-1)); } else { configuration.push_to_destination(Value((i32)previous_size)); } RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::table_size.value(): { auto table_index = instruction->arguments().get(); auto address = configuration.frame().module().tables()[table_index.value()]; auto table = configuration.store().get(address); configuration.push_to_destination(Value((i32)table->elements().size())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::ref_null.value(): { auto type = instruction->arguments().get(); configuration.push_to_destination(Value(Reference(Reference::Null { type }))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); }; case Instructions::ref_func.value(): { auto index = instruction->arguments().get().value(); auto& functions = configuration.frame().module().functions(); auto address = functions[index]; configuration.push_to_destination(Value(Reference { Reference::Func { address, configuration.store().get_module_for(address) } })); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::ref_is_null.value(): { // bounds checked by verifier. auto ref = configuration.take_source(0); configuration.push_to_destination(Value(static_cast(ref.to().ref().has() ? 1 : 0))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::drop.value(): // bounds checked by verifier. configuration.take_source(0); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::select.value(): case Instructions::select_typed.value(): { // Note: The type seems to only be used for validation. auto value = configuration.take_source(0).to(); // bounds checked by verifier. dbgln_if(WASM_TRACE_DEBUG, "select({})", value); auto rhs = configuration.take_source(1); auto& lhs = configuration.source_value(2); // bounds checked by verifier. lhs = value != 0 ? 
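// (select keeps the first operand when the i32 condition is non-zero, the second otherwise.)
// From here on the scalar numeric instructions share two helpers: unary_operation applies an
// operator functor to one operand and binary_numeric_operation to two; both report a trap
// (e.g. integer division by zero) by returning true, in which case the handler returns out of
// the interpreter.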
lhs : rhs; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::i32_eqz.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_eq.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_ne.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_lts.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_ltu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_gts.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_gtu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_les.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_leu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_ges.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_geu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_eqz.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_eq.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_ne.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_lts.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_ltu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_gts.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_gtu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_les.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_leu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_ges.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_geu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_eq.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_ne.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_lt.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_gt.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case 
Instructions::f32_le.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_ge.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_eq.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_ne.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_lt.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_gt.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_le.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_ge.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_clz.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_ctz.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_popcnt.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_add.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_sub.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_mul.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_divs.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_divu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_rems.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_remu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_and.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_or.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_xor.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_shl.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_shrs.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_shru.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_rotl.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_rotr.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_clz.value(): if (unary_operation(configuration)) 
return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_ctz.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_popcnt.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_add.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_sub.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_mul.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_divs.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_divu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_rems.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_remu.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_and.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_or.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_xor.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_shl.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_shrs.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_shru.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_rotl.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_rotr.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_abs.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_neg.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_ceil.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_floor.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_trunc.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_nearest.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_sqrt.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_add.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_sub.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_mul.value(): if 
(binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_div.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_min.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_max.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_copysign.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_abs.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_neg.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_ceil.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_floor.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_trunc.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_nearest.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_sqrt.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_add.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_sub.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_mul.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_div.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_min.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_max.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_copysign.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_wrap_i64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_sf32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_uf32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_sf64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_uf64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_trunc_sf32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_trunc_uf32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_trunc_sf64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); 
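// The plain i32/i64 truncating conversions trap when the float operand is NaN or does not fit the
// target type; the trunc_sat variants further down instead saturate to the target type's limits
// and map NaN to zero (the non-trapping float-to-int conversions).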
case Instructions::i64_trunc_uf64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_extend_si32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_extend_ui32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_convert_si32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_convert_ui32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_convert_si64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_convert_ui64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_demote_f64.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_convert_si32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_convert_ui32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_convert_si64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_convert_ui64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_promote_f32.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_reinterpret_f32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_reinterpret_f64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32_reinterpret_i32.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64_reinterpret_i64.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_extend8_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_extend16_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_extend8_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_extend16_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_extend32_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_sat_f32_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_sat_f32_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_sat_f64_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32_trunc_sat_f64_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case 
Instructions::i64_trunc_sat_f32_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_trunc_sat_f32_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_trunc_sat_f64_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64_trunc_sat_f64_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_const.value(): configuration.push_to_destination(Value(instruction->arguments().get())); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load.value(): if (load_and_push(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load8x8_s.value(): if (load_and_push_mxn<8, 8, MakeSigned>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load8x8_u.value(): if (load_and_push_mxn<8, 8, MakeUnsigned>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load16x4_s.value(): if (load_and_push_mxn<16, 4, MakeSigned>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load16x4_u.value(): if (load_and_push_mxn<16, 4, MakeUnsigned>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load32x2_s.value(): if (load_and_push_mxn<32, 2, MakeSigned>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load32x2_u.value(): if (load_and_push_mxn<32, 2, MakeUnsigned>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load8_splat.value(): if (load_and_push_m_splat<8>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load16_splat.value(): if (load_and_push_m_splat<16>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load32_splat.value(): if (load_and_push_m_splat<32>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load64_splat.value(): if (load_and_push_m_splat<64>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_splat.value(): pop_and_push_m_splat<8, NativeIntegralType>(configuration, *instruction); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_splat.value(): pop_and_push_m_splat<16, NativeIntegralType>(configuration, *instruction); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_splat.value(): pop_and_push_m_splat<32, NativeIntegralType>(configuration, *instruction); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_splat.value(): pop_and_push_m_splat<64, NativeIntegralType>(configuration, *instruction); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_splat.value(): pop_and_push_m_splat<32, NativeFloatingType>(configuration, *instruction); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_splat.value(): pop_and_push_m_splat<64, NativeFloatingType>(configuration, *instruction); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_shuffle.value(): { auto& arg = 
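// i8x16.shuffle: the sixteen immediate lane indices select bytes from the concatenation of the
// two popped vectors; an index below 16 picks from the first operand, 16..31 pick from the
// second, which is what the lane loop below implements.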
instruction->arguments().get(); auto b = pop_vector(configuration, 0); auto a = pop_vector(configuration, 1); using VectorType = Native128ByteVectorOf; VectorType result; for (size_t i = 0; i < 16; ++i) if (arg.lanes[i] < 16) result[i] = a[arg.lanes[i]]; else result[i] = b[arg.lanes[i] - 16]; configuration.push_to_destination(Value(bit_cast(result))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::v128_store.value(): if (pop_and_store(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_shl.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_shr_u.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_shr_s.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_shl.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_shr_u.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_shr_s.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_shl.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_shr_u.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_shr_s.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_shl.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_shr_u.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_shr_s.value(): if (binary_numeric_operation, i32>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_swizzle.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_extract_lane_s.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_extract_lane_u.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extract_lane_s.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extract_lane_u.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extract_lane.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extract_lane.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_extract_lane.value(): if 
(unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_extract_lane.value(): if (unary_operation>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_replace_lane.value(): if (binary_numeric_operation, i32>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_replace_lane.value(): if (binary_numeric_operation, i32>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_replace_lane.value(): if (binary_numeric_operation, i32>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_replace_lane.value(): if (binary_numeric_operation, i64>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_replace_lane.value(): if (binary_numeric_operation, float>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_replace_lane.value(): if (binary_numeric_operation, double>(configuration, instruction->arguments().get().lane)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_eq.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_ne.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_lt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_lt_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_gt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_gt_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_le_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_le_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_ge_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_ge_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_abs.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_neg.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_all_true.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_popcnt.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_add.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_sub.value(): if 
(binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_avgr_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_add_sat_s.value(): if (binary_numeric_operation, MakeSigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_add_sat_u.value(): if (binary_numeric_operation, MakeUnsigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_sub_sat_s.value(): if (binary_numeric_operation, MakeSigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_sub_sat_u.value(): if (binary_numeric_operation, MakeUnsigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_min_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_min_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_max_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_max_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_eq.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_ne.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_lt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_lt_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_gt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_gt_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_le_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_le_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_ge_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_ge_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_abs.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_neg.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_all_true.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_add.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_sub.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_mul.value(): if 
(binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_avgr_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_add_sat_s.value(): if (binary_numeric_operation, MakeSigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_add_sat_u.value(): if (binary_numeric_operation, MakeUnsigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_sub_sat_s.value(): if (binary_numeric_operation, MakeSigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_sub_sat_u.value(): if (binary_numeric_operation, MakeUnsigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_min_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_min_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_max_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_max_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extend_low_i8x16_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extend_high_i8x16_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extend_low_i8x16_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extend_high_i8x16_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extadd_pairwise_i8x16_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extadd_pairwise_i8x16_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extmul_low_i8x16_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extmul_high_i8x16_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extmul_low_i8x16_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_extmul_high_i8x16_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_eq.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_ne.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_lt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_lt_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_gt_s.value(): if 
(binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_gt_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_le_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_le_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_ge_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_ge_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_abs.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_neg.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_all_true.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_add.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_sub.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_mul.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_min_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_min_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_max_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_max_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extend_low_i16x8_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extend_high_i16x8_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extend_low_i16x8_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extend_high_i16x8_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extadd_pairwise_i16x8_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extadd_pairwise_i16x8_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extmul_low_i16x8_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extmul_high_i16x8_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extmul_low_i16x8_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_extmul_high_i16x8_u.value(): if 
(binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_eq.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_ne.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_lt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_gt_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_le_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_ge_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_abs.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_neg.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_all_true.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_add.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_sub.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_mul.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extend_low_i32x4_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extend_high_i32x4_s.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extend_low_i32x4_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extend_high_i32x4_u.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extmul_low_i32x4_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extmul_high_i32x4_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extmul_low_i32x4_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_extmul_high_i32x4_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_eq.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_ne.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_lt.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_gt.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_le.value(): if 
(binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_ge.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_min.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_max.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_eq.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_ne.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_lt.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_gt.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_le.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_ge.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_min.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_max.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_div.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_mul.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_sub.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_add.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_pmin.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_pmax.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_div.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_mul.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_sub.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_add.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_pmin.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_pmax.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_ceil.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_floor.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case 
Instructions::f32x4_trunc.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_nearest.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_sqrt.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_neg.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_abs.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_ceil.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_floor.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_trunc.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_nearest.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_sqrt.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_neg.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_abs.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_and.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_or.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_xor.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_not.value(): if (unary_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_andnot.value(): if (binary_numeric_operation(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_bitselect.value(): { // bounds checked by verifier. auto mask = configuration.take_source(0).to(); auto false_vector = configuration.take_source(1).to(); auto true_vector = configuration.take_source(2).to(); u128 result = (true_vector & mask) | (false_vector & ~mask); configuration.push_to_destination(Value(result)); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::v128_any_true.value(): { auto vector = configuration.take_source(0).to(); // bounds checked by verifier. 
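// v128.any_true reduces the whole 128-bit vector to a single i32 boolean: the result is 1 if any
// bit of the vector is set and 0 otherwise. Treating the vector as one full-width integer and
// comparing it against zero (below) implements exactly that, with no per-lane work needed.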
configuration.push_to_destination(Value(static_cast(vector != 0))); RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); } case Instructions::v128_load8_lane.value(): if (load_and_push_lane_n<8>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load16_lane.value(): if (load_and_push_lane_n<16>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load32_lane.value(): if (load_and_push_lane_n<32>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load64_lane.value(): if (load_and_push_lane_n<64>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load32_zero.value(): if (load_and_push_zero_n<32>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_load64_zero.value(): if (load_and_push_zero_n<64>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_store8_lane.value(): if (pop_and_store_lane_n<8>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_store16_lane.value(): if (pop_and_store_lane_n<16>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_store32_lane.value(): if (pop_and_store_lane_n<32>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::v128_store64_lane.value(): if (pop_and_store_lane_n<64>(configuration, *instruction)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_trunc_sat_f32x4_s.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_trunc_sat_f32x4_u.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_bitmask.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_bitmask.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_bitmask.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i64x2_bitmask.value(): if (unary_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_dot_i16x8_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_narrow_i16x8_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i8x16_narrow_i16x8_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_narrow_i32x4_s.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_narrow_i32x4_u.value(): if (binary_numeric_operation>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i16x8_q15mulr_sat_s.value(): if (binary_numeric_operation, MakeSigned>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_convert_i32x4_s.value(): if (unary_operation>>(configuration)) return; 
RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_convert_i32x4_u.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_convert_low_i32x4_s.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_convert_low_i32x4_u.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f32x4_demote_f64x2_zero.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::f64x2_promote_low_f32x4.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_trunc_sat_f64x2_s_zero.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); case Instructions::i32x4_trunc_sat_f64x2_u_zero.value(): if (unary_operation>>(configuration)) return; RUN_NEXT_INSTRUCTION(CouldHaveChangedIP::No); default: VERIFY_NOT_REACHED(); } } } } void BytecodeInterpreter::branch_to_label(Configuration& configuration, LabelIndex index) { dbgln_if(WASM_TRACE_DEBUG, "Branch to label with index {}...", index.value()); for (size_t i = 0; i < index.value(); ++i) configuration.label_stack().take_last(); auto label = configuration.label_stack().last(); dbgln_if(WASM_TRACE_DEBUG, "...which is actually IP {}, and has {} result(s)", label.continuation().value(), label.arity()); configuration.value_stack().remove(label.stack_height(), configuration.value_stack().size() - label.stack_height() - label.arity()); configuration.ip() = label.continuation().value(); } template bool BytecodeInterpreter::load_and_push(Configuration& configuration, Instruction const& instruction) { auto& arg = instruction.arguments().get(); auto& address = configuration.frame().module().memories()[arg.memory_index.value()]; auto memory = configuration.store().get(address); auto& entry = configuration.source_value(0); // bounds checked by verifier. auto base = entry.to(); u64 instance_address = static_cast(bit_cast(base)) + arg.offset; if (instance_address + sizeof(ReadType) > memory->size()) { m_trap = Trap::from_string("Memory access out of bounds"); dbgln_if(WASM_TRACE_DEBUG, "LibWasm: Memory access out of bounds (expected {} to be less than or equal to {})", instance_address + sizeof(ReadType), memory->size()); return true; } dbgln_if(WASM_TRACE_DEBUG, "load({} : {}) -> stack", instance_address, sizeof(ReadType)); auto slice = memory->data().bytes().slice(instance_address, sizeof(ReadType)); entry = Value(static_cast(read_value(slice))); return false; } template ALWAYS_INLINE static TDst convert_vector(TSrc v) { return __builtin_convertvector(v, TDst); } template typename SetSign> bool BytecodeInterpreter::load_and_push_mxn(Configuration& configuration, Instruction const& instruction) { auto& arg = instruction.arguments().get(); auto& address = configuration.frame().module().memories()[arg.memory_index.value()]; auto memory = configuration.store().get(address); auto& entry = configuration.source_value(0); // bounds checked by verifier. 
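// load_and_push_mxn implements the v128.loadMxN_{s,u} family: it reads M*N/8 bytes from linear
// memory, interprets them as N lanes of M bits each, and widens every lane to 2*M bits
// (sign- or zero-extending as requested) before pushing the resulting 128-bit vector.
// For example, v128.load8x8_s reads 8 bytes and sign-extends each one into a 16-bit lane.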
auto base = entry.to(); u64 instance_address = static_cast(bit_cast(base)) + arg.offset; if (instance_address + M * N / 8 > memory->size()) { m_trap = Trap::from_string("Memory access out of bounds"); dbgln_if(WASM_TRACE_DEBUG, "LibWasm: Memory access out of bounds (expected {} to be less than or equal to {})", instance_address + M * N / 8, memory->size()); return true; } dbgln_if(WASM_TRACE_DEBUG, "vec-load({} : {}) -> stack", instance_address, M * N / 8); auto slice = memory->data().bytes().slice(instance_address, M * N / 8); using V64 = NativeVectorType; using V128 = NativeVectorType; V64 bytes { 0 }; if (bit_cast(slice.data()) % sizeof(V64) == 0) bytes = *bit_cast(slice.data()); else ByteReader::load(slice.data(), bytes); entry = Value(bit_cast(convert_vector(bytes))); return false; } template bool BytecodeInterpreter::load_and_push_lane_n(Configuration& configuration, Instruction const& instruction) { auto memarg_and_lane = instruction.arguments().get(); auto& address = configuration.frame().module().memories()[memarg_and_lane.memory.memory_index.value()]; auto memory = configuration.store().get(address); // bounds checked by verifier. auto vector = configuration.take_source(0).to(); auto base = configuration.take_source(1).to(); u64 instance_address = static_cast(bit_cast(base)) + memarg_and_lane.memory.offset; if (instance_address + N / 8 > memory->size()) { m_trap = Trap::from_string("Memory access out of bounds"); return true; } auto slice = memory->data().bytes().slice(instance_address, N / 8); auto dst = bit_cast(&vector) + memarg_and_lane.lane * N / 8; memcpy(dst, slice.data(), N / 8); configuration.push_to_destination(Value(vector)); return false; } template bool BytecodeInterpreter::load_and_push_zero_n(Configuration& configuration, Instruction const& instruction) { auto memarg_and_lane = instruction.arguments().get(); auto& address = configuration.frame().module().memories()[memarg_and_lane.memory_index.value()]; auto memory = configuration.store().get(address); // bounds checked by verifier. auto base = configuration.take_source(0).to(); u64 instance_address = static_cast(bit_cast(base)) + memarg_and_lane.offset; if (instance_address + N / 8 > memory->size()) { m_trap = Trap::from_string("Memory access out of bounds"); return true; } auto slice = memory->data().bytes().slice(instance_address, N / 8); u128 vector = 0; memcpy(&vector, slice.data(), N / 8); configuration.push_to_destination(Value(vector)); return false; } template bool BytecodeInterpreter::load_and_push_m_splat(Configuration& configuration, Instruction const& instruction) { auto& arg = instruction.arguments().get(); auto& address = configuration.frame().module().memories()[arg.memory_index.value()]; auto memory = configuration.store().get(address); auto& entry = configuration.source_value(0); // bounds checked by verifier. 
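// load_and_push_m_splat implements v128.loadM_splat: a single M-bit value is read from memory
// and then broadcast ("splatted") into every lane of a 128-bit vector by set_top_m_splat() below.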
auto base = entry.to(); u64 instance_address = static_cast(bit_cast(base)) + arg.offset; if (instance_address + M / 8 > memory->size()) { m_trap = Trap::from_string("Memory access out of bounds"); dbgln_if(WASM_TRACE_DEBUG, "LibWasm: Memory access out of bounds (expected {} to be less than or equal to {})", instance_address + M / 8, memory->size()); return true; } dbgln_if(WASM_TRACE_DEBUG, "vec-splat({} : {}) -> stack", instance_address, M / 8); auto slice = memory->data().bytes().slice(instance_address, M / 8); auto value = read_value>(slice); set_top_m_splat(configuration, value); return false; } template typename NativeType> void BytecodeInterpreter::set_top_m_splat(Wasm::Configuration& configuration, NativeType value) { auto push = [&](auto result) { configuration.source_value(0) = Value(bit_cast(result)); }; if constexpr (IsFloatingPoint>) { if constexpr (M == 32) // 32 -> 32x4 push(expand4(value)); else if constexpr (M == 64) // 64 -> 64x2 push(f64x2 { value, value }); else static_assert(DependentFalse>, "Invalid vector size"); } else { if constexpr (M == 8) // 8 -> 8x4 -> 32x4 push(expand4(bit_cast(u8x4 { value, value, value, value }))); else if constexpr (M == 16) // 16 -> 16x2 -> 32x4 push(expand4(bit_cast(u16x2 { value, value }))); else if constexpr (M == 32) // 32 -> 32x4 push(expand4(value)); else if constexpr (M == 64) // 64 -> 64x2 push(u64x2 { value, value }); else static_assert(DependentFalse>, "Invalid vector size"); } } template typename NativeType> void BytecodeInterpreter::pop_and_push_m_splat(Wasm::Configuration& configuration, Instruction const&) { using PopT = Conditional, NativeType<64>>; using ReadT = NativeType; auto entry = configuration.source_value(0); auto value = static_cast(entry.to()); dbgln_if(WASM_TRACE_DEBUG, "stack({}) -> splat({})", value, M); set_top_m_splat(configuration, value); } template typename SetSign, typename VectorType> VectorType BytecodeInterpreter::pop_vector(Configuration& configuration, size_t source) { // bounds checked by verifier. 
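// pop_vector takes the 128-bit value from the requested source slot and bit_casts it to the
// native SIMD vector type implied by the element type and SetSign template parameters; no
// conversion happens, only a reinterpretation of the same 16 bytes.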
return bit_cast(configuration.take_source(source).to()); } bool BytecodeInterpreter::call_address(Configuration& configuration, FunctionAddress address, CallAddressSource source) { TRAP_IF_NOT(m_stack_info.size_free() >= Constants::minimum_stack_space_to_keep_free, "{}: {}", Constants::stack_exhaustion_message); auto instance = configuration.store().get(address); FunctionType const* type { nullptr }; instance->visit([&](auto const& function) { type = &function.type(); }); if (source == CallAddressSource::IndirectCall) { TRAP_IF_NOT(type->parameters().size() <= configuration.value_stack().size()); } Vector args; args.ensure_capacity(type->parameters().size()); auto span = configuration.value_stack().span().slice_from_end(type->parameters().size()); for (auto& value : span) args.unchecked_append(value); configuration.value_stack().remove(configuration.value_stack().size() - span.size(), span.size()); Result result { Trap::from_string("") }; if (instance->has()) { CallFrameHandle handle { *this, configuration }; result = configuration.call(*this, address, move(args)); } else { result = configuration.call(*this, address, move(args)); } if (result.is_trap()) { m_trap = move(result.trap()); return true; } configuration.value_stack().ensure_capacity(configuration.value_stack().size() + result.values().size()); for (auto& entry : result.values().in_reverse()) configuration.value_stack().unchecked_append(entry); return false; } template bool BytecodeInterpreter::binary_numeric_operation(Configuration& configuration, Args&&... args) { // bounds checked by verifier. auto rhs = configuration.take_source(0).to(); auto lhs = configuration.take_source(1).to(); // bounds checked by verifier. PushType result; auto call_result = Operator { forward(args)... }(lhs, rhs); if constexpr (IsSpecializationOf) { if (call_result.is_error()) return trap_if_not(false, call_result.error()); result = call_result.release_value(); } else { result = call_result; } dbgln_if(WASM_TRACE_DEBUG, "{} {} {} = {}", lhs, Operator::name(), rhs, result); configuration.push_to_destination(Value(result)); return false; } template bool BytecodeInterpreter::unary_operation(Configuration& configuration, Args&&... args) { auto& entry = configuration.source_value(0); // bounds checked by verifier. auto value = entry.to(); auto call_result = Operator { forward(args)... }(value); PushType result; if constexpr (IsSpecializationOf) { if (call_result.is_error()) return trap_if_not(false, call_result.error()); result = call_result.release_value(); } else { result = call_result; } dbgln_if(WASM_TRACE_DEBUG, "map({}) {} = {}", Operator::name(), value, result); entry = Value(result); return false; } template bool BytecodeInterpreter::pop_and_store(Configuration& configuration, Instruction const& instruction) { // bounds checked by verifier.
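// pop_and_store pops the operand, converts it to its raw in-memory representation via
// ConvertToRaw, then defers to store_value(), which pops the base address, applies the memarg
// offset, and performs the bounds check before copying the bytes into linear memory.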
auto entry = configuration.take_source(0); auto value = ConvertToRaw {}(entry.to()); return store_value(configuration, instruction, value, 1); } template bool BytecodeInterpreter::store_value(Configuration& configuration, Instruction const& instruction, StoreT value, size_t address_source) { auto& memarg = instruction.arguments().get(); dbgln_if(WASM_TRACE_DEBUG, "stack({}) -> temporary({}b)", value, sizeof(StoreT)); auto base = configuration.take_source(address_source).to(); return store_to_memory(configuration, memarg, { &value, sizeof(StoreT) }, base); } template bool BytecodeInterpreter::pop_and_store_lane_n(Configuration& configuration, Instruction const& instruction) { auto& memarg_and_lane = instruction.arguments().get(); // bounds checked by verifier. auto vector = configuration.take_source(0).to(); auto src = bit_cast(&vector) + memarg_and_lane.lane * N / 8; auto base = configuration.take_source(1).to(); return store_to_memory(configuration, memarg_and_lane.memory, { src, N / 8 }, base); } bool BytecodeInterpreter::store_to_memory(Configuration& configuration, Instruction::MemoryArgument const& arg, ReadonlyBytes data, u32 base) { auto& address = configuration.frame().module().memories()[arg.memory_index.value()]; auto memory = configuration.store().get(address); u64 instance_address = static_cast(base) + arg.offset; Checked addition { instance_address }; addition += data.size(); if (addition.has_overflow() || addition.value() > memory->size()) { m_trap = Trap::from_string("Memory access out of bounds"); dbgln_if(WASM_TRACE_DEBUG, "LibWasm: Memory access out of bounds (expected 0 <= {} and {} <= {})", instance_address, instance_address + data.size(), memory->size()); return true; } dbgln_if(WASM_TRACE_DEBUG, "temporary({}b) -> store({})", data.size(), instance_address); data.copy_to(memory->data().bytes().slice(instance_address, data.size())); return false; } template T BytecodeInterpreter::read_value(ReadonlyBytes data) { VERIFY(sizeof(T) <= data.size()); if (bit_cast(data.data()) % alignof(T)) { alignas(T) u8 buf[sizeof(T)]; memcpy(buf, data.data(), sizeof(T)); return bit_cast>(buf); } return *bit_cast const*>(data.data()); } template<> float BytecodeInterpreter::read_value(ReadonlyBytes data) { return bit_cast(read_value(data)); } template<> double BytecodeInterpreter::read_value(ReadonlyBytes data) { return bit_cast(read_value(data)); } CompiledInstructions try_compile_instructions(Expression const& expression, Span) { CompiledInstructions result; result.dispatches.ensure_capacity(expression.instructions().size()); result.extra_instruction_storage.ensure_capacity(ceil_div(expression.instructions().size(), 2ul)); // At most half of the instructions can be replaced with synthetic instructions, as the detected sequences are 3 long. 
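// The loop below is a small peephole pass over the instruction stream: a hand-rolled state
// machine walks the instructions and fuses a few common stack-shuffling sequences into single
// "synthetic" instructions. Illustrative WAT (made-up indices and immediates, not taken from any
// particular module):
//
//     local.get 0   ;; \
//     local.get 1   ;;  > fused into synthetic_i32_add2local 0 1
//     i32.add       ;; /
//
//     i32.const 42  ;; \  fused into synthetic_local_seti32_const 3 42
//     local.set 3   ;; /
//
// Instructions that get fused away are first replaced with nops so that instruction indices stay
// stable during the scan; the nops are stripped (and jump targets rewritten) in a second pass
// further down.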
i32 i32_const_value { 0 }; LocalIndex local_index_0 { 0 }; LocalIndex local_index_1 { 0 }; enum class InsnPatternState { Nothing, GetLocal, GetLocalI32Const, GetLocalx2, I32Const, I32ConstGetLocal, } pattern_state { InsnPatternState::Nothing }; static Instruction nop { Instructions::nop }; constexpr auto default_dispatch = [](Instruction const& instruction) { return Dispatch { &instruction, { .sources = { Dispatch::Stack, Dispatch::Stack, Dispatch::Stack }, .destination = Dispatch::Stack } }; }; for (auto& instruction : expression.instructions()) { switch (pattern_state) { case InsnPatternState::Nothing: if (instruction.opcode() == Instructions::local_get) { local_index_0 = instruction.local_index(); pattern_state = InsnPatternState::GetLocal; } else if (instruction.opcode() == Instructions::i32_const) { i32_const_value = instruction.arguments().get(); pattern_state = InsnPatternState::I32Const; } break; case InsnPatternState::GetLocal: if (instruction.opcode() == Instructions::local_get) { local_index_1 = instruction.local_index(); pattern_state = InsnPatternState::GetLocalx2; } else if (instruction.opcode() == Instructions::i32_const) { i32_const_value = instruction.arguments().get(); pattern_state = InsnPatternState::GetLocalI32Const; } else if (instruction.opcode() == Instructions::i32_store) { // `local.get a; i32.store m` -> `i32.storelocal a m`. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_i32_storelocal, local_index_0, instruction.arguments())); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } else if (instruction.opcode() == Instructions::i64_store) { // `local.get a; i64.store m` -> `i64.storelocal a m`. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_i64_storelocal, local_index_0, instruction.arguments())); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } else { pattern_state = InsnPatternState::Nothing; } break; case InsnPatternState::GetLocalx2: if (instruction.opcode() == Instructions::i32_add) { // `local.get a; local.get b; i32.add` -> `i32.add_2local a b`. // Replace the previous two ops with noops, and add i32.add_2local. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.dispatches[result.dispatches.size() - 2] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction { Instructions::synthetic_i32_add2local, local_index_0, local_index_1, }); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } if (instruction.opcode() == Instructions::i32_store) { // `local.get a; i32.store m` -> `i32.storelocal a m`. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_i32_storelocal, local_index_1, instruction.arguments())); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } if (instruction.opcode() == Instructions::i64_store) { // `local.get a; i64.store m` -> `i64.storelocal a m`. 
result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_i64_storelocal, local_index_1, instruction.arguments())); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } if (instruction.opcode() == Instructions::i32_const) { swap(local_index_0, local_index_1); i32_const_value = instruction.arguments().get(); pattern_state = InsnPatternState::GetLocalI32Const; } else { pattern_state = InsnPatternState::Nothing; } break; case InsnPatternState::I32Const: if (instruction.opcode() == Instructions::local_get) { local_index_0 = instruction.local_index(); pattern_state = InsnPatternState::I32ConstGetLocal; } else if (instruction.opcode() == Instructions::i32_const) { i32_const_value = instruction.arguments().get(); } else if (instruction.opcode() == Instructions::local_set) { // `i32.const a; local.set b` -> `local.seti32_const b a`. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_local_seti32_const, instruction.local_index(), i32_const_value)); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } else { pattern_state = InsnPatternState::Nothing; } break; case InsnPatternState::GetLocalI32Const: if (instruction.opcode() == Instructions::local_set) { // `i32.const a; local.set b` -> `local.seti32_const b a`. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_local_seti32_const, instruction.local_index(), i32_const_value)); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } if (instruction.opcode() == Instructions::i32_const) { i32_const_value = instruction.arguments().get(); pattern_state = InsnPatternState::I32Const; break; } if (instruction.opcode() == Instructions::local_get) { local_index_0 = instruction.local_index(); pattern_state = InsnPatternState::I32ConstGetLocal; break; } [[fallthrough]]; case InsnPatternState::I32ConstGetLocal: if (instruction.opcode() == Instructions::i32_const) { i32_const_value = instruction.arguments().get(); pattern_state = InsnPatternState::GetLocalI32Const; } else if (instruction.opcode() == Instructions::local_get) { swap(local_index_0, local_index_1); local_index_1 = instruction.local_index(); pattern_state = InsnPatternState::GetLocalx2; } else if (instruction.opcode() == Instructions::i32_add) { // `i32.const a; local.get b; i32.add` -> `i32.add_constlocal b a`. // Replace the previous two ops with noops, and add i32.add_constlocal. result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.dispatches[result.dispatches.size() - 2] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_i32_addconstlocal, local_index_0, i32_const_value)); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } if (instruction.opcode() == Instructions::i32_and) { // `i32.const a; local.get b; i32.and` -> `i32.and_constlocal b a`. // Replace the previous two ops with noops, and add i32.and_constlocal.
result.dispatches[result.dispatches.size() - 1] = default_dispatch(nop); result.dispatches[result.dispatches.size() - 2] = default_dispatch(nop); result.extra_instruction_storage.append(Instruction( Instructions::synthetic_i32_andconstlocal, local_index_0, i32_const_value)); result.dispatches.append(default_dispatch(result.extra_instruction_storage.unsafe_last())); pattern_state = InsnPatternState::Nothing; continue; } pattern_state = InsnPatternState::Nothing; break; } result.dispatches.unchecked_append(default_dispatch(instruction)); } // Remove all nops (that were either added by the above patterns or were already present in the original instructions), // and adjust jumps accordingly. Vector nops_to_remove; for (size_t i = 0; i < result.dispatches.size(); ++i) { if (result.dispatches[i].instruction->opcode() == Instructions::nop) nops_to_remove.append(i); } auto nops_to_remove_span = nops_to_remove.span(); size_t offset_accumulated = 0; for (size_t i = 0; i < result.dispatches.size(); ++i) { if (result.dispatches[i].instruction->opcode() == Instructions::nop) { offset_accumulated++; nops_to_remove_span = nops_to_remove_span.slice(1); continue; } auto& args = result.dispatches[i].instruction->arguments(); if (auto ptr = args.get_pointer()) { auto offset_to = [&](InstructionPointer ip) { size_t offset = 0; for (auto nop_ip : nops_to_remove_span) { if (nop_ip < ip.value()) ++offset; else break; } return offset; }; InstructionPointer end_ip = ptr->end_ip.value() - offset_accumulated - offset_to(ptr->end_ip - ptr->else_ip.has_value()); auto else_ip = ptr->else_ip.map([&](InstructionPointer const& ip) -> InstructionPointer { return ip.value() - offset_accumulated - offset_to(ip - 1); }); auto instruction = *result.dispatches[i].instruction; instruction.arguments() = Instruction::StructuredInstructionArgs { .block_type = ptr->block_type, .end_ip = end_ip, .else_ip = else_ip, }; result.extra_instruction_storage.append(move(instruction)); result.dispatches[i].instruction = &result.extra_instruction_storage.unsafe_last(); } } for (auto index : nops_to_remove.in_reverse()) result.dispatches.remove(index); // Allocate registers for instructions, meeting the following constraints: // - Any instruction that produces polymorphic stack, or requires its inputs on the stack must sink all active values to the stack. // - All instructions must have the same location for their last input and their destination value (if any). // - Any value left at the end of the expression must be on the stack. 
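// Roughly, the allocator below proceeds in three stages (a high-level summary of the code that
// follows, not a specification):
//
//   1. Symbolically execute the instruction stream, materialising one value per stack slot an
//      instruction produces, and recording where each value is defined and last used.
//   2. Use a union-find structure to alias each instruction's output with its last input, so the
//      "last input and destination share a location" constraint can be satisfied by assigning
//      whole alias groups at once. Values consumed by variadic/unknown-arity instructions, values
//      left on the stack at the end of the expression, and groups with conflicting aliasing
//      requirements are forced onto the stack.
//   3. Run a linear-scan-style pass over the alias groups' live intervals, handing out one of the
//      few available registers when the group's lifetime does not conflict with another group
//      already occupying it, and falling back to the stack otherwise.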
using ValueID = DistinctNumeric; using IP = DistinctNumeric; struct Value { ValueID id; IP definition_index; Vector uses; IP last_use = 0; }; struct ActiveReg { ValueID value_id; IP end; Dispatch::RegisterOrStack reg; }; HashMap values; Vector value_stack; ValueID next_value_id = 0; HashMap instr_to_output_value; HashMap> instr_to_input_values; HashMap> instr_to_dependent_values; Vector forced_stack_values; Vector parent; // parent[id] -> parent ValueID of id in the alias tree Vector rank; // rank[id] -> rank of the tree rooted at id Vector final_roots; // final_roots[id] -> the final root parent of id auto ensure_id_space = [&](ValueID id) { if (id >= parent.size()) { size_t old_size = parent.size(); parent.resize(id.value() + 1); rank.resize(id.value() + 1); final_roots.resize(id.value() + 1); for (size_t i = old_size; i <= id; ++i) { parent[i] = i; rank[i] = 0; final_roots[i] = i; } } }; auto find_root = [&parent](this auto& self, ValueID x) -> ValueID { if (parent[x.value()] != x) parent[x.value()] = self(parent[x.value()]); return parent[x.value()]; }; auto union_alias = [&](ValueID a, ValueID b) { ensure_id_space(max(a, b)); auto const root_a = find_root(a); auto const root_b = find_root(b); if (root_a == root_b) return; if (rank[root_a.value()] < rank[root_b.value()]) { parent[root_a.value()] = root_b; } else if (rank[root_a.value()] > rank[root_b.value()]) { parent[root_b.value()] = root_a; } else { parent[root_b.value()] = root_a; ++rank[root_a.value()]; } }; HashTable stack_forced_roots; Vector> live_at_instr; live_at_instr.resize(result.dispatches.size()); for (size_t i = 0; i < result.dispatches.size(); ++i) { auto& dispatch = result.dispatches[i]; auto opcode = dispatch.instruction->opcode(); size_t inputs = 0; size_t outputs = 0; Vector dependent_ids; bool variadic_or_unknown = false; switch (opcode.value()) { #define M(name, _, ins, outs) \ case Instructions::name.value(): \ if constexpr (ins == -1 || outs == -1) { \ variadic_or_unknown = true; \ } else { \ inputs = ins; \ outputs = outs; \ } \ break; ENUMERATE_WASM_OPCODES(M) #undef M } if (variadic_or_unknown) { for (auto val : value_stack) { auto& value = values.get(val).value(); value.uses.append(i); value.last_use = max(value.last_use, i); dependent_ids.append(val); forced_stack_values.append(val); live_at_instr[i].append(val); } value_stack.clear_with_capacity(); } Vector input_ids; if (!variadic_or_unknown && value_stack.size() < inputs) { size_t j = 0; for (; j < inputs && !value_stack.is_empty(); ++j) { auto input_value = value_stack.take_last(); input_ids.append(input_value); dependent_ids.append(input_value); auto& value = values.get(input_value).value(); value.uses.append(i); value.last_use = max(value.last_use, i); } for (; j < inputs; ++j) { auto val_id = next_value_id++; values.set(val_id, Value { val_id, i, {}, i }); input_ids.append(val_id); forced_stack_values.append(val_id); ensure_id_space(val_id); } inputs = 0; } for (size_t j = 0; j < inputs; ++j) { auto input_value = value_stack.take_last(); input_ids.append(input_value); dependent_ids.append(input_value); auto& value = values.get(input_value).value(); value.uses.append(i); value.last_use = max(value.last_use, i); } instr_to_input_values.set(i, input_ids); instr_to_dependent_values.set(i, dependent_ids); ValueID output_id = NumericLimits::max(); for (size_t j = 0; j < outputs; ++j) { auto id = next_value_id++; values.set(id, Value { id, i, {}, i }); value_stack.append(id); instr_to_output_value.set(i, id); output_id = id; ensure_id_space(id); } // 
Alias the output with the last input, if one exists. if (outputs > 0) { auto maybe_input_ids = instr_to_input_values.get(i); if (maybe_input_ids.has_value() && !maybe_input_ids->is_empty()) { auto last_input_id = maybe_input_ids->last(); union_alias(output_id, last_input_id); auto alias_root = find_root(last_input_id); // If any *other* input is forced to alias the output, we have no choice but to place all three on the stack. for (size_t j = 0; j < maybe_input_ids->size() - 1; ++j) { auto input_root = find_root((*maybe_input_ids)[j]); if (input_root == alias_root) { stack_forced_roots.set(alias_root); break; } } } } } forced_stack_values.extend(value_stack); for (size_t i = 0; i < final_roots.size(); ++i) final_roots[i] = find_root(i); struct LiveInterval { ValueID value_id; IP start; IP end; bool forced_to_stack { false }; }; Vector intervals; intervals.ensure_capacity(values.size()); for (auto const& [_, value] : values) { auto start = value.definition_index; auto end = max(start, value.last_use); intervals.append({ value.id, start, end }); } for (auto id : forced_stack_values) stack_forced_roots.set(final_roots[id.value()]); for (auto& interval : intervals) interval.forced_to_stack = stack_forced_roots.contains(final_roots[interval.value_id.value()]); quick_sort(intervals, [](auto const& a, auto const& b) { return a.start < b.start; }); HashMap value_alloc; RedBlackTree active_by_end; auto expire_old_intervals = [&](IP current_start) { while (true) { auto it = active_by_end.find_smallest_not_below_iterator(current_start.value()); if (it.is_end()) break; active_by_end.remove(it.key()); } }; HashMap> alias_groups; for (auto& interval : intervals) { auto root = final_roots[interval.value_id.value()]; alias_groups.ensure(root).append(&interval); } Array, Dispatch::CountRegisters> reg_intervals; reg_intervals.fill({}); for (auto& [key, group] : alias_groups) { IP group_start = NumericLimits::max(); IP group_end = 0; auto group_forced_to_stack = false; for (auto* interval : group) { group_start = min(group_start, interval->start); group_end = max(group_end, interval->end); if (interval->forced_to_stack) group_forced_to_stack = true; } expire_old_intervals(group_start); Dispatch::RegisterOrStack reg = Dispatch::RegisterOrStack::Stack; if (!group_forced_to_stack) { Array used_regs; used_regs.fill(false); for (auto const& active_entry : active_by_end) { if (active_entry.reg != Dispatch::RegisterOrStack::Stack) used_regs[to_underlying(active_entry.reg)] = true; } for (u8 r = 0; r < Dispatch::CountRegisters; ++r) { if (used_regs[r]) // There's no hope of using this register, it was already used earlier. continue; // We can assign to "live" registers, but only if we know there will be no overlap, or that they're aliasing values anyway. 
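// Concretely: every interval in this group is checked against the intervals already assigned to
// register r. Intervals sharing the group's alias root are ignored (they are the same logical
// value); any other pair whose [start, end] ranges intersect vetoes this register, and we move on
// to the next candidate.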
auto can_assign = true; for (auto* interval : group) { auto interval_root = final_roots[interval->value_id.value()]; for (auto const* other_interval : reg_intervals[r]) { if (interval_root == final_roots[other_interval->value_id.value()]) continue; if (interval->end >= other_interval->start && other_interval->end >= interval->start) { can_assign = false; break; } } if (!can_assign) break; } if (can_assign) { reg = static_cast(r); active_by_end.insert(group_end.value(), { key, group_end, reg }); for (auto* interval : group) reg_intervals[r].append(interval); break; } } } for (auto* interval : group) value_alloc.set(interval->value_id, reg); } for (size_t i = 0; i < result.dispatches.size(); ++i) { auto& dispatch = result.dispatches[i]; auto input_ids = instr_to_input_values.get(i).value_or({}); for (size_t j = 0; j < input_ids.size(); ++j) { auto reg = value_alloc.get(input_ids[j]).value_or(Dispatch::RegisterOrStack::Stack); dispatch.sources[j] = reg; } if (auto output_id = instr_to_output_value.get(i); output_id.has_value()) dispatch.destination = value_alloc.get(*output_id).value_or(Dispatch::RegisterOrStack::Stack); } return result; } }