2021-06-03 10:46:30 +02:00
/*
2025-03-16 17:41:24 -05:00
* Copyright ( c ) 2021 - 2025 , Andreas Kling < andreas @ ladybird . org >
2025-04-30 16:31:26 +03:00
* Copyright ( c ) 2025 , Aliaksandr Kalenik < kalenik . aliaksandr @ gmail . com >
2021-06-03 10:46:30 +02:00
*
* SPDX - License - Identifier : BSD - 2 - Clause
*/
2021-06-07 15:17:37 +02:00
# include <AK/Debug.h>
2023-09-27 10:10:00 +02:00
# include <AK/HashTable.h>
2021-06-09 10:02:01 +02:00
# include <AK/TemporaryChange.h>
2025-05-03 11:35:22 +02:00
# include <LibGC/RootHashMap.h>
2026-03-06 22:07:49 +01:00
# include <LibJS/Bytecode/AsmInterpreter/AsmInterpreter.h>
2021-06-09 06:49:58 +04:30
# include <LibJS/Bytecode/BasicBlock.h>
2026-04-12 13:25:26 +02:00
# include <LibJS/Bytecode/Builtins.h>
2026-04-13 11:54:04 +02:00
# include <LibJS/Bytecode/Debug.h>
2025-11-20 22:14:50 +01:00
# include <LibJS/Bytecode/FormatOperand.h>
2021-06-03 10:46:30 +02:00
# include <LibJS/Bytecode/Instruction.h>
2023-10-20 00:33:51 +02:00
# include <LibJS/Bytecode/Label.h>
2021-06-09 09:19:34 +02:00
# include <LibJS/Bytecode/Op.h>
2025-10-14 13:23:55 +02:00
# include <LibJS/Bytecode/PropertyAccess.h>
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
# include <LibJS/Bytecode/PropertyNameIterator.h>
2025-05-24 23:46:13 +02:00
# include <LibJS/Export.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/AbstractOperations.h>
2024-10-17 20:29:07 +02:00
# include <LibJS/Runtime/Accessor.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/Array.h>
2025-11-06 19:20:29 +00:00
# include <LibJS/Runtime/AsyncFromSyncIterator.h>
# include <LibJS/Runtime/AsyncFromSyncIteratorPrototype.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/BigInt.h>
2026-02-11 00:28:10 +01:00
# include <LibJS/Runtime/ClassConstruction.h>
2025-03-31 09:32:39 +01:00
# include <LibJS/Runtime/CompletionCell.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/DeclarativeEnvironment.h>
# include <LibJS/Runtime/ECMAScriptFunctionObject.h>
# include <LibJS/Runtime/Environment.h>
# include <LibJS/Runtime/FunctionEnvironment.h>
2021-07-01 12:24:46 +02:00
# include <LibJS/Runtime/GlobalEnvironment.h>
2021-06-03 18:26:13 +02:00
# include <LibJS/Runtime/GlobalObject.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/Iterator.h>
2023-11-30 19:49:29 +01:00
# include <LibJS/Runtime/MathObject.h>
2025-03-16 17:41:24 -05:00
# include <LibJS/Runtime/ModuleEnvironment.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/NativeFunction.h>
# include <LibJS/Runtime/ObjectEnvironment.h>
2021-09-11 20:27:36 +01:00
# include <LibJS/Runtime/Realm.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/Reference.h>
# include <LibJS/Runtime/RegExpObject.h>
2026-04-12 16:12:40 +02:00
# include <LibJS/Runtime/StringConstructor.h>
2024-07-09 10:10:14 +02:00
# include <LibJS/Runtime/TypedArray.h>
2026-04-13 11:54:04 +02:00
# include <LibJS/Runtime/VM.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/Runtime/Value.h>
2023-10-06 17:54:21 +02:00
# include <LibJS/Runtime/ValueInlines.h>
2023-09-27 10:10:00 +02:00
# include <LibJS/SourceTextModule.h>
2026-01-08 23:46:59 +01:00
# include <math.h>
2021-06-03 10:46:30 +02:00
2026-04-13 11:54:04 +02:00
namespace JS {
2021-06-03 10:46:30 +02:00
2026-04-13 11:54:04 +02:00
using namespace Bytecode ;
bool Bytecode : : g_dump_bytecode = false ;
2021-06-05 15:53:36 +02:00
2025-05-08 15:19:35 +12:00
// Loose inequality (!=) with a fast path: when both operands carry the same tag
// and that tag admits a bitwise comparison (int32, object, boolean, null/undefined),
// compare the raw encodings directly instead of running the abstract operation.
ALWAYS_INLINE static ThrowCompletionOr<bool> loosely_inequals(VM& vm, Value src1, Value src2)
{
    bool const tags_match = src1.tag() == src2.tag();
    if (tags_match && (src1.is_int32() || src1.is_object() || src1.is_boolean() || src1.is_nullish()))
        return src1.encoded() != src2.encoded();

    // Slow path: full IsLooselyEqual semantics (may throw, e.g. via ToPrimitive).
    return !TRY(is_loosely_equal(vm, src1, src2));
}
// Loose equality (==) with the same encoded-bits fast path as loosely_inequals().
ALWAYS_INLINE static ThrowCompletionOr<bool> loosely_equals(VM& vm, Value src1, Value src2)
{
    bool const tags_match = src1.tag() == src2.tag();
    if (tags_match && (src1.is_int32() || src1.is_object() || src1.is_boolean() || src1.is_nullish()))
        return src1.encoded() == src2.encoded();

    // Slow path: full IsLooselyEqual semantics (may throw, e.g. via ToPrimitive).
    return TRY(is_loosely_equal(vm, src1, src2));
}
// Strict inequality (!==). The VM& parameter is unused but kept so all four
// comparison helpers share one signature for the jump-op dispatch macro.
ALWAYS_INLINE static ThrowCompletionOr<bool> strict_inequals(VM&, Value src1, Value src2)
{
    bool const tags_match = src1.tag() == src2.tag();
    if (tags_match && (src1.is_int32() || src1.is_object() || src1.is_boolean() || src1.is_nullish()))
        return src1.encoded() != src2.encoded();

    // IsStrictlyEqual never throws; no TRY needed.
    return !is_strictly_equal(src1, src2);
}
// Strict equality (===). The VM& parameter is unused but kept so all four
// comparison helpers share one signature for the jump-op dispatch macro.
ALWAYS_INLINE static ThrowCompletionOr<bool> strict_equals(VM&, Value src1, Value src2)
{
    bool const tags_match = src1.tag() == src2.tag();
    if (tags_match && (src1.is_int32() || src1.is_object() || src1.is_boolean() || src1.is_nullish()))
        return src1.encoded() == src2.encoded();

    // IsStrictlyEqual never throws; no TRY needed.
    return is_strictly_equal(src1, src2);
}
// Record where a generator should resume after this yield, then hand the
// yielded value back to the caller. An absent continuation means the
// generator is done yielding.
ALWAYS_INLINE Value VM::do_yield(Value value, Optional<Label> continuation)
{
    auto& context = running_execution_context();
    context.yield_continuation = continuation.has_value()
        ? continuation->address()
        : ExecutionContext::no_yield_continuation;
    context.yield_is_await = false;
    return value;
}
// 16.1.6 ScriptEvaluation ( scriptRecord ), https://tc39.es/ecma262/#sec-runtime-semantics-scriptevaluation
2026-04-13 11:54:04 +02:00
ThrowCompletionOr < Value > VM : : run ( Script & script_record , GC : : Ptr < Environment > lexical_environment_override )
2023-06-15 12:36:57 +02:00
{
auto & vm = this - > vm ( ) ;
// 1. Let globalEnv be scriptRecord.[[Realm]].[[GlobalEnv]].
auto & global_environment = script_record . realm ( ) . global_environment ( ) ;
2025-04-22 21:49:41 +02:00
// NOTE: Spec steps are rearranged in order to compute number of registers+constants+locals before construction of the execution context.
// 12. Let result be Completion(GlobalDeclarationInstantiation(script, globalEnv)).
2026-02-11 01:48:19 +01:00
auto instantiation_result = script_record . global_declaration_instantiation ( vm , global_environment ) ;
2025-04-22 21:49:41 +02:00
Completion result = instantiation_result . is_throw_completion ( ) ? instantiation_result . throw_completion ( ) : normal_completion ( js_undefined ( ) ) ;
2026-02-11 01:48:19 +01:00
// 11. Let script be scriptRecord.[[ECMAScriptCode]].
2026-02-11 01:42:41 +01:00
GC : : Ptr < Executable > executable = script_record . cached_executable ( ) ;
2026-02-23 11:50:46 +01:00
if ( executable & & g_dump_bytecode )
executable - > dump ( ) ;
2025-04-22 21:49:41 +02:00
2026-01-18 23:17:10 +01:00
u32 registers_and_locals_count = 0 ;
2026-03-27 23:45:15 +01:00
ReadonlySpan < Value > constants ;
2025-04-22 21:49:41 +02:00
if ( executable ) {
2026-01-18 23:17:10 +01:00
registers_and_locals_count = executable - > registers_and_locals_count ;
2026-03-27 23:45:15 +01:00
constants = executable - > constants ;
2025-04-22 21:49:41 +02:00
}
2023-06-15 12:36:57 +02:00
// 2. Let scriptContext be a new ECMAScript code execution context.
2026-03-04 10:32:01 +01:00
auto & stack = vm . interpreter_stack ( ) ;
auto * stack_mark = stack . top ( ) ;
2026-03-27 23:45:15 +01:00
auto * script_context = stack . allocate ( registers_and_locals_count , constants , 0 ) ;
2026-03-04 10:32:01 +01:00
if ( ! script_context ) [[unlikely]]
return vm . throw_completion < InternalError > ( ErrorType : : CallStackSizeExceeded ) ;
ScopeGuard deallocate_guard = [ & stack , stack_mark ] { stack . deallocate ( stack_mark ) ; } ;
2023-06-15 12:36:57 +02:00
// 3. Set the Function of scriptContext to null.
// NOTE: This was done during execution context construction.
// 4. Set the Realm of scriptContext to scriptRecord.[[Realm]].
2023-11-27 16:45:45 +01:00
script_context - > realm = & script_record . realm ( ) ;
2023-06-15 12:36:57 +02:00
// 5. Set the ScriptOrModule of scriptContext to scriptRecord.
2026-03-11 20:35:43 +00:00
script_context - > script_or_module = GC : : Ref < Script > ( script_record ) ;
2023-06-15 12:36:57 +02:00
// 6. Set the VariableEnvironment of scriptContext to globalEnv.
2023-11-27 16:45:45 +01:00
script_context - > variable_environment = & global_environment ;
2023-06-15 12:36:57 +02:00
// 7. Set the LexicalEnvironment of scriptContext to globalEnv.
2023-11-27 16:45:45 +01:00
script_context - > lexical_environment = & global_environment ;
2023-06-15 12:36:57 +02:00
2024-10-31 08:03:09 -04:00
// Non-standard: Override the lexical environment if requested.
if ( lexical_environment_override )
script_context - > lexical_environment = lexical_environment_override ;
2023-06-15 12:36:57 +02:00
// 8. Set the PrivateEnvironment of scriptContext to null.
2025-01-01 22:51:52 +13:00
// 9. Suspend the currently running execution context.
2023-06-15 12:36:57 +02:00
// 10. Push scriptContext onto the execution context stack; scriptContext is now the running execution context.
2023-11-27 16:45:45 +01:00
TRY ( vm . push_execution_context ( * script_context , { } ) ) ;
2023-06-15 12:36:57 +02:00
// 13. If result.[[Type]] is normal, then
2026-02-19 11:14:26 +01:00
if ( executable & & result . type ( ) = = Completion : : Type : : Normal ) {
2025-01-02 01:56:00 +13:00
// a. Set result to Completion(Evaluation of script).
2026-03-16 00:02:11 +01:00
result = run_executable ( * script_context , * executable , 0 , { } ) ;
2023-06-15 12:36:57 +02:00
2025-01-02 01:56:00 +13:00
// b. If result is a normal completion and result.[[Value]] is empty, then
2025-04-04 23:16:34 +02:00
if ( result . type ( ) = = Completion : : Type : : Normal & & result . value ( ) . is_special_empty_value ( ) ) {
2025-01-02 01:56:00 +13:00
// i. Set result to NormalCompletion(undefined).
result = normal_completion ( js_undefined ( ) ) ;
}
2023-06-15 12:36:57 +02:00
}
2025-01-02 01:56:00 +13:00
// 14. Suspend scriptContext and remove it from the execution context stack.
2023-06-15 12:36:57 +02:00
vm . pop_execution_context ( ) ;
2025-01-02 01:56:00 +13:00
// 15. Assert: The execution context stack is not empty.
2023-06-15 12:36:57 +02:00
VERIFY ( ! vm . execution_context_stack ( ) . is_empty ( ) ) ;
2025-01-02 01:56:00 +13:00
// FIXME: 16. Resume the context that is now on the top of the execution context stack as the running execution context.
2023-06-15 12:36:57 +02:00
vm . finish_execution_generation ( ) ;
2025-01-02 01:56:00 +13:00
// 17. Return ? result.
2023-06-15 12:36:57 +02:00
if ( result . is_abrupt ( ) ) {
VERIFY ( result . type ( ) = = Completion : : Type : : Throw ) ;
2025-04-04 23:16:34 +02:00
return result . release_error ( ) ;
2023-06-15 12:36:57 +02:00
}
2025-04-04 18:11:45 +02:00
return result . value ( ) ;
2023-06-15 12:36:57 +02:00
}
2026-04-13 11:54:04 +02:00
// Convenience entry point for evaluating a module.
// FIXME: This is not an entry point as defined in the spec, but is convenient.
//        To avoid work we use link_and_eval_module; however, that can already be
//        dangerous if the VM loaded other modules.
ThrowCompletionOr<Value> VM::run(SourceTextModule& module)
{
    auto& vm = this->vm();
    TRY(vm.link_and_eval_module(module));

    // Drain microtasks and finalization registry cleanup before returning.
    vm.run_queued_promise_jobs();
    vm.run_queued_finalization_registry_cleanup_jobs();
    return js_undefined();
}
// Find an exception handler for `exception` raised at `program_counter`,
// unwinding inline JS-to-JS frames as needed. On success the running context's
// program counter points at the handler; otherwise the exception is left in the
// exception register for the caller of run_bytecode() to observe.
VM::HandleExceptionResponse VM::handle_exception(u32 program_counter, Value exception)
{
    while (true) {
        auto handlers = current_executable().exception_handlers_for_offset(program_counter);
        if (handlers.has_value()) {
            reg(Register::exception()) = exception;
            m_running_execution_context->program_counter = handlers->handler_offset;
            return HandleExceptionResponse::ContinueInThisExecutable;
        }

        // If we're in an inline frame, unwind to the caller and try its handlers.
        if (auto* caller_frame = m_running_execution_context->caller_frame) {
            auto* callee_frame = m_running_execution_context;
            auto caller_pc = callee_frame->caller_return_pc;
            vm().interpreter_stack().deallocate(callee_frame);
            m_running_execution_context = caller_frame;

            // NB: caller_pc is the return address (one past the Call instruction).
            //     For handler lookup we need a PC inside the Call instruction,
            //     since the exception occurred during that call, not after it.
            //     Exception handler ranges use an exclusive end offset, so using
            //     caller_pc directly would miss a handler ending right at that address.
            program_counter = caller_pc - 1;
            continue;
        }

        // No handler anywhere on the inline-frame chain: bail out of this executable.
        reg(Register::exception()) = exception;
        return HandleExceptionResponse::ExitFromExecutable;
    }
}
// Allocate and initialize an execution context for an inlined JS-to-JS call,
// bypassing the generic call machinery. Returns nullptr if the interpreter
// stack is exhausted (callers then fall back to the slow call path).
ExecutionContext* VM::push_inline_frame(
    ECMAScriptFunctionObject& callee_function,
    Executable& callee_executable,
    ReadonlySpan<Operand> arguments,
    u32 return_pc,
    u32 dst_raw,
    Value this_value,
    Object* new_target,
    bool is_construct)
{
    auto& stack = vm().interpreter_stack();
    u32 const insn_argument_count = arguments.size();
    size_t const registers_and_locals_count = callee_executable.registers_and_locals_count;

    // The frame must hold at least as many argument slots as the callee declares parameters.
    size_t const argument_count = max(insn_argument_count, static_cast<u32>(callee_function.formal_parameter_count()));

    auto* callee_context = stack.allocate(registers_and_locals_count, callee_executable.constants, argument_count);
    if (!callee_context) [[unlikely]]
        return nullptr;

    // Copy arguments from caller's registers into callee's argument slots,
    // padding missing trailing arguments with undefined.
    auto* callee_argument_values = callee_context->arguments_data();
    for (u32 i = 0; i < insn_argument_count; ++i)
        callee_argument_values[i] = get(arguments[i]);
    for (size_t i = insn_argument_count; i < argument_count; ++i)
        callee_argument_values[i] = js_undefined();
    callee_context->passed_argument_count = insn_argument_count;

    // Set up caller linkage so Return can restore the caller frame.
    callee_context->caller_frame = m_running_execution_context;
    callee_context->caller_dst_raw = dst_raw;
    callee_context->caller_return_pc = return_pc;
    callee_context->caller_is_construct = is_construct;

    // Inlined PrepareForOrdinaryCall (avoids function call overhead on hot path).
    callee_context->function = &callee_function;
    callee_context->realm = callee_function.realm();
    callee_context->script_or_module = callee_function.m_script_or_module;
    if (callee_function.function_environment_needed()) {
        auto local_environment = new_function_environment(callee_function, new_target);
        local_environment->ensure_capacity(callee_function.shared_data().m_function_environment_bindings_count);
        callee_context->lexical_environment = local_environment;
        callee_context->variable_environment = local_environment;
    } else {
        callee_context->lexical_environment = callee_function.environment();
        callee_context->variable_environment = callee_function.environment();
    }
    callee_context->private_environment = callee_function.m_private_environment;

    // Inline JS-to-JS frames stay out of the VM execution context stack and
    // are tracked through caller_frame instead.
    m_running_execution_context = callee_context;

    // Bind this if the function uses it.
    if (callee_function.uses_this())
        callee_function.ordinary_call_bind_this(vm(), *callee_context, this_value);

    // Set up execution context fields that run_executable normally does.
    // NB: We must use the callee's realm (not the caller's) for global_object
    //     and global_declarative_environment, since the caller's realm may differ
    //     in cross-realm calls (e.g. iframe <-> parent).
    callee_context->executable = callee_executable;

    // Seed the `this` register from the bound this value (or empty if unbound).
    auto* register_slots = callee_context->registers_and_constants_and_locals_and_arguments();
    register_slots[Register::this_value().index()] = callee_context->this_value.value_or(js_special_empty_value());

    return callee_context;
}
// Attempt to execute a Call instruction as an inlined JS-to-JS frame.
// Returns false when the callee is not an ordinary bytecode function (or the
// stack is exhausted), in which case the caller must use the generic call path.
NEVER_INLINE bool VM::try_inline_call(Instruction const& insn, u32 current_pc)
{
    auto& instruction = static_cast<Op::Call const&>(insn);

    auto callee = get(instruction.callee());
    if (!callee.is_object() || !is<ECMAScriptFunctionObject>(callee.as_object()))
        return false;

    auto& callee_function = static_cast<ECMAScriptFunctionObject&>(callee.as_object());

    // Only plain, compiled functions are eligible (no generators/async, no class constructors).
    if (callee_function.kind() != FunctionKind::Normal
        || callee_function.is_class_constructor()
        || !callee_function.bytecode_executable())
        return false;

    u32 const return_pc = current_pc + instruction.length();
    auto* callee_context = push_inline_frame(
        callee_function, *callee_function.bytecode_executable(),
        instruction.arguments(), return_pc, instruction.dst().raw(),
        get(instruction.this_value()), nullptr, false);
    return callee_context != nullptr;
}
// Attempt to execute a CallConstruct instruction as an inlined JS-to-JS frame.
// Returns false if the callee is not an eligible base constructor, if any setup
// step fails, or if the stack is exhausted; the caller then takes the generic
// construct path (which will also surface any pending exception).
NEVER_INLINE bool VM::try_inline_call_construct(Instruction const& insn, u32 current_pc)
{
    auto& instruction = static_cast<Op::CallConstruct const&>(insn);

    auto callee = get(instruction.callee());
    if (!callee.is_object() || !is<ECMAScriptFunctionObject>(callee.as_object()))
        return false;

    auto& callee_function = static_cast<ECMAScriptFunctionObject&>(callee.as_object());

    // Only base-class constructors with compiled bytecode are eligible.
    if (!callee_function.has_constructor()
        || callee_function.constructor_kind() != ConstructorKind::Base
        || !callee_function.bytecode_executable())
        return false;

    // OrdinaryCreateFromConstructor: create the this object.
    auto prototype_or_error = get_prototype_from_constructor(vm(), callee_function, &Intrinsics::object_prototype);
    if (prototype_or_error.is_error()) [[unlikely]]
        return false;
    auto this_argument = Object::create(realm(), prototype_or_error.release_value());

    u32 const return_pc = current_pc + instruction.length();
    auto* callee_context = push_inline_frame(
        callee_function, *callee_function.bytecode_executable(),
        instruction.arguments(), return_pc, instruction.dst().raw(),
        this_argument, &callee_function, true);
    if (!callee_context) [[unlikely]]
        return false;

    // Ensure this_value is set for construct return semantics.
    if (!callee_context->this_value.has_value())
        callee_context->this_value = Value(this_argument);

    // InitializeInstanceElements (can throw). On failure, unwind the frame we
    // just pushed and let the slow path redo the construct and report the error.
    auto init_result = this_argument->initialize_instance_elements(callee_function);
    if (init_result.is_throw_completion()) [[unlikely]] {
        m_running_execution_context = callee_context->caller_frame;
        vm().interpreter_stack().deallocate(callee_context);
        return false;
    }

    return true;
}
// Tear down an inlined JS-to-JS frame: apply construct-return semantics,
// restore the caller frame, store the return value into the caller's
// destination register, and resume at the saved return PC.
NEVER_INLINE void VM::pop_inline_frame(Value return_value)
{
    auto* callee_frame = m_running_execution_context;
    auto* caller_frame = callee_frame->caller_frame;
    auto const caller_dst_raw = callee_frame->caller_dst_raw;
    auto const caller_pc = callee_frame->caller_return_pc;

    // For base constructor calls, a non-object return yields the `this` object.
    if (callee_frame->caller_is_construct && !return_value.is_object())
        return_value = callee_frame->this_value.value();

    vm().interpreter_stack().deallocate(callee_frame);
    m_running_execution_context = caller_frame;
    caller_frame->program_counter = caller_pc;
    caller_frame->registers_and_constants_and_locals_and_arguments()[caller_dst_raw] = return_value;
    vm().finish_execution_generation();
}
void VM : : run_bytecode ( size_t entry_point )
2024-05-06 06:44:08 +02:00
{
2026-03-04 10:32:01 +01:00
if ( vm ( ) . interpreter_stack ( ) . is_exhausted ( ) | | vm ( ) . did_reach_stack_space_limit ( ) ) [[unlikely]] {
2025-04-04 18:11:45 +02:00
reg ( Register : : exception ( ) ) = vm ( ) . throw_completion < InternalError > ( ErrorType : : CallStackSizeExceeded ) . value ( ) ;
2024-05-07 07:50:19 +02:00
return ;
}
2026-03-06 22:07:49 +01:00
static bool const use_cpp_interpreter = [ ] ( ) {
auto const * env = getenv ( " LIBJS_USE_CPP_INTERPRETER " ) ;
return env & & env [ 0 ] = = ' 1 ' ;
} ( ) ;
if ( ! use_cpp_interpreter & & AsmInterpreter : : is_available ( ) ) {
AsmInterpreter : : run ( * this , entry_point ) ;
return ;
}
2026-03-04 10:33:29 +01:00
u8 const * bytecode ;
u32 program_counter ;
2024-05-06 06:44:08 +02:00
2024-05-06 16:44:45 +02:00
// Declare a lookup table for computed goto with each of the `handle_*` labels
// to avoid the overhead of a switch statement.
// This is a GCC extension, but it's also supported by Clang.
static void * const bytecode_dispatch_table [ ] = {
# define SET_UP_LABEL(name) &&handle_##name,
ENUMERATE_BYTECODE_OPS ( SET_UP_LABEL )
} ;
2024-05-09 15:13:31 +02:00
# undef SET_UP_LABEL
2024-05-06 16:44:45 +02:00
# define DISPATCH_NEXT(name) \
do { \
if constexpr ( Op : : name : : IsVariableLength ) \
2025-04-03 15:09:49 +02:00
program_counter + = instruction . length ( ) ; \
2024-05-06 16:44:45 +02:00
else \
program_counter + = sizeof ( Op : : name ) ; \
2026-03-04 10:33:29 +01:00
m_running_execution_context - > program_counter = program_counter ; \
2024-05-06 16:44:45 +02:00
auto & next_instruction = * reinterpret_cast < Instruction const * > ( & bytecode [ program_counter ] ) ; \
goto * bytecode_dispatch_table [ static_cast < size_t > ( next_instruction . type ( ) ) ] ; \
} while ( 0 )
2026-03-04 10:33:29 +01:00
// Reload bytecode and program_counter from the execution context after
// operations that may have changed the current executable (handle_exception
// unwinding inline frames, try_inline_call, pop_inline_frame).
# define RELOAD_AND_GOTO_START() \
do { \
bytecode = m_running_execution_context - > executable - > bytecode . data ( ) ; \
program_counter = m_running_execution_context - > program_counter ; \
goto start ; \
} while ( 0 )
bytecode = current_executable ( ) . bytecode . data ( ) ;
program_counter = entry_point ;
2021-06-09 06:49:58 +04:30
for ( ; ; ) {
2023-09-26 16:45:55 +02:00
start :
2026-03-04 10:33:29 +01:00
m_running_execution_context - > program_counter = program_counter ;
2024-05-06 06:44:08 +02:00
for ( ; ; ) {
2024-05-06 16:44:45 +02:00
goto * bytecode_dispatch_table [ static_cast < size_t > ( ( * reinterpret_cast < Instruction const * > ( & bytecode [ program_counter ] ) ) . type ( ) ) ] ;
2023-09-26 17:14:59 +02:00
2024-05-06 16:44:45 +02:00
handle_Mov : {
auto & instruction = * reinterpret_cast < Op : : Mov const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
set ( instruction . dst ( ) , get ( instruction . src ( ) ) ) ;
2024-05-06 16:44:45 +02:00
DISPATCH_NEXT ( Mov ) ;
}
2026-03-11 07:02:41 -05:00
handle_Mov2 : {
auto & instruction = * reinterpret_cast < Op : : Mov2 const * > ( & bytecode [ program_counter ] ) ;
set ( instruction . dst1 ( ) , get ( instruction . src1 ( ) ) ) ;
set ( instruction . dst2 ( ) , get ( instruction . src2 ( ) ) ) ;
DISPATCH_NEXT ( Mov2 ) ;
}
handle_Mov3 : {
auto & instruction = * reinterpret_cast < Op : : Mov3 const * > ( & bytecode [ program_counter ] ) ;
set ( instruction . dst1 ( ) , get ( instruction . src1 ( ) ) ) ;
set ( instruction . dst2 ( ) , get ( instruction . src2 ( ) ) ) ;
set ( instruction . dst3 ( ) , get ( instruction . src3 ( ) ) ) ;
DISPATCH_NEXT ( Mov3 ) ;
}
2024-05-06 16:44:45 +02:00
handle_End : {
auto & instruction = * reinterpret_cast < Op : : End const * > ( & bytecode [ program_counter ] ) ;
2025-10-31 21:10:51 +01:00
auto value = get ( instruction . value ( ) ) ;
if ( value . is_special_empty_value ( ) )
value = js_undefined ( ) ;
2026-03-04 10:33:29 +01:00
if ( m_running_execution_context - > caller_frame ) {
pop_inline_frame ( value ) ;
RELOAD_AND_GOTO_START ( ) ;
}
2025-10-31 21:10:51 +01:00
reg ( Register : : return_value ( ) ) = value ;
2024-05-06 16:44:45 +02:00
return ;
}
handle_Jump : {
auto & instruction = * reinterpret_cast < Op : : Jump const * > ( & bytecode [ program_counter ] ) ;
program_counter = instruction . target ( ) . address ( ) ;
goto start ;
}
handle_JumpIf : {
auto & instruction = * reinterpret_cast < Op : : JumpIf const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
if ( get ( instruction . condition ( ) ) . to_boolean ( ) )
2024-05-06 16:44:45 +02:00
program_counter = instruction . true_target ( ) . address ( ) ;
else
program_counter = instruction . false_target ( ) . address ( ) ;
goto start ;
}
handle_JumpTrue : {
auto & instruction = * reinterpret_cast < Op : : JumpTrue const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
if ( get ( instruction . condition ( ) ) . to_boolean ( ) ) {
2024-05-06 16:44:45 +02:00
program_counter = instruction . target ( ) . address ( ) ;
2024-05-06 10:42:52 +02:00
goto start ;
2024-05-06 10:15:17 +02:00
}
2024-05-06 16:44:45 +02:00
DISPATCH_NEXT ( JumpTrue ) ;
}
handle_JumpFalse : {
auto & instruction = * reinterpret_cast < Op : : JumpFalse const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
if ( ! get ( instruction . condition ( ) ) . to_boolean ( ) ) {
2024-05-06 16:44:45 +02:00
program_counter = instruction . target ( ) . address ( ) ;
2024-05-06 10:42:52 +02:00
goto start ;
2024-05-06 10:15:17 +02:00
}
2024-05-06 16:44:45 +02:00
DISPATCH_NEXT ( JumpFalse ) ;
}
handle_JumpNullish : {
auto & instruction = * reinterpret_cast < Op : : JumpNullish const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
if ( get ( instruction . condition ( ) ) . is_nullish ( ) )
2024-05-06 16:44:45 +02:00
program_counter = instruction . true_target ( ) . address ( ) ;
else
program_counter = instruction . false_target ( ) . address ( ) ;
goto start ;
}
2024-05-13 09:23:53 +02:00
# define HANDLE_COMPARISON_OP(op_TitleCase, op_snake_case, numeric_operator) \
2024-05-10 11:22:27 +02:00
handle_Jump # # op_TitleCase : \
{ \
auto & instruction = * reinterpret_cast < Op : : Jump # # op_TitleCase const * > ( & bytecode [ program_counter ] ) ; \
2025-04-29 16:08:42 +02:00
auto lhs = get ( instruction . lhs ( ) ) ; \
auto rhs = get ( instruction . rhs ( ) ) ; \
2025-12-04 22:26:22 +01:00
if ( lhs . is_number ( ) & & rhs . is_number ( ) ) [[likely]] { \
2024-05-13 09:23:53 +02:00
bool result ; \
if ( lhs . is_int32 ( ) & & rhs . is_int32 ( ) ) { \
result = lhs . as_i32 ( ) numeric_operator rhs . as_i32 ( ) ; \
} else { \
result = lhs . as_double ( ) numeric_operator rhs . as_double ( ) ; \
} \
program_counter = result ? instruction . true_target ( ) . address ( ) : instruction . false_target ( ) . address ( ) ; \
goto start ; \
} \
2025-04-29 16:08:42 +02:00
auto result = op_snake_case ( vm ( ) , get ( instruction . lhs ( ) ) , get ( instruction . rhs ( ) ) ) ; \
2025-04-06 02:32:04 +02:00
if ( result . is_error ( ) ) [[unlikely]] { \
2024-05-10 11:22:27 +02:00
if ( handle_exception ( program_counter , result . error_value ( ) ) = = HandleExceptionResponse : : ExitFromExecutable ) \
return ; \
2026-03-04 10:33:29 +01:00
RELOAD_AND_GOTO_START ( ) ; \
2024-05-10 11:22:27 +02:00
} \
2025-05-08 15:19:35 +12:00
if ( result . value ( ) ) \
2024-05-10 11:22:27 +02:00
program_counter = instruction . true_target ( ) . address ( ) ; \
else \
program_counter = instruction . false_target ( ) . address ( ) ; \
goto start ; \
2024-05-09 15:13:31 +02:00
}
JS_ENUMERATE_COMPARISON_OPS ( HANDLE_COMPARISON_OP )
# undef HANDLE_COMPARISON_OP
2024-05-06 16:44:45 +02:00
handle_JumpUndefined : {
auto & instruction = * reinterpret_cast < Op : : JumpUndefined const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
if ( get ( instruction . condition ( ) ) . is_undefined ( ) )
2024-05-06 16:44:45 +02:00
program_counter = instruction . true_target ( ) . address ( ) ;
else
program_counter = instruction . false_target ( ) . address ( ) ;
goto start ;
}
2024-05-10 11:22:27 +02:00
# define HANDLE_INSTRUCTION(name) \
handle_ # # name : \
{ \
auto & instruction = * reinterpret_cast < Op : : name const * > ( & bytecode [ program_counter ] ) ; \
{ \
2025-04-29 16:08:42 +02:00
auto result = instruction . execute_impl ( * this ) ; \
2025-04-06 02:32:04 +02:00
if ( result . is_error ( ) ) [[unlikely]] { \
2024-05-10 11:22:27 +02:00
if ( handle_exception ( program_counter , result . error_value ( ) ) = = HandleExceptionResponse : : ExitFromExecutable ) \
return ; \
2026-03-04 10:33:29 +01:00
RELOAD_AND_GOTO_START ( ) ; \
2024-05-10 11:22:27 +02:00
} \
} \
DISPATCH_NEXT ( name ) ; \
2024-05-06 16:44:45 +02:00
}
2024-05-09 15:24:34 +02:00
# define HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK(name) \
handle_ # # name : \
{ \
auto & instruction = * reinterpret_cast < Op : : name const * > ( & bytecode [ program_counter ] ) ; \
2025-04-29 16:08:42 +02:00
instruction . execute_impl ( * this ) ; \
2024-05-09 15:24:34 +02:00
DISPATCH_NEXT ( name ) ; \
}
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( Add ) ;
2024-05-11 22:54:41 +00:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( AddPrivateName ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( ArrayAppend ) ;
HANDLE_INSTRUCTION ( BitwiseAnd ) ;
HANDLE_INSTRUCTION ( BitwiseNot ) ;
HANDLE_INSTRUCTION ( BitwiseOr ) ;
2025-12-15 00:14:15 -06:00
HANDLE_INSTRUCTION ( ToInt32 ) ;
2026-01-22 17:23:19 -08:00
HANDLE_INSTRUCTION ( ToString ) ;
2026-02-08 12:32:38 +01:00
HANDLE_INSTRUCTION ( ToPrimitiveWithStringHint ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( BitwiseXor ) ;
2026-03-04 10:33:29 +01:00
handle_Call : {
auto & instruction = * reinterpret_cast < Op : : Call const * > ( & bytecode [ program_counter ] ) ;
if ( try_inline_call ( instruction , program_counter ) )
RELOAD_AND_GOTO_START ( ) ;
auto result = instruction . execute_impl ( * this ) ;
if ( result . is_error ( ) ) [[unlikely]] {
if ( handle_exception ( program_counter , result . error_value ( ) ) = = HandleExceptionResponse : : ExitFromExecutable )
return ;
RELOAD_AND_GOTO_START ( ) ;
}
DISPATCH_NEXT ( Call ) ;
}
2026-04-12 13:25:26 +02:00
# define HANDLE_CALL_BUILTIN_INSTRUCTION(name, ...) \
HANDLE_INSTRUCTION ( CallBuiltin # # name ) ;
JS_ENUMERATE_BUILTINS ( HANDLE_CALL_BUILTIN_INSTRUCTION )
# undef HANDLE_CALL_BUILTIN_INSTRUCTION
2026-03-04 10:33:29 +01:00
handle_CallConstruct : {
auto & instruction = * reinterpret_cast < Op : : CallConstruct const * > ( & bytecode [ program_counter ] ) ;
if ( try_inline_call_construct ( instruction , program_counter ) )
RELOAD_AND_GOTO_START ( ) ;
auto result = instruction . execute_impl ( * this ) ;
if ( result . is_error ( ) ) [[unlikely]] {
if ( handle_exception ( program_counter , result . error_value ( ) ) = = HandleExceptionResponse : : ExitFromExecutable )
return ;
RELOAD_AND_GOTO_START ( ) ;
}
DISPATCH_NEXT ( CallConstruct ) ;
}
2025-08-30 11:00:54 +02:00
HANDLE_INSTRUCTION ( CallConstructWithArgumentArray ) ;
2024-10-31 22:47:30 +01:00
HANDLE_INSTRUCTION ( CallDirectEval ) ;
2025-08-30 11:00:54 +02:00
HANDLE_INSTRUCTION ( CallDirectEvalWithArgumentArray ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( CallWithArgumentArray ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( Catch ) ;
2025-12-10 09:17:05 -06:00
HANDLE_INSTRUCTION ( ConcatString ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( CopyObjectExcludingProperties ) ;
2025-11-06 19:20:29 +00:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CreateAsyncFromSyncIterator ) ;
HANDLE_INSTRUCTION ( CreateDataPropertyOrThrow ) ;
2025-10-25 14:06:48 +02:00
HANDLE_INSTRUCTION ( CreateImmutableBinding ) ;
HANDLE_INSTRUCTION ( CreateMutableBinding ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CreateLexicalEnvironment ) ;
2024-05-05 22:06:55 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CreateVariableEnvironment ) ;
2024-05-11 22:54:41 +00:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CreatePrivateEnvironment ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( CreateVariable ) ;
2025-04-04 13:48:59 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CreateRestParams ) ;
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CreateArguments ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( Decrement ) ;
HANDLE_INSTRUCTION ( DeleteById ) ;
HANDLE_INSTRUCTION ( DeleteByValue ) ;
HANDLE_INSTRUCTION ( DeleteVariable ) ;
HANDLE_INSTRUCTION ( Div ) ;
HANDLE_INSTRUCTION ( EnterObjectEnvironment ) ;
HANDLE_INSTRUCTION ( Exp ) ;
HANDLE_INSTRUCTION ( GetById ) ;
HANDLE_INSTRUCTION ( GetByIdWithThis ) ;
HANDLE_INSTRUCTION ( GetByValue ) ;
HANDLE_INSTRUCTION ( GetByValueWithThis ) ;
HANDLE_INSTRUCTION ( GetCalleeAndThisFromEnvironment ) ;
2025-03-31 09:32:39 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( GetCompletionFields ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( GetGlobal ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( GetImportMeta ) ;
2026-02-09 03:34:42 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( GetLexicalEnvironment ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( GetIterator ) ;
2024-05-20 11:53:28 +02:00
HANDLE_INSTRUCTION ( GetLength ) ;
HANDLE_INSTRUCTION ( GetLengthWithThis ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( GetMethod ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( GetNewTarget ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( GetObjectPropertyIterator ) ;
HANDLE_INSTRUCTION ( GetPrivateById ) ;
2026-01-06 19:49:38 +00:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( GetTemplateObject ) ;
2024-05-14 11:32:04 +02:00
HANDLE_INSTRUCTION ( GetBinding ) ;
2025-05-04 01:41:49 +02:00
HANDLE_INSTRUCTION ( GetInitializedBinding ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( GreaterThan ) ;
HANDLE_INSTRUCTION ( GreaterThanEquals ) ;
HANDLE_INSTRUCTION ( HasPrivateId ) ;
HANDLE_INSTRUCTION ( ImportCall ) ;
HANDLE_INSTRUCTION ( In ) ;
HANDLE_INSTRUCTION ( Increment ) ;
2024-05-14 11:30:30 +02:00
HANDLE_INSTRUCTION ( InitializeLexicalBinding ) ;
HANDLE_INSTRUCTION ( InitializeVariableBinding ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( InstanceOf ) ;
2025-12-04 19:55:07 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( IsCallable ) ;
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( IsConstructor ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( IteratorClose ) ;
HANDLE_INSTRUCTION ( IteratorNext ) ;
2025-05-01 16:05:24 +03:00
HANDLE_INSTRUCTION ( IteratorNextUnpack ) ;
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
HANDLE_INSTRUCTION ( ObjectPropertyIteratorNext ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( IteratorToArray ) ;
2024-05-11 22:54:41 +00:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( LeavePrivateEnvironment ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( LeftShift ) ;
HANDLE_INSTRUCTION ( LessThan ) ;
HANDLE_INSTRUCTION ( LessThanEquals ) ;
HANDLE_INSTRUCTION ( LooselyEquals ) ;
HANDLE_INSTRUCTION ( LooselyInequals ) ;
HANDLE_INSTRUCTION ( Mod ) ;
HANDLE_INSTRUCTION ( Mul ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewArray ) ;
2025-11-06 19:20:29 +00:00
HANDLE_INSTRUCTION ( NewArrayWithLength ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( NewClass ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewFunction ) ;
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewObject ) ;
2026-01-09 18:55:00 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( CacheObjectShape ) ;
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( InitObjectLiteralProperty ) ;
2025-11-06 19:20:29 +00:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewObjectWithNoPrototype ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewPrimitiveArray ) ;
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewRegExp ) ;
2026-02-11 13:14:52 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewReferenceError ) ;
2024-05-09 15:24:34 +02:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( NewTypeError ) ;
2025-12-04 21:53:10 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( Not ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( PostfixDecrement ) ;
HANDLE_INSTRUCTION ( PostfixIncrement ) ;
2025-10-10 12:09:34 +02:00
2026-03-04 10:33:38 +01:00
HANDLE_INSTRUCTION ( PutById ) ;
HANDLE_INSTRUCTION ( PutByIdWithThis ) ;
HANDLE_INSTRUCTION ( PutByValue ) ;
HANDLE_INSTRUCTION ( PutByValueWithThis ) ;
2025-10-10 12:09:34 +02:00
2024-11-01 22:00:32 +01:00
HANDLE_INSTRUCTION ( PutBySpread ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( PutPrivateById ) ;
HANDLE_INSTRUCTION ( ResolveSuperBase ) ;
HANDLE_INSTRUCTION ( ResolveThisBinding ) ;
HANDLE_INSTRUCTION ( RightShift ) ;
2025-03-31 09:32:39 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( SetCompletionType ) ;
2025-05-01 23:58:38 +02:00
HANDLE_INSTRUCTION ( SetGlobal ) ;
2026-02-09 03:34:42 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( SetLexicalEnvironment ) ;
2024-05-14 11:30:30 +02:00
HANDLE_INSTRUCTION ( SetLexicalBinding ) ;
HANDLE_INSTRUCTION ( SetVariableBinding ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( StrictlyEquals ) ;
HANDLE_INSTRUCTION ( StrictlyInequals ) ;
HANDLE_INSTRUCTION ( Sub ) ;
HANDLE_INSTRUCTION ( SuperCallWithArgumentArray ) ;
HANDLE_INSTRUCTION ( ThrowIfNotObject ) ;
HANDLE_INSTRUCTION ( ThrowIfNullish ) ;
HANDLE_INSTRUCTION ( ThrowIfTDZ ) ;
2026-02-09 20:27:31 +01:00
HANDLE_INSTRUCTION ( ThrowConstAssignment ) ;
2025-11-06 19:20:29 +00:00
HANDLE_INSTRUCTION ( ToLength ) ;
HANDLE_INSTRUCTION ( ToObject ) ;
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( ToBoolean ) ;
2025-12-04 21:53:10 +01:00
HANDLE_INSTRUCTION_WITHOUT_EXCEPTION_CHECK ( Typeof ) ;
2024-06-14 09:37:26 +02:00
HANDLE_INSTRUCTION ( TypeofBinding ) ;
2024-05-06 16:44:45 +02:00
HANDLE_INSTRUCTION ( UnaryMinus ) ;
HANDLE_INSTRUCTION ( UnaryPlus ) ;
HANDLE_INSTRUCTION ( UnsignedRightShift ) ;
2025-12-04 23:13:55 +01:00
handle_Throw : {
auto & instruction = * reinterpret_cast < Op : : Throw const * > ( & bytecode [ program_counter ] ) ;
auto result = instruction . execute_impl ( * this ) ;
if ( handle_exception ( program_counter , result . error_value ( ) ) = = HandleExceptionResponse : : ExitFromExecutable )
return ;
2026-03-04 10:33:29 +01:00
RELOAD_AND_GOTO_START ( ) ;
2025-12-04 23:13:55 +01:00
}
2024-05-06 16:44:45 +02:00
handle_Await : {
auto & instruction = * reinterpret_cast < Op : : Await const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
instruction . execute_impl ( * this ) ;
2024-05-12 11:03:26 +02:00
return ;
2024-05-06 16:44:45 +02:00
}
2024-05-06 10:42:52 +02:00
2024-05-06 16:44:45 +02:00
handle_Return : {
auto & instruction = * reinterpret_cast < Op : : Return const * > ( & bytecode [ program_counter ] ) ;
2026-03-04 10:33:29 +01:00
auto return_value = get ( instruction . value ( ) ) ;
if ( return_value . is_special_empty_value ( ) )
return_value = js_undefined ( ) ;
if ( m_running_execution_context - > caller_frame ) {
pop_inline_frame ( return_value ) ;
RELOAD_AND_GOTO_START ( ) ;
}
reg ( Register : : return_value ( ) ) = return_value ;
reg ( Register : : exception ( ) ) = js_special_empty_value ( ) ;
2024-05-12 11:03:26 +02:00
return ;
2024-05-06 16:44:45 +02:00
}
2023-09-26 19:43:44 +02:00
2024-05-06 16:44:45 +02:00
handle_Yield : {
auto & instruction = * reinterpret_cast < Op : : Yield const * > ( & bytecode [ program_counter ] ) ;
2025-04-29 16:08:42 +02:00
instruction . execute_impl ( * this ) ;
2024-05-12 11:03:26 +02:00
return ;
2024-05-06 16:44:45 +02:00
}
2024-05-06 06:44:08 +02:00
}
2021-06-04 12:07:38 +02:00
}
2023-09-26 16:41:42 +02:00
}
2026-04-13 11:54:04 +02:00
// Resolves an identifier-table index to its interned string, using the
// executable attached to the currently running execution context.
Utf16FlyString const& VM::get_identifier(IdentifierTableIndex index) const
{
    auto const& active_executable = *m_running_execution_context->executable;
    return active_executable.get_identifier(index);
}
2026-04-13 11:54:04 +02:00
// Resolves a property-key-table index to its PropertyKey, using the
// executable attached to the currently running execution context.
PropertyKey const& VM::get_property_key(PropertyKeyTableIndex index) const
{
    auto const& active_executable = *m_running_execution_context->executable;
    return active_executable.get_property_key(index);
}
2026-04-13 11:54:04 +02:00
// Convenience accessor for the current realm's global declarative environment.
DeclarativeEnvironment& VM::global_declarative_environment()
{
    auto& current_realm = realm();
    return current_realm.global_declarative_environment();
}
2026-04-13 11:54:04 +02:00
// Runs `executable` starting at `entry_point` within `context`.
// Returns the value left in the return-value register, or a throw completion
// if the exception register is non-empty when execution finishes.
ThrowCompletionOr<Value> VM::run_executable(ExecutionContext& context, Executable& executable, u32 entry_point)
{
    dbgln_if(JS_BYTECODE_DEBUG, "VM will run bytecode unit {}", &executable);

    // NOTE: This is how we "push" a new execution context onto the VM's
    //       execution context stack. The TemporaryChange restores the previous
    //       running context when this function returns (even on early return).
    TemporaryChange restore_running_execution_context { m_running_execution_context, &context };

    context.executable = executable;

    // Sanity-check that the context's value span is large enough to hold all
    // registers, locals and constants the executable expects.
    VERIFY(executable.registers_and_locals_count + executable.constants.size() == executable.registers_and_locals_and_constants_count);
    VERIFY(executable.registers_and_locals_and_constants_count <= context.registers_and_constants_and_locals_and_arguments_span().size());

    // NOTE: We only copy the `this` value from ExecutionContext if it's not already set.
    //       If we are re-entering an async/generator context, the `this` value
    //       may have already been cached by a ResolveThisBinding instruction,
    //       and subsequent instructions expect this value to be set.
    if (reg(Register::this_value()).is_special_empty_value())
        reg(Register::this_value()) = context.this_value.value_or(js_special_empty_value());

    run_bytecode(entry_point);

    dbgln_if(JS_BYTECODE_DEBUG, "VM did run bytecode unit {}", context.executable);
    if constexpr (JS_BYTECODE_DEBUG) {
        // Dump every register's final value for debugging.
        auto* values = context.registers_and_constants_and_locals_and_arguments();
        for (size_t i = 0; i < executable.number_of_registers; ++i) {
            String value_string;
            if (values[i].is_special_empty_value())
                value_string = "(empty)"_string;
            else
                value_string = values[i].to_string_without_side_effects();
            dbgln("[{:3}] {}", i, value_string);
        }
    }

    // Drain microtasks queued during execution before reporting the result.
    vm().run_queued_promise_jobs();

    vm().finish_execution_generation();

    // An exception left in the exception register becomes a throw completion.
    auto exception = reg(Register::exception());
    if (!exception.is_special_empty_value()) [[unlikely]]
        return JS::throw_completion(exception);

    return reg(Register::return_value());
}
2026-04-13 11:54:04 +02:00
void VM : : catch_exception ( Operand dst )
2025-04-29 16:08:42 +02:00
{
set ( dst , reg ( Register : : exception ( ) ) ) ;
reg ( Register : : exception ( ) ) = js_special_empty_value ( ) ;
2023-11-12 00:12:21 +01:00
}
2024-07-09 10:10:14 +02:00
// NOTE: This function assumes that the index is valid within the TypedArray,
//       and that the TypedArray is not detached.
template<typename T>
inline Value fast_typed_array_get_element(TypedArrayBase& typed_array, u32 index)
{
    Checked<u32> byte_offset = index;
    byte_offset *= sizeof(T);
    byte_offset += typed_array.byte_offset();
    if (byte_offset.has_overflow()) [[unlikely]]
        return js_undefined();
    auto const& buffer = typed_array.viewed_array_buffer()->buffer();
    auto const* element = reinterpret_cast<T const*>(buffer.offset_pointer(byte_offset.value()));
    return Value { *element };
}
2023-09-27 10:10:00 +02:00
2024-07-09 10:10:14 +02:00
// NOTE: This function assumes that the index is valid within the TypedArray,
//       and that the TypedArray is not detached.
template<typename T>
inline void fast_typed_array_set_element(TypedArrayBase& typed_array, u32 index, T value)
{
    Checked<u32> byte_offset = index;
    byte_offset *= sizeof(T);
    byte_offset += typed_array.byte_offset();
    if (byte_offset.has_overflow()) [[unlikely]]
        return;
    auto& buffer = typed_array.viewed_array_buffer()->buffer();
    auto* element = reinterpret_cast<T*>(buffer.offset_pointer(byte_offset.value()));
    *element = value;
}
2025-12-10 11:07:00 -06:00
// Throws the TypeError for a property access on null/undefined; the message
// names the base expression too when an identifier for it is available.
static COLD Completion throw_null_or_undefined_property_get(VM& vm, Value base_value, Optional<IdentifierTableIndex> base_identifier, IdentifierTableIndex property_identifier, Executable const& executable)
{
    VERIFY(base_value.is_nullish());
    auto const& property_name = executable.get_identifier(property_identifier);
    if (!base_identifier.has_value())
        return vm.throw_completion<TypeError>(ErrorType::ToObjectNullOrUndefinedWithProperty, property_name, base_value);
    return vm.throw_completion<TypeError>(ErrorType::ToObjectNullOrUndefinedWithPropertyAndName, property_name, base_value, executable.get_identifier(base_identifier.value()));
}
2025-12-10 11:07:00 -06:00
// Overload for dynamic property keys (Value instead of an identifier index).
static COLD Completion throw_null_or_undefined_property_get(VM& vm, Value base_value, Optional<IdentifierTableIndex> base_identifier, Value property, Executable const& executable)
{
    VERIFY(base_value.is_nullish());
    if (!base_identifier.has_value())
        return vm.throw_completion<TypeError>(ErrorType::ToObjectNullOrUndefinedWithProperty, property, base_value);
    return vm.throw_completion<TypeError>(ErrorType::ToObjectNullOrUndefinedWithPropertyAndName, property, base_value, executable.get_identifier(base_identifier.value()));
}
2024-11-15 04:01:23 +13:00
// Coerces `base_value` to an object for a property read, or throws the
// appropriate TypeError if the base is null/undefined.
ALWAYS_INLINE ThrowCompletionOr<GC::Ref<Object>> base_object_for_get(VM& vm, Value base_value, Optional<IdentifierTableIndex> base_identifier, IdentifierTableIndex property_identifier, Executable const& executable)
{
    auto base_object = base_object_for_get_impl(vm, base_value);
    if (base_object) [[likely]]
        return GC::Ref { *base_object };

    // NOTE: At this point this is guaranteed to throw (null or undefined).
    return throw_null_or_undefined_property_get(vm, base_value, base_identifier, property_identifier, executable);
}
2024-11-15 04:01:23 +13:00
// Overload for dynamic property keys (Value instead of an identifier index).
ALWAYS_INLINE ThrowCompletionOr<GC::Ref<Object>> base_object_for_get(VM& vm, Value base_value, Optional<IdentifierTableIndex> base_identifier, Value property, Executable const& executable)
{
    auto base_object = base_object_for_get_impl(vm, base_value);
    if (base_object) [[likely]]
        return GC::Ref { *base_object };

    // NOTE: At this point this is guaranteed to throw (null or undefined).
    return throw_null_or_undefined_property_get(vm, base_value, base_identifier, property, executable);
}
2024-07-09 11:37:06 +02:00
// Implements `base[property]` with fast paths for Int32 indices into
// indexed-storage objects and TypedArrays, falling back to the generic
// ToObject + [[Get]] path.
inline ThrowCompletionOr<Value> get_by_value(VM& vm, Optional<IdentifierTableIndex> base_identifier, Value base_value, Value property_key_value, Executable const& executable)
{
    // OPTIMIZATION: Fast path for simple Int32 indexes in array-like objects.
    if (base_value.is_object() && property_key_value.is_non_negative_int32()) {
        auto& object = base_value.as_object();
        auto index = static_cast<u32>(property_key_value.as_i32());
        // For "non-typed arrays":
        if (!object.may_interfere_with_indexed_property_access()
            && object.indexed_storage_kind() != IndexedStorageKind::None) {
            auto maybe_value = object.indexed_get(index);
            if (maybe_value.has_value()) {
                auto value = maybe_value->value;
                // Accessors must go through the generic path below so the
                // getter actually runs.
                if (!value.is_accessor())
                    return value;
            }
        }
        // For typed arrays:
        if (object.is_typed_array()) {
            auto& typed_array = static_cast<TypedArrayBase&>(object);
            auto canonical_index = CanonicalIndex { CanonicalIndex::Type::Index, index };
            // In-bounds reads on a non-detached TypedArray can use the direct
            // element load; otherwise fall through to typed_array_get_element.
            if (is_valid_integer_index(typed_array, canonical_index)) {
                switch (typed_array.kind()) {
                case TypedArrayBase::Kind::Uint8Array:
                    return fast_typed_array_get_element<u8>(typed_array, index);
                case TypedArrayBase::Kind::Uint16Array:
                    return fast_typed_array_get_element<u16>(typed_array, index);
                case TypedArrayBase::Kind::Uint32Array:
                    return fast_typed_array_get_element<u32>(typed_array, index);
                case TypedArrayBase::Kind::Int8Array:
                    return fast_typed_array_get_element<i8>(typed_array, index);
                case TypedArrayBase::Kind::Int16Array:
                    return fast_typed_array_get_element<i16>(typed_array, index);
                case TypedArrayBase::Kind::Int32Array:
                    return fast_typed_array_get_element<i32>(typed_array, index);
                case TypedArrayBase::Kind::Uint8ClampedArray:
                    // NOTE: clamping only applies on write; reads are plain u8.
                    return fast_typed_array_get_element<u8>(typed_array, index);
                case TypedArrayBase::Kind::Float16Array:
                    return fast_typed_array_get_element<f16>(typed_array, index);
                case TypedArrayBase::Kind::Float32Array:
                    return fast_typed_array_get_element<float>(typed_array, index);
                case TypedArrayBase::Kind::Float64Array:
                    return fast_typed_array_get_element<double>(typed_array, index);
                default:
                    // FIXME: Support more TypedArray kinds.
                    break;
                }
            }
            // Slow-but-still-typed path (e.g. out-of-bounds index).
            switch (typed_array.kind()) {
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    case TypedArrayBase::Kind::ClassName:                                           \
        return typed_array_get_element<Type>(typed_array, canonical_index);
                JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE
            }
        }
    }

    // Generic path: coerce the base to an object (throwing on null/undefined)
    // and perform an ordinary [[Get]].
    auto object = TRY(base_object_for_get(vm, base_value, base_identifier, property_key_value, executable));

    auto property_key = TRY(property_key_value.to_property_key(vm));
    // Strings answer index/length lookups without a wrapper object allocation.
    if (base_value.is_string()) {
        auto string_value = TRY(base_value.as_string().get(vm, property_key));
        if (string_value.has_value())
            return *string_value;
    }
    return TRY(object->internal_get(property_key, base_value));
}
2026-04-13 11:54:04 +02:00
// Looks up a global (or module-scope) binding by identifier, using a per-site
// GlobalVariableCache that remembers either a global-object property offset or
// a declarative/module environment binding index.
inline ThrowCompletionOr<Value> get_global(VM& vm, IdentifierTableIndex identifier_index, Strict strict, GlobalVariableCache& cache)
{
    auto& binding_object = vm.global_object();
    auto& declarative_record = vm.global_declarative_environment();
    auto& shape = binding_object.shape();
    // The cache is only trusted while the declarative environment's serial
    // number matches the one recorded when the cache was filled.
    if (cache.environment_serial_number == declarative_record.environment_serial_number()) {
        // OPTIMIZATION: For global var bindings, if the shape of the global object hasn't changed,
        //               we can use the cached property offset.
        if (&shape == cache.entries[0].shape && (!shape.is_dictionary() || shape.dictionary_generation() == cache.entries[0].shape_dictionary_generation)) {
            auto value = binding_object.get_direct(cache.entries[0].property_offset);
            if (value.is_accessor())
                return TRY(call(vm, value.as_accessor().getter(), &binding_object));
            return value;
        }
        // OPTIMIZATION: For global lexical bindings, if the global declarative environment hasn't changed,
        //               we can use the cached environment binding index.
        if (cache.has_environment_binding_index) {
            if (cache.in_module_environment) {
                auto module = vm.running_execution_context().script_or_module.get_pointer<GC::Ref<Module>>();
                return (*module)->environment()->get_binding_value_direct(vm, cache.environment_binding_index);
            }
            return declarative_record.get_binding_value_direct(vm, cache.environment_binding_index);
        }
    }
    // Cache miss: refill below and record the current serial number.
    cache.environment_serial_number = declarative_record.environment_serial_number();

    auto& identifier = vm.get_identifier(identifier_index);

    if (auto* module = vm.running_execution_context().script_or_module.get_pointer<GC::Ref<Module>>()) {
        // NOTE: GetGlobal is used to access variables stored in the module environment and global environment.
        //       The module environment is checked first since it precedes the global environment in the environment chain.
        auto& module_environment = *(*module)->environment();
        Optional<size_t> index;
        if (TRY(module_environment.has_binding(identifier, &index))) {
            if (index.has_value()) {
                cache.environment_binding_index = static_cast<u32>(index.value());
                cache.has_environment_binding_index = true;
                cache.in_module_environment = true;
                return TRY(module_environment.get_binding_value_direct(vm, index.value()));
            }
            // Binding exists but has no direct index; use the named lookup.
            return TRY(module_environment.get_binding_value(vm, identifier, true));
        }
    }
    // Global lexical bindings live in the global declarative environment.
    Optional<size_t> offset;
    if (TRY(declarative_record.has_binding(identifier, &offset))) {
        cache.environment_binding_index = static_cast<u32>(offset.value());
        cache.has_environment_binding_index = true;
        cache.in_module_environment = false;
        return TRY(declarative_record.get_binding_value(vm, identifier, strict == Strict::Yes));
    }

    // Finally, global var bindings are properties of the global object itself.
    if (TRY(binding_object.has_property(identifier))) [[likely]] {
        CacheableGetPropertyMetadata cacheable_metadata;
        auto value = TRY(binding_object.internal_get(identifier, &binding_object, &cacheable_metadata));
        // Only cache plain own-property reads; anything else (proxies,
        // prototype hits, accessors resolved elsewhere) stays uncached.
        if (cacheable_metadata.type == CacheableGetPropertyMetadata::Type::GetOwnProperty) {
            cache.entries[0].shape = shape;
            cache.entries[0].property_offset = cacheable_metadata.property_offset.value();
            if (shape.is_dictionary()) {
                cache.entries[0].shape_dictionary_generation = shape.dictionary_generation();
            }
        }
        return value;
    }
    return vm.throw_completion<ReferenceError>(ErrorType::UnknownIdentifier, identifier);
}
2026-04-13 11:54:04 +02:00
// Builds the TypeError for calling a non-callable / non-constructible value,
// including the source expression text when the executable recorded one.
static COLD Completion throw_type_error_for_callee(VM& vm, Value callee, StringView callee_type, Optional<StringTableIndex> const expression_string)
{
    if (!expression_string.has_value())
        return vm.throw_completion<TypeError>(ErrorType::IsNotA, callee, callee_type);

    auto const& expression_text = vm.current_executable().get_string(*expression_string);
    return vm.throw_completion<TypeError>(ErrorType::IsNotAEvaluatedFrom, callee, callee_type, expression_text);
}
2026-04-13 11:54:04 +02:00
// Validates a callee before dispatch: plain calls and direct eval require a
// function, `new` requires a constructor. Throws a TypeError otherwise.
inline ThrowCompletionOr<void> throw_if_needed_for_call(VM& vm, Value callee, Op::CallType call_type, Optional<StringTableIndex> const expression_string)
{
    bool const is_plain_call = call_type == Op::CallType::Call || call_type == Op::CallType::DirectEval;
    if (is_plain_call && !callee.is_function()) [[unlikely]]
        return throw_type_error_for_callee(vm, callee, "function"sv, expression_string);

    if (call_type == Op::CallType::Construct && !callee.is_constructor()) [[unlikely]]
        return throw_type_error_for_callee(vm, callee, "constructor"sv, expression_string);

    return {};
}
2026-04-13 11:54:04 +02:00
// Instantiates an ECMAScriptFunctionObject from pre-parsed shared function
// data, choosing the intrinsic prototype matching the function's kind and
// optionally binding a home object (for methods / super access).
inline Value new_function(VM& vm, u32 shared_function_data_index, Optional<Operand> const home_object)
{
    auto& shared_data = *vm.current_executable().shared_function_data[shared_function_data_index];
    auto& realm = *vm.current_realm();

    auto prototype_for_kind = [&]() -> GC::Ref<Object> {
        auto& intrinsics = realm.intrinsics();
        switch (shared_data.m_kind) {
        case FunctionKind::Normal:
            return intrinsics.function_prototype();
        case FunctionKind::Generator:
            return intrinsics.generator_function_prototype();
        case FunctionKind::Async:
            return intrinsics.async_function_prototype();
        case FunctionKind::AsyncGenerator:
            return intrinsics.async_generator_function_prototype();
        }
        VERIFY_NOT_REACHED();
    };
    auto prototype = prototype_for_kind();

    auto function = ECMAScriptFunctionObject::create_from_function_data(
        realm, shared_data,
        vm.lexical_environment(),
        vm.running_execution_context().private_environment,
        *prototype);

    if (home_object.has_value()) {
        auto home_object_value = vm.get(home_object.value());
        function->make_method(home_object_value.as_object());
    }

    return function;
}
2026-03-04 10:33:38 +01:00
// Stores `value` into `base[property_key_value]`.
// Tries two fast paths before falling back to the generic property-key write:
// (1) plain objects with simple indexed storage, (2) typed arrays with an
// in-bounds integer index. `base_identifier` is only used for diagnostics in
// the slow path.
inline ThrowCompletionOr<void> put_by_value(VM& vm, Value base, Optional<Utf16FlyString const&> const base_identifier, Value property_key_value, Value value, PutKind kind, Strict strict)
{
    // OPTIMIZATION: Fast path for simple Int32 indexes in array-like objects.
    if (kind == PutKind::Normal
        && base.is_object() && property_key_value.is_non_negative_int32()) {
        auto& object = base.as_object();
        auto index = static_cast<u32>(property_key_value.as_i32());

        // For "non-typed arrays":
        // Only safe when the object cannot intercept indexed access and its
        // indexed storage is a simple kind (not None, not Dictionary).
        if (!object.may_interfere_with_indexed_property_access()
            && object.indexed_storage_kind() != IndexedStorageKind::None
            && object.indexed_storage_kind() != IndexedStorageKind::Dictionary) {
            auto maybe_value = object.indexed_get(index);
            if (maybe_value.has_value()) {
                auto existing_value = maybe_value->value;
                // An accessor must run its setter, so only overwrite plain data values here.
                if (!existing_value.is_accessor()) {
                    object.indexed_put(index, value);
                    return {};
                }
            }
        }

        // For typed arrays:
        if (object.is_typed_array()) {
            auto& typed_array = static_cast<TypedArrayBase&>(object);
            auto canonical_index = CanonicalIndex { CanonicalIndex::Type::Index, index };
            if (is_valid_integer_index(typed_array, canonical_index)) {
                if (value.is_int32()) {
                    // Int32 source value: direct element store with a numeric cast per kind.
                    switch (typed_array.kind()) {
                    case TypedArrayBase::Kind::Uint8Array:
                        fast_typed_array_set_element<u8>(typed_array, index, static_cast<u8>(value.as_i32()));
                        return {};
                    case TypedArrayBase::Kind::Uint16Array:
                        fast_typed_array_set_element<u16>(typed_array, index, static_cast<u16>(value.as_i32()));
                        return {};
                    case TypedArrayBase::Kind::Uint32Array:
                        fast_typed_array_set_element<u32>(typed_array, index, static_cast<u32>(value.as_i32()));
                        return {};
                    case TypedArrayBase::Kind::Int8Array:
                        fast_typed_array_set_element<i8>(typed_array, index, static_cast<i8>(value.as_i32()));
                        return {};
                    case TypedArrayBase::Kind::Int16Array:
                        fast_typed_array_set_element<i16>(typed_array, index, static_cast<i16>(value.as_i32()));
                        return {};
                    case TypedArrayBase::Kind::Int32Array:
                        fast_typed_array_set_element<i32>(typed_array, index, value.as_i32());
                        return {};
                    case TypedArrayBase::Kind::Uint8ClampedArray:
                        fast_typed_array_set_element<u8>(typed_array, index, clamp(value.as_i32(), 0, 255));
                        return {};
                    default:
                        break;
                    }
                } else if (value.is_double()) {
                    // Double source value: float kinds store directly; integer kinds
                    // go through to_iN/to_uN, which cannot throw for a plain number (hence MUST).
                    switch (typed_array.kind()) {
                    case TypedArrayBase::Kind::Float16Array:
                        fast_typed_array_set_element<f16>(typed_array, index, static_cast<f16>(value.as_double()));
                        return {};
                    case TypedArrayBase::Kind::Float32Array:
                        fast_typed_array_set_element<float>(typed_array, index, static_cast<float>(value.as_double()));
                        return {};
                    case TypedArrayBase::Kind::Float64Array:
                        fast_typed_array_set_element<double>(typed_array, index, value.as_double());
                        return {};
                    case TypedArrayBase::Kind::Int8Array:
                        fast_typed_array_set_element<i8>(typed_array, index, MUST(value.to_i8(vm)));
                        return {};
                    case TypedArrayBase::Kind::Int16Array:
                        fast_typed_array_set_element<i16>(typed_array, index, MUST(value.to_i16(vm)));
                        return {};
                    case TypedArrayBase::Kind::Int32Array:
                        fast_typed_array_set_element<i32>(typed_array, index, MUST(value.to_i32(vm)));
                        return {};
                    case TypedArrayBase::Kind::Uint8Array:
                        fast_typed_array_set_element<u8>(typed_array, index, MUST(value.to_u8(vm)));
                        return {};
                    case TypedArrayBase::Kind::Uint16Array:
                        fast_typed_array_set_element<u16>(typed_array, index, MUST(value.to_u16(vm)));
                        return {};
                    case TypedArrayBase::Kind::Uint32Array:
                        fast_typed_array_set_element<u32>(typed_array, index, MUST(value.to_u32(vm)));
                        return {};
                    default:
                        break;
                    }
                }
                // FIXME: Support more TypedArray kinds.
            }
            // Integral-but-not-int32 values (e.g. large doubles) into Uint32Array.
            if (typed_array.kind() == TypedArrayBase::Kind::Uint32Array && value.is_integral_number()) {
                auto integer = value.as_double();
                if (AK::is_within_range<u32>(integer) && is_valid_integer_index(typed_array, canonical_index)) {
                    fast_typed_array_set_element<u32>(typed_array, index, static_cast<u32>(integer));
                    return {};
                }
            }
            // Generic typed-array element write (handles conversions, detached buffers, etc.).
            switch (typed_array.kind()) {
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    case TypedArrayBase::Kind::ClassName:                                           \
        return typed_array_set_element<Type>(typed_array, canonical_index, value);
                JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE
            }
            return {};
        }
    }

    // Slow path: convert to a property key and go through the generic write.
    auto property_key = TRY(property_key_value.to_property_key(vm));
    TRY(put_by_property_key(vm, base, base, value, base_identifier, property_key, kind, strict));
    return {};
}
// Result of resolving a callee by name: the callable value itself plus the
// `this` value implied by the reference (e.g. a `with` statement's base object).
struct CalleeAndThis {
    Value callee;
    Value this_value;
};
2026-04-13 11:54:04 +02:00
// Resolves `name` to a callee and its implicit `this` value.
// Uses the cached EnvironmentCoordinate (hops + index) when it is still valid;
// any environment tainted by a direct eval forces the slow path and clears the
// cache so the next lookup re-resolves the binding.
inline ThrowCompletionOr<CalleeAndThis> get_callee_and_this_from_environment(VM& vm, Utf16FlyString const& name, Strict strict, EnvironmentCoordinate& cache)
{
    Value callee = js_undefined();
    if (cache.is_valid()) [[likely]] {
        auto const* environment = vm.running_execution_context().lexical_environment.ptr();
        // Walk up `hops` environments; bail out if any of them may have been
        // mutated by eval (the cached index could be stale).
        for (size_t i = 0; i < cache.hops; ++i) {
            if (environment->is_permanently_screwed_by_eval()) [[unlikely]]
                goto slow_path;
            environment = environment->outer_environment();
        }
        if (!environment->is_permanently_screwed_by_eval()) [[likely]] {
            callee = TRY(static_cast<DeclarativeEnvironment const&>(*environment).get_binding_value_direct(vm, cache.index));
            auto this_value = js_undefined();
            // Only `with` environments supply a base object as `this`.
            if (auto base_object = environment->with_base_object()) [[unlikely]]
                this_value = base_object;
            return CalleeAndThis {
                .callee = callee,
                .this_value = this_value,
            };
        }
    slow_path:
        // The cached coordinate can no longer be trusted; drop it.
        cache = {};
    }

    // Slow path: full binding resolution; repopulate the cache when possible.
    auto reference = TRY(vm.resolve_binding(name, strict));
    if (reference.environment_coordinate().has_value())
        cache = reference.environment_coordinate().value();
    callee = TRY(reference.get_value(vm));

    Value this_value;
    if (reference.is_property_reference()) {
        this_value = reference.get_this_value();
    } else {
        if (reference.is_environment_reference()) {
            // As above: only a `with` base object contributes a `this` value.
            if (auto base_object = reference.base_environment().with_base_object()) [[unlikely]]
                this_value = base_object;
        }
    }
    return CalleeAndThis {
        .callee = callee,
        .this_value = this_value,
    };
}
// 13.2.7.3 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-regular-expression-literals-runtime-semantics-evaluation
LibJS+LibRegex: Switch RegExp over to the Rust engine
Switch LibJS `RegExp` over to the Rust-backed `ECMAScriptRegex` APIs.
Route `new RegExp()`, regex literals, and the RegExp builtins through
the new compile and exec APIs, and stop re-validating patterns with the
deleted C++ parser on the way in. Preserve the observable error
behavior by carrying structured compile errors and backtracking-limit
failures across the FFI boundary. Cache compiled regex state and named
capture metadata on `RegExpObject` in the new representation.
Use the new API surface to simplify and speed up the builtin paths too:
share `exec_internal`, cache compiled regex pointers, keep the legacy
RegExp statics lazy, run global replace through batch `find_all`, and
optimize replace, test, split, and String helper paths. Add regression
tests for those JavaScript-visible paths.
2026-03-25 10:52:40 +01:00
inline Value new_regexp ( VM & vm , Utf16String pattern , Utf16String flags )
2024-07-09 10:10:14 +02:00
{
// 1. Let pattern be CodePointsToString(BodyText of RegularExpressionLiteral).
// 2. Let flags be CodePointsToString(FlagText of RegularExpressionLiteral).
// 3. Return ! RegExpCreate(pattern, flags).
auto & realm = * vm . current_realm ( ) ;
// NOTE: We bypass RegExpCreate and subsequently RegExpAlloc as an optimization to use the already parsed values.
LibJS+LibRegex: Switch RegExp over to the Rust engine
Switch LibJS `RegExp` over to the Rust-backed `ECMAScriptRegex` APIs.
Route `new RegExp()`, regex literals, and the RegExp builtins through
the new compile and exec APIs, and stop re-validating patterns with the
deleted C++ parser on the way in. Preserve the observable error
behavior by carrying structured compile errors and backtracking-limit
failures across the FFI boundary. Cache compiled regex state and named
capture metadata on `RegExpObject` in the new representation.
Use the new API surface to simplify and speed up the builtin paths too:
share `exec_internal`, cache compiled regex pointers, keep the legacy
RegExp statics lazy, run global replace through batch `find_all`, and
optimize replace, test, split, and String helper paths. Add regression
tests for those JavaScript-visible paths.
2026-03-25 10:52:40 +01:00
auto regexp_object = RegExpObject : : create ( realm , move ( pattern ) , move ( flags ) ) ;
2024-07-09 10:10:14 +02:00
// RegExpAlloc has these two steps from the 'Legacy RegExp features' proposal.
regexp_object - > set_realm ( realm ) ;
// We don't need to check 'If SameValue(newTarget, thisRealm.[[Intrinsics]].[[%RegExp%]]) is true'
// here as we know RegExpCreate calls RegExpAlloc with %RegExp% for newTarget.
regexp_object - > set_legacy_features_enabled ( true ) ;
return regexp_object ;
}
2025-08-02 19:27:29 -04:00
// Creates a binding named `name` in the lexical, variable, or global
// environment, depending on `mode` and `is_global`.
inline ThrowCompletionOr<void> create_variable(VM& vm, Utf16FlyString const& name, Op::EnvironmentMode mode, bool is_global, bool is_immutable, bool is_strict)
{
    if (mode == Op::EnvironmentMode::Lexical) {
        VERIFY(!is_global);

        // Note: This is papering over an issue where "FunctionDeclarationInstantiation"
        // creates these bindings for us. Instead of crashing in there, we'll just
        // raise an exception here.
        auto lexical_environment = vm.lexical_environment();
        if (TRY(lexical_environment->has_binding(name))) [[unlikely]]
            return vm.throw_completion<InternalError>(TRY_OR_THROW_OOM(vm, String::formatted("Lexical environment already has binding '{}'", name)));

        return is_immutable
            ? lexical_environment->create_immutable_binding(vm, name, is_strict)
            : lexical_environment->create_mutable_binding(vm, name, is_strict);
    }

    if (is_global) {
        // NOTE: CreateVariable with m_is_global set to true is expected to only be used in
        // GlobalDeclarationInstantiation currently, which only uses "false" for "can_be_deleted".
        // The only area that sets "can_be_deleted" to true is EvalDeclarationInstantiation,
        // which is currently fully implemented in C++ and not in Bytecode.
        return as<GlobalEnvironment>(vm.variable_environment())->create_global_var_binding(name, false);
    }

    auto variable_environment = vm.variable_environment();
    return is_immutable
        ? variable_environment->create_immutable_binding(vm, name, is_strict)
        : variable_environment->create_mutable_binding(vm, name, is_strict);
}
2024-11-15 04:01:23 +13:00
// Drains `iterator` (an IteratorRecord cell) into a fresh Array, preserving order.
inline ThrowCompletionOr<GC::Ref<Array>> iterator_to_array(VM& vm, Value iterator)
{
    auto& iterator_record = static_cast<IteratorRecord&>(iterator.as_cell());
    auto array = MUST(Array::create(*vm.current_realm(), 0));
    for (size_t index = 0;; ++index) {
        auto next_value = TRY(iterator_step_value(vm, iterator_record));
        // An empty optional signals iterator exhaustion.
        if (!next_value.has_value())
            return array;
        MUST(array->create_data_property_or_throw(index, next_value.release_value()));
    }
}
inline ThrowCompletionOr<void> append(VM& vm, Value lhs, Value rhs, bool is_spread)
{
    // Note: This OpCode is used to construct array literals and argument arrays for calls,
    // containing at least one spread element. Iterating over such a spread element to
    // unpack it has to be visible by the user courtesy of:
    // (1) https://tc39.es/ecma262/#sec-runtime-semantics-arrayaccumulation
    //     SpreadElement : ... AssignmentExpression
    //     Evaluates the expression, gets an iterator record for it, and repeatedly
    //     steps the iterator, appending each value via CreateDataPropertyOrThrow
    //     at successive indices.
    // (2) https://tc39.es/ecma262/#sec-runtime-semantics-argumentlistevaluation
    //     ArgumentList : ... AssignmentExpression
    //     ArgumentList : ArgumentList , ... AssignmentExpression
    //     Likewise steps an iterator record and appends each produced value to
    //     the argument list.
    // Note: We know from codegen, that lhs is a plain array with only indexed properties
    auto& destination = lhs.as_array_exotic_object();
    size_t next_index = destination.indexed_array_like_size();

    if (!is_spread) {
        destination.indexed_put(next_index, rhs);
        return {};
    }

    // ...rhs
    TRY(get_iterator_values(vm, rhs, [&next_index, &destination](Value iterator_value) -> Optional<Completion> {
        destination.indexed_put(next_index, iterator_value);
        ++next_index;
        return {};
    }));
    return {};
}
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
// Snapshot of everything needed to build (and later revalidate) a cached
// for-in property-name iterator for a single receiver object.
struct FastPropertyNameIteratorData {
    // Flattened list of enumerable property keys, in visitation order.
    Vector<PropertyKey> properties;
    // Which fast-path layout this snapshot was taken for.
    PropertyNameIterator::FastPath fast_path { PropertyNameIterator::FastPath::None };
    // Receiver's packed indexed storage length at snapshot time
    // (only meaningful when fast_path is PackedIndexed).
    u32 indexed_property_count { 0 };
    // Whether the receiver had a magical `length` property (e.g. arrays);
    // part of the cache key so arrays and plain objects don't share snapshots.
    bool receiver_has_magical_length_property { false };
    // The receiver's shape the snapshot was taken against.
    GC::Ptr<Shape> shape;
    // Validity token for the prototype chain above the receiver, if any.
    GC::Ptr<PrototypeChainValidity> prototype_chain_validity;
};
2025-05-03 10:17:11 +02:00
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
// Returns true if `shape` declares at least one enumerable string-keyed property.
static bool shape_has_enumerable_string_property(Shape const& shape)
{
    for (auto const& [property_key, metadata] : shape.property_table()) {
        if (!property_key.is_string())
            continue;
        if (metadata.attributes.is_enumerable())
            return true;
    }
    return false;
}
2025-05-03 10:17:11 +02:00
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
// Walks the prototype chain of `object` and re-checks the structural guards
// under which a fast-path key snapshot was taken:
//  - every object on the chain must still be eligible for fast own-property
//    enumeration,
//  - the receiver must still match the snapshot's indexed-storage assumption
//    (packed with the same length for PackedIndexed, otherwise no indexed
//    properties at all),
//  - no prototype may have grown indexed properties.
// Returns false as soon as any guard fails.
static bool property_name_iterator_fast_path_is_still_eligible(Object& object, PropertyNameIterator::FastPath fast_path, u32 indexed_property_count)
{
    Object const* object_to_check = &object;
    bool is_receiver = true;
    while (object_to_check) {
        if (!object_to_check->eligible_for_own_property_enumeration_fast_path())
            return false;
        if (is_receiver) {
            if (fast_path == PropertyNameIterator::FastPath::PackedIndexed) {
                // The snapshot baked in a packed indexed prefix; both the storage
                // kind and its length must be unchanged for it to stay valid.
                if (object_to_check->indexed_storage_kind() != IndexedStorageKind::Packed)
                    return false;
                if (object_to_check->indexed_array_like_size() != indexed_property_count)
                    return false;
            } else if (object_to_check->indexed_array_like_size() != 0) {
                // Snapshot was taken without indexed properties on the receiver.
                return false;
            }
        } else if (object_to_check->indexed_array_like_size() != 0) {
            // Indexed properties appearing anywhere above the receiver were never
            // part of the snapshot.
            return false;
        }
        object_to_check = object_to_check->prototype();
        is_receiver = false;
    }
    return true;
}
2025-05-03 10:17:11 +02:00
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
static bool object_property_iterator_cache_matches(Object& object, ObjectPropertyIteratorCacheData const& cache)
{
    // A cache entry is the fully flattened key snapshot for one bytecode site.
    // It may only be reused while the receiver's local state is unchanged and
    // the prototype chain validity token says nothing above it has changed.
    if (object.has_magical_length_property() != cache.receiver_has_magical_length_property())
        return false;

    auto& shape = object.shape();
    if (&shape != cache.shape())
        return false;
    if (shape.is_dictionary() && shape.dictionary_generation() != cache.shape_dictionary_generation())
        return false;

    if (auto validity = cache.prototype_chain_validity(); validity && !validity->is_valid())
        return false;

    return property_name_iterator_fast_path_is_still_eligible(object, cache.fast_path(), cache.indexed_property_count());
}
static ThrowCompletionOr < Optional < FastPropertyNameIteratorData > > try_get_fast_property_name_iterator_data ( Object & object )
{
auto & vm = object . vm ( ) ;
FastPropertyNameIteratorData result { } ;
result . fast_path = PropertyNameIterator : : FastPath : : PlainNamed ;
result . receiver_has_magical_length_property = object . has_magical_length_property ( ) ;
result . shape = & object . shape ( ) ;
HashTable < GC : : Ref < Object > > seen_objects ;
size_t estimated_properties_count = 0 ;
bool prototype_chain_has_enumerable_named_properties = false ;
for ( auto object_to_check = GC : : Ptr { & object } ; object_to_check & & ! seen_objects . contains ( * object_to_check ) ; object_to_check = TRY ( object_to_check - > internal_get_prototype_of ( ) ) ) {
seen_objects . set ( * object_to_check ) ;
if ( ! object_to_check - > eligible_for_own_property_enumeration_fast_path ( ) )
return Optional < FastPropertyNameIteratorData > { } ;
if ( & object = = object_to_check . ptr ( ) ) {
if ( object_to_check - > indexed_array_like_size ( ) ! = 0 ) {
if ( object_to_check - > indexed_storage_kind ( ) ! = IndexedStorageKind : : Packed )
return Optional < FastPropertyNameIteratorData > { } ;
result . fast_path = PropertyNameIterator : : FastPath : : PackedIndexed ;
result . indexed_property_count = object_to_check - > indexed_array_like_size ( ) ;
} else {
result . fast_path = PropertyNameIterator : : FastPath : : PlainNamed ;
}
} else if ( object_to_check - > indexed_array_like_size ( ) ! = 0 ) {
// The fast path only knows how to synthesize a packed indexed prefix
// for the receiver itself. As soon as indexed properties appear in
// the prototype chain, we fall back to the generic enumeration path.
return Optional < FastPropertyNameIteratorData > { } ;
} else if ( ! prototype_chain_has_enumerable_named_properties ) {
prototype_chain_has_enumerable_named_properties = shape_has_enumerable_string_property ( object_to_check - > shape ( ) ) ;
2025-05-03 10:17:11 +02:00
}
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
estimated_properties_count + = object_to_check - > shape ( ) . property_count ( ) ;
2025-05-03 10:17:11 +02:00
}
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
seen_objects . clear_with_capacity ( ) ;
2025-05-03 10:17:11 +02:00
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
if ( auto * prototype = object . shape ( ) . prototype ( ) ) {
result . prototype_chain_validity = prototype - > shape ( ) . prototype_chain_validity ( ) ;
if ( ! result . prototype_chain_validity )
return Optional < FastPropertyNameIteratorData > { } ;
2025-05-03 10:17:11 +02:00
}
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
if ( ! prototype_chain_has_enumerable_named_properties ) {
// Common case: only the receiver contributes enumerable string keys, so
// we can copy them straight from the shape without any shadowing work.
result . properties . ensure_capacity ( object . shape ( ) . property_count ( ) ) ;
for ( auto const & [ property_key , metadata ] : object . shape ( ) . property_table ( ) ) {
if ( property_key . is_string ( ) & & metadata . attributes . is_enumerable ( ) )
result . properties . append ( property_key ) ;
}
return result ;
2025-05-03 10:17:11 +02:00
}
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
result . properties . ensure_capacity ( estimated_properties_count ) ;
HashTable < PropertyKey > seen_non_enumerable_properties ;
Optional < HashTable < PropertyKey > > seen_properties ;
auto ensure_seen_properties = [ & ] {
if ( seen_properties . has_value ( ) )
return ;
// Prototype shadowing ignores enumerability, so once we start looking
// above the receiver we need an explicit visited set for names we have
// already decided to expose from lower objects.
seen_properties = HashTable < PropertyKey > { } ;
seen_properties - > ensure_capacity ( result . properties . size ( ) ) ;
for ( auto const & property : result . properties )
seen_properties - > set ( property ) ;
} ;
2025-05-03 10:17:11 +02:00
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
bool in_prototype_chain = false ;
for ( auto object_to_check = GC : : Ptr { & object } ; object_to_check & & ! seen_objects . contains ( * object_to_check ) ; object_to_check = TRY ( object_to_check - > internal_get_prototype_of ( ) ) ) {
seen_objects . set ( * object_to_check ) ;
// Arrays keep a non-enumerable magical `length` property outside the shape
// table, but it still shadows enumerable `length` properties higher up the
// prototype chain during for-in.
if ( object_to_check - > has_magical_length_property ( ) )
seen_non_enumerable_properties . set ( vm . names . length ) ;
for ( auto const & [ property_key , metadata ] : object_to_check - > shape ( ) . property_table ( ) ) {
if ( ! property_key . is_string ( ) )
continue ;
bool enumerable = metadata . attributes . is_enumerable ( ) ;
if ( ! enumerable )
seen_non_enumerable_properties . set ( property_key ) ;
if ( in_prototype_chain & & enumerable ) {
if ( seen_non_enumerable_properties . contains ( property_key ) )
continue ;
ensure_seen_properties ( ) ;
if ( seen_properties - > contains ( property_key ) )
continue ;
}
if ( enumerable )
result . properties . append ( property_key ) ;
if ( seen_properties . has_value ( ) )
seen_properties - > set ( property_key ) ;
}
in_prototype_chain = true ;
}
return result ;
}
2025-05-03 10:17:11 +02:00
2024-07-09 10:10:14 +02:00
// 14.7.5.9 EnumerateObjectProperties ( O ), https://tc39.es/ecma262/#sec-enumerate-object-properties
2026-04-13 11:54:04 +02:00
inline ThrowCompletionOr < GC : : Ref < PropertyNameIterator > > get_object_property_iterator ( VM & vm , Value value , ObjectPropertyIteratorCache * cache = nullptr )
2024-07-09 10:10:14 +02:00
{
// While the spec does provide an algorithm, it allows us to implement it ourselves so long as we meet the following invariants:
// 1- Returned property keys do not include keys that are Symbols
// 2- Properties of the target object may be deleted during enumeration. A property that is deleted before it is processed by the iterator's next method is ignored
// 3- If new properties are added to the target object during enumeration, the newly added properties are not guaranteed to be processed in the active enumeration
// 4- A property name will be returned by the iterator's next method at most once in any enumeration.
// 5- Enumerating the properties of the target object includes enumerating properties of its prototype, and the prototype of the prototype, and so on, recursively;
// but a property of a prototype is not processed if it has the same name as a property that has already been processed by the iterator's next method.
// 6- The values of [[Enumerable]] attributes are not considered when determining if a property of a prototype object has already been processed.
// 7- The enumerable property names of prototype objects must be obtained by invoking EnumerateObjectProperties passing the prototype object as the argument.
// 8- EnumerateObjectProperties must obtain the own property keys of the target object by calling its [[OwnPropertyKeys]] internal method.
// 9- Property attributes of the target object must be obtained by calling its [[GetOwnProperty]] internal method
// Invariant 3 effectively allows the implementation to ignore newly added keys, and we do so (similar to other implementations).
auto object = TRY ( value . to_object ( vm ) ) ;
// Note: While the spec doesn't explicitly require these to be ordered, it says that the values should be retrieved via OwnPropertyKeys,
// so we just keep the order consistent anyway.
2025-03-19 17:08:24 -05:00
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
if ( cache & & cache - > data ) {
if ( object_property_iterator_cache_matches ( * object , * cache - > data ) ) {
if ( cache - > reusable_property_name_iterator ) {
// We keep one iterator object per bytecode site alive so hot
// loops can recycle it without allocating a new cell each time.
auto & iterator = static_cast < PropertyNameIterator & > ( * cache - > reusable_property_name_iterator ) ;
cache - > reusable_property_name_iterator = nullptr ;
iterator . reset_with_cache_data ( object , * cache - > data , cache ) ;
return iterator ;
}
2026-04-13 11:54:04 +02:00
return PropertyNameIterator : : create ( vm . realm ( ) , object , * cache - > data , cache ) ;
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
}
}
if ( auto fast_iterator_data = TRY ( try_get_fast_property_name_iterator_data ( * object ) ) ; fast_iterator_data . has_value ( ) ) {
VERIFY ( fast_iterator_data - > shape ) ;
auto cache_data = vm . heap ( ) . allocate < ObjectPropertyIteratorCacheData > (
vm ,
move ( fast_iterator_data - > properties ) ,
fast_iterator_data - > fast_path ,
fast_iterator_data - > indexed_property_count ,
fast_iterator_data - > receiver_has_magical_length_property ,
* fast_iterator_data - > shape ,
fast_iterator_data - > prototype_chain_validity ) ;
if ( cache )
cache - > data = cache_data ;
if ( cache & & cache - > reusable_property_name_iterator ) {
auto & iterator = static_cast < PropertyNameIterator & > ( * cache - > reusable_property_name_iterator ) ;
cache - > reusable_property_name_iterator = nullptr ;
iterator . reset_with_cache_data ( object , cache_data , cache ) ;
return iterator ;
}
2026-04-13 11:54:04 +02:00
return PropertyNameIterator : : create ( vm . realm ( ) , object , cache_data , cache ) ;
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
}
LibJS: Fast-path own-property enumeration and reduce descriptor lookups
Before this change, PropertyNameIterator (used by for..in) and
`Object::enumerable_own_property_names()` (used by `Object.keys()`,
`Object.values()`, and `Object.entries()`) enumerated an object's own
enumerable properties exactly as the spec prescribes:
- Call `internal_own_property_keys()`, allocating a list of JS::Value
keys.
- For each key, call internal_get_own_property() to obtain a
descriptor and check `[[Enumerable]]`.
While that is required in the general case (e.g. for Proxy objects or
platform/exotic objects that override `[[OwnPropertyKeys]]`), it's
overkill for ordinary JS objects that store their own properties in the
shape table and indexed-properties storage.
This change introduces `for_each_own_property_with_enumerability()`,
which, for objects where
`eligible_for_own_property_enumeration_fast_path()` is `true`, lets us
read the enumerability directly from shape metadata (and from
indexed-properties storage) without a per-property descriptor lookup.
When we cannot avoid `internal_get_own_property()`, we still
benefit by skipping the temporary `Vector<Value>` of keys and avoiding
the unnecessary round-trip between PropertyKey and Value.
2025-09-19 16:49:53 +02:00
size_t estimated_properties_count = 0 ;
2024-11-15 04:01:23 +13:00
HashTable < GC : : Ref < Object > > seen_objects ;
for ( auto object_to_check = GC : : Ptr { object . ptr ( ) } ; object_to_check & & ! seen_objects . contains ( * object_to_check ) ; object_to_check = TRY ( object_to_check - > internal_get_prototype_of ( ) ) ) {
2024-07-09 10:10:14 +02:00
seen_objects . set ( * object_to_check ) ;
LibJS: Fast-path own-property enumeration and reduce descriptor lookups
Before this change, PropertyNameIterator (used by for..in) and
`Object::enumerable_own_property_names()` (used by `Object.keys()`,
`Object.values()`, and `Object.entries()`) enumerated an object's own
enumerable properties exactly as the spec prescribes:
- Call `internal_own_property_keys()`, allocating a list of JS::Value
keys.
- For each key, call internal_get_own_property() to obtain a
descriptor and check `[[Enumerable]]`.
While that is required in the general case (e.g. for Proxy objects or
platform/exotic objects that override `[[OwnPropertyKeys]]`), it's
overkill for ordinary JS objects that store their own properties in the
shape table and indexed-properties storage.
This change introduces `for_each_own_property_with_enumerability()`,
which, for objects where
`eligible_for_own_property_enumeration_fast_path()` is `true`, lets us
read the enumerability directly from shape metadata (and from
indexed-properties storage) without a per-property descriptor lookup.
When we cannot avoid `internal_get_own_property()`, we still
benefit by skipping the temporary `Vector<Value>` of keys and avoiding
the unnecessary round-trip between PropertyKey and Value.
2025-09-19 16:49:53 +02:00
estimated_properties_count + = object_to_check - > own_properties_count ( ) ;
}
seen_objects . clear_with_capacity ( ) ;
2025-09-20 19:16:31 +02:00
Vector < PropertyKey > properties ;
LibJS: Fast-path own-property enumeration and reduce descriptor lookups
Before this change, PropertyNameIterator (used by for..in) and
`Object::enumerable_own_property_names()` (used by `Object.keys()`,
`Object.values()`, and `Object.entries()`) enumerated an object's own
enumerable properties exactly as the spec prescribes:
- Call `internal_own_property_keys()`, allocating a list of JS::Value
keys.
- For each key, call internal_get_own_property() to obtain a
descriptor and check `[[Enumerable]]`.
While that is required in the general case (e.g. for Proxy objects or
platform/exotic objects that override `[[OwnPropertyKeys]]`), it's
overkill for ordinary JS objects that store their own properties in the
shape table and indexed-properties storage.
This change introduces `for_each_own_property_with_enumerability()`,
which, for objects where
`eligible_for_own_property_enumeration_fast_path()` is `true`, lets us
read the enumerability directly from shape metadata (and from
indexed-properties storage) without a per-property descriptor lookup.
When we cannot avoid `internal_get_own_property()`, we still
benefit by skipping the temporary `Vector<Value>` of keys and avoiding
the unnecessary round-trip between PropertyKey and Value.
2025-09-19 16:49:53 +02:00
properties . ensure_capacity ( estimated_properties_count ) ;
2024-07-09 10:10:14 +02:00
2025-09-20 19:16:31 +02:00
HashTable < PropertyKey > seen_non_enumerable_properties ;
Optional < HashTable < PropertyKey > > seen_properties ;
auto ensure_seen_properties = [ & ] {
if ( seen_properties . has_value ( ) )
return ;
seen_properties = HashTable < PropertyKey > { } ;
seen_properties - > ensure_capacity ( properties . size ( ) ) ;
for ( auto const & property : properties )
seen_properties - > set ( property ) ;
} ;
LibJS: Fast-path own-property enumeration and reduce descriptor lookups
Before this change, PropertyNameIterator (used by for..in) and
`Object::enumerable_own_property_names()` (used by `Object.keys()`,
`Object.values()`, and `Object.entries()`) enumerated an object's own
enumerable properties exactly as the spec prescribes:
- Call `internal_own_property_keys()`, allocating a list of JS::Value
keys.
- For each key, call internal_get_own_property() to obtain a
descriptor and check `[[Enumerable]]`.
While that is required in the general case (e.g. for Proxy objects or
platform/exotic objects that override `[[OwnPropertyKeys]]`), it's
overkill for ordinary JS objects that store their own properties in the
shape table and indexed-properties storage.
This change introduces `for_each_own_property_with_enumerability()`,
which, for objects where
`eligible_for_own_property_enumeration_fast_path()` is `true`, lets us
read the enumerability directly from shape metadata (and from
indexed-properties storage) without a per-property descriptor lookup.
When we cannot avoid `internal_get_own_property()`, we still
benefit by skipping the temporary `Vector<Value>` of keys and avoiding
the unnecessary round-trip between PropertyKey and Value.
2025-09-19 16:49:53 +02:00
// Collect all keys immediately (invariant no. 5)
2025-09-20 19:16:31 +02:00
bool in_prototype_chain = false ;
LibJS: Fast-path own-property enumeration and reduce descriptor lookups
Before this change, PropertyNameIterator (used by for..in) and
`Object::enumerable_own_property_names()` (used by `Object.keys()`,
`Object.values()`, and `Object.entries()`) enumerated an object's own
enumerable properties exactly as the spec prescribes:
- Call `internal_own_property_keys()`, allocating a list of JS::Value
keys.
- For each key, call internal_get_own_property() to obtain a
descriptor and check `[[Enumerable]]`.
While that is required in the general case (e.g. for Proxy objects or
platform/exotic objects that override `[[OwnPropertyKeys]]`), it's
overkill for ordinary JS objects that store their own properties in the
shape table and indexed-properties storage.
This change introduces `for_each_own_property_with_enumerability()`,
which, for objects where
`eligible_for_own_property_enumeration_fast_path()` is `true`, lets us
read the enumerability directly from shape metadata (and from
indexed-properties storage) without a per-property descriptor lookup.
When we cannot avoid `internal_get_own_property()`, we still
benefit by skipping the temporary `Vector<Value>` of keys and avoiding
the unnecessary round-trip between PropertyKey and Value.
2025-09-19 16:49:53 +02:00
for ( auto object_to_check = GC : : Ptr { object . ptr ( ) } ; object_to_check & & ! seen_objects . contains ( * object_to_check ) ; object_to_check = TRY ( object_to_check - > internal_get_prototype_of ( ) ) ) {
seen_objects . set ( * object_to_check ) ;
TRY ( object_to_check - > for_each_own_property_with_enumerability ( [ & ] ( PropertyKey const & property_key , bool enumerable ) - > ThrowCompletionOr < void > {
2025-09-20 19:16:31 +02:00
if ( ! enumerable )
seen_non_enumerable_properties . set ( property_key ) ;
if ( in_prototype_chain & & enumerable ) {
if ( seen_non_enumerable_properties . contains ( property_key ) )
return { } ;
ensure_seen_properties ( ) ;
if ( seen_properties - > contains ( property_key ) )
return { } ;
}
if ( enumerable )
properties . append ( property_key ) ;
if ( seen_properties . has_value ( ) )
seen_properties - > set ( property_key ) ;
LibJS: Fast-path own-property enumeration and reduce descriptor lookups
Before this change, PropertyNameIterator (used by for..in) and
`Object::enumerable_own_property_names()` (used by `Object.keys()`,
`Object.values()`, and `Object.entries()`) enumerated an object's own
enumerable properties exactly as the spec prescribes:
- Call `internal_own_property_keys()`, allocating a list of JS::Value
keys.
- For each key, call internal_get_own_property() to obtain a
descriptor and check `[[Enumerable]]`.
While that is required in the general case (e.g. for Proxy objects or
platform/exotic objects that override `[[OwnPropertyKeys]]`), it's
overkill for ordinary JS objects that store their own properties in the
shape table and indexed-properties storage.
This change introduces `for_each_own_property_with_enumerability()`,
which, for objects where
`eligible_for_own_property_enumeration_fast_path()` is `true`, lets us
read the enumerability directly from shape metadata (and from
indexed-properties storage) without a per-property descriptor lookup.
When we cannot avoid `internal_get_own_property()`, we still
benefit by skipping the temporary `Vector<Value>` of keys and avoiding
the unnecessary round-trip between PropertyKey and Value.
2025-09-19 16:49:53 +02:00
return { } ;
} ) ) ;
2025-09-20 19:16:31 +02:00
in_prototype_chain = true ;
2024-07-09 10:10:14 +02:00
}
2025-03-19 17:08:24 -05:00
2026-04-13 11:54:04 +02:00
return PropertyNameIterator : : create ( vm . realm ( ) , object , move ( properties ) ) ;
2024-07-09 10:10:14 +02:00
}
2023-09-27 10:10:00 +02:00
2023-12-16 17:49:34 +03:30
// Renders this instruction as human-readable text by dispatching on its type
// to the matching Op subclass' to_byte_string_impl().
ByteString Instruction::to_byte_string(Bytecode::Executable const& executable) const
{
#define __BYTECODE_OP(op)       \
    case Instruction::Type::op: \
        return static_cast<Bytecode::Op::op const&>(*this).to_byte_string_impl(executable);

    switch (type()) {
        ENUMERATE_BYTECODE_OPS(__BYTECODE_OP)
    default:
        VERIFY_NOT_REACHED();
    }
#undef __BYTECODE_OP
}
}
namespace JS : : Bytecode : : Op {
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_EXECUTE_FOR_COMMON_BINARY_OP(OpTitleCase, op_snake_case) \
ThrowCompletionOr < void > OpTitleCase : : execute_impl ( VM & vm ) const \
{ \
auto lhs = vm . get ( m_lhs ) ; \
auto rhs = vm . get ( m_rhs ) ; \
vm . set ( m_dst , Value { TRY ( op_snake_case ( vm , lhs , rhs ) ) } ) ; \
return { } ; \
2023-09-27 10:10:00 +02:00
}
2024-02-20 11:59:46 +01:00
JS_ENUMERATE_COMMON_BINARY_OPS_WITHOUT_FAST_PATH ( JS_DEFINE_EXECUTE_FOR_COMMON_BINARY_OP )
2026-04-13 11:54:04 +02:00
// Addition with numeric fast paths: int32 + int32 stays int32 unless it would
// overflow (then widen to i64), other number pairs add as doubles, and
// non-numbers fall through to the generic add() helper.
ThrowCompletionOr<void> Add::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);

    if (!left.is_number() || !right.is_number()) [[unlikely]] {
        vm.set(m_dst, TRY(add(vm, left, right)));
        return {};
    }

    if (left.is_int32() && right.is_int32()) {
        auto const a = left.as_i32();
        auto const b = right.as_i32();
        if (!Checked<i32>::addition_would_overflow(a, b)) [[likely]]
            vm.set(m_dst, Value(a + b));
        else
            // Overflowed sums are exactly representable in i64.
            vm.set(m_dst, Value(static_cast<i64>(a) + static_cast<i64>(b), Value::CannotFitInInt32::Indeed));
        return {};
    }

    vm.set(m_dst, Value(left.as_double() + right.as_double()));
    return {};
}
2026-04-13 11:54:04 +02:00
// Multiplication with numeric fast paths, including correct -0 handling for
// int32 products that are mathematically zero.
ThrowCompletionOr<void> Mul::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);

    if (left.is_number() && right.is_number()) [[likely]] {
        if (left.is_int32() && right.is_int32()) {
            auto const a = left.as_i32();
            auto const b = right.as_i32();
            if (!Checked<i32>::multiplication_would_overflow(a, b)) [[likely]] {
                auto const product = a * b;
                if (product != 0) [[likely]] {
                    vm.set(m_dst, Value(product));
                    return {};
                }
                // NB: When the mathematical result is zero, the sign depends on
                // the operand signs; determine it directly instead of widening
                // to double. Opposite signs produce -0.
                vm.set(m_dst, (a < 0) != (b < 0) ? Value(-0.0) : Value(0));
                return {};
            }
            // Overflowed products fit in i64.
            vm.set(m_dst, Value(static_cast<i64>(a) * static_cast<i64>(b), Value::CannotFitInInt32::Indeed));
            return {};
        }
        vm.set(m_dst, Value(left.as_double() * right.as_double()));
        return {};
    }

    vm.set(m_dst, TRY(mul(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Division: any two numbers divide as doubles (JS division is always a double
// operation); everything else needs coercion via the generic div() helper.
ThrowCompletionOr<void> Div::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_number() && right.is_number()) [[likely]] {
        vm.set(m_dst, Value(left.as_double() / right.as_double()));
        return {};
    }
    vm.set(m_dst, TRY(div(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Remainder with an int32 fast path that carefully matches JS semantics:
// NaN for a zero divisor, -0 where the sign rules demand it, and no UB.
ThrowCompletionOr<void> Mod::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_number() && right.is_number()) [[likely]] {
        if (left.is_int32() && right.is_int32()) {
            auto const dividend = left.as_i32();
            auto const divisor = right.as_i32();
            // x % 0 is NaN in JS.
            if (divisor == 0) {
                vm.set(m_dst, js_nan());
                return {};
            }
            // INT32_MIN % -1 would be UB in C++; the JS result is -0
            // (zero remainder with a negative dividend).
            if (dividend == NumericLimits<i32>::min() && divisor == -1) {
                vm.set(m_dst, Value(-0.0));
                return {};
            }
            auto const remainder = dividend % divisor;
            // A zero remainder takes the dividend's sign.
            if (remainder == 0 && dividend < 0) {
                vm.set(m_dst, Value(-0.0));
                return {};
            }
            vm.set(m_dst, Value(remainder));
            return {};
        }
        vm.set(m_dst, Value(fmod(left.as_double(), right.as_double())));
        return {};
    }
    vm.set(m_dst, TRY(mod(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Subtraction with numeric fast paths, mirroring Add: int32 - int32 stays
// int32 unless it would overflow (then widen to i64), other number pairs
// subtract as doubles, non-numbers use the generic sub() helper.
ThrowCompletionOr<void> Sub::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);

    if (!left.is_number() || !right.is_number()) [[unlikely]] {
        vm.set(m_dst, TRY(sub(vm, left, right)));
        return {};
    }

    if (left.is_int32() && right.is_int32()) {
        auto const a = left.as_i32();
        auto const b = right.as_i32();
        if (!Checked<i32>::subtraction_would_overflow(a, b)) [[likely]]
            vm.set(m_dst, Value(a - b));
        else
            // Overflowed differences are exactly representable in i64.
            vm.set(m_dst, Value(static_cast<i64>(a) - static_cast<i64>(b), Value::CannotFitInInt32::Indeed));
        return {};
    }

    vm.set(m_dst, Value(left.as_double() - right.as_double()));
    return {};
}
2026-04-13 11:54:04 +02:00
// Bitwise XOR: two int32s combine directly; anything else goes through the
// coercing bitwise_xor() helper.
ThrowCompletionOr<void> BitwiseXor::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_int32() && right.is_int32())
        vm.set(m_dst, Value(left.as_i32() ^ right.as_i32()));
    else
        vm.set(m_dst, TRY(bitwise_xor(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Bitwise AND: two int32s combine directly; anything else goes through the
// coercing bitwise_and() helper.
ThrowCompletionOr<void> BitwiseAnd::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_int32() && right.is_int32())
        vm.set(m_dst, Value(left.as_i32() & right.as_i32()));
    else
        vm.set(m_dst, TRY(bitwise_and(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// ToInt32 conversion: values that are already int32 pass through unchanged;
// everything else is converted via Value::to_i32() (which may throw).
ThrowCompletionOr<void> ToInt32::execute_impl(VM& vm) const
{
    auto const input = vm.get(m_value);
    if (input.is_int32()) [[likely]]
        vm.set(m_dst, input);
    else
        vm.set(m_dst, Value(TRY(input.to_i32(vm))));
    return {};
}
2026-04-13 11:54:04 +02:00
// Converts the source value to a primitive string (may throw, e.g. for
// symbols or throwing toString()) and stores it in the destination.
ThrowCompletionOr<void> ToString::execute_impl(VM& vm) const
{
    auto string = TRY(vm.get(m_value).to_primitive_string(vm));
    vm.set(m_dst, Value { string });
    return {};
}
2026-04-13 11:54:04 +02:00
// Runs ToPrimitive on the source value with hint "string" and stores the
// resulting primitive in the destination.
ThrowCompletionOr<void> ToPrimitiveWithStringHint::execute_impl(VM& vm) const
{
    auto primitive = TRY(vm.get(m_value).to_primitive(vm, Value::PreferredType::String));
    vm.set(m_dst, primitive);
    return {};
}
2026-04-13 11:54:04 +02:00
// Bitwise OR: two int32s combine directly; anything else goes through the
// coercing bitwise_or() helper.
ThrowCompletionOr<void> BitwiseOr::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_int32() && right.is_int32())
        vm.set(m_dst, Value(left.as_i32() | right.as_i32()));
    else
        vm.set(m_dst, TRY(bitwise_or(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Unsigned (zero-filling) right shift with an int32 fast path; only the low
// five bits of the shift count are used, per the JS shift semantics.
ThrowCompletionOr<void> UnsignedRightShift::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_int32() && right.is_int32()) {
        auto const count = static_cast<u32>(right.as_i32()) % 32;
        vm.set(m_dst, Value(static_cast<u32>(left.as_i32()) >> count));
        return {};
    }
    vm.set(m_dst, TRY(unsigned_right_shift(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Signed (sign-propagating) right shift with an int32 fast path; the shift
// count is reduced modulo 32 as JS requires.
ThrowCompletionOr<void> RightShift::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_int32() && right.is_int32()) {
        auto const count = static_cast<u32>(right.as_i32()) % 32;
        vm.set(m_dst, Value(left.as_i32() >> count));
        return {};
    }
    vm.set(m_dst, TRY(right_shift(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Left shift with an int32 fast path; the shift count is reduced modulo 32
// as JS requires.
ThrowCompletionOr<void> LeftShift::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_int32() && right.is_int32()) {
        auto const count = static_cast<u32>(right.as_i32()) % 32;
        vm.set(m_dst, Value(left.as_i32() << count));
        return {};
    }
    vm.set(m_dst, TRY(left_shift(vm, left, right)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Less-than comparison: int32 pairs compare as integers, other number pairs
// as doubles, and non-numbers fall back to the coercing less_than() helper.
ThrowCompletionOr<void> LessThan::execute_impl(VM& vm) const
{
    auto const left = vm.get(m_lhs);
    auto const right = vm.get(m_rhs);
    if (left.is_number() && right.is_number()) [[likely]] {
        if (left.is_int32() && right.is_int32())
            vm.set(m_dst, Value(left.as_i32() < right.as_i32()));
        else
            vm.set(m_dst, Value(left.as_double() < right.as_double()));
        return {};
    }
    vm.set(m_dst, Value { TRY(less_than(vm, left, right)) });
    return {};
}
2026-04-13 11:54:04 +02:00
// Relational '<='. Numeric fast paths (Int32, then double) avoid the generic
// abstract-comparison helper, which may call user code during coercion.
ThrowCompletionOr<void> LessThanEquals::execute_impl(VM& vm) const
{
    auto const lhs = vm.get(m_lhs);
    auto const rhs = vm.get(m_rhs);
    if (lhs.is_number() && rhs.is_number()) [[likely]] {
        if (lhs.is_int32() && rhs.is_int32()) {
            vm.set(m_dst, Value(lhs.as_i32() <= rhs.as_i32()));
            return {};
        }
        vm.set(m_dst, Value(lhs.as_double() <= rhs.as_double()));
        return {};
    }
    vm.set(m_dst, Value { TRY(less_than_equals(vm, lhs, rhs)) });
    return {};
}
2026-04-13 11:54:04 +02:00
// Relational '>'. Numeric fast paths (Int32, then double) avoid the generic
// abstract-comparison helper, which may call user code during coercion.
ThrowCompletionOr<void> GreaterThan::execute_impl(VM& vm) const
{
    auto const lhs = vm.get(m_lhs);
    auto const rhs = vm.get(m_rhs);
    if (lhs.is_number() && rhs.is_number()) [[likely]] {
        if (lhs.is_int32() && rhs.is_int32()) {
            vm.set(m_dst, Value(lhs.as_i32() > rhs.as_i32()));
            return {};
        }
        vm.set(m_dst, Value(lhs.as_double() > rhs.as_double()));
        return {};
    }
    vm.set(m_dst, Value { TRY(greater_than(vm, lhs, rhs)) });
    return {};
}
2026-04-13 11:54:04 +02:00
// Relational '>='. Numeric fast paths (Int32, then double) avoid the generic
// abstract-comparison helper, which may call user code during coercion.
ThrowCompletionOr<void> GreaterThanEquals::execute_impl(VM& vm) const
{
    auto const lhs = vm.get(m_lhs);
    auto const rhs = vm.get(m_rhs);
    if (lhs.is_number() && rhs.is_number()) [[likely]] {
        if (lhs.is_int32() && rhs.is_int32()) {
            vm.set(m_dst, Value(lhs.as_i32() >= rhs.as_i32()));
            return {};
        }
        vm.set(m_dst, Value(lhs.as_double() >= rhs.as_double()));
        return {};
    }
    vm.set(m_dst, Value { TRY(greater_than_equals(vm, lhs, rhs)) });
    return {};
}
2023-09-27 10:10:00 +02:00
2026-04-13 11:54:04 +02:00
void Typeof : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , vm . get ( src ( ) ) . typeof_ ( vm ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void Not : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , Value ( ! vm . get ( src ( ) ) . to_boolean ( ) ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
// Stamps out execute_impl() for every "common" unary op: each one simply
// forwards src() through the corresponding AO helper and writes the result
// to dst(), propagating any thrown completion.
#define JS_DEFINE_COMMON_UNARY_OP(OpTitleCase, op_snake_case)       \
    ThrowCompletionOr<void> OpTitleCase::execute_impl(VM& vm) const \
    {                                                               \
        vm.set(dst(), TRY(op_snake_case(vm, vm.get(src()))));       \
        return {};                                                  \
    }
JS_ENUMERATE_COMMON_UNARY_OPS(JS_DEFINE_COMMON_UNARY_OP)
2026-04-13 11:54:04 +02:00
void NewArray : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
auto array = MUST ( Array : : create ( vm . realm ( ) , m_element_count ) ) ;
2023-09-27 10:10:00 +02:00
for ( size_t i = 0 ; i < m_element_count ; i + + ) {
2026-04-13 11:54:04 +02:00
array - > indexed_put ( i , vm . get ( m_elements [ i ] ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , array ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void NewPrimitiveArray : : execute_impl ( VM & vm ) const
2023-11-17 22:07:23 +02:00
{
2026-04-13 11:54:04 +02:00
auto array = MUST ( Array : : create ( vm . realm ( ) , m_element_count ) ) ;
2024-03-03 12:37:28 +01:00
for ( size_t i = 0 ; i < m_element_count ; i + + )
2026-03-17 00:54:54 -05:00
array - > indexed_put ( i , m_elements [ i ] ) ;
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , array ) ;
2023-11-17 22:07:23 +02:00
}
2026-01-06 19:49:38 +00:00
// 13.2.8.4 GetTemplateObject ( templateLiteral ), https://tc39.es/ecma262/#sec-gettemplateobject
2026-04-13 11:54:04 +02:00
void GetTemplateObject : : execute_impl ( VM & vm ) const
2026-01-06 19:49:38 +00:00
{
2026-03-07 22:52:25 +01:00
auto & cache = * bit_cast < TemplateObjectCache * > ( m_cache ) ;
2026-01-06 19:49:38 +00:00
// 1. Let realm be the current Realm Record.
auto & realm = * vm . current_realm ( ) ;
// 2. Let templateRegistry be realm.[[TemplateMap]].
// 3. For each element e of templateRegistry, do
// a. If e.[[Site]] is the same Parse Node as templateLiteral, then
// i. Return e.[[Array]].
if ( cache . cached_template_object ) {
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , cache . cached_template_object ) ;
2026-01-06 19:49:38 +00:00
return ;
}
// 4. Let rawStrings be the TemplateStrings of templateLiteral with argument true.
// 5. Assert: rawStrings is a List of Strings.
// 6. Let cookedStrings be the TemplateStrings of templateLiteral with argument false.
// NOTE: This has already been done.
// 7. Let count be the number of elements in the List cookedStrings.
// NOTE: m_strings contains [cooked_0, ..., cooked_n, raw_0, ..., raw_n]
// 8. Assert: count ≤ 2**32 - 1.
// NOTE: Done by having count be a u32.
u32 count = m_strings_count / 2 ;
// 9. Let template be ! ArrayCreate(count).
auto template_object = MUST ( Array : : create ( realm , count ) ) ;
// 10. Let rawObj be ! ArrayCreate(count).
auto raw_object = MUST ( Array : : create ( realm , count ) ) ;
// 12. Repeat, while index < count,
for ( size_t index = 0 ; index < count ; index + + ) {
// a. Let prop be ! ToString(𝔽 (index)).
// b. Let cookedValue be cookedStrings[index].
2026-04-13 11:54:04 +02:00
auto cooked_value = vm . get ( m_strings [ index ] ) ;
2026-01-06 19:49:38 +00:00
// c. Perform ! DefinePropertyOrThrow(template, prop, PropertyDescriptor { [[Value]]: cookedValue, [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: false }).
2026-03-17 00:54:54 -05:00
template_object - > indexed_put ( index , cooked_value , Attribute : : Enumerable ) ;
2026-01-06 19:49:38 +00:00
// d. Let rawValue be the String value rawStrings[index].
2026-04-13 11:54:04 +02:00
auto raw_value = vm . get ( m_strings [ count + index ] ) ;
2026-01-06 19:49:38 +00:00
// e. Perform ! DefinePropertyOrThrow(rawObj, prop, PropertyDescriptor { [[Value]]: rawValue, [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: false }).
2026-03-17 00:54:54 -05:00
raw_object - > indexed_put ( index , raw_value , Attribute : : Enumerable ) ;
2026-01-06 19:49:38 +00:00
// f. Set index to index + 1.
}
// 13. Perform ! SetIntegrityLevel(rawObj, FROZEN).
MUST ( raw_object - > set_integrity_level ( Object : : IntegrityLevel : : Frozen ) ) ;
// 14. Perform ! DefinePropertyOrThrow(template, "raw", PropertyDescriptor { [[Value]]: rawObj, [[Writable]]: false, [[Enumerable]]: false, [[Configurable]]: false }).
template_object - > define_direct_property ( vm . names . raw , raw_object , PropertyAttributes { } ) ;
// 15. Perform ! SetIntegrityLevel(template, FROZEN).
MUST ( template_object - > set_integrity_level ( Object : : IntegrityLevel : : Frozen ) ) ;
// 16. Append the Record { [[Site]]: templateLiteral, [[Array]]: template } to realm.[[TemplateMap]].
cache . cached_template_object = template_object ;
// 17. Return template.
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , template_object ) ;
2026-01-06 19:49:38 +00:00
}
2026-04-13 11:54:04 +02:00
// Create an array with a given length. The length operand is read as a double
// and truncated to u64; Array::create may throw (e.g. invalid length).
ThrowCompletionOr<void> NewArrayWithLength::execute_impl(VM& vm) const
{
    auto const length = static_cast<u64>(vm.get(m_array_length).as_double());
    vm.set(m_dst, TRY(Array::create(vm.realm(), length)));
    return {};
}
2026-04-13 11:54:04 +02:00
void AddPrivateName : : execute_impl ( VM & vm ) const
2024-05-11 22:54:41 +00:00
{
2026-04-13 11:54:04 +02:00
auto const & name = vm . get_identifier ( m_name ) ;
vm . running_execution_context ( ) . private_environment - > add_private_name ( name ) ;
2024-05-11 22:54:41 +00:00
}
2026-04-13 11:54:04 +02:00
// Append src() to the array in dst(); m_is_spread selects spread semantics.
ThrowCompletionOr<void> ArrayAppend::execute_impl(VM& vm) const
{
    auto target = vm.get(dst());
    auto value = vm.get(src());
    return append(vm, target, value, m_is_spread);
}
2026-04-13 11:54:04 +02:00
// Dynamic import(): evaluate specifier + options and perform the import call.
ThrowCompletionOr<void> ImportCall::execute_impl(VM& vm) const
{
    auto const specifier = vm.get(m_specifier);
    auto const options = vm.get(m_options);
    vm.set(dst(), TRY(perform_import_call(vm, specifier, options)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Drain an iterator into a fresh array (used e.g. for spread into array).
// Rebuilds an IteratorRecord from its three operand components, then steps
// until exhaustion, appending each value at the next index.
ThrowCompletionOr<void> IteratorToArray::execute_impl(VM& vm) const
{
    auto& iterator_object = vm.get(m_iterator_object).as_object();
    auto next_method = vm.get(m_iterator_next_method);
    auto done = vm.get(m_iterator_done_property).as_bool();
    IteratorRecordImpl iterator_record { .done = done, .iterator = iterator_object, .next_method = next_method };

    auto array = MUST(Array::create(*vm.current_realm(), 0));
    for (size_t index = 0;; ++index) {
        auto value = TRY(iterator_step_value(vm, iterator_record));
        if (!value.has_value())
            break;
        MUST(array->create_data_property_or_throw(index, value.release_value()));
    }
    vm.set(dst(), array);
    return {};
}
2026-04-13 11:54:04 +02:00
void NewObject : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2025-04-29 16:08:42 +02:00
auto & realm = * vm . current_realm ( ) ;
2026-01-09 18:55:00 +01:00
2026-03-07 22:52:25 +01:00
if ( m_cache ) {
auto & cache = * bit_cast < ObjectShapeCache * > ( m_cache ) ;
2026-01-09 18:55:00 +01:00
auto cached_shape = cache . shape . ptr ( ) ;
if ( cached_shape ) {
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , Object : : create_with_premade_shape ( * cached_shape ) ) ;
2026-01-09 18:55:00 +01:00
return ;
}
}
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , Object : : create ( realm , realm . intrinsics ( ) . object_prototype ( ) ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void NewObjectWithNoPrototype : : execute_impl ( VM & vm ) const
2025-11-06 19:20:29 +00:00
{
auto & realm = * vm . current_realm ( ) ;
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , Object : : create ( realm , nullptr ) ) ;
2025-11-06 19:20:29 +00:00
}
2026-04-13 11:54:04 +02:00
void CacheObjectShape : : execute_impl ( VM & vm ) const
2026-01-09 18:55:00 +01:00
{
2026-03-07 22:52:25 +01:00
auto & cache = * bit_cast < ObjectShapeCache * > ( m_cache ) ;
2026-01-09 18:55:00 +01:00
if ( ! cache . shape ) {
2026-04-13 11:54:04 +02:00
auto & object = vm . get ( m_object ) . as_object ( ) ;
2026-03-22 09:20:03 -05:00
if ( ! object . shape ( ) . is_dictionary ( ) )
cache . shape = & object . shape ( ) ;
2026-01-09 18:55:00 +01:00
}
}
// Slow path for InitObjectLiteralProperty: define the property normally, then
// try to record its storage offset so future executions can use the fast path.
COLD static void init_object_literal_property_slow(Object& object, PropertyKey const& property_key, Value value, ObjectShapeCache& cache, u32 property_slot)
{
    object.define_direct_property(property_key, value, JS::Attribute::Enumerable | JS::Attribute::Writable | JS::Attribute::Configurable);

    // Cache the property offset for future fast-path use.
    // Note: lookup may fail if the shape is in dictionary mode or for other edge cases.
    // We only cache if we're not in dictionary mode and the lookup succeeds.
    if (object.shape().is_dictionary())
        return;
    auto metadata = object.shape().lookup(property_key);
    if (!metadata.has_value())
        return;
    if (property_slot >= cache.property_offsets.size())
        cache.property_offsets.resize(property_slot + 1);
    cache.property_offsets[property_slot] = metadata->offset;
}
2026-04-13 11:54:04 +02:00
void InitObjectLiteralProperty : : execute_impl ( VM & vm ) const
2026-01-09 18:55:00 +01:00
{
2026-04-13 11:54:04 +02:00
auto & object = vm . get ( m_object ) . as_object ( ) ;
auto value = vm . get ( m_src ) ;
auto & cache = vm . current_executable ( ) . object_shape_caches [ m_shape_cache_index ] ;
2026-01-09 18:55:00 +01:00
// Fast path: if we have a cached shape and it matches, write directly to the cached offset
auto cached_shape = cache . shape . ptr ( ) ;
if ( cached_shape & & & object . shape ( ) = = cached_shape & & m_property_slot < cache . property_offsets . size ( ) ) {
object . put_direct ( cache . property_offsets [ m_property_slot ] , value ) ;
return ;
}
2026-04-13 11:54:04 +02:00
auto const & property_key = vm . current_executable ( ) . get_property_key ( m_property ) ;
2026-01-09 18:55:00 +01:00
init_object_literal_property_slow ( object , property_key , value , cache , m_property_slot ) ;
}
2026-04-13 11:54:04 +02:00
void NewRegExp : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) ,
2024-02-04 08:00:54 +01:00
new_regexp (
2026-04-13 11:54:04 +02:00
vm ,
vm . current_executable ( ) . get_string ( m_source_index ) ,
vm . current_executable ( ) . get_string ( m_flags_index ) ) ) ;
2024-02-04 08:00:54 +01:00
}
2026-04-13 11:54:04 +02:00
// Construct a ReferenceError with a precomputed message (cold: error path only).
COLD void NewReferenceError::execute_impl(VM& vm) const
{
    auto& realm = *vm.current_realm();
    auto const& message = vm.current_executable().get_string(m_error_string);
    vm.set(dst(), ReferenceError::create(realm, message));
}
2026-04-13 11:54:04 +02:00
// Construct a TypeError with a precomputed message (cold: error path only).
COLD void NewTypeError::execute_impl(VM& vm) const
{
    auto& realm = *vm.current_realm();
    auto const& message = vm.current_executable().get_string(m_error_string);
    vm.set(dst(), TypeError::create(realm, message));
}
2023-09-27 10:10:00 +02:00
2026-04-13 11:54:04 +02:00
// Rest-in-object-destructuring helper ({ ...rest }): copy all data properties
// of the source object into a fresh object, skipping the excluded names.
ThrowCompletionOr<void> CopyObjectExcludingProperties::execute_impl(VM& vm) const
{
    auto& realm = *vm.current_realm();

    auto from_object = vm.get(m_from_object);
    auto to_object = Object::create(realm, realm.intrinsics().object_prototype());

    // Each excluded-name operand is coerced to a property key (may throw).
    HashTable<PropertyKey> excluded_names;
    for (size_t i = 0; i < m_excluded_names_count; ++i)
        excluded_names.set(TRY(vm.get(m_excluded_names[i]).to_property_key(vm)));

    TRY(to_object->copy_data_properties(vm, from_object, excluded_names));
    vm.set(dst(), to_object);
    return {};
}
2026-04-13 11:54:04 +02:00
// In-place string concatenation: dst() must already hold a string; src() is
// coerced to a primitive string (may throw) and appended via a rope node.
ThrowCompletionOr<void> ConcatString::execute_impl(VM& vm) const
{
    auto suffix = TRY(vm.get(src()).to_primitive_string(vm));
    vm.set(dst(), PrimitiveString::create(vm, vm.get(dst()).as_string(), suffix));
    return {};
}
2025-05-04 01:41:49 +02:00
enum class BindingIsKnownToBeInitialized {
No ,
Yes ,
} ;
template < BindingIsKnownToBeInitialized binding_is_known_to_be_initialized >
2026-04-13 11:54:04 +02:00
static ThrowCompletionOr < void > get_binding ( VM & vm , Operand dst , IdentifierTableIndex identifier , Strict strict , EnvironmentCoordinate & cache )
2023-09-27 10:10:00 +02:00
{
2024-05-11 17:22:59 +02:00
2025-10-07 16:37:22 +02:00
if ( cache . is_valid ( ) ) [[likely]] {
2026-04-13 11:54:04 +02:00
auto const * environment = vm . running_execution_context ( ) . lexical_environment . ptr ( ) ;
2026-01-26 20:40:05 +01:00
for ( size_t i = 0 ; i < cache . hops ; + + i ) {
if ( environment - > is_permanently_screwed_by_eval ( ) ) [[unlikely]]
goto slow_path ;
2024-05-11 17:22:59 +02:00
environment = environment - > outer_environment ( ) ;
2026-01-26 20:40:05 +01:00
}
2025-10-07 16:37:22 +02:00
if ( ! environment - > is_permanently_screwed_by_eval ( ) ) [[likely]] {
2025-05-04 01:41:49 +02:00
Value value ;
if constexpr ( binding_is_known_to_be_initialized = = BindingIsKnownToBeInitialized : : No ) {
value = TRY ( static_cast < DeclarativeEnvironment const & > ( * environment ) . get_binding_value_direct ( vm , cache . index ) ) ;
} else {
value = static_cast < DeclarativeEnvironment const & > ( * environment ) . get_initialized_binding_value_direct ( cache . index ) ;
}
2026-04-13 11:54:04 +02:00
vm . set ( dst , value ) ;
2024-05-11 17:22:59 +02:00
return { } ;
}
2026-01-26 20:40:05 +01:00
slow_path :
2025-05-04 01:41:49 +02:00
cache = { } ;
2024-05-11 17:22:59 +02:00
}
2026-04-13 11:54:04 +02:00
auto & executable = vm . current_executable ( ) ;
2025-10-28 20:25:12 +01:00
auto reference = TRY ( vm . resolve_binding ( executable . get_identifier ( identifier ) , strict ) ) ;
2024-05-11 17:22:59 +02:00
if ( reference . environment_coordinate ( ) . has_value ( ) )
2025-05-04 01:41:49 +02:00
cache = reference . environment_coordinate ( ) . value ( ) ;
2026-01-26 20:40:05 +01:00
2026-04-13 11:54:04 +02:00
vm . set ( dst , TRY ( reference . get_value ( vm ) ) ) ;
2023-09-27 10:10:00 +02:00
return { } ;
}
2026-04-13 11:54:04 +02:00
// Read a binding that may still be uninitialized (TDZ check required).
ThrowCompletionOr<void> GetBinding::execute_impl(VM& vm) const
{
    return get_binding<BindingIsKnownToBeInitialized::No>(vm, m_dst, m_identifier, strict(), m_cache);
}
2026-04-13 11:54:04 +02:00
// Read a binding that is statically known to be initialized (no TDZ check).
ThrowCompletionOr<void> GetInitializedBinding::execute_impl(VM& vm) const
{
    return get_binding<BindingIsKnownToBeInitialized::Yes>(vm, m_dst, m_identifier, strict(), m_cache);
}
2026-04-13 11:54:04 +02:00
// Resolve a callee by name along with the |this| value implied by the
// environment record it was found in (needed for e.g. with-statement calls).
ThrowCompletionOr<void> GetCalleeAndThisFromEnvironment::execute_impl(VM& vm) const
{
    auto result = TRY(get_callee_and_this_from_environment(
        vm,
        vm.get_identifier(m_identifier),
        strict(),
        m_cache));
    vm.set(m_callee, result.callee);
    vm.set(m_this_value, result.this_value);
    return {};
}
2026-04-13 11:54:04 +02:00
// Read a global variable, going through the per-site GlobalVariableCache.
ThrowCompletionOr<void> GetGlobal::execute_impl(VM& vm) const
{
    auto& cache = *bit_cast<GlobalVariableCache*>(m_cache);
    vm.set(dst(), TRY(get_global(vm, m_identifier, strict(), cache)));
    return {};
}
2026-04-13 11:54:04 +02:00
// Write to a global. Layered caching strategy:
//   1. If the global declarative environment hasn't changed since the cache
//      was filled:
//      a. cached global-object shape + offset → direct property write
//         (or accessor setter call),
//      b. cached environment binding index → direct write into the module or
//         global declarative environment.
//   2. Otherwise fall back to full resolution: module environment, then
//      global declarative environment, then the global binding object, then a
//      plain resolve_binding — repopulating the cache where possible.
ThrowCompletionOr<void> SetGlobal::execute_impl(VM& vm) const
{
    auto& binding_object = vm.global_object();
    auto& declarative_record = vm.global_declarative_environment();

    auto& cache = *bit_cast<GlobalVariableCache*>(m_cache);
    auto& shape = binding_object.shape();
    auto src = vm.get(m_src);

    if (cache.environment_serial_number == declarative_record.environment_serial_number()) {
        // OPTIMIZATION: For global var bindings, if the shape of the global object hasn't changed,
        // we can use the cached property offset.
        if (&shape == cache.entries[0].shape && (!shape.is_dictionary() || shape.dictionary_generation() == cache.entries[0].shape_dictionary_generation)) {
            auto value = binding_object.get_direct(cache.entries[0].property_offset);
            if (value.is_accessor())
                TRY(call(vm, value.as_accessor().setter(), &binding_object, src));
            else
                binding_object.put_direct(cache.entries[0].property_offset, src);
            return {};
        }
        // OPTIMIZATION: For global lexical bindings, if the global declarative environment hasn't changed,
        // we can use the cached environment binding index.
        if (cache.has_environment_binding_index) {
            if (cache.in_module_environment) {
                auto module = vm.running_execution_context().script_or_module.get_pointer<GC::Ref<Module>>();
                TRY((*module)->environment()->set_mutable_binding_direct(vm, cache.environment_binding_index, src, strict() == Strict::Yes));
            } else {
                TRY(declarative_record.set_mutable_binding_direct(vm, cache.environment_binding_index, src, strict() == Strict::Yes));
            }
            return {};
        }
    }

    cache.environment_serial_number = declarative_record.environment_serial_number();

    auto& identifier = vm.get_identifier(m_identifier);

    if (auto* module = vm.running_execution_context().script_or_module.get_pointer<GC::Ref<Module>>()) {
        // NOTE: GetGlobal is used to access variables stored in the module environment and global environment.
        // The module environment is checked first since it precedes the global environment in the environment chain.
        auto& module_environment = *(*module)->environment();
        Optional<size_t> index;
        if (TRY(module_environment.has_binding(identifier, &index))) {
            if (index.has_value()) {
                cache.environment_binding_index = static_cast<u32>(index.value());
                cache.has_environment_binding_index = true;
                cache.in_module_environment = true;
                return TRY(module_environment.set_mutable_binding_direct(vm, index.value(), src, strict() == Strict::Yes));
            }
            return TRY(module_environment.set_mutable_binding(vm, identifier, src, strict() == Strict::Yes));
        }
    }

    Optional<size_t> offset;
    if (TRY(declarative_record.has_binding(identifier, &offset))) {
        cache.environment_binding_index = static_cast<u32>(offset.value());
        cache.has_environment_binding_index = true;
        cache.in_module_environment = false;
        TRY(declarative_record.set_mutable_binding(vm, identifier, src, strict() == Strict::Yes));
        return {};
    }

    if (TRY(binding_object.has_property(identifier))) {
        CacheableSetPropertyMetadata cacheable_metadata;
        auto success = TRY(binding_object.internal_set(identifier, src, &binding_object, &cacheable_metadata));
        if (!success && strict() == Strict::Yes) [[unlikely]] {
            // Note: Nothing like this in the spec, this is here to produce nicer errors instead of the generic one thrown by Object::set().
            auto property_or_error = binding_object.internal_get_own_property(identifier);
            if (!property_or_error.is_error()) {
                auto property = property_or_error.release_value();
                if (property.has_value() && !property->writable.value_or(true)) {
                    return vm.throw_completion<TypeError>(ErrorType::DescWriteNonWritable, identifier);
                }
            }
            return vm.throw_completion<TypeError>(ErrorType::ObjectSetReturnedFalse);
        }
        if (cacheable_metadata.type == CacheableSetPropertyMetadata::Type::ChangeOwnProperty) {
            cache.entries[0].shape = shape;
            cache.entries[0].property_offset = cacheable_metadata.property_offset.value();
            if (shape.is_dictionary()) {
                cache.entries[0].shape_dictionary_generation = shape.dictionary_generation();
            }
        }
        return {};
    }

    auto reference = TRY(vm.resolve_binding(identifier, strict(), &declarative_record));
    TRY(reference.put_value(vm, src));
    return {};
}
2026-04-13 11:54:04 +02:00
// `delete identifier` — resolve the binding and attempt deletion, storing the
// boolean result (cold: rarely executed).
COLD ThrowCompletionOr<void> DeleteVariable::execute_impl(VM& vm) const
{
    auto const& name = vm.get_identifier(m_identifier);
    auto reference = TRY(vm.resolve_binding(name, strict()));
    vm.set(dst(), Value(TRY(reference.delete_(vm))));
    return {};
}
2026-04-13 11:54:04 +02:00
void CreateLexicalEnvironment : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
auto & parent = as < Environment > ( vm . get ( m_parent ) . as_cell ( ) ) ;
2026-02-09 03:34:42 +01:00
auto environment = new_declarative_environment ( parent ) ;
environment - > ensure_capacity ( m_capacity ) ;
2026-04-13 11:54:04 +02:00
vm . set ( m_dst , environment ) ;
vm . running_execution_context ( ) . lexical_environment = environment ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void CreatePrivateEnvironment : : execute_impl ( VM & vm ) const
2024-05-11 22:54:41 +00:00
{
2026-04-13 11:54:04 +02:00
auto & running_execution_context = vm . running_execution_context ( ) ;
2024-05-11 22:54:41 +00:00
auto outer_private_environment = running_execution_context . private_environment ;
2026-04-13 11:54:04 +02:00
running_execution_context . private_environment = new_private_environment ( vm , outer_private_environment ) ;
2024-05-11 22:54:41 +00:00
}
2026-04-13 11:54:04 +02:00
void CreateVariableEnvironment : : execute_impl ( VM & vm ) const
2024-05-05 22:06:55 +02:00
{
2026-04-13 11:54:04 +02:00
auto & running_execution_context = vm . running_execution_context ( ) ;
2024-05-05 22:06:55 +02:00
auto var_environment = new_declarative_environment ( * running_execution_context . lexical_environment ) ;
2024-05-09 17:10:20 +02:00
var_environment - > ensure_capacity ( m_capacity ) ;
2024-05-05 22:06:55 +02:00
running_execution_context . variable_environment = var_environment ;
running_execution_context . lexical_environment = var_environment ;
}
2026-04-13 11:54:04 +02:00
// `with (object)`: coerce the operand to an object (may throw) and push an
// object environment over the current lexical environment (cold path).
COLD ThrowCompletionOr<void> EnterObjectEnvironment::execute_impl(VM& vm) const
{
    auto object = TRY(vm.get(m_object).to_object(vm));
    auto& old_environment = vm.running_execution_context().lexical_environment;
    auto new_environment = new_object_environment(*object, true, old_environment);
    vm.set(m_dst, new_environment);
    vm.running_execution_context().lexical_environment = new_environment;
    return {};
}
2026-04-13 11:54:04 +02:00
// Move the pending exception into dst() (entering a catch handler; cold path).
COLD void Catch::execute_impl(VM& vm) const
{
    vm.catch_exception(dst());
}
2026-04-13 11:54:04 +02:00
// Declare a variable binding with the instruction's mode/global/immutable/
// strict flags; all the work happens in the create_variable() helper.
ThrowCompletionOr<void> CreateVariable::execute_impl(VM& vm) const
{
    auto const& name = vm.get_identifier(m_identifier);
    return create_variable(vm, name, m_mode, m_is_global, m_is_immutable, m_is_strict);
}
2026-04-13 11:54:04 +02:00
void CreateRestParams : : execute_impl ( VM & vm ) const
2024-05-05 22:06:55 +02:00
{
2026-04-13 11:54:04 +02:00
auto const arguments = vm . running_execution_context ( ) . arguments_span ( ) ;
auto arguments_count = vm . running_execution_context ( ) . passed_argument_count ;
auto array = MUST ( Array : : create ( vm . realm ( ) , 0 ) ) ;
2024-05-05 22:06:55 +02:00
for ( size_t rest_index = m_rest_index ; rest_index < arguments_count ; + + rest_index )
2026-03-17 00:54:54 -05:00
array - > indexed_append ( arguments [ rest_index ] ) ;
2026-04-13 11:54:04 +02:00
vm . set ( m_dst , array ) ;
2024-05-05 22:06:55 +02:00
}
2026-04-13 11:54:04 +02:00
void CreateArguments : : execute_impl ( VM & vm ) const
2024-05-05 22:06:55 +02:00
{
2026-04-13 11:54:04 +02:00
auto const & function = vm . running_execution_context ( ) . function ;
auto const arguments = vm . running_execution_context ( ) . arguments_span ( ) ;
auto const & environment = vm . running_execution_context ( ) . lexical_environment ;
2024-05-05 22:06:55 +02:00
2026-04-13 11:54:04 +02:00
auto passed_arguments = ReadonlySpan < Value > { arguments . data ( ) , vm . running_execution_context ( ) . passed_argument_count } ;
2024-05-05 22:06:55 +02:00
Object * arguments_object ;
2025-11-20 22:14:50 +01:00
if ( m_kind = = ArgumentsKind : : Mapped ) {
2026-02-11 01:13:06 +01:00
auto const & ecma_function = static_cast < ECMAScriptFunctionObject const & > ( * function ) ;
2026-04-13 11:54:04 +02:00
arguments_object = create_mapped_arguments_object ( vm , * function , ecma_function . parameter_names_for_mapped_arguments ( ) , passed_arguments , * environment ) ;
2024-05-05 22:06:55 +02:00
} else {
2026-04-13 11:54:04 +02:00
arguments_object = create_unmapped_arguments_object ( vm , passed_arguments ) ;
2024-05-05 22:06:55 +02:00
}
2024-05-21 09:32:51 +01:00
if ( m_dst . has_value ( ) ) {
2026-04-13 11:54:04 +02:00
vm . set ( * m_dst , arguments_object ) ;
2025-04-04 13:48:59 +02:00
return ;
2024-05-21 09:32:51 +01:00
}
2024-05-05 22:06:55 +02:00
if ( m_is_immutable ) {
2026-04-13 11:54:04 +02:00
MUST ( environment - > create_immutable_binding ( vm , vm . names . arguments . as_string ( ) , false ) ) ;
2024-05-05 22:06:55 +02:00
} else {
2026-04-13 11:54:04 +02:00
MUST ( environment - > create_mutable_binding ( vm , vm . names . arguments . as_string ( ) , false ) ) ;
2024-05-05 22:06:55 +02:00
}
2026-04-13 11:54:04 +02:00
MUST ( environment - > initialize_binding ( vm , vm . names . arguments . as_string ( ) , arguments_object , Environment : : InitializeBindingHint : : Normal ) ) ;
2024-05-05 22:06:55 +02:00
}
2024-05-14 11:30:30 +02:00
template < EnvironmentMode environment_mode , BindingInitializationMode initialization_mode >
2026-04-13 11:54:04 +02:00
static ThrowCompletionOr < void > initialize_or_set_binding ( VM & vm , IdentifierTableIndex identifier_index , Strict strict , Value value , EnvironmentCoordinate & cache )
2023-09-27 10:10:00 +02:00
{
2024-05-13 22:03:52 +02:00
2024-05-14 11:30:30 +02:00
auto * environment = environment_mode = = EnvironmentMode : : Lexical
2026-04-13 11:54:04 +02:00
? vm . running_execution_context ( ) . lexical_environment . ptr ( )
: vm . running_execution_context ( ) . variable_environment . ptr ( ) ;
2024-05-14 11:30:30 +02:00
2025-10-07 16:37:22 +02:00
if ( cache . is_valid ( ) ) [[likely]] {
2026-01-26 20:40:05 +01:00
for ( size_t i = 0 ; i < cache . hops ; + + i ) {
if ( environment - > is_permanently_screwed_by_eval ( ) ) [[unlikely]]
goto slow_path ;
2024-05-13 22:03:52 +02:00
environment = environment - > outer_environment ( ) ;
2026-01-26 20:40:05 +01:00
}
2025-10-07 16:37:22 +02:00
if ( ! environment - > is_permanently_screwed_by_eval ( ) ) [[likely]] {
2024-05-14 11:30:30 +02:00
if constexpr ( initialization_mode = = BindingInitializationMode : : Initialize ) {
TRY ( static_cast < DeclarativeEnvironment & > ( * environment ) . initialize_binding_direct ( vm , cache . index , value , Environment : : InitializeBindingHint : : Normal ) ) ;
} else {
2025-10-28 20:25:12 +01:00
TRY ( static_cast < DeclarativeEnvironment & > ( * environment ) . set_mutable_binding_direct ( vm , cache . index , value , strict = = Strict : : Yes ) ) ;
2024-05-13 22:03:52 +02:00
}
return { } ;
}
2026-01-26 20:40:05 +01:00
slow_path :
2024-05-14 11:30:30 +02:00
cache = { } ;
2024-05-13 22:03:52 +02:00
}
2026-04-13 11:54:04 +02:00
auto reference = TRY ( vm . resolve_binding ( vm . get_identifier ( identifier_index ) , strict , environment ) ) ;
2024-05-14 11:30:30 +02:00
if ( reference . environment_coordinate ( ) . has_value ( ) )
cache = reference . environment_coordinate ( ) . value ( ) ;
if constexpr ( initialization_mode = = BindingInitializationMode : : Initialize ) {
TRY ( reference . initialize_referenced_binding ( vm , value ) ) ;
} else if ( initialization_mode = = BindingInitializationMode : : Set ) {
TRY ( reference . put_value ( vm , value ) ) ;
}
2023-09-27 10:10:00 +02:00
return { } ;
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> InitializeLexicalBinding::execute_impl(VM& vm) const
{
    // Initialize a binding in the lexical environment, using this instruction's cache.
    return initialize_or_set_binding<EnvironmentMode::Lexical, BindingInitializationMode::Initialize>(vm, m_identifier, strict(), vm.get(m_src), m_cache);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> InitializeVariableBinding::execute_impl(VM& vm) const
{
    // Initialize a binding in the variable environment, using this instruction's cache.
    return initialize_or_set_binding<EnvironmentMode::Var, BindingInitializationMode::Initialize>(vm, m_identifier, strict(), vm.get(m_src), m_cache);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> SetLexicalBinding::execute_impl(VM& vm) const
{
    // Assign to an existing binding in the lexical environment, using this instruction's cache.
    return initialize_or_set_binding<EnvironmentMode::Lexical, BindingInitializationMode::Set>(vm, m_identifier, strict(), vm.get(m_src), m_cache);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> SetVariableBinding::execute_impl(VM& vm) const
{
    // Assign to an existing binding in the variable environment, using this instruction's cache.
    return initialize_or_set_binding<EnvironmentMode::Var, BindingInitializationMode::Set>(vm, m_identifier, strict(), vm.get(m_src), m_cache);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetById::execute_impl(VM& vm) const
{
    // Property load `base.prop`; base and this-value are the same here.
    auto base_value = vm.get(base());
    auto& cache = *bit_cast<PropertyLookupCache*>(m_cache);

    // The identifier/key lambdas are only evaluated when the lookup actually needs them.
    vm.set(dst(), TRY(get_by_id<GetByIdMode::Normal>(vm, [&] { return vm.get_identifier(m_base_identifier); }, [&] -> PropertyKey const& { return vm.get_property_key(m_property); }, base_value, base_value, cache)));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetByIdWithThis::execute_impl(VM& vm) const
{
    // Property load with an explicit receiver (e.g. super.prop); no base identifier is available.
    auto base_value = vm.get(m_base);
    auto this_value = vm.get(m_this_value);
    auto& cache = *bit_cast<PropertyLookupCache*>(m_cache);
    vm.set(dst(), TRY(get_by_id<GetByIdMode::Normal>(vm, [] { return Optional<Utf16FlyString const&> {}; }, [&] -> PropertyKey const& { return vm.get_property_key(m_property); }, base_value, this_value, cache)));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetLength::execute_impl(VM& vm) const
{
    // Specialized `base.length` load, using the executable's interned "length" identifier.
    auto base_value = vm.get(base());
    auto& executable = vm.current_executable();
    auto& cache = *bit_cast<PropertyLookupCache*>(m_cache);
    vm.set(dst(), TRY(get_by_id<GetByIdMode::Length>(vm, [&] { return vm.get_identifier(m_base_identifier); }, [&] { return executable.get_property_key(*executable.length_identifier); }, base_value, base_value, cache)));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetLengthWithThis::execute_impl(VM& vm) const
{
    // Specialized `length` load with an explicit receiver; no base identifier is available.
    auto base_value = vm.get(m_base);
    auto this_value = vm.get(m_this_value);
    auto& executable = vm.current_executable();
    auto& cache = *bit_cast<PropertyLookupCache*>(m_cache);
    vm.set(dst(), TRY(get_by_id<GetByIdMode::Length>(vm, [] { return Optional<Utf16FlyString const&> {}; }, [&] -> PropertyKey const& { return executable.get_property_key(*executable.length_identifier); }, base_value, this_value, cache)));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetPrivateById::execute_impl(VM& vm) const
{
    // Load a private field/method (#name) from the base value via a private reference.
    auto const& name = vm.get_identifier(m_property);
    auto base_value = vm.get(m_base);
    auto private_reference = make_private_reference(vm, base_value, name);
    vm.set(dst(), TRY(private_reference.get_value(vm)));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> HasPrivateId::execute_impl(VM& vm) const
{
    // Implements `#name in base`: only objects can carry private elements.
    auto base = vm.get(m_base);
    if (!base.is_object()) [[unlikely]]
        return vm.throw_completion<TypeError>(ErrorType::InOperatorWithObject);

    auto private_environment = vm.running_execution_context().private_environment;
    VERIFY(private_environment);

    auto private_name = private_environment->resolve_private_identifier(vm.get_identifier(m_property));
    vm.set(dst(), Value(base.as_object().private_element_find(private_name) != nullptr));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PutBySpread::execute_impl(VM& vm) const
{
    // Spread source's own enumerable properties onto the base object.
    auto value = vm.get(m_src);
    auto base = vm.get(m_base);
    // a. Let baseObj be ? ToObject(V.[[Base]]).
    auto object = TRY(base.to_object(vm));
    TRY(object->copy_data_properties(vm, value, {}));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PutById::execute_impl(VM& vm) const
{
    // Property store `base.prop = value`; base doubles as the receiver.
    auto value = vm.get(m_src);
    auto base = vm.get(m_base);
    auto const& base_identifier = vm.get_identifier(m_base_identifier);
    auto const& property_key = vm.get_property_key(m_property);
    auto& cache = *bit_cast<PropertyLookupCache*>(m_cache);
    TRY(put_by_property_key(vm, base, base, value, base_identifier, property_key, m_kind, strict(), &cache));
    return {};
}
2025-10-10 12:09:34 +02:00
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PutByIdWithThis::execute_impl(VM& vm) const
{
    // Property store with an explicit receiver (e.g. super.prop = value); no base identifier.
    auto value = vm.get(m_src);
    auto base = vm.get(m_base);
    auto const& name = vm.get_property_key(m_property);
    auto& cache = *bit_cast<PropertyLookupCache*>(m_cache);
    TRY(put_by_property_key(vm, base, vm.get(m_this_value), value, {}, name, m_kind, strict(), &cache));
    return {};
}
2023-09-27 10:10:00 +02:00
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PutPrivateById::execute_impl(VM& vm) const
{
    // Store into a private field (#name) on the base, which is coerced to an object first.
    auto value = vm.get(m_src);
    auto object = TRY(vm.get(m_base).to_object(vm));
    auto const& name = vm.get_identifier(m_property);
    auto private_reference = make_private_reference(vm, object, name);
    TRY(private_reference.put_value(vm, value));
    return {};
}
2026-04-13 11:54:04 +02:00
COLD ThrowCompletionOr<void> DeleteById::execute_impl(VM& vm) const
{
    // `delete base.prop`: build a property reference and delete through it,
    // storing the boolean result in the destination.
    auto const& property_key = vm.get_property_key(m_property);
    auto reference = Reference { vm.get(m_base), property_key, {}, strict() };
    vm.set(dst(), Value(TRY(reference.delete_(vm))));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ResolveThisBinding::execute_impl(VM& vm) const
{
    auto& cached_this_value = vm.reg(Register::this_value());
    if (!cached_this_value.is_special_empty_value())
        return {};

    // OPTIMIZATION: Because the value of 'this' cannot be reassigned during a function execution, it's
    // resolved once and then saved for subsequent use.
    auto& running_execution_context = vm.running_execution_context();
    if (auto function = running_execution_context.function; function && is<ECMAScriptFunctionObject>(*function) && !static_cast<ECMAScriptFunctionObject&>(*function).allocates_function_environment()) {
        // Functions that don't allocate a function environment keep 'this' in the execution context.
        cached_this_value = running_execution_context.this_value.value();
    } else {
        cached_this_value = TRY(vm.resolve_this_binding());
    }
    return {};
}
// https://tc39.es/ecma262/#sec-makesuperpropertyreference
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ResolveSuperBase::execute_impl(VM& vm) const
{
    // 1. Let env be GetThisEnvironment().
    auto& env = as<FunctionEnvironment>(*get_this_environment(vm));

    // 2. Assert: env.HasSuperBinding() is true.
    VERIFY(env.has_super_binding());

    // 3. Let baseValue be ? env.GetSuperBase().
    vm.set(dst(), TRY(env.get_super_base()));
    return {};
}
2026-04-13 11:54:04 +02:00
void GetNewTarget : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , vm . get_new_target ( ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void GetImportMeta : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , vm . get_import_meta ( ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void GetLexicalEnvironment : : execute_impl ( VM & vm ) const
2026-02-09 03:34:42 +01:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , vm . running_execution_context ( ) . lexical_environment ) ;
2026-02-09 03:34:42 +01:00
}
2026-04-13 11:54:04 +02:00
void SetLexicalEnvironment : : execute_impl ( VM & vm ) const
2026-02-09 03:34:42 +01:00
{
2026-04-13 11:54:04 +02:00
vm . running_execution_context ( ) . lexical_environment = & as < Environment > ( vm . get ( m_environment ) . as_cell ( ) ) ;
2026-02-09 03:34:42 +01:00
}
2025-11-01 12:51:36 +01:00
template < CallType call_type >
2026-03-04 10:33:38 +01:00
NEVER_INLINE static ThrowCompletionOr < void > execute_call (
2026-04-13 11:54:04 +02:00
VM & vm ,
2025-11-01 12:51:36 +01:00
Value callee ,
Value this_value ,
ReadonlySpan < Operand > arguments ,
Operand dst ,
2025-12-04 10:02:11 +01:00
Optional < StringTableIndex > const expression_string ,
2025-11-01 12:51:36 +01:00
Strict strict )
{
2026-04-13 11:54:04 +02:00
TRY ( throw_if_needed_for_call ( vm , callee , call_type , expression_string ) ) ;
2025-11-01 12:51:36 +01:00
auto & function = callee . as_function ( ) ;
2026-01-18 23:17:10 +01:00
size_t registers_and_locals_count = 0 ;
2026-03-27 23:45:15 +01:00
ReadonlySpan < Value > constants ;
2025-11-01 12:51:36 +01:00
size_t argument_count = arguments . size ( ) ;
2026-03-27 23:45:15 +01:00
function . get_stack_frame_info ( registers_and_locals_count , constants , argument_count ) ;
2026-03-04 10:32:01 +01:00
auto & stack = vm . interpreter_stack ( ) ;
auto * stack_mark = stack . top ( ) ;
2026-03-27 23:45:15 +01:00
auto * callee_context = stack . allocate ( registers_and_locals_count , constants , max ( arguments . size ( ) , argument_count ) ) ;
2026-03-04 10:32:01 +01:00
if ( ! callee_context ) [[unlikely]]
return vm . throw_completion < InternalError > ( ErrorType : : CallStackSizeExceeded ) ;
ScopeGuard deallocate_guard = [ & stack , stack_mark ] { stack . deallocate ( stack_mark ) ; } ;
2025-11-01 12:51:36 +01:00
2026-03-08 12:30:23 +01:00
auto * callee_context_argument_values = callee_context - > arguments_data ( ) ;
auto const callee_context_argument_count = callee_context - > argument_count ;
2025-11-01 12:51:36 +01:00
auto const insn_argument_count = arguments . size ( ) ;
for ( size_t i = 0 ; i < insn_argument_count ; + + i )
2026-04-13 11:54:04 +02:00
callee_context_argument_values [ i ] = vm . get ( arguments . data ( ) [ i ] ) ;
2025-11-01 12:51:36 +01:00
for ( size_t i = insn_argument_count ; i < callee_context_argument_count ; + + i )
callee_context_argument_values [ i ] = js_undefined ( ) ;
callee_context - > passed_argument_count = insn_argument_count ;
Value retval ;
2026-04-13 11:54:04 +02:00
if ( call_type = = CallType : : DirectEval & & callee = = vm . realm ( ) . intrinsics ( ) . eval_function ( ) ) {
2026-03-08 12:30:23 +01:00
retval = TRY ( perform_eval ( vm , callee_context - > argument_count > 0 ? callee_context - > arguments_data ( ) [ 0 ] : js_undefined ( ) , strict = = Strict : : Yes ? CallerMode : : Strict : CallerMode : : NonStrict , EvalMode : : Direct ) ) ;
2025-11-01 12:51:36 +01:00
} else if ( call_type = = CallType : : Construct ) {
retval = TRY ( function . internal_construct ( * callee_context , function ) ) ;
} else {
retval = TRY ( function . internal_call ( * callee_context , this_value ) ) ;
}
2026-04-13 11:54:04 +02:00
vm . set ( dst , retval ) ;
2024-10-31 22:47:30 +01:00
return { } ;
2025-11-01 12:51:36 +01:00
}
2024-10-31 22:47:30 +01:00
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> Call::execute_impl(VM& vm) const
{
    // Ordinary [[Call]] with an explicit this-value and operand argument list.
    return execute_call<CallType::Call>(vm, vm.get(m_callee), vm.get(m_this_value), { m_arguments, m_argument_count }, m_dst, m_expression_string, strict());
}
2024-10-31 22:47:30 +01:00
2026-04-13 11:54:04 +02:00
NEVER_INLINE ThrowCompletionOr<void> CallConstruct::execute_impl(VM& vm) const
{
    // [[Construct]] path; constructors receive no this-value from the call site.
    return execute_call<CallType::Construct>(vm, vm.get(m_callee), js_undefined(), { m_arguments, m_argument_count }, m_dst, m_expression_string, strict());
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CallDirectEval::execute_impl(VM& vm) const
{
    // Potential direct eval(); execute_call detects the real eval function at runtime.
    return execute_call<CallType::DirectEval>(vm, vm.get(m_callee), vm.get(m_this_value), { m_arguments, m_argument_count }, m_dst, m_expression_string, strict());
}
2026-04-12 13:25:26 +02:00
template<Builtin builtin, typename Callback>
ALWAYS_INLINE static ThrowCompletionOr<void> execute_specialized_builtin_call(
    VM& vm,
    Operand callee_operand,
    Operand this_value_operand,
    ReadonlySpan<Operand> arguments,
    Operand dst,
    Optional<StringTableIndex> const expression_string,
    Strict strict,
    Callback callback)
{
    // If the callee is still the expected builtin, run its native fast-path
    // implementation; otherwise fall back to a fully generic call.
    auto callee = vm.get(callee_operand);
    if (callee.is_function() && callee.as_function().builtin() == builtin) [[likely]] {
        vm.set(dst, TRY(callback(arguments)));
        return {};
    }
    return execute_call<CallType::Call>(vm, callee, vm.get(this_value_operand), arguments, dst, expression_string, strict);
}
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL(name, implementation) \
ThrowCompletionOr < void > CallBuiltin # # name : : execute_impl ( VM & vm ) const \
{ \
Operand arguments [ ] { m_argument } ; \
return execute_specialized_builtin_call < Builtin : : name > ( vm , m_callee , m_this_value , arguments , m_dst , m_expression_string , strict ( ) , [ & ] ( ReadonlySpan < Operand > arguments ) - > ThrowCompletionOr < Value > { \
return implementation ( vm , vm . get ( arguments [ 0 ] ) ) ; \
} ) ; \
2026-04-12 13:25:26 +02:00
}
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_BINARY_BUILTIN_CALL_EXECUTE_IMPL(name, implementation) \
ThrowCompletionOr < void > CallBuiltin # # name : : execute_impl ( VM & vm ) const \
{ \
Operand arguments [ ] { m_argument0 , m_argument1 } ; \
return execute_specialized_builtin_call < Builtin : : name > ( vm , m_callee , m_this_value , arguments , m_dst , m_expression_string , strict ( ) , [ & ] ( ReadonlySpan < Operand > arguments ) - > ThrowCompletionOr < Value > { \
return implementation ( vm , vm . get ( arguments [ 0 ] ) , vm . get ( arguments [ 1 ] ) ) ; \
} ) ; \
2026-04-12 13:25:26 +02:00
}
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_NULLARY_BUILTIN_CALL_EXECUTE_IMPL(name, implementation) \
ThrowCompletionOr < void > CallBuiltin # # name : : execute_impl ( VM & vm ) const \
{ \
return execute_specialized_builtin_call < Builtin : : name > ( vm , m_callee , m_this_value , { } , m_dst , m_expression_string , strict ( ) , [ & ] ( ReadonlySpan < Operand > ) - > ThrowCompletionOr < Value > { \
return implementation ( ) ; \
} ) ; \
2026-04-12 13:25:26 +02:00
}
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_GENERIC_BUILTIN_CALL_EXECUTE_IMPL(name, ...) \
ThrowCompletionOr < void > CallBuiltin # # name : : execute_impl ( VM & vm ) const \
{ \
return execute_call < CallType : : Call > ( vm , vm . get ( m_callee ) , vm . get ( m_this_value ) , { } , m_dst , m_expression_string , strict ( ) ) ; \
2026-04-12 13:25:26 +02:00
}
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_UNARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL(name, ...) \
ThrowCompletionOr < void > CallBuiltin # # name : : execute_impl ( VM & vm ) const \
{ \
Operand arguments [ ] { m_argument } ; \
return execute_call < CallType : : Call > ( vm , vm . get ( m_callee ) , vm . get ( m_this_value ) , arguments , m_dst , m_expression_string , strict ( ) ) ; \
2026-04-12 13:25:26 +02:00
}
2026-04-13 11:54:04 +02:00
# define JS_DEFINE_BINARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL(name, ...) \
ThrowCompletionOr < void > CallBuiltin # # name : : execute_impl ( VM & vm ) const \
{ \
Operand arguments [ ] { m_argument0 , m_argument1 } ; \
return execute_call < CallType : : Call > ( vm , vm . get ( m_callee ) , vm . get ( m_this_value ) , arguments , m_dst , m_expression_string , strict ( ) ) ; \
2026-04-12 13:25:26 +02:00
}
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathAbs , MathObject : : abs_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathLog , MathObject : : log_impl )
JS_DEFINE_BINARY_BUILTIN_CALL_EXECUTE_IMPL ( MathPow , MathObject : : pow_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathExp , MathObject : : exp_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathCeil , MathObject : : ceil_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathFloor , MathObject : : floor_impl )
JS_DEFINE_BINARY_BUILTIN_CALL_EXECUTE_IMPL ( MathImul , MathObject : : imul_impl )
JS_DEFINE_NULLARY_BUILTIN_CALL_EXECUTE_IMPL ( MathRandom , MathObject : : random_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathRound , MathObject : : round_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathSqrt , MathObject : : sqrt_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathSin , MathObject : : sin_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathCos , MathObject : : cos_impl )
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( MathTan , MathObject : : tan_impl )
JS_DEFINE_UNARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( RegExpPrototypeExec )
JS_DEFINE_BINARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( RegExpPrototypeReplace )
JS_DEFINE_BINARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( RegExpPrototypeSplit )
JS_DEFINE_UNARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( OrdinaryHasInstance )
JS_DEFINE_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( ArrayIteratorPrototypeNext )
JS_DEFINE_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( MapIteratorPrototypeNext )
JS_DEFINE_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( SetIteratorPrototypeNext )
JS_DEFINE_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( StringIteratorPrototypeNext )
2026-04-12 16:12:40 +02:00
JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL ( StringFromCharCode , StringConstructor : : from_char_code_impl )
2026-04-12 14:06:57 +02:00
JS_DEFINE_UNARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( StringPrototypeCharCodeAt )
2026-04-12 14:11:55 +02:00
JS_DEFINE_UNARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL ( StringPrototypeCharAt )
2026-04-12 13:25:26 +02:00
# undef JS_DEFINE_BINARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL
# undef JS_DEFINE_UNARY_GENERIC_BUILTIN_CALL_EXECUTE_IMPL
# undef JS_DEFINE_GENERIC_BUILTIN_CALL_EXECUTE_IMPL
# undef JS_DEFINE_NULLARY_BUILTIN_CALL_EXECUTE_IMPL
# undef JS_DEFINE_BINARY_BUILTIN_CALL_EXECUTE_IMPL
# undef JS_DEFINE_UNARY_BUILTIN_CALL_EXECUTE_IMPL
2023-09-27 10:10:00 +02:00
2025-08-30 11:00:54 +02:00
template < CallType call_type >
2026-03-04 10:33:38 +01:00
NEVER_INLINE static ThrowCompletionOr < void > call_with_argument_array (
2026-04-13 11:54:04 +02:00
VM & vm ,
2025-08-30 11:00:54 +02:00
Value callee ,
Value this_value ,
Value arguments ,
Operand dst ,
2025-12-04 10:02:11 +01:00
Optional < StringTableIndex > const expression_string ,
2025-10-28 20:25:12 +01:00
Strict strict )
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
TRY ( throw_if_needed_for_call ( vm , callee , call_type , expression_string ) ) ;
2025-08-30 11:00:54 +02:00
auto & function = callee . as_function ( ) ;
2026-03-29 09:11:33 +01:00
auto & argument_array = arguments . as_array_exotic_object ( ) ;
2026-03-17 00:54:54 -05:00
auto argument_array_length = argument_array . indexed_array_like_size ( ) ;
2025-08-30 11:00:54 +02:00
size_t argument_count = argument_array_length ;
2026-01-18 23:17:10 +01:00
size_t registers_and_locals_count = 0 ;
2026-03-27 23:45:15 +01:00
ReadonlySpan < Value > constants ;
function . get_stack_frame_info ( registers_and_locals_count , constants , argument_count ) ;
2026-03-04 10:32:01 +01:00
auto & stack = vm . interpreter_stack ( ) ;
auto * stack_mark = stack . top ( ) ;
2026-03-27 23:45:15 +01:00
auto * callee_context = stack . allocate ( registers_and_locals_count , constants , max ( argument_array_length , argument_count ) ) ;
2026-03-04 10:32:01 +01:00
if ( ! callee_context ) [[unlikely]]
return vm . throw_completion < InternalError > ( ErrorType : : CallStackSizeExceeded ) ;
ScopeGuard deallocate_guard = [ & stack , stack_mark ] { stack . deallocate ( stack_mark ) ; } ;
2025-08-30 11:00:54 +02:00
2026-03-08 12:30:23 +01:00
auto * callee_context_argument_values = callee_context - > arguments_data ( ) ;
auto const callee_context_argument_count = callee_context - > argument_count ;
2025-08-30 11:00:54 +02:00
auto const insn_argument_count = argument_array_length ;
for ( size_t i = 0 ; i < insn_argument_count ; + + i ) {
2026-03-17 00:54:54 -05:00
if ( auto maybe_value = argument_array . indexed_get ( i ) ; maybe_value . has_value ( ) )
2025-08-30 11:00:54 +02:00
callee_context_argument_values [ i ] = maybe_value . release_value ( ) . value ;
else
callee_context_argument_values [ i ] = js_undefined ( ) ;
}
for ( size_t i = insn_argument_count ; i < callee_context_argument_count ; + + i )
callee_context_argument_values [ i ] = js_undefined ( ) ;
callee_context - > passed_argument_count = insn_argument_count ;
Value retval ;
2026-04-13 11:54:04 +02:00
if ( call_type = = CallType : : DirectEval & & callee = = vm . realm ( ) . intrinsics ( ) . eval_function ( ) ) {
2026-03-08 12:30:23 +01:00
retval = TRY ( perform_eval ( vm , callee_context - > argument_count > 0 ? callee_context - > arguments_data ( ) [ 0 ] : js_undefined ( ) , strict = = Strict : : Yes ? CallerMode : : Strict : CallerMode : : NonStrict , EvalMode : : Direct ) ) ;
2025-08-30 11:00:54 +02:00
} else if ( call_type = = CallType : : Construct ) {
retval = TRY ( function . internal_construct ( * callee_context , function ) ) ;
} else {
retval = TRY ( function . internal_call ( * callee_context , this_value ) ) ;
}
2026-04-13 11:54:04 +02:00
vm . set ( dst , retval ) ;
2023-10-20 13:20:28 +02:00
return { } ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CallWithArgumentArray::execute_impl(VM& vm) const
{
    // Ordinary call whose arguments come from an array object.
    return call_with_argument_array<CallType::Call>(vm, vm.get(callee()), vm.get(this_value()), vm.get(arguments()), dst(), expression_string(), strict());
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CallDirectEvalWithArgumentArray::execute_impl(VM& vm) const
{
    // Potential direct eval() whose arguments come from an array object.
    return call_with_argument_array<CallType::DirectEval>(vm, vm.get(callee()), vm.get(this_value()), vm.get(arguments()), dst(), expression_string(), strict());
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CallConstructWithArgumentArray::execute_impl(VM& vm) const
{
    // Construct call whose arguments come from an array object; no this-value.
    return call_with_argument_array<CallType::Construct>(vm, vm.get(callee()), js_undefined(), vm.get(arguments()), dst(), expression_string(), strict());
}
2023-09-27 10:10:00 +02:00
// 13.3.7.1 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-super-keyword-runtime-semantics-evaluation
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> SuperCallWithArgumentArray::execute_impl(VM& vm) const
{
    // 1. Let newTarget be GetNewTarget().
    auto new_target = vm.get_new_target();
    // 2. Assert: Type(newTarget) is Object.
    VERIFY(new_target.is_object());
    // 3. Let func be GetSuperConstructor().
    auto* func = get_super_constructor(vm);
    // NON-STANDARD: We're doing this step earlier to streamline control flow.
    // 5. If IsConstructor(func) is false, throw a TypeError exception.
    if (!Value(func).is_constructor()) [[unlikely]]
        return vm.throw_completion<TypeError>(ErrorType::NotAConstructor, "Super constructor");
    auto& function = static_cast<FunctionObject&>(*func);

    // 4. Let argList be ? ArgumentListEvaluation of Arguments.
    auto& argument_array = vm.get(m_arguments).as_array_exotic_object();
    size_t argument_array_length = 0;
    if (m_is_synthetic) {
        // Synthetic argument arrays may be proxied through an array-like; use its length.
        argument_array_length = MUST(length_of_array_like(vm, argument_array));
    } else {
        argument_array_length = argument_array.indexed_array_like_size();
    }

    // Size and allocate the callee's stack frame; the guard unwinds the stack on all paths.
    size_t argument_count = argument_array_length;
    size_t registers_and_locals_count = 0;
    ReadonlySpan<Value> constants;
    function.get_stack_frame_info(registers_and_locals_count, constants, argument_count);
    auto& stack = vm.interpreter_stack();
    auto* stack_mark = stack.top();
    auto* callee_context = stack.allocate(registers_and_locals_count, constants, max(argument_array_length, argument_count));
    if (!callee_context) [[unlikely]]
        return vm.throw_completion<InternalError>(ErrorType::CallStackSizeExceeded);
    ScopeGuard deallocate_guard = [&stack, stack_mark] { stack.deallocate(stack_mark); };

    // Copy the argument values into the frame; pad missing entries with undefined.
    auto* callee_context_argument_values = callee_context->arguments_data();
    auto const callee_context_argument_count = callee_context->argument_count;
    auto const insn_argument_count = argument_array_length;
    if (m_is_synthetic) {
        for (size_t i = 0; i < insn_argument_count; ++i)
            callee_context_argument_values[i] = argument_array.get_without_side_effects(PropertyKey { i });
    } else {
        for (size_t i = 0; i < insn_argument_count; ++i) {
            if (auto maybe_value = argument_array.indexed_get(i); maybe_value.has_value())
                callee_context_argument_values[i] = maybe_value.release_value().value;
            else
                callee_context_argument_values[i] = js_undefined();
        }
    }
    for (size_t i = insn_argument_count; i < callee_context_argument_count; ++i)
        callee_context_argument_values[i] = js_undefined();
    callee_context->passed_argument_count = insn_argument_count;

    // 6. Let result be ? Construct(func, argList, newTarget).
    auto result = TRY(function.internal_construct(*callee_context, new_target.as_function()));
    // 7. Let thisER be GetThisEnvironment().
    auto& this_environment = as<FunctionEnvironment>(*get_this_environment(vm));
    // 8. Perform ? thisER.BindThisValue(result).
    TRY(this_environment.bind_this_value(vm, result));
    // 9. Let F be thisER.[[FunctionObject]].
    auto& f = as<ECMAScriptFunctionObject>(this_environment.function_object());
    // 10. Assert: F is an ECMAScript function object.
    // NOTE: This is implied by the strong C++ type.
    // 11. Perform ? InitializeInstanceElements(result, F).
    TRY(result->initialize_instance_elements(f));
    // 12. Return result.
    vm.set(m_dst, result);
    return {};
}
2026-04-13 11:54:04 +02:00
void NewFunction : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , new_function ( vm , m_shared_function_data_index , m_home_object ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
void Return : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
vm . do_return ( vm . get ( m_value ) ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> Increment::execute_impl(VM& vm) const
{
    auto value = vm.get(dst());

    // OPTIMIZATION: Stay on the Int32 representation when the result cannot overflow.
    if (value.is_int32()) [[likely]] {
        auto as_int = value.as_i32();
        if (as_int != NumericLimits<i32>::max()) [[likely]] {
            vm.set(dst(), Value { as_int + 1 });
            return {};
        }
    }

    // Slow path: ToNumeric, then add one as a double or as a BigInt.
    value = TRY(value.to_numeric(vm));
    if (value.is_number()) {
        vm.set(dst(), Value(value.as_double() + 1));
    } else {
        auto incremented = value.as_bigint().big_integer().plus(Crypto::SignedBigInteger { 1 });
        vm.set(dst(), BigInt::create(vm, move(incremented)));
    }
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PostfixIncrement::execute_impl(VM& vm) const
{
    auto previous = vm.get(m_src);

    // OPTIMIZATION: Int32 values that cannot overflow skip ToNumeric entirely.
    if (previous.is_int32()) [[likely]] {
        auto as_int = previous.as_i32();
        if (as_int != NumericLimits<i32>::max()) [[likely]] {
            vm.set(m_dst, previous);
            vm.set(m_src, Value { as_int + 1 });
            return {};
        }
    }

    // Slow path: the old (numeric) value goes to dst, the incremented one back to src.
    previous = TRY(previous.to_numeric(vm));
    vm.set(m_dst, previous);
    if (previous.is_number()) {
        vm.set(m_src, Value(previous.as_double() + 1));
    } else {
        auto incremented = previous.as_bigint().big_integer().plus(Crypto::SignedBigInteger { 1 });
        vm.set(m_src, BigInt::create(vm, move(incremented)));
    }
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> Decrement::execute_impl(VM& vm) const
{
    auto old_value = vm.get(dst());

    // OPTIMIZATION: Fast path for Int32 values, mirroring Increment.
    // Guard against i32 underflow: i32::min() - 1 is not representable as Int32.
    if (old_value.is_int32()) [[likely]] {
        auto integer_value = old_value.as_i32();
        if (integer_value != NumericLimits<i32>::min()) [[likely]] {
            vm.set(dst(), Value { integer_value - 1 });
            return {};
        }
    }

    // Slow path: ToNumeric, then subtract one as a double or as a BigInt.
    old_value = TRY(old_value.to_numeric(vm));

    if (old_value.is_number())
        vm.set(dst(), Value(old_value.as_double() - 1));
    else
        vm.set(dst(), BigInt::create(vm, old_value.as_bigint().big_integer().minus(Crypto::SignedBigInteger { 1 })));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PostfixDecrement::execute_impl(VM& vm) const
{
    auto old_value = vm.get(m_src);

    // OPTIMIZATION: Fast path for Int32 values, mirroring PostfixIncrement.
    // Guard against i32 underflow: i32::min() - 1 is not representable as Int32.
    if (old_value.is_int32()) [[likely]] {
        auto integer_value = old_value.as_i32();
        if (integer_value != NumericLimits<i32>::min()) [[likely]] {
            vm.set(m_dst, old_value);
            vm.set(m_src, Value { integer_value - 1 });
            return {};
        }
    }

    // Slow path: the old (numeric) value goes to dst, the decremented one back to src.
    old_value = TRY(old_value.to_numeric(vm));
    vm.set(m_dst, old_value);
    if (old_value.is_number())
        vm.set(m_src, Value(old_value.as_double() - 1));
    else
        vm.set(m_src, BigInt::create(vm, old_value.as_bigint().big_integer().minus(Crypto::SignedBigInteger { 1 })));
    return {};
}
2026-04-13 11:54:04 +02:00
COLD ThrowCompletionOr<void> Throw::execute_impl(VM& vm) const
{
    // Turn the value in src into a throw completion, unwinding to the nearest handler.
    auto thrown_value = vm.get(src());
    return throw_completion(thrown_value);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ThrowIfNotObject::execute_impl(VM& vm) const
{
    // Guard: the operand must already be an object; otherwise raise a TypeError.
    auto value = vm.get(m_src);
    if (value.is_object()) [[likely]]
        return {};
    return vm.throw_completion<TypeError>(ErrorType::NotAnObject, value);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ThrowIfNullish::execute_impl(VM& vm) const
{
    // Guard: null/undefined are not object-coercible; raise a TypeError for them.
    auto value = vm.get(m_src);
    if (!value.is_nullish()) [[likely]]
        return {};
    return vm.throw_completion<TypeError>(ErrorType::NotObjectCoercible, value);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ThrowIfTDZ::execute_impl(VM& vm) const
{
    // Guard: the special empty value marks an uninitialized binding (temporal dead zone).
    auto value = vm.get(m_src);
    if (!value.is_special_empty_value()) [[likely]]
        return {};
    return vm.throw_completion<ReferenceError>(ErrorType::BindingNotInitialized, value);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ThrowConstAssignment::execute_impl(VM& vm) const
{
    // Unconditional failure: codegen emits this op where assignment to a
    // constant binding was statically detected.
    return vm.throw_completion<TypeError>(ErrorType::InvalidAssignToConst);
}
2026-04-13 11:54:04 +02:00
void LeavePrivateEnvironment : : execute_impl ( VM & vm ) const
2024-05-11 22:54:41 +00:00
{
2026-04-13 11:54:04 +02:00
auto & running_execution_context = vm . running_execution_context ( ) ;
2024-05-11 22:54:41 +00:00
running_execution_context . private_environment = running_execution_context . private_environment - > outer_environment ( ) ;
}
2026-04-13 11:54:04 +02:00
void Yield : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
auto yielded_value = vm . get ( m_value ) . is_special_empty_value ( ) ? js_undefined ( ) : vm . get ( m_value ) ;
vm . do_return (
vm . do_yield ( yielded_value , m_continuation_label ) ) ;
2024-05-18 17:25:43 +02:00
}
2024-02-04 08:00:54 +01:00
2026-04-13 11:54:04 +02:00
void Await : : execute_impl ( VM & vm ) const
2023-09-27 10:10:00 +02:00
{
2026-04-13 11:54:04 +02:00
auto yielded_value = vm . get ( m_argument ) . is_special_empty_value ( ) ? js_undefined ( ) : vm . get ( m_argument ) ;
auto & context = vm . running_execution_context ( ) ;
2026-03-16 00:02:11 +01:00
context . yield_continuation = m_continuation_label . address ( ) ;
context . yield_is_await = true ;
2026-04-13 11:54:04 +02:00
vm . do_return ( yielded_value ) ;
2023-09-27 10:10:00 +02:00
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetByValue::execute_impl(VM& vm) const
{
    // base[property], delegating to the shared get_by_value helper (inline-cache aware).
    auto base_value = vm.get(m_base);
    auto property_value = vm.get(m_property);
    auto result = TRY(get_by_value(vm, m_base_identifier, base_value, property_value, vm.current_executable()));
    vm.set(dst(), result);
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetByValueWithThis::execute_impl(VM& vm) const
{
    // Like GetByValue, but [[Get]] uses an explicit receiver (e.g. super[expr]).
    // Order matters: ToObject on the base runs before ToPropertyKey, matching spec evaluation.
    auto property_value = vm.get(m_property);
    auto object = TRY(vm.get(m_base).to_object(vm));
    auto property_key = TRY(property_value.to_property_key(vm));
    auto result = TRY(object->internal_get(property_key, vm.get(m_this_value)));
    vm.set(dst(), result);
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PutByValue::execute_impl(VM& vm) const
{
    // base[property] = value, honoring the property-write kind and strict mode.
    auto value_to_store = vm.get(m_src);
    auto base_value = vm.get(m_base);
    auto property_value = vm.get(m_property);
    auto const& identifier = vm.get_identifier(m_base_identifier);
    TRY(put_by_value(vm, base_value, identifier, property_value, value_to_store, m_kind, strict()));
    return {};
}
2025-10-10 12:09:34 +02:00
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> PutByValueWithThis::execute_impl(VM& vm) const
{
    // base[property] = value with an explicit receiver (e.g. super[expr] = value).
    auto property_key = TRY(vm.get(m_property).to_property_key(vm));
    TRY(put_by_property_key(vm, vm.get(m_base), vm.get(m_this_value), vm.get(m_src), {}, property_key, m_kind, strict()));
    return {};
}
2023-09-27 10:10:00 +02:00
2026-04-13 11:54:04 +02:00
COLD ThrowCompletionOr<void> DeleteByValue::execute_impl(VM& vm) const
{
    // `delete base[property]`: build a property reference and delete through it.
    auto property_key = TRY(vm.get(m_property).to_property_key(vm));
    Reference reference { vm.get(m_base), property_key, {}, strict() };
    auto deleted = TRY(reference.delete_(vm));
    vm.set(m_dst, Value(deleted));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetIterator::execute_impl(VM& vm) const
{
    // Run the iterator protocol on the iterable and unpack the record into three registers.
    auto record = TRY(get_iterator_impl(vm, vm.get(iterable()), m_hint));
    vm.set(m_dst_iterator_object, record.iterator);
    vm.set(m_dst_iterator_next, record.next_method);
    vm.set(m_dst_iterator_done, Value(record.done));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> GetMethod::execute_impl(VM& vm) const
{
    // GetMethod(O, P): a missing/nullish method yields undefined rather than throwing.
    auto const& key = vm.get_property_key(m_property);
    auto method = TRY(vm.get(m_object).get_method(vm, key));
    vm.set(dst(), method ? Value(method) : js_undefined());
    return {};
}
2026-04-13 11:54:04 +02:00
NEVER_INLINE ThrowCompletionOr<void> GetObjectPropertyIterator::execute_impl(VM& vm) const
{
    // The per-site for..in cache is stored as an opaque pointer on the instruction.
    auto* iteration_cache = bit_cast<ObjectPropertyIteratorCache*>(m_cache);
    auto object_value = vm.get(m_object);
    auto iterator = TRY(get_object_property_iterator(vm, object_value, iteration_cache));
    vm.set(m_dst_iterator, iterator);
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> IteratorClose::execute_impl(VM& vm) const
{
    // Rebuild the iterator record from its three registers, then run IteratorClose on it.
    IteratorRecordImpl record {
        .done = vm.get(m_iterator_done).as_bool(),
        .iterator = vm.get(m_iterator_object).as_object(),
        .next_method = vm.get(m_iterator_next),
    };

    // FIXME: Return the value of the resulting completion.
    TRY(iterator_close(vm, record, Completion { m_completion_type, vm.get(m_completion_value) }));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> IteratorNext::execute_impl(VM& vm) const
{
    // The done flag is kept in a local that the record is built from; after the call
    // it is mirrored back into the done register when set.
    auto done_flag = vm.get(m_iterator_done).as_bool();
    auto& iterator_object = vm.get(m_iterator_object).as_object();
    IteratorRecordImpl record { .done = done_flag, .iterator = iterator_object, .next_method = vm.get(m_iterator_next) };

    vm.set(m_dst, TRY(JS::iterator_next(vm, record)));

    if (done_flag)
        vm.set(m_iterator_done, Value(true));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> IteratorNextUnpack::execute_impl(VM& vm) const
{
    // The done flag is kept in a local that the record is built from; after stepping
    // it is mirrored back into the done register when set.
    auto done_flag = vm.get(m_iterator_done).as_bool();
    auto& iterator_object = vm.get(m_iterator_object).as_object();
    IteratorRecordImpl record { .done = done_flag, .iterator = iterator_object, .next_method = vm.get(m_iterator_next) };

    auto step_result = TRY(iterator_step(vm, record));

    if (done_flag)
        vm.set(m_iterator_done, Value(true));

    // A fully exhausted iterator only needs the done register set.
    if (step_result.has<IterationDone>()) {
        vm.set(m_dst_done, Value(true));
        return {};
    }

    // Otherwise unpack the step's done/value pair into their registers.
    auto& iteration_result = step_result.get<IterationResult>();
    vm.set(m_dst_done, TRY(iteration_result.done));
    vm.set(m_dst_value, TRY(iteration_result.value));
    return {};
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr < void > ObjectPropertyIteratorNext : : execute_impl ( VM & vm ) const
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
{
2026-04-13 11:54:04 +02:00
auto & iterator = static_cast < PropertyNameIterator & > ( vm . get ( m_iterator_object ) . as_object ( ) ) ;
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
Value value ;
bool done = false ;
2026-04-13 11:54:04 +02:00
TRY ( iterator . next ( vm , done , value ) ) ;
vm . set ( m_dst_done , Value ( done ) ) ;
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
if ( ! done )
2026-04-13 11:54:04 +02:00
vm . set ( m_dst_value , value ) ;
LibJS: Cache stable for-in iteration at bytecode sites
Cache the flattened enumerable key snapshot for each `for..in` site and
reuse a `PropertyNameIterator` when the receiver shape, dictionary
generation, indexed storage kind and length, prototype chain
validity, and magical-length state still match.
Handle packed indexed receivers as well as plain named-property
objects. Teach `ObjectPropertyIteratorNext` in `asmint.asm` to return
cached property values directly and to fall back to the slow iterator
logic when any guard fails.
Treat arrays' hidden non-enumerable `length` property as a visited
name for for-in shadowing, and include the receiver's magical-length
state in the cache key so arrays and plain objects do not share
snapshots.
Add `test-js` and `test-js-bytecode` coverage for mixed numeric and
named keys, packed receiver transitions, re-entry, iterator reuse, GC
retention, array length shadowing, and same-site cache reuse.
2026-04-10 00:56:49 +02:00
return { } ;
}
2026-04-13 11:54:04 +02:00
NEVER_INLINE ThrowCompletionOr<void> NewClass::execute_impl(VM& vm) const
{
    // Read the (optional) superclass value; stays empty when the class has no extends clause.
    Value super_class;
    if (m_super_class.has_value())
        super_class = vm.get(m_super_class.value());

    // Collect the computed element keys; slots without a register stay as empty values.
    Vector<Value> element_keys;
    element_keys.ensure_capacity(m_element_keys_count);
    for (size_t i = 0; i < m_element_keys_count; ++i) {
        Value element_key;
        if (m_element_keys[i].has_value())
            element_key = vm.get(m_element_keys[i].value());
        element_keys.unchecked_append(element_key);
    }

    auto& running_execution_context = vm.running_execution_context();
    auto* class_environment = &as<Environment>(vm.get(m_class_environment).as_cell());
    auto& outer_environment = running_execution_context.lexical_environment;

    // The class "blueprint" (its static description) lives on the current executable.
    auto const& blueprint = vm.current_executable().class_blueprints[m_class_blueprint_index];

    // An anonymous class expression assigned to a binding takes that binding's name;
    // a named class additionally gets an inner binding for self-reference.
    Optional<Utf16FlyString> binding_name;
    Utf16FlyString class_name;
    if (!blueprint.has_name && m_lhs_name.has_value()) {
        class_name = vm.get_identifier(m_lhs_name.value());
    } else {
        class_name = blueprint.name;
        binding_name = class_name;
    }

    auto* retval = TRY(construct_class(vm, blueprint, vm.current_executable(), class_environment, outer_environment, super_class, element_keys, binding_name, class_name));
    vm.set(dst(), retval);

    return {};
}
// 13.5.3.1 Runtime Semantics: Evaluation, https://tc39.es/ecma262/#sec-typeof-operator-runtime-semantics-evaluation
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> TypeofBinding::execute_impl(VM& vm) const
{
    // Fast path: a previously cached environment coordinate lets us read the binding
    // directly, provided no environment on the hop chain was poisoned by a direct eval.
    if (m_cache.is_valid()) [[likely]] {
        auto const* environment = vm.running_execution_context().lexical_environment.ptr();
        for (size_t i = 0; i < m_cache.hops; ++i) {
            if (environment->is_permanently_screwed_by_eval()) [[unlikely]]
                goto slow_path;
            environment = environment->outer_environment();
        }
        if (!environment->is_permanently_screwed_by_eval()) [[likely]] {
            auto value = TRY(static_cast<DeclarativeEnvironment const&>(*environment).get_binding_value_direct(vm, m_cache.index));
            vm.set(dst(), value.typeof_(vm));
            return {};
        }

    slow_path:
        // The cached coordinate is no longer trustworthy; drop it and resolve afresh.
        m_cache = {};
    }

    // 1. Let val be the result of evaluating UnaryExpression.
    auto reference = TRY(vm.resolve_binding(vm.get_identifier(m_identifier), strict()));

    // 2. If val is a Reference Record, then
    // a. If IsUnresolvableReference(val) is true, return "undefined".
    if (reference.is_unresolvable()) {
        vm.set(dst(), PrimitiveString::create(vm, "undefined"_string));
        return {};
    }

    // 3. Set val to ? GetValue(val).
    auto value = TRY(reference.get_value(vm));

    // Cache the coordinate for the fast path above when the binding resolved to one.
    if (reference.environment_coordinate().has_value())
        m_cache = reference.environment_coordinate().value();

    // 4. NOTE: This step is replaced in section B.3.6.3.
    // 5. Return a String according to Table 41.
    vm.set(dst(), value.typeof_(vm));
    return {};
}
2026-04-13 11:54:04 +02:00
void GetCompletionFields : : execute_impl ( VM & vm ) const
2025-03-31 09:32:39 +01:00
{
2026-04-13 11:54:04 +02:00
auto const & completion_cell = static_cast < CompletionCell const & > ( vm . get ( m_completion ) . as_cell ( ) ) ;
vm . set ( m_value_dst , completion_cell . completion ( ) . value ( ) ) ;
vm . set ( m_type_dst , Value ( to_underlying ( completion_cell . completion ( ) . type ( ) ) ) ) ;
2025-03-31 09:32:39 +01:00
}
2026-04-13 11:54:04 +02:00
void SetCompletionType : : execute_impl ( VM & vm ) const
2025-03-31 09:32:39 +01:00
{
2026-04-13 11:54:04 +02:00
auto & completion_cell = static_cast < CompletionCell & > ( vm . get ( m_completion ) . as_cell ( ) ) ;
2025-03-31 09:32:39 +01:00
auto completion = completion_cell . completion ( ) ;
2025-11-22 11:55:49 +01:00
completion_cell . set_completion ( Completion { m_completion_type , completion . value ( ) } ) ;
2025-03-31 09:32:39 +01:00
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CreateImmutableBinding::execute_impl(VM& vm) const
{
    // CreateImmutableBinding(N, S) on the environment held in a register.
    auto& environment = as<Environment>(vm.get(m_environment).as_cell());
    auto const& name = vm.get_identifier(m_identifier);
    return environment.create_immutable_binding(vm, name, m_strict_binding);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CreateMutableBinding::execute_impl(VM& vm) const
{
    // CreateMutableBinding(N, D) on the environment held in a register.
    auto& environment = as<Environment>(vm.get(m_environment).as_cell());
    auto const& name = vm.get_identifier(m_identifier);
    return environment.create_mutable_binding(vm, name, m_can_be_deleted);
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ToObject::execute_impl(VM& vm) const
{
    // ToObject(value); throws a TypeError for null/undefined.
    auto object = TRY(vm.get(m_value).to_object(vm));
    vm.set(m_dst, object);
    return {};
}
2026-04-13 11:54:04 +02:00
void ToBoolean : : execute_impl ( VM & vm ) const
2025-11-06 19:20:29 +00:00
{
2026-04-13 11:54:04 +02:00
vm . set ( m_dst , Value ( vm . get ( m_value ) . to_boolean ( ) ) ) ;
2025-11-06 19:20:29 +00:00
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> ToLength::execute_impl(VM& vm) const
{
    // ToLength(value): coerce to an integer length suitable for array-like indexing.
    auto length = TRY(vm.get(m_value).to_length(vm));
    vm.set(m_dst, Value { length });
    return {};
}
2026-04-13 11:54:04 +02:00
void CreateAsyncFromSyncIterator : : execute_impl ( VM & vm ) const
2025-11-06 19:20:29 +00:00
{
2026-04-13 11:54:04 +02:00
auto & realm = vm . realm ( ) ;
2025-11-06 19:20:29 +00:00
2026-04-13 11:54:04 +02:00
auto & iterator = vm . get ( m_iterator ) . as_object ( ) ;
auto next_method = vm . get ( m_next_method ) ;
auto done = vm . get ( m_done ) . as_bool ( ) ;
2025-11-06 19:20:29 +00:00
auto iterator_record = realm . create < IteratorRecord > ( iterator , next_method , done ) ;
auto async_from_sync_iterator = create_async_from_sync_iterator ( vm , iterator_record ) ;
auto iterator_object = Object : : create ( realm , nullptr ) ;
iterator_object - > define_direct_property ( vm . names . iterator , async_from_sync_iterator . iterator , default_attributes ) ;
iterator_object - > define_direct_property ( vm . names . nextMethod , async_from_sync_iterator . next_method , default_attributes ) ;
iterator_object - > define_direct_property ( vm . names . done , Value { async_from_sync_iterator . done } , default_attributes ) ;
2026-04-13 11:54:04 +02:00
vm . set ( m_dst , iterator_object ) ;
2025-11-06 19:20:29 +00:00
}
2026-04-13 11:54:04 +02:00
ThrowCompletionOr<void> CreateDataPropertyOrThrow::execute_impl(VM& vm) const
{
    // CreateDataPropertyOrThrow(O, P, V) after coercing the property register to a key.
    auto& object = vm.get(m_object).as_object();
    auto key = TRY(vm.get(m_property).to_property_key(vm));
    TRY(object.create_data_property_or_throw(key, vm.get(m_value)));
    return {};
}
2026-04-13 11:54:04 +02:00
void IsCallable : : execute_impl ( VM & vm ) const
2025-12-04 19:55:07 +01:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , Value ( vm . get ( value ( ) ) . is_function ( ) ) ) ;
2025-12-04 19:55:07 +01:00
}
2026-04-13 11:54:04 +02:00
void IsConstructor : : execute_impl ( VM & vm ) const
2025-12-04 19:55:07 +01:00
{
2026-04-13 11:54:04 +02:00
vm . set ( dst ( ) , Value ( vm . get ( value ( ) ) . is_constructor ( ) ) ) ;
2025-12-04 19:55:07 +01:00
}
2023-09-27 10:10:00 +02:00
}