//! AST → register-based bytecode compiler. //! //! Walks the AST produced by the parser and emits bytecode instructions. //! Uses a simple greedy register allocator: each new temporary gets the next //! available register, and registers are freed when no longer needed. use crate::ast::*; use crate::bytecode::*; use crate::JsError; use std::collections::HashSet; /// Compiler state for a single function scope. struct FunctionCompiler { builder: BytecodeBuilder, /// Maps local variable names to their register slots. locals: Vec, /// Next free register index. next_reg: u8, /// Stack of loop contexts for break/continue. loop_stack: Vec, /// Upvalues captured from the parent scope (used in inner functions). upvalues: Vec, /// Set of local variable names that are captured by inner functions. /// Pre-populated before compilation by scanning inner function bodies. captured_names: HashSet, } #[derive(Debug, Clone)] struct Local { name: String, reg: Reg, /// Whether this variable is captured by an inner function (stored in a cell). is_captured: bool, /// Whether this variable was declared with `const`. is_const: bool, } /// An upvalue entry tracking how this function captures an outer variable. struct UpvalueEntry { /// Name of the captured variable (for dedup during resolution). name: String, /// The resolved upvalue definition. def: UpvalueDef, /// Whether the original declaration was `const`. is_const: bool, } struct LoopCtx { /// Label, if this is a labeled loop. label: Option, /// Patch positions for break jumps. break_patches: Vec, /// Patch positions for continue jumps (patched after body compilation). continue_patches: Vec, } impl FunctionCompiler { fn new(name: String, param_count: u8) -> Self { Self { builder: BytecodeBuilder::new(name, param_count), locals: Vec::new(), next_reg: 0, loop_stack: Vec::new(), upvalues: Vec::new(), captured_names: HashSet::new(), } } /// Allocate a register, updating the high-water mark. 
fn alloc_reg(&mut self) -> Reg { let r = self.next_reg; self.next_reg = self.next_reg.checked_add(1).expect("register overflow"); if self.next_reg > self.builder.func.register_count { self.builder.func.register_count = self.next_reg; } r } /// Free the last allocated register (must be called in reverse order). fn free_reg(&mut self, r: Reg) { debug_assert_eq!( r, self.next_reg - 1, "registers must be freed in reverse order" ); self.next_reg -= 1; } /// Look up a local variable by name, returning full info. fn find_local_info(&self, name: &str) -> Option<&Local> { self.locals.iter().rev().find(|l| l.name == name) } /// Look up an upvalue by name, returning its index. fn find_upvalue(&self, name: &str) -> Option { self.upvalues .iter() .position(|u| u.name == name) .map(|i| i as u8) } /// Check if an upvalue is const. fn is_upvalue_const(&self, idx: u8) -> bool { self.upvalues .get(idx as usize) .map(|u| u.is_const) .unwrap_or(false) } /// Define a local variable with capture and const flags. fn define_local_ext(&mut self, name: &str, is_captured: bool, is_const: bool) -> Reg { let reg = self.alloc_reg(); self.locals.push(Local { name: name.to_string(), reg, is_captured, is_const, }); reg } } // ── Free variable analysis ────────────────────────────────── /// Collect identifiers referenced inside `body` that are not declared locally /// (params or variable declarations within the body). This set represents the /// "free variables" of a function body — variables that must be captured. /// This includes transitive free variables from nested functions. fn collect_free_vars(params: &[Pattern], body: &[Stmt]) -> HashSet { let mut declared = HashSet::new(); let mut referenced = HashSet::new(); // Params are local declarations. for p in params { collect_pattern_names(p, &mut declared); } // Collect declarations and references from the body. 
for stmt in body { collect_stmt_decls(stmt, &mut declared); } for stmt in body { collect_stmt_refs(stmt, &declared, &mut referenced); } // Also include transitive free variables from nested inner functions. // If an inner function references `x` and `x` is not declared in THIS scope, // then `x` is also a free variable of THIS function. let inner_caps = collect_inner_captures(body); for name in inner_caps { if !declared.contains(&name) { referenced.insert(name); } } referenced } /// Collect free variables from an arrow function body. fn collect_free_vars_arrow(params: &[Pattern], body: &ArrowBody) -> HashSet { let mut declared = HashSet::new(); let mut referenced = HashSet::new(); for p in params { collect_pattern_names(p, &mut declared); } match body { ArrowBody::Expr(expr) => { collect_expr_refs(expr, &declared, &mut referenced); } ArrowBody::Block(stmts) => { for stmt in stmts { collect_stmt_decls(stmt, &mut declared); } for stmt in stmts { collect_stmt_refs(stmt, &declared, &mut referenced); } // Transitive free variables from nested functions. let inner_caps = collect_inner_captures(stmts); for name in inner_caps { if !declared.contains(&name) { referenced.insert(name); } } } } referenced } fn collect_pattern_names(pat: &Pattern, names: &mut HashSet) { match &pat.kind { PatternKind::Identifier(name) => { names.insert(name.clone()); } PatternKind::Array { elements, rest } => { for elem in elements.iter().flatten() { collect_pattern_names(elem, names); } if let Some(rest) = rest { collect_pattern_names(rest, names); } } PatternKind::Object { properties, rest } => { for prop in properties { collect_pattern_names(&prop.value, names); } if let Some(rest) = rest { collect_pattern_names(rest, names); } } PatternKind::Assign { left, .. } => { collect_pattern_names(left, names); } } } /// Collect all variable/function declarations in a statement (not recursing into /// inner functions — those form their own scope). 
fn collect_stmt_decls(stmt: &Stmt, declared: &mut HashSet) { match &stmt.kind { StmtKind::VarDecl { declarators, .. } => { for d in declarators { collect_pattern_names(&d.pattern, declared); } } StmtKind::FunctionDecl(f) => { if let Some(name) = &f.id { declared.insert(name.clone()); } } StmtKind::ClassDecl(c) => { if let Some(name) = &c.id { declared.insert(name.clone()); } } StmtKind::Block(stmts) => { for s in stmts { collect_stmt_decls(s, declared); } } StmtKind::If { consequent, alternate, .. } => { collect_stmt_decls(consequent, declared); if let Some(alt) = alternate { collect_stmt_decls(alt, declared); } } StmtKind::While { body, .. } | StmtKind::DoWhile { body, .. } | StmtKind::Labeled { body, .. } => { collect_stmt_decls(body, declared); } StmtKind::For { init, body, .. } => { if let Some(ForInit::VarDecl { declarators, .. }) = init { for d in declarators { collect_pattern_names(&d.pattern, declared); } } collect_stmt_decls(body, declared); } StmtKind::ForIn { left, body, .. } | StmtKind::ForOf { left, body, .. } => { if let ForInOfLeft::VarDecl { pattern, .. } = left { collect_pattern_names(pattern, declared); } collect_stmt_decls(body, declared); } StmtKind::Try { block, handler, finalizer, } => { for s in block { collect_stmt_decls(s, declared); } if let Some(h) = handler { if let Some(param) = &h.param { collect_pattern_names(param, declared); } for s in &h.body { collect_stmt_decls(s, declared); } } if let Some(fin) = finalizer { for s in fin { collect_stmt_decls(s, declared); } } } StmtKind::Switch { cases, .. } => { for case in cases { for s in &case.consequent { collect_stmt_decls(s, declared); } } } _ => {} } } /// Collect all identifier references in a statement, excluding inner function scopes. /// Identifiers that are in `declared` are local and skipped. 
fn collect_stmt_refs(stmt: &Stmt, declared: &HashSet, refs: &mut HashSet) { match &stmt.kind { StmtKind::Expr(expr) => collect_expr_refs(expr, declared, refs), StmtKind::Block(stmts) => { for s in stmts { collect_stmt_refs(s, declared, refs); } } StmtKind::VarDecl { declarators, .. } => { for d in declarators { if let Some(init) = &d.init { collect_expr_refs(init, declared, refs); } } } StmtKind::FunctionDecl(_) => { // Don't recurse into inner functions — they have their own scope. } StmtKind::If { test, consequent, alternate, } => { collect_expr_refs(test, declared, refs); collect_stmt_refs(consequent, declared, refs); if let Some(alt) = alternate { collect_stmt_refs(alt, declared, refs); } } StmtKind::While { test, body } => { collect_expr_refs(test, declared, refs); collect_stmt_refs(body, declared, refs); } StmtKind::DoWhile { body, test } => { collect_stmt_refs(body, declared, refs); collect_expr_refs(test, declared, refs); } StmtKind::For { init, test, update, body, } => { if let Some(init) = init { match init { ForInit::VarDecl { declarators, .. } => { for d in declarators { if let Some(init) = &d.init { collect_expr_refs(init, declared, refs); } } } ForInit::Expr(e) => collect_expr_refs(e, declared, refs), } } if let Some(t) = test { collect_expr_refs(t, declared, refs); } if let Some(u) = update { collect_expr_refs(u, declared, refs); } collect_stmt_refs(body, declared, refs); } StmtKind::ForIn { right, body, .. } | StmtKind::ForOf { right, body, .. 
} => { collect_expr_refs(right, declared, refs); collect_stmt_refs(body, declared, refs); } StmtKind::Return(Some(expr)) | StmtKind::Throw(expr) => { collect_expr_refs(expr, declared, refs); } StmtKind::Try { block, handler, finalizer, } => { for s in block { collect_stmt_refs(s, declared, refs); } if let Some(h) = handler { for s in &h.body { collect_stmt_refs(s, declared, refs); } } if let Some(fin) = finalizer { for s in fin { collect_stmt_refs(s, declared, refs); } } } StmtKind::Switch { discriminant, cases, } => { collect_expr_refs(discriminant, declared, refs); for case in cases { if let Some(test) = &case.test { collect_expr_refs(test, declared, refs); } for s in &case.consequent { collect_stmt_refs(s, declared, refs); } } } StmtKind::Labeled { body, .. } => { collect_stmt_refs(body, declared, refs); } _ => {} } } /// Collect identifier references in an expression. Does NOT recurse into /// inner function/arrow bodies (those form their own scope). fn collect_expr_refs(expr: &Expr, declared: &HashSet, refs: &mut HashSet) { match &expr.kind { ExprKind::Identifier(name) => { if !declared.contains(name) { refs.insert(name.clone()); } } ExprKind::Binary { left, right, .. } | ExprKind::Logical { left, right, .. } | ExprKind::Assignment { left, right, .. } => { collect_expr_refs(left, declared, refs); collect_expr_refs(right, declared, refs); } ExprKind::Unary { argument, .. } | ExprKind::Update { argument, .. } => { collect_expr_refs(argument, declared, refs); } ExprKind::Conditional { test, consequent, alternate, } => { collect_expr_refs(test, declared, refs); collect_expr_refs(consequent, declared, refs); collect_expr_refs(alternate, declared, refs); } ExprKind::Call { callee, arguments } | ExprKind::New { callee, arguments } => { collect_expr_refs(callee, declared, refs); for arg in arguments { collect_expr_refs(arg, declared, refs); } } ExprKind::Member { object, property, computed, .. 
} => { collect_expr_refs(object, declared, refs); if *computed { collect_expr_refs(property, declared, refs); } } ExprKind::Array(elements) => { for elem in elements.iter().flatten() { match elem { ArrayElement::Expr(e) | ArrayElement::Spread(e) => { collect_expr_refs(e, declared, refs); } } } } ExprKind::Object(props) => { for prop in props { if let PropertyKey::Computed(e) = &prop.key { collect_expr_refs(e, declared, refs); } if let Some(val) = &prop.value { collect_expr_refs(val, declared, refs); } } } ExprKind::Sequence(exprs) => { for e in exprs { collect_expr_refs(e, declared, refs); } } ExprKind::Spread(inner) => { collect_expr_refs(inner, declared, refs); } ExprKind::TemplateLiteral { expressions, .. } => { for e in expressions { collect_expr_refs(e, declared, refs); } } // Function/Arrow/Class bodies are new scopes — don't recurse. ExprKind::Function(_) | ExprKind::Arrow { .. } | ExprKind::Class(_) => {} _ => {} } } /// Collect the free variables of ALL inner functions/arrows within a list of /// statements. Returns the set of outer-scope names they reference. fn collect_inner_captures(stmts: &[Stmt]) -> HashSet { let mut captures = HashSet::new(); for stmt in stmts { collect_inner_captures_stmt(stmt, &mut captures); } captures } fn collect_inner_captures_stmt(stmt: &Stmt, caps: &mut HashSet) { match &stmt.kind { StmtKind::FunctionDecl(f) => { let fv = collect_free_vars(&f.params, &f.body); caps.extend(fv); } StmtKind::Expr(expr) => collect_inner_captures_expr(expr, caps), StmtKind::VarDecl { declarators, .. 
} => { for d in declarators { if let Some(init) = &d.init { collect_inner_captures_expr(init, caps); } } } StmtKind::Block(stmts) => { for s in stmts { collect_inner_captures_stmt(s, caps); } } StmtKind::If { test, consequent, alternate, } => { collect_inner_captures_expr(test, caps); collect_inner_captures_stmt(consequent, caps); if let Some(alt) = alternate { collect_inner_captures_stmt(alt, caps); } } StmtKind::While { test, body } => { collect_inner_captures_expr(test, caps); collect_inner_captures_stmt(body, caps); } StmtKind::DoWhile { body, test } => { collect_inner_captures_stmt(body, caps); collect_inner_captures_expr(test, caps); } StmtKind::For { init, test, update, body, } => { if let Some(ForInit::Expr(e)) = init { collect_inner_captures_expr(e, caps); } if let Some(ForInit::VarDecl { declarators, .. }) = init { for d in declarators { if let Some(init) = &d.init { collect_inner_captures_expr(init, caps); } } } if let Some(t) = test { collect_inner_captures_expr(t, caps); } if let Some(u) = update { collect_inner_captures_expr(u, caps); } collect_inner_captures_stmt(body, caps); } StmtKind::ForIn { right, body, .. } | StmtKind::ForOf { right, body, .. } => { collect_inner_captures_expr(right, caps); collect_inner_captures_stmt(body, caps); } StmtKind::Return(Some(expr)) | StmtKind::Throw(expr) => { collect_inner_captures_expr(expr, caps); } StmtKind::Try { block, handler, finalizer, } => { for s in block { collect_inner_captures_stmt(s, caps); } if let Some(h) = handler { for s in &h.body { collect_inner_captures_stmt(s, caps); } } if let Some(fin) = finalizer { for s in fin { collect_inner_captures_stmt(s, caps); } } } StmtKind::Switch { discriminant, cases, } => { collect_inner_captures_expr(discriminant, caps); for case in cases { if let Some(test) = &case.test { collect_inner_captures_expr(test, caps); } for s in &case.consequent { collect_inner_captures_stmt(s, caps); } } } StmtKind::Labeled { body, .. 
} => { collect_inner_captures_stmt(body, caps); } _ => {} } } fn collect_inner_captures_expr(expr: &Expr, caps: &mut HashSet) { match &expr.kind { ExprKind::Function(f) => { let fv = collect_free_vars(&f.params, &f.body); caps.extend(fv); } ExprKind::Arrow { params, body, .. } => { let fv = collect_free_vars_arrow(params, body); caps.extend(fv); } ExprKind::Binary { left, right, .. } | ExprKind::Logical { left, right, .. } | ExprKind::Assignment { left, right, .. } => { collect_inner_captures_expr(left, caps); collect_inner_captures_expr(right, caps); } ExprKind::Unary { argument, .. } | ExprKind::Update { argument, .. } => { collect_inner_captures_expr(argument, caps); } ExprKind::Conditional { test, consequent, alternate, } => { collect_inner_captures_expr(test, caps); collect_inner_captures_expr(consequent, caps); collect_inner_captures_expr(alternate, caps); } ExprKind::Call { callee, arguments } | ExprKind::New { callee, arguments } => { collect_inner_captures_expr(callee, caps); for arg in arguments { collect_inner_captures_expr(arg, caps); } } ExprKind::Member { object, property, computed, .. } => { collect_inner_captures_expr(object, caps); if *computed { collect_inner_captures_expr(property, caps); } } ExprKind::Array(elements) => { for elem in elements.iter().flatten() { match elem { ArrayElement::Expr(e) | ArrayElement::Spread(e) => { collect_inner_captures_expr(e, caps); } } } } ExprKind::Object(props) => { for prop in props { if let PropertyKey::Computed(e) = &prop.key { collect_inner_captures_expr(e, caps); } if let Some(val) = &prop.value { collect_inner_captures_expr(val, caps); } } } ExprKind::Sequence(exprs) => { for e in exprs { collect_inner_captures_expr(e, caps); } } ExprKind::Spread(inner) => { collect_inner_captures_expr(inner, caps); } ExprKind::TemplateLiteral { expressions, .. } => { for e in expressions { collect_inner_captures_expr(e, caps); } } ExprKind::Class(c) => { for member in &c.body { if let ClassMemberKind::Method { value, .. 
} = &member.kind
                {
                    // Methods are functions: their free vars are captures too.
                    let fv = collect_free_vars(&value.params, &value.body);
                    caps.extend(fv);
                }
            }
        }
        _ => {}
    }
}

/// Compile a parsed program into a top-level bytecode function.
// NOTE(review): the type parameters of `Result` were stripped when this file
// was extracted (every `<…>` was lost); this is almost certainly
// `Result<X, JsError>` where `X` is whatever `BytecodeBuilder::finish`
// returns — restore from version control.
pub fn compile(program: &Program) -> Result {
    // NOTE(review): the name literal was also stripped (it read `"` + newline
    // + `"`); `"<main>"` is the conventional top-level name — confirm.
    let mut fc = FunctionCompiler::new("<main>".into(), 0);
    // Pre-scan to find which top-level locals are captured by inner functions.
    fc.captured_names = collect_inner_captures(&program.body);
    // Reserve r0 for the implicit return value.
    let result_reg = fc.alloc_reg();
    fc.builder.emit_reg(Op::LoadUndefined, result_reg);
    compile_stmts(&mut fc, &program.body, result_reg)?;
    fc.builder.emit_reg(Op::Return, result_reg);
    Ok(fc.builder.finish())
}

/// Compile a statement list into the current function, writing each
/// expression-statement's value into `result_reg`.
fn compile_stmts(
    fc: &mut FunctionCompiler,
    stmts: &[Stmt],
    result_reg: Reg,
) -> Result<(), JsError> {
    for stmt in stmts {
        compile_stmt(fc, stmt, result_reg)?;
    }
    Ok(())
}

/// Compile a single statement. `result_reg` receives the value of expression
/// statements (used for the program's completion value).
fn compile_stmt(fc: &mut FunctionCompiler, stmt: &Stmt, result_reg: Reg) -> Result<(), JsError> {
    match &stmt.kind {
        StmtKind::Expr(expr) => {
            // Expression statement: compile and store result in result_reg.
            compile_expr(fc, expr, result_reg)?;
        }
        StmtKind::Block(stmts) => {
            let saved_locals = fc.locals.len();
            let saved_next = fc.next_reg;
            compile_stmts(fc, stmts, result_reg)?;
            // Pop locals from this block.
            fc.locals.truncate(saved_locals);
            fc.next_reg = saved_next;
        }
        StmtKind::VarDecl { kind, declarators } => {
            for decl in declarators {
                compile_var_declarator(fc, decl, *kind)?;
            }
        }
        StmtKind::FunctionDecl(func_def) => {
            compile_function_decl(fc, func_def)?;
        }
        StmtKind::If {
            test,
            consequent,
            alternate,
        } => {
            compile_if(fc, test, consequent, alternate.as_deref(), result_reg)?;
        }
        StmtKind::While { test, body } => {
            compile_while(fc, test, body, None, result_reg)?;
        }
        StmtKind::DoWhile { body, test } => {
            compile_do_while(fc, body, test, None, result_reg)?;
        }
        StmtKind::For {
            init,
            test,
            update,
            body,
        } => {
            compile_for(
                fc,
                init.as_ref(),
                test.as_ref(),
                update.as_ref(),
                body,
                None,
                result_reg,
            )?;
        }
        StmtKind::ForIn { left, right, body } => {
            let saved_locals = fc.locals.len();
            let saved_next = fc.next_reg;
            // Evaluate the RHS object.
            let obj_r = fc.alloc_reg();
            compile_expr(fc, right, obj_r)?;
            // ForInInit: collect enumerable keys into an array.
let keys_r = fc.alloc_reg(); fc.builder.emit_reg_reg(Op::ForInInit, keys_r, obj_r); // obj_r is no longer needed but we don't free it (LIFO constraint). // Index register (starts at 0). let idx_r = fc.alloc_reg(); fc.builder.emit_load_int8(idx_r, 0); // Key and done registers (reused each iteration). let key_r = fc.alloc_reg(); let done_r = fc.alloc_reg(); let loop_start = fc.builder.offset(); // ForInNext: get next key or done flag. fc.builder .emit_reg4(Op::ForInNext, key_r, done_r, keys_r, idx_r); // Exit loop if done. let exit_patch = fc.builder.emit_cond_jump(Op::JumpIfTrue, done_r); // Bind the loop variable. match left { ForInOfLeft::VarDecl { kind, pattern } => { if let PatternKind::Identifier(name) = &pattern.kind { let is_captured = fc.captured_names.contains(name.as_str()); let is_const = *kind == VarKind::Const; let var_r = fc.define_local_ext(name, is_captured, is_const); if is_captured { fc.builder.emit_reg(Op::NewCell, var_r); fc.builder.emit_reg_reg(Op::CellStore, var_r, key_r); } else { fc.builder.emit_reg_reg(Op::Move, var_r, key_r); } } } ForInOfLeft::Pattern(pattern) => { if let PatternKind::Identifier(name) = &pattern.kind { if let Some(local) = fc.find_local_info(name) { let reg = local.reg; let captured = local.is_captured; if captured { fc.builder.emit_reg_reg(Op::CellStore, reg, key_r); } else { fc.builder.emit_reg_reg(Op::Move, reg, key_r); } } else if let Some(uv_idx) = fc.find_upvalue(name) { fc.builder.emit_store_upvalue(uv_idx, key_r); } else { let ni = fc.builder.add_name(name); fc.builder.emit_store_global(ni, key_r); } } } } // Compile the body. fc.loop_stack.push(LoopCtx { label: None, break_patches: Vec::new(), continue_patches: Vec::new(), }); compile_stmt(fc, body, result_reg)?; // Increment index: idx = idx + 1. // Use a temp register for the constant 1. Since we allocate it // after the loop body, we can't free it with LIFO either — the // saved_next restoration handles cleanup. 
let one_r = fc.alloc_reg(); fc.builder.emit_load_int8(one_r, 1); fc.builder.emit_reg3(Op::Add, idx_r, idx_r, one_r); // Jump back to loop start. fc.builder.emit_jump_to(loop_start); // Patch exit and continue jumps. fc.builder.patch_jump(exit_patch); let ctx = fc.loop_stack.pop().unwrap(); for patch in ctx.break_patches { fc.builder.patch_jump(patch); } for patch in ctx.continue_patches { fc.builder.patch_jump_to(patch, loop_start); } // Restore locals/regs — frees all temporaries at once. fc.locals.truncate(saved_locals); fc.next_reg = saved_next; } StmtKind::ForOf { left, right, body, is_await: _, } => { let saved_locals = fc.locals.len(); let saved_next = fc.next_reg; // Evaluate the iterable. let iterable_r = fc.alloc_reg(); compile_expr(fc, right, iterable_r)?; // Get the iterator: call iterable[@@iterator](). let iter_method_r = fc.alloc_reg(); let sym_iter_ni = fc.builder.add_name("@@iterator"); fc.builder .emit_get_prop_name(iter_method_r, iterable_r, sym_iter_ni); // Set `this` = iterable for the call. let this_ni = fc.builder.add_name("this"); fc.builder.emit_store_global(this_ni, iterable_r); // Call [@@iterator]() with 0 args. let iterator_r = fc.alloc_reg(); let args_start = fc.next_reg; fc.builder .emit_call(iterator_r, iter_method_r, args_start, 0); // Temp registers for next method, result, done, value. let next_method_r = fc.alloc_reg(); let next_ni = fc.builder.add_name("next"); fc.builder .emit_get_prop_name(next_method_r, iterator_r, next_ni); let result_obj_r = fc.alloc_reg(); let done_r = fc.alloc_reg(); let value_r = fc.alloc_reg(); // Loop start. let loop_start = fc.builder.offset(); // Set `this` = iterator for the .next() call. fc.builder.emit_store_global(this_ni, iterator_r); // Call iterator.next(). fc.builder .emit_call(result_obj_r, next_method_r, args_start, 0); // Extract done and value. 
let done_ni = fc.builder.add_name("done"); let value_ni = fc.builder.add_name("value"); fc.builder.emit_get_prop_name(done_r, result_obj_r, done_ni); // Exit if done. let exit_patch = fc.builder.emit_cond_jump(Op::JumpIfTrue, done_r); // Extract value. fc.builder .emit_get_prop_name(value_r, result_obj_r, value_ni); // Bind the loop variable. match left { ForInOfLeft::VarDecl { kind, pattern } => match &pattern.kind { PatternKind::Identifier(name) => { let is_captured = fc.captured_names.contains(name.as_str()); let is_const = *kind == VarKind::Const; let var_r = fc.define_local_ext(name, is_captured, is_const); if is_captured { fc.builder.emit_reg(Op::NewCell, var_r); fc.builder.emit_reg_reg(Op::CellStore, var_r, value_r); } else { fc.builder.emit_reg_reg(Op::Move, var_r, value_r); } } _ => { // Destructuring pattern in for...of. compile_destructuring_pattern(fc, pattern, value_r)?; } }, ForInOfLeft::Pattern(pattern) => match &pattern.kind { PatternKind::Identifier(name) => { if let Some(local) = fc.find_local_info(name) { let reg = local.reg; let captured = local.is_captured; if captured { fc.builder.emit_reg_reg(Op::CellStore, reg, value_r); } else { fc.builder.emit_reg_reg(Op::Move, reg, value_r); } } else if let Some(uv_idx) = fc.find_upvalue(name) { fc.builder.emit_store_upvalue(uv_idx, value_r); } else { let ni = fc.builder.add_name(name); fc.builder.emit_store_global(ni, value_r); } } _ => { compile_destructuring_pattern(fc, pattern, value_r)?; } }, } // Push loop context for break/continue. fc.loop_stack.push(LoopCtx { label: None, break_patches: Vec::new(), continue_patches: Vec::new(), }); // Compile body. compile_stmt(fc, body, result_reg)?; // Jump back to loop start. fc.builder.emit_jump_to(loop_start); // Patch exit. 
fc.builder.patch_jump(exit_patch); let ctx = fc.loop_stack.pop().unwrap(); for patch in ctx.break_patches { fc.builder.patch_jump(patch); } for patch in ctx.continue_patches { fc.builder.patch_jump_to(patch, loop_start); } // Restore locals/regs. fc.locals.truncate(saved_locals); fc.next_reg = saved_next; } StmtKind::Return(expr) => { let ret_reg = fc.alloc_reg(); if let Some(e) = expr { compile_expr(fc, e, ret_reg)?; } else { fc.builder.emit_reg(Op::LoadUndefined, ret_reg); } fc.builder.emit_reg(Op::Return, ret_reg); fc.free_reg(ret_reg); } StmtKind::Throw(expr) => { let tmp = fc.alloc_reg(); compile_expr(fc, expr, tmp)?; fc.builder.emit_reg(Op::Throw, tmp); fc.free_reg(tmp); } StmtKind::Break(label) => { // Find the matching loop context. let idx = find_loop_ctx(&fc.loop_stack, label.as_deref()) .ok_or_else(|| JsError::SyntaxError("break outside of loop".into()))?; let patch = fc.builder.emit_jump(Op::Jump); fc.loop_stack[idx].break_patches.push(patch); } StmtKind::Continue(label) => { let idx = find_loop_ctx(&fc.loop_stack, label.as_deref()) .ok_or_else(|| JsError::SyntaxError("continue outside of loop".into()))?; let patch = fc.builder.emit_jump(Op::Jump); fc.loop_stack[idx].continue_patches.push(patch); } StmtKind::Labeled { label, body } => { // If body is a loop, propagate the label. 
match &body.kind { StmtKind::While { test, body: inner } => { compile_while(fc, test, inner, Some(label.clone()), result_reg)?; } StmtKind::DoWhile { body: inner, test } => { compile_do_while(fc, inner, test, Some(label.clone()), result_reg)?; } StmtKind::For { init, test, update, body: inner, } => { compile_for( fc, init.as_ref(), test.as_ref(), update.as_ref(), inner, Some(label.clone()), result_reg, )?; } _ => { compile_stmt(fc, body, result_reg)?; } } } StmtKind::Switch { discriminant, cases, } => { compile_switch(fc, discriminant, cases, result_reg)?; } StmtKind::Try { block, handler, finalizer, } => { if let Some(catch) = handler { // The catch register will receive the exception value. Use the // current next_reg so it doesn't conflict with temporaries // allocated inside the try block. let saved_next = fc.next_reg; let catch_reg = fc.alloc_reg(); // Immediately "release" it so the try block can reuse registers // from this point. We remember catch_reg for PushExceptionHandler. fc.next_reg = saved_next; // Emit PushExceptionHandler with placeholder offset to catch block. let catch_patch = fc.builder.emit_push_exception_handler(catch_reg); let locals_len = fc.locals.len(); // Compile the try block. compile_stmts(fc, block, result_reg)?; // If we reach here, no exception was thrown. Pop handler and // jump past the catch block. fc.builder.emit_pop_exception_handler(); let end_patch = fc.builder.emit_jump(Op::Jump); // Reset register state for catch block — locals declared in // the try block are out of scope. fc.locals.truncate(locals_len); fc.next_reg = saved_next; // Patch the exception handler to jump here (catch block start). fc.builder.patch_jump(catch_patch); // Bind the catch parameter if present. 
if let Some(param) = &catch.param { if let PatternKind::Identifier(name) = ¶m.kind { let is_captured = fc.captured_names.contains(name.as_str()); let local = fc.define_local_ext(name, is_captured, false); if is_captured { fc.builder.emit_reg(Op::NewCell, local); fc.builder.emit_reg_reg(Op::CellStore, local, catch_reg); } else { fc.builder.emit_reg_reg(Op::Move, local, catch_reg); } } } // Compile the catch body. compile_stmts(fc, &catch.body, result_reg)?; // End of catch — restore state. fc.locals.truncate(locals_len); fc.next_reg = saved_next; // Jump target from the try block. fc.builder.patch_jump(end_patch); } else { // No catch handler: just compile the try block. compile_stmts(fc, block, result_reg)?; } // Compile the finally block (always runs after try or catch). if let Some(fin) = finalizer { compile_stmts(fc, fin, result_reg)?; } } StmtKind::Empty | StmtKind::Debugger => { // No-op. } StmtKind::With { object, body } => { // Compile `with` as: evaluate object (discard), then run body. // Proper `with` scope requires VM support. let tmp = fc.alloc_reg(); compile_expr(fc, object, tmp)?; fc.free_reg(tmp); compile_stmt(fc, body, result_reg)?; } StmtKind::Import { .. } => { // Module imports are resolved before execution; no bytecode needed. 
} StmtKind::Export(export) => { compile_export(fc, export, result_reg)?; } StmtKind::ClassDecl(class_def) => { compile_class_decl(fc, class_def)?; } } Ok(()) } // ── Variable declarations ─────────────────────────────────── fn compile_var_declarator( fc: &mut FunctionCompiler, decl: &VarDeclarator, kind: VarKind, ) -> Result<(), JsError> { match &decl.pattern.kind { PatternKind::Identifier(name) => { let is_const = kind == VarKind::Const; let is_captured = fc.captured_names.contains(name.as_str()); if is_const && decl.init.is_none() { return Err(JsError::SyntaxError( "Missing initializer in const declaration".into(), )); } let reg = fc.define_local_ext(name, is_captured, is_const); if is_captured { // Allocate a cell for this variable. fc.builder.emit_reg(Op::NewCell, reg); if let Some(init) = &decl.init { let tmp = fc.alloc_reg(); compile_expr(fc, init, tmp)?; fc.builder.emit_reg_reg(Op::CellStore, reg, tmp); fc.free_reg(tmp); } // No init => cell stays undefined (already the default). } else if let Some(init) = &decl.init { compile_expr(fc, init, reg)?; } else { fc.builder.emit_reg(Op::LoadUndefined, reg); } } _ => { // Destructuring: evaluate init, then bind patterns. // Note: don't free tmp — destructuring pattern allocates permanent // local registers above it. The tmp register slot is reused via // next_reg restoration by the parent scope. 
let tmp = fc.alloc_reg(); if let Some(init) = &decl.init { compile_expr(fc, init, tmp)?; } else { fc.builder.emit_reg(Op::LoadUndefined, tmp); } compile_destructuring_pattern(fc, &decl.pattern, tmp)?; } } Ok(()) } fn compile_destructuring_pattern( fc: &mut FunctionCompiler, pattern: &Pattern, src: Reg, ) -> Result<(), JsError> { match &pattern.kind { PatternKind::Identifier(name) => { let is_captured = fc.captured_names.contains(name.as_str()); let reg = fc.define_local_ext(name, is_captured, false); if is_captured { fc.builder.emit_reg(Op::NewCell, reg); fc.builder.emit_reg_reg(Op::CellStore, reg, src); } else { fc.builder.emit_reg_reg(Op::Move, reg, src); } } PatternKind::Object { properties, rest } => { // For each property, extract the value and bind it. // We use a single temp register that we reuse for each property // by resetting next_reg after each binding. for prop in properties { let key_name = match &prop.key { PropertyKey::Identifier(s) | PropertyKey::String(s) => s.clone(), PropertyKey::Computed(expr) => { let saved = fc.next_reg; let key_reg = fc.alloc_reg(); compile_expr(fc, expr, key_reg)?; let val_reg = fc.alloc_reg(); fc.builder.emit_reg3(Op::GetProperty, val_reg, src, key_reg); compile_destructuring_pattern(fc, &prop.value, val_reg)?; // Temp regs are buried; just let them be. let _ = saved; continue; } PropertyKey::Number(n) => { if n.fract() == 0.0 && n.abs() < 1e15 { format!("{}", *n as i64) } else { format!("{n}") } } }; // For simple identifier patterns, load property directly into // the target register to avoid LIFO register allocation issues. 
if let PatternKind::Identifier(name) = &prop.value.kind { let is_captured = fc.captured_names.contains(name.as_str()); let reg = fc.define_local_ext(name, is_captured, false); let name_idx = fc.builder.add_name(&key_name); if is_captured { let tmp = fc.alloc_reg(); fc.builder.emit_get_prop_name(tmp, src, name_idx); fc.builder.emit_reg(Op::NewCell, reg); fc.builder.emit_reg_reg(Op::CellStore, reg, tmp); fc.free_reg(tmp); } else { fc.builder.emit_get_prop_name(reg, src, name_idx); } } else { // Complex inner pattern (nested, default, etc.) // Allocate temp, extract value, then recurse. // Temp register won't be freed (LIFO constraint with inner locals). let val_reg = fc.alloc_reg(); let name_idx = fc.builder.add_name(&key_name); fc.builder.emit_get_prop_name(val_reg, src, name_idx); compile_destructuring_pattern(fc, &prop.value, val_reg)?; } } // Handle rest: collect remaining own enumerable properties. if let Some(rest_pat) = rest { // Collect extracted key names for exclusion. let extracted_keys: Vec = properties .iter() .filter_map(|prop| match &prop.key { PropertyKey::Identifier(s) | PropertyKey::String(s) => Some(s.clone()), _ => None, }) .collect(); let rest_obj = fc.alloc_reg(); fc.builder.emit_reg(Op::CreateObject, rest_obj); let keys_r = fc.alloc_reg(); fc.builder.emit_reg_reg(Op::ForInInit, keys_r, src); let idx_r = fc.alloc_reg(); fc.builder.emit_load_int8(idx_r, 0); let key_r = fc.alloc_reg(); let done_r = fc.alloc_reg(); let loop_start = fc.builder.offset(); fc.builder .emit_reg4(Op::ForInNext, key_r, done_r, keys_r, idx_r); let exit_patch = fc.builder.emit_cond_jump(Op::JumpIfTrue, done_r); let mut skip_patches = Vec::new(); for excluded in &extracted_keys { let excluded_r = fc.alloc_reg(); let ci = fc.builder.add_constant(Constant::String(excluded.clone())); fc.builder.emit_reg_u16(Op::LoadConst, excluded_r, ci); let cmp_r = fc.alloc_reg(); fc.builder.emit_reg3(Op::StrictEq, cmp_r, key_r, excluded_r); let skip = 
fc.builder.emit_cond_jump(Op::JumpIfTrue, cmp_r); skip_patches.push(skip); fc.free_reg(cmp_r); fc.free_reg(excluded_r); } let val_r = fc.alloc_reg(); fc.builder.emit_reg3(Op::GetProperty, val_r, src, key_r); fc.builder .emit_reg3(Op::SetProperty, rest_obj, key_r, val_r); fc.free_reg(val_r); for patch in skip_patches { fc.builder.patch_jump(patch); } let one_r = fc.alloc_reg(); fc.builder.emit_load_int8(one_r, 1); fc.builder.emit_reg3(Op::Add, idx_r, idx_r, one_r); fc.free_reg(one_r); fc.builder.emit_jump_to(loop_start); fc.builder.patch_jump(exit_patch); fc.free_reg(done_r); fc.free_reg(key_r); fc.free_reg(idx_r); fc.free_reg(keys_r); compile_destructuring_pattern(fc, rest_pat, rest_obj)?; } } PatternKind::Array { elements, rest } => { for (i, elem) in elements.iter().enumerate() { if let Some(pat) = elem { // For simple identifier patterns, load directly into local. if let PatternKind::Identifier(name) = &pat.kind { let is_captured = fc.captured_names.contains(name.as_str()); let reg = fc.define_local_ext(name, is_captured, false); let idx_reg = fc.alloc_reg(); if i <= 127 { fc.builder.emit_load_int8(idx_reg, i as i8); } else { let ci = fc.builder.add_constant(Constant::Number(i as f64)); fc.builder.emit_reg_u16(Op::LoadConst, idx_reg, ci); } if is_captured { let tmp = fc.alloc_reg(); fc.builder.emit_reg3(Op::GetProperty, tmp, src, idx_reg); fc.builder.emit_reg(Op::NewCell, reg); fc.builder.emit_reg_reg(Op::CellStore, reg, tmp); fc.free_reg(tmp); } else { fc.builder.emit_reg3(Op::GetProperty, reg, src, idx_reg); } fc.free_reg(idx_reg); } else { // Complex inner pattern (nested, default, etc.) 
                // Extract the element into a temp and recurse on the pattern.
                let idx_reg = fc.alloc_reg();
                if i <= 127 {
                    fc.builder.emit_load_int8(idx_reg, i as i8);
                } else {
                    let ci = fc.builder.add_constant(Constant::Number(i as f64));
                    fc.builder.emit_reg_u16(Op::LoadConst, idx_reg, ci);
                }
                let val_reg = fc.alloc_reg();
                fc.builder.emit_reg3(Op::GetProperty, val_reg, src, idx_reg);
                compile_destructuring_pattern(fc, pat, val_reg)?;
                // Don't free val_reg/idx_reg — inner pattern may have
                // allocated locals above them.
            }
        }
    }
    // Handle rest element: ...rest = src.slice(elements.len())
    if let Some(rest_pat) = rest {
        let slice_fn_r = fc.alloc_reg();
        let slice_ni = fc.builder.add_name("slice");
        fc.builder.emit_get_prop_name(slice_fn_r, src, slice_ni);
        // `this` is passed via a global slot — the VM's calling convention
        // here (see the method-call path in compile_expr).
        let this_ni = fc.builder.add_name("this");
        fc.builder.emit_store_global(this_ni, src);
        let start_r = fc.alloc_reg();
        let elem_count = elements.len();
        if elem_count <= 127 {
            fc.builder.emit_load_int8(start_r, elem_count as i8);
        } else {
            let ci = fc.builder.add_constant(Constant::Number(elem_count as f64));
            fc.builder.emit_reg_u16(Op::LoadConst, start_r, ci);
        }
        let rest_val = fc.alloc_reg();
        fc.builder.emit_call(rest_val, slice_fn_r, start_r, 1);
        compile_destructuring_pattern(fc, rest_pat, rest_val)?;
        // Don't free temps — rest pattern allocates locals.
    }
}
PatternKind::Assign { left, right } => {
    // Default value: if src is undefined, use default.
    let val_reg = fc.alloc_reg();
    fc.builder.emit_reg_reg(Op::Move, val_reg, src);
    // Check if undefined, if so use default.
    let check_reg = fc.alloc_reg();
    let undef_reg = fc.alloc_reg();
    fc.builder.emit_reg(Op::LoadUndefined, undef_reg);
    fc.builder
        .emit_reg3(Op::StrictEq, check_reg, val_reg, undef_reg);
    fc.free_reg(undef_reg);
    let patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, check_reg);
    fc.free_reg(check_reg);
    // Is undefined → evaluate default.
    compile_expr(fc, right, val_reg)?;
    fc.builder.patch_jump(patch);
    compile_destructuring_pattern(fc, left, val_reg)?;
    // Don't free val_reg — inner pattern may have allocated locals.
}
}
Ok(())
}

// ── Function declarations ───────────────────────────────────

/// Compile a `function f() {}` declaration: builds the inner function,
/// binds it to a new local (boxed in a cell if captured), and also stores
/// it as a global so recursive/late references via LoadGlobal resolve.
fn compile_function_decl(fc: &mut FunctionCompiler, func_def: &FunctionDef) -> Result<(), JsError> {
    let name = func_def.id.clone().unwrap_or_default();
    let inner = compile_function_body_with_captures(fc, func_def)?;
    let func_idx = fc.builder.add_function(inner);
    let is_captured = fc.captured_names.contains(name.as_str());
    let reg = fc.define_local_ext(&name, is_captured, false);
    if is_captured {
        // Create a cell, then create the closure into a temp, then store into cell.
        fc.builder.emit_reg(Op::NewCell, reg);
        let tmp = fc.alloc_reg();
        fc.builder.emit_reg_u16(Op::CreateClosure, tmp, func_idx);
        fc.builder.emit_reg_reg(Op::CellStore, reg, tmp);
        // Also store as global.
        if !name.is_empty() {
            let name_idx = fc.builder.add_name(&name);
            fc.builder.emit_store_global(name_idx, tmp);
        }
        fc.free_reg(tmp);
    } else {
        fc.builder.emit_reg_u16(Op::CreateClosure, reg, func_idx);
        // Also store as global so inner/recursive calls via LoadGlobal can find it.
        if !name.is_empty() {
            let name_idx = fc.builder.add_name(&name);
            fc.builder.emit_store_global(name_idx, reg);
        }
    }
    Ok(())
}

/// Compile a function body, resolving upvalue captures from the parent scope.
fn compile_function_body_with_captures(
    parent: &mut FunctionCompiler,
    func_def: &FunctionDef,
) -> Result {
    // 1. Collect free variables of this inner function.
    let free_vars = collect_free_vars(&func_def.params, &func_def.body);
    // 2. Build upvalue list by resolving free vars against the parent scope.
    // NOTE(review): `free_vars` is a HashSet, so upvalue ORDER is not
    // deterministic across runs. It is internally consistent (defs and
    // indices are built from the same iteration), but emitted bytecode may
    // differ between runs — confirm whether reproducible output matters.
    let mut upvalue_entries = Vec::new();
    for name in &free_vars {
        if let Some(local) = parent.find_local_info(name) {
            let reg = local.reg;
            let is_const = local.is_const;
            // Mark the parent's local as captured (if not already).
            // We need to update the parent's local, so find the index and mutate.
            if let Some(l) = parent.locals.iter_mut().rev().find(|l| l.name == *name) {
                l.is_captured = true;
            }
            // Direct capture: the variable lives in a register of the
            // immediately-enclosing function.
            upvalue_entries.push(UpvalueEntry {
                name: name.clone(),
                def: UpvalueDef {
                    is_local: true,
                    index: reg,
                },
                is_const,
            });
        } else if let Some(parent_uv_idx) = parent.find_upvalue(name) {
            // Transitive capture: the parent captures it from its own parent.
            let is_const = parent.is_upvalue_const(parent_uv_idx);
            upvalue_entries.push(UpvalueEntry {
                name: name.clone(),
                def: UpvalueDef {
                    is_local: false,
                    index: parent_uv_idx,
                },
                is_const,
            });
        }
        // If not found in parent or parent's upvalues, it must be a global — no upvalue needed.
    }
    // 3. Compile the inner function with its own scope.
    let mut inner = compile_function_body_inner(func_def, &upvalue_entries)?;
    // 4. Attach upvalue definitions to the compiled function.
    inner.upvalue_defs = upvalue_entries.iter().map(|e| e.def.clone()).collect();
    Ok(inner)
}

/// Core function body compilation. The `upvalue_entries` tell this function which
/// outer variables it can access via LoadUpvalue/StoreUpvalue.
fn compile_function_body_inner(
    func_def: &FunctionDef,
    upvalue_entries: &[UpvalueEntry],
) -> Result {
    let name = func_def.id.clone().unwrap_or_default();
    // Parameter count is clamped to the u8 opcode operand range.
    let param_count = func_def.params.len().min(255) as u8;
    let mut inner = FunctionCompiler::new(name, param_count);
    // Copy upvalue entries into the inner compiler so it can resolve references.
    for entry in upvalue_entries {
        inner.upvalues.push(UpvalueEntry {
            name: entry.name.clone(),
            def: entry.def.clone(),
            is_const: entry.is_const,
        });
    }
    // Pre-scan to find which of this function's locals are captured by ITS inner functions.
    let inner_caps = collect_inner_captures(&func_def.body);
    inner.captured_names = inner_caps;
    // Allocate registers for parameters.
    // Parameters occupy registers 0..param_count in declaration order;
    // non-identifier (destructuring) params still burn a register slot here.
    for p in &func_def.params {
        if let PatternKind::Identifier(pname) = &p.kind {
            let is_captured = inner.captured_names.contains(pname.as_str());
            inner.define_local_ext(pname, is_captured, false);
        } else {
            let _ = inner.alloc_reg();
        }
    }
    // Box captured parameters into cells.
    for p in &func_def.params {
        if let PatternKind::Identifier(pname) = &p.kind {
            if let Some(local) = inner.find_local_info(pname) {
                if local.is_captured {
                    let reg = local.reg;
                    // Move param value to temp, allocate cell, store value into cell.
                    let tmp = inner.alloc_reg();
                    inner.builder.emit_reg_reg(Op::Move, tmp, reg);
                    inner.builder.emit_reg(Op::NewCell, reg);
                    inner.builder.emit_reg_reg(Op::CellStore, reg, tmp);
                    inner.free_reg(tmp);
                }
            }
        }
    }
    // Result register for the function body.
    let result_reg = inner.alloc_reg();
    inner.builder.emit_reg(Op::LoadUndefined, result_reg);
    compile_stmts(&mut inner, &func_def.body, result_reg)?;
    // Implicit return undefined.
    inner.builder.emit_reg(Op::Return, result_reg);
    let mut func = inner.builder.finish();
    func.is_generator = func_def.is_generator;
    Ok(func)
}

// ── Class declarations ──────────────────────────────────────

/// Compile a `class C { ... }` declaration: binds the constructor closure to
/// a new local and attaches non-constructor methods as named properties on
/// the constructor object.
fn compile_class_decl(fc: &mut FunctionCompiler, class_def: &ClassDef) -> Result<(), JsError> {
    let name = class_def.id.clone().unwrap_or_default();
    let is_captured = fc.captured_names.contains(name.as_str());
    let reg = fc.define_local_ext(&name, is_captured, false);
    // For captured classes, build the constructor into a temp register so we can
    // set prototype methods on it before wrapping it in a cell.
    let ctor_reg = if is_captured { fc.alloc_reg() } else { reg };
    // Find constructor or create empty one.
    let ctor = class_def.body.iter().find(|m| {
        matches!(
            &m.kind,
            ClassMemberKind::Method {
                kind: MethodKind::Constructor,
                ..
            }
        )
    });
    if let Some(member) = ctor {
        if let ClassMemberKind::Method {
            value,
            ..
        } = &member.kind
        {
            // Explicit constructor: compile it like any nested function.
            let inner = compile_function_body_with_captures(fc, value)?;
            let func_idx = fc.builder.add_function(inner);
            fc.builder
                .emit_reg_u16(Op::CreateClosure, ctor_reg, func_idx);
        }
    } else {
        // No constructor: create a minimal function that returns undefined.
        let mut empty = BytecodeBuilder::new(name.clone(), 0);
        let r = 0u8;
        empty.func.register_count = 1;
        empty.emit_reg(Op::LoadUndefined, r);
        empty.emit_reg(Op::Return, r);
        let func_idx = fc.builder.add_function(empty.finish());
        fc.builder
            .emit_reg_u16(Op::CreateClosure, ctor_reg, func_idx);
    }
    // Compile methods: set them as properties on the constructor's prototype.
    // This is simplified — real class compilation needs prototype chain setup.
    for member in &class_def.body {
        match &member.kind {
            ClassMemberKind::Method {
                key,
                value,
                kind,
                is_static: _,
                computed: _,
            } => {
                if matches!(kind, MethodKind::Constructor) {
                    continue;
                }
                // Only identifier/string keys are supported; computed or
                // numeric method keys are silently skipped here.
                let method_name = match key {
                    PropertyKey::Identifier(s) | PropertyKey::String(s) => s.clone(),
                    _ => continue,
                };
                let inner = compile_function_body_with_captures(fc, value)?;
                let func_idx = fc.builder.add_function(inner);
                let method_reg = fc.alloc_reg();
                fc.builder
                    .emit_reg_u16(Op::CreateClosure, method_reg, func_idx);
                let name_idx = fc.builder.add_name(&method_name);
                fc.builder
                    .emit_set_prop_name(ctor_reg, name_idx, method_reg);
                fc.free_reg(method_reg);
            }
            ClassMemberKind::Property { .. } => {
                // Class fields are set in constructor; skip here.
            }
        }
    }
    if is_captured {
        // Wrap the fully-built constructor in a cell for inner closures.
        fc.builder.emit_reg(Op::NewCell, reg);
        fc.builder.emit_reg_reg(Op::CellStore, reg, ctor_reg);
        fc.free_reg(ctor_reg);
    }
    Ok(())
}

// ── Export ───────────────────────────────────────────────────

/// Compile an export declaration. Only the wrapped declaration/expression
/// produces bytecode; named and wildcard re-exports are module metadata.
fn compile_export(
    fc: &mut FunctionCompiler,
    export: &ExportDecl,
    result_reg: Reg,
) -> Result<(), JsError> {
    match export {
        ExportDecl::Declaration(stmt) => {
            compile_stmt(fc, stmt, result_reg)?;
        }
        ExportDecl::Default(expr) => {
            compile_expr(fc, expr, result_reg)?;
        }
        ExportDecl::Named {
            ..
        }
        | ExportDecl::AllFrom(_) => {
            // Named re-exports are module-level; no bytecode needed.
        }
    }
    Ok(())
}

// ── Control flow ────────────────────────────────────────────

/// Compile `if (test) consequent [else alternate]`.
/// Layout: test → JumpIfFalse(else) → consequent → [Jump(end)] → else → end.
fn compile_if(
    fc: &mut FunctionCompiler,
    test: &Expr,
    consequent: &Stmt,
    alternate: Option<&Stmt>,
    result_reg: Reg,
) -> Result<(), JsError> {
    let cond = fc.alloc_reg();
    compile_expr(fc, test, cond)?;
    let else_patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond);
    fc.free_reg(cond);
    compile_stmt(fc, consequent, result_reg)?;
    if let Some(alt) = alternate {
        let end_patch = fc.builder.emit_jump(Op::Jump);
        fc.builder.patch_jump(else_patch);
        compile_stmt(fc, alt, result_reg)?;
        fc.builder.patch_jump(end_patch);
    } else {
        fc.builder.patch_jump(else_patch);
    }
    Ok(())
}

/// Compile `while (test) body`. `break` patches to the loop end;
/// `continue` patches back to the condition check at `loop_start`.
fn compile_while(
    fc: &mut FunctionCompiler,
    test: &Expr,
    body: &Stmt,
    label: Option,
    result_reg: Reg,
) -> Result<(), JsError> {
    let loop_start = fc.builder.offset();
    let cond = fc.alloc_reg();
    compile_expr(fc, test, cond)?;
    let exit_patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond);
    fc.free_reg(cond);
    fc.loop_stack.push(LoopCtx {
        label,
        break_patches: Vec::new(),
        continue_patches: Vec::new(),
    });
    compile_stmt(fc, body, result_reg)?;
    fc.builder.emit_jump_to(loop_start);
    fc.builder.patch_jump(exit_patch);
    let ctx = fc.loop_stack.pop().unwrap();
    for patch in ctx.break_patches {
        fc.builder.patch_jump(patch);
    }
    // In a while loop, continue jumps back to the condition check (loop_start).
    for patch in ctx.continue_patches {
        fc.builder.patch_jump_to(patch, loop_start);
    }
    Ok(())
}

/// Compile `do body while (test)`. The body runs at least once;
/// `continue` targets the condition check, not the body start.
fn compile_do_while(
    fc: &mut FunctionCompiler,
    body: &Stmt,
    test: &Expr,
    label: Option,
    result_reg: Reg,
) -> Result<(), JsError> {
    let loop_start = fc.builder.offset();
    fc.loop_stack.push(LoopCtx {
        label,
        break_patches: Vec::new(),
        continue_patches: Vec::new(),
    });
    compile_stmt(fc, body, result_reg)?;
    // continue in do-while should jump here (the condition check).
    let cond_start = fc.builder.offset();
    let cond = fc.alloc_reg();
    compile_expr(fc, test, cond)?;
    // Truthy condition → loop again from the top of the body.
    fc.builder
        .emit_cond_jump_to(Op::JumpIfTrue, cond, loop_start);
    fc.free_reg(cond);
    let ctx = fc.loop_stack.pop().unwrap();
    for patch in ctx.break_patches {
        fc.builder.patch_jump(patch);
    }
    for patch in ctx.continue_patches {
        fc.builder.patch_jump_to(patch, cond_start);
    }
    Ok(())
}

/// Compile a classic `for (init; test; update) body` loop.
/// Locals declared in `init` are scoped to the loop: both the locals list
/// and the register high-water mark are restored on exit.
fn compile_for(
    fc: &mut FunctionCompiler,
    init: Option<&ForInit>,
    test: Option<&Expr>,
    update: Option<&Expr>,
    body: &Stmt,
    label: Option,
    result_reg: Reg,
) -> Result<(), JsError> {
    let saved_locals = fc.locals.len();
    let saved_next = fc.next_reg;
    // Init.
    if let Some(init) = init {
        match init {
            ForInit::VarDecl { kind, declarators } => {
                for decl in declarators {
                    compile_var_declarator(fc, decl, *kind)?;
                }
            }
            ForInit::Expr(expr) => {
                let tmp = fc.alloc_reg();
                compile_expr(fc, expr, tmp)?;
                fc.free_reg(tmp);
            }
        }
    }
    let loop_start = fc.builder.offset();
    // Test. Absent test (e.g. `for(;;)`) means no exit jump is emitted.
    let exit_patch = if let Some(test) = test {
        let cond = fc.alloc_reg();
        compile_expr(fc, test, cond)?;
        let patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond);
        fc.free_reg(cond);
        Some(patch)
    } else {
        None
    };
    fc.loop_stack.push(LoopCtx {
        label,
        break_patches: Vec::new(),
        continue_patches: Vec::new(),
    });
    compile_stmt(fc, body, result_reg)?;
    // continue in a for-loop should jump here (the update expression).
    let continue_target = fc.builder.offset();
    // Update.
    if let Some(update) = update {
        let tmp = fc.alloc_reg();
        compile_expr(fc, update, tmp)?;
        fc.free_reg(tmp);
    }
    fc.builder.emit_jump_to(loop_start);
    if let Some(patch) = exit_patch {
        fc.builder.patch_jump(patch);
    }
    let ctx = fc.loop_stack.pop().unwrap();
    for patch in ctx.break_patches {
        fc.builder.patch_jump(patch);
    }
    for patch in ctx.continue_patches {
        fc.builder.patch_jump_to(patch, continue_target);
    }
    // Pop loop-scoped locals and reclaim their registers.
    fc.locals.truncate(saved_locals);
    fc.next_reg = saved_next;
    Ok(())
}

/// Compile `switch (discriminant) { case …: … }` in two phases:
/// 1. a chain of StrictEq tests that jump to the matching body,
/// 2. all case bodies emitted in order (preserving fall-through).
/// `break` is handled via an unlabeled LoopCtx.
fn compile_switch(
    fc: &mut FunctionCompiler,
    discriminant: &Expr,
    cases: &[SwitchCase],
    result_reg: Reg,
) -> Result<(), JsError> {
    let disc_reg = fc.alloc_reg();
    compile_expr(fc, discriminant, disc_reg)?;
    // Use a loop context for break statements.
    fc.loop_stack.push(LoopCtx {
        label: None,
        break_patches: Vec::new(),
        continue_patches: Vec::new(),
    });
    // Phase 1: emit comparison jumps for each non-default case.
    // Store (case_index, patch_position) for each case with a test.
    let mut case_jump_patches: Vec<(usize, usize)> = Vec::new();
    let mut default_index: Option = None;
    for (i, case) in cases.iter().enumerate() {
        if let Some(test) = &case.test {
            let test_reg = fc.alloc_reg();
            compile_expr(fc, test, test_reg)?;
            let cmp_reg = fc.alloc_reg();
            fc.builder
                .emit_reg3(Op::StrictEq, cmp_reg, disc_reg, test_reg);
            let patch = fc.builder.emit_cond_jump(Op::JumpIfTrue, cmp_reg);
            fc.free_reg(cmp_reg);
            fc.free_reg(test_reg);
            case_jump_patches.push((i, patch));
        } else {
            default_index = Some(i);
        }
    }
    // After all comparisons: jump to default body or end.
    let fallthrough_patch = fc.builder.emit_jump(Op::Jump);
    // Phase 2: emit case bodies in order (fall-through semantics).
    let mut body_offsets: Vec<(usize, usize)> = Vec::new();
    for (i, case) in cases.iter().enumerate() {
        body_offsets.push((i, fc.builder.offset()));
        compile_stmts(fc, &case.consequent, result_reg)?;
    }
    let end_offset = fc.builder.offset();
    // Patch case test jumps to their respective body offsets.
    for (case_idx, patch) in &case_jump_patches {
        let body_offset = body_offsets
            .iter()
            .find(|(i, _)| i == case_idx)
            .map(|(_, off)| *off)
            .unwrap();
        fc.builder.patch_jump_to(*patch, body_offset);
    }
    // Patch fallthrough: jump to default body if present, otherwise to end.
    if let Some(def_idx) = default_index {
        let default_offset = body_offsets
            .iter()
            .find(|(i, _)| *i == def_idx)
            .map(|(_, off)| *off)
            .unwrap();
        fc.builder.patch_jump_to(fallthrough_patch, default_offset);
    } else {
        fc.builder.patch_jump_to(fallthrough_patch, end_offset);
    }
    fc.free_reg(disc_reg);
    let ctx = fc.loop_stack.pop().unwrap();
    for patch in ctx.break_patches {
        fc.builder.patch_jump(patch);
    }
    Ok(())
}

/// Find the loop context a `break`/`continue` targets: the innermost loop
/// when unlabeled, or the nearest enclosing loop with a matching label.
fn find_loop_ctx(stack: &[LoopCtx], label: Option<&str>) -> Option {
    if let Some(label) = label {
        stack
            .iter()
            .rposition(|ctx| ctx.label.as_deref() == Some(label))
    } else {
        if stack.is_empty() {
            None
        } else {
            Some(stack.len() - 1)
        }
    }
}

// ── Expressions ─────────────────────────────────────────────

/// Compile an expression, leaving its value in register `dst`.
/// Temporaries are allocated above `dst` and freed in LIFO order.
fn compile_expr(fc: &mut FunctionCompiler, expr: &Expr, dst: Reg) -> Result<(), JsError> {
    match &expr.kind {
        ExprKind::Number(n) => {
            // Optimize small integers.
            // Numbers that round-trip exactly through i8 use the one-byte
            // LoadInt8 form; everything else goes through the constant pool.
            let int_val = *n as i64;
            if int_val as f64 == *n && (-128..=127).contains(&int_val) {
                fc.builder.emit_load_int8(dst, int_val as i8);
            } else {
                let ci = fc.builder.add_constant(Constant::Number(*n));
                fc.builder.emit_reg_u16(Op::LoadConst, dst, ci);
            }
        }
        ExprKind::String(s) => {
            let ci = fc.builder.add_constant(Constant::String(s.clone()));
            fc.builder.emit_reg_u16(Op::LoadConst, dst, ci);
        }
        ExprKind::Bool(true) => {
            fc.builder.emit_reg(Op::LoadTrue, dst);
        }
        ExprKind::Bool(false) => {
            fc.builder.emit_reg(Op::LoadFalse, dst);
        }
        ExprKind::Null => {
            fc.builder.emit_reg(Op::LoadNull, dst);
        }
        ExprKind::Identifier(name) => {
            // Resolution order: local → upvalue → global.
            if let Some(local) = fc.find_local_info(name) {
                let reg = local.reg;
                let captured = local.is_captured;
                if captured {
                    // Captured locals live behind a cell indirection.
                    fc.builder.emit_reg_reg(Op::CellLoad, dst, reg);
                } else if reg != dst {
                    fc.builder.emit_reg_reg(Op::Move, dst, reg);
                }
            } else if let Some(uv_idx) = fc.find_upvalue(name) {
                fc.builder.emit_load_upvalue(dst, uv_idx);
            } else {
                // Global lookup.
                let ni = fc.builder.add_name(name);
                fc.builder.emit_load_global(dst, ni);
            }
        }
        ExprKind::This => {
            // `this` is loaded as a global named "this" (the VM binds it).
            let ni = fc.builder.add_name("this");
            fc.builder.emit_load_global(dst, ni);
        }
        ExprKind::Binary { op, left, right } => {
            let lhs = fc.alloc_reg();
            compile_expr(fc, left, lhs)?;
            let rhs = fc.alloc_reg();
            compile_expr(fc, right, rhs)?;
            let bytecode_op = binary_op_to_opcode(*op);
            fc.builder.emit_reg3(bytecode_op, dst, lhs, rhs);
            fc.free_reg(rhs);
            fc.free_reg(lhs);
        }
        ExprKind::Unary { op, argument } => {
            if *op == UnaryOp::Delete {
                // Handle delete specially: need object + key form for member expressions.
                match &argument.kind {
                    ExprKind::Member {
                        object,
                        property,
                        computed,
                    } => {
                        // `delete obj.key` / `delete obj[key]`: evaluate the
                        // object and key, then emit the Delete opcode.
                        let obj_r = fc.alloc_reg();
                        compile_expr(fc, object, obj_r)?;
                        let key_r = fc.alloc_reg();
                        if *computed {
                            compile_expr(fc, property, key_r)?;
                        } else if let ExprKind::Identifier(name) = &property.kind {
                            // Static `.name` becomes a string-constant key.
                            let ci = fc.builder.add_constant(Constant::String(name.clone()));
                            fc.builder.emit_reg_u16(Op::LoadConst, key_r, ci);
                        } else {
                            compile_expr(fc, property, key_r)?;
                        }
                        fc.builder.emit_reg3(Op::Delete, dst, obj_r, key_r);
                        fc.free_reg(key_r);
                        fc.free_reg(obj_r);
                    }
                    _ => {
                        // `delete x` on a simple identifier: always true in non-strict mode.
                        fc.builder.emit_reg(Op::LoadTrue, dst);
                    }
                }
            } else {
                let src = fc.alloc_reg();
                compile_expr(fc, argument, src)?;
                match op {
                    UnaryOp::Minus => fc.builder.emit_reg_reg(Op::Neg, dst, src),
                    UnaryOp::Plus => {
                        // NOTE(review): unary `+` is emitted as a plain Move,
                        // skipping ToNumber coercion — confirm the VM coerces
                        // elsewhere or this diverges from JS semantics.
                        fc.builder.emit_reg_reg(Op::Move, dst, src);
                    }
                    UnaryOp::Not => fc.builder.emit_reg_reg(Op::LogicalNot, dst, src),
                    UnaryOp::BitwiseNot => fc.builder.emit_reg_reg(Op::BitNot, dst, src),
                    UnaryOp::Typeof => fc.builder.emit_reg_reg(Op::TypeOf, dst, src),
                    UnaryOp::Void => fc.builder.emit_reg_reg(Op::Void, dst, src),
                    UnaryOp::Delete => unreachable!(),
                }
                fc.free_reg(src);
            }
        }
        ExprKind::Update {
            op,
            argument,
            prefix,
        } => {
            // Get current value.
            compile_expr(fc, argument, dst)?;
            let one = fc.alloc_reg();
            fc.builder.emit_load_int8(one, 1);
            if *prefix {
                // ++x / --x: modify first, return modified.
                match op {
                    UpdateOp::Increment => fc.builder.emit_reg3(Op::Add, dst, dst, one),
                    UpdateOp::Decrement => fc.builder.emit_reg3(Op::Sub, dst, dst, one),
                }
                // Store back.
                compile_store(fc, argument, dst)?;
            } else {
                // x++ / x--: return original, then modify.
                // `dst` keeps the pre-update value; the incremented copy in
                // `tmp` is what gets stored back.
                let tmp = fc.alloc_reg();
                fc.builder.emit_reg_reg(Op::Move, tmp, dst);
                match op {
                    UpdateOp::Increment => fc.builder.emit_reg3(Op::Add, tmp, tmp, one),
                    UpdateOp::Decrement => fc.builder.emit_reg3(Op::Sub, tmp, tmp, one),
                }
                compile_store(fc, argument, tmp)?;
                fc.free_reg(tmp);
            }
            fc.free_reg(one);
        }
        ExprKind::Logical { op, left, right } => {
            compile_expr(fc, left, dst)?;
            match op {
                LogicalOp::And => {
                    // Short-circuit: if falsy, skip right.
                    let skip = fc.builder.emit_cond_jump(Op::JumpIfFalse, dst);
                    compile_expr(fc, right, dst)?;
                    fc.builder.patch_jump(skip);
                }
                LogicalOp::Or => {
                    let skip = fc.builder.emit_cond_jump(Op::JumpIfTrue, dst);
                    compile_expr(fc, right, dst)?;
                    fc.builder.patch_jump(skip);
                }
                LogicalOp::Nullish => {
                    let skip = fc.builder.emit_cond_jump(Op::JumpIfNullish, dst);
                    // For `a ?? b`:
                    //   1. evaluate a → dst
                    //   2. JumpIfNullish dst → evaluate_right
                    //   3. Jump → end            (a was non-nullish; keep it)
                    //   evaluate_right: evaluate b → dst
                    //   end:
                    // JumpIfNullish fires when dst IS null/undefined, so the
                    // unconditional Jump below is what skips the right-hand
                    // side in the non-nullish case.
                    let end_patch = fc.builder.emit_jump(Op::Jump);
                    fc.builder.patch_jump(skip); // nullish → evaluate right
                    compile_expr(fc, right, dst)?;
                    fc.builder.patch_jump(end_patch);
                }
            }
        }
        ExprKind::Assignment { op, left, right } => {
            if *op == AssignOp::Assign {
                compile_expr(fc, right, dst)?;
                compile_store(fc, left, dst)?;
            } else {
                // Compound assignment: load current, operate, store.
                compile_expr(fc, left, dst)?;
                let rhs = fc.alloc_reg();
                compile_expr(fc, right, rhs)?;
                let arith_op = compound_assign_op(*op);
                fc.builder.emit_reg3(arith_op, dst, dst, rhs);
                fc.free_reg(rhs);
                compile_store(fc, left, dst)?;
            }
        }
        ExprKind::Conditional {
            test,
            consequent,
            alternate,
        } => {
            // Ternary: same shape as if/else, but both arms target `dst`.
            let cond = fc.alloc_reg();
            compile_expr(fc, test, cond)?;
            let else_patch = fc.builder.emit_cond_jump(Op::JumpIfFalse, cond);
            fc.free_reg(cond);
            compile_expr(fc, consequent, dst)?;
            let end_patch = fc.builder.emit_jump(Op::Jump);
            fc.builder.patch_jump(else_patch);
            compile_expr(fc, alternate, dst)?;
            fc.builder.patch_jump(end_patch);
        }
        ExprKind::Call { callee, arguments } => {
            // Detect method calls (obj.method()) to set `this`.
            if let ExprKind::Member {
                object,
                property,
                computed,
            } = &callee.kind
            {
                // Layout: [obj_reg] [func_reg] [arg0] [arg1] ...
                // We keep obj_reg alive so we can set `this` before the call.
                let obj_reg = fc.alloc_reg();
                compile_expr(fc, object, obj_reg)?;
                let func_reg = fc.alloc_reg();
                if !computed {
                    if let ExprKind::Identifier(name) = &property.kind {
                        let ni = fc.builder.add_name(name);
                        fc.builder.emit_get_prop_name(func_reg, obj_reg, ni);
                    } else {
                        let key_reg = fc.alloc_reg();
                        compile_expr(fc, property, key_reg)?;
                        fc.builder
                            .emit_reg3(Op::GetProperty, func_reg, obj_reg, key_reg);
                        fc.free_reg(key_reg);
                    }
                } else {
                    let key_reg = fc.alloc_reg();
                    compile_expr(fc, property, key_reg)?;
                    fc.builder
                        .emit_reg3(Op::GetProperty, func_reg, obj_reg, key_reg);
                    fc.free_reg(key_reg);
                }
                // Set `this` to the receiver object before calling.
                // `this` is communicated through a global slot named "this";
                // the VM reads it at call time.
                let this_ni = fc.builder.add_name("this");
                fc.builder.emit_store_global(this_ni, obj_reg);
                // Arguments must occupy consecutive registers starting at
                // args_start; count clamps to the u8 operand range.
                let args_start = fc.next_reg;
                let arg_count = arguments.len().min(255) as u8;
                for arg in arguments {
                    let arg_reg = fc.alloc_reg();
                    compile_expr(fc, arg, arg_reg)?;
                }
                fc.builder.emit_call(dst, func_reg, args_start, arg_count);
                // Free in LIFO order: args, func_reg, obj_reg.
                for _ in 0..arg_count {
                    fc.next_reg -= 1;
                }
                fc.free_reg(func_reg);
                fc.free_reg(obj_reg);
            } else {
                // Plain call: callee evaluated into func_reg, args packed
                // contiguously above it.
                let func_reg = fc.alloc_reg();
                compile_expr(fc, callee, func_reg)?;
                let args_start = fc.next_reg;
                let arg_count = arguments.len().min(255) as u8;
                for arg in arguments {
                    let arg_reg = fc.alloc_reg();
                    compile_expr(fc, arg, arg_reg)?;
                }
                fc.builder.emit_call(dst, func_reg, args_start, arg_count);
                for _ in 0..arg_count {
                    fc.next_reg -= 1;
                }
                fc.free_reg(func_reg);
            }
        }
        ExprKind::New { callee, arguments } => {
            // For now, compile like a regular call. The VM will differentiate
            // based on the `New` vs `Call` distinction (TODO: add NewCall opcode).
            let func_reg = fc.alloc_reg();
            compile_expr(fc, callee, func_reg)?;
            let args_start = fc.next_reg;
            let arg_count = arguments.len().min(255) as u8;
            for arg in arguments {
                let arg_reg = fc.alloc_reg();
                compile_expr(fc, arg, arg_reg)?;
            }
            fc.builder.emit_call(dst, func_reg, args_start, arg_count);
            for _ in 0..arg_count {
                fc.next_reg -= 1;
            }
            fc.free_reg(func_reg);
        }
        ExprKind::Member {
            object,
            property,
            computed,
        } => {
            let obj_reg = fc.alloc_reg();
            compile_expr(fc, object, obj_reg)?;
            if !computed {
                // Static member: obj.prop → GetPropertyByName.
                if let ExprKind::Identifier(name) = &property.kind {
                    let ni = fc.builder.add_name(name);
                    fc.builder.emit_get_prop_name(dst, obj_reg, ni);
                } else {
                    let key_reg = fc.alloc_reg();
                    compile_expr(fc, property, key_reg)?;
                    fc.builder.emit_reg3(Op::GetProperty, dst, obj_reg, key_reg);
                    fc.free_reg(key_reg);
                }
            } else {
                // Computed member: obj[expr].
                let key_reg = fc.alloc_reg();
                compile_expr(fc, property, key_reg)?;
                fc.builder.emit_reg3(Op::GetProperty, dst, obj_reg, key_reg);
                fc.free_reg(key_reg);
            }
            fc.free_reg(obj_reg);
        }
        ExprKind::Array(elements) => {
            let has_spread = elements
                .iter()
                .any(|e| matches!(e, Some(ArrayElement::Spread(_))));
            fc.builder.emit_reg(Op::CreateArray, dst);
            if has_spread {
                // When spreads are present, we track the index dynamically.
                // For each normal element, push at current length.
                // For spread elements, use the Spread opcode.
                // NOTE(review): holes (`None` elements) are flattened away
                // here, so indices shift — confirm whether sparse arrays
                // must be preserved.
                for el in elements.iter().flatten() {
                    match el {
                        ArrayElement::Expr(e) => {
                            let val_reg = fc.alloc_reg();
                            compile_expr(fc, e, val_reg)?;
                            // Get current length as index.
                            let idx_reg = fc.alloc_reg();
                            let len_ni = fc.builder.add_name("length");
                            fc.builder.emit_get_prop_name(idx_reg, dst, len_ni);
                            fc.builder.emit_reg3(Op::SetProperty, dst, idx_reg, val_reg);
                            // Increment length.
                            let one_r = fc.alloc_reg();
                            fc.builder.emit_load_int8(one_r, 1);
                            fc.builder.emit_reg3(Op::Add, idx_reg, idx_reg, one_r);
                            fc.builder.emit_set_prop_name(dst, len_ni, idx_reg);
                            fc.free_reg(one_r);
                            fc.free_reg(idx_reg);
                            fc.free_reg(val_reg);
                        }
                        ArrayElement::Spread(e) => {
                            let spread_src = fc.alloc_reg();
                            compile_expr(fc, e, spread_src)?;
                            fc.builder.emit_spread(dst, spread_src);
                            fc.free_reg(spread_src);
                        }
                    }
                }
            } else {
                // No spreads: use simple indexed assignment.
                for (i, elem) in elements.iter().enumerate() {
                    if let Some(ArrayElement::Expr(e)) = elem {
                        let val_reg = fc.alloc_reg();
                        compile_expr(fc, e, val_reg)?;
                        let idx_reg = fc.alloc_reg();
                        if i <= 127 {
                            fc.builder.emit_load_int8(idx_reg, i as i8);
                        } else {
                            let ci = fc.builder.add_constant(Constant::Number(i as f64));
                            fc.builder.emit_reg_u16(Op::LoadConst, idx_reg, ci);
                        }
                        fc.builder.emit_reg3(Op::SetProperty, dst, idx_reg, val_reg);
                        fc.free_reg(idx_reg);
                        fc.free_reg(val_reg);
                    }
                }
                // Set length.
                // Explicitly written so trailing holes still count toward it.
                if !elements.is_empty() {
                    let len_name = fc.builder.add_name("length");
                    let len_reg = fc.alloc_reg();
                    if elements.len() <= 127 {
                        fc.builder.emit_load_int8(len_reg, elements.len() as i8);
                    } else {
                        let ci = fc
                            .builder
                            .add_constant(Constant::Number(elements.len() as f64));
                        fc.builder.emit_reg_u16(Op::LoadConst, len_reg, ci);
                    }
                    fc.builder.emit_set_prop_name(dst, len_name, len_reg);
                    fc.free_reg(len_reg);
                }
            }
        }
        ExprKind::Object(properties) => {
            fc.builder.emit_reg(Op::CreateObject, dst);
            for prop in properties {
                let val_reg = fc.alloc_reg();
                if let Some(value) = &prop.value {
                    compile_expr(fc, value, val_reg)?;
                } else {
                    // Shorthand: `{ x }` means `{ x: x }`.
                    // Same local → upvalue → global resolution as Identifier.
                    if let PropertyKey::Identifier(name) = &prop.key {
                        if let Some(local) = fc.find_local_info(name) {
                            let reg = local.reg;
                            let captured = local.is_captured;
                            if captured {
                                fc.builder.emit_reg_reg(Op::CellLoad, val_reg, reg);
                            } else {
                                fc.builder.emit_reg_reg(Op::Move, val_reg, reg);
                            }
                        } else if let Some(uv_idx) = fc.find_upvalue(name) {
                            fc.builder.emit_load_upvalue(val_reg, uv_idx);
                        } else {
                            let ni = fc.builder.add_name(name);
                            fc.builder.emit_load_global(val_reg, ni);
                        }
                    } else {
                        fc.builder.emit_reg(Op::LoadUndefined, val_reg);
                    }
                }
                match &prop.key {
                    PropertyKey::Identifier(name) | PropertyKey::String(name) => {
                        let ni = fc.builder.add_name(name);
                        fc.builder.emit_set_prop_name(dst, ni, val_reg);
                    }
                    PropertyKey::Number(n) => {
                        let key_reg = fc.alloc_reg();
                        let ci = fc.builder.add_constant(Constant::Number(*n));
                        fc.builder.emit_reg_u16(Op::LoadConst, key_reg, ci);
                        fc.builder.emit_reg3(Op::SetProperty, dst, key_reg, val_reg);
                        fc.free_reg(key_reg);
                    }
                    PropertyKey::Computed(expr) => {
                        let key_reg = fc.alloc_reg();
                        compile_expr(fc, expr, key_reg)?;
                        fc.builder.emit_reg3(Op::SetProperty, dst, key_reg, val_reg);
                        fc.free_reg(key_reg);
                    }
                }
                fc.free_reg(val_reg);
            }
        }
        ExprKind::Function(func_def) => {
            // Function expression: compile the body with captures and wrap
            // it in a closure.
            let inner = compile_function_body_with_captures(fc, func_def)?;
            let func_idx = fc.builder.add_function(inner);
            fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx);
        }
        ExprKind::Arrow {
            params,
            body,
            is_async: _,
        } => {
            // Arrow functions inline the same capture-resolution logic as
            // compile_function_body_with_captures, but against `fc` directly
            // (an arrow has no separate FunctionDef).
            // Collect free variables from the arrow body.
            let free_vars = collect_free_vars_arrow(params, body);
            // Resolve upvalues against the parent scope.
            let mut upvalue_entries = Vec::new();
            for name in &free_vars {
                if let Some(local) = fc.find_local_info(name) {
                    let reg = local.reg;
                    let is_const = local.is_const;
                    // Mark the captured local so it gets boxed in a cell.
                    if let Some(l) = fc.locals.iter_mut().rev().find(|l| l.name == *name) {
                        l.is_captured = true;
                    }
                    upvalue_entries.push(UpvalueEntry {
                        name: name.clone(),
                        def: UpvalueDef {
                            is_local: true,
                            index: reg,
                        },
                        is_const,
                    });
                } else if let Some(parent_uv_idx) = fc.find_upvalue(name) {
                    // Transitive capture through this function's own upvalues.
                    let is_const = fc.is_upvalue_const(parent_uv_idx);
                    upvalue_entries.push(UpvalueEntry {
                        name: name.clone(),
                        def: UpvalueDef {
                            is_local: false,
                            index: parent_uv_idx,
                        },
                        is_const,
                    });
                }
            }
            let param_count = params.len().min(255) as u8;
            let mut inner = FunctionCompiler::new("".into(), param_count);
            // Copy upvalue entries.
            for entry in &upvalue_entries {
                inner.upvalues.push(UpvalueEntry {
                    name: entry.name.clone(),
                    def: entry.def.clone(),
                    is_const: entry.is_const,
                });
            }
            // Pre-scan for inner captures within the arrow body.
            // Expression bodies cannot declare locals, so nothing to scan.
            match body {
                ArrowBody::Expr(_) => {}
                ArrowBody::Block(stmts) => {
                    inner.captured_names = collect_inner_captures(stmts);
                }
            }
            // Parameters occupy the first registers, in order.
            for p in params {
                if let PatternKind::Identifier(pname) = &p.kind {
                    let is_captured = inner.captured_names.contains(pname.as_str());
                    inner.define_local_ext(pname, is_captured, false);
                } else {
                    let _ = inner.alloc_reg();
                }
            }
            // Box captured parameters.
            for p in params {
                if let PatternKind::Identifier(pname) = &p.kind {
                    if let Some(local) = inner.find_local_info(pname) {
                        if local.is_captured {
                            let reg = local.reg;
                            let tmp = inner.alloc_reg();
                            inner.builder.emit_reg_reg(Op::Move, tmp, reg);
                            inner.builder.emit_reg(Op::NewCell, reg);
                            inner.builder.emit_reg_reg(Op::CellStore, reg, tmp);
                            inner.free_reg(tmp);
                        }
                    }
                }
            }
            let result = inner.alloc_reg();
            match body {
                ArrowBody::Expr(e) => {
                    // `x => expr` implicitly returns the expression value.
                    compile_expr(&mut inner, e, result)?;
                }
                ArrowBody::Block(stmts) => {
                    inner.builder.emit_reg(Op::LoadUndefined, result);
                    compile_stmts(&mut inner, stmts, result)?;
                }
            }
            inner.builder.emit_reg(Op::Return, result);
            let mut inner_func = inner.builder.finish();
            inner_func.upvalue_defs = upvalue_entries.iter().map(|e| e.def.clone()).collect();
            let func_idx = fc.builder.add_function(inner_func);
            fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx);
        }
        ExprKind::Class(class_def) => {
            // Class expression: compile like class decl but into dst.
            let name = class_def.id.clone().unwrap_or_default();
            // Find constructor.
            let ctor = class_def.body.iter().find(|m| {
                matches!(
                    &m.kind,
                    ClassMemberKind::Method {
                        kind: MethodKind::Constructor,
                        ..
                    }
                )
            });
            if let Some(member) = ctor {
                if let ClassMemberKind::Method { value, .. } = &member.kind {
                    let inner = compile_function_body_with_captures(fc, value)?;
                    let func_idx = fc.builder.add_function(inner);
                    fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx);
                }
            } else {
                // No constructor: synthesize one that returns undefined.
                let mut empty = BytecodeBuilder::new(name, 0);
                let r = 0u8;
                empty.func.register_count = 1;
                empty.emit_reg(Op::LoadUndefined, r);
                empty.emit_reg(Op::Return, r);
                let func_idx = fc.builder.add_function(empty.finish());
                fc.builder.emit_reg_u16(Op::CreateClosure, dst, func_idx);
            }
            // Compile methods as properties on the constructor.
            for member in &class_def.body {
                match &member.kind {
                    ClassMemberKind::Method {
                        key,
                        value,
                        kind,
                        is_static: _,
                        computed: _,
                    } => {
                        if matches!(kind, MethodKind::Constructor) {
                            continue;
                        }
                        let method_name = match key {
                            PropertyKey::Identifier(s) | PropertyKey::String(s) => s.clone(),
                            _ => continue,
                        };
                        let inner = compile_function_body_with_captures(fc, value)?;
                        let func_idx = fc.builder.add_function(inner);
                        let method_reg = fc.alloc_reg();
                        fc.builder
                            .emit_reg_u16(Op::CreateClosure, method_reg, func_idx);
                        let name_idx = fc.builder.add_name(&method_name);
                        fc.builder.emit_set_prop_name(dst, name_idx, method_reg);
                        fc.free_reg(method_reg);
                    }
                    ClassMemberKind::Property { .. } => {}
                }
            }
        }
        ExprKind::Sequence(exprs) => {
            // Comma operator: every expression targets dst; the last wins.
            for e in exprs {
                compile_expr(fc, e, dst)?;
            }
        }
        ExprKind::Spread(inner) => {
            compile_expr(fc, inner, dst)?;
        }
        ExprKind::TemplateLiteral {
            quasis,
            expressions,
        } => {
            // Compile template literal as string concatenation.
            if quasis.len() == 1 && expressions.is_empty() {
                // No interpolation: a single constant string.
                let ci = fc.builder.add_constant(Constant::String(quasis[0].clone()));
                fc.builder.emit_reg_u16(Op::LoadConst, dst, ci);
            } else {
                // Start with first quasi.
                let ci = fc.builder.add_constant(Constant::String(quasis[0].clone()));
                fc.builder.emit_reg_u16(Op::LoadConst, dst, ci);
                // Alternate: dst = dst + expr_i + quasi_{i+1}, via Add.
                for (i, expr) in expressions.iter().enumerate() {
                    let tmp = fc.alloc_reg();
                    compile_expr(fc, expr, tmp)?;
                    fc.builder.emit_reg3(Op::Add, dst, dst, tmp);
                    fc.free_reg(tmp);
                    if i + 1 < quasis.len() {
                        let qi = fc
                            .builder
                            .add_constant(Constant::String(quasis[i + 1].clone()));
                        let tmp2 = fc.alloc_reg();
                        fc.builder.emit_reg_u16(Op::LoadConst, tmp2, qi);
                        fc.builder.emit_reg3(Op::Add, dst, dst, tmp2);
                        fc.free_reg(tmp2);
                    }
                }
            }
        }
        ExprKind::TaggedTemplate { tag, quasi } => {
            // Simplified: call tag with the template as argument.
let func_reg = fc.alloc_reg(); compile_expr(fc, tag, func_reg)?; let arg_reg = fc.alloc_reg(); compile_expr(fc, quasi, arg_reg)?; fc.builder.emit_call(dst, func_reg, arg_reg, 1); fc.free_reg(arg_reg); fc.free_reg(func_reg); } ExprKind::Yield { argument, delegate } => { if *delegate { // yield* expr: iterate the sub-iterator and yield each value. let iter_r = fc.alloc_reg(); if let Some(arg) = argument { compile_expr(fc, arg, iter_r)?; } else { fc.builder.emit_reg(Op::LoadUndefined, iter_r); } // Get iterator from the expression. let iter_method_r = fc.alloc_reg(); let sym_iter_ni = fc.builder.add_name("@@iterator"); fc.builder .emit_get_prop_name(iter_method_r, iter_r, sym_iter_ni); let this_ni = fc.builder.add_name("this"); fc.builder.emit_store_global(this_ni, iter_r); let iterator_r = fc.alloc_reg(); let args_start = fc.next_reg; fc.builder .emit_call(iterator_r, iter_method_r, args_start, 0); // Get next method. let next_r = fc.alloc_reg(); let next_ni = fc.builder.add_name("next"); fc.builder.emit_get_prop_name(next_r, iterator_r, next_ni); let result_r = fc.alloc_reg(); let done_r = fc.alloc_reg(); let val_r = fc.alloc_reg(); let loop_start = fc.builder.offset(); // Call next(). fc.builder.emit_store_global(this_ni, iterator_r); fc.builder.emit_call(result_r, next_r, args_start, 0); let done_ni = fc.builder.add_name("done"); let value_ni = fc.builder.add_name("value"); fc.builder.emit_get_prop_name(done_r, result_r, done_ni); let exit_patch = fc.builder.emit_cond_jump(Op::JumpIfTrue, done_r); fc.builder.emit_get_prop_name(val_r, result_r, value_ni); // Yield the value. fc.builder.emit_yield(dst, val_r); // Jump back. fc.builder.emit_jump_to(loop_start); // Exit: the last result's value is the yield* expression value. 
fc.builder.patch_jump(exit_patch); fc.builder.emit_get_prop_name(dst, result_r, value_ni); fc.free_reg(val_r); fc.free_reg(done_r); fc.free_reg(result_r); fc.free_reg(next_r); fc.free_reg(iterator_r); fc.free_reg(iter_method_r); fc.free_reg(iter_r); } else { // yield expr: emit Yield opcode. let src = fc.alloc_reg(); if let Some(arg) = argument { compile_expr(fc, arg, src)?; } else { fc.builder.emit_reg(Op::LoadUndefined, src); } fc.builder.emit_yield(dst, src); fc.free_reg(src); } } ExprKind::Await(inner) => { // Await is a VM-level operation; compile the argument. compile_expr(fc, inner, dst)?; } ExprKind::RegExp { pattern, flags } => { // Compile as: RegExp(pattern, flags) — a call to the global constructor. let func_reg = fc.alloc_reg(); let name_idx = fc.builder.add_name("RegExp"); fc.builder.emit_reg_u16(Op::LoadGlobal, func_reg, name_idx); let args_start = fc.next_reg; let pat_reg = fc.alloc_reg(); let pat_idx = fc.builder.add_constant(Constant::String(pattern.clone())); fc.builder.emit_reg_u16(Op::LoadConst, pat_reg, pat_idx); let flags_reg = fc.alloc_reg(); let flags_idx = fc.builder.add_constant(Constant::String(flags.clone())); fc.builder.emit_reg_u16(Op::LoadConst, flags_reg, flags_idx); fc.builder.emit_call(dst, func_reg, args_start, 2); fc.next_reg -= 1; // flags_reg fc.next_reg -= 1; // pat_reg fc.free_reg(func_reg); } ExprKind::OptionalChain { base } => { compile_expr(fc, base, dst)?; } } Ok(()) } /// Compile a store operation (assignment target). 
fn compile_store(fc: &mut FunctionCompiler, target: &Expr, src: Reg) -> Result<(), JsError> {
    match &target.kind {
        // Plain identifier target: resolve local → upvalue → global, in that order.
        ExprKind::Identifier(name) => {
            if let Some(local) = fc.find_local_info(name) {
                // `const` locals reject assignment at compile time.
                if local.is_const {
                    return Err(JsError::SyntaxError(format!(
                        "Assignment to constant variable '{name}'"
                    )));
                }
                let reg = local.reg;
                let captured = local.is_captured;
                if captured {
                    // Captured locals live in a heap cell; write through the cell
                    // so inner closures observe the update.
                    fc.builder.emit_reg_reg(Op::CellStore, reg, src);
                } else if reg != src {
                    // Plain register-resident local: a Move suffices. Skip the
                    // Move entirely when src already is the local's register.
                    fc.builder.emit_reg_reg(Op::Move, reg, src);
                }
            } else if let Some(uv_idx) = fc.find_upvalue(name) {
                // Upvalues carry their own const flag (resolved from the parent scope).
                if fc.is_upvalue_const(uv_idx) {
                    return Err(JsError::SyntaxError(format!(
                        "Assignment to constant variable '{name}'"
                    )));
                }
                fc.builder.emit_store_upvalue(uv_idx, src);
            } else {
                // Unresolved name: store into the global object by name index.
                let ni = fc.builder.add_name(name);
                fc.builder.emit_store_global(ni, src);
            }
        }
        // Member target: `obj.prop = src` or `obj[key] = src`.
        ExprKind::Member {
            object,
            property,
            computed,
        } => {
            let obj_reg = fc.alloc_reg();
            compile_expr(fc, object, obj_reg)?;
            if !computed {
                if let ExprKind::Identifier(name) = &property.kind {
                    // Static property name: use the name-indexed store opcode.
                    let ni = fc.builder.add_name(name);
                    fc.builder.emit_set_prop_name(obj_reg, ni, src);
                } else {
                    // NOTE(review): non-computed member with a non-identifier key
                    // falls back to the generic SetProperty path — identical code
                    // to the computed branch below.
                    let key_reg = fc.alloc_reg();
                    compile_expr(fc, property, key_reg)?;
                    fc.builder.emit_reg3(Op::SetProperty, obj_reg, key_reg, src);
                    fc.free_reg(key_reg);
                }
            } else {
                // Computed key: evaluate the key expression into a temp register.
                let key_reg = fc.alloc_reg();
                compile_expr(fc, property, key_reg)?;
                fc.builder.emit_reg3(Op::SetProperty, obj_reg, key_reg, src);
                fc.free_reg(key_reg);
            }
            // Registers are freed in reverse allocation order (see free_reg's
            // debug assertion): key_reg first (inside the branches), then obj_reg.
            fc.free_reg(obj_reg);
        }
        _ => {
            // Other assignment targets (destructuring) not handled here.
            // NOTE(review): this silently ignores unsupported targets rather
            // than erroring — presumably destructuring stores are handled by
            // the declaration path elsewhere; verify against callers.
        }
    }
    Ok(())
}

/// Map an AST binary operator to its bytecode opcode (a direct 1:1 mapping;
/// logical `&&`/`||` are not here — they compile to short-circuit jumps).
fn binary_op_to_opcode(op: BinaryOp) -> Op {
    match op {
        BinaryOp::Add => Op::Add,
        BinaryOp::Sub => Op::Sub,
        BinaryOp::Mul => Op::Mul,
        BinaryOp::Div => Op::Div,
        BinaryOp::Rem => Op::Rem,
        BinaryOp::Exp => Op::Exp,
        BinaryOp::Eq => Op::Eq,
        BinaryOp::Ne => Op::NotEq,
        BinaryOp::StrictEq => Op::StrictEq,
        BinaryOp::StrictNe => Op::StrictNotEq,
        BinaryOp::Lt => Op::LessThan,
        BinaryOp::Le => Op::LessEq,
        BinaryOp::Gt => Op::GreaterThan,
        BinaryOp::Ge => Op::GreaterEq,
        BinaryOp::Shl => Op::ShiftLeft,
        BinaryOp::Shr => Op::ShiftRight,
        BinaryOp::Ushr => Op::UShiftRight,
        BinaryOp::BitAnd => Op::BitAnd,
        BinaryOp::BitOr => Op::BitOr,
        BinaryOp::BitXor => Op::BitXor,
        BinaryOp::In => Op::In,
        BinaryOp::Instanceof => Op::InstanceOf,
    }
}

/// Map a compound-assignment operator (`x op= y`) to the binary opcode used to
/// combine the old value with the right-hand side.
///
/// NOTE(review): the logical/nullish assignments (`&&=`, `||=`, `??=`) are
/// knowingly compiled as eager BitAnd/BitOr/Move here instead of the
/// spec-mandated short-circuit forms — per ECMA-262 they must not evaluate the
/// RHS when the assignment is skipped. Fixing this requires jump emission at
/// the call site, not a different opcode.
fn compound_assign_op(op: AssignOp) -> Op {
    match op {
        AssignOp::AddAssign => Op::Add,
        AssignOp::SubAssign => Op::Sub,
        AssignOp::MulAssign => Op::Mul,
        AssignOp::DivAssign => Op::Div,
        AssignOp::RemAssign => Op::Rem,
        AssignOp::ExpAssign => Op::Exp,
        AssignOp::ShlAssign => Op::ShiftLeft,
        AssignOp::ShrAssign => Op::ShiftRight,
        AssignOp::UshrAssign => Op::UShiftRight,
        AssignOp::BitAndAssign => Op::BitAnd,
        AssignOp::BitOrAssign => Op::BitOr,
        AssignOp::BitXorAssign => Op::BitXor,
        AssignOp::AndAssign => Op::BitAnd, // logical AND assignment uses short-circuit; simplified here
        AssignOp::OrAssign => Op::BitOr,   // likewise
        AssignOp::NullishAssign => Op::Move, // simplified
        // Plain `=` never reaches this function; callers dispatch it separately.
        AssignOp::Assign => unreachable!(),
    }
}

#[cfg(test)]
mod tests {
    //! Compiler tests: each test compiles a small JS snippet and asserts on
    //! the disassembly text (opcode names / register operands) or on the
    //! compiled `Function`'s constant/name tables — no VM execution involved.
    use super::*;
    use crate::parser::Parser;

    /// Helper: parse and compile source, return the top-level function.
    fn compile_src(src: &str) -> Function {
        let program = Parser::parse(src).expect("parse failed");
        compile(&program).expect("compile failed")
    }

    #[test]
    fn test_compile_number_literal() {
        let f = compile_src("42;");
        let dis = f.disassemble();
        // Small integers fit in the inline LoadInt8 form.
        assert!(dis.contains("LoadInt8 r0, 42"), "got:\n{dis}");
        assert!(dis.contains("Return r0"));
    }

    #[test]
    fn test_compile_large_number() {
        let f = compile_src("3.14;");
        let dis = f.disassemble();
        // Non-integer numbers go through the constant pool.
        assert!(dis.contains("LoadConst r0, #0"), "got:\n{dis}");
        assert!(
            f.constants.contains(&Constant::Number(3.14)),
            "constants: {:?}",
            f.constants
        );
    }

    #[test]
    fn test_compile_string() {
        let f = compile_src("\"hello\";");
        let dis = f.disassemble();
        assert!(dis.contains("LoadConst r0, #0"));
        assert!(f.constants.contains(&Constant::String("hello".into())));
    }

    #[test]
    fn test_compile_bool_null() {
        let f = compile_src("true; false; null;");
        let dis = f.disassemble();
        assert!(dis.contains("LoadTrue r0"));
        assert!(dis.contains("LoadFalse r0"));
        assert!(dis.contains("LoadNull r0"));
    }

    #[test]
    fn test_compile_binary_arithmetic() {
        let f = compile_src("1 + 2;");
        let dis = f.disassemble();
        // Operands land in consecutive temp registers r1/r2, result in r0.
        assert!(dis.contains("Add r0, r1, r2"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_nested_arithmetic() {
        let f = compile_src("(1 + 2) * 3;");
        let dis = f.disassemble();
        assert!(dis.contains("Add"), "got:\n{dis}");
        assert!(dis.contains("Mul"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_var_decl() {
        let f = compile_src("var x = 10; x;");
        let dis = f.disassemble();
        // x should get a register, then be loaded from that register.
        assert!(dis.contains("LoadInt8"), "got:\n{dis}");
        assert!(
            dis.contains("Move") || dis.contains("LoadInt8"),
            "got:\n{dis}"
        );
    }

    #[test]
    fn test_compile_let_const() {
        let f = compile_src("let a = 1; const b = 2; a + b;");
        let dis = f.disassemble();
        assert!(dis.contains("Add"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_if_else() {
        let f = compile_src("if (true) { 1; } else { 2; }");
        let dis = f.disassemble();
        // Conditional jump over the then-branch plus an unconditional jump
        // over the else-branch.
        assert!(dis.contains("JumpIfFalse"), "got:\n{dis}");
        assert!(dis.contains("Jump"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_while() {
        let f = compile_src("var i = 0; while (i < 10) { i = i + 1; }");
        let dis = f.disassemble();
        assert!(dis.contains("LessThan"), "got:\n{dis}");
        assert!(dis.contains("JumpIfFalse"), "got:\n{dis}");
        assert!(
            dis.contains("Jump"),
            "backward jump should be present: {dis}"
        );
    }

    #[test]
    fn test_compile_do_while() {
        let f = compile_src("var i = 0; do { i = i + 1; } while (i < 5);");
        let dis = f.disassemble();
        // do-while loops back on a true condition.
        assert!(dis.contains("JumpIfTrue"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_for_loop() {
        let f = compile_src("for (var i = 0; i < 10; i = i + 1) { i; }");
        let dis = f.disassemble();
        assert!(dis.contains("LessThan"), "got:\n{dis}");
        assert!(dis.contains("JumpIfFalse"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_function_decl() {
        let f = compile_src("function add(a, b) { return a + b; }");
        let dis = f.disassemble();
        assert!(dis.contains("CreateClosure"), "got:\n{dis}");
        // The nested function is compiled separately and stored in `functions`.
        assert!(!f.functions.is_empty(), "should have nested function");
        let inner = &f.functions[0];
        assert_eq!(inner.name, "add");
        assert_eq!(inner.param_count, 2);
        let inner_dis = inner.disassemble();
        assert!(inner_dis.contains("Add"), "inner:\n{inner_dis}");
        assert!(inner_dis.contains("Return"), "inner:\n{inner_dis}");
    }

    #[test]
    fn test_compile_function_call() {
        let f = compile_src("function f() { return 42; } f();");
        let dis = f.disassemble();
        assert!(dis.contains("Call"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_arrow_function() {
        let f = compile_src("var add = (a, b)
=> a + b;");
        let dis = f.disassemble();
        assert!(dis.contains("CreateClosure"), "got:\n{dis}");
        let inner = &f.functions[0];
        assert_eq!(inner.param_count, 2);
    }

    #[test]
    fn test_compile_assignment() {
        let f = compile_src("var x = 1; x = x + 2;");
        let dis = f.disassemble();
        assert!(dis.contains("Add"), "got:\n{dis}");
        assert!(
            dis.contains("Move"),
            "assignment should produce Move:\n{dis}"
        );
    }

    #[test]
    fn test_compile_compound_assignment() {
        let f = compile_src("var x = 10; x += 5;");
        let dis = f.disassemble();
        assert!(dis.contains("Add"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_member_access() {
        let f = compile_src("var obj = {}; obj.x;");
        let dis = f.disassemble();
        assert!(dis.contains("CreateObject"), "got:\n{dis}");
        // Static property names use the name-indexed opcode.
        assert!(dis.contains("GetPropertyByName"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_computed_member() {
        let f = compile_src("var arr = []; arr[0];");
        let dis = f.disassemble();
        // Computed access uses the generic register-keyed opcode.
        assert!(dis.contains("GetProperty"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_object_literal() {
        let f = compile_src("var obj = { a: 1, b: 2 };");
        let dis = f.disassemble();
        assert!(dis.contains("CreateObject"), "got:\n{dis}");
        assert!(dis.contains("SetPropertyByName"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_array_literal() {
        let f = compile_src("[1, 2, 3];");
        let dis = f.disassemble();
        assert!(dis.contains("CreateArray"), "got:\n{dis}");
        assert!(dis.contains("SetProperty"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_conditional() {
        let f = compile_src("true ?
1 : 2;");
        let dis = f.disassemble();
        assert!(dis.contains("JumpIfFalse"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_logical_and() {
        let f = compile_src("true && false;");
        let dis = f.disassemble();
        assert!(dis.contains("JumpIfFalse"), "short-circuit:\n{dis}");
    }

    #[test]
    fn test_compile_logical_or() {
        let f = compile_src("false || true;");
        let dis = f.disassemble();
        assert!(dis.contains("JumpIfTrue"), "short-circuit:\n{dis}");
    }

    #[test]
    fn test_compile_typeof() {
        let f = compile_src("typeof 42;");
        let dis = f.disassemble();
        assert!(dis.contains("TypeOf"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_unary_minus() {
        let f = compile_src("-42;");
        let dis = f.disassemble();
        assert!(dis.contains("Neg"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_not() {
        let f = compile_src("!true;");
        let dis = f.disassemble();
        assert!(dis.contains("LogicalNot"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_return() {
        let f = compile_src("function f() { return 42; }");
        let inner = &f.functions[0];
        let dis = inner.disassemble();
        assert!(dis.contains("Return"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_empty_return() {
        let f = compile_src("function f() { return; }");
        let inner = &f.functions[0];
        let dis = inner.disassemble();
        // Bare `return` yields undefined.
        assert!(dis.contains("LoadUndefined"), "got:\n{dis}");
        assert!(dis.contains("Return"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_throw() {
        let f = compile_src("function f() { throw 42; }");
        let inner = &f.functions[0];
        let dis = inner.disassemble();
        assert!(dis.contains("Throw"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_this() {
        let f = compile_src("this;");
        let dis = f.disassemble();
        // At top level, `this` resolves through the global lookup path.
        assert!(dis.contains("LoadGlobal"), "got:\n{dis}");
        assert!(f.names.contains(&"this".to_string()));
    }

    #[test]
    fn test_compile_global_var() {
        let f = compile_src("console;");
        let dis = f.disassemble();
        assert!(dis.contains("LoadGlobal"), "got:\n{dis}");
        assert!(f.names.contains(&"console".to_string()));
    }

    #[test]
    fn test_compile_template_literal() {
        // Single-quasi template with no expressions is a plain string constant.
        let f = compile_src("`hello`;");
        assert!(
            f.constants.contains(&Constant::String("hello".into())),
            "constants: {:?}",
            f.constants
        );
    }

    #[test]
    fn test_compile_switch() {
        let f = compile_src("switch (1) { case 1: 42; break; case 2: 99; break; }");
        let dis = f.disassemble();
        // Case tests compile to strict equality comparisons.
        assert!(dis.contains("StrictEq"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_class() {
        let f = compile_src("class Foo { constructor() {} greet() { return 1; } }");
        let dis = f.disassemble();
        assert!(dis.contains("CreateClosure"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_update_prefix() {
        let f = compile_src("var x = 0; ++x;");
        let dis = f.disassemble();
        // Prefix increment is lowered to an Add.
        assert!(dis.contains("Add"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_comparison() {
        let f = compile_src("1 === 2;");
        let dis = f.disassemble();
        assert!(dis.contains("StrictEq"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_bitwise() {
        let f = compile_src("1 & 2;");
        let dis = f.disassemble();
        assert!(dis.contains("BitAnd"), "got:\n{dis}");
    }

    #[test]
    fn test_compile_void() {
        let f = compile_src("void 0;");
        let dis = f.disassemble();
        assert!(dis.contains("Void"), "got:\n{dis}");
    }

    #[test]
    fn test_disassembler_output_format() {
        let f = compile_src("var x = 42; x + 1;");
        let dis = f.disassemble();
        // Should contain function header.
        assert!(dis.contains("function
"));
        // Should contain code section.
        assert!(dis.contains("code:"));
        // Should have hex offsets.
        assert!(dis.contains("0000"));
    }

    #[test]
    fn test_register_allocation_is_minimal() {
        // `var a = 1; var b = 2; a + b;` should use few registers.
        let f = compile_src("var a = 1; var b = 2; a + b;");
        // r0 = result, r1 = a, r2 = b, r3/r4 = temps for addition
        assert!(
            f.register_count <= 6,
            "too many registers: {}",
            f.register_count
        );
    }

    #[test]
    fn test_nested_function_closure() {
        let f = compile_src("function outer() { function inner() { return 1; } return inner; }");
        // Function tables nest: top level holds `outer`, which holds `inner`.
        assert_eq!(f.functions.len(), 1);
        let outer = &f.functions[0];
        assert_eq!(outer.name, "outer");
        assert_eq!(outer.functions.len(), 1);
        let inner = &outer.functions[0];
        assert_eq!(inner.name, "inner");
    }

    #[test]
    fn test_for_with_no_parts() {
        // `for (;;) { break; }` — infinite loop with immediate break.
        let f = compile_src("for (;;) { break; }");
        let dis = f.disassemble();
        assert!(dis.contains("Jump"), "got:\n{dis}");
    }

    #[test]
    fn test_for_continue_targets_update() {
        // `continue` in a for-loop must jump to the update expression, not back
        // to the condition check. Verify the continue jump goes to the Add (i + 1)
        // rather than to the LessThan condition.
        let f = compile_src("for (var i = 0; i < 10; i = i + 1) { continue; }");
        let dis = f.disassemble();
        // The for-loop should contain: LessThan (test), JumpIfFalse (exit),
        // Jump (continue), Add (update), Jump (back to test).
        assert!(dis.contains("LessThan"), "missing test: {dis}");
        assert!(dis.contains("Add"), "missing update: {dis}");
        // There should be at least 2 Jump instructions (continue + back-edge).
        let jump_count = dis.matches("Jump ").count();
        assert!(
            jump_count >= 2,
            "expected >= 2 jumps for continue + back-edge, got {jump_count}: {dis}"
        );
    }

    #[test]
    fn test_do_while_continue_targets_condition() {
        // `continue` in do-while must jump to the condition, not the body start.
        let f = compile_src("var i = 0; do { i = i + 1; continue; } while (i < 5);");
        let dis = f.disassemble();
        assert!(dis.contains("LessThan"), "missing condition: {dis}");
        assert!(dis.contains("JumpIfTrue"), "missing back-edge: {dis}");
    }

    #[test]
    fn test_switch_default_case() {
        // Default case must not corrupt bytecode.
        let f = compile_src("switch (1) { case 1: 10; break; default: 20; break; }");
        let dis = f.disassemble();
        assert!(dis.contains("StrictEq"), "missing case test: {dis}");
        // The first instruction should NOT be corrupted.
        assert!(
            dis.contains("LoadUndefined r0"),
            "first instruction corrupted: {dis}"
        );
    }

    #[test]
    fn test_switch_only_default() {
        // Switch with only a default case.
        let f = compile_src("switch (42) { default: 99; }");
        let dis = f.disassemble();
        // Should compile without panicking and contain the default body.
        assert!(dis.contains("LoadInt8"), "got:\n{dis}");
    }

    #[test]
    fn test_class_empty_constructor_has_return() {
        // A class without an explicit constructor should produce a function with Return.
        let f = compile_src("class Foo {}");
        assert!(!f.functions.is_empty(), "should have constructor function");
        let ctor = &f.functions[0];
        let dis = ctor.disassemble();
        assert!(
            dis.contains("Return"),
            "empty constructor must have Return: {dis}"
        );
    }

    #[test]
    fn test_class_expression_compiles_methods() {
        // Class expression should compile methods, not just the constructor.
        let f = compile_src("var C = class { greet() { return 1; } };");
        let dis = f.disassemble();
        assert!(
            dis.contains("SetPropertyByName"),
            "method should be set as property: {dis}"
        );
    }
}