//! Tri-color mark-and-sweep garbage collector.
//!
//! Manages heap-allocated JS objects (plain objects, arrays, functions) using
//! the tri-color invariant: white (unreached), gray (reached, children not yet
//! scanned), black (fully scanned). The collector is stop-the-world and
//! triggered when live object count exceeds a configurable threshold.

/// Mark color for tri-color marking.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Color {
    /// Not yet visited — candidate for collection.
    White,
    /// Visited but children not yet scanned.
    Gray,
    /// Fully scanned — reachable and all children traced.
    Black,
}

/// A reference (handle) to a GC-managed object.
///
/// This is a lightweight index into the GC heap. It is `Copy` so it can be
/// freely duplicated — the GC is responsible for tracking liveness.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct GcRef(u32);

/// Trait for types that can be traced by the garbage collector.
///
/// Implementors must report all `GcRef` values they hold so the GC can
/// traverse the object graph.
pub trait Traceable {
    /// Call `visitor` once for each `GcRef` this object holds.
    fn trace(&self, visitor: &mut dyn FnMut(GcRef));
}

/// A heap slot containing GC metadata and the object data.
struct HeapSlot<T> {
    color: Color,
    data: T,
}

/// Statistics about the GC heap.
#[derive(Debug, Clone)]
pub struct GcStats {
    /// Number of live (allocated) objects.
    pub live_count: usize,
    /// Total heap slots (including free slots).
    pub total_slots: usize,
    /// Number of collections performed so far.
    pub collections: usize,
}

/// The garbage collector.
///
/// Manages a heap of `T` objects. Objects are allocated via [`Gc::alloc`] and
/// accessed via [`Gc::get`]/[`Gc::get_mut`]. Collection is triggered manually
/// or when [`Gc::should_collect`] returns true.
pub struct Gc<T> {
    /// The heap: a vector of optional slots (None = free).
    heap: Vec<Option<HeapSlot<T>>>,
    /// Free slot indices for O(1) allocation.
    free_list: Vec<u32>,
    /// Number of currently live objects.
    live_count: usize,
    /// Collection is suggested when live_count exceeds this.
    threshold: usize,
    /// Total collections performed.
    collections: usize,
}

/// Initial collection threshold.
const INITIAL_THRESHOLD: usize = 256;

impl<T: Traceable> Gc<T> {
    /// Create a new, empty GC heap.
    pub fn new() -> Self {
        Self {
            heap: Vec::new(),
            free_list: Vec::new(),
            live_count: 0,
            threshold: INITIAL_THRESHOLD,
            collections: 0,
        }
    }

    /// Allocate a new object on the heap, returning a `GcRef` handle.
    pub fn alloc(&mut self, data: T) -> GcRef {
        let slot = HeapSlot {
            color: Color::White,
            data,
        };
        // Reuse a freed slot when available; otherwise grow the heap.
        let idx = if let Some(free_idx) = self.free_list.pop() {
            let i = free_idx as usize;
            self.heap[i] = Some(slot);
            free_idx
        } else {
            let i = self.heap.len();
            self.heap.push(Some(slot));
            i as u32
        };
        self.live_count += 1;
        GcRef(idx)
    }

    /// Get an immutable reference to the object at `r`.
    ///
    /// Returns `None` if the slot has been freed (stale reference).
    pub fn get(&self, r: GcRef) -> Option<&T> {
        let idx = r.0 as usize;
        self.heap
            .get(idx)
            .and_then(|slot| slot.as_ref().map(|s| &s.data))
    }

    /// Get a mutable reference to the object at `r`.
    ///
    /// Returns `None` if the slot has been freed (stale reference).
    pub fn get_mut(&mut self, r: GcRef) -> Option<&mut T> {
        let idx = r.0 as usize;
        self.heap
            .get_mut(idx)
            .and_then(|slot| slot.as_mut().map(|s| &mut s.data))
    }

    /// Returns `true` if a collection should be triggered.
    pub fn should_collect(&self) -> bool {
        self.live_count >= self.threshold
    }

    /// Perform a garbage collection pass.
    ///
    /// `roots` must contain every `GcRef` reachable from the mutator (VM
    /// registers, globals, call stack). Any object not transitively reachable
    /// from a root is freed.
    pub fn collect(&mut self, roots: &[GcRef]) {
        self.mark(roots);
        self.sweep();
        self.collections += 1;
        // Grow threshold so we don't collect too often.
        if self.live_count >= self.threshold {
            self.threshold = self.live_count * 2;
        }
    }

    /// Return heap statistics.
    pub fn stats(&self) -> GcStats {
        GcStats {
            live_count: self.live_count,
            total_slots: self.heap.len(),
            collections: self.collections,
        }
    }

    /// Mark phase: starting from roots, color all reachable objects black.
    fn mark(&mut self, roots: &[GcRef]) {
        let mut gray_stack: Vec<GcRef> = Vec::new();

        // Color roots gray.
        for &root in roots {
            let idx = root.0 as usize;
            if idx < self.heap.len() {
                if let Some(entry) = &mut self.heap[idx] {
                    if entry.color == Color::White {
                        entry.color = Color::Gray;
                        gray_stack.push(root);
                    }
                }
            }
        }

        // Process the gray stack.
        while let Some(gc_ref) = gray_stack.pop() {
            let idx = gc_ref.0 as usize;

            // Collect child references (scoped borrow: `trace` borrows the
            // heap immutably, so gather children before mutating colors).
            let children = {
                let mut c = Vec::new();
                if let Some(entry) = &self.heap[idx] {
                    entry.data.trace(&mut |child| c.push(child));
                }
                c
            };

            // Mark current object black.
            if let Some(entry) = &mut self.heap[idx] {
                entry.color = Color::Black;
            }

            // Mark white children gray.
            for child in children {
                let cidx = child.0 as usize;
                if cidx < self.heap.len() {
                    if let Some(entry) = &mut self.heap[cidx] {
                        if entry.color == Color::White {
                            entry.color = Color::Gray;
                            gray_stack.push(child);
                        }
                    }
                }
            }
        }
    }

    /// Sweep phase: free all white objects, reset black objects to white.
    fn sweep(&mut self) {
        for i in 0..self.heap.len() {
            let should_free = match &self.heap[i] {
                Some(entry) => entry.color == Color::White,
                None => false,
            };
            if should_free {
                self.heap[i] = None;
                self.free_list.push(i as u32);
                self.live_count -= 1;
            } else if let Some(entry) = &mut self.heap[i] {
                // Reset black → white for next cycle.
                entry.color = Color::White;
            }
        }
    }
}

impl<T: Traceable> Default for Gc<T> {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// A simple test object that holds GcRef children.
    struct TestObj {
        children: Vec<GcRef>,
    }

    impl Traceable for TestObj {
        fn trace(&self, visitor: &mut dyn FnMut(GcRef)) {
            for &child in &self.children {
                visitor(child);
            }
        }
    }

    #[test]
    fn test_alloc_and_get() {
        let mut gc = Gc::new();
        let r = gc.alloc(TestObj {
            children: Vec::new(),
        });
        assert!(gc.get(r).is_some());
        assert_eq!(gc.stats().live_count, 1);
    }

    #[test]
    fn test_collect_unreachable() {
        let mut gc = Gc::new();
        let r1 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let r2 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        assert_eq!(gc.stats().live_count, 2);

        // Collect with no roots — everything is freed.
        gc.collect(&[]);
        assert_eq!(gc.stats().live_count, 0);
        assert!(gc.get(r1).is_none());
        assert!(gc.get(r2).is_none());
    }

    #[test]
    fn test_collect_reachable() {
        let mut gc = Gc::new();
        let r1 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let r2 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        assert_eq!(gc.stats().live_count, 2);

        // Only r1 is a root.
        gc.collect(&[r1]);
        assert_eq!(gc.stats().live_count, 1);
        assert!(gc.get(r1).is_some());
        assert!(gc.get(r2).is_none());
    }

    #[test]
    fn test_collect_transitive() {
        let mut gc = Gc::new();
        let r1 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let r2 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let r3 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        // r1 -> r2 -> r3
        gc.get_mut(r1).unwrap().children.push(r2);
        gc.get_mut(r2).unwrap().children.push(r3);

        gc.collect(&[r1]);
        assert_eq!(gc.stats().live_count, 3);
        assert!(gc.get(r1).is_some());
        assert!(gc.get(r2).is_some());
        assert!(gc.get(r3).is_some());
    }

    #[test]
    fn test_cycle_collection() {
        let mut gc = Gc::new();
        let r1 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let r2 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        // Create a cycle: r1 -> r2 -> r1.
        gc.get_mut(r1).unwrap().children.push(r2);
        gc.get_mut(r2).unwrap().children.push(r1);

        // With r1 as root, both survive.
        gc.collect(&[r1]);
        assert_eq!(gc.stats().live_count, 2);

        // With no roots, both are freed (cycle is collected).
        gc.collect(&[]);
        assert_eq!(gc.stats().live_count, 0);
    }

    #[test]
    fn test_free_list_reuse() {
        let mut gc = Gc::new();
        let _r1 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let _r2 = gc.alloc(TestObj {
            children: Vec::new(),
        });

        // Free both.
        gc.collect(&[]);
        assert_eq!(gc.stats().live_count, 0);
        assert_eq!(gc.stats().total_slots, 2);

        // Allocate again — should reuse freed slots.
        let _r3 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        let _r4 = gc.alloc(TestObj {
            children: Vec::new(),
        });
        assert_eq!(gc.stats().live_count, 2);
        assert_eq!(gc.stats().total_slots, 2); // No growth.
    }

    #[test]
    fn test_should_collect_threshold() {
        let mut gc: Gc<TestObj> = Gc::new();
        assert!(!gc.should_collect());

        // Allocate up to threshold.
        for _ in 0..INITIAL_THRESHOLD {
            gc.alloc(TestObj {
                children: Vec::new(),
            });
        }
        assert!(gc.should_collect());
    }

    #[test]
    fn test_stress() {
        let mut gc = Gc::new();
        let mut live_refs = Vec::new();
        for i in 0..1000 {
            let r = gc.alloc(TestObj {
                children: Vec::new(),
            });
            if i % 3 == 0 {
                live_refs.push(r);
            }
        }
        gc.collect(&live_refs);
        // Only refs kept in live_refs should survive.
        assert_eq!(gc.stats().live_count, live_refs.len());
        for r in &live_refs {
            assert!(gc.get(*r).is_some());
        }
    }
}