//! Next Generation WASM Microkernel Operating System
1// Copyright 2025 Jonas Kruckenberg
2//
3// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
4// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
5// http://opensource.org/licenses/MIT>, at your option. This file may not be
6// copied, modified, or distributed except according to those terms.
7
8use alloc::string::String;
9use core::alloc::Layout;
10use core::marker::PhantomPinned;
11use core::ptr::NonNull;
12use core::sync::atomic::{AtomicU64, Ordering};
13use core::{fmt, ptr, slice};
14
15use anyhow::{bail, ensure};
16use cranelift_entity::packed_option::ReservedValue;
17use cranelift_entity::{EntityRef, EntitySet, PrimaryMap};
18use kmem::VirtualAddress;
19use static_assertions::const_assert_eq;
20
21use crate::wasm::TrapKind;
22use crate::wasm::indices::{
23 DataIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex,
24 ElemIndex, EntityIndex, FuncIndex, GlobalIndex, MemoryIndex, TableIndex, TagIndex,
25 VMSharedTypeIndex,
26};
27use crate::wasm::module::Module;
28use crate::wasm::store::{StoreInner, StoreOpaque};
29use crate::wasm::translate::{
30 IndexType, MemoryInitializer, TableInitialValue, TableSegmentElements, TranslatedModule,
31 WasmHeapTopType, WasmHeapTypeInner,
32};
33use crate::wasm::trap_handler::WasmFault;
34use crate::wasm::vm::const_eval::{ConstEvalContext, ConstExprEvaluator};
35use crate::wasm::vm::memory::Memory;
36use crate::wasm::vm::provenance::{VmPtr, VmSafe};
37use crate::wasm::vm::table::{Table, TableElement, TableElementType};
38use crate::wasm::vm::{
39 Export, ExportedFunction, ExportedGlobal, ExportedMemory, ExportedTable, ExportedTag, Imports,
40 StaticVMShape, VMBuiltinFunctionsArray, VMCONTEXT_MAGIC, VMContext, VMFuncRef, VMFunctionBody,
41 VMFunctionImport, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport,
42 VMOpaqueContext, VMShape, VMStoreContext, VMTableDefinition, VMTableImport, VMTagDefinition,
43 VMTagImport,
44};
45
/// An owned handle to a runtime [`Instance`].
///
/// Internally this is an optional pointer to a heap-allocated `Instance`;
/// a `None` value (see [`InstanceHandle::null`]) is an empty handle on which
/// most methods will panic.
#[derive(Debug)]
pub struct InstanceHandle {
    // `None` only for handles produced by `InstanceHandle::null()`.
    instance: Option<NonNull<Instance>>,
}
// Safety: NOTE(review): `InstanceHandle` is just a raw pointer to an
// `Instance`; sending it between threads is sound only if access to the
// pointee is externally synchronized (presumably by the owning store) —
// TODO verify and document the actual invariant.
unsafe impl Send for InstanceHandle {}
// Safety: NOTE(review): shared references to `InstanceHandle` only expose
// `&Instance` reads; this is sound only if concurrent mutation through other
// handles is ruled out elsewhere — TODO verify and document the actual
// invariant.
unsafe impl Sync for InstanceHandle {}
54
#[repr(C)] // ensure that the vmctx field is last.
#[derive(Debug)]
pub struct Instance {
    /// The module this instance was instantiated from.
    module: Module,
    /// Linear memories defined (not imported) by this instance.
    pub(in crate::wasm) memories: PrimaryMap<DefinedMemoryIndex, Memory>,
    /// Tables defined (not imported) by this instance.
    pub(in crate::wasm) tables: PrimaryMap<DefinedTableIndex, Table>,
    /// Element segments recorded as dropped (`elem_drop` inserts here;
    /// seeded from the module's active table initializers in `from_parts`).
    dropped_elements: EntitySet<ElemIndex>,
    /// Data segments recorded as dropped (`data_drop` inserts here;
    /// seeded from the module's active memory initializers in `from_parts`).
    dropped_data: EntitySet<DataIndex>,

    /// A pointer to the `vmctx` field at the end of the `Instance`.
    ///
    /// This pointer is created upon allocation with provenance that covers the *entire* instance
    /// and VMContext memory. Pointers to VMContext are derived from it inheriting this broader
    /// provenance. This is important for correctness.
    vmctx_self_reference: NonNull<VMContext>,
    /// Self-pointer back to `Store<T>` and its functions. Not present for
    /// the brief time that `Store<T>` is itself being created. Also not
    /// present for some niche uses that are disconnected from stores (e.g.
    /// cross-thread stuff used in `InstancePre`)
    store: Option<NonNull<StoreOpaque>>,
    /// Additional context used by compiled wasm code. This field is last, and
    /// represents a dynamically-sized array that extends beyond the nominal
    /// end of the struct (similar to a flexible array member).
    vmctx: VMContext,
}
80
81impl InstanceHandle {
82 /// Creates an "empty" instance handle which internally has a null pointer
83 /// to an instance. Actually calling any methods on this `InstanceHandle` will always
84 /// panic.
85 pub fn null() -> InstanceHandle {
86 InstanceHandle { instance: None }
87 }
88
    /// Runs the full instance-initialization sequence: vmctx setup, optional
    /// pre-bulk-memory bounds checks, then table/memory/global initializers.
    ///
    /// NOTE(review): the order here matters — `initialize_vmctx` must run
    /// before anything that reads vmctx-resident data.
    pub fn initialize(
        &mut self,
        store: &mut StoreOpaque,
        const_eval: &mut ConstExprEvaluator,
        module: &Module,
        imports: Imports,
        is_bulk_memory: bool,
    ) -> crate::Result<()> {
        // Safety: we call the functions in the right order (initialize_vmctx) first
        unsafe {
            self.instance_mut().initialize_vmctx(store, imports, module);

            // Pre-bulk-memory semantics require all segments to be in-bounds
            // up front, before any data is written.
            if !is_bulk_memory {
                // Safety: see? we called `initialize_vmctx` before calling `check_init_bounds`!
                check_init_bounds(store, self.instance_mut(), module)?;
            }

            let mut ctx = ConstEvalContext::new(self.instance.unwrap().as_mut());
            self.instance_mut()
                .initialize_tables(store, &mut ctx, const_eval, module)?;
            self.instance_mut()
                .initialize_memories(store, &mut ctx, const_eval, module)?;
            self.instance_mut()
                .initialize_globals(store, &mut ctx, const_eval, module)?;
        }

        Ok(())
    }
117
    /// Dumps every field of this instance's `VMContext` through
    /// `tracing::debug!`.
    ///
    /// NOTE(review): this reads raw vmctx memory directly; only meaningful
    /// after `initialize_vmctx` has populated it — confirm at call sites.
    pub fn debug_vmctx(&self) {
        // Adapter type so all the raw reads happen lazily inside `Debug::fmt`,
        // driven by the `{:#?}` formatting below.
        struct Dbg<'a> {
            data: &'a Instance,
        }
        impl fmt::Debug for Dbg<'_> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Safety: Reading from JIT-owned memory is inherently unsafe.
                unsafe {
                    f.debug_struct("VMContext")
                        .field(
                            "magic",
                            &*self
                                .data
                                .vmctx_plus_offset::<u32>(StaticVMShape.vmctx_magic()),
                        )
                        .field(
                            "vm_store_context",
                            &*self
                                .data
                                .vmctx_plus_offset::<Option<VmPtr<VMStoreContext>>>(
                                    StaticVMShape.vmctx_store_context(),
                                ),
                        )
                        .field(
                            "builtin_functions",
                            &*self
                                .data
                                .vmctx_plus_offset::<VmPtr<VMBuiltinFunctionsArray>>(
                                    StaticVMShape.vmctx_builtin_functions(),
                                ),
                        )
                        .field(
                            "callee",
                            &*self
                                .data
                                .vmctx_plus_offset::<Option<VmPtr<VMFunctionBody>>>(
                                    StaticVMShape.vmctx_callee(),
                                ),
                        )
                        .field(
                            "epoch_ptr",
                            &*self.data.vmctx_plus_offset::<Option<VmPtr<AtomicU64>>>(
                                StaticVMShape.vmctx_epoch_ptr(),
                            ),
                        )
                        .field(
                            "gc_heap_base",
                            &*self.data.vmctx_plus_offset::<Option<VmPtr<u8>>>(
                                StaticVMShape.vmctx_gc_heap_base(),
                            ),
                        )
                        .field(
                            "gc_heap_bound",
                            &*self
                                .data
                                .vmctx_plus_offset::<usize>(StaticVMShape.vmctx_gc_heap_bound()),
                        )
                        .field(
                            "gc_heap_data",
                            &*self.data.vmctx_plus_offset::<Option<VmPtr<u8>>>(
                                StaticVMShape.vmctx_gc_heap_data(),
                            ),
                        )
                        .field(
                            "type_ids",
                            &*self.data.vmctx_plus_offset::<VmPtr<VMSharedTypeIndex>>(
                                StaticVMShape.vmctx_type_ids_array(),
                            ),
                        )
                        // The remaining fields are variable-length arrays whose
                        // offsets and lengths come from this module's `VMShape`.
                        .field(
                            "imported_memories",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMMemoryImport>(
                                    self.data.vmshape().vmctx_imported_memories_begin(),
                                ),
                                self.data.vmshape().num_imported_memories as usize,
                            ),
                        )
                        .field(
                            "memories",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VmPtr<VMMemoryDefinition>>(
                                    self.data.vmshape().vmctx_memories_begin(),
                                ),
                                self.data.vmshape().num_defined_memories as usize,
                            ),
                        )
                        .field(
                            "owned_memories",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMMemoryDefinition>(
                                    self.data.vmshape().vmctx_owned_memories_begin(),
                                ),
                                self.data.vmshape().num_owned_memories as usize,
                            ),
                        )
                        .field(
                            "imported_functions",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMFunctionImport>(
                                    self.data.vmshape().vmctx_imported_functions_begin(),
                                ),
                                self.data.vmshape().num_imported_functions as usize,
                            ),
                        )
                        .field(
                            "imported_tables",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTableImport>(
                                    self.data.vmshape().vmctx_imported_tables_begin(),
                                ),
                                self.data.vmshape().num_imported_tables as usize,
                            ),
                        )
                        .field(
                            "imported_globals",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMGlobalImport>(
                                    self.data.vmshape().vmctx_imported_globals_begin(),
                                ),
                                self.data.vmshape().num_imported_globals as usize,
                            ),
                        )
                        .field(
                            "imported_tags",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTagImport>(
                                    self.data.vmshape().vmctx_imported_tags_begin(),
                                ),
                                self.data.vmshape().num_imported_tags as usize,
                            ),
                        )
                        .field(
                            "tables",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTableDefinition>(
                                    self.data.vmshape().vmctx_tables_begin(),
                                ),
                                self.data.vmshape().num_defined_tables as usize,
                            ),
                        )
                        .field(
                            "globals",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMGlobalDefinition>(
                                    self.data.vmshape().vmctx_globals_begin(),
                                ),
                                self.data.vmshape().num_defined_globals as usize,
                            ),
                        )
                        .field(
                            "tags",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTagDefinition>(
                                    self.data.vmshape().vmctx_tags_begin(),
                                ),
                                self.data.vmshape().num_defined_tags as usize,
                            ),
                        )
                        .field(
                            "func_refs",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMFuncRef>(
                                    self.data.vmshape().vmctx_func_refs_begin(),
                                ),
                                self.data.vmshape().num_escaped_funcs as usize,
                            ),
                        )
                        .finish()
                }
            }
        }

        tracing::debug!(
            "{:#?}",
            Dbg {
                data: self.instance()
            }
        );
    }
298
299 pub fn vmctx(&self) -> NonNull<VMContext> {
300 self.instance().vmctx()
301 }
302
303 /// Return a reference to a module.
304 pub fn module(&self) -> &Module {
305 self.instance().module()
306 }
307
308 /// Lookup a table by index.
309 pub fn get_exported_table(&mut self, export: TableIndex) -> ExportedTable {
310 self.instance_mut().get_exported_table(export)
311 }
312
313 /// Lookup a memory by index.
314 pub fn get_exported_memory(&mut self, export: MemoryIndex) -> ExportedMemory {
315 self.instance_mut().get_exported_memory(export)
316 }
317
318 /// Lookup a function by index.
319 pub fn get_exported_func(&mut self, export: FuncIndex) -> ExportedFunction {
320 self.instance_mut().get_exported_func(export)
321 }
322
323 /// Lookup a global by index.
324 pub fn get_exported_global(&mut self, export: GlobalIndex) -> ExportedGlobal {
325 self.instance_mut().get_exported_global(export)
326 }
327
328 /// Lookup a tag by index.
329 pub fn get_exported_tag(&mut self, export: TagIndex) -> ExportedTag {
330 self.instance_mut().get_exported_tag(export)
331 }
332
333 /// Lookup an item with the given index.
334 pub fn get_export_by_index(&mut self, export: EntityIndex) -> Export {
335 match export {
336 EntityIndex::Function(i) => Export::Function(self.get_exported_func(i)),
337 EntityIndex::Global(i) => Export::Global(self.get_exported_global(i)),
338 EntityIndex::Table(i) => Export::Table(self.get_exported_table(i)),
339 EntityIndex::Memory(i) => Export::Memory(self.get_exported_memory(i)),
340 EntityIndex::Tag(i) => Export::Tag(self.get_exported_tag(i)),
341 }
342 }
343
344 /// Return an iterator over the exports of this instance.
345 ///
346 /// Specifically, it provides access to the key-value pairs, where the keys
347 /// are export names, and the values are export declarations which can be
348 /// resolved `lookup_by_declaration`.
349 pub fn exports(&self) -> wasmparser::collections::index_map::Iter<'_, String, EntityIndex> {
350 self.instance().translated_module().exports.iter()
351 }
352
    /// Returns the raw pointer to the underlying `Instance`.
    ///
    /// # Panics
    ///
    /// Panics if this is a null handle (see [`InstanceHandle::null`]).
    pub fn as_non_null(&self) -> NonNull<Instance> {
        self.instance.unwrap()
    }
356
    /// Return a reference to the contained `Instance`.
    ///
    /// # Panics
    ///
    /// Panics if this is a null handle.
    #[inline]
    pub fn instance(&self) -> &Instance {
        // Safety: the constructor ensures the instance is correctly initialized
        unsafe { self.instance.unwrap().as_ref() }
    }
363
    /// Return a mutable reference to the contained `Instance`.
    ///
    /// # Panics
    ///
    /// Panics if this is a null handle.
    #[inline]
    pub fn instance_mut(&mut self) -> &mut Instance {
        // Safety: the constructor ensures the instance is correctly initialized
        unsafe { self.instance.unwrap().as_mut() }
    }
369
370 /// Attempts to convert from the host `addr` specified to a WebAssembly
371 /// based address recorded in `WasmFault`.
372 ///
373 /// This method will check all linear memories that this instance contains
374 /// to see if any of them contain `addr`. If one does then `Some` is
375 /// returned with metadata about the wasm fault. Otherwise `None` is
376 /// returned and `addr` doesn't belong to this instance.
377 pub fn wasm_fault(&self, faulting_addr: VirtualAddress) -> Option<WasmFault> {
378 self.instance().wasm_fault(faulting_addr)
379 }
380}
381
382impl Instance {
    /// Writes a fresh `Instance` into the pre-allocated `instance` slot and
    /// returns a handle to it.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `instance: NonNull<Instance>` got allocated using the
    /// `Instance::alloc_layout` to ensure it is the right size for the VMContext
    pub unsafe fn from_parts(
        module: Module,
        instance: NonNull<Instance>,
        tables: PrimaryMap<DefinedTableIndex, Table>,
        memories: PrimaryMap<DefinedMemoryIndex, Memory>,
    ) -> InstanceHandle {
        // NOTE(review): the "dropped" sets start out as clones of the module's
        // *active* initializer sets — presumably because active segments are
        // consumed during instantiation — TODO confirm.
        let dropped_elements = module.translated().active_table_initializers.clone();
        let dropped_data = module.translated().active_memory_initializers.clone();

        // Safety: we have to trust the caller that `NonNull<Instance>` got allocated using the correct
        // `Instance::alloc_layout` and therefore has the right-sized vmctx memory
        unsafe {
            instance.write(Instance {
                module: module.clone(),
                memories,
                tables,
                dropped_elements,
                dropped_data,
                // Points one `Instance` past the start, i.e. at the trailing
                // vmctx area, with provenance over the whole allocation.
                vmctx_self_reference: instance.add(1).cast(),
                store: None,
                vmctx: VMContext {
                    _marker: PhantomPinned,
                },
            });
        }

        InstanceHandle {
            instance: Some(instance),
        }
    }
417
418 pub fn alloc_layout(offsets: &VMShape) -> Layout {
419 let size = size_of::<Self>()
420 .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
421 .unwrap();
422 let align = align_of::<Self>();
423 Layout::from_size_align(size, align).unwrap()
424 }
425
    /// Returns the module this instance was instantiated from.
    pub fn module(&self) -> &Module {
        &self.module
    }
    /// Returns the translated (compiler-facing) form of this instance's module.
    pub fn translated_module(&self) -> &TranslatedModule {
        self.module.translated()
    }
    /// Returns the `VMShape` describing this module's `VMContext` layout.
    pub fn vmshape(&self) -> &VMShape {
        self.module.vmshape()
    }
435
436 fn wasm_fault(&self, addr: VirtualAddress) -> Option<WasmFault> {
437 let mut fault = None;
438
439 for (_, memory) in &self.memories {
440 let accessible = memory.wasm_accessible();
441 if accessible.start <= addr && addr < accessible.end {
442 // All linear memories should be disjoint so assert that no
443 // prior fault has been found.
444 assert!(fault.is_none());
445 fault = Some(WasmFault {
446 memory_size: memory.byte_size(),
447 wasm_address: u64::try_from(addr.offset_from_unsigned(accessible.start))
448 .unwrap(),
449 });
450 }
451 }
452
453 fault
454 }
455
456 pub fn get_exported_func(&mut self, index: FuncIndex) -> ExportedFunction {
457 ExportedFunction {
458 func_ref: self.get_func_ref(index).unwrap(),
459 }
460 }
461 pub fn get_exported_table(&mut self, index: TableIndex) -> ExportedTable {
462 let (definition, vmctx) =
463 if let Some(def_index) = self.translated_module().defined_table_index(index) {
464 (self.table_ptr(def_index), self.vmctx())
465 } else {
466 let import = self.imported_table(index);
467 (import.from.as_non_null(), import.vmctx.as_non_null())
468 };
469
470 ExportedTable {
471 definition,
472 vmctx,
473 table: self.translated_module().tables[index].clone(),
474 }
475 }
476 pub fn get_exported_memory(&mut self, index: MemoryIndex) -> ExportedMemory {
477 let (definition, vmctx, def_index) =
478 if let Some(def_index) = self.translated_module().defined_memory_index(index) {
479 (self.memory_ptr(def_index), self.vmctx(), def_index)
480 } else {
481 let import = self.imported_memory(index);
482 (
483 import.from.as_non_null(),
484 import.vmctx.as_non_null(),
485 import.index,
486 )
487 };
488
489 ExportedMemory {
490 definition,
491 vmctx,
492 index: def_index,
493 memory: self.translated_module().memories[index].clone(),
494 }
495 }
496 pub fn get_exported_global(&mut self, index: GlobalIndex) -> ExportedGlobal {
497 ExportedGlobal {
498 definition: if let Some(def_index) =
499 self.translated_module().defined_global_index(index)
500 {
501 self.global_ptr(def_index)
502 } else {
503 self.imported_global(index).from.as_non_null()
504 },
505 vmctx: Some(self.vmctx()),
506 global: self.translated_module().globals[index].clone(),
507 }
508 }
509 pub fn get_exported_tag(&mut self, index: TagIndex) -> ExportedTag {
510 ExportedTag {
511 definition: if let Some(def_index) = self.translated_module().defined_tag_index(index) {
512 self.tag_ptr(def_index)
513 } else {
514 self.imported_tag(index).from.as_non_null()
515 },
516 tag: self.translated_module().tags[index],
517 }
518 }
519
520 /// Get the given memory's page size, in bytes.
521 pub fn memory_page_size(&self, index: MemoryIndex) -> u64 {
522 self.translated_module().memories[index].page_size()
523 }
524
    /// Grows memory `index` by `delta`.
    ///
    /// NOTE(review): unimplemented — currently always panics via `todo!()`.
    #[expect(unused, reason = "TODO")]
    pub fn memory_grow(
        &mut self,
        store: &mut StoreOpaque,
        index: MemoryIndex,
        delta: u64,
    ) -> crate::Result<Option<u64>> {
        todo!()
    }
534
    /// Copies `len` bytes from memory `src_index` at `src` to memory
    /// `dst_index` at `dst` (the `memory.copy` instruction).
    ///
    /// NOTE(review): unimplemented — currently always panics via `todo!()`.
    #[expect(unused, reason = "TODO")]
    pub fn memory_copy(
        &mut self,
        dst_index: MemoryIndex,
        dst: u64,
        src_index: MemoryIndex,
        src: u64,
        len: u64,
    ) -> Result<(), TrapKind> {
        todo!()
    }
546
    /// Fills `len` bytes of memory `memory_index` starting at `dst` with
    /// `val` (the `memory.fill` instruction).
    ///
    /// NOTE(review): unimplemented — currently always panics via `todo!()`.
    #[expect(unused, reason = "TODO")]
    pub fn memory_fill(
        &mut self,
        memory_index: MemoryIndex,
        dst: u64,
        val: u8,
        len: u64,
    ) -> Result<(), TrapKind> {
        todo!()
    }
557
    /// Copies a passive data segment into memory (the `memory.init`
    /// instruction).
    ///
    /// NOTE(review): unimplemented — currently always panics via `todo!()`.
    #[expect(unused, reason = "TODO")]
    pub fn memory_init(
        &mut self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), TrapKind> {
        todo!()
    }
569
    /// Records the passive data segment `data_index` as dropped in this
    /// instance's `dropped_data` set (the `data.drop` instruction).
    pub fn data_drop(&mut self, data_index: DataIndex) {
        self.dropped_data.insert(data_index);
    }
573
    /// Classifies the given table's element heap type as either a funcref
    /// table or a GC-reference table.
    pub fn table_element_type(&self, table_index: TableIndex) -> TableElementType {
        match self.translated_module().tables[table_index]
            .element_type
            .heap_type
            .inner
        {
            // Function-reference hierarchy.
            WasmHeapTypeInner::Func
            | WasmHeapTypeInner::ConcreteFunc(_)
            | WasmHeapTypeInner::NoFunc => TableElementType::Func,
            // Extern/any hierarchies are stored as GC references.
            WasmHeapTypeInner::Extern
            | WasmHeapTypeInner::NoExtern
            | WasmHeapTypeInner::Any
            | WasmHeapTypeInner::Eq
            | WasmHeapTypeInner::I31
            | WasmHeapTypeInner::Array
            | WasmHeapTypeInner::ConcreteArray(_)
            | WasmHeapTypeInner::Struct
            | WasmHeapTypeInner::ConcreteStruct(_)
            | WasmHeapTypeInner::None => TableElementType::GcRef,

            WasmHeapTypeInner::Exn | WasmHeapTypeInner::NoExn => {
                todo!("exception-handling proposal")
            }
            WasmHeapTypeInner::Cont
            | WasmHeapTypeInner::ConcreteCont(_)
            | WasmHeapTypeInner::NoCont => todo!("stack switching proposal"),
        }
    }
602
603 pub fn table_grow(
604 &mut self,
605 table_index: TableIndex,
606 delta: u64,
607 init_value: TableElement,
608 ) -> crate::Result<Option<usize>> {
609 let res = self
610 .with_defined_table_index_and_instance(table_index, |def_index, instance| {
611 instance.tables[def_index].grow(delta, init_value)
612 })?;
613
614 Ok(res)
615 }
616
617 pub fn table_fill(
618 &mut self,
619 table_index: TableIndex,
620 dst: u64,
621 val: TableElement,
622 len: u64,
623 ) -> Result<(), TrapKind> {
624 self.with_defined_table_index_and_instance(table_index, |def_index, instance| {
625 instance.tables[def_index].fill(dst, val, len)
626 })
627 }
628
    /// Copies a passive element segment into a table (the `table.init`
    /// instruction).
    pub fn table_init(
        &mut self,
        store: &mut StoreOpaque,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u64,
        src: u64,
        len: u64,
    ) -> Result<(), TrapKind> {
        let module = self.module().clone(); // FIXME this clone is here to workaround lifetime issues. remove
        let elements = &module.translated().passive_table_initializers[&elem_index];
        // TODO reuse this const_eval across calls
        let mut const_eval = ConstExprEvaluator::default();
        self.table_init_segment(store, &mut const_eval, table_index, elements, dst, src, len)
    }
644
    /// Copies `len` entries of `elements` starting at `src` into the table at
    /// `table_index` starting at `dst`, evaluating element expressions as
    /// needed. Out-of-range `src`/`len` yield `TrapKind::TableOutOfBounds`.
    fn table_init_segment(
        &mut self,
        store: &mut StoreOpaque,
        const_eval: &mut ConstExprEvaluator,
        table_index: TableIndex,
        elements: &TableSegmentElements,
        dst: u64,
        src: u64,
        len: u64,
    ) -> Result<(), TrapKind> {
        let src = usize::try_from(src).map_err(|_| TrapKind::TableOutOfBounds)?;
        let len = usize::try_from(len).map_err(|_| TrapKind::TableOutOfBounds)?;

        // Safety: the implementation promises that vmctx is correctly initialized
        let table = unsafe { self.defined_or_imported_table(table_index).as_mut() };

        match elements {
            TableSegmentElements::Functions(funcs) => {
                // Slice the segment; any out-of-bounds range becomes a trap.
                let elements = funcs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or(TrapKind::TableOutOfBounds)?;
                table.init_func(dst, elements.iter().map(|idx| self.get_func_ref(*idx)))?;
            }
            TableSegmentElements::Expressions(exprs) => {
                let exprs = exprs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or(TrapKind::TableOutOfBounds)?;
                let (heap_top_ty, shared) = self.translated_module().tables[table_index]
                    .element_type
                    .heap_type
                    .top();
                assert!(!shared);

                // Safety: the implementation promises that vmctx is correctly initialized
                let mut context = unsafe { ConstEvalContext::new(self) };

                match heap_top_ty {
                    // Each element expression is const-evaluated to a funcref.
                    WasmHeapTopType::Func => table.init_func(
                        dst,
                        exprs.iter().map(|expr| {
                            NonNull::new(
                                const_eval
                                    .eval(store, &mut context, expr)
                                    .expect("const expr should be valid")
                                    .get_funcref()
                                    .cast(),
                            )
                        }),
                    )?,
                    WasmHeapTopType::Extern | WasmHeapTopType::Any => todo!("gc proposal"),
                    WasmHeapTopType::Exn => todo!("exception-handling proposal"),
                    WasmHeapTopType::Cont => todo!("continuation proposal"),
                }
            }
        }

        Ok(())
    }
705
    /// Records the passive element segment `elem_index` as dropped in this
    /// instance's `dropped_elements` set (the `elem.drop` instruction).
    pub fn elem_drop(&mut self, elem_index: ElemIndex) {
        self.dropped_elements.insert(elem_index);
    }
709
    /// Returns a pointer to the `VMFuncRef` for `index` inside this
    /// instance's `VMContext`, or `None` when `index` is the reserved
    /// sentinel value.
    pub fn get_func_ref(&mut self, index: FuncIndex) -> Option<NonNull<VMFuncRef>> {
        if index == FuncIndex::reserved_value() {
            return None;
        }

        // Safety: we have a `&mut self`, so we have exclusive access
        // to this Instance.
        unsafe {
            let func = &self.translated_module().functions[index];
            let func_ref: *mut VMFuncRef = self
                .vmctx_plus_offset_mut::<VMFuncRef>(self.vmshape().vmctx_vmfunc_ref(func.func_ref));

            Some(NonNull::new(func_ref).unwrap())
        }
    }
725
    /// Sets (or clears) this instance's back-pointer to its owning store and
    /// mirrors the store-derived pointers (store context, epoch counter) into
    /// the `VMContext`.
    pub(crate) fn set_store(&mut self, store: Option<NonNull<StoreOpaque>>) {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            self.store = store;

            if let Some(mut store) = store {
                let store = store.as_mut();

                self.vm_store_context()
                    .write(Some(VmPtr::from(store.vm_store_context_ptr())));
                // The epoch counter only exists on targets with 64-bit atomics.
                #[cfg(target_has_atomic = "64")]
                self.epoch_ptr().write(Some(VmPtr::from(NonNull::from(
                    store.engine().epoch_counter(),
                ))));

                // if self.env_module().needs_gc_heap {
                //     self.set_gc_heap(Some(store.gc_store_mut().expect(
                //         "if we need a GC heap, then `Instance::new_raw` should have already \
                //          allocated it for us",
                //     )));
                // } else {
                //     self.set_gc_heap(None);
                // }
            } else {
                self.vm_store_context().write(None);
                #[cfg(target_has_atomic = "64")]
                self.epoch_ptr().write(None);
                // self.set_gc_heap(None);
            }
        }
    }
757
758 // unsafe fn set_gc_heap(&mut self, gc_store: Option<&mut StoreOpaque>) {
759 // if let Some(gc_store) = gc_store {
760 // let heap = gc_store.gc_heap.heap_slice_mut();
761 // self.gc_heap_bound().write(heap.len());
762 // self.gc_heap_base()
763 // .write(Some(NonNull::from(heap).cast().into()));
764 // self.gc_heap_data()
765 // .write(Some(gc_store.gc_heap.vmctx_gc_heap_data().into()));
766 // } else {
767 // self.gc_heap_bound().write(0);
768 // self.gc_heap_base().write(None);
769 // self.gc_heap_data().write(None);
770 // }
771 // }
772
    /// Writes `callee` into the `VMContext`'s callee slot.
    pub(crate) fn set_callee(&mut self, callee: Option<NonNull<VMFunctionBody>>) {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            let callee = callee.map(VmPtr::from);
            self.vmctx_plus_offset_mut::<Option<VmPtr<VMFunctionBody>>>(
                StaticVMShape.vmctx_callee(),
            )
            .write(callee);
        }
    }
783
784 // VMContext accessors
785
    /// Recovers the `Instance` that owns `vmctx` and runs `f` on it.
    ///
    /// # Safety
    ///
    /// `vmctx` must be a live pointer produced by [`Instance::vmctx`], i.e.
    /// the trailing `vmctx` field of an allocation laid out via
    /// `Instance::alloc_layout`.
    #[inline]
    pub unsafe fn from_vmctx<R>(
        vmctx: NonNull<VMContext>,
        f: impl FnOnce(&mut Instance) -> R,
    ) -> R {
        // Safety: ensured by caller
        unsafe {
            // `vmctx` sits at the end of the `Instance` (`#[repr(C)]`, last
            // field), so the owning instance starts `size_of::<Instance>()`
            // bytes earlier.
            let mut ptr = vmctx.byte_sub(size_of::<Instance>()).cast::<Instance>();
            f(ptr.as_mut())
        }
    }
797
    /// Return a reference to the vmctx used by compiled wasm code.
    #[inline]
    pub fn vmctx(&self) -> NonNull<VMContext> {
        // Rebuild the pointer from `vmctx_self_reference` so the result keeps
        // the allocation-wide provenance while carrying the address of the
        // actual `vmctx` field.
        let addr = &raw const self.vmctx;
        let ret = self.vmctx_self_reference.as_ptr().with_addr(addr.addr());
        NonNull::new(ret).unwrap()
    }
805
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// Returns a typed `*const T` at `offset` bytes past the vmctx base.
    ///
    /// # Safety
    ///
    /// This method is unsafe because the `offset` must be within bounds of the
    /// `VMContext` object trailing this instance.
    unsafe fn vmctx_plus_offset<T: VmSafe>(&self, offset: impl Into<u32>) -> *const T {
        // Safety: ensured by caller
        unsafe {
            self.vmctx()
                .as_ptr()
                .byte_add(usize::try_from(offset.into()).unwrap())
                .cast()
        }
    }
    /// Dual of `vmctx_plus_offset`, but for mutability.
    ///
    /// # Safety
    ///
    /// Same contract as `vmctx_plus_offset`: `offset` must be within bounds
    /// of the `VMContext` object trailing this instance.
    unsafe fn vmctx_plus_offset_mut<T: VmSafe>(&mut self, offset: impl Into<u32>) -> *mut T {
        // Safety: ensured by caller
        unsafe {
            self.vmctx()
                .as_ptr()
                .byte_add(usize::try_from(offset.into()).unwrap())
                .cast()
        }
    }
832
    /// Returns a pointer to the vmctx slot holding the optional
    /// `VMStoreContext` pointer.
    #[inline]
    pub fn vm_store_context(&mut self) -> NonNull<Option<VmPtr<VMStoreContext>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_store_context())).unwrap()
        }
    }
840
    /// Return a pointer to the global epoch counter used by this instance.
    #[cfg(target_has_atomic = "64")]
    pub fn epoch_ptr(&mut self) -> NonNull<Option<VmPtr<AtomicU64>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_epoch_ptr())).unwrap()
        }
    }
849
    /// Return a pointer to the GC heap base pointer.
    pub fn gc_heap_base(&mut self) -> NonNull<Option<VmPtr<u8>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_gc_heap_base())).unwrap()
        }
    }
857
    /// Return a pointer to the GC heap bound.
    pub fn gc_heap_bound(&mut self) -> NonNull<usize> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_gc_heap_bound())).unwrap()
        }
    }
865
    /// Return a pointer to the collector-specific heap data.
    pub fn gc_heap_data(&mut self) -> NonNull<Option<VmPtr<u8>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_gc_heap_data())).unwrap()
        }
    }
873
    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmfunction_import(index)) }
    }
    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmtable_import(index)) }
    }
    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmmemory_import(index)) }
    }
    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmglobal_import(index)) }
    }
    /// Return the indexed `VMTagImport`.
    fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmtag_import(index)) }
    }
899
    /// Returns a pointer to the `VMTableDefinition` of the given defined table.
    fn table_ptr(&mut self, index: DefinedTableIndex) -> NonNull<VMTableDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(self.vmshape().vmctx_vmtable_definition(index)))
                .unwrap()
        }
    }
    /// Returns the `VMMemoryDefinition` pointer stored in the vmctx for the
    /// given defined memory (the vmctx stores a pointer, not the definition
    /// itself).
    fn memory_ptr(&mut self, index: DefinedMemoryIndex) -> NonNull<VMMemoryDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        let ptr = unsafe {
            *self.vmctx_plus_offset::<VmPtr<_>>(self.vmshape().vmctx_vmmemory_pointer(index))
        };
        ptr.as_non_null()
    }
    /// Returns a pointer to the `VMGlobalDefinition` of the given defined global.
    fn global_ptr(&mut self, index: DefinedGlobalIndex) -> NonNull<VMGlobalDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(
                self.vmctx_plus_offset_mut(self.vmshape().vmctx_vmglobal_definition(index)),
            )
            .unwrap()
        }
    }
    /// Returns a pointer to the `VMTagDefinition` of the given defined tag.
    fn tag_ptr(&mut self, index: DefinedTagIndex) -> NonNull<VMTagDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(self.vmshape().vmctx_vmtag_definition(index)))
                .unwrap()
        }
    }
930
    /// Returns a pointer to this instance's runtime `Table` for `index`.
    pub fn get_defined_table(&mut self, index: DefinedTableIndex) -> NonNull<Table> {
        NonNull::from(&mut self.tables[index])
    }
934
    /// Resolves `table_index` to the owning instance's runtime `Table`,
    /// following the import link for imported tables.
    pub(super) fn defined_or_imported_table(&mut self, table_index: TableIndex) -> NonNull<Table> {
        self.with_defined_table_index_and_instance(table_index, |idx, instance| {
            NonNull::from(instance.tables.get(idx).unwrap())
        })
    }
940
    /// Runs `f` with the defined-table index and the instance that actually
    /// owns the table: `self` for locally defined tables, or the foreign
    /// instance recovered from the import's vmctx for imported ones.
    fn with_defined_table_index_and_instance<R>(
        &mut self,
        index: TableIndex,
        f: impl FnOnce(DefinedTableIndex, &mut Instance) -> R,
    ) -> R {
        if let Some(defined_table_index) = self.translated_module().defined_table_index(index) {
            f(defined_table_index, self)
        } else {
            let import = self.imported_table(index);
            // Safety: the VMTableImport needs should be correct. TODO test & verify
            unsafe {
                Instance::from_vmctx(import.vmctx.as_non_null(), |foreign_instance| {
                    let foreign_table_def = import.from.as_ptr();
                    let foreign_table_index = foreign_instance.table_index(&*foreign_table_def);
                    f(foreign_table_index, foreign_instance)
                })
            }
        }
    }
960
961 pub(super) fn defined_or_imported_memory(
962 &mut self,
963 index: MemoryIndex,
964 ) -> NonNull<VMMemoryDefinition> {
965 if let Some(defined_index) = self.translated_module().defined_memory_index(index) {
966 self.memory_ptr(defined_index)
967 } else {
968 let import = self.imported_memory(index);
969 import.from.as_non_null()
970 }
971 }
972
973 pub(super) fn defined_or_imported_global(
974 &mut self,
975 index: GlobalIndex,
976 ) -> NonNull<VMGlobalDefinition> {
977 if let Some(index) = self.translated_module().defined_global_index(index) {
978 self.global_ptr(index)
979 } else {
980 self.imported_global(index).from.as_non_null()
981 }
982 }
983
    /// Translates a pointer to a `VMTableDefinition` inside this instance's
    /// `VMContext` back into its `DefinedTableIndex`.
    ///
    /// # Safety
    ///
    /// `table` must point into this instance's own vmctx table-definition
    /// array; otherwise the computed index (and the pointer arithmetic) is
    /// invalid.
    pub unsafe fn table_index(&mut self, table: &VMTableDefinition) -> DefinedTableIndex {
        // Safety: ensured by caller
        unsafe {
            // Element distance from the first table definition is the index.
            let index = DefinedTableIndex::new(
                usize::try_from(
                    ptr::from_ref::<VMTableDefinition>(table)
                        .offset_from(self.table_ptr(DefinedTableIndex::new(0)).as_ptr()),
                )
                .unwrap(),
            );
            assert!(index.index() < self.tables.len());
            index
        }
    }
998
999 pub unsafe fn memory_index(&mut self, table: &VMMemoryDefinition) -> DefinedMemoryIndex {
1000 // Safety: ensured by caller
1001 unsafe {
1002 let index = DefinedMemoryIndex::new(
1003 usize::try_from(
1004 ptr::from_ref::<VMMemoryDefinition>(table)
1005 .offset_from(self.memory_ptr(DefinedMemoryIndex::new(0)).as_ptr()),
1006 )
1007 .unwrap(),
1008 );
1009 assert!(index.index() < self.memories.len());
1010 index
1011 }
1012 }
1013
    /// Fills in this instance's `VMContext` memory region.
    ///
    /// Writes, in this order: the magic header, store/builtins/type-ids
    /// pointers, the import arrays (functions, tables, memories, globals,
    /// tags), the defined table/memory/tag definitions, and finally the
    /// `VMFuncRef` array. Globals are only *zero*-initialized here — their
    /// const-expression values are written later by `initialize_globals`.
    ///
    /// # Safety
    ///
    /// The entire vmctx range described by `module.vmshape()` must have been
    /// allocated for this instance before calling this.
    #[tracing::instrument(level = "debug", skip(self, store, module))]
    unsafe fn initialize_vmctx(
        &mut self,
        store: &mut StoreOpaque,
        imports: Imports,
        module: &Module,
    ) {
        let vmshape = module.vmshape();

        // Safety: there is no safety, we just have to trust that the entire vmctx memory range
        // we need was correctly allocated
        unsafe {
            // initialize vmctx magic — lets trap handlers sanity-check that a
            // pointer really is a vmctx before poking at it.
            tracing::trace!("initializing vmctx magic");
            self.vmctx_plus_offset_mut::<u32>(vmshape.vmctx_magic())
                .write(VMCONTEXT_MAGIC);

            tracing::trace!("initializing store-related fields");
            self.set_store(Some(NonNull::from(store)));

            tracing::trace!("initializing built-in functions array ptr");
            self.vmctx_plus_offset_mut::<VmPtr<VMBuiltinFunctionsArray>>(
                vmshape.vmctx_builtin_functions(),
            )
            .write(VmPtr::from(NonNull::from(&VMBuiltinFunctionsArray::INIT)));

            tracing::trace!("initializing callee");
            self.set_callee(None);

            // NOTE(review): GC heap fields are not initialized yet (GC support pending):
            // gc_heap_base: *mut u8,
            // gc_heap_bound: *mut u8,
            // gc_heap_data: *mut T, //! Collector-specific pointer
            self.vmctx_plus_offset_mut::<VmPtr<VMSharedTypeIndex>>(vmshape.vmctx_type_ids_array())
                .write(VmPtr::from(NonNull::from(self.module.type_ids()).cast()));

            // initialize imports by memcpy-ing the resolved import structs into
            // their vmctx slots; counts were validated during import matching.
            tracing::trace!("initializing function imports");
            debug_assert_eq!(
                imports.functions.len(),
                self.translated_module().num_imported_functions as usize
            );
            ptr::copy_nonoverlapping(
                imports.functions.as_ptr(),
                self.vmctx_plus_offset_mut::<VMFunctionImport>(
                    vmshape.vmctx_imported_functions_begin(),
                ),
                imports.functions.len(),
            );

            tracing::trace!("initializing table imports");
            debug_assert_eq!(
                imports.tables.len(),
                self.translated_module().num_imported_tables as usize
            );
            ptr::copy_nonoverlapping(
                imports.tables.as_ptr(),
                self.vmctx_plus_offset_mut::<VMTableImport>(vmshape.vmctx_imported_tables_begin()),
                imports.tables.len(),
            );

            tracing::trace!("initializing memory imports");
            debug_assert_eq!(
                imports.memories.len(),
                self.translated_module().num_imported_memories as usize
            );
            ptr::copy_nonoverlapping(
                imports.memories.as_ptr(),
                self.vmctx_plus_offset_mut::<VMMemoryImport>(
                    vmshape.vmctx_imported_memories_begin(),
                ),
                imports.memories.len(),
            );

            tracing::trace!("initializing global imports");
            debug_assert_eq!(
                imports.globals.len(),
                self.translated_module().num_imported_globals as usize
            );
            ptr::copy_nonoverlapping(
                imports.globals.as_ptr(),
                self.vmctx_plus_offset_mut::<VMGlobalImport>(
                    vmshape.vmctx_imported_globals_begin(),
                ),
                imports.globals.len(),
            );

            tracing::trace!("initializing tag imports");
            debug_assert_eq!(
                imports.tags.len(),
                self.translated_module().num_imported_tags as usize
            );
            ptr::copy_nonoverlapping(
                imports.tags.as_ptr(),
                self.vmctx_plus_offset_mut::<VMTagImport>(vmshape.vmctx_imported_tags_begin()),
                imports.tags.len(),
            );

            // initialize defined tables: each vmctx slot gets a snapshot of the
            // runtime `Table`'s current definition (base pointer + length).
            tracing::trace!("initializing defined tables");
            for def_index in module
                .translated()
                .tables
                .keys()
                .filter_map(|index| module.translated().defined_table_index(index))
            {
                let def = self.tables[def_index].as_vmtable_definition();
                self.table_ptr(def_index).write(def);
            }

            // Initialize the defined memories. This fills in both the
            // `defined_memories` table and the `owned_memories` table at the same
            // time. Entries in `defined_memories` hold a pointer to a definition
            // (all memories) whereas the `owned_memories` hold the actual
            // definitions of memories owned (not shared) in the module.
            tracing::trace!("initializing defined memories");
            for (def_index, desc) in
                module
                    .translated()
                    .memories
                    .iter()
                    .filter_map(|(index, desc)| {
                        Some((module.translated().defined_memory_index(index)?, desc))
                    })
            {
                let ptr = self.vmctx_plus_offset_mut::<VmPtr<VMMemoryDefinition>>(
                    vmshape.vmctx_vmmemory_pointer(def_index),
                );

                if desc.shared {
                    // Shared memories are not supported yet; their definition
                    // would live outside the vmctx (see commented sketch below).
                    // let def_ptr = self.memories[def_index]
                    //     .as_shared_memory()
                    //     .unwrap()
                    //     .vmmemory_ptr();
                    // ptr.write(VmPtr::from(def_ptr));

                    todo!()
                } else {
                    let owned_index = self.translated_module().owned_memory_index(def_index);
                    let owned_ptr = self.vmctx_plus_offset_mut::<VMMemoryDefinition>(
                        vmshape.vmctx_vmmemory_definition(owned_index),
                    );

                    // Write the definition into the owned slot, then point the
                    // per-memory pointer slot at it.
                    owned_ptr.write(self.memories[def_index].vmmemory_definition());
                    ptr.write(VmPtr::from(NonNull::new(owned_ptr).unwrap()));
                }
            }

            // Zero-initialize the globals so that nothing is uninitialized memory
            // after this function returns. The globals are actually initialized
            // with their const expression initializers after the instance is fully
            // allocated.
            tracing::trace!("initializing defined globals");
            for (index, _init) in &module.translated().global_initializers {
                self.global_ptr(index).write(VMGlobalDefinition::new());
            }

            tracing::trace!("initializing defined tags");
            for (def_index, tag) in
                module.translated().tags.iter().filter_map(|(index, ty)| {
                    Some((module.translated().defined_tag_index(index)?, ty))
                })
            {
                // Each tag definition records the engine-interned signature index.
                self.tag_ptr(def_index).write(VMTagDefinition::new(
                    tag.signature.unwrap_engine_type_index(),
                ));
            }

            tracing::trace!("initializing func refs array");
            self.initialize_vmfunc_refs(&imports, module);
        }
    }
1186
    /// Populates the vmctx `VMFuncRef` array for every *escaping* function
    /// (functions whose reference can leave the module, e.g. via tables or
    /// exports). Non-escaping functions get no func-ref slot.
    ///
    /// # Safety
    ///
    /// among other things the caller has to ensure that this is only ever called **after**
    /// calling `Instance::initialize_vmctx` (this reads the type-ids array
    /// pointer that `initialize_vmctx` wrote).
    #[tracing::instrument(level = "debug", skip(self, module))]
    unsafe fn initialize_vmfunc_refs(&mut self, imports: &Imports, module: &Module) {
        // Safety: the caller pinky-promised that the vmctx is correctly initialized
        unsafe {
            let vmshape = module.vmshape();

            for (index, func) in module
                .translated()
                .functions
                .iter()
                .filter(|(_, f)| f.is_escaping())
            {
                // Translate the module-local type index into the engine-wide
                // `VMSharedTypeIndex` via the type-ids array in the vmctx.
                let type_index = {
                    let base: *const VMSharedTypeIndex = (*self
                        .vmctx_plus_offset_mut::<VmPtr<VMSharedTypeIndex>>(
                            StaticVMShape.vmctx_type_ids_array(),
                        ))
                    .as_ptr();
                    *base.add(func.signature.unwrap_module_type_index().index())
                };

                let func_ref =
                    if let Some(def_index) = module.translated().defined_func_index(index) {
                        // Locally defined function: point at our own code and vmctx.
                        VMFuncRef {
                            array_call: self.module().array_to_wasm_trampoline(def_index).expect(
                                "should have array-to-Wasm trampoline for escaping function",
                            ),
                            wasm_call: Some(VmPtr::from(self.module.function(def_index))),
                            type_index,
                            vmctx: VmPtr::from(VMOpaqueContext::from_vmcontext(self.vmctx())),
                        }
                    } else {
                        // Imported function: forward the import's entry points and
                        // the *owning* instance's vmctx.
                        let import = &imports.functions[index.index()];
                        VMFuncRef {
                            array_call: import.array_call,
                            wasm_call: Some(import.wasm_call),
                            vmctx: import.vmctx,
                            type_index,
                        }
                    };

                self.vmctx_plus_offset_mut::<VMFuncRef>(vmshape.vmctx_vmfunc_ref(func.func_ref))
                    .write(func_ref);
            }
        }
    }
1237
    /// Evaluates each defined global's const-expression initializer and writes
    /// the result into its vmctx slot, replacing the zeroed placeholder written
    /// by `initialize_vmctx`.
    ///
    /// # Safety
    ///
    /// among other things the caller has to ensure that this is only ever called **after**
    /// calling `Instance::initialize_vmctx`
    #[tracing::instrument(level = "debug", skip(self, store, ctx, const_eval, module))]
    unsafe fn initialize_globals(
        &mut self,
        store: &mut StoreOpaque,
        ctx: &mut ConstEvalContext,
        const_eval: &mut ConstExprEvaluator,
        module: &Module,
    ) -> crate::Result<()> {
        for (def_index, init) in &module.translated().global_initializers {
            // Const expressions were validated at translation time, hence the expect.
            let vmval = const_eval
                .eval(store, ctx, init)
                .expect("const expression should be valid");
            let index = self.translated_module().global_index(def_index);
            let ty = self.translated_module().globals[index].content_type;

            // Safety: the caller pinky-promised that the vmctx is correctly initialized
            unsafe {
                self.global_ptr(def_index)
                    .write(VMGlobalDefinition::from_vmval(store, ty, vmval)?);
            }
        }

        Ok(())
    }
1266
    /// Applies table initializers: first the per-table initial values (null or
    /// a const expression), then the active element segments.
    ///
    /// # Safety
    ///
    /// among other things the caller has to ensure that this is only ever called **after**
    /// calling `Instance::initialize_vmctx`
    #[tracing::instrument(level = "debug", skip(self, store, ctx, const_eval, module))]
    unsafe fn initialize_tables(
        &mut self,
        store: &mut StoreOpaque,
        ctx: &mut ConstEvalContext,
        const_eval: &mut ConstExprEvaluator,
        module: &Module,
    ) -> crate::Result<()> {
        // update initial values
        for (def_index, init) in &module.translated().table_initializers.initial_values {
            match init {
                // Tables default to null elements; nothing to write.
                TableInitialValue::RefNull => {}
                TableInitialValue::ConstExpr(expr) => {
                    let index = self.translated_module().table_index(def_index);
                    let (heap_top_ty, shared) = self.translated_module().tables[index]
                        .element_type
                        .heap_type
                        .top();
                    // Shared tables are not supported here.
                    assert!(!shared);

                    let vmval = const_eval
                        .eval(store, ctx, expr)
                        .expect("const expression should be valid");

                    // Safety: the caller pinky-promised that the vmctx is correctly initialized
                    let table = unsafe { self.get_defined_table(def_index).as_mut() };

                    // Fill every slot of the table with the evaluated element.
                    match heap_top_ty {
                        WasmHeapTopType::Func => {
                            let funcref = NonNull::new(vmval.get_funcref().cast::<VMFuncRef>());
                            let items = (0..table.size()).map(|_| funcref);
                            table.init_func(0, items)?;
                        }
                        WasmHeapTopType::Extern | WasmHeapTopType::Any => todo!("gc proposal"),
                        WasmHeapTopType::Exn => todo!("exception-handling proposal"),
                        WasmHeapTopType::Cont => todo!("continuation proposal"),
                    }
                }
            }
        }

        // run active elements
        for segment in &module.translated().table_initializers.segments {
            let start = const_eval
                .eval(store, ctx, &segment.offset)
                .expect("const expression should be valid");

            // `ctx.instance` is used (rather than `self`) so the evaluator context
            // and the instance performing the init stay the same object.
            ctx.instance.table_init_segment(
                store,
                const_eval,
                segment.table_index,
                &segment.elements,
                start.get_u64(),
                0,
                segment.elements.len(),
            )?;
        }

        Ok(())
    }
1331
    /// Copies each active data segment into its target linear memory, after a
    /// bounds check against the memory's current length.
    ///
    /// # Safety
    ///
    /// among other things the caller has to ensure that this is only ever called **after**
    /// calling `Instance::initialize_vmctx`
    #[tracing::instrument(level = "debug", skip(self, store, ctx, const_eval, module))]
    unsafe fn initialize_memories(
        &mut self,
        store: &mut StoreOpaque,
        ctx: &mut ConstEvalContext,
        const_eval: &mut ConstExprEvaluator,
        module: &Module,
    ) -> crate::Result<()> {
        for initializer in &module.translated().memory_initializers {
            // Evaluate the segment's offset, reading it at the width dictated by
            // the target memory's index type (memory64 support).
            let start: usize = {
                let vmval = const_eval
                    .eval(store, ctx, &initializer.offset)
                    .expect("const expression should be valid");

                match self.translated_module().memories[initializer.memory_index].index_type {
                    IndexType::I32 => usize::try_from(vmval.get_u32()).unwrap(),
                    IndexType::I64 => usize::try_from(vmval.get_u64()).unwrap(),
                }
            };

            // Safety: the caller pinky-promised that the vmctx is correctly initialized
            let memory = unsafe {
                self.defined_or_imported_memory(initializer.memory_index)
                    .as_mut()
            };

            // NOTE(review): `unwrap` panics on usize overflow of start+len rather
            // than returning an error — acceptable only if offsets were validated
            // upstream; verify.
            let end = start.checked_add(initializer.data.len()).unwrap();
            ensure!(end <= memory.current_length(Ordering::Relaxed));

            // Safety: we did all the checking we could above
            unsafe {
                let src = &initializer.data;
                let dst = memory.base.as_ptr().add(start);
                ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len());
            }
        }

        Ok(())
    }
1375}
1376
/// A view over an [`Instance`] that additionally exposes its store.
///
/// `#[repr(transparent)]` guarantees this type has exactly the size and layout
/// of `Instance`, which `InstanceAndStore::from_vmctx` relies on when casting
/// the pointer recovered from a `VMContext`.
#[repr(transparent)]
pub struct InstanceAndStore {
    // The wrapped instance; the store is reached through `instance.store`.
    instance: Instance,
}
1381
impl InstanceAndStore {
    /// Recovers the `InstanceAndStore` that owns `vmctx` and runs `f` on it.
    ///
    /// # Safety
    ///
    /// `vmctx` must be a valid `VMContext` pointer whose owning `Instance` is
    /// laid out directly before it in memory.
    #[inline]
    pub(crate) unsafe fn from_vmctx<R>(
        vmctx: NonNull<VMContext>,
        f: impl for<'a> FnOnce(&'a mut Self) -> R,
    ) -> R {
        // Compile-time proof that the `repr(transparent)` cast below is sound.
        const_assert_eq!(size_of::<InstanceAndStore>(), size_of::<Instance>());
        // Safety: the instance is always directly before the vmctx in memory
        unsafe {
            let mut ptr = vmctx
                .byte_sub(size_of::<Instance>())
                .cast::<InstanceAndStore>();

            f(ptr.as_mut())
        }
    }

    /// Splits this view into simultaneous mutable borrows of the instance and
    /// its opaque store.
    ///
    /// Panics if the instance's store pointer was never set.
    #[inline]
    pub(crate) fn unpack_mut(&mut self) -> (&mut Instance, &mut StoreOpaque) {
        // Safety: the store pointer is installed during vmctx initialization and
        // is presumed to outlive the instance — TODO confirm this invariant holds
        // on all paths that construct an `InstanceAndStore`.
        unsafe {
            let store = self.instance.store.unwrap().as_mut();
            (&mut self.instance, store)
        }
    }

    /// Like [`Self::unpack_mut`], but casts the store to its concrete
    /// `StoreInner<T>`.
    ///
    /// # Safety
    ///
    /// The caller must guarantee the store's actual state type is `T`.
    #[inline]
    pub(crate) unsafe fn unpack_with_state_mut<T>(
        &mut self,
    ) -> (&mut Instance, &'_ mut StoreInner<T>) {
        let mut store_ptr = self.instance.store.unwrap().cast::<StoreInner<T>>();
        (
            &mut self.instance,
            // Safety: ensured by caller
            unsafe { store_ptr.as_mut() },
        )
    }
}
1420
1421/// # Safety
1422///
1423/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1424unsafe fn check_init_bounds(
1425 store: &mut StoreOpaque,
1426 instance: &mut Instance,
1427 module: &Module,
1428) -> crate::Result<()> {
1429 // Safety: ensured by caller
1430 unsafe {
1431 check_table_init_bounds(store, instance, module)?;
1432 check_memory_init_bounds(store, instance, &module.translated().memory_initializers)?;
1433 }
1434 Ok(())
1435}
1436
/// Checks that every active element segment of `module` fits inside its target
/// table at the segment's evaluated offset.
///
/// # Safety
///
/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
unsafe fn check_table_init_bounds(
    store: &mut StoreOpaque,
    instance: &mut Instance,
    module: &Module,
) -> crate::Result<()> {
    // Safety: the caller pinky-promised to have called initialize_vmctx before calling this function
    // so the VMTableDefinitions are all properly initialized
    unsafe {
        let mut const_evaluator = ConstExprEvaluator::default();

        for segment in &module.translated().table_initializers.segments {
            let table = instance
                .defined_or_imported_table(segment.table_index)
                .as_ref();
            let mut context = ConstEvalContext::new(instance);
            let start = const_evaluator
                .eval(store, &mut context, &segment.offset)
                .expect("const expression should be valid");
            // NOTE(review): the offset is read as u32 here, while
            // `initialize_tables` reads it with `get_u64()` — if 64-bit tables
            // are ever supported this would truncate; verify the intended width.
            let start = usize::try_from(start.get_u32()).unwrap();
            let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap());

            // `end` is None on overflow, which is treated as out of bounds.
            match end {
                Some(end) if end <= table.size() => {
                    // Initializer is in bounds
                }
                _ => {
                    bail!("table out of bounds: elements segment does not fit")
                }
            }
        }
        Ok(())
    }
}
1473
1474/// # Safety
1475///
1476/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1477unsafe fn check_memory_init_bounds(
1478 store: &mut StoreOpaque,
1479 instance: &mut Instance,
1480 initializers: &[MemoryInitializer],
1481) -> crate::Result<()> {
1482 // Safety: the caller pinky-promised to have called initialize_vmctx before calling this function
1483 // so the VMMemoryDefinitions are all properly initialized
1484 unsafe {
1485 for init in initializers {
1486 let memory = instance
1487 .defined_or_imported_memory(init.memory_index)
1488 .as_ref();
1489 let start = get_memory_init_start(store, init, instance)?;
1490 let end = usize::try_from(start)
1491 .ok()
1492 .and_then(|start| start.checked_add(init.data.len()));
1493
1494 match end {
1495 Some(end) if end <= memory.current_length(Ordering::Relaxed) => {
1496 // Initializer is in bounds
1497 }
1498 _ => {
1499 bail!("memory out of bounds: data segment does not fit")
1500 }
1501 }
1502 }
1503
1504 Ok(())
1505 }
1506}
1507
1508/// # Safety
1509///
1510/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1511unsafe fn get_memory_init_start(
1512 store: &mut StoreOpaque,
1513 init: &MemoryInitializer,
1514 instance: &mut Instance,
1515) -> crate::Result<u64> {
1516 // Safety: the caller pinky-promised that the vmctx is correctly initialized
1517 let mut context = unsafe { ConstEvalContext::new(instance) };
1518 let mut const_evaluator = ConstExprEvaluator::default();
1519 const_evaluator
1520 .eval(store, &mut context, &init.offset)
1521 .map(
1522 |v| match instance.translated_module().memories[init.memory_index].index_type {
1523 IndexType::I32 => v.get_u32().into(),
1524 IndexType::I64 => v.get_u64(),
1525 },
1526 )
1527}