//! Next Generation WASM Microkernel Operating System
1// Copyright 2025 Jonas Kruckenberg
2//
3// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
4// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
5// http://opensource.org/licenses/MIT>, at your option. This file may not be
6// copied, modified, or distributed except according to those terms.
7
8use crate::mem::VirtualAddress;
9use crate::wasm::TrapKind;
10use crate::wasm::indices::{
11 DataIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, DefinedTagIndex,
12 ElemIndex, EntityIndex, FuncIndex, GlobalIndex, MemoryIndex, TableIndex, TagIndex,
13 VMSharedTypeIndex,
14};
15use crate::wasm::module::Module;
16use crate::wasm::store::{StoreInner, StoreOpaque};
17use crate::wasm::translate::{
18 IndexType, MemoryInitializer, TableInitialValue, TableSegmentElements, TranslatedModule,
19 WasmHeapTopType, WasmHeapTypeInner,
20};
21use crate::wasm::trap_handler::WasmFault;
22use crate::wasm::vm::const_eval::{ConstEvalContext, ConstExprEvaluator};
23use crate::wasm::vm::memory::Memory;
24use crate::wasm::vm::provenance::{VmPtr, VmSafe};
25use crate::wasm::vm::table::{Table, TableElement, TableElementType};
26use crate::wasm::vm::{
27 Export, ExportedFunction, ExportedGlobal, ExportedMemory, ExportedTable, ExportedTag, Imports,
28 StaticVMShape, VMBuiltinFunctionsArray, VMCONTEXT_MAGIC, VMContext, VMFuncRef, VMFunctionBody,
29 VMFunctionImport, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport,
30 VMOpaqueContext, VMShape, VMStoreContext, VMTableDefinition, VMTableImport, VMTagDefinition,
31 VMTagImport,
32};
33use alloc::string::String;
34use anyhow::{bail, ensure};
35use core::alloc::Layout;
36use core::marker::PhantomPinned;
37use core::ptr::NonNull;
38use core::sync::atomic::{AtomicU64, Ordering};
39use core::{fmt, ptr, slice};
40use cranelift_entity::packed_option::ReservedValue;
41use cranelift_entity::{EntityRef, EntitySet, PrimaryMap};
42use static_assertions::const_assert_eq;
43
/// A (possibly null) handle to an [`Instance`].
///
/// The instance itself lives in a separate allocation (see
/// `Instance::from_parts`); this handle only stores the pointer to it.
#[derive(Debug)]
pub struct InstanceHandle {
    // `None` only for handles produced by `InstanceHandle::null()`; every
    // accessor `unwrap()`s this and therefore panics on a null handle.
    instance: Option<NonNull<Instance>>,
}
// Safety: TODO — NOTE(review): sending the handle between threads assumes the
// pointed-to `Instance` is not concurrently aliased elsewhere; verify before
// relying on this.
unsafe impl Send for InstanceHandle {}
// Safety: TODO — NOTE(review): shared (`&self`) access is only sound if it
// cannot race with mutation through another handle; verify.
unsafe impl Sync for InstanceHandle {}
52
#[repr(C)] // ensure that the vmctx field is last.
#[derive(Debug)]
pub struct Instance {
    /// The module this is an instance of.
    module: Module,
    /// Linear memories defined (not imported) by this instance.
    pub(in crate::wasm) memories: PrimaryMap<DefinedMemoryIndex, Memory>,
    /// Tables defined (not imported) by this instance.
    pub(in crate::wasm) tables: PrimaryMap<DefinedTableIndex, Table>,
    /// Element segments marked dropped (`elem_drop` inserts here; seeded from
    /// the module's active table initializers in `from_parts`).
    dropped_elements: EntitySet<ElemIndex>,
    /// Data segments marked dropped (`data_drop` inserts here; seeded from
    /// the module's active memory initializers in `from_parts`).
    dropped_data: EntitySet<DataIndex>,

    /// A pointer to the `vmctx` field at the end of the `Instance`.
    ///
    /// This pointer is created upon allocation with provenance that covers the *entire* instance
    /// and VMContext memory. Pointers to VMContext are derived from it inheriting this broader
    /// provenance. This is important for correctness.
    vmctx_self_reference: NonNull<VMContext>,
    /// Self-pointer back to `Store<T>` and its functions. Not present for
    /// the brief time that `Store<T>` is itself being created. Also not
    /// present for some niche uses that are disconnected from stores (e.g.
    /// cross-thread stuff used in `InstancePre`)
    store: Option<NonNull<StoreOpaque>>,
    /// Additional context used by compiled wasm code. This field is last, and
    /// represents a dynamically-sized array that extends beyond the nominal
    /// end of the struct (similar to a flexible array member).
    vmctx: VMContext,
}
78
impl InstanceHandle {
    /// Creates an "empty" instance handle which internally has a null pointer
    /// to an instance. Actually calling any methods on this `InstanceHandle` will always
    /// panic.
    pub fn null() -> InstanceHandle {
        InstanceHandle { instance: None }
    }

    /// Runs the post-allocation initialization steps for this instance:
    /// fills in the trailing `VMContext`, optionally validates segment bounds
    /// eagerly (pre-bulk-memory semantics), then runs the table, memory, and
    /// global initializers in that order.
    ///
    /// # Errors
    ///
    /// Returns an error if the eager bounds check or any initializer fails.
    pub fn initialize(
        &mut self,
        store: &mut StoreOpaque,
        const_eval: &mut ConstExprEvaluator,
        module: &Module,
        imports: Imports,
        is_bulk_memory: bool,
    ) -> crate::Result<()> {
        // Safety: `initialize_vmctx` runs first, so every step below observes
        // an initialized vmctx.
        unsafe {
            self.instance_mut().initialize_vmctx(store, imports, module);

            if !is_bulk_memory {
                // Without the bulk-memory proposal, all active segment bounds
                // must be validated before any initialization takes effect.
                check_init_bounds(store, self.instance_mut(), module)?;
            }

            let mut ctx = ConstEvalContext::new(self.instance.unwrap().as_mut());
            self.instance_mut()
                .initialize_tables(store, &mut ctx, const_eval, module)?;
            self.instance_mut()
                .initialize_memories(store, &mut ctx, const_eval, module)?;
            self.instance_mut()
                .initialize_globals(store, &mut ctx, const_eval, module)?;
        }

        Ok(())
    }

    /// Logs (at `debug` level) a structured dump of every field and array of
    /// this instance's `VMContext`, with offsets resolved through
    /// `StaticVMShape` / the module's `VMShape`.
    pub fn debug_vmctx(&self) {
        // Local adapter so the raw vmctx memory can be rendered via `Debug`.
        struct Dbg<'a> {
            data: &'a Instance,
        }
        impl fmt::Debug for Dbg<'_> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Safety: Reading from JIT-owned memory is inherently unsafe.
                unsafe {
                    f.debug_struct("VMContext")
                        .field(
                            "magic",
                            &*self
                                .data
                                .vmctx_plus_offset::<u32>(StaticVMShape.vmctx_magic()),
                        )
                        .field(
                            "vm_store_context",
                            &*self
                                .data
                                .vmctx_plus_offset::<Option<VmPtr<VMStoreContext>>>(
                                    StaticVMShape.vmctx_store_context(),
                                ),
                        )
                        .field(
                            "builtin_functions",
                            &*self
                                .data
                                .vmctx_plus_offset::<VmPtr<VMBuiltinFunctionsArray>>(
                                    StaticVMShape.vmctx_builtin_functions(),
                                ),
                        )
                        .field(
                            "callee",
                            &*self
                                .data
                                .vmctx_plus_offset::<Option<VmPtr<VMFunctionBody>>>(
                                    StaticVMShape.vmctx_callee(),
                                ),
                        )
                        .field(
                            "epoch_ptr",
                            &*self.data.vmctx_plus_offset::<Option<VmPtr<AtomicU64>>>(
                                StaticVMShape.vmctx_epoch_ptr(),
                            ),
                        )
                        .field(
                            "gc_heap_base",
                            &*self.data.vmctx_plus_offset::<Option<VmPtr<u8>>>(
                                StaticVMShape.vmctx_gc_heap_base(),
                            ),
                        )
                        .field(
                            "gc_heap_bound",
                            &*self
                                .data
                                .vmctx_plus_offset::<usize>(StaticVMShape.vmctx_gc_heap_bound()),
                        )
                        .field(
                            "gc_heap_data",
                            &*self.data.vmctx_plus_offset::<Option<VmPtr<u8>>>(
                                StaticVMShape.vmctx_gc_heap_data(),
                            ),
                        )
                        .field(
                            "type_ids",
                            &*self.data.vmctx_plus_offset::<VmPtr<VMSharedTypeIndex>>(
                                StaticVMShape.vmctx_type_ids_array(),
                            ),
                        )
                        .field(
                            "imported_memories",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMMemoryImport>(
                                    self.data.vmshape().vmctx_imported_memories_begin(),
                                ),
                                self.data.vmshape().num_imported_memories as usize,
                            ),
                        )
                        .field(
                            "memories",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VmPtr<VMMemoryDefinition>>(
                                    self.data.vmshape().vmctx_memories_begin(),
                                ),
                                self.data.vmshape().num_defined_memories as usize,
                            ),
                        )
                        .field(
                            "owned_memories",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMMemoryDefinition>(
                                    self.data.vmshape().vmctx_owned_memories_begin(),
                                ),
                                self.data.vmshape().num_owned_memories as usize,
                            ),
                        )
                        .field(
                            "imported_functions",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMFunctionImport>(
                                    self.data.vmshape().vmctx_imported_functions_begin(),
                                ),
                                self.data.vmshape().num_imported_functions as usize,
                            ),
                        )
                        .field(
                            "imported_tables",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTableImport>(
                                    self.data.vmshape().vmctx_imported_tables_begin(),
                                ),
                                self.data.vmshape().num_imported_tables as usize,
                            ),
                        )
                        .field(
                            "imported_globals",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMGlobalImport>(
                                    self.data.vmshape().vmctx_imported_globals_begin(),
                                ),
                                self.data.vmshape().num_imported_globals as usize,
                            ),
                        )
                        .field(
                            "imported_tags",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTagImport>(
                                    self.data.vmshape().vmctx_imported_tags_begin(),
                                ),
                                self.data.vmshape().num_imported_tags as usize,
                            ),
                        )
                        .field(
                            "tables",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTableDefinition>(
                                    self.data.vmshape().vmctx_tables_begin(),
                                ),
                                self.data.vmshape().num_defined_tables as usize,
                            ),
                        )
                        .field(
                            "globals",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMGlobalDefinition>(
                                    self.data.vmshape().vmctx_globals_begin(),
                                ),
                                self.data.vmshape().num_defined_globals as usize,
                            ),
                        )
                        .field(
                            "tags",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMTagDefinition>(
                                    self.data.vmshape().vmctx_tags_begin(),
                                ),
                                self.data.vmshape().num_defined_tags as usize,
                            ),
                        )
                        .field(
                            "func_refs",
                            &slice::from_raw_parts(
                                self.data.vmctx_plus_offset::<VMFuncRef>(
                                    self.data.vmshape().vmctx_func_refs_begin(),
                                ),
                                self.data.vmshape().num_escaped_funcs as usize,
                            ),
                        )
                        .finish()
                }
            }
        }

        tracing::debug!(
            "{:#?}",
            Dbg {
                data: self.instance()
            }
        );
    }

    /// Returns the pointer to this instance's trailing `VMContext`.
    ///
    /// Panics if this is a null handle.
    pub fn vmctx(&self) -> NonNull<VMContext> {
        self.instance().vmctx()
    }

    /// Return a reference to a module.
    pub fn module(&self) -> &Module {
        self.instance().module()
    }

    /// Lookup a table by index.
    pub fn get_exported_table(&mut self, export: TableIndex) -> ExportedTable {
        self.instance_mut().get_exported_table(export)
    }

    /// Lookup a memory by index.
    pub fn get_exported_memory(&mut self, export: MemoryIndex) -> ExportedMemory {
        self.instance_mut().get_exported_memory(export)
    }

    /// Lookup a function by index.
    pub fn get_exported_func(&mut self, export: FuncIndex) -> ExportedFunction {
        self.instance_mut().get_exported_func(export)
    }

    /// Lookup a global by index.
    pub fn get_exported_global(&mut self, export: GlobalIndex) -> ExportedGlobal {
        self.instance_mut().get_exported_global(export)
    }

    /// Lookup a tag by index.
    pub fn get_exported_tag(&mut self, export: TagIndex) -> ExportedTag {
        self.instance_mut().get_exported_tag(export)
    }

    /// Lookup an item with the given index.
    pub fn get_export_by_index(&mut self, export: EntityIndex) -> Export {
        match export {
            EntityIndex::Function(i) => Export::Function(self.get_exported_func(i)),
            EntityIndex::Global(i) => Export::Global(self.get_exported_global(i)),
            EntityIndex::Table(i) => Export::Table(self.get_exported_table(i)),
            EntityIndex::Memory(i) => Export::Memory(self.get_exported_memory(i)),
            EntityIndex::Tag(i) => Export::Tag(self.get_exported_tag(i)),
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> wasmparser::collections::index_map::Iter<String, EntityIndex> {
        self.instance().translated_module().exports.iter()
    }

    /// Returns the raw (non-null) pointer to the backing `Instance`.
    ///
    /// Panics if this is a null handle.
    pub fn as_non_null(&self) -> NonNull<Instance> {
        self.instance.unwrap()
    }

    /// Return a reference to the contained `Instance`.
    ///
    /// Panics if this is a null handle (see [`InstanceHandle::null`]).
    #[inline]
    pub fn instance(&self) -> &Instance {
        // Safety: the constructor ensures the instance is correctly initialized
        unsafe { self.instance.unwrap().as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    ///
    /// Panics if this is a null handle (see [`InstanceHandle::null`]).
    #[inline]
    pub fn instance_mut(&mut self) -> &mut Instance {
        // Safety: the constructor ensures the instance is correctly initialized
        unsafe { self.instance.unwrap().as_mut() }
    }

    /// Attempts to convert from the host `addr` specified to a WebAssembly
    /// based address recorded in `WasmFault`.
    ///
    /// This method will check all linear memories that this instance contains
    /// to see if any of them contain `addr`. If one does then `Some` is
    /// returned with metadata about the wasm fault. Otherwise `None` is
    /// returned and `addr` doesn't belong to this instance.
    pub fn wasm_fault(&self, faulting_addr: VirtualAddress) -> Option<WasmFault> {
        self.instance().wasm_fault(faulting_addr)
    }
}
379
380impl Instance {
    /// Writes a fresh `Instance` into the pre-allocated memory at `instance`
    /// and returns an [`InstanceHandle`] wrapping it.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `instance: NonNull<Instance>` got allocated using the
    /// `Instance::alloc_layout` to ensure it is the right size for the VMContext
    pub unsafe fn from_parts(
        module: Module,
        instance: NonNull<Instance>,
        tables: PrimaryMap<DefinedTableIndex, Table>,
        memories: PrimaryMap<DefinedMemoryIndex, Memory>,
    ) -> InstanceHandle {
        // Seed the dropped-segment sets from the module's active initializers.
        // NOTE(review): presumably active segments count as consumed (dropped)
        // once instantiation runs — confirm against the initializer code.
        let dropped_elements = module.translated().active_table_initializers.clone();
        let dropped_data = module.translated().active_memory_initializers.clone();

        // Safety: we have to trust the caller that `NonNull<Instance>` got allocated using the correct
        // `Instance::alloc_layout` and therefore has the right-sized vmctx memory
        unsafe {
            instance.write(Instance {
                module: module.clone(),
                memories,
                tables,
                dropped_elements,
                dropped_data,
                // One-past the `Instance` header, i.e. the start of the trailing
                // `VMContext`; keeps provenance over the entire allocation.
                vmctx_self_reference: instance.add(1).cast(),
                store: None,
                vmctx: VMContext {
                    _marker: PhantomPinned,
                },
            });
        }

        InstanceHandle {
            instance: Some(instance),
        }
    }
415
416 pub fn alloc_layout(offsets: &VMShape) -> Layout {
417 let size = size_of::<Self>()
418 .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
419 .unwrap();
420 let align = align_of::<Self>();
421 Layout::from_size_align(size, align).unwrap()
422 }
423
    /// Return a reference to the [`Module`] this instance was created from.
    pub fn module(&self) -> &Module {
        &self.module
    }
    /// Return the translation metadata of this instance's module.
    pub fn translated_module(&self) -> &TranslatedModule {
        self.module.translated()
    }
    /// Return the `VMShape` describing this instance's `VMContext` layout.
    pub fn vmshape(&self) -> &VMShape {
        self.module.vmshape()
    }
433
434 fn wasm_fault(&self, addr: VirtualAddress) -> Option<WasmFault> {
435 let mut fault = None;
436
437 for (_, memory) in &self.memories {
438 let accessible = memory.wasm_accessible();
439 if accessible.start <= addr && addr < accessible.end {
440 // All linear memories should be disjoint so assert that no
441 // prior fault has been found.
442 assert!(fault.is_none());
443 fault = Some(WasmFault {
444 memory_size: memory.byte_size(),
445 wasm_address: u64::try_from(addr.checked_sub_addr(accessible.start).unwrap())
446 .unwrap(),
447 });
448 }
449 }
450
451 fault
452 }
453
    /// Lookup an exported function by index.
    ///
    /// Panics if `index` is the reserved (null) function index.
    pub fn get_exported_func(&mut self, index: FuncIndex) -> ExportedFunction {
        ExportedFunction {
            func_ref: self.get_func_ref(index).unwrap(),
        }
    }
459 pub fn get_exported_table(&mut self, index: TableIndex) -> ExportedTable {
460 let (definition, vmctx) =
461 if let Some(def_index) = self.translated_module().defined_table_index(index) {
462 (self.table_ptr(def_index), self.vmctx())
463 } else {
464 let import = self.imported_table(index);
465 (import.from.as_non_null(), import.vmctx.as_non_null())
466 };
467
468 ExportedTable {
469 definition,
470 vmctx,
471 table: self.translated_module().tables[index].clone(),
472 }
473 }
474 pub fn get_exported_memory(&mut self, index: MemoryIndex) -> ExportedMemory {
475 let (definition, vmctx, def_index) =
476 if let Some(def_index) = self.translated_module().defined_memory_index(index) {
477 (self.memory_ptr(def_index), self.vmctx(), def_index)
478 } else {
479 let import = self.imported_memory(index);
480 (
481 import.from.as_non_null(),
482 import.vmctx.as_non_null(),
483 import.index,
484 )
485 };
486
487 ExportedMemory {
488 definition,
489 vmctx,
490 index: def_index,
491 memory: self.translated_module().memories[index].clone(),
492 }
493 }
494 pub fn get_exported_global(&mut self, index: GlobalIndex) -> ExportedGlobal {
495 ExportedGlobal {
496 definition: if let Some(def_index) =
497 self.translated_module().defined_global_index(index)
498 {
499 self.global_ptr(def_index)
500 } else {
501 self.imported_global(index).from.as_non_null()
502 },
503 vmctx: Some(self.vmctx()),
504 global: self.translated_module().globals[index].clone(),
505 }
506 }
507 pub fn get_exported_tag(&mut self, index: TagIndex) -> ExportedTag {
508 ExportedTag {
509 definition: if let Some(def_index) = self.translated_module().defined_tag_index(index) {
510 self.tag_ptr(def_index)
511 } else {
512 self.imported_tag(index).from.as_non_null()
513 },
514 tag: self.translated_module().tags[index],
515 }
516 }
517
    /// Get the given memory's page size, in bytes.
    pub fn memory_page_size(&self, index: MemoryIndex) -> u64 {
        self.translated_module().memories[index].page_size()
    }

    /// Grows the memory at `index` by `delta` pages. Not yet implemented.
    #[expect(unused, reason = "TODO")]
    pub fn memory_grow(
        &mut self,
        store: &mut StoreOpaque,
        index: MemoryIndex,
        delta: u64,
    ) -> crate::Result<Option<u64>> {
        todo!()
    }

    /// Implementation of the `memory.copy` instruction. Not yet implemented.
    #[expect(unused, reason = "TODO")]
    pub fn memory_copy(
        &mut self,
        dst_index: MemoryIndex,
        dst: u64,
        src_index: MemoryIndex,
        src: u64,
        len: u64,
    ) -> Result<(), TrapKind> {
        todo!()
    }

    /// Implementation of the `memory.fill` instruction. Not yet implemented.
    #[expect(unused, reason = "TODO")]
    pub fn memory_fill(
        &mut self,
        memory_index: MemoryIndex,
        dst: u64,
        val: u8,
        len: u64,
    ) -> Result<(), TrapKind> {
        todo!()
    }

    /// Implementation of the `memory.init` instruction. Not yet implemented.
    #[expect(unused, reason = "TODO")]
    pub fn memory_init(
        &mut self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), TrapKind> {
        todo!()
    }
567
    /// Implementation of the `data.drop` instruction: marks the passive data
    /// segment as dropped.
    pub fn data_drop(&mut self, data_index: DataIndex) {
        self.dropped_data.insert(data_index);
    }

    /// Classifies the element type of `table_index` as either a function
    /// table or a GC-reference table.
    pub fn table_element_type(&self, table_index: TableIndex) -> TableElementType {
        match self.translated_module().tables[table_index]
            .element_type
            .heap_type
            .inner
        {
            WasmHeapTypeInner::Func
            | WasmHeapTypeInner::ConcreteFunc(_)
            | WasmHeapTypeInner::NoFunc => TableElementType::Func,
            WasmHeapTypeInner::Extern
            | WasmHeapTypeInner::NoExtern
            | WasmHeapTypeInner::Any
            | WasmHeapTypeInner::Eq
            | WasmHeapTypeInner::I31
            | WasmHeapTypeInner::Array
            | WasmHeapTypeInner::ConcreteArray(_)
            | WasmHeapTypeInner::Struct
            | WasmHeapTypeInner::ConcreteStruct(_)
            | WasmHeapTypeInner::None => TableElementType::GcRef,

            WasmHeapTypeInner::Exn | WasmHeapTypeInner::NoExn => {
                todo!("exception-handling proposal")
            }
            WasmHeapTypeInner::Cont
            | WasmHeapTypeInner::ConcreteCont(_)
            | WasmHeapTypeInner::NoCont => todo!("stack switching proposal"),
        }
    }
600
    /// Grows the table at `table_index` by `delta` elements, filling the new
    /// slots with `init_value`. Works for both defined and imported tables.
    ///
    /// NOTE(review): the `Option<usize>` presumably carries the previous size
    /// on success and `None` when the grow is refused — confirm against
    /// `Table::grow`.
    pub fn table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u64,
        init_value: TableElement,
    ) -> crate::Result<Option<usize>> {
        let res = self
            .with_defined_table_index_and_instance(table_index, |def_index, instance| {
                instance.tables[def_index].grow(delta, init_value)
            })?;

        Ok(res)
    }

    /// Implementation of the `table.fill` instruction: writes `val` into
    /// `len` slots starting at `dst`. Works for both defined and imported
    /// tables.
    pub fn table_fill(
        &mut self,
        table_index: TableIndex,
        dst: u64,
        val: TableElement,
        len: u64,
    ) -> Result<(), TrapKind> {
        self.with_defined_table_index_and_instance(table_index, |def_index, instance| {
            instance.tables[def_index].fill(dst, val, len)
        })
    }
626
    /// Implementation of the `table.init` instruction: copies `len` elements
    /// of the passive element segment `elem_index`, starting at `src`, into
    /// the table at `dst`.
    pub fn table_init(
        &mut self,
        store: &mut StoreOpaque,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u64,
        src: u64,
        len: u64,
    ) -> Result<(), TrapKind> {
        let module = self.module().clone(); // FIXME this clone is here to workaround lifetime issues. remove
        let elements = &module.translated().passive_table_initializers[&elem_index];
        // TODO reuse this const_eval across calls
        let mut const_eval = ConstExprEvaluator::default();
        self.table_init_segment(store, &mut const_eval, table_index, elements, dst, src, len)
    }
642
    /// Shared implementation for table initialization: copies `len` entries
    /// of `elements`, starting at `src`, into the table at offset `dst`,
    /// materializing function indices / const expressions into table elements.
    fn table_init_segment(
        &mut self,
        store: &mut StoreOpaque,
        const_eval: &mut ConstExprEvaluator,
        table_index: TableIndex,
        elements: &TableSegmentElements,
        dst: u64,
        src: u64,
        len: u64,
    ) -> Result<(), TrapKind> {
        // An unrepresentable `src`/`len` can never be in bounds, so report it
        // as the table-out-of-bounds trap.
        let src = usize::try_from(src).map_err(|_| TrapKind::TableOutOfBounds)?;
        let len = usize::try_from(len).map_err(|_| TrapKind::TableOutOfBounds)?;

        // Safety: the implementation promises that vmctx is correctly initialized
        let table = unsafe { self.defined_or_imported_table(table_index).as_mut() };

        match elements {
            TableSegmentElements::Functions(funcs) => {
                // Two-step slicing so any out-of-range window traps rather
                // than panics.
                let elements = funcs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or(TrapKind::TableOutOfBounds)?;
                table.init_func(dst, elements.iter().map(|idx| self.get_func_ref(*idx)))?;
            }
            TableSegmentElements::Expressions(exprs) => {
                let exprs = exprs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or(TrapKind::TableOutOfBounds)?;
                let (heap_top_ty, shared) = self.translated_module().tables[table_index]
                    .element_type
                    .heap_type
                    .top();
                assert!(!shared);

                // Safety: the implementation promises that vmctx is correctly initialized
                let mut context = unsafe { ConstEvalContext::new(self) };

                match heap_top_ty {
                    // Each expression is const-evaluated to a funcref value.
                    WasmHeapTopType::Func => table.init_func(
                        dst,
                        exprs.iter().map(|expr| {
                            NonNull::new(
                                const_eval
                                    .eval(store, &mut context, expr)
                                    .expect("const expr should be valid")
                                    .get_funcref()
                                    .cast(),
                            )
                        }),
                    )?,
                    WasmHeapTopType::Extern | WasmHeapTopType::Any => todo!("gc proposal"),
                    WasmHeapTopType::Exn => todo!("exception-handling proposal"),
                    WasmHeapTopType::Cont => todo!("continuation proposal"),
                }
            }
        }

        Ok(())
    }
703
    /// Implementation of the `elem.drop` instruction: marks the passive
    /// element segment as dropped.
    pub fn elem_drop(&mut self, elem_index: ElemIndex) {
        self.dropped_elements.insert(elem_index);
    }

    /// Returns a pointer to the `VMFuncRef` of function `index`, or `None`
    /// when `index` is the reserved (null) function index.
    pub fn get_func_ref(&mut self, index: FuncIndex) -> Option<NonNull<VMFuncRef>> {
        if index == FuncIndex::reserved_value() {
            return None;
        }

        // Safety: we have a `&mut self`, so we have exclusive access
        // to this Instance.
        unsafe {
            let func = &self.translated_module().functions[index];
            let func_ref: *mut VMFuncRef = self
                .vmctx_plus_offset_mut::<VMFuncRef>(self.vmshape().vmctx_vmfunc_ref(func.func_ref));

            Some(NonNull::new(func_ref).unwrap())
        }
    }
723
    /// Associates (or clears, with `None`) the owning store: records the
    /// pointer on `self` and mirrors the store-context and epoch pointers
    /// into the vmctx slots read by compiled code.
    pub(crate) fn set_store(&mut self, store: Option<NonNull<StoreOpaque>>) {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            self.store = store;

            if let Some(mut store) = store {
                let store = store.as_mut();

                self.vm_store_context()
                    .write(Some(VmPtr::from(store.vm_store_context_ptr())));
                #[cfg(target_has_atomic = "64")]
                self.epoch_ptr().write(Some(VmPtr::from(NonNull::from(
                    store.engine().epoch_counter(),
                ))));

                // if self.env_module().needs_gc_heap {
                //     self.set_gc_heap(Some(store.gc_store_mut().expect(
                //         "if we need a GC heap, then `Instance::new_raw` should have already \
                //          allocated it for us",
                //     )));
                // } else {
                //     self.set_gc_heap(None);
                // }
            } else {
                // Clearing the store also nulls the vmctx mirrors.
                self.vm_store_context().write(None);
                #[cfg(target_has_atomic = "64")]
                self.epoch_ptr().write(None);
                // self.set_gc_heap(None);
            }
        }
    }
755
756 // unsafe fn set_gc_heap(&mut self, gc_store: Option<&mut StoreOpaque>) {
757 // if let Some(gc_store) = gc_store {
758 // let heap = gc_store.gc_heap.heap_slice_mut();
759 // self.gc_heap_bound().write(heap.len());
760 // self.gc_heap_base()
761 // .write(Some(NonNull::from(heap).cast().into()));
762 // self.gc_heap_data()
763 // .write(Some(gc_store.gc_heap.vmctx_gc_heap_data().into()));
764 // } else {
765 // self.gc_heap_bound().write(0);
766 // self.gc_heap_base().write(None);
767 // self.gc_heap_data().write(None);
768 // }
769 // }
770
    /// Stores `callee` into the vmctx `callee` slot (or clears it with
    /// `None`).
    pub(crate) fn set_callee(&mut self, callee: Option<NonNull<VMFunctionBody>>) {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            let callee = callee.map(VmPtr::from);
            self.vmctx_plus_offset_mut::<Option<VmPtr<VMFunctionBody>>>(
                StaticVMShape.vmctx_callee(),
            )
            .write(callee);
        }
    }
781
    // VMContext accessors

    /// Recovers the `Instance` that owns `vmctx` and runs `f` on it.
    ///
    /// # Safety
    ///
    /// `vmctx` must point at a live `VMContext` that trails an `Instance`
    /// (i.e. one obtained via [`Instance::vmctx`]); `#[repr(C)]` on `Instance`
    /// guarantees the vmctx is its last field.
    #[inline]
    pub unsafe fn from_vmctx<R>(
        vmctx: NonNull<VMContext>,
        f: impl FnOnce(&mut Instance) -> R,
    ) -> R {
        // Safety: ensured by caller
        unsafe {
            // Step back over the `Instance` header preceding the vmctx.
            let mut ptr = vmctx.byte_sub(size_of::<Instance>()).cast::<Instance>();
            f(ptr.as_mut())
        }
    }
795
    /// Return a reference to the vmctx used by compiled wasm code.
    ///
    /// The result combines the *address* of the `vmctx` field with the
    /// *provenance* of `vmctx_self_reference`, which covers the entire
    /// instance + vmctx allocation (see the field's documentation).
    #[inline]
    pub fn vmctx(&self) -> NonNull<VMContext> {
        let addr = &raw const self.vmctx;
        let ret = self.vmctx_self_reference.as_ptr().with_addr(addr.addr());
        NonNull::new(ret).unwrap()
    }
803
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    ///
    /// This method is unsafe because the `offset` must be within bounds of the
    /// `VMContext` object trailing this instance.
    unsafe fn vmctx_plus_offset<T: VmSafe>(&self, offset: impl Into<u32>) -> *const T {
        // Safety: ensured by caller
        unsafe {
            self.vmctx()
                .as_ptr()
                .byte_add(usize::try_from(offset.into()).unwrap())
                .cast()
        }
    }
    /// Dual of `vmctx_plus_offset`, but for mutability.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::vmctx_plus_offset`]: `offset` must lie within
    /// the trailing `VMContext` allocation.
    unsafe fn vmctx_plus_offset_mut<T: VmSafe>(&mut self, offset: impl Into<u32>) -> *mut T {
        // Safety: ensured by caller
        unsafe {
            self.vmctx()
                .as_ptr()
                .byte_add(usize::try_from(offset.into()).unwrap())
                .cast()
        }
    }
830
    /// Return a pointer to the vmctx slot holding the optional
    /// `VMStoreContext` pointer.
    #[inline]
    pub fn vm_store_context(&mut self) -> NonNull<Option<VmPtr<VMStoreContext>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_store_context())).unwrap()
        }
    }

    /// Return a pointer to the global epoch counter used by this instance.
    #[cfg(target_has_atomic = "64")]
    pub fn epoch_ptr(&mut self) -> NonNull<Option<VmPtr<AtomicU64>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_epoch_ptr())).unwrap()
        }
    }

    /// Return a pointer to the GC heap base pointer.
    pub fn gc_heap_base(&mut self) -> NonNull<Option<VmPtr<u8>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_gc_heap_base())).unwrap()
        }
    }

    /// Return a pointer to the GC heap bound.
    pub fn gc_heap_bound(&mut self) -> NonNull<usize> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_gc_heap_bound())).unwrap()
        }
    }

    /// Return a pointer to the collector-specific heap data.
    pub fn gc_heap_data(&mut self) -> NonNull<Option<VmPtr<u8>>> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(StaticVMShape.vmctx_gc_heap_data())).unwrap()
        }
    }
871
    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmfunction_import(index)) }
    }
    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmtable_import(index)) }
    }
    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmmemory_import(index)) }
    }
    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmglobal_import(index)) }
    }
    /// Return the indexed `VMTagImport`.
    fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe { &*self.vmctx_plus_offset(self.vmshape().vmctx_vmtag_import(index)) }
    }
897
    /// Return a pointer to the defined table's `VMTableDefinition` in vmctx.
    fn table_ptr(&mut self, index: DefinedTableIndex) -> NonNull<VMTableDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(self.vmshape().vmctx_vmtable_definition(index)))
                .unwrap()
        }
    }
    /// Return the defined memory's `VMMemoryDefinition` pointer. Unlike the
    /// other getters, the vmctx slot itself stores a `VmPtr` which is read
    /// and returned.
    fn memory_ptr(&mut self, index: DefinedMemoryIndex) -> NonNull<VMMemoryDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        let ptr = unsafe {
            *self.vmctx_plus_offset::<VmPtr<_>>(self.vmshape().vmctx_vmmemory_pointer(index))
        };
        ptr.as_non_null()
    }
    /// Return a pointer to the defined global's `VMGlobalDefinition` in vmctx.
    fn global_ptr(&mut self, index: DefinedGlobalIndex) -> NonNull<VMGlobalDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(
                self.vmctx_plus_offset_mut(self.vmshape().vmctx_vmglobal_definition(index)),
            )
            .unwrap()
        }
    }
    /// Return a pointer to the defined tag's `VMTagDefinition` in vmctx.
    fn tag_ptr(&mut self, index: DefinedTagIndex) -> NonNull<VMTagDefinition> {
        // Safety: the implementation promises that vmctx is correctly initialized
        unsafe {
            NonNull::new(self.vmctx_plus_offset_mut(self.vmshape().vmctx_vmtag_definition(index)))
                .unwrap()
        }
    }
928
    /// Return a pointer to the locally defined table at `index`.
    pub fn get_defined_table(&mut self, index: DefinedTableIndex) -> NonNull<Table> {
        NonNull::from(&mut self.tables[index])
    }

    /// Return a pointer to the `Table` for `table_index`, whether it is
    /// defined in this instance or owned by the instance it was imported
    /// from.
    pub(super) fn defined_or_imported_table(&mut self, table_index: TableIndex) -> NonNull<Table> {
        self.with_defined_table_index_and_instance(table_index, |idx, instance| {
            NonNull::from(instance.tables.get(idx).unwrap())
        })
    }
938
    /// Resolves `index` to a `DefinedTableIndex` plus the `Instance` that
    /// actually owns the table, then calls `f` with both. For a defined table
    /// that instance is `self`; for an imported table the foreign owner is
    /// recovered from the import's vmctx pointer.
    fn with_defined_table_index_and_instance<R>(
        &mut self,
        index: TableIndex,
        f: impl FnOnce(DefinedTableIndex, &mut Instance) -> R,
    ) -> R {
        if let Some(defined_table_index) = self.translated_module().defined_table_index(index) {
            f(defined_table_index, self)
        } else {
            let import = self.imported_table(index);
            // Safety: the import's `vmctx`/`from` pointers are assumed valid
            // and correctly initialized. TODO test & verify
            unsafe {
                Instance::from_vmctx(import.vmctx.as_non_null(), |foreign_instance| {
                    let foreign_table_def = import.from.as_ptr();
                    let foreign_table_index = foreign_instance.table_index(&*foreign_table_def);
                    f(foreign_table_index, foreign_instance)
                })
            }
        }
    }
958
959 pub(super) fn defined_or_imported_memory(
960 &mut self,
961 index: MemoryIndex,
962 ) -> NonNull<VMMemoryDefinition> {
963 if let Some(defined_index) = self.translated_module().defined_memory_index(index) {
964 self.memory_ptr(defined_index)
965 } else {
966 let import = self.imported_memory(index);
967 import.from.as_non_null()
968 }
969 }
970
971 pub(super) fn defined_or_imported_global(
972 &mut self,
973 index: GlobalIndex,
974 ) -> NonNull<VMGlobalDefinition> {
975 if let Some(index) = self.translated_module().defined_global_index(index) {
976 self.global_ptr(index)
977 } else {
978 self.imported_global(index).from.as_non_null()
979 }
980 }
981
    /// Computes the `DefinedTableIndex` of `table` from its offset within
    /// this instance's array of `VMTableDefinition`s.
    ///
    /// # Safety
    ///
    /// `table` must point into this instance's own vmctx table-definition
    /// array.
    pub unsafe fn table_index(&mut self, table: &VMTableDefinition) -> DefinedTableIndex {
        // Safety: ensured by caller
        unsafe {
            let index = DefinedTableIndex::new(
                usize::try_from(
                    ptr::from_ref::<VMTableDefinition>(table)
                        .offset_from(self.table_ptr(DefinedTableIndex::new(0)).as_ptr()),
                )
                .unwrap(),
            );
            assert!(index.index() < self.tables.len());
            index
        }
    }

    /// Computes the `DefinedMemoryIndex` of the given memory definition from
    /// its offset within this instance's memory definitions.
    /// NOTE(review): the parameter is named `table` but is a memory
    /// definition.
    ///
    /// # Safety
    ///
    /// `table` must point into this instance's own vmctx memory definitions.
    pub unsafe fn memory_index(&mut self, table: &VMMemoryDefinition) -> DefinedMemoryIndex {
        // Safety: ensured by caller
        unsafe {
            let index = DefinedMemoryIndex::new(
                usize::try_from(
                    ptr::from_ref::<VMMemoryDefinition>(table)
                        .offset_from(self.memory_ptr(DefinedMemoryIndex::new(0)).as_ptr()),
                )
                .unwrap(),
            );
            assert!(index.index() < self.memories.len());
            index
        }
    }
1011
    /// Populates this instance's `VMContext` memory: the magic word, store
    /// pointer, builtin-functions array pointer, type-ids array pointer, all
    /// imports, and the definitions for locally-defined tables, memories,
    /// globals, and tags, finishing with the escaped-function `VMFuncRef`s.
    ///
    /// Globals are only zero-initialized here; their const-expression values
    /// are written later (after full allocation) by `initialize_globals`.
    #[tracing::instrument(level = "debug", skip(self, store, module))]
    unsafe fn initialize_vmctx(
        &mut self,
        store: &mut StoreOpaque,
        imports: Imports,
        module: &Module,
    ) {
        let vmshape = module.vmshape();

        // Safety: there is no safety, we just have to trust that the entire vmctx memory range
        // we need was correctly allocated
        unsafe {
            // Write the magic word first so later sanity checks can recognize
            // this allocation as a valid vmctx.
            tracing::trace!("initializing vmctx magic");
            self.vmctx_plus_offset_mut::<u32>(vmshape.vmctx_magic())
                .write(VMCONTEXT_MAGIC);

            tracing::trace!("initializing store-related fields");
            self.set_store(Some(NonNull::from(store)));

            tracing::trace!("initializing built-in functions array ptr");
            self.vmctx_plus_offset_mut::<VmPtr<VMBuiltinFunctionsArray>>(
                vmshape.vmctx_builtin_functions(),
            )
            .write(VmPtr::from(NonNull::from(&VMBuiltinFunctionsArray::INIT)));

            tracing::trace!("initializing callee");
            self.set_callee(None);

            // Not yet wired up (GC support pending):
            // gc_heap_base: *mut u8,
            // gc_heap_bound: *mut u8,
            // gc_heap_data: *mut T, //! Collector-specific pointer

            // Pointer to the engine-wide shared type index array for this module.
            self.vmctx_plus_offset_mut::<VmPtr<VMSharedTypeIndex>>(vmshape.vmctx_type_ids_array())
                .write(VmPtr::from(NonNull::from(self.module.type_ids()).cast()));

            // Copy each category of imports into its vmctx array. The debug
            // asserts check the caller supplied exactly as many imports as the
            // translated module declares.
            tracing::trace!("initializing function imports");
            debug_assert_eq!(
                imports.functions.len(),
                self.translated_module().num_imported_functions as usize
            );
            ptr::copy_nonoverlapping(
                imports.functions.as_ptr(),
                self.vmctx_plus_offset_mut::<VMFunctionImport>(
                    vmshape.vmctx_imported_functions_begin(),
                ),
                imports.functions.len(),
            );

            tracing::trace!("initializing table imports");
            debug_assert_eq!(
                imports.tables.len(),
                self.translated_module().num_imported_tables as usize
            );
            ptr::copy_nonoverlapping(
                imports.tables.as_ptr(),
                self.vmctx_plus_offset_mut::<VMTableImport>(vmshape.vmctx_imported_tables_begin()),
                imports.tables.len(),
            );

            tracing::trace!("initializing memory imports");
            debug_assert_eq!(
                imports.memories.len(),
                self.translated_module().num_imported_memories as usize
            );
            ptr::copy_nonoverlapping(
                imports.memories.as_ptr(),
                self.vmctx_plus_offset_mut::<VMMemoryImport>(
                    vmshape.vmctx_imported_memories_begin(),
                ),
                imports.memories.len(),
            );

            tracing::trace!("initializing global imports");
            debug_assert_eq!(
                imports.globals.len(),
                self.translated_module().num_imported_globals as usize
            );
            ptr::copy_nonoverlapping(
                imports.globals.as_ptr(),
                self.vmctx_plus_offset_mut::<VMGlobalImport>(
                    vmshape.vmctx_imported_globals_begin(),
                ),
                imports.globals.len(),
            );

            tracing::trace!("initializing tag imports");
            debug_assert_eq!(
                imports.tags.len(),
                self.translated_module().num_imported_tags as usize
            );
            ptr::copy_nonoverlapping(
                imports.tags.as_ptr(),
                self.vmctx_plus_offset_mut::<VMTagImport>(vmshape.vmctx_imported_tags_begin()),
                imports.tags.len(),
            );

            // Publish a VMTableDefinition into the vmctx for every table this
            // module defines (imported tables were handled above).
            tracing::trace!("initializing defined tables");
            for def_index in module
                .translated()
                .tables
                .keys()
                .filter_map(|index| module.translated().defined_table_index(index))
            {
                let def = self.tables[def_index].as_vmtable_definition();
                self.table_ptr(def_index).write(def);
            }

            // Initialize the defined memories. This fills in both the
            // `defined_memories` table and the `owned_memories` table at the same
            // time. Entries in `defined_memories` hold a pointer to a definition
            // (all memories) whereas the `owned_memories` hold the actual
            // definitions of memories owned (not shared) in the module.
            tracing::trace!("initializing defined memories");
            for (def_index, desc) in
                module
                    .translated()
                    .memories
                    .iter()
                    .filter_map(|(index, desc)| {
                        Some((module.translated().defined_memory_index(index)?, desc))
                    })
            {
                let ptr = self.vmctx_plus_offset_mut::<VmPtr<VMMemoryDefinition>>(
                    vmshape.vmctx_vmmemory_pointer(def_index),
                );

                if desc.shared {
                    // Shared memories are not implemented yet; the intended
                    // shape (pointer into the shared memory's own definition)
                    // is sketched below.
                    // let def_ptr = self.memories[def_index]
                    //     .as_shared_memory()
                    //     .unwrap()
                    //     .vmmemory_ptr();
                    // ptr.write(VmPtr::from(def_ptr));

                    todo!()
                } else {
                    // Owned memory: write the definition into the owned slot,
                    // then point the per-memory pointer at that slot.
                    let owned_index = self.translated_module().owned_memory_index(def_index);
                    let owned_ptr = self.vmctx_plus_offset_mut::<VMMemoryDefinition>(
                        vmshape.vmctx_vmmemory_definition(owned_index),
                    );

                    owned_ptr.write(self.memories[def_index].vmmemory_definition());
                    ptr.write(VmPtr::from(NonNull::new(owned_ptr).unwrap()));
                }
            }

            // Zero-initialize the globals so that nothing is uninitialized memory
            // after this function returns. The globals are actually initialized
            // with their const expression initializers after the instance is fully
            // allocated.
            tracing::trace!("initializing defined globals");
            for (index, _init) in &module.translated().global_initializers {
                self.global_ptr(index).write(VMGlobalDefinition::new());
            }

            // Each defined tag records the shared engine type index of its
            // signature.
            tracing::trace!("initializing defined tags");
            for (def_index, tag) in
                module.translated().tags.iter().filter_map(|(index, ty)| {
                    Some((module.translated().defined_tag_index(index)?, ty))
                })
            {
                self.tag_ptr(def_index).write(VMTagDefinition::new(
                    tag.signature.unwrap_engine_type_index(),
                ));
            }

            // Must run last: it reads the type-ids array written above.
            tracing::trace!("initializing func refs array");
            self.initialize_vmfunc_refs(&imports, module);
        }
    }
1184
    /// Writes a `VMFuncRef` into the vmctx for every function that "escapes"
    /// (may be referenced as a first-class funcref), resolving each function's
    /// shared type index through the vmctx type-ids array.
    ///
    /// # Safety
    ///
    /// among other things the caller has to ensure that this is only ever called **after**
    /// calling `Instance::initialize_vmctx` (this function reads the type-ids
    /// array pointer that `initialize_vmctx` writes)
    #[tracing::instrument(level = "debug", skip(self, module))]
    unsafe fn initialize_vmfunc_refs(&mut self, imports: &Imports, module: &Module) {
        // Safety: the caller pinky-promised that the vmctx is correctly initialized
        unsafe {
            let vmshape = module.vmshape();

            // Only escaping functions get a func-ref slot.
            for (index, func) in module
                .translated()
                .functions
                .iter()
                .filter(|(_, f)| f.is_escaping())
            {
                // Look up the engine-wide shared type index by indexing the
                // type-ids array (written by `initialize_vmctx`) with the
                // function's module-local type index.
                let type_index = {
                    let base: *const VMSharedTypeIndex = (*self
                        .vmctx_plus_offset_mut::<VmPtr<VMSharedTypeIndex>>(
                            StaticVMShape.vmctx_type_ids_array(),
                        ))
                    .as_ptr();
                    *base.add(func.signature.unwrap_module_type_index().index())
                };

                let func_ref =
                    if let Some(def_index) = module.translated().defined_func_index(index) {
                        // Locally-defined function: trampoline and body come
                        // from this module; vmctx is our own.
                        VMFuncRef {
                            array_call: self.module().array_to_wasm_trampoline(def_index).expect(
                                "should have array-to-Wasm trampoline for escaping function",
                            ),
                            wasm_call: Some(VmPtr::from(self.module.function(def_index))),
                            type_index,
                            vmctx: VmPtr::from(VMOpaqueContext::from_vmcontext(self.vmctx())),
                        }
                    } else {
                        // Imported function: reuse the pointers from the import.
                        let import = &imports.functions[index.index()];
                        VMFuncRef {
                            array_call: import.array_call,
                            wasm_call: Some(import.wasm_call),
                            vmctx: import.vmctx,
                            type_index,
                        }
                    };

                self.vmctx_plus_offset_mut::<VMFuncRef>(vmshape.vmctx_vmfunc_ref(func.func_ref))
                    .write(func_ref);
            }
        }
    }
1235
1236 /// # Safety
1237 ///
1238 /// among other things the caller has to ensure that this is only ever called **after**
1239 /// calling `Instance::initialize_vmctx`
1240 #[tracing::instrument(level = "debug", skip(self, store, ctx, const_eval, module))]
1241 unsafe fn initialize_globals(
1242 &mut self,
1243 store: &mut StoreOpaque,
1244 ctx: &mut ConstEvalContext,
1245 const_eval: &mut ConstExprEvaluator,
1246 module: &Module,
1247 ) -> crate::Result<()> {
1248 for (def_index, init) in &module.translated().global_initializers {
1249 let vmval = const_eval
1250 .eval(store, ctx, init)
1251 .expect("const expression should be valid");
1252 let index = self.translated_module().global_index(def_index);
1253 let ty = self.translated_module().globals[index].content_type;
1254
1255 // Safety: the caller pinky-promised that the vmctx is correctly initialized
1256 unsafe {
1257 self.global_ptr(def_index)
1258 .write(VMGlobalDefinition::from_vmval(store, ty, vmval)?);
1259 }
1260 }
1261
1262 Ok(())
1263 }
1264
    /// Fills defined tables with their initial values and then applies all
    /// active element segments.
    ///
    /// # Safety
    ///
    /// among other things the caller has to ensure that this is only ever called **after**
    /// calling `Instance::initialize_vmctx`
    #[tracing::instrument(level = "debug", skip(self, store, ctx, const_eval, module))]
    unsafe fn initialize_tables(
        &mut self,
        store: &mut StoreOpaque,
        ctx: &mut ConstEvalContext,
        const_eval: &mut ConstExprEvaluator,
        module: &Module,
    ) -> crate::Result<()> {
        // Phase 1: apply per-table initial values. Tables default to null
        // refs, so `RefNull` needs no work.
        for (def_index, init) in &module.translated().table_initializers.initial_values {
            match init {
                TableInitialValue::RefNull => {}
                TableInitialValue::ConstExpr(expr) => {
                    let index = self.translated_module().table_index(def_index);
                    let (heap_top_ty, shared) = self.translated_module().tables[index]
                        .element_type
                        .heap_type
                        .top();
                    // Shared tables are not supported here.
                    assert!(!shared);

                    let vmval = const_eval
                        .eval(store, ctx, expr)
                        .expect("const expression should be valid");

                    // Safety: the caller pinky-promised that the vmctx is correctly initialized
                    let table = unsafe { self.get_defined_table(def_index).as_mut() };

                    match heap_top_ty {
                        WasmHeapTopType::Func => {
                            // Fill every slot of the table with the evaluated
                            // funcref (which may be null).
                            let funcref = NonNull::new(vmval.get_funcref().cast::<VMFuncRef>());
                            let items = (0..table.size()).map(|_| funcref);
                            table.init_func(0, items)?;
                        }
                        WasmHeapTopType::Extern | WasmHeapTopType::Any => todo!("gc proposal"),
                        WasmHeapTopType::Exn => todo!("exception-handling proposal"),
                        WasmHeapTopType::Cont => todo!("continuation proposal"),
                    }
                }
            }
        }

        // Phase 2: apply active element segments at their evaluated offsets.
        for segment in &module.translated().table_initializers.segments {
            let start = const_eval
                .eval(store, ctx, &segment.offset)
                .expect("const expression should be valid");

            // NOTE(review): the offset is read unconditionally via `get_u64`;
            // confirm this is correct for tables with a 32-bit index type.
            ctx.instance.table_init_segment(
                store,
                const_eval,
                segment.table_index,
                &segment.elements,
                start.get_u64(),
                0,
                segment.elements.len(),
            )?;
        }

        Ok(())
    }
1329
1330 /// # Safety
1331 ///
1332 /// among other things the caller has to ensure that this is only ever called **after**
1333 /// calling `Instance::initialize_vmctx`
1334 #[tracing::instrument(level = "debug", skip(self, store, ctx, const_eval, module))]
1335 unsafe fn initialize_memories(
1336 &mut self,
1337 store: &mut StoreOpaque,
1338 ctx: &mut ConstEvalContext,
1339 const_eval: &mut ConstExprEvaluator,
1340 module: &Module,
1341 ) -> crate::Result<()> {
1342 for initializer in &module.translated().memory_initializers {
1343 let start: usize = {
1344 let vmval = const_eval
1345 .eval(store, ctx, &initializer.offset)
1346 .expect("const expression should be valid");
1347
1348 match self.translated_module().memories[initializer.memory_index].index_type {
1349 IndexType::I32 => usize::try_from(vmval.get_u32()).unwrap(),
1350 IndexType::I64 => usize::try_from(vmval.get_u64()).unwrap(),
1351 }
1352 };
1353
1354 // Safety: the caller pinky-promised that the vmctx is correctly initialized
1355 let memory = unsafe {
1356 self.defined_or_imported_memory(initializer.memory_index)
1357 .as_mut()
1358 };
1359
1360 let end = start.checked_add(initializer.data.len()).unwrap();
1361 ensure!(end <= memory.current_length(Ordering::Relaxed));
1362
1363 // Safety: we did all the checking we could above
1364 unsafe {
1365 let src = &initializer.data;
1366 let dst = memory.base.as_ptr().add(start);
1367 ptr::copy_nonoverlapping(src.as_ptr(), dst, src.len());
1368 }
1369 }
1370
1371 Ok(())
1372 }
1373}
1374
/// A view that pairs an [`Instance`] with the store it points at, recovered
/// from a raw `VMContext` pointer (see `from_vmctx`).
///
/// `#[repr(transparent)]` guarantees this type has exactly the layout of
/// `Instance`; `from_vmctx` asserts the matching size with `const_assert_eq!`.
#[repr(transparent)]
pub struct InstanceAndStore {
    // The wrapped instance; the store is reached through `instance.store`.
    instance: Instance,
}
1379
impl InstanceAndStore {
    /// Recovers the `InstanceAndStore` that lives directly before `vmctx` in
    /// memory and passes it to `f`.
    ///
    /// # Safety
    ///
    /// `vmctx` must point at a live, correctly-allocated `VMContext` preceded
    /// by its owning `Instance`.
    #[inline]
    pub(crate) unsafe fn from_vmctx<R>(
        vmctx: NonNull<VMContext>,
        f: impl for<'a> FnOnce(&'a mut Self) -> R,
    ) -> R {
        // The pointer cast below is only sound because the wrapper adds no
        // size on top of `Instance` (it is `#[repr(transparent)]`).
        const_assert_eq!(size_of::<InstanceAndStore>(), size_of::<Instance>());
        // Safety: the instance is always directly before the vmctx in memory
        unsafe {
            let mut ptr = vmctx
                .byte_sub(size_of::<Instance>())
                .cast::<InstanceAndStore>();

            f(ptr.as_mut())
        }
    }

    /// Splits this view into simultaneous mutable references to the instance
    /// and its `StoreOpaque`.
    #[inline]
    pub(crate) fn unpack_mut(&mut self) -> (&mut Instance, &mut StoreOpaque) {
        // Safety: this is fine — `instance.store` is set during vmctx
        // initialization (`set_store`), and the returned borrows are tied to
        // `&mut self`.
        unsafe {
            let store = self.instance.store.unwrap().as_mut();
            (&mut self.instance, store)
        }
    }

    /// Like [`Self::unpack_mut`] but downcasts the store to its concrete
    /// `StoreInner<T>`.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that the store actually is a `StoreInner<T>`
    /// for this `T`; the cast is unchecked.
    #[inline]
    pub(crate) unsafe fn unpack_with_state_mut<T>(
        &mut self,
    ) -> (&mut Instance, &'_ mut StoreInner<T>) {
        let mut store_ptr = self.instance.store.unwrap().cast::<StoreInner<T>>();
        (
            &mut self.instance,
            // Safety: ensured by caller
            unsafe { store_ptr.as_mut() },
        )
    }
}
1418
1419/// # Safety
1420///
1421/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1422unsafe fn check_init_bounds(
1423 store: &mut StoreOpaque,
1424 instance: &mut Instance,
1425 module: &Module,
1426) -> crate::Result<()> {
1427 // Safety: ensured by caller
1428 unsafe {
1429 check_table_init_bounds(store, instance, module)?;
1430 check_memory_init_bounds(store, instance, &module.translated().memory_initializers)?;
1431 }
1432 Ok(())
1433}
1434
1435/// # Safety
1436///
1437/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1438unsafe fn check_table_init_bounds(
1439 store: &mut StoreOpaque,
1440 instance: &mut Instance,
1441 module: &Module,
1442) -> crate::Result<()> {
1443 // Safety: the caller pinky-promised to have called initialize_vmctx before calling this function
1444 // so the VMTableDefinitions are all properly initialized
1445 unsafe {
1446 let mut const_evaluator = ConstExprEvaluator::default();
1447
1448 for segment in &module.translated().table_initializers.segments {
1449 let table = instance
1450 .defined_or_imported_table(segment.table_index)
1451 .as_ref();
1452 let mut context = ConstEvalContext::new(instance);
1453 let start = const_evaluator
1454 .eval(store, &mut context, &segment.offset)
1455 .expect("const expression should be valid");
1456 let start = usize::try_from(start.get_u32()).unwrap();
1457 let end = start.checked_add(usize::try_from(segment.elements.len()).unwrap());
1458
1459 match end {
1460 Some(end) if end <= table.size() => {
1461 // Initializer is in bounds
1462 }
1463 _ => {
1464 bail!("table out of bounds: elements segment does not fit")
1465 }
1466 }
1467 }
1468 Ok(())
1469 }
1470}
1471
1472/// # Safety
1473///
1474/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1475unsafe fn check_memory_init_bounds(
1476 store: &mut StoreOpaque,
1477 instance: &mut Instance,
1478 initializers: &[MemoryInitializer],
1479) -> crate::Result<()> {
1480 // Safety: the caller pinky-promised to have called initialize_vmctx before calling this function
1481 // so the VMMemoryDefinitions are all properly initialized
1482 unsafe {
1483 for init in initializers {
1484 let memory = instance
1485 .defined_or_imported_memory(init.memory_index)
1486 .as_ref();
1487 let start = get_memory_init_start(store, init, instance)?;
1488 let end = usize::try_from(start)
1489 .ok()
1490 .and_then(|start| start.checked_add(init.data.len()));
1491
1492 match end {
1493 Some(end) if end <= memory.current_length(Ordering::Relaxed) => {
1494 // Initializer is in bounds
1495 }
1496 _ => {
1497 bail!("memory out of bounds: data segment does not fit")
1498 }
1499 }
1500 }
1501
1502 Ok(())
1503 }
1504}
1505
1506/// # Safety
1507///
1508/// The caller must ensure this function is only ever called **after** `Instance::initialize_vmctx`
1509unsafe fn get_memory_init_start(
1510 store: &mut StoreOpaque,
1511 init: &MemoryInitializer,
1512 instance: &mut Instance,
1513) -> crate::Result<u64> {
1514 // Safety: the caller pinky-promised that the vmctx is correctly initialized
1515 let mut context = unsafe { ConstEvalContext::new(instance) };
1516 let mut const_evaluator = ConstExprEvaluator::default();
1517 const_evaluator
1518 .eval(store, &mut context, &init.offset)
1519 .map(
1520 |v| match instance.translated_module().memories[init.memory_index].index_type {
1521 IndexType::I32 => v.get_u32().into(),
1522 IndexType::I64 => v.get_u64(),
1523 },
1524 )
1525}