//! Next Generation WASM Microkernel Operating System
1// Copyright 2025 Jonas Kruckenberg
2//
3// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
4// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
5// http://opensource.org/licenses/MIT>, at your option. This file may not be
6// copied, modified, or distributed except according to those terms.
7
8use crate::mem::VirtualAddress;
9use crate::wasm::builtins::{BuiltinFunctionIndex, foreach_builtin_function};
10use crate::wasm::indices::{DefinedMemoryIndex, VMSharedTypeIndex};
11use crate::wasm::store::StoreOpaque;
12use crate::wasm::translate::{WasmHeapTopType, WasmValType};
13use crate::wasm::type_registry::RegisteredType;
14use crate::wasm::types::FuncType;
15use crate::wasm::vm::provenance::{VmPtr, VmSafe};
16use alloc::boxed::Box;
17use core::any::Any;
18use core::cell::UnsafeCell;
19use core::ffi::c_void;
20use core::marker::PhantomPinned;
21use core::mem::MaybeUninit;
22use core::ptr::NonNull;
23use core::sync::atomic::{AtomicUsize, Ordering};
24use core::{fmt, ptr};
25use cranelift_entity::Unsigned;
26use static_assertions::const_assert_eq;
27
/// Magic value for core Wasm VM contexts.
///
/// This is stored at the start of all `VMContext` structures.
///
/// The value is the little-endian interpretation of the ASCII bytes `"core"`,
/// making it easy to recognize in memory dumps.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Equivalent of `VMCONTEXT_MAGIC` except for array-call host functions.
///
/// This is stored at the start of all `VMArrayCallHostFuncContext` structures
/// and double-checked on `VMArrayCallHostFuncContext::from_opaque`.
///
/// The value is the little-endian interpretation of the ASCII bytes `"ACHF"`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
38
/// A "raw" and unsafe representation of a WebAssembly value.
///
/// This is provided for use with the `Func::new_unchecked` and
/// `Func::call_unchecked` APIs. In general it's unlikely you should be using
/// this from Rust, rather using APIs like `Func::wrap` and `TypedFunc::call`.
///
/// This is notably an "unsafe" way to work with `Val` and it's recommended to
/// instead use `Val` where possible. An important note about this union is that
/// fields are all stored in little-endian format, regardless of the endianness
/// of the host system.
///
/// Note: the union is sized by its largest field (`v128`, 16 bytes), so every
/// constructor below fully determines at most 16 bytes of payload.
#[repr(C)]
#[derive(Copy, Clone)]
pub union VMVal {
    /// A WebAssembly `i32` value.
    ///
    /// Note that the payload here is a Rust `i32` but the WebAssembly `i32`
    /// type does not assign an interpretation of the upper bit as either signed
    /// or unsigned. The Rust type `i32` is simply chosen for convenience.
    ///
    /// This value is always stored in a little-endian format.
    i32: i32,

    /// A WebAssembly `i64` value.
    ///
    /// Note that the payload here is a Rust `i64` but the WebAssembly `i64`
    /// type does not assign an interpretation of the upper bit as either signed
    /// or unsigned. The Rust type `i64` is simply chosen for convenience.
    ///
    /// This value is always stored in a little-endian format.
    i64: i64,

    /// A WebAssembly `f32` value.
    ///
    /// Note that the payload here is a Rust `u32`. This is to allow passing any
    /// representation of NaN into WebAssembly without risk of changing NaN
    /// payload bits as its gets passed around the system. Otherwise though this
    /// `u32` value is the return value of `f32::to_bits` in Rust.
    ///
    /// This value is always stored in a little-endian format.
    f32: u32,

    /// A WebAssembly `f64` value.
    ///
    /// Note that the payload here is a Rust `u64`. This is to allow passing any
    /// representation of NaN into WebAssembly without risk of changing NaN
    /// payload bits as its gets passed around the system. Otherwise though this
    /// `u64` value is the return value of `f64::to_bits` in Rust.
    ///
    /// This value is always stored in a little-endian format.
    f64: u64,

    /// A WebAssembly `v128` value.
    ///
    /// The payload here is a Rust `[u8; 16]` which has the same number of bits
    /// but note that `v128` in WebAssembly is often considered a vector type
    /// such as `i32x4` or `f64x2`. This means that the actual interpretation
    /// of the underlying bits is left up to the instructions which consume
    /// this value.
    ///
    /// This value is always stored in a little-endian format.
    v128: [u8; 16],

    /// A WebAssembly `funcref` value (or one of its subtypes).
    ///
    /// The payload here is a pointer which is runtime-defined. This is one of
    /// the main points of unsafety about the `VMVal` type as the validity of
    /// the pointer here is not easily verified and must be preserved by
    /// carefully calling the correct functions throughout the runtime.
    ///
    /// This value is always stored in a little-endian format.
    funcref: *mut c_void,

    /// A WebAssembly `externref` value (or one of its subtypes).
    ///
    /// The payload here is a compressed pointer value which is
    /// runtime-defined. This is one of the main points of unsafety about the
    /// `VMVal` type as the validity of the pointer here is not easily verified
    /// and must be preserved by carefully calling the correct functions
    /// throughout the runtime.
    ///
    /// This value is always stored in a little-endian format.
    externref: u32,

    /// A WebAssembly `anyref` value (or one of its subtypes).
    ///
    /// The payload here is a compressed pointer value which is
    /// runtime-defined. This is one of the main points of unsafety about the
    /// `VMVal` type as the validity of the pointer here is not easily verified
    /// and must be preserved by carefully calling the correct functions
    /// throughout the runtime.
    ///
    /// This value is always stored in a little-endian format.
    anyref: u32,
}
133
// Safety: This type is just a bag-of-bits so it's up to the caller to figure out how
// to safely deal with threading concerns and safely access interior bits. None of the
// fields hold types with thread-affine state; the raw pointer in `funcref` is only
// ever interpreted by the runtime, not dereferenced through this union.
unsafe impl Send for VMVal {}
// Safety: See above
unsafe impl Sync for VMVal {}
139
140impl fmt::Debug for VMVal {
141 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
142 struct Hex<T>(T);
143 impl<T: fmt::LowerHex> fmt::Debug for Hex<T> {
144 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
145 let bytes = size_of::<T>();
146 let hex_digits_per_byte = 2;
147 let hex_digits = bytes * hex_digits_per_byte;
148 write!(f, "0x{:0width$x}", self.0, width = hex_digits)
149 }
150 }
151
152 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
153 unsafe {
154 f.debug_struct("VMVal")
155 .field("i32", &Hex(self.i32))
156 .field("i64", &Hex(self.i64))
157 .field("f32", &Hex(self.f32))
158 .field("f64", &Hex(self.f64))
159 .field("v128", &Hex(u128::from_le_bytes(self.v128)))
160 .field("funcref", &self.funcref)
161 .field("externref", &Hex(self.externref))
162 .field("anyref", &Hex(self.anyref))
163 .finish()
164 }
165 }
166}
167
168impl VMVal {
169 /// Create a null reference that is compatible with any of
170 /// `{any,extern,func}ref`.
171 pub fn null() -> VMVal {
172 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
173 unsafe {
174 let raw = MaybeUninit::<Self>::zeroed().assume_init();
175 debug_assert_eq!(raw.get_anyref(), 0);
176 debug_assert_eq!(raw.get_externref(), 0);
177 debug_assert_eq!(raw.get_funcref(), ptr::null_mut());
178 raw
179 }
180 }
181
182 /// Creates a WebAssembly `i32` value
183 #[inline]
184 pub fn i32(i: i32) -> VMVal {
185 // Note that this is intentionally not setting the `i32` field, instead
186 // setting the `i64` field with a zero-extended version of `i`. For more
187 // information on this see the comments on `Lower for Result` in the
188 // `wasmtime` crate. Otherwise though all `VMVal` constructors are
189 // otherwise constrained to guarantee that the initial 64-bits are
190 // always initialized.
191 VMVal::u64(i.unsigned().into())
192 }
193
194 /// Creates a WebAssembly `i64` value
195 #[inline]
196 pub fn i64(i: i64) -> VMVal {
197 VMVal { i64: i.to_le() }
198 }
199
200 /// Creates a WebAssembly `i32` value
201 #[inline]
202 pub fn u32(i: u32) -> VMVal {
203 // See comments in `VMVal::i32` for why this is setting the upper
204 // 32-bits as well.
205 VMVal::u64(i.into())
206 }
207
208 /// Creates a WebAssembly `i64` value
209 #[inline]
210 #[expect(clippy::cast_possible_wrap, reason = "wrapping is intentional")]
211 pub fn u64(i: u64) -> VMVal {
212 VMVal::i64(i as i64)
213 }
214
215 /// Creates a WebAssembly `f32` value
216 #[inline]
217 pub fn f32(i: u32) -> VMVal {
218 // See comments in `VMVal::i32` for why this is setting the upper
219 // 32-bits as well.
220 VMVal::u64(i.into())
221 }
222
223 /// Creates a WebAssembly `f64` value
224 #[inline]
225 pub fn f64(i: u64) -> VMVal {
226 VMVal { f64: i.to_le() }
227 }
228
229 /// Creates a WebAssembly `v128` value
230 #[inline]
231 pub fn v128(i: u128) -> VMVal {
232 VMVal {
233 v128: i.to_le_bytes(),
234 }
235 }
236
237 /// Creates a WebAssembly `funcref` value
238 #[inline]
239 pub fn funcref(i: *mut c_void) -> VMVal {
240 VMVal {
241 funcref: i.map_addr(|i| i.to_le()),
242 }
243 }
244
245 /// Creates a WebAssembly `externref` value
246 #[inline]
247 pub fn externref(e: u32) -> VMVal {
248 VMVal {
249 externref: e.to_le(),
250 }
251 }
252
253 /// Creates a WebAssembly `anyref` value
254 #[inline]
255 pub fn anyref(r: u32) -> VMVal {
256 VMVal { anyref: r.to_le() }
257 }
258
259 /// Gets the WebAssembly `i32` value
260 #[inline]
261 pub fn get_i32(&self) -> i32 {
262 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
263 unsafe { i32::from_le(self.i32) }
264 }
265
266 /// Gets the WebAssembly `i64` value
267 #[inline]
268 pub fn get_i64(&self) -> i64 {
269 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
270 unsafe { i64::from_le(self.i64) }
271 }
272
273 /// Gets the WebAssembly `i32` value
274 #[inline]
275 pub fn get_u32(&self) -> u32 {
276 self.get_i32().unsigned()
277 }
278
279 /// Gets the WebAssembly `i64` value
280 #[inline]
281 pub fn get_u64(&self) -> u64 {
282 self.get_i64().unsigned()
283 }
284
285 /// Gets the WebAssembly `f32` value
286 #[inline]
287 pub fn get_f32(&self) -> u32 {
288 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
289 unsafe { u32::from_le(self.f32) }
290 }
291
292 /// Gets the WebAssembly `f64` value
293 #[inline]
294 pub fn get_f64(&self) -> u64 {
295 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
296 unsafe { u64::from_le(self.f64) }
297 }
298
299 /// Gets the WebAssembly `v128` value
300 #[inline]
301 pub fn get_v128(&self) -> u128 {
302 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
303 unsafe { u128::from_le_bytes(self.v128) }
304 }
305
306 /// Gets the WebAssembly `funcref` value
307 #[inline]
308 pub fn get_funcref(&self) -> *mut c_void {
309 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
310 unsafe { self.funcref.map_addr(usize::from_le) }
311 }
312
313 /// Gets the WebAssembly `externref` value
314 #[inline]
315 pub fn get_externref(&self) -> u32 {
316 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
317 u32::from_le(unsafe { self.externref })
318 }
319
320 /// Gets the WebAssembly `anyref` value
321 #[inline]
322 pub fn get_anyref(&self) -> u32 {
323 // Safety: this is just a bag-of-bits, any bit pattern is valid
324 u32::from_le(unsafe { self.anyref })
325 }
326}
327
/// Function pointer type for the "array" calling convention, where all
/// parameters and results are passed through a single contiguous buffer of
/// `VMVal`s (pointer + length below).
///
/// The `bool` return value carries the trap status of the call; it is
/// forwarded unchanged by `VMFuncRef::array_call`.
pub type VMArrayCallFunction = unsafe extern "C" fn(
    NonNull<VMOpaqueContext>, // callee
    NonNull<VMOpaqueContext>, // caller
    NonNull<VMVal>, // pointer to params/results array
    usize, // len of params/results array
) -> bool;
334
/// A function pointer that exposes the Wasm calling convention.
///
/// Like `VMFunctionBody`, this is a placeholder type that only ever appears
/// behind pointers (e.g. `VmPtr<VMWasmCallFunction>`); values of this type
/// are never constructed directly.
#[repr(transparent)]
pub struct VMWasmCallFunction(VMFunctionBody);
338
/// A placeholder byte-sized type which is just used to provide some amount of type
/// safety when dealing with pointers to JIT-compiled function bodies. Note that it's
/// deliberately not Copy, as we shouldn't be carelessly copying function body bytes
/// around.
#[repr(C)]
pub struct VMFunctionBody(u8);
// SAFETY: this structure is never read and is safe to pass to jit code.
unsafe impl VmSafe for VMFunctionBody {}
347
/// An imported function.
///
/// The fields compiled code needs to access to call a function imported from
/// another instance or the host.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// Function pointer to use when calling this imported function from Wasm.
    pub wasm_call: VmPtr<VMWasmCallFunction>,

    /// Function pointer to use when calling this imported function with the
    /// "array" calling convention that `Func::new` et al use.
    pub array_call: VMArrayCallFunction,

    /// The VM state associated with this function.
    ///
    /// For Wasm functions defined by core wasm instances this will be `*mut
    /// VMContext`, but for lifted/lowered component model functions this will
    /// be a `VMComponentContext`, and for a host function it will be a
    /// `VMHostFuncContext`, etc.
    pub vmctx: VmPtr<VMOpaqueContext>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMFunctionImport {}
369
/// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub from: VmPtr<VMTableDefinition>,

    /// A pointer to the `VMContext` that owns the table description.
    ///
    /// Tables can be grown, so operations may need to reach the owning
    /// instance's state, unlike globals (see `VMGlobalImport`).
    pub vmctx: VmPtr<VMContext>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTableImport {}
383
/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory imported from another instance.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub from: VmPtr<VMMemoryDefinition>,

    /// A pointer to the `VMContext` that owns the memory description.
    pub vmctx: VmPtr<VMContext>,

    /// The index of the memory in the containing `vmctx`, i.e. within the
    /// defining instance's own memory index space.
    pub index: DefinedMemoryIndex,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMMemoryImport {}
400
/// The fields compiled code needs to access to utilize a WebAssembly global
/// variable imported from another instance.
///
/// Note that unlike with functions, tables, and memories, `VMGlobalImport`
/// doesn't include a `vmctx` pointer. Globals are never resized, and don't
/// require a `vmctx` pointer to access.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub from: VmPtr<VMGlobalDefinition>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMGlobalImport {}
415
/// The fields compiled code needs to access to utilize a WebAssembly
/// tag imported from another instance.
///
/// Like `VMGlobalImport`, no `vmctx` pointer is needed: a tag is just a
/// type id (see `VMTagDefinition`) and is never resized.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTagImport {
    /// A pointer to the imported tag description.
    pub from: VmPtr<VMTagDefinition>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTagImport {}
426
/// The fields compiled code needs to access to utilize a WebAssembly table
/// defined within the instance.
#[derive(Debug)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: VmPtr<u8>,

    /// The current number of elements in the table.
    pub current_elements: AtomicUsize,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTableDefinition {}
// Safety: The store synchronization protocol ensures this type will only ever be accessed in a thread-safe way
unsafe impl Send for VMTableDefinition {}
// Safety: The store synchronization protocol ensures this type will only ever be accessed in a thread-safe way
unsafe impl Sync for VMTableDefinition {}
444
/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// size in bytes.
#[derive(Debug)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address of the linear memory allocation.
    pub base: VmPtr<u8>,

    /// The current logical size of this linear memory in bytes.
    ///
    /// This is atomic because shared memories must be able to grow their length
    /// atomically. For relaxed access, see
    /// [`VMMemoryDefinition::current_length()`].
    pub current_length: AtomicUsize,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMMemoryDefinition {}
463
impl VMMemoryDefinition {
    /// Loads the current logical byte length of this linear memory with the
    /// given atomic `ordering`.
    ///
    /// See the docs on the `current_length` field for why this is atomic;
    /// `Ordering::Relaxed` is the expected choice for non-shared memories.
    pub fn current_length(&self, ordering: Ordering) -> usize {
        self.current_length.load(ordering)
    }
}
469
/// The storage for a WebAssembly global defined within the instance.
///
/// The 16-byte, 16-aligned storage is large and aligned enough to hold any
/// Wasm value up to and including `v128`; the typed accessors on the `impl`
/// reinterpret these bytes in place.
///
/// TODO: Pack the globals more densely, rather than using the same size
/// for every type.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    storage: [u8; 16],
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMGlobalDefinition {}
481
#[expect(
    clippy::cast_ptr_alignment,
    reason = "false positive: the manual repr(C, align(16)) ensures proper alignment"
)]
impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition`.
    ///
    /// The storage starts out as all-zero bytes, i.e. a zero/null value for
    /// every representable type.
    pub fn new() -> Self {
        Self { storage: [0; 16] }
    }

    /// Create a `VMGlobalDefinition` from a `VMVal`.
    ///
    /// # Unsafety
    ///
    /// This raw value's type must match the given `WasmValType`.
    #[expect(clippy::unnecessary_wraps, reason = "TODO")]
    pub unsafe fn from_vmval(
        _store: &mut StoreOpaque,
        wasm_ty: WasmValType,
        raw: VMVal,
    ) -> crate::Result<Self> {
        // Safety: ensured by caller
        unsafe {
            let mut global = Self::new();
            match wasm_ty {
                WasmValType::I32 => *global.as_i32_mut() = raw.get_i32(),
                WasmValType::I64 => *global.as_i64_mut() = raw.get_i64(),
                WasmValType::F32 => *global.as_f32_bits_mut() = raw.get_f32(),
                WasmValType::F64 => *global.as_f64_bits_mut() = raw.get_f64(),
                WasmValType::V128 => global.set_u128(raw.get_v128()),
                WasmValType::Ref(r) => match r.heap_type.top().0 {
                    WasmHeapTopType::Extern => {
                        // GC references are not implemented yet.
                        todo!()
                        // let r = VMGcRef::from_raw_u32(raw.get_externref());
                        // global.init_gc_ref(store.gc_store_mut()?, r.as_ref())
                    }
                    WasmHeapTopType::Any => {
                        // GC references are not implemented yet.
                        todo!()
                        // let r = VMGcRef::from_raw_u32(raw.get_anyref());
                        // global.init_gc_ref(store.gc_store_mut()?, r.as_ref())
                    }
                    WasmHeapTopType::Func => *global.as_func_ref_mut() = raw.get_funcref().cast(),
                    WasmHeapTopType::Cont => todo!("stack switching support"),
                    WasmHeapTopType::Exn => todo!("exception handling support"),
                },
            }
            Ok(global)
        }
    }

    /// Get this global's value as a `ValRaw`.
    ///
    /// # Unsafety
    ///
    /// This global's value's type must match the given `WasmValType`.
    #[expect(clippy::unnecessary_wraps, reason = "TODO")]
    pub unsafe fn to_vmval(
        &self,
        _store: &mut StoreOpaque,
        wasm_ty: WasmValType,
    ) -> crate::Result<VMVal> {
        // Safety: ensured by caller
        unsafe {
            Ok(match wasm_ty {
                WasmValType::I32 => VMVal::i32(*self.as_i32()),
                WasmValType::I64 => VMVal::i64(*self.as_i64()),
                WasmValType::F32 => VMVal::f32(*self.as_f32_bits()),
                WasmValType::F64 => VMVal::f64(*self.as_f64_bits()),
                WasmValType::V128 => VMVal::v128(self.get_u128()),
                WasmValType::Ref(r) => match r.heap_type.top().0 {
                    WasmHeapTopType::Extern => {
                        // GC references are not implemented yet.
                        // VMVal::externref(match self.as_gc_ref() {
                        //     Some(r) => store.gc_store_mut()?.clone_gc_ref(r).as_raw_u32(),
                        //     None => 0,
                        // }),
                        todo!()
                    }
                    WasmHeapTopType::Any => {
                        // GC references are not implemented yet.
                        //VMVal::anyref({
                        //     match self.as_gc_ref() {
                        //         Some(r) => store.gc_store_mut()?.clone_gc_ref(r).as_raw_u32(),
                        //         None => 0,
                        //     }
                        // }),
                        todo!()
                    }
                    WasmHeapTopType::Func => VMVal::funcref(self.as_func_ref().cast()),
                    WasmHeapTopType::Cont => todo!("stack switching support"),
                    WasmHeapTopType::Exn => todo!("exception handling support"),
                },
            })
        }
    }

    /// Return a reference to the value as an i32.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_i32(&self) -> &i32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<i32>()) }
    }

    /// Return a mutable reference to the value as an i32.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_i32_mut(&mut self) -> &mut i32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<i32>()) }
    }

    /// Return a reference to the value as a u32.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_u32(&self) -> &u32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u32>()) }
    }

    /// Return a mutable reference to the value as a u32.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_u32_mut(&mut self) -> &mut u32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>()) }
    }

    /// Return a reference to the value as an i64.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_i64(&self) -> &i64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<i64>()) }
    }

    /// Return a mutable reference to the value as an i64.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_i64_mut(&mut self) -> &mut i64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<i64>()) }
    }

    /// Return a reference to the value as a u64.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_u64(&self) -> &u64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u64>()) }
    }

    /// Return a mutable reference to the value as a u64.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_u64_mut(&mut self) -> &mut u64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>()) }
    }

    /// Return a reference to the value as an f32.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f32(&self) -> &f32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<f32>()) }
    }

    /// Return a mutable reference to the value as an f32.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f32_mut(&mut self) -> &mut f32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<f32>()) }
    }

    /// Return a reference to the value as f32 bits.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f32_bits(&self) -> &u32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u32>()) }
    }

    /// Return a mutable reference to the value as f32 bits.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>()) }
    }

    /// Return a reference to the value as an f64.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f64(&self) -> &f64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<f64>()) }
    }

    /// Return a mutable reference to the value as an f64.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f64_mut(&mut self) -> &mut f64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<f64>()) }
    }

    /// Return a reference to the value as f64 bits.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f64_bits(&self) -> &u64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u64>()) }
    }

    /// Return a mutable reference to the value as f64 bits.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>()) }
    }

    /// Gets the underlying 128-bit vector value.
    //
    // Note that vectors are stored in little-endian format while other types
    // are stored in native-endian format.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn get_u128(&self) -> u128 {
        // Safety: ensured by caller
        unsafe { u128::from_le(*(self.storage.as_ref().as_ptr().cast::<u128>())) }
    }

    /// Sets the 128-bit vector values.
    //
    // Note that vectors are stored in little-endian format while other types
    // are stored in native-endian format.
    ///
    /// # Safety
    ///
    /// The global must be a `v128` global.
    pub unsafe fn set_u128(&mut self, val: u128) {
        // Safety: ensured by caller
        unsafe {
            *self.storage.as_mut().as_mut_ptr().cast::<u128>() = val.to_le();
        }
    }

    /// Return a reference to the value as u128 bits.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_u128_bits(&self) -> &[u8; 16] {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<[u8; 16]>()) }
    }

    /// Return a mutable reference to the value as u128 bits.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_u128_bits_mut(&mut self) -> &mut [u8; 16] {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<[u8; 16]>()) }
    }

    // /// Return a reference to the global value as a borrowed GC reference.
    // pub unsafe fn as_gc_ref(&self) -> Option<&VMGcRef> {
    //     let raw_ptr = self.storage.as_ref().as_ptr().cast::<Option<VMGcRef>>();
    //     let ret = (*raw_ptr).as_ref();
    //     assert!(cfg!(feature = "gc") || ret.is_none());
    //     ret
    // }
    //
    // /// Initialize a global to the given GC reference.
    // pub unsafe fn init_gc_ref(&mut self, gc_store: &mut GcStore, gc_ref: Option<&VMGcRef>) {
    //     assert!(cfg!(feature = "gc") || gc_ref.is_none());
    //
    //     let dest = &mut *(self
    //         .storage
    //         .as_mut()
    //         .as_mut_ptr()
    //         .cast::<MaybeUninit<Option<VMGcRef>>>());
    //
    //     gc_store.init_gc_ref(dest, gc_ref)
    // }
    //
    // /// Write a GC reference into this global value.
    // pub unsafe fn write_gc_ref(&mut self, gc_store: &mut GcStore, gc_ref: Option<&VMGcRef>) {
    //     assert!(cfg!(feature = "gc") || gc_ref.is_none());
    //
    //     let dest = &mut *(self.storage.as_mut().as_mut_ptr().cast::<Option<VMGcRef>>());
    //     assert!(cfg!(feature = "gc") || dest.is_none());
    //
    //     gc_store.write_gc_ref(dest, gc_ref)
    // }

    /// Return the value as a raw `VMFuncRef` pointer (read out of storage by
    /// value, despite the `as_` name).
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_func_ref(&self) -> *mut VMFuncRef {
        // Safety: ensured by caller
        unsafe { *(self.storage.as_ref().as_ptr().cast::<*mut VMFuncRef>()) }
    }

    /// Return a mutable reference to the value as a `VMFuncRef` pointer.
    ///
    /// # Safety
    ///
    /// The global must currently hold a value of this type.
    pub unsafe fn as_func_ref_mut(&mut self) -> &mut *mut VMFuncRef {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<*mut VMFuncRef>()) }
    }
}
747
/// A WebAssembly tag defined within the instance.
///
/// A tag carries no runtime state beyond the shared type id of its function
/// signature.
#[derive(Debug)]
#[repr(C)]
pub struct VMTagDefinition {
    /// Function signature's type id.
    pub type_index: VMSharedTypeIndex,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTagDefinition {}
758
impl VMTagDefinition {
    /// Creates a new tag definition for the given registered function
    /// signature type.
    pub fn new(type_index: VMSharedTypeIndex) -> Self {
        Self { type_index }
    }
}
764
/// The VM caller-checked "funcref" record, for caller-side signature checking.
///
/// It consists of function pointer(s), a type id to be checked by the
/// caller, and the vmctx closure associated with this function.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMFuncRef {
    /// Function pointer for this funcref if being called via the "array"
    /// calling convention that `Func::new` et al use.
    pub array_call: VMArrayCallFunction,

    /// Function pointer for this funcref if being called via the calling
    /// convention we use when compiling Wasm.
    ///
    /// Most functions come with a function pointer that we can use when they
    /// are called from Wasm. The notable exception is when we `Func::wrap` a
    /// host function, and we don't have a Wasm compiler on hand to compile a
    /// Wasm-to-native trampoline for the function. In this case, we leave
    /// `wasm_call` empty until the function is passed as an import to Wasm (or
    /// otherwise exposed to Wasm via tables/globals). At this point, we look up
    /// a Wasm-to-native trampoline for the function in the Wasm's compiled
    /// module and use that to fill in `VMFunctionImport::wasm_call`. **However**
    /// there is no guarantee that the Wasm module has a trampoline for this
    /// function's signature. The Wasm module only has trampolines for its
    /// types, and if this function isn't of one of those types, then the Wasm
    /// module will not have a trampoline for it. This is actually okay, because
    /// it means that the Wasm cannot actually call this function. But it does
    /// mean that this field needs to be an `Option` even though it is non-null
    /// the vast vast vast majority of the time.
    pub wasm_call: Option<VmPtr<VMWasmCallFunction>>,

    /// Function signature's type id.
    pub type_index: VMSharedTypeIndex,

    /// The VM state associated with this function.
    ///
    /// The actual definition of what this pointer points to depends on the
    /// function being referenced: for core Wasm functions, this is a `*mut
    /// VMContext`, for host functions it is a `*mut VMHostFuncContext`, and for
    /// component functions it is a `*mut VMComponentContext`.
    pub vmctx: VmPtr<VMOpaqueContext>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMFuncRef {}
809
impl VMFuncRef {
    /// Invokes the `array_call` field of this `VMFuncRef` with the supplied
    /// arguments.
    ///
    /// This will invoke the function pointer in the `array_call` field with:
    ///
    /// * the `callee` vmctx as `self.vmctx`
    /// * the `caller` as `caller` specified here
    /// * the args pointer as the base of `params_and_results`
    /// * the args length as the length of `params_and_results`
    ///
    /// The `params_and_results` area must be large enough to both load all
    /// arguments from and store all results to.
    ///
    /// Returns whether a trap was recorded.
    ///
    /// # Unsafety
    ///
    /// This method is unsafe because it can be called with any pointers. They
    /// must all be valid for this wasm function call to proceed. For example
    /// `params_and_results` must be large enough to handle all the
    /// arguments/results for this call.
    ///
    /// Note that the unsafety invariants to maintain here are not currently
    /// exhaustively documented.
    pub unsafe fn array_call(
        &self,
        caller: NonNull<VMOpaqueContext>,
        params_and_results: NonNull<[VMVal]>,
    ) -> bool {
        // Safety: ensured by caller
        unsafe {
            (self.array_call)(
                self.vmctx.as_non_null(),
                caller,
                // The slice pointer is decomposed into its base pointer and
                // length for the C-ABI array-call signature.
                params_and_results.cast(),
                params_and_results.len(),
            )
        }
    }
}
850
/// Per-store state that is directly reachable from JIT-compiled code:
/// fuel accounting, epoch deadlines, the stack limit, and the saved
/// frame/program-counter registers used for stack walking across the
/// host/Wasm boundary.
#[derive(Debug)]
#[repr(C)]
pub struct VMStoreContext {
    // NB: 64-bit integer fields are located first with pointer-sized fields
    // trailing afterwards. That makes the offsets in this structure easier to
    // calculate on 32-bit platforms as we don't have to worry about the
    // alignment of 64-bit integers.
    //
    /// Indicator of how much fuel has been consumed and is remaining to
    /// WebAssembly.
    ///
    /// This field is typically negative and increments towards positive. Upon
    /// turning positive a wasm trap will be generated. This field is only
    /// modified if wasm is configured to consume fuel.
    pub fuel_consumed: UnsafeCell<i64>,

    /// Deadline epoch for interruption: if epoch-based interruption
    /// is enabled and the global (per engine) epoch counter is
    /// observed to reach or exceed this value, the guest code will
    /// yield if running asynchronously.
    pub epoch_deadline: UnsafeCell<u64>,

    /// Current stack limit of the wasm module.
    ///
    /// For more information see `crates/cranelift/src/lib.rs`.
    pub stack_limit: UnsafeCell<VirtualAddress>,

    /// The value of the frame pointer register when we last called from Wasm to
    /// the host.
    ///
    /// Maintained by our Wasm-to-host trampoline, and cleared just before
    /// calling into Wasm in `catch_traps`.
    ///
    /// This member is `0` when Wasm is actively running and has not called out
    /// to the host.
    ///
    /// Used to find the start of a contiguous sequence of Wasm frames when
    /// walking the stack.
    pub last_wasm_exit_fp: UnsafeCell<VirtualAddress>,

    /// The last Wasm program counter before we called from Wasm to the host.
    ///
    /// Maintained by our Wasm-to-host trampoline, and cleared just before
    /// calling into Wasm in `catch_traps`.
    ///
    /// This member is `0` when Wasm is actively running and has not called out
    /// to the host.
    ///
    /// Used when walking a contiguous sequence of Wasm frames.
    pub last_wasm_exit_pc: UnsafeCell<VirtualAddress>,

    /// The last host stack pointer before we called into Wasm from the host.
    ///
    /// NOTE(review): the prose says "stack pointer" but the field is named
    /// `last_wasm_entry_fp` — confirm which register the host-to-Wasm
    /// trampoline actually records here.
    ///
    /// Maintained by our host-to-Wasm trampoline, and cleared just before
    /// calling into Wasm in `catch_traps`.
    ///
    /// This member is `0` when Wasm is actively running and has not called out
    /// to the host.
    ///
    /// When a host function is wrapped into a `wasmtime::Func`, and is then
    /// called from the host, then this member has the sentinel value of `-1 as
    /// usize`, meaning that this contiguous sequence of Wasm frames is the
    /// empty sequence, and it is not safe to dereference the
    /// `last_wasm_exit_fp`.
    ///
    /// Used to find the end of a contiguous sequence of Wasm frames when
    /// walking the stack.
    pub last_wasm_entry_fp: UnsafeCell<VirtualAddress>,
}
920
// SAFETY: the above structure is `#[repr(C)]` and every field is an
// `UnsafeCell` around a `VmSafe` integer/address type.
unsafe impl VmSafe for VMStoreContext {}

// Safety: The `VMStoreContext` type is a pod-type with no destructor, and we
// don't access any fields from other threads, so add in these trait impls
// which are otherwise not auto-derived due to the `UnsafeCell`-wrapped
// `fuel_consumed` and `epoch_deadline` fields (an `UnsafeCell` is `Send` but
// never `Sync`, which would otherwise make the whole struct `!Sync`).
unsafe impl Send for VMStoreContext {}
// Safety: see above
unsafe impl Sync for VMStoreContext {}
931
932impl Default for VMStoreContext {
933 fn default() -> VMStoreContext {
934 VMStoreContext {
935 stack_limit: UnsafeCell::new(VirtualAddress::MAX),
936 fuel_consumed: UnsafeCell::new(0),
937 epoch_deadline: UnsafeCell::new(0),
938 last_wasm_exit_fp: UnsafeCell::new(VirtualAddress::ZERO),
939 last_wasm_exit_pc: UnsafeCell::new(VirtualAddress::ZERO),
940 last_wasm_entry_fp: UnsafeCell::new(VirtualAddress::ZERO),
941 }
942 }
943}
944
/// Generates `VMBuiltinFunctionsArray` from the master builtin-function list
/// (applied via `foreach_builtin_function!` below).
///
/// The `@ty` helper arms map the abstract parameter-type names used by the
/// builtin list (`pointer`, `vmctx`, ...) onto concrete Rust types.
macro_rules! define_builtin_array {
    (
        $(
            $( #[$attr:meta] )*
            $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
        )*
    ) => {
        /// An array that stores addresses of builtin functions. We translate code
        /// to use indirect calls. This way, we don't have to patch the code.
        #[repr(C)]
        pub struct VMBuiltinFunctionsArray {
            // One `unsafe extern "C"` function pointer per builtin, in
            // declaration order; compiled code indexes into this array.
            $(
                $name: unsafe extern "C" fn(
                    $(define_builtin_array!(@ty $param)),*
                ) $( -> define_builtin_array!(@ty $result))?,
            )*
        }

        impl VMBuiltinFunctionsArray {
            /// The canonical array instance, with every slot pointing at the
            /// corresponding `raw` builtin implementation.
            pub const INIT: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray {
                $(
                    $name: crate::wasm::vm::builtins::raw::$name,
                )*
            };

            /// Helper to call `expose_provenance()` on all contained pointers.
            ///
            /// This is required to be called at least once before entering wasm
            /// to inform the compiler that these function pointers may all be
            /// loaded/stored and used on the "other end" to reacquire
            /// provenance in Pulley. Pulley models hostcalls with a host
            /// pointer as the first parameter that's a function pointer under
            /// the hood, and this call ensures that the use of the function
            /// pointer is considered valid.
            pub fn expose_provenance(&self) -> NonNull<Self>{
                $(
                    (self.$name as *mut u8).expose_provenance();
                )*
                NonNull::from(self)
            }
        }
    };

    // Map abstract builtin parameter/result type names to Rust types.
    (@ty u32) => (u32);
    (@ty u64) => (u64);
    (@ty u8) => (u8);
    (@ty bool) => (bool);
    (@ty pointer) => (*mut u8);
    (@ty vmctx) => (NonNull<VMContext>);
}
996
// Instantiate `VMBuiltinFunctionsArray` for every known builtin, then verify
// its layout at compile time: exactly one pointer-sized slot per builtin with
// no padding, since JIT-compiled code indexes into this array by builtin
// index.
foreach_builtin_function!(define_builtin_array);
const_assert_eq!(
    size_of::<VMBuiltinFunctionsArray>(),
    size_of::<usize>() * (BuiltinFunctionIndex::builtin_functions_total_number() as usize)
);
1002
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields
// (plain `extern "C"` function pointers).
unsafe impl VmSafe for VMBuiltinFunctionsArray {}
1005
/// The VM "context", which is pointed to by the `vmctx` arg in Cranelift.
/// This has information about globals, memories, tables, and other runtime
/// state associated with the current instance.
///
/// The struct here is empty, as the sizes of these fields are dynamic, and
/// we can't describe them in Rust's type system. Sufficient memory is
/// allocated at runtime.
#[derive(Debug)]
#[repr(C, align(16))] // align 16 since globals are aligned to that and contained inside
pub struct VMContext {
    // Compiled code holds raw pointers into the trailing dynamic data, so a
    // `VMContext` must never be moved once allocated.
    pub(super) _marker: PhantomPinned,
}
1018
impl VMContext {
    /// Helper function to cast between context types using a debug assertion to
    /// protect against some mistakes.
    ///
    /// # Safety
    ///
    /// `opaque` must be a valid, dereferenceable pointer whose leading `magic`
    /// field can be read, and the pointee must actually be a core Wasm
    /// `VMContext` — the magic check below is only a `debug_assert!`, so in
    /// release builds a wrong-typed pointer is silently miscast.
    #[inline]
    pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMContext> {
        // Safety: ensured by caller
        unsafe {
            debug_assert_eq!(opaque.as_ref().magic, VMCONTEXT_MAGIC);
            opaque.cast()
        }
    }
}
1031
/// An "opaque" version of `VMContext` which must be explicitly casted to a target context.
///
/// Pointers to the concrete context types (`VMContext`,
/// `VMArrayCallHostFuncContext`) are cast to this type and their leading
/// `magic` field is then read through it (see `VMContext::from_opaque` and
/// `VMArrayCallHostFuncContext::from_opaque`). `#[repr(C)]` is therefore
/// required: it guarantees `magic` lives at offset zero, matching the
/// concrete context types which are all `#[repr(C)]` with their magic stored
/// first. Without it the default Rust layout leaves the field offset
/// unspecified.
#[repr(C)]
pub struct VMOpaqueContext {
    /// Discriminant identifying the concrete context type, e.g.
    /// `VMCONTEXT_MAGIC` or `VM_ARRAY_CALL_HOST_FUNC_MAGIC`.
    magic: u32,
    /// Contexts may be self-referential (see
    /// `VMArrayCallHostFuncContext::new`), so they must never be moved once
    /// observed behind a pointer.
    _marker: PhantomPinned,
}
1037
1038impl VMOpaqueContext {
1039 /// Helper function to clearly indicate that casts are desired.
1040 #[inline]
1041 pub fn from_vmcontext(ptr: NonNull<VMContext>) -> NonNull<VMOpaqueContext> {
1042 ptr.cast()
1043 }
1044
1045 /// Helper function to clearly indicate that casts are desired.
1046 #[inline]
1047 pub fn from_vm_array_call_host_func_context(
1048 ptr: NonNull<VMArrayCallHostFuncContext>,
1049 ) -> NonNull<VMOpaqueContext> {
1050 ptr.cast()
1051 }
1052}
1053
/// The `VM*Context` for array-call host functions.
///
/// Its `magic` field must always be
/// `VM_ARRAY_CALL_HOST_FUNC_MAGIC`, and this is how you can
/// determine whether a `VM*Context` is a `VMArrayCallHostFuncContext` versus a
/// different kind of context.
#[repr(C)]
#[derive(Debug)]
pub struct VMArrayCallHostFuncContext {
    /// Discriminant; always `VM_ARRAY_CALL_HOST_FUNC_MAGIC`. Must be the
    /// first field so it can be read through a `VMOpaqueContext` cast.
    magic: u32,
    // _padding: u32, // (on 64-bit systems)
    /// The function reference handed out for this host function. Its `vmctx`
    /// points back at this very context (patched in `new`), making the
    /// structure self-referential.
    pub(crate) func_ref: VMFuncRef,
    /// The caller-supplied host state backing this function.
    func: Box<dyn Any + Send + Sync>,
    /// Keeps the function's type registered (alive) for as long as this
    /// context exists.
    ty: RegisteredType,
}
1069
// Safety: the auto-traits are blocked by the raw pointers inside `func_ref`;
// those pointers only ever refer back to this same heap allocation (set up in
// `new` below), and the host state is constrained to `Box<dyn Any + Send +
// Sync>`. NOTE(review): original comment was `TODO` — confirm `VMFuncRef`'s
// pointers are never mutated concurrently across threads.
unsafe impl Send for VMArrayCallHostFuncContext {}
// Safety: see above
unsafe impl Sync for VMArrayCallHostFuncContext {}
1074
impl VMArrayCallHostFuncContext {
    /// Create the context for the given host function.
    ///
    /// The returned box is self-referential: `func_ref.vmctx` points back at
    /// the box's own allocation. It is initialized with a dangling
    /// placeholder and patched once the heap allocation (and therefore a
    /// stable address) exists.
    ///
    /// # Safety
    ///
    /// The `array_call` must be a pointer to a host (not Wasm) function and it
    /// must be `Send` and `Sync`.
    pub unsafe fn new(
        array_call: VMArrayCallFunction,
        func_ty: FuncType,
        func: Box<dyn Any + Send + Sync>,
    ) -> Box<VMArrayCallHostFuncContext> {
        let mut ctx = Box::new(VMArrayCallHostFuncContext {
            magic: VM_ARRAY_CALL_HOST_FUNC_MAGIC,
            func_ref: VMFuncRef {
                array_call,
                type_index: func_ty.type_index(),
                wasm_call: None,
                // Placeholder only — patched below once the box exists.
                vmctx: NonNull::dangling().into(),
            },
            func,
            ty: func_ty.into_registered_type(),
        });

        // The context is now heap-allocated and has a stable address, so the
        // self-referential vmctx pointer can be filled in.
        let vmctx =
            VMOpaqueContext::from_vm_array_call_host_func_context(NonNull::from(ctx.as_mut()));

        ctx.as_mut().func_ref.vmctx = VmPtr::from(vmctx);

        ctx
    }

    /// Helper function to cast between context types using a debug assertion to
    /// protect against some mistakes.
    ///
    /// # Safety
    ///
    /// `opaque` must be a valid, dereferenceable pointer, and the pointee must
    /// actually be a `VMArrayCallHostFuncContext` — the magic check is only a
    /// `debug_assert!` and does not run in release builds.
    #[inline]
    pub unsafe fn from_opaque(
        opaque: NonNull<VMOpaqueContext>,
    ) -> NonNull<VMArrayCallHostFuncContext> {
        // Safety: ensured by caller
        unsafe {
            // See comments in `VMContext::from_opaque` for this debug assert
            debug_assert_eq!(opaque.as_ref().magic, VM_ARRAY_CALL_HOST_FUNC_MAGIC);
            opaque.cast()
        }
    }

    /// Get the host state for this host function context.
    #[inline]
    pub fn func(&self) -> &(dyn Any + Send + Sync) {
        &*self.func
    }
}