//! Next Generation WASM Microkernel Operating System
1// Copyright 2025 Jonas Kruckenberg
2//
3// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
4// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
5// http://opensource.org/licenses/MIT>, at your option. This file may not be
6// copied, modified, or distributed except according to those terms.
7
8use alloc::boxed::Box;
9use core::any::Any;
10use core::cell::UnsafeCell;
11use core::ffi::c_void;
12use core::marker::PhantomPinned;
13use core::mem::MaybeUninit;
14use core::ptr::NonNull;
15use core::sync::atomic::{AtomicUsize, Ordering};
16use core::{fmt, ptr};
17
18use cranelift_entity::Unsigned;
19use kmem::VirtualAddress;
20use static_assertions::const_assert_eq;
21
22use crate::wasm::builtins::{BuiltinFunctionIndex, foreach_builtin_function};
23use crate::wasm::indices::{DefinedMemoryIndex, VMSharedTypeIndex};
24use crate::wasm::store::StoreOpaque;
25use crate::wasm::translate::{WasmHeapTopType, WasmValType};
26use crate::wasm::type_registry::RegisteredType;
27use crate::wasm::types::FuncType;
28use crate::wasm::vm::provenance::{VmPtr, VmSafe};
29
/// Magic value for core Wasm VM contexts.
///
/// This is stored at the start of all `VMContext` structures so that code
/// receiving an opaque context pointer can sanity-check what kind of context
/// it was handed. Spelled out, the little-endian bytes read `"core"`.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Equivalent of `VMCONTEXT_MAGIC` except for array-call host functions.
///
/// This is stored at the start of all `VMArrayCallHostFuncContext` structures
/// and double-checked on `VMArrayCallHostFuncContext::from_opaque`.
/// Spelled out, the little-endian bytes read `"ACHF"` (Array-Call Host Func).
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
40
/// A "raw" and unsafe representation of a WebAssembly value.
///
/// This is provided for use with the `Func::new_unchecked` and
/// `Func::call_unchecked` APIs. In general it's unlikely you should be using
/// this from Rust, rather using APIs like `Func::wrap` and `TypedFunc::call`.
///
/// This is notably an "unsafe" way to work with `Val` and it's recommended to
/// instead use `Val` where possible. An important note about this union is that
/// fields are all stored in little-endian format, regardless of the endianness
/// of the host system.
///
/// Being a `repr(C)` union, this type is exactly as large as its biggest
/// field (`v128`, 16 bytes), and every constructor below fully initializes
/// at least the first 64 bits so that reading any field never observes
/// uninitialized memory for the scalar cases.
#[repr(C)]
#[derive(Copy, Clone)]
pub union VMVal {
    /// A WebAssembly `i32` value.
    ///
    /// Note that the payload here is a Rust `i32` but the WebAssembly `i32`
    /// type does not assign an interpretation of the upper bit as either signed
    /// or unsigned. The Rust type `i32` is simply chosen for convenience.
    ///
    /// This value is always stored in a little-endian format.
    i32: i32,

    /// A WebAssembly `i64` value.
    ///
    /// Note that the payload here is a Rust `i64` but the WebAssembly `i64`
    /// type does not assign an interpretation of the upper bit as either signed
    /// or unsigned. The Rust type `i64` is simply chosen for convenience.
    ///
    /// This value is always stored in a little-endian format.
    i64: i64,

    /// A WebAssembly `f32` value.
    ///
    /// Note that the payload here is a Rust `u32`. This is to allow passing any
    /// representation of NaN into WebAssembly without risk of changing NaN
    /// payload bits as its gets passed around the system. Otherwise though this
    /// `u32` value is the return value of `f32::to_bits` in Rust.
    ///
    /// This value is always stored in a little-endian format.
    f32: u32,

    /// A WebAssembly `f64` value.
    ///
    /// Note that the payload here is a Rust `u64`. This is to allow passing any
    /// representation of NaN into WebAssembly without risk of changing NaN
    /// payload bits as its gets passed around the system. Otherwise though this
    /// `u64` value is the return value of `f64::to_bits` in Rust.
    ///
    /// This value is always stored in a little-endian format.
    f64: u64,

    /// A WebAssembly `v128` value.
    ///
    /// The payload here is a Rust `[u8; 16]` which has the same number of bits
    /// but note that `v128` in WebAssembly is often considered a vector type
    /// such as `i32x4` or `f64x2`. This means that the actual interpretation
    /// of the underlying bits is left up to the instructions which consume
    /// this value.
    ///
    /// This value is always stored in a little-endian format.
    v128: [u8; 16],

    /// A WebAssembly `funcref` value (or one of its subtypes).
    ///
    /// The payload here is a pointer which is runtime-defined. This is one of
    /// the main points of unsafety about the `VMVal` type as the validity of
    /// the pointer here is not easily verified and must be preserved by
    /// carefully calling the correct functions throughout the runtime.
    ///
    /// This value is always stored in a little-endian format.
    funcref: *mut c_void,

    /// A WebAssembly `externref` value (or one of its subtypes).
    ///
    /// The payload here is a compressed pointer value which is
    /// runtime-defined. This is one of the main points of unsafety about the
    /// `VMVal` type as the validity of the pointer here is not easily verified
    /// and must be preserved by carefully calling the correct functions
    /// throughout the runtime.
    ///
    /// This value is always stored in a little-endian format.
    externref: u32,

    /// A WebAssembly `anyref` value (or one of its subtypes).
    ///
    /// The payload here is a compressed pointer value which is
    /// runtime-defined. This is one of the main points of unsafety about the
    /// `VMVal` type as the validity of the pointer here is not easily verified
    /// and must be preserved by carefully calling the correct functions
    /// throughout the runtime.
    ///
    /// This value is always stored in a little-endian format.
    anyref: u32,
}

// Safety: This type is just a bag-of-bits so it's up to the caller to figure out how
// to safely deal with threading concerns and safely access interior bits.
unsafe impl Send for VMVal {}
// Safety: See above — the raw `funcref` pointer field is what suppresses the
// auto impls; the bits themselves carry no thread affinity.
unsafe impl Sync for VMVal {}
141
142impl fmt::Debug for VMVal {
143 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
144 struct Hex<T>(T);
145 impl<T: fmt::LowerHex> fmt::Debug for Hex<T> {
146 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
147 let bytes = size_of::<T>();
148 let hex_digits_per_byte = 2;
149 let hex_digits = bytes * hex_digits_per_byte;
150 write!(f, "0x{:0width$x}", self.0, width = hex_digits)
151 }
152 }
153
154 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
155 unsafe {
156 f.debug_struct("VMVal")
157 .field("i32", &Hex(self.i32))
158 .field("i64", &Hex(self.i64))
159 .field("f32", &Hex(self.f32))
160 .field("f64", &Hex(self.f64))
161 .field("v128", &Hex(u128::from_le_bytes(self.v128)))
162 .field("funcref", &self.funcref)
163 .field("externref", &Hex(self.externref))
164 .field("anyref", &Hex(self.anyref))
165 .finish()
166 }
167 }
168}
169
170impl VMVal {
171 /// Create a null reference that is compatible with any of
172 /// `{any,extern,func}ref`.
173 pub fn null() -> VMVal {
174 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
175 unsafe {
176 let raw = MaybeUninit::<Self>::zeroed().assume_init();
177 debug_assert_eq!(raw.get_anyref(), 0);
178 debug_assert_eq!(raw.get_externref(), 0);
179 debug_assert_eq!(raw.get_funcref(), ptr::null_mut());
180 raw
181 }
182 }
183
184 /// Creates a WebAssembly `i32` value
185 #[inline]
186 pub fn i32(i: i32) -> VMVal {
187 // Note that this is intentionally not setting the `i32` field, instead
188 // setting the `i64` field with a zero-extended version of `i`. For more
189 // information on this see the comments on `Lower for Result` in the
190 // `wasmtime` crate. Otherwise though all `VMVal` constructors are
191 // otherwise constrained to guarantee that the initial 64-bits are
192 // always initialized.
193 VMVal::u64(i.unsigned().into())
194 }
195
196 /// Creates a WebAssembly `i64` value
197 #[inline]
198 pub fn i64(i: i64) -> VMVal {
199 VMVal { i64: i.to_le() }
200 }
201
202 /// Creates a WebAssembly `i32` value
203 #[inline]
204 pub fn u32(i: u32) -> VMVal {
205 // See comments in `VMVal::i32` for why this is setting the upper
206 // 32-bits as well.
207 VMVal::u64(i.into())
208 }
209
210 /// Creates a WebAssembly `i64` value
211 #[inline]
212 #[expect(clippy::cast_possible_wrap, reason = "wrapping is intentional")]
213 pub fn u64(i: u64) -> VMVal {
214 VMVal::i64(i as i64)
215 }
216
217 /// Creates a WebAssembly `f32` value
218 #[inline]
219 pub fn f32(i: u32) -> VMVal {
220 // See comments in `VMVal::i32` for why this is setting the upper
221 // 32-bits as well.
222 VMVal::u64(i.into())
223 }
224
225 /// Creates a WebAssembly `f64` value
226 #[inline]
227 pub fn f64(i: u64) -> VMVal {
228 VMVal { f64: i.to_le() }
229 }
230
231 /// Creates a WebAssembly `v128` value
232 #[inline]
233 pub fn v128(i: u128) -> VMVal {
234 VMVal {
235 v128: i.to_le_bytes(),
236 }
237 }
238
239 /// Creates a WebAssembly `funcref` value
240 #[inline]
241 pub fn funcref(i: *mut c_void) -> VMVal {
242 VMVal {
243 funcref: i.map_addr(|i| i.to_le()),
244 }
245 }
246
247 /// Creates a WebAssembly `externref` value
248 #[inline]
249 pub fn externref(e: u32) -> VMVal {
250 VMVal {
251 externref: e.to_le(),
252 }
253 }
254
255 /// Creates a WebAssembly `anyref` value
256 #[inline]
257 pub fn anyref(r: u32) -> VMVal {
258 VMVal { anyref: r.to_le() }
259 }
260
261 /// Gets the WebAssembly `i32` value
262 #[inline]
263 pub fn get_i32(&self) -> i32 {
264 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
265 unsafe { i32::from_le(self.i32) }
266 }
267
268 /// Gets the WebAssembly `i64` value
269 #[inline]
270 pub fn get_i64(&self) -> i64 {
271 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
272 unsafe { i64::from_le(self.i64) }
273 }
274
275 /// Gets the WebAssembly `i32` value
276 #[inline]
277 pub fn get_u32(&self) -> u32 {
278 self.get_i32().unsigned()
279 }
280
281 /// Gets the WebAssembly `i64` value
282 #[inline]
283 pub fn get_u64(&self) -> u64 {
284 self.get_i64().unsigned()
285 }
286
287 /// Gets the WebAssembly `f32` value
288 #[inline]
289 pub fn get_f32(&self) -> u32 {
290 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
291 unsafe { u32::from_le(self.f32) }
292 }
293
294 /// Gets the WebAssembly `f64` value
295 #[inline]
296 pub fn get_f64(&self) -> u64 {
297 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
298 unsafe { u64::from_le(self.f64) }
299 }
300
301 /// Gets the WebAssembly `v128` value
302 #[inline]
303 pub fn get_v128(&self) -> u128 {
304 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
305 unsafe { u128::from_le_bytes(self.v128) }
306 }
307
308 /// Gets the WebAssembly `funcref` value
309 #[inline]
310 pub fn get_funcref(&self) -> *mut c_void {
311 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
312 unsafe { self.funcref.map_addr(usize::from_le) }
313 }
314
315 /// Gets the WebAssembly `externref` value
316 #[inline]
317 pub fn get_externref(&self) -> u32 {
318 // Safety: this is just a bag-of-bits, any bit pattern is valid (even if nonsensical)
319 u32::from_le(unsafe { self.externref })
320 }
321
322 /// Gets the WebAssembly `anyref` value
323 #[inline]
324 pub fn get_anyref(&self) -> u32 {
325 // Safety: this is just a bag-of-bits, any bit pattern is valid
326 u32::from_le(unsafe { self.anyref })
327 }
328}
329
/// Signature of host functions called via the "array" calling convention.
///
/// Parameters are the callee's context, the caller's context, a pointer to a
/// combined params/results buffer, and that buffer's length in `VMVal`s. The
/// `bool` return reports the call's outcome (see `VMFuncRef::array_call`).
pub type VMArrayCallFunction = unsafe extern "C" fn(
    NonNull<VMOpaqueContext>, // callee
    NonNull<VMOpaqueContext>, // caller
    NonNull<VMVal>,           // pointer to params/results array
    usize,                    // len of params/results array
) -> bool;
336
/// A function pointer that exposes the Wasm calling convention.
///
/// Like `VMFunctionBody`, this is an opaque marker type: only pointers to it
/// are meaningful, never values of it.
#[repr(transparent)]
pub struct VMWasmCallFunction(VMFunctionBody);

/// A placeholder byte-sized type which is just used to provide some amount of type
/// safety when dealing with pointers to JIT-compiled function bodies. Note that it's
/// deliberately not Copy, as we shouldn't be carelessly copying function body bytes
/// around.
#[repr(C)]
pub struct VMFunctionBody(u8);
// SAFETY: this structure is never read and is safe to pass to jit code.
unsafe impl VmSafe for VMFunctionBody {}
349
/// An imported function.
///
/// This is the shape compiled code expects for a function import slot in a
/// `VMContext` (hence `repr(C)` and the `VmSafe` marker below).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// Function pointer to use when calling this imported function from Wasm.
    pub wasm_call: VmPtr<VMWasmCallFunction>,

    /// Function pointer to use when calling this imported function with the
    /// "array" calling convention that `Func::new` et al use.
    pub array_call: VMArrayCallFunction,

    /// The VM state associated with this function.
    ///
    /// For Wasm functions defined by core wasm instances this will be `*mut
    /// VMContext`, but for lifted/lowered component model functions this will
    /// be a `VMComponentContext`, and for a host function it will be a
    /// `VMHostFuncContext`, etc.
    pub vmctx: VmPtr<VMOpaqueContext>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMFunctionImport {}
371
/// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub from: VmPtr<VMTableDefinition>,

    /// A pointer to the `VMContext` that owns the table description.
    ///
    /// Needed because operations such as growing the table must be performed
    /// through the owning instance.
    pub vmctx: VmPtr<VMContext>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTableImport {}
385
/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory imported from another instance.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub from: VmPtr<VMMemoryDefinition>,

    /// A pointer to the `VMContext` that owns the memory description.
    pub vmctx: VmPtr<VMContext>,

    /// The index of the memory in the containing `vmctx`.
    pub index: DefinedMemoryIndex,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMMemoryImport {}
402
/// The fields compiled code needs to access to utilize a WebAssembly global
/// variable imported from another instance.
///
/// Note that unlike with functions, tables, and memories, `VMGlobalImport`
/// doesn't include a `vmctx` pointer. Globals are never resized, and don't
/// require a `vmctx` pointer to access.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub from: VmPtr<VMGlobalDefinition>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMGlobalImport {}
417
/// The fields compiled code needs to access to utilize a WebAssembly
/// tag imported from another instance.
///
/// Like globals, tags carry no `vmctx` pointer: the description alone is
/// sufficient for compiled code.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTagImport {
    /// A pointer to the imported tag description.
    pub from: VmPtr<VMTagDefinition>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTagImport {}
428
/// The fields compiled code needs to access to utilize a WebAssembly table
/// defined within the instance.
#[derive(Debug)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: VmPtr<u8>,

    /// The current number of elements in the table.
    ///
    /// Atomic so the element count can be updated without exclusive access to
    /// the whole definition.
    pub current_elements: AtomicUsize,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTableDefinition {}
// Safety: The store synchronization protocol ensures this type will only ever be accessed in a thread-safe way
unsafe impl Send for VMTableDefinition {}
// Safety: The store synchronization protocol ensures this type will only ever be accessed in a thread-safe way
unsafe impl Sync for VMTableDefinition {}
446
/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// size in bytes.
#[derive(Debug)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address.
    pub base: VmPtr<u8>,

    /// The current logical size of this linear memory in bytes.
    ///
    /// This is atomic because shared memories must be able to grow their length
    /// atomically. For relaxed access, see
    /// [`VMMemoryDefinition::current_length()`].
    pub current_length: AtomicUsize,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMMemoryDefinition {}
465
impl VMMemoryDefinition {
    /// Returns the current logical byte length of this linear memory, loaded
    /// with the caller-chosen atomic `ordering` (use a relaxed ordering unless
    /// synchronizing with concurrent growth of a shared memory).
    pub fn current_length(&self, ordering: Ordering) -> usize {
        self.current_length.load(ordering)
    }
}
471
/// The storage for a WebAssembly global defined within the instance.
///
/// All global types share this one 16-byte, 16-byte-aligned cell; the typed
/// accessors on the `impl` reinterpret the bytes as the global's actual type.
///
/// TODO: Pack the globals more densely, rather than using the same size
/// for every type.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    // Raw little-endian-for-v128 / native-endian-otherwise value bytes.
    storage: [u8; 16],
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMGlobalDefinition {}
483
#[expect(
    clippy::cast_ptr_alignment,
    reason = "false positive: the manual repr(C, align(16)) ensures proper alignment"
)]
impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition` with all-zero storage.
    pub fn new() -> Self {
        Self { storage: [0; 16] }
    }

    /// Create a `VMGlobalDefinition` from a `VMVal`.
    ///
    /// # Unsafety
    ///
    /// This raw value's type must match the given `WasmValType`.
    #[expect(clippy::unnecessary_wraps, reason = "TODO")]
    pub unsafe fn from_vmval(
        _store: &mut StoreOpaque,
        wasm_ty: WasmValType,
        raw: VMVal,
    ) -> crate::Result<Self> {
        // Safety: ensured by caller
        unsafe {
            let mut global = Self::new();
            match wasm_ty {
                WasmValType::I32 => *global.as_i32_mut() = raw.get_i32(),
                WasmValType::I64 => *global.as_i64_mut() = raw.get_i64(),
                WasmValType::F32 => *global.as_f32_bits_mut() = raw.get_f32(),
                WasmValType::F64 => *global.as_f64_bits_mut() = raw.get_f64(),
                WasmValType::V128 => global.set_u128(raw.get_v128()),
                // Reference types dispatch on the top of the heap-type
                // hierarchy; the GC-managed cases are not implemented yet.
                WasmValType::Ref(r) => match r.heap_type.top().0 {
                    WasmHeapTopType::Extern => {
                        todo!()
                        // let r = VMGcRef::from_raw_u32(raw.get_externref());
                        // global.init_gc_ref(store.gc_store_mut()?, r.as_ref())
                    }
                    WasmHeapTopType::Any => {
                        todo!()
                        // let r = VMGcRef::from_raw_u32(raw.get_anyref());
                        // global.init_gc_ref(store.gc_store_mut()?, r.as_ref())
                    }
                    WasmHeapTopType::Func => *global.as_func_ref_mut() = raw.get_funcref().cast(),
                    WasmHeapTopType::Cont => todo!("stack switching support"),
                    WasmHeapTopType::Exn => todo!("exception handling support"),
                },
            }
            Ok(global)
        }
    }

    /// Get this global's value as a `ValRaw`.
    ///
    /// # Unsafety
    ///
    /// This global's value's type must match the given `WasmValType`.
    #[expect(clippy::unnecessary_wraps, reason = "TODO")]
    pub unsafe fn to_vmval(
        &self,
        _store: &mut StoreOpaque,
        wasm_ty: WasmValType,
    ) -> crate::Result<VMVal> {
        // Safety: ensured by caller
        unsafe {
            Ok(match wasm_ty {
                WasmValType::I32 => VMVal::i32(*self.as_i32()),
                WasmValType::I64 => VMVal::i64(*self.as_i64()),
                WasmValType::F32 => VMVal::f32(*self.as_f32_bits()),
                WasmValType::F64 => VMVal::f64(*self.as_f64_bits()),
                WasmValType::V128 => VMVal::v128(self.get_u128()),
                // Mirror of `from_vmval`: GC-managed reference globals are
                // not implemented yet.
                WasmValType::Ref(r) => match r.heap_type.top().0 {
                    WasmHeapTopType::Extern => {
                        // VMVal::externref(match self.as_gc_ref() {
                        //     Some(r) => store.gc_store_mut()?.clone_gc_ref(r).as_raw_u32(),
                        //     None => 0,
                        // }),
                        todo!()
                    }
                    WasmHeapTopType::Any => {
                        //VMVal::anyref({
                        //     match self.as_gc_ref() {
                        //     Some(r) => store.gc_store_mut()?.clone_gc_ref(r).as_raw_u32(),
                        //     None => 0,
                        // }
                        // }),
                        todo!()
                    }
                    WasmHeapTopType::Func => VMVal::funcref(self.as_func_ref().cast()),
                    WasmHeapTopType::Cont => todo!("stack switching support"),
                    WasmHeapTopType::Exn => todo!("exception handling support"),
                },
            })
        }
    }

    /// Return a reference to the value as an i32.
    pub unsafe fn as_i32(&self) -> &i32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<i32>()) }
    }

    /// Return a mutable reference to the value as an i32.
    pub unsafe fn as_i32_mut(&mut self) -> &mut i32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<i32>()) }
    }

    /// Return a reference to the value as a u32.
    pub unsafe fn as_u32(&self) -> &u32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u32>()) }
    }

    /// Return a mutable reference to the value as a u32.
    pub unsafe fn as_u32_mut(&mut self) -> &mut u32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>()) }
    }

    /// Return a reference to the value as an i64.
    pub unsafe fn as_i64(&self) -> &i64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<i64>()) }
    }

    /// Return a mutable reference to the value as an i64.
    pub unsafe fn as_i64_mut(&mut self) -> &mut i64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<i64>()) }
    }

    /// Return a reference to the value as a u64.
    pub unsafe fn as_u64(&self) -> &u64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u64>()) }
    }

    /// Return a mutable reference to the value as a u64.
    pub unsafe fn as_u64_mut(&mut self) -> &mut u64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>()) }
    }

    /// Return a reference to the value as an f32.
    pub unsafe fn as_f32(&self) -> &f32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<f32>()) }
    }

    /// Return a mutable reference to the value as an f32.
    pub unsafe fn as_f32_mut(&mut self) -> &mut f32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<f32>()) }
    }

    /// Return a reference to the value as f32 bits.
    pub unsafe fn as_f32_bits(&self) -> &u32 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u32>()) }
    }

    /// Return a mutable reference to the value as f32 bits.
    pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u32>()) }
    }

    /// Return a reference to the value as an f64.
    pub unsafe fn as_f64(&self) -> &f64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<f64>()) }
    }

    /// Return a mutable reference to the value as an f64.
    pub unsafe fn as_f64_mut(&mut self) -> &mut f64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<f64>()) }
    }

    /// Return a reference to the value as f64 bits.
    pub unsafe fn as_f64_bits(&self) -> &u64 {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<u64>()) }
    }

    /// Return a mutable reference to the value as f64 bits.
    pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<u64>()) }
    }

    /// Gets the underlying 128-bit vector value.
    //
    // Note that vectors are stored in little-endian format while other types
    // are stored in native-endian format.
    pub unsafe fn get_u128(&self) -> u128 {
        // Safety: ensured by caller
        unsafe { u128::from_le(*(self.storage.as_ref().as_ptr().cast::<u128>())) }
    }

    /// Sets the 128-bit vector values.
    //
    // Note that vectors are stored in little-endian format while other types
    // are stored in native-endian format.
    pub unsafe fn set_u128(&mut self, val: u128) {
        // Safety: ensured by caller
        unsafe {
            *self.storage.as_mut().as_mut_ptr().cast::<u128>() = val.to_le();
        }
    }

    /// Return a reference to the value as u128 bits.
    pub unsafe fn as_u128_bits(&self) -> &[u8; 16] {
        // Safety: ensured by caller
        unsafe { &*(self.storage.as_ref().as_ptr().cast::<[u8; 16]>()) }
    }

    /// Return a mutable reference to the value as u128 bits.
    pub unsafe fn as_u128_bits_mut(&mut self) -> &mut [u8; 16] {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<[u8; 16]>()) }
    }

    // /// Return a reference to the global value as a borrowed GC reference.
    // pub unsafe fn as_gc_ref(&self) -> Option<&VMGcRef> {
    //     let raw_ptr = self.storage.as_ref().as_ptr().cast::<Option<VMGcRef>>();
    //     let ret = (*raw_ptr).as_ref();
    //     assert!(cfg!(feature = "gc") || ret.is_none());
    //     ret
    // }
    //
    // /// Initialize a global to the given GC reference.
    // pub unsafe fn init_gc_ref(&mut self, gc_store: &mut GcStore, gc_ref: Option<&VMGcRef>) {
    //     assert!(cfg!(feature = "gc") || gc_ref.is_none());
    //
    //     let dest = &mut *(self
    //         .storage
    //         .as_mut()
    //         .as_mut_ptr()
    //         .cast::<MaybeUninit<Option<VMGcRef>>>());
    //
    //     gc_store.init_gc_ref(dest, gc_ref)
    // }
    //
    // /// Write a GC reference into this global value.
    // pub unsafe fn write_gc_ref(&mut self, gc_store: &mut GcStore, gc_ref: Option<&VMGcRef>) {
    //     assert!(cfg!(feature = "gc") || gc_ref.is_none());
    //
    //     let dest = &mut *(self.storage.as_mut().as_mut_ptr().cast::<Option<VMGcRef>>());
    //     assert!(cfg!(feature = "gc") || dest.is_none());
    //
    //     gc_store.write_gc_ref(dest, gc_ref)
    // }

    /// Return the value as a raw `VMFuncRef` pointer (read by value).
    pub unsafe fn as_func_ref(&self) -> *mut VMFuncRef {
        // Safety: ensured by caller
        unsafe { *(self.storage.as_ref().as_ptr().cast::<*mut VMFuncRef>()) }
    }

    /// Return a mutable reference to the value as a `VMFuncRef` pointer.
    pub unsafe fn as_func_ref_mut(&mut self) -> &mut *mut VMFuncRef {
        // Safety: ensured by caller
        unsafe { &mut *(self.storage.as_mut().as_mut_ptr().cast::<*mut VMFuncRef>()) }
    }
}
749
/// A WebAssembly tag defined within the instance.
///
/// A tag is currently just the registered type of its associated function
/// signature.
#[derive(Debug)]
#[repr(C)]
pub struct VMTagDefinition {
    /// Function signature's type id.
    pub type_index: VMSharedTypeIndex,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMTagDefinition {}
760
impl VMTagDefinition {
    /// Creates a tag definition for the given registered signature type.
    pub fn new(type_index: VMSharedTypeIndex) -> Self {
        Self { type_index }
    }
}
766
/// The VM caller-checked "funcref" record, for caller-side signature checking.
///
/// It consists of function pointer(s), a type id to be checked by the
/// caller, and the vmctx closure associated with this function.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMFuncRef {
    /// Function pointer for this funcref if being called via the "array"
    /// calling convention that `Func::new` et al use.
    pub array_call: VMArrayCallFunction,

    /// Function pointer for this funcref if being called via the calling
    /// convention we use when compiling Wasm.
    ///
    /// Most functions come with a function pointer that we can use when they
    /// are called from Wasm. The notable exception is when we `Func::wrap` a
    /// host function, and we don't have a Wasm compiler on hand to compile a
    /// Wasm-to-native trampoline for the function. In this case, we leave
    /// `wasm_call` empty until the function is passed as an import to Wasm (or
    /// otherwise exposed to Wasm via tables/globals). At this point, we look up
    /// a Wasm-to-native trampoline for the function in the Wasm's compiled
    /// module and use that to fill in `VMFunctionImport::wasm_call`. **However**
    /// there is no guarantee that the Wasm module has a trampoline for this
    /// function's signature. The Wasm module only has trampolines for its
    /// types, and if this function isn't of one of those types, then the Wasm
    /// module will not have a trampoline for it. This is actually okay, because
    /// it means that the Wasm cannot actually call this function. But it does
    /// mean that this field needs to be an `Option` even though it is non-null
    /// the vast vast vast majority of the time.
    pub wasm_call: Option<VmPtr<VMWasmCallFunction>>,

    /// Function signature's type id.
    pub type_index: VMSharedTypeIndex,

    /// The VM state associated with this function.
    ///
    /// The actual definition of what this pointer points to depends on the
    /// function being referenced: for core Wasm functions, this is a `*mut
    /// VMContext`, for host functions it is a `*mut VMHostFuncContext`, and for
    /// component functions it is a `*mut VMComponentContext`.
    pub vmctx: VmPtr<VMOpaqueContext>,
}
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMFuncRef {}
811
impl VMFuncRef {
    /// Invokes the `array_call` field of this `VMFuncRef` with the supplied
    /// arguments.
    ///
    /// This will invoke the function pointer in the `array_call` field with:
    ///
    /// * the `callee` vmctx as `self.vmctx`
    /// * the `caller` as `caller` specified here
    /// * the `params_and_results` pointer as the args/results buffer
    /// * the `params_and_results` length as the buffer's capacity
    ///
    /// The `args_and_results` area must be large enough to both load all
    /// arguments from and store all results to.
    ///
    /// Returns whether a trap was recorded.
    ///
    /// # Unsafety
    ///
    /// This method is unsafe because it can be called with any pointers. They
    /// must all be valid for this wasm function call to proceed. For example
    /// `args_and_results` must be large enough to handle all the arguments/results for this call.
    ///
    /// Note that the unsafety invariants to maintain here are not currently
    /// exhaustively documented.
    pub unsafe fn array_call(
        &self,
        caller: NonNull<VMOpaqueContext>,
        params_and_results: NonNull<[VMVal]>,
    ) -> bool {
        // Safety: ensured by caller
        unsafe {
            (self.array_call)(
                self.vmctx.as_non_null(),
                caller,
                // Pass the slice as (base pointer, length) per the array
                // calling convention's C ABI.
                params_and_results.cast(),
                params_and_results.len(),
            )
        }
    }
}
852
/// Store-wide runtime state that is read and written directly by compiled
/// Wasm code and the host/Wasm trampolines.
///
/// The `repr(C)` layout and `UnsafeCell` fields exist because these fields
/// are accessed through raw pointers at known offsets from generated code.
#[derive(Debug)]
#[repr(C)]
pub struct VMStoreContext {
    // NB: 64-bit integer fields are located first with pointer-sized fields
    // trailing afterwards. That makes the offsets in this structure easier to
    // calculate on 32-bit platforms as we don't have to worry about the
    // alignment of 64-bit integers.
    //
    /// Indicator of how much fuel has been consumed and is remaining to
    /// WebAssembly.
    ///
    /// This field is typically negative and increments towards positive. Upon
    /// turning positive a wasm trap will be generated. This field is only
    /// modified if wasm is configured to consume fuel.
    pub fuel_consumed: UnsafeCell<i64>,

    /// Deadline epoch for interruption: if epoch-based interruption
    /// is enabled and the global (per engine) epoch counter is
    /// observed to reach or exceed this value, the guest code will
    /// yield if running asynchronously.
    pub epoch_deadline: UnsafeCell<u64>,

    /// Current stack limit of the wasm module.
    ///
    /// For more information see `crates/cranelift/src/lib.rs`.
    pub stack_limit: UnsafeCell<VirtualAddress>,

    /// The value of the frame pointer register when we last called from Wasm to
    /// the host.
    ///
    /// Maintained by our Wasm-to-host trampoline, and cleared just before
    /// calling into Wasm in `catch_traps`.
    ///
    /// This member is `0` when Wasm is actively running and has not called out
    /// to the host.
    ///
    /// Used to find the start of a contiguous sequence of Wasm frames when
    /// walking the stack.
    pub last_wasm_exit_fp: UnsafeCell<VirtualAddress>,

    /// The last Wasm program counter before we called from Wasm to the host.
    ///
    /// Maintained by our Wasm-to-host trampoline, and cleared just before
    /// calling into Wasm in `catch_traps`.
    ///
    /// This member is `0` when Wasm is actively running and has not called out
    /// to the host.
    ///
    /// Used when walking a contiguous sequence of Wasm frames.
    pub last_wasm_exit_pc: UnsafeCell<VirtualAddress>,

    /// The last host stack pointer before we called into Wasm from the host.
    ///
    /// Maintained by our host-to-Wasm trampoline, and cleared just before
    /// calling into Wasm in `catch_traps`.
    ///
    /// This member is `0` when Wasm is actively running and has not called out
    /// to the host.
    ///
    /// When a host function is wrapped into a `wasmtime::Func`, and is then
    /// called from the host, then this member has the sentinel value of `-1 as
    /// usize`, meaning that this contiguous sequence of Wasm frames is the
    /// empty sequence, and it is not safe to dereference the
    /// `last_wasm_exit_fp`.
    ///
    /// Used to find the end of a contiguous sequence of Wasm frames when
    /// walking the stack.
    pub last_wasm_entry_fp: UnsafeCell<VirtualAddress>,
}
922
// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMStoreContext {}

// Safety: The `VMStoreContext` type is a pod-type with no destructor. The
// `UnsafeCell` wrappers (around `fuel_consumed`, `epoch_deadline`, etc.) are
// what suppress the automatic `Send`/`Sync` impls; we don't access any fields
// from other threads, so it is sound to re-add these impls here.
unsafe impl Send for VMStoreContext {}
// Safety: see above
unsafe impl Sync for VMStoreContext {}
933
934impl Default for VMStoreContext {
935 fn default() -> VMStoreContext {
936 VMStoreContext {
937 stack_limit: UnsafeCell::new(VirtualAddress::MAX),
938 fuel_consumed: UnsafeCell::new(0),
939 epoch_deadline: UnsafeCell::new(0),
940 last_wasm_exit_fp: UnsafeCell::new(VirtualAddress::MIN),
941 last_wasm_exit_pc: UnsafeCell::new(VirtualAddress::MIN),
942 last_wasm_entry_fp: UnsafeCell::new(VirtualAddress::MIN),
943 }
944 }
945}
946
// Expands the builtin-function listing (see `foreach_builtin_function!`) into
// the `VMBuiltinFunctionsArray` struct: one `unsafe extern "C" fn` pointer
// field per builtin, each initialized from `crate::wasm::vm::builtins::raw`.
macro_rules! define_builtin_array {
    (
        $(
            $( #[$attr:meta] )*
            $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
        )*
    ) => {
        /// An array that stores addresses of builtin functions. We translate code
        /// to use indirect calls. This way, we don't have to patch the code.
        #[repr(C)]
        pub struct VMBuiltinFunctionsArray {
            $(
                $name: unsafe extern "C" fn(
                    $(define_builtin_array!(@ty $param)),*
                ) $( -> define_builtin_array!(@ty $result))?,
            )*
        }

        impl VMBuiltinFunctionsArray {
            // Wires every slot to the corresponding raw builtin implementation.
            // #[expect(unused_doc_comments, reason = "")]
            pub const INIT: VMBuiltinFunctionsArray = VMBuiltinFunctionsArray {
                $(
                    $name: crate::wasm::vm::builtins::raw::$name,
                )*
            };

            /// Helper to call `expose_provenance()` on all contained pointers.
            ///
            /// This is required to be called at least once before entering wasm
            /// to inform the compiler that these function pointers may all be
            /// loaded/stored and used on the "other end" to reacquire
            /// provenance in Pulley. Pulley models hostcalls with a host
            /// pointer as the first parameter that's a function pointer under
            /// the hood, and this call ensures that the use of the function
            /// pointer is considered valid.
            pub fn expose_provenance(&self) -> NonNull<Self>{
                $(
                    (self.$name as *mut u8).expose_provenance();
                )*
                NonNull::from(self)
            }
        }
    };

    // Maps the symbolic type names used in the builtin listing onto the
    // concrete Rust types of the C-ABI signatures.
    (@ty u32) => (u32);
    (@ty u64) => (u64);
    (@ty u8) => (u8);
    (@ty bool) => (bool);
    (@ty pointer) => (*mut u8);
    (@ty vmctx) => (NonNull<VMContext>);
}
998
foreach_builtin_function!(define_builtin_array);
// Each entry in the array is exactly one function pointer, so the struct's
// total size must equal `number of builtins * size_of::<usize>()`. This
// compile-time check catches any accidental change to an entry's size.
const_assert_eq!(
    size_of::<VMBuiltinFunctionsArray>(),
    size_of::<usize>() * (BuiltinFunctionIndex::builtin_functions_total_number() as usize)
);

// SAFETY: the above structure is repr(C) and only contains `VmSafe` fields.
unsafe impl VmSafe for VMBuiltinFunctionsArray {}
1007
/// The VM "context", which is pointed to by the `vmctx` arg in Cranelift.
/// This has information about globals, memories, tables, and other runtime
/// state associated with the current instance.
///
/// The struct here is empty, as the sizes of these fields are dynamic, and
/// we can't describe them in Rust's type system. Sufficient memory is
/// allocated at runtime.
#[derive(Debug)]
#[repr(C, align(16))] // align 16 since globals are aligned to that and contained inside
pub struct VMContext {
    /// `PhantomPinned` makes `VMContext` `!Unpin`, so safe code cannot move
    /// one out from behind a pinned reference — raw `vmctx` pointers stored
    /// elsewhere in the VM rely on its address staying stable.
    pub(super) _marker: PhantomPinned,
}
1020
1021impl VMContext {
1022 /// Helper function to cast between context types using a debug assertion to
1023 /// protect against some mistakes.
1024 #[inline]
1025 pub unsafe fn from_opaque(opaque: NonNull<VMOpaqueContext>) -> NonNull<VMContext> {
1026 // Safety: ensured by caller
1027 unsafe {
1028 debug_assert_eq!(opaque.as_ref().magic, VMCONTEXT_MAGIC);
1029 opaque.cast()
1030 }
1031 }
1032}
1033
/// An "opaque" version of `VMContext` which must be explicitly casted to a target context.
///
/// Every concrete context type stores a `u32` magic value at its start (see
/// `VMCONTEXT_MAGIC` and `VM_ARRAY_CALL_HOST_FUNC_MAGIC`), which the
/// `from_opaque`-style helpers debug-assert on before casting.
//
// `#[repr(C)]` is required: `magic` is read through pointers cast from other
// `#[repr(C)]` context types, so this type's field offsets must be defined as
// well — the default Rust representation gives no such layout guarantee.
#[repr(C)]
pub struct VMOpaqueContext {
    /// Magic discriminant identifying the concrete context type.
    magic: u32,
    /// Keeps this type `!Unpin`, matching the concrete context types whose
    /// addresses must remain stable.
    _marker: PhantomPinned,
}
1039
1040impl VMOpaqueContext {
1041 /// Helper function to clearly indicate that casts are desired.
1042 #[inline]
1043 pub fn from_vmcontext(ptr: NonNull<VMContext>) -> NonNull<VMOpaqueContext> {
1044 ptr.cast()
1045 }
1046
1047 /// Helper function to clearly indicate that casts are desired.
1048 #[inline]
1049 pub fn from_vm_array_call_host_func_context(
1050 ptr: NonNull<VMArrayCallHostFuncContext>,
1051 ) -> NonNull<VMOpaqueContext> {
1052 ptr.cast()
1053 }
1054}
1055
/// The `VM*Context` for array-call host functions.
///
/// Its `magic` field must always be
/// `VM_ARRAY_CALL_HOST_FUNC_MAGIC`, and this is how you can
/// determine whether a `VM*Context` is a `VMArrayCallHostFuncContext` versus a
/// different kind of context.
#[repr(C)]
#[derive(Debug)]
pub struct VMArrayCallHostFuncContext {
    /// Always `VM_ARRAY_CALL_HOST_FUNC_MAGIC`. Must remain the first field so
    /// that reading the magic through a `VMOpaqueContext` cast is correct.
    magic: u32,
    // _padding: u32, // (on 64-bit systems)
    /// The funcref for this host function; its `vmctx` points back at this
    /// very context (patched up in `VMArrayCallHostFuncContext::new` once the
    /// context has a stable heap address).
    pub(crate) func_ref: VMFuncRef,
    /// Caller-provided host state associated with this function.
    func: Box<dyn Any + Send + Sync>,
    /// Registered type of this function — presumably held here to keep the
    /// type registration alive for the context's lifetime (TODO confirm).
    ty: RegisteredType,
}
1071
// Safety: `func` is bounded by `Send + Sync`, and the remaining fields are a
// `u32`, a `VMFuncRef`, and a `RegisteredType`. The raw `vmctx` pointer inside
// `func_ref` points back into this same allocation. NOTE(review): this impl
// assumes nothing mutates `func_ref`/`ty` concurrently — confirm.
unsafe impl Send for VMArrayCallHostFuncContext {}
// Safety: see above
unsafe impl Sync for VMArrayCallHostFuncContext {}
1076
impl VMArrayCallHostFuncContext {
    /// Create the context for the given host function.
    ///
    /// # Safety
    ///
    /// The `array_call` must be a pointer to a host (not Wasm) function and it
    /// must be `Send` and `Sync`.
    pub unsafe fn new(
        array_call: VMArrayCallFunction,
        func_ty: FuncType,
        func: Box<dyn Any + Send + Sync>,
    ) -> Box<VMArrayCallHostFuncContext> {
        // Build the context with a placeholder `vmctx` first: the real
        // self-referential pointer can only be produced once the context has
        // a stable heap address (i.e. after boxing).
        let mut ctx = Box::new(VMArrayCallHostFuncContext {
            magic: VM_ARRAY_CALL_HOST_FUNC_MAGIC,
            func_ref: VMFuncRef {
                array_call,
                type_index: func_ty.type_index(),
                wasm_call: None,
                // Dangling placeholder; patched below.
                vmctx: NonNull::dangling().into(),
            },
            func,
            ty: func_ty.into_registered_type(),
        });

        // Now that the context is boxed, point the funcref's `vmctx` back at
        // the context itself.
        let vmctx =
            VMOpaqueContext::from_vm_array_call_host_func_context(NonNull::from(ctx.as_mut()));

        ctx.as_mut().func_ref.vmctx = VmPtr::from(vmctx);

        ctx
    }

    /// Helper function to cast between context types using a debug assertion to
    /// protect against some mistakes.
    #[inline]
    pub unsafe fn from_opaque(
        opaque: NonNull<VMOpaqueContext>,
    ) -> NonNull<VMArrayCallHostFuncContext> {
        // Safety: ensured by caller
        unsafe {
            // See comments in `VMContext::from_opaque` for this debug assert
            debug_assert_eq!(opaque.as_ref().magic, VM_ARRAY_CALL_HOST_FUNC_MAGIC);
            opaque.cast()
        }
    }

    /// Get the host state for this host function context.
    #[inline]
    pub fn func(&self) -> &(dyn Any + Send + Sync) {
        &*self.func
    }
}