//! Next Generation WASM Microkernel Operating System
1// Copyright 2025 Jonas Kruckenberg
2//
3// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
4// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
5// http://opensource.org/licenses/MIT>, at your option. This file may not be
6// copied, modified, or distributed except according to those terms.
7
8mod arena;
9mod frame;
10pub mod frame_list;
11
12use crate::arch;
13use crate::mem::bootstrap_alloc::BootstrapAllocator;
14use crate::mem::{PhysicalAddress, VirtualAddress};
15use alloc::vec::Vec;
16use arena::Arena;
17use arena::select_arenas;
18use cordyceps::list::List;
19use core::alloc::Layout;
20use core::cell::RefCell;
21use core::ptr::NonNull;
22use core::range::Range;
23use core::sync::atomic::AtomicUsize;
24use core::{cmp, fmt, iter, slice};
25use cpu_local::collection::CpuLocal;
26use fallible_iterator::FallibleIterator;
27use spin::{Mutex, OnceLock};
28
29use crate::mem::frame_alloc::frame_list::FrameList;
30pub use frame::{Frame, FrameInfo};
31
32pub static FRAME_ALLOC: OnceLock<FrameAllocator> = OnceLock::new();
33pub fn init(
34 boot_alloc: BootstrapAllocator,
35 fdt_region: Range<PhysicalAddress>,
36) -> &'static FrameAllocator {
37 FRAME_ALLOC.get_or_init(|| FrameAllocator::new(boot_alloc, fdt_region))
38}
39
/// Physical frame allocator backed by a set of arenas, with per-cpu free-frame
/// caches layered in front of a mutex-protected global pool.
#[derive(Debug)]
pub struct FrameAllocator {
    /// Global list of arenas that can be allocated from.
    global: Mutex<GlobalFrameAllocator>,
    /// The largest alignment any arena can satisfy; always at least
    /// `arch::PAGE_SIZE` (see [`FrameAllocator::new`]).
    max_alignment: usize,
    /// Per-cpu cache of frames to speed up allocation.
    cpu_local_cache: CpuLocal<RefCell<CpuLocalFrameCache>>,
    /// Number of frames - across all cpus - that are in cpu-local caches.
    /// This value must only ever be treated as a hint and should only be used to
    /// produce more accurate frame usage statistics.
    // NOTE(review): no code in this file updates this counter yet — presumably
    // maintained by code outside this chunk; confirm before relying on it.
    frames_in_caches_hint: AtomicUsize,
}
52
/// The mutex-protected global portion of the allocator: the arenas that frames
/// are drawn from when a cpu-local cache runs dry.
#[derive(Debug)]
struct GlobalFrameAllocator {
    /// Arenas are tried in order; the first one able to satisfy a request wins.
    arenas: Vec<Arena>,
}
57
/// Per-cpu cache of free frames, kept as a linked list of [`FrameInfo`]s so
/// that the common allocation path can avoid taking the global lock.
#[derive(Debug, Default)]
struct CpuLocalFrameCache {
    /// Frames owned by this cpu and available for allocation.
    free_list: List<FrameInfo>,
}
62
/// Allocation failure that may be due to resource exhaustion or an invalid combination of
/// arguments such as a too-large alignment. Importantly this error is *not permanent*: a
/// caller choosing to retry allocation at a later point in time, or with different arguments,
/// may receive a successful result.
#[derive(Debug)]
pub struct AllocError;
69
70// === impl FrameAllocator ===
71
72impl FrameAllocator {
73 pub fn new(boot_alloc: BootstrapAllocator, fdt_region: Range<PhysicalAddress>) -> Self {
74 let mut max_alignment = arch::PAGE_SIZE;
75 let mut arenas = Vec::new();
76
77 let phys_regions = boot_alloc
78 .free_regions()
79 .chain(iter::once(fdt_region))
80 .collect();
81 for selection_result in select_arenas(phys_regions).iterator() {
82 match selection_result {
83 Ok(selection) => {
84 tracing::trace!("selection {selection:?}");
85 let arena = Arena::from_selection(selection);
86 tracing::trace!("max arena alignment {}", arena.max_alignment());
87 max_alignment = cmp::max(max_alignment, arena.max_alignment());
88 arenas.push(arena);
89 }
90 Err(err) => {
91 tracing::error!("unable to include RAM region {:?}", err.range);
92 }
93 }
94 }
95
96 FrameAllocator {
97 global: Mutex::new(GlobalFrameAllocator { arenas }),
98 max_alignment,
99 frames_in_caches_hint: AtomicUsize::new(0),
100 cpu_local_cache: CpuLocal::new(),
101 }
102 }
103
104 /// Allocate a single [`Frame`].
105 pub fn alloc_one(&self) -> Result<Frame, AllocError> {
106 let mut cpu_local_cache = self.cpu_local_cache.get_or_default().borrow_mut();
107 let frame = cpu_local_cache
108 .allocate_one()
109 .or_else(|| {
110 let mut global_alloc = self.global.lock();
111
112 global_alloc.allocate_one()
113 })
114 .ok_or(AllocError)?;
115
116 // Safety: we just allocated the frame
117 let frame = unsafe { Frame::from_free_info(frame) };
118
119 #[cfg(debug_assertions)]
120 frame.assert_valid();
121 Ok(frame)
122 }
123
124 /// Allocate a single [`Frame`] and ensure the backing physical memory is zero initialized.
125 pub fn alloc_one_zeroed(&self) -> Result<Frame, AllocError> {
126 let frame = self.alloc_one()?;
127
128 // Translate the physical address into a virtual one through the physmap
129 let virt = VirtualAddress::from_phys(frame.addr()).unwrap();
130
131 // memset'ing the slice to zero
132 // Safety: the slice has just been allocated
133 unsafe {
134 slice::from_raw_parts_mut(virt.as_mut_ptr(), arch::PAGE_SIZE).fill(0);
135 }
136
137 Ok(frame)
138 }
139
140 /// Allocate a contiguous runs of [`Frame`] meeting the size and alignment requirements of `layout`.
141 pub fn alloc_contiguous(&self, layout: Layout) -> Result<FrameList, AllocError> {
142 // try to allocate from the per-cpu cache first
143 let mut cpu_local_cache = self.cpu_local_cache.get_or_default().borrow_mut();
144 let frames = cpu_local_cache
145 .allocate_contiguous(layout)
146 .or_else(|| {
147 let mut global_alloc = self.global.lock();
148
149 tracing::trace!(
150 "CPU-local cache exhausted, refilling {} frames...",
151 layout.size() / arch::PAGE_SIZE
152 );
153 let mut frames = global_alloc.allocate_contiguous(layout)?;
154 cpu_local_cache.free_list.append(&mut frames);
155
156 tracing::trace!("retrying allocation...");
157 // If this fails then we failed to pull enough frames from the global allocator
158 // which means we're fully out of frames
159 cpu_local_cache.allocate_contiguous(layout)
160 })
161 .ok_or(AllocError)?;
162
163 let frames = FrameList::from_iter(frames.into_iter().map(|info| {
164 // Safety: we just allocated the frame
165 unsafe { Frame::from_free_info(info) }
166 }));
167 #[cfg(debug_assertions)]
168 frames.assert_valid();
169 Ok(frames)
170 }
171
172 /// Allocate a contiguous runs of [`Frame`] meeting the size and alignment requirements of `layout`
173 /// and ensuring the backing physical memory is zero initialized.
174 pub fn alloc_contiguous_zeroed(&self, layout: Layout) -> Result<FrameList, AllocError> {
175 let frames = self.alloc_contiguous(layout)?;
176
177 // Translate the physical address into a virtual one through the physmap
178 let virt = VirtualAddress::from_phys(frames.first().unwrap().addr()).unwrap();
179
180 // memset'ing the slice to zero
181 // Safety: the slice has just been allocated
182 unsafe {
183 slice::from_raw_parts_mut(virt.as_mut_ptr(), frames.size()).fill(0);
184 }
185
186 Ok(frames)
187 }
188
189 pub fn max_alignment(&self) -> usize {
190 self.max_alignment
191 }
192}
193
194// === impl GlobalFrameAllocator ===
195
196impl GlobalFrameAllocator {
197 fn allocate_one(&mut self) -> Option<NonNull<FrameInfo>> {
198 for arena in &mut self.arenas {
199 if let Some(frame) = arena.allocate_one() {
200 return Some(frame);
201 }
202 }
203
204 None
205 }
206
207 fn allocate_contiguous(&mut self, layout: Layout) -> Option<List<FrameInfo>> {
208 for arena in &mut self.arenas {
209 if let Some(frames) = arena.allocate_contiguous(layout) {
210 return Some(frames);
211 }
212 }
213
214 None
215 }
216}
217
218// === impl CpuLocalFrameCache ===
219
220impl CpuLocalFrameCache {
221 fn allocate_one(&mut self) -> Option<NonNull<FrameInfo>> {
222 self.free_list.pop_front()
223 }
224
225 fn allocate_contiguous(&mut self, layout: Layout) -> Option<List<FrameInfo>> {
226 let frames = layout.size() / arch::PAGE_SIZE;
227
228 // short-circuit if the cache doesn't even have enough pages
229 if self.free_list.len() < frames {
230 return None;
231 }
232
233 let mut index = 0;
234 let mut base = self.free_list.iter();
235 'outer: while let Some(base_frame) = base.next() {
236 if base_frame.addr().alignment() >= layout.align() {
237 let mut prev_addr = base_frame.addr();
238
239 let mut c = 0;
240 for frame in base.by_ref() {
241 // we found a contiguous block
242 if c == frames {
243 break 'outer;
244 }
245
246 if frame.addr().checked_sub_addr(prev_addr).unwrap() > arch::PAGE_SIZE {
247 // frames aren't contiguous, so let's try the next one
248 tracing::trace!("frames not contiguous, trying next");
249 continue 'outer;
250 }
251
252 c += 1;
253 prev_addr = frame.addr();
254 }
255 }
256
257 tracing::trace!("base frame not aligned, trying next");
258 // the base wasn't aligned, try the next one
259 index += 1;
260 }
261
262 tracing::trace!("found contiguous block at index {index}");
263
264 // split the cache first at the start of the contiguous block. This will return the contiguous block
265 // plus everything after it
266 let mut split = self.free_list.split_off(index);
267 // the split the contiguous block after the number of frames we need
268 // and return the rest back to the cache
269 let mut rest = split.split_off(frames);
270 self.free_list.append(&mut rest);
271
272 Some(split)
273 }
274}
275
276// === impl AllocError ===
277
278impl fmt::Display for AllocError {
279 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
280 f.write_str("AllocError")
281 }
282}
283
284impl core::error::Error for AllocError {}