//! A Raspberry Pi Pico powered lightning detector.
1use core::{
2 alloc::{AllocError, Allocator, GlobalAlloc, Layout},
3 ptr::NonNull,
4};
5
6use embassy_sync::blocking_mutex::Mutex;
7use rlsf::Tlsf;
8use static_cell::ConstStaticCell;
9
10use crate::locks::AllocatorLock;
11
/// Two-level segregated-fit (TLSF) heap from `rlsf`: word-sized first- and
/// second-level bitmaps, with `usize::BITS` entries at each level, owning its
/// memory pool for the `'static` lifetime.
type Heap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;
13
/// Mutable allocator state; only ever accessed through the [`PicoHeap`]
/// mutex so that allocation and deallocation cannot race.
struct HeapInner {
    // The raw TLSF heap. Empty until `PicoHeap::init` donates the block.
    heap: Heap,
}
17
/// A lock-protected TLSF allocator backed by a static `MEMORY_SIZE`-byte
/// buffer.
///
/// Construct with [`PicoHeap::empty`], then call [`PicoHeap::init`] exactly
/// once before the first allocation. Implements both [`GlobalAlloc`] and
/// [`Allocator`].
pub struct PicoHeap<const MEMORY_SIZE: usize> {
    // TLSF bookkeeping behind the project's allocator lock.
    inner: Mutex<AllocatorLock, HeapInner>,
    // Backing storage; `ConstStaticCell` hands it out at most once (in `init`).
    block: ConstStaticCell<[u8; MEMORY_SIZE]>,
}
22
/// Errors that [`PicoHeap::init`] can return.
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum HeapError {
    /// `MEMORY_SIZE` is too small to hold the TLSF allocator's minimum
    /// usable block.
    MemoryBlockTooSmall,
    /// `init` was called more than once: the static backing buffer has
    /// already been taken.
    BlockAlreadyTaken,
}
29
30impl<const MEMORY_SIZE: usize> PicoHeap<MEMORY_SIZE> {
31 /// Creates an empty, uninitialised [`PicoHeap`]. This must be created with a sufficiently sized
32 /// `MEMORY_SIZE`, else [`PicoHeap::init`] will return a [`HeapError::MemoryBlockTooSmall`] error
33 /// when it is called.
34 pub const fn empty() -> Self {
35 Self {
36 inner: Mutex::new(HeapInner { heap: Heap::new() }),
37 block: ConstStaticCell::new([0u8; MEMORY_SIZE]),
38 }
39 }
40
41 /// Initialises the heap. Should be called only once, else this method will return an error.
42 pub fn init(&'static self) -> Result<(), HeapError> {
43 // SAFETY: We have checked whether the heap was initialised already, and that the block
44 // of memory satisfies our lifetime conditions thanks to the &'static self type of the
45 // init() method. We also never call lock_mut reetrantly, so this is safe.
46 unsafe {
47 self.inner.lock_mut(|inner| {
48 let block = self.block.try_take().ok_or(HeapError::BlockAlreadyTaken)?;
49
50 inner
51 .heap
52 .insert_free_block_ptr(NonNull::from_mut(block))
53 .ok_or(HeapError::MemoryBlockTooSmall)?;
54
55 Ok(())
56 })
57 }
58 }
59
60 pub fn alloc_block(&self, layout: Layout) -> Option<NonNull<u8>> {
61 // SAFETY: lock_mut is never called reentrantly, therefore this is safe.
62 unsafe { self.inner.lock_mut(|inner| inner.heap.allocate(layout)) }
63 }
64
65 /// # Safety
66 ///
67 /// The caller must ensure:
68 ///
69 /// * `ptr` is a block of memory currently allocated via this allocator and,
70 /// * `layout` is the same layout that was used to allocate that block of memory.
71 ///
72 /// Otherwise the behavior is undefined.
73 pub unsafe fn dealloc_block(&self, ptr: NonNull<u8>, layout: Layout) {
74 // SAFETY: lock_mut is never called reentrantly, therefore this is safe. Deallocation
75 // invariants are upheld by the caller.
76 unsafe {
77 self.inner
78 .lock_mut(|inner| inner.heap.deallocate(ptr, layout.align()));
79 }
80 }
81}
82
83unsafe impl<const MEMORY_SIZE: usize> GlobalAlloc for PicoHeap<MEMORY_SIZE> {
84 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
85 self.alloc_block(layout)
86 .map_or(core::ptr::null_mut(), NonNull::as_ptr)
87 }
88
89 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
90 unsafe {
91 self.dealloc_block(NonNull::new_unchecked(ptr), layout);
92 }
93 }
94}
95
96unsafe impl<const MEMORY_SIZE: usize> Allocator for PicoHeap<MEMORY_SIZE> {
97 fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, core::alloc::AllocError> {
98 match layout.size() {
99 0 => Ok(NonNull::slice_from_raw_parts(NonNull::dangling(), 0)),
100 size => self
101 .alloc_block(layout)
102 .map_or(Err(AllocError), |allocated| {
103 Ok(NonNull::slice_from_raw_parts(allocated, size))
104 }),
105 }
106 }
107
108 unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
109 if layout.size() != 0 {
110 unsafe {
111 self.dealloc_block(ptr, layout);
112 }
113 }
114 }
115}