Realtime-safe, wait-free concurrency library
// SPDX-FileCopyrightText: 2025 Lucas Baumann
// SPDX-FileCopyrightText: Lucas Baumann
//
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use core::{
    cell::UnsafeCell,
    mem::MaybeUninit,
    ptr::NonNull,
    sync::atomic::{self, AtomicU8, Ordering},
};

use alloc::boxed::Box;

#[derive(Clone, Copy, Debug)]
#[repr(u8)]
pub(crate) enum Ptr {
    Value1 = 0,
    Value2 = 0b1000,
}

impl Ptr {
    pub(crate) fn switch(&mut self) {
        *self = match *self {
            Ptr::Value1 => Self::Value2,
            Ptr::Value2 => Self::Value1,
        };
    }
}

#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
struct State(u8);

impl State {
    /// - Read ptr: value 1
    /// - No read in progress
    /// - Unique
    const INITIAL: u8 = 0b0000;

    const VALUE1_READ: u8 = 0b0010;
    const VALUE2_READ: u8 = 0b0100;
    const READ_MASK: u8 = 0b0110;
    const NOREAD_MASK: u8 = 0b1001;
    const UNIQUE_MASK: u8 = 0b0001;
    const INV_UNIQUE_MASK: u8 = 0b1110;
    const READ_PTR_MASK: u8 = 0b1000;
    const INV_READ_PTR: u8 = 0b0111;

    // Only does debug assertions that the state is valid.
    // These could be turned into `assert_unchecked`s, as they are still checked in debug builds.
    fn new(value: u8) -> Self {
        debug_assert!(
            (value & Self::READ_MASK).count_ones() <= 1,
            "max 1 read. value: {value:b}"
        );
        debug_assert!(
            value & 0b1111_0000 == 0,
            "only lower 4 bits used. value: {value:b}"
        );
        Self(value)
    }

    fn is_unique(self) -> bool {
        self.0 & Self::UNIQUE_MASK == 0
    }

    fn read_ptr(self) -> Ptr {
        // mask out everything except the read ptr
        if self.0 & Self::READ_PTR_MASK == 0 {
            Ptr::Value1
        } else {
            Ptr::Value2
        }
    }

    fn with_read(self, ptr: Ptr) -> Self {
        debug_assert_eq!(self.0 & Self::READ_MASK, 0, "No read currently");
        let mask = match ptr {
            Ptr::Value1 => Self::VALUE1_READ,
            Ptr::Value2 => Self::VALUE2_READ,
        };

        Self(self.0 | mask)
    }

    fn can_write(self, ptr: Ptr) -> bool {
        #[expect(
            clippy::match_like_matches_macro,
            reason = "I think it's more readable like this"
        )]
        match (self.0 & Self::READ_MASK, ptr) {
            (Self::VALUE1_READ, Ptr::Value1) => false,
            (Self::VALUE2_READ, Ptr::Value2) => false,
            _ => true,
        }
    }
}

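// Illustrative compile-time checks, added here to show how the inverse masks
// relate to their counterparts within the used low nibble (a sketch; not part
// of the original source):
const _: () = {
    assert!(State::INV_UNIQUE_MASK == !State::UNIQUE_MASK & 0b1111);
    assert!(State::INV_READ_PTR == !State::READ_PTR_MASK & 0b1111);
    assert!(State::NOREAD_MASK == !State::READ_MASK & 0b1111);
};
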
#[derive(Debug)]
pub(crate) struct Shared<T> {
    pub(crate) value_1: UnsafeCell<T>,
    pub(crate) value_2: UnsafeCell<T>,
    /// ### Bits from low to high
    /// | bit | meaning |
    /// |---|---|
    /// | 0 | 0: unique, 1: second object exists |
    /// | 1 | is value 1 being read |
    /// | 2 | is value 2 being read |
    /// | 3 | which value should be read next (0: value 1, 1: value 2) |
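    ///
    /// For example, state `0b0101` decodes to: a second handle exists (bit 0 set),
    /// value 2 is currently being read (bit 2 set), and value 1 should be read next
    /// (bit 3 clear).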
    ///
    /// This mixed use doesn't lead to more contention, because there are at most two threads.
    state: AtomicU8,
}

impl<T> Shared<T> {
    pub(crate) fn lock_read(&self) -> &UnsafeCell<T> {
        // The fetch_update loop could be replaced with:
        // - set the read state to both values,
        // - read the read ptr,
        // - set the read state to only that value.
        // This would need to be synchronized correctly and is probably not faster than this.
        let result = self
            .state
            .fetch_update(Ordering::Relaxed, Ordering::Acquire, |value| {
                let state = State::new(value);
                let ptr = state.read_ptr();
                Some(state.with_read(ptr).0)
            });
        // SAFETY: the fetch_update closure always returns Some, so the result is always Ok
        let result = unsafe { result.unwrap_unchecked() };
        // result is the previous value, so the read state isn't set in it, only the read ptr
        let ptr = State::new(result).read_ptr();
        self.get_value(ptr)
    }

    pub(crate) fn release_read_lock(&self) {
        self.state.fetch_and(State::NOREAD_MASK, Ordering::Release);
    }

    /// Tries to acquire the write lock for the given ptr.
    pub(crate) fn lock_write(&self, ptr: Ptr) -> Result<(), ()> {
        let state = State::new(self.state.load(Ordering::Relaxed));
        if state.can_write(ptr) {
            // only need to synchronize with the other thread when locking was successful
            atomic::fence(Ordering::Acquire);
            Ok(())
        } else {
            Err(())
        }
    }

    /// Publishes `ptr` as the value that should be read next.
    pub(crate) fn set_read_ptr(&self, ptr: Ptr) {
        let value = match ptr {
            Ptr::Value1 => self.state.fetch_and(State::INV_READ_PTR, Ordering::Release),
            Ptr::Value2 => self.state.fetch_or(State::READ_PTR_MASK, Ordering::Release),
        };
        // validates the previous state in debug builds
        State::new(value);
    }

    pub(crate) fn new(value: T, second_value: fn(&T) -> T) -> (NonNull<Self>, Ptr) {
        // make sure that the first value gets dropped if the constructor of the second panics
        struct DropGuard<T>(*mut T);
        impl<T> Drop for DropGuard<T> {
            fn drop(&mut self) {
                unsafe { self.0.drop_in_place() }
            }
        }

        let mut this: Box<MaybeUninit<Self>> = Box::new_uninit();
        let this_ptr = this.as_mut_ptr();
        let this = unsafe {
            let state_ptr = &raw mut (*this_ptr).state;
            let value_1_ptr = UnsafeCell::raw_get(&raw mut (*this_ptr).value_1);
            let value_2_ptr = UnsafeCell::raw_get(&raw mut (*this_ptr).value_2);

            state_ptr.write(AtomicU8::new(State::INITIAL));
            value_1_ptr.write(value);
            let guard = DropGuard(value_1_ptr);
            value_2_ptr.write(second_value(&*value_1_ptr));
            // after this point nothing can panic, so it is fine to disarm the guard
            core::mem::forget(guard);
            this.assume_init()
        };
        (
            // SAFETY: the Box is a valid allocation, so the pointer is not null
            unsafe { NonNull::new_unchecked(Box::into_raw(this)) },
            Ptr::Value2,
        )
    }

    pub(crate) fn get_value(&self, ptr: Ptr) -> &UnsafeCell<T> {
        match ptr {
            Ptr::Value1 => &self.value_1,
            Ptr::Value2 => &self.value_2,
        }
    }

    /// SAFETY: the caller needs synchronized shared access to the value behind `ptr`.
    /// If the access is not unique, `T` needs to be `Sync`.
    pub(crate) unsafe fn get_value_ref(&self, ptr: Ptr) -> &T {
        // SAFETY: the requirements on the function make this safe
        unsafe { &*self.get_value(ptr).get() }
    }

    /// Marks this allocation as shared, i.e. records that a second smart pointer exists.
    ///
    /// After calling this, the second smart pointer has to actually be created,
    /// otherwise the memory will be leaked.
    pub(crate) unsafe fn set_shared(&self) {
        self.state.fetch_or(State::UNIQUE_MASK, Ordering::Relaxed);
    }

    pub(crate) fn is_unique(&self) -> bool {
        State::new(self.state.load(Ordering::Acquire)).is_unique()
    }

    /// SAFETY: `this` needs to be valid and must not be used again after this call.
    pub(crate) unsafe fn drop(this: NonNull<Self>) {
        // SAFETY: function SAFETY precondition
        let old_state = unsafe { this.as_ref() }
            .state
            .fetch_and(State::INV_UNIQUE_MASK, Ordering::Release);

        if State::new(old_state).is_unique() {
            // see std Arc: this Acquire fence pairs with the Release above,
            // so the deallocation happens after all accesses from the other handle
            atomic::fence(Ordering::Acquire);

            // SAFETY: this was the last handle, so the allocation can be reclaimed
            drop(unsafe { Box::from_raw(this.as_ptr()) });
        }
    }
}

/// SAFETY: same as `SyncUnsafeCell`; the synchronization is done by the Reader and Writer.
///
/// This impl isn't actually needed for the library, as the public types have their own
/// Send & Sync impls, which are required because they hold a pointer to `Shared`.
/// It clarifies that multithreaded references are fine.
///
/// Send is auto-implemented, because `UnsafeCell` is Send if `T: Send`.
unsafe impl<T: Sync> Sync for Shared<T> {}
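
// A minimal single-threaded sketch of the intended handshake, added for
// illustration (a hypothetical test, assuming the standard test harness is
// available): the writer fills its slot, publishes it via set_read_ptr, and
// switches slots; the reader follows the read ptr.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn single_threaded_handshake() {
        // both slots start at 0; the constructor hands the writer slot 2
        let (shared, mut write_ptr) = Shared::new(0_u32, |v| *v);
        // SAFETY: the allocation was just created and is only freed by the
        // explicit drop at the end
        let shared_ref = unsafe { shared.as_ref() };

        // writer: no reader is in slot 2, so the lock must succeed
        assert!(shared_ref.lock_write(write_ptr).is_ok());
        // SAFETY: lock_write succeeded, so no reader observes this slot
        unsafe { *shared_ref.get_value(write_ptr).get() = 42 };
        // publish slot 2 and move the writer to slot 1 for the next write
        shared_ref.set_read_ptr(write_ptr);
        write_ptr.switch();

        // reader: lock_read follows the read ptr to slot 2
        let cell = shared_ref.lock_read();
        // SAFETY: lock_read marked slot 2 as being read
        assert_eq!(unsafe { *cell.get() }, 42);
        // while slot 2 is being read, the writer may lock slot 1 but not slot 2
        assert!(shared_ref.lock_write(write_ptr).is_ok());
        assert!(shared_ref.lock_write(Ptr::Value2).is_err());
        shared_ref.release_read_lock();

        // SAFETY: `shared` is still unique (set_shared was never called) and
        // is not used afterwards
        unsafe { Shared::drop(shared) };
    }
}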