// Serenity Operating System
/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */
6
7#pragma once
8
9#include <AK/Assertions.h>
10#include <AK/Atomic.h>
11#include <AK/Format.h>
12#include <AK/NonnullRefPtr.h>
13#include <AK/Traits.h>
14#include <AK/Types.h>
15#ifdef KERNEL
16# include <Kernel/Arch/Processor.h>
17# include <Kernel/ScopedCritical.h>
18#endif
19
20#define NONNULLLOCKREFPTR_SCRUB_BYTE 0xa1
21
22namespace AK {
23
24template<typename T, typename PtrTraits>
25class LockRefPtr;
26
// NonnullLockRefPtr is a reference-counting smart pointer that is never null
// and whose stored pointer can be read/updated from multiple threads: the
// least significant bit of m_bits doubles as a spin-lock guarding pointer
// updates (see do_while_locked(), exchange() and add_ref() below). Because
// bit 0 is repurposed, the pointee must be aligned to at least 2 bytes —
// every constructor VERIFYs that the address has bit 0 clear.
template<typename T>
class [[nodiscard]] NonnullLockRefPtr {
    // These classes manipulate the raw m_bits representation directly.
    template<typename U, typename P>
    friend class LockRefPtr;
    template<typename U>
    friend class NonnullLockRefPtr;
    template<typename U>
    friend class LockWeakPtr;

public:
    using ElementType = T;

    // Tag used by adopt_lock_ref(): take over an existing reference
    // instead of adding a new one.
    enum AdoptTag { Adopt };

    // Construct from an object, adding a reference to it.
    ALWAYS_INLINE NonnullLockRefPtr(T const& object)
        : m_bits((FlatPtr)&object)
    {
        VERIFY(!(m_bits & 1)); // Bit 0 is reserved for the lock flag.
        const_cast<T&>(object).ref();
    }
    // Converting construction from a compatible pointee type (e.g. derived -> base).
    template<typename U>
    ALWAYS_INLINE NonnullLockRefPtr(U const& object)
    requires(IsConvertible<U*, T*>)
        : m_bits((FlatPtr) static_cast<T const*>(&object))
    {
        VERIFY(!(m_bits & 1));
        const_cast<T&>(static_cast<T const&>(object)).ref();
    }
    // Adopt an existing reference: no ref() here, the caller's reference is taken over.
    ALWAYS_INLINE NonnullLockRefPtr(AdoptTag, T& object)
        : m_bits((FlatPtr)&object)
    {
        VERIFY(!(m_bits & 1));
    }
    // Move construction steals the reference out of `other`; no refcount traffic.
    ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr&& other)
        : m_bits((FlatPtr)&other.leak_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    template<typename U>
    ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U>&& other)
    requires(IsConvertible<U*, T*>)
        : m_bits((FlatPtr)&other.leak_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    // Copy construction adds a reference (add_ref() briefly locks `other`).
    ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr const& other)
        : m_bits((FlatPtr)other.add_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    template<typename U>
    ALWAYS_INLINE NonnullLockRefPtr(NonnullLockRefPtr<U> const& other)
    requires(IsConvertible<U*, T*>)
        : m_bits((FlatPtr)other.add_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    ALWAYS_INLINE ~NonnullLockRefPtr()
    {
        // Drop our reference...
        assign(nullptr);
#ifdef SANITIZE_PTRS
        // ...and scrub the stored bits so use-after-destroy is easier to spot.
        m_bits.store(explode_byte(NONNULLLOCKREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
#endif
    }

    // Conversions from owning (OwnPtr) or nullable (LockRefPtr) pointer types
    // are explicitly disallowed: they would be ambiguous about ownership or
    // could smuggle in a null pointer.
    template<typename U>
    NonnullLockRefPtr(OwnPtr<U> const&) = delete;
    template<typename U>
    NonnullLockRefPtr& operator=(OwnPtr<U> const&) = delete;

    template<typename U>
    NonnullLockRefPtr(LockRefPtr<U> const&) = delete;
    template<typename U>
    NonnullLockRefPtr& operator=(LockRefPtr<U> const&) = delete;
    NonnullLockRefPtr(LockRefPtr<T> const&) = delete;
    NonnullLockRefPtr& operator=(LockRefPtr<T> const&) = delete;

    // Copy assignment: ref the new pointee first (via add_ref), then
    // assign() unrefs whatever we pointed at before.
    NonnullLockRefPtr& operator=(NonnullLockRefPtr const& other)
    {
        if (this != &other)
            assign(other.add_ref());
        return *this;
    }

    template<typename U>
    NonnullLockRefPtr& operator=(NonnullLockRefPtr<U> const& other)
    requires(IsConvertible<U*, T*>)
    {
        assign(other.add_ref());
        return *this;
    }

    // Move assignment: take over other's reference; no refcount traffic.
    ALWAYS_INLINE NonnullLockRefPtr& operator=(NonnullLockRefPtr&& other)
    {
        if (this != &other)
            assign(&other.leak_ref());
        return *this;
    }

    template<typename U>
    NonnullLockRefPtr& operator=(NonnullLockRefPtr<U>&& other)
    requires(IsConvertible<U*, T*>)
    {
        assign(&other.leak_ref());
        return *this;
    }

    // Rebind to a different object, adding a reference to it.
    NonnullLockRefPtr& operator=(T const& object)
    {
        const_cast<T&>(object).ref();
        assign(const_cast<T*>(&object));
        return *this;
    }

    // Hand the (still-referenced) object to the caller and clear our pointer.
    // After this call, this NonnullLockRefPtr internally holds null and must
    // not be dereferenced again.
    [[nodiscard]] ALWAYS_INLINE T& leak_ref()
    {
        T* ptr = exchange(nullptr);
        VERIFY(ptr);
        return *ptr;
    }

    ALWAYS_INLINE RETURNS_NONNULL T* ptr()
    {
        return as_nonnull_ptr();
    }
    ALWAYS_INLINE RETURNS_NONNULL T const* ptr() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE RETURNS_NONNULL T* operator->()
    {
        return as_nonnull_ptr();
    }
    ALWAYS_INLINE RETURNS_NONNULL T const* operator->() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE T& operator*()
    {
        return *as_nonnull_ptr();
    }
    ALWAYS_INLINE T const& operator*() const
    {
        return *as_nonnull_ptr();
    }

    ALWAYS_INLINE RETURNS_NONNULL operator T*()
    {
        return as_nonnull_ptr();
    }
    ALWAYS_INLINE RETURNS_NONNULL operator T const*() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE operator T&()
    {
        return *as_nonnull_ptr();
    }
    ALWAYS_INLINE operator T const&() const
    {
        return *as_nonnull_ptr();
    }

    // A NonnullLockRefPtr can never be null, so truthiness tests are
    // meaningless and therefore disallowed.
    operator bool() const = delete;
    bool operator!() const = delete;

    void swap(NonnullLockRefPtr& other)
    {
        if (this == &other)
            return;

        // NOTE: swap is not atomic!
        T* other_ptr = other.exchange(nullptr);
        T* ptr = exchange(other_ptr);
        other.exchange(ptr);
    }

    template<typename U>
    void swap(NonnullLockRefPtr<U>& other)
    requires(IsConvertible<U*, T*>)
    {
        // NOTE: swap is not atomic!
        U* other_ptr = other.exchange(nullptr);
        T* ptr = exchange(other_ptr);
        // NOTE(review): this hands a T* to NonnullLockRefPtr<U>::exchange(U*);
        // that only compiles where T* converts to U* — verify for U != T.
        other.exchange(ptr);
    }

private:
    NonnullLockRefPtr() = delete;

    // Load the stored pointer, masking off the lock bit. May legitimately
    // return null during teardown (see leak_ref()/assign()).
    ALWAYS_INLINE T* as_ptr() const
    {
        return (T*)(m_bits.load(AK::MemoryOrder::memory_order_relaxed) & ~(FlatPtr)1);
    }

    // Like as_ptr(), but VERIFYs the pointer is non-null before returning it.
    ALWAYS_INLINE RETURNS_NONNULL T* as_nonnull_ptr() const
    {
        T* ptr = (T*)(m_bits.load(AK::MemoryOrder::memory_order_relaxed) & ~(FlatPtr)1);
        VERIFY(ptr);
        return ptr;
    }

    // Run f(T*) with the lock bit held, so the stored pointer cannot change
    // while f executes.
    template<typename F>
    void do_while_locked(F f) const
    {
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        FlatPtr bits;
        for (;;) {
            // fetch_or returns the previous value: if its bit 0 was clear,
            // we are the ones who set it and now own the lock.
            bits = m_bits.fetch_or(1, AK::MemoryOrder::memory_order_acq_rel);
            if (!(bits & 1))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        VERIFY(!(bits & 1));
        f((T*)bits);
        // Release the lock by storing back the plain pointer (bit 0 clear).
        m_bits.store(bits, AK::MemoryOrder::memory_order_release);
    }

    // Replace the stored pointer with new_ptr and drop our reference to the
    // previous pointee (if any).
    ALWAYS_INLINE void assign(T* new_ptr)
    {
        T* prev_ptr = exchange(new_ptr);
        unref_if_not_null(prev_ptr);
    }

    // Atomically swap in new_ptr, but only while the lock bit is clear;
    // returns the previous pointer. Spins if another thread holds the lock.
    ALWAYS_INLINE T* exchange(T* new_ptr)
    {
        VERIFY(!((FlatPtr)new_ptr & 1));
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        // Only exchange while not locked
        FlatPtr expected = m_bits.load(AK::MemoryOrder::memory_order_relaxed);
        for (;;) {
            expected &= ~(FlatPtr)1; // only if lock bit is not set
            if (m_bits.compare_exchange_strong(expected, (FlatPtr)new_ptr, AK::MemoryOrder::memory_order_acq_rel))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        VERIFY(!(expected & 1));
        return (T*)expected;
    }

    // Lock the pointer, ref() the current pointee, unlock, and return the
    // pointee. Used by copy construction/assignment; const because copying
    // from a const source must still be able to take the lock.
    T* add_ref() const
    {
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        // Lock the pointer
        FlatPtr expected = m_bits.load(AK::MemoryOrder::memory_order_relaxed);
        for (;;) {
            expected &= ~(FlatPtr)1; // only if lock bit is not set
            if (m_bits.compare_exchange_strong(expected, expected | 1, AK::MemoryOrder::memory_order_acq_rel))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }

        // Add a reference now that we locked the pointer
        ref_if_not_null((T*)expected);

        // Unlock the pointer again
        m_bits.store(expected, AK::MemoryOrder::memory_order_release);
        return (T*)expected;
    }

    // Raw pointee address with bit 0 doubling as a spin-lock flag.
    // mutable so that add_ref()/do_while_locked() can lock a const object.
    mutable Atomic<FlatPtr> m_bits { 0 };
};
307
308template<typename T>
309inline NonnullLockRefPtr<T> adopt_lock_ref(T& object)
310{
311 return NonnullLockRefPtr<T>(NonnullLockRefPtr<T>::Adopt, object);
312}
313
314template<typename T>
315struct Formatter<NonnullLockRefPtr<T>> : Formatter<T const*> {
316 ErrorOr<void> format(FormatBuilder& builder, NonnullLockRefPtr<T> const& value)
317 {
318 return Formatter<T const*>::format(builder, value.ptr());
319 }
320};
321
// Free swap() overload so generic code (and ADL) can swap two
// NonnullLockRefPtrs, including ones with convertible pointee types.
// NOTE: delegates to the member swap, which is not atomic.
template<typename T, typename U>
inline void swap(NonnullLockRefPtr<T>& a, NonnullLockRefPtr<U>& b)
requires(IsConvertible<U*, T*>)
{
    a.swap(b);
}
328
329}
330
331template<typename T>
332struct Traits<NonnullLockRefPtr<T>> : public GenericTraits<NonnullLockRefPtr<T>> {
333 using PeekType = T*;
334 using ConstPeekType = T const*;
335 static unsigned hash(NonnullLockRefPtr<T> const& p) { return ptr_hash(p.ptr()); }
336 static bool equals(NonnullLockRefPtr<T> const& a, NonnullLockRefPtr<T> const& b) { return a.ptr() == b.ptr(); }
337};
338
339using AK::adopt_lock_ref;
340using AK::NonnullLockRefPtr;