// Serenity Operating System
/*
 * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */
6
#pragma once

#include <AK/FixedArray.h>
#include <AK/IntrusiveList.h>
#include <AK/RefPtr.h>
#include <Kernel/Forward.h>
#include <Kernel/Library/ListedRefCounted.h>
#include <Kernel/Library/LockWeakable.h>
#include <Kernel/Locking/Mutex.h>
#include <Kernel/Memory/Region.h>

namespace Kernel::Memory {

// Abstract base for all virtual-memory objects: a ref-counted set of physical
// pages that one or more Regions can map into an address space.
class VMObject
    : public ListedRefCounted<VMObject, LockType::Spinlock>
    , public LockWeakable<VMObject> {
    friend class MemoryManager;
    friend class Region;

public:
    virtual ~VMObject();

    // Produce a copy of this VMObject; each subclass decides the strategy
    // (eager copy, CoW, sharing, ...).
    virtual ErrorOr<NonnullLockRefPtr<VMObject>> try_clone() = 0;

    // Cheap type-identification hooks, overridden by the matching subclasses.
    virtual bool is_anonymous() const { return false; }
    virtual bool is_inode() const { return false; }
    virtual bool is_shared_inode() const { return false; }
    virtual bool is_private_inode() const { return false; }

    size_t page_count() const { return m_physical_pages.size(); }

    // NOTE(review): entries are RefPtr (not Nonnull), so individual slots may
    // presumably be null for not-yet-materialized pages — confirm in subclasses.
    virtual ReadonlySpan<RefPtr<PhysicalPage>> physical_pages() const { return m_physical_pages.span(); }
    virtual Span<RefPtr<PhysicalPage>> physical_pages() { return m_physical_pages.span(); }

    // Size in bytes; always a whole multiple of PAGE_SIZE.
    size_t size() const { return m_physical_pages.size() * PAGE_SIZE; }

    virtual StringView class_name() const = 0;

    // Register a Region that maps this VMObject (under m_lock).
    ALWAYS_INLINE void add_region(Region& region)
    {
        SpinlockLocker locker(m_lock);
        m_regions.append(region);
    }

    // Unregister a previously added Region (under m_lock).
    ALWAYS_INLINE void remove_region(Region& region)
    {
        SpinlockLocker locker(m_lock);
        m_regions.remove(region);
    }

protected:
    static ErrorOr<FixedArray<RefPtr<PhysicalPage>>> try_create_physical_pages(size_t);
    ErrorOr<FixedArray<RefPtr<PhysicalPage>>> try_clone_physical_pages() const;
    explicit VMObject(FixedArray<RefPtr<PhysicalPage>>&&);

    // Invoke `callback` for every Region mapping this VMObject (defined below).
    template<typename Callback>
    void for_each_region(Callback);

    IntrusiveListNode<VMObject> m_list_node; // Hook for the global all_instances() list.
    FixedArray<RefPtr<PhysicalPage>> m_physical_pages;

    // Recursive spinlock guarding m_regions (see add_region / remove_region /
    // for_each_region).
    mutable RecursiveSpinlock<LockRank::None> m_lock {};

private:
    // Non-copyable / non-movable: identity matters (intrusive lists, weak refs).
    // Note: the deleted move-ctor also implicitly deletes the copy-ctor.
    VMObject& operator=(VMObject const&) = delete;
    VMObject& operator=(VMObject&&) = delete;
    VMObject(VMObject&&) = delete;

    Region::ListInVMObject m_regions;

public:
    using AllInstancesList = IntrusiveList<&VMObject::m_list_node>;
    // Global, spinlock-protected list of every live VMObject.
    static SpinlockProtected<VMObject::AllInstancesList, LockRank::None>& all_instances();
};
82template<typename Callback>
83inline void VMObject::for_each_region(Callback callback)
84{
85 SpinlockLocker lock(m_lock);
86 for (auto& region : m_regions) {
87 callback(region);
88 }
89}

}