/*
 * Copyright (c) 2018-2020, Andreas Kling
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/FixedArray.h>
#include <AK/HashTable.h>
#include <AK/IntrusiveList.h>
#include <AK/RefCounted.h>
#include <AK/RefPtr.h>
#include <AK/Vector.h>
#include <AK/Weakable.h>
#include <Kernel/Forward.h>
#include <Kernel/Mutex.h>
#include <Kernel/VM/Region.h>

namespace Kernel {

// Implemented by objects that want to be notified when a VMObject is destroyed.
// See register_on_deleted_handler() / unregister_on_deleted_handler() below.
class VMObjectDeletedHandler {
public:
    virtual ~VMObjectDeletedHandler() = default;
    virtual void vmobject_deleted(VMObject&) = 0;
};

// A VMObject owns a set of physical pages that one or more Regions can map
// into an address space.
class VMObject : public RefCounted<VMObject>
    , public Weakable<VMObject> {
    friend class MemoryManager;
    friend class Region;

public:
    virtual ~VMObject();

    virtual RefPtr<VMObject> try_clone() = 0;

    virtual bool is_anonymous() const { return false; }
    virtual bool is_inode() const { return false; }
    virtual bool is_shared_inode() const { return false; }
    virtual bool is_private_inode() const { return false; }
    virtual bool is_contiguous() const { return false; }

    size_t page_count() const { return m_physical_pages.size(); }
    Span<RefPtr<PhysicalPage> const> physical_pages() const { return m_physical_pages.span(); }
    Span<RefPtr<PhysicalPage>> physical_pages() { return m_physical_pages.span(); }

    size_t size() const { return m_physical_pages.size() * PAGE_SIZE; }

    virtual StringView class_name() const = 0;

    ALWAYS_INLINE void add_region(Region& region)
    {
        ScopedSpinLock locker(m_lock);
        m_regions.append(region);
    }

    ALWAYS_INLINE void remove_region(Region& region)
    {
        ScopedSpinLock locker(m_lock);
        m_regions.remove(region);
    }

    void register_on_deleted_handler(VMObjectDeletedHandler& handler)
    {
        ScopedSpinLock locker(m_on_deleted_lock);
        m_on_deleted.set(&handler);
    }

    void unregister_on_deleted_handler(VMObjectDeletedHandler& handler)
    {
        ScopedSpinLock locker(m_on_deleted_lock);
        m_on_deleted.remove(&handler);
    }

protected:
    explicit VMObject(size_t);
    explicit VMObject(VMObject const&);

    template<typename Callback>
    void for_each_region(Callback);

    IntrusiveListNode<VMObject> m_list_node;
    FixedArray<RefPtr<PhysicalPage>> m_physical_pages;

    mutable RecursiveSpinLock m_lock;

private:
    VMObject& operator=(VMObject const&) = delete;
    VMObject& operator=(VMObject&&) = delete;
    VMObject(VMObject&&) = delete;

    HashTable<VMObjectDeletedHandler*> m_on_deleted;
    SpinLock<u8> m_on_deleted_lock;

    // Regions currently mapping (part of) this VMObject; guarded by m_lock.
    Region::ListInVMObject m_regions;

public:
    using List = IntrusiveList<VMObject, RawPtr<VMObject>, &VMObject::m_list_node>;
};

template<typename Callback>
inline void VMObject::for_each_region(Callback callback)
{
    ScopedSpinLock lock(m_lock);
    for (auto& region : m_regions) {
        callback(region);
    }
}

inline PhysicalPage const* Region::physical_page(size_t index) const
{
    VERIFY(index < page_count());
    return vmobject().physical_pages()[first_page_index() + index];
}

inline RefPtr<PhysicalPage>& Region::physical_page_slot(size_t index)
{
    VERIFY(index < page_count());
    return vmobject().physical_pages()[first_page_index() + index];
}

}