From 11c968fa1f1b9beaa76726132293f0d088f00932 Mon Sep 17 00:00:00 2001
From: Andreas Kling <kling@serenityos.org>
Date: Sat, 23 Dec 2023 15:13:51 +0100
Subject: [PATCH] LibJS: Make Heap aware of all CellAllocators

Also add a link from HeapBlock to their owning CellAllocator.

This fixes an issue where the Heap would skip over non-size-based cell
allocators.
---
 .../Libraries/LibJS/Heap/CellAllocator.cpp    |  5 ++++-
 Userland/Libraries/LibJS/Heap/CellAllocator.h |  3 +++
 Userland/Libraries/LibJS/Heap/Heap.cpp        | 22 +++++++++++-----------
 Userland/Libraries/LibJS/Heap/Heap.h          | 18 +++++++++++++-----
 Userland/Libraries/LibJS/Heap/HeapBlock.cpp   |  7 ++++---
 Userland/Libraries/LibJS/Heap/HeapBlock.h     |  7 +++++--
 6 files changed, 40 insertions(+), 22 deletions(-)

diff --git a/Userland/Libraries/LibJS/Heap/CellAllocator.cpp b/Userland/Libraries/LibJS/Heap/CellAllocator.cpp
index e7b86ec0403..b4cf5fa41a8 100644
--- a/Userland/Libraries/LibJS/Heap/CellAllocator.cpp
+++ b/Userland/Libraries/LibJS/Heap/CellAllocator.cpp
@@ -19,8 +19,11 @@ CellAllocator::CellAllocator(size_t cell_size)
 
 Cell* CellAllocator::allocate_cell(Heap& heap)
 {
+    if (!m_list_node.is_in_list())
+        heap.register_cell_allocator({}, *this);
+
     if (m_usable_blocks.is_empty()) {
-        auto block = HeapBlock::create_with_cell_size(heap, m_cell_size);
+        auto block = HeapBlock::create_with_cell_size(heap, *this, m_cell_size);
         m_usable_blocks.append(*block.leak_ptr());
     }
 
diff --git a/Userland/Libraries/LibJS/Heap/CellAllocator.h b/Userland/Libraries/LibJS/Heap/CellAllocator.h
index 2474acf587e..33ad2f2c511 100644
--- a/Userland/Libraries/LibJS/Heap/CellAllocator.h
+++ b/Userland/Libraries/LibJS/Heap/CellAllocator.h
@@ -45,6 +45,9 @@ public:
     void block_did_become_empty(Badge<Heap>, HeapBlock&);
     void block_did_become_usable(Badge<Heap>, HeapBlock&);
 
+    IntrusiveListNode<CellAllocator> m_list_node;
+    using List = IntrusiveList<&CellAllocator::m_list_node>;
+
 private:
     size_t const m_cell_size;
 
diff --git a/Userland/Libraries/LibJS/Heap/Heap.cpp b/Userland/Libraries/LibJS/Heap/Heap.cpp
index 5fabb1c0b93..43700326747 100644
--- a/Userland/Libraries/LibJS/Heap/Heap.cpp
+++ b/Userland/Libraries/LibJS/Heap/Heap.cpp
@@ -50,17 +50,17 @@ Heap::Heap(VM& vm)
 #endif
 
     if constexpr (HeapBlock::min_possible_cell_size <= 16) {
-        m_allocators.append(make<CellAllocator>(16));
+        m_size_based_cell_allocators.append(make<CellAllocator>(16));
     }
     static_assert(HeapBlock::min_possible_cell_size <= 24, "Heap Cell tracking uses too much data!");
-    m_allocators.append(make<CellAllocator>(32));
-    m_allocators.append(make<CellAllocator>(64));
-    m_allocators.append(make<CellAllocator>(96));
-    m_allocators.append(make<CellAllocator>(128));
-    m_allocators.append(make<CellAllocator>(256));
-    m_allocators.append(make<CellAllocator>(512));
-    m_allocators.append(make<CellAllocator>(1024));
-    m_allocators.append(make<CellAllocator>(3072));
+    m_size_based_cell_allocators.append(make<CellAllocator>(32));
+    m_size_based_cell_allocators.append(make<CellAllocator>(64));
+    m_size_based_cell_allocators.append(make<CellAllocator>(96));
+    m_size_based_cell_allocators.append(make<CellAllocator>(128));
+    m_size_based_cell_allocators.append(make<CellAllocator>(256));
+    m_size_based_cell_allocators.append(make<CellAllocator>(512));
+    m_size_based_cell_allocators.append(make<CellAllocator>(1024));
+    m_size_based_cell_allocators.append(make<CellAllocator>(3072));
 }
 
 Heap::~Heap()
@@ -517,12 +517,12 @@ void Heap::sweep_dead_cells(bool print_report, Core::ElapsedTimer const& measure
 
     for (auto* block : empty_blocks) {
         dbgln_if(HEAP_DEBUG, "  - HeapBlock empty @ {}: cell_size={}", block, block->cell_size());
-        allocator_for_size(block->cell_size()).block_did_become_empty({}, *block);
+        block->cell_allocator().block_did_become_empty({}, *block);
     }
 
     for (auto* block : full_blocks_that_became_usable) {
         dbgln_if(HEAP_DEBUG, "  - HeapBlock usable again @ {}: cell_size={}", block, block->cell_size());
-        allocator_for_size(block->cell_size()).block_did_become_usable({}, *block);
+        block->cell_allocator().block_did_become_usable({}, *block);
     }
 
     if constexpr (HEAP_DEBUG) {
diff --git a/Userland/Libraries/LibJS/Heap/Heap.h b/Userland/Libraries/LibJS/Heap/Heap.h
index 79a0a67ed83..57e056ba256 100644
--- a/Userland/Libraries/LibJS/Heap/Heap.h
+++ b/Userland/Libraries/LibJS/Heap/Heap.h
@@ -81,6 +81,8 @@ public:
     void did_create_execution_context(Badge<ExecutionContext>, ExecutionContext&);
     void did_destroy_execution_context(Badge<ExecutionContext>, ExecutionContext&);
 
+    void register_cell_allocator(Badge<CellAllocator>, CellAllocator&);
+
     BlockAllocator& block_allocator() { return m_block_allocator; }
 
     void uproot_cell(Cell* cell);
@@ -120,19 +122,19 @@ private:
     ALWAYS_INLINE CellAllocator& allocator_for_size(size_t cell_size)
     {
         // FIXME: Use binary search?
-        for (auto& allocator : m_allocators) {
+        for (auto& allocator : m_size_based_cell_allocators) {
             if (allocator->cell_size() >= cell_size)
                 return *allocator;
         }
-        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_allocators.last()->cell_size());
+        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_size_based_cell_allocators.last()->cell_size());
         VERIFY_NOT_REACHED();
     }
 
     template<typename Callback>
     void for_each_block(Callback callback)
     {
-        for (auto& allocator : m_allocators) {
-            if (allocator->for_each_block(callback) == IterationDecision::Break)
+        for (auto& allocator : m_all_cell_allocators) {
+            if (allocator.for_each_block(callback) == IterationDecision::Break)
                 return;
         }
     }
@@ -143,7 +145,8 @@ private:
 
     bool m_should_collect_on_every_allocation { false };
 
-    Vector<NonnullOwnPtr<CellAllocator>> m_allocators;
+    Vector<NonnullOwnPtr<CellAllocator>> m_size_based_cell_allocators;
+    CellAllocator::List m_all_cell_allocators;
 
     HandleImpl::List m_handles;
     MarkedVectorBase::List m_marked_vectors;
@@ -195,4 +198,9 @@ inline void Heap::did_destroy_weak_container(Badge<WeakContainer>, WeakContainer
     m_weak_containers.remove(set);
 }
 
+inline void Heap::register_cell_allocator(Badge<CellAllocator>, CellAllocator& allocator)
+{
+    m_all_cell_allocators.append(allocator);
+}
+
 }
diff --git a/Userland/Libraries/LibJS/Heap/HeapBlock.cpp b/Userland/Libraries/LibJS/Heap/HeapBlock.cpp
index 59a2292f074..bedeb4747c4 100644
--- a/Userland/Libraries/LibJS/Heap/HeapBlock.cpp
+++ b/Userland/Libraries/LibJS/Heap/HeapBlock.cpp
@@ -18,7 +18,7 @@
 
 namespace JS {
 
-NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, size_t cell_size)
+NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, CellAllocator& cell_allocator, size_t cell_size)
 {
 #ifdef AK_OS_SERENITY
     char name[64];
@@ -27,12 +27,13 @@ NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, size_t cel
     char const* name = nullptr;
 #endif
     auto* block = static_cast<HeapBlock*>(heap.block_allocator().allocate_block(name));
-    new (block) HeapBlock(heap, cell_size);
+    new (block) HeapBlock(heap, cell_allocator, cell_size);
     return NonnullOwnPtr<HeapBlock>(NonnullOwnPtr<HeapBlock>::Adopt, *block);
 }
 
-HeapBlock::HeapBlock(Heap& heap, size_t cell_size)
+HeapBlock::HeapBlock(Heap& heap, CellAllocator& cell_allocator, size_t cell_size)
     : HeapBlockBase(heap)
+    , m_cell_allocator(cell_allocator)
     , m_cell_size(cell_size)
 {
     VERIFY(cell_size >= sizeof(FreelistEntry));
diff --git a/Userland/Libraries/LibJS/Heap/HeapBlock.h b/Userland/Libraries/LibJS/Heap/HeapBlock.h
index f852166f84f..b8e26338107 100644
--- a/Userland/Libraries/LibJS/Heap/HeapBlock.h
+++ b/Userland/Libraries/LibJS/Heap/HeapBlock.h
@@ -26,7 +26,7 @@ class HeapBlock : public HeapBlockBase {
 public:
     using HeapBlockBase::block_size;
 
-    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, size_t);
+    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, CellAllocator&, size_t);
 
     size_t cell_size() const { return m_cell_size; }
     size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
@@ -90,8 +90,10 @@ public:
 
     IntrusiveListNode<HeapBlock> m_list_node;
 
+    CellAllocator& cell_allocator() { return m_cell_allocator; }
+
 private:
-    HeapBlock(Heap&, size_t cell_size);
+    HeapBlock(Heap&, CellAllocator&, size_t cell_size);
 
     bool has_lazy_freelist() const { return m_next_lazy_freelist_index < cell_count(); }
 
@@ -106,6 +108,7 @@ private:
         return reinterpret_cast<Cell*>(&m_storage[index * cell_size()]);
     }
 
+    CellAllocator& m_cell_allocator;
     size_t m_cell_size { 0 };
     size_t m_next_lazy_freelist_index { 0 };
     GCPtr<FreelistEntry> m_freelist;