LibJS: Make Heap aware of all CellAllocators

Also add a link from each HeapBlock to its owning CellAllocator.
This fixes an issue where the Heap would skip over non-size-based
cell allocators.
Andreas Kling 2023-12-23 15:13:51 +01:00
parent a31b988473
commit 11c968fa1f
Notes: sideshowbarker 2024-07-17 07:35:03 +09:00
6 changed files with 40 additions and 22 deletions

Userland/Libraries/LibJS/Heap/CellAllocator.cpp

@@ -19,8 +19,11 @@ CellAllocator::CellAllocator(size_t cell_size)
 Cell* CellAllocator::allocate_cell(Heap& heap)
 {
+    if (!m_list_node.is_in_list())
+        heap.register_cell_allocator({}, *this);
+
     if (m_usable_blocks.is_empty()) {
-        auto block = HeapBlock::create_with_cell_size(heap, m_cell_size);
+        auto block = HeapBlock::create_with_cell_size(heap, *this, m_cell_size);
         m_usable_blocks.append(*block.leak_ptr());
     }

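The hunk above is the heart of the fix: a CellAllocator lazily registers itself with the Heap on its first allocation, and the is_in_list() guard makes that registration idempotent. Below is a minimal standalone sketch of the same pattern, with a plain std::vector and a bool flag standing in for AK's IntrusiveList; the Registry and Allocator names are illustrative, not LibJS API.

#include <iostream>
#include <vector>

struct Allocator;

struct Registry {
    std::vector<Allocator*> all; // analogue of Heap::m_all_cell_allocators
};

struct Allocator {
    bool registered { false }; // stands in for m_list_node.is_in_list()

    void allocate(Registry& registry)
    {
        // Register on first use, exactly once, no matter who calls us.
        if (!registered) {
            registry.all.push_back(this);
            registered = true;
        }
        // ... a real allocator would hand out a cell here ...
    }
};

int main()
{
    Registry registry;
    Allocator allocator;
    allocator.allocate(registry);
    allocator.allocate(registry); // second call must not register again
    std::cout << registry.all.size() << '\n'; // prints 1
}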
Userland/Libraries/LibJS/Heap/CellAllocator.h

@@ -45,6 +45,9 @@ public:
     void block_did_become_empty(Badge<Heap>, HeapBlock&);
     void block_did_become_usable(Badge<Heap>, HeapBlock&);
 
+    IntrusiveListNode<CellAllocator> m_list_node;
+    using List = IntrusiveList<&CellAllocator::m_list_node>;
+
 private:
     size_t const m_cell_size;

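Making m_list_node public together with the List alias lets the Heap keep every allocator in an intrusive list: the link lives inside the element itself, so appending never allocates and membership is knowable from the node alone. A rough standalone sketch of the idea follows; ListNode, IntrusiveListSketch, and CellAllocatorSketch are simplified stand-ins, not AK's actual implementation.

#include <iostream>

// The node is a member of the element, so linking requires no allocation.
template<typename T>
struct ListNode {
    T* next { nullptr };
    bool in_list { false };
    bool is_in_list() const { return in_list; }
};

// Singly-linked for brevity; AK's IntrusiveList is doubly-linked.
template<typename T, ListNode<T> T::*node>
struct IntrusiveListSketch {
    T* head { nullptr };
    T* tail { nullptr };

    void append(T& element)
    {
        (element.*node).in_list = true;
        if (!head) {
            head = tail = &element;
        } else {
            (tail->*node).next = &element;
            tail = &element;
        }
    }

    template<typename Callback>
    void for_each(Callback callback)
    {
        for (T* it = head; it; it = (it->*node).next)
            callback(*it);
    }
};

struct CellAllocatorSketch {
    int cell_size { 0 };
    ListNode<CellAllocatorSketch> m_list_node;
    using List = IntrusiveListSketch<CellAllocatorSketch, &CellAllocatorSketch::m_list_node>;
};

int main()
{
    CellAllocatorSketch a { 16 };
    CellAllocatorSketch b { 32 };
    CellAllocatorSketch::List all_allocators;
    all_allocators.append(a);
    all_allocators.append(b);
    all_allocators.for_each([](auto& allocator) { std::cout << allocator.cell_size << '\n'; }); // 16, then 32
}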
Userland/Libraries/LibJS/Heap/Heap.cpp

@@ -50,17 +50,17 @@ Heap::Heap(VM& vm)
 #endif
 
     if constexpr (HeapBlock::min_possible_cell_size <= 16) {
-        m_allocators.append(make<CellAllocator>(16));
+        m_size_based_cell_allocators.append(make<CellAllocator>(16));
     }
     static_assert(HeapBlock::min_possible_cell_size <= 24, "Heap Cell tracking uses too much data!");
-    m_allocators.append(make<CellAllocator>(32));
-    m_allocators.append(make<CellAllocator>(64));
-    m_allocators.append(make<CellAllocator>(96));
-    m_allocators.append(make<CellAllocator>(128));
-    m_allocators.append(make<CellAllocator>(256));
-    m_allocators.append(make<CellAllocator>(512));
-    m_allocators.append(make<CellAllocator>(1024));
-    m_allocators.append(make<CellAllocator>(3072));
+    m_size_based_cell_allocators.append(make<CellAllocator>(32));
+    m_size_based_cell_allocators.append(make<CellAllocator>(64));
+    m_size_based_cell_allocators.append(make<CellAllocator>(96));
+    m_size_based_cell_allocators.append(make<CellAllocator>(128));
+    m_size_based_cell_allocators.append(make<CellAllocator>(256));
+    m_size_based_cell_allocators.append(make<CellAllocator>(512));
+    m_size_based_cell_allocators.append(make<CellAllocator>(1024));
+    m_size_based_cell_allocators.append(make<CellAllocator>(3072));
 }
 
 Heap::~Heap()
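The constructor above builds the size-class table (16, 32, 64, 96, 128, 256, 512, 1024, 3072); a cell is served by the first class at least as large as the request, which is exactly what allocator_for_size() in Heap.h does further down. A tiny standalone illustration of that first-fit mapping, not LibJS code:

#include <cassert>
#include <cstddef>
#include <vector>

// Size classes copied from the Heap constructor above.
static std::vector<std::size_t> const size_classes { 16, 32, 64, 96, 128, 256, 512, 1024, 3072 };

static std::size_t class_for(std::size_t cell_size)
{
    for (auto size : size_classes) {
        if (size >= cell_size)
            return size; // first class that fits wins
    }
    assert(false); // analogous to VERIFY_NOT_REACHED()
    return 0;
}

int main()
{
    assert(class_for(1) == 16);      // tiny cells share the smallest class
    assert(class_for(33) == 64);     // 33 bytes does not fit the 32-byte class
    assert(class_for(3072) == 3072); // largest supported cell size
}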
@@ -517,12 +517,12 @@ void Heap::sweep_dead_cells(bool print_report, Core::ElapsedTimer const& measure
     for (auto* block : empty_blocks) {
         dbgln_if(HEAP_DEBUG, " - HeapBlock empty @ {}: cell_size={}", block, block->cell_size());
-        allocator_for_size(block->cell_size()).block_did_become_empty({}, *block);
+        block->cell_allocator().block_did_become_empty({}, *block);
     }
 
     for (auto* block : full_blocks_that_became_usable) {
         dbgln_if(HEAP_DEBUG, " - HeapBlock usable again @ {}: cell_size={}", block, block->cell_size());
-        allocator_for_size(block->cell_size()).block_did_become_usable({}, *block);
+        block->cell_allocator().block_did_become_usable({}, *block);
     }
 
     if constexpr (HEAP_DEBUG) {

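These two hunks replace the size-based lookup with the new back-pointer, and that is precisely what fixes the bug named in the commit message: a lookup by cell size cannot tell two allocators with the same cell size apart (a non-size-based, type-specific allocator's cells may be exactly as large as one of the size classes), whereas the stored owner is unambiguous. A small standalone sketch of the failure mode, with illustrative names:

#include <cassert>

struct Allocator {
    int cell_size;
};

struct Block {
    Allocator& owner; // the back-pointer this commit adds to HeapBlock
    int cell_size() const { return owner.cell_size; }
};

int main()
{
    Allocator size_based { 64 };
    Allocator type_specific { 64 }; // same cell size, different allocator

    Block block { type_specific };
    assert(block.cell_size() == 64);

    // A size-based lookup would match `size_based` first and pick the
    // wrong owner; the back-pointer returns the true one.
    assert(&block.owner == &type_specific);
}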
Userland/Libraries/LibJS/Heap/Heap.h

@@ -81,6 +81,8 @@ public:
     void did_create_execution_context(Badge<ExecutionContext>, ExecutionContext&);
     void did_destroy_execution_context(Badge<ExecutionContext>, ExecutionContext&);
 
+    void register_cell_allocator(Badge<CellAllocator>, CellAllocator&);
+
     BlockAllocator& block_allocator() { return m_block_allocator; }
 
     void uproot_cell(Cell* cell);
@@ -120,19 +122,19 @@ private:
     ALWAYS_INLINE CellAllocator& allocator_for_size(size_t cell_size)
     {
         // FIXME: Use binary search?
-        for (auto& allocator : m_allocators) {
+        for (auto& allocator : m_size_based_cell_allocators) {
             if (allocator->cell_size() >= cell_size)
                 return *allocator;
         }
-        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_allocators.last()->cell_size());
+        dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_size_based_cell_allocators.last()->cell_size());
         VERIFY_NOT_REACHED();
     }
 
     template<typename Callback>
     void for_each_block(Callback callback)
     {
-        for (auto& allocator : m_allocators) {
-            if (allocator->for_each_block(callback) == IterationDecision::Break)
+        for (auto& allocator : m_all_cell_allocators) {
+            if (allocator.for_each_block(callback) == IterationDecision::Break)
                 return;
         }
     }
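for_each_block() now walks m_all_cell_allocators rather than only the size-based vector, and it still honors the callback's IterationDecision so a caller can stop the walk early. A self-contained sketch of that early-exit protocol; the IterationDecision enum mirrors AK's, everything else is illustrative.

#include <iostream>
#include <vector>

enum class IterationDecision {
    Continue,
    Break,
};

template<typename Callback>
void for_each_value(std::vector<int> const& values, Callback callback)
{
    for (int value : values) {
        // Propagate the early exit, as Heap::for_each_block() does.
        if (callback(value) == IterationDecision::Break)
            return;
    }
}

int main()
{
    for_each_value({ 1, 2, 3, 4 }, [](int value) {
        std::cout << value << '\n';
        return value == 2 ? IterationDecision::Break : IterationDecision::Continue;
    });
    // Prints 1 and 2, then the walk stops.
}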
@@ -143,7 +145,8 @@ private:
     bool m_should_collect_on_every_allocation { false };
 
-    Vector<NonnullOwnPtr<CellAllocator>> m_allocators;
+    Vector<NonnullOwnPtr<CellAllocator>> m_size_based_cell_allocators;
+    CellAllocator::List m_all_cell_allocators;
 
     HandleImpl::List m_handles;
     MarkedVectorBase::List m_marked_vectors;
@@ -195,4 +198,9 @@ inline void Heap::did_destroy_weak_container(Badge<WeakContainer>, WeakContainer
     m_weak_containers.remove(set);
 }
 
+inline void Heap::register_cell_allocator(Badge<CellAllocator>, CellAllocator& allocator)
+{
+    m_all_cell_allocators.append(allocator);
+}
+
 }

Userland/Libraries/LibJS/Heap/HeapBlock.cpp

@@ -18,7 +18,7 @@
 
 namespace JS {
 
-NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, size_t cell_size)
+NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, CellAllocator& cell_allocator, size_t cell_size)
 {
 #ifdef AK_OS_SERENITY
     char name[64];
@@ -27,12 +27,13 @@ NonnullOwnPtr<HeapBlock> HeapBlock::create_with_cell_size(Heap& heap, size_t cel
     char const* name = nullptr;
 #endif
     auto* block = static_cast<HeapBlock*>(heap.block_allocator().allocate_block(name));
-    new (block) HeapBlock(heap, cell_size);
+    new (block) HeapBlock(heap, cell_allocator, cell_size);
     return NonnullOwnPtr<HeapBlock>(NonnullOwnPtr<HeapBlock>::Adopt, *block);
 }
 
-HeapBlock::HeapBlock(Heap& heap, size_t cell_size)
+HeapBlock::HeapBlock(Heap& heap, CellAllocator& cell_allocator, size_t cell_size)
     : HeapBlockBase(heap)
+    , m_cell_allocator(cell_allocator)
     , m_cell_size(cell_size)
 {
     VERIFY(cell_size >= sizeof(FreelistEntry));

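create_with_cell_size() takes raw memory from the block allocator and constructs the HeapBlock inside it with placement new, now threading the owning allocator through to the constructor. A minimal standalone sketch of that placement-new pattern, with std::malloc standing in for the block allocator:

#include <cstdio>
#include <cstdlib>
#include <new>

struct BlockSketch {
    int cell_size;

    explicit BlockSketch(int size)
        : cell_size(size)
    {
    }
};

int main()
{
    // Stand-in for BlockAllocator::allocate_block(): raw, uninitialized memory.
    void* memory = std::malloc(sizeof(BlockSketch));

    // Construct the object in that memory, as create_with_cell_size() does.
    auto* block = new (memory) BlockSketch(64);
    std::printf("%d\n", block->cell_size); // prints 64

    // Placement-new'd objects are destroyed explicitly, then the raw memory freed.
    block->~BlockSketch();
    std::free(memory);
}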
Userland/Libraries/LibJS/Heap/HeapBlock.h

@@ -26,7 +26,7 @@ class HeapBlock : public HeapBlockBase {
 public:
     using HeapBlockBase::block_size;
 
-    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, size_t);
+    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, CellAllocator&, size_t);
 
     size_t cell_size() const { return m_cell_size; }
     size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
@@ -90,8 +90,10 @@ public:
     IntrusiveListNode<HeapBlock> m_list_node;
 
+    CellAllocator& cell_allocator() { return m_cell_allocator; }
+
 private:
-    HeapBlock(Heap&, size_t cell_size);
+    HeapBlock(Heap&, CellAllocator&, size_t cell_size);
 
     bool has_lazy_freelist() const { return m_next_lazy_freelist_index < cell_count(); }
@@ -106,6 +108,7 @@ private:
         return reinterpret_cast<Cell*>(&m_storage[index * cell_size()]);
     }
 
+    CellAllocator& m_cell_allocator;
     size_t m_cell_size { 0 };
     size_t m_next_lazy_freelist_index { 0 };
     GCPtr<FreelistEntry> m_freelist;
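The new m_cell_allocator member is a reference, so it has to be bound in the constructor's initializer list and can never be reseated afterwards, which suits a block that belongs to one allocator for its whole lifetime. A short standalone sketch of that constraint; the names are illustrative.

#include <cassert>

struct AllocatorSketch { };

struct BlockSketch {
    explicit BlockSketch(AllocatorSketch& allocator)
        : m_allocator(allocator) // the only place a reference member can be bound
    {
    }

    AllocatorSketch& allocator() { return m_allocator; }

private:
    AllocatorSketch& m_allocator;
};

int main()
{
    AllocatorSketch owner;
    BlockSketch block { owner };
    assert(&block.allocator() == &owner); // the back-pointer always names its owner
}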