Mirror of https://github.com/LadybirdBrowser/ladybird.git, synced 2024-11-11 01:06:01 +03:00
5f3773b715
This is required to make SSE instructions work when building with Clang. Apparently Clang uses SSE instructions where GCC didn't, so we didn't run into this problem before.
80 lines · 2.2 KiB · C
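For context on the commit message above: aligned SSE loads and stores require 16-byte-aligned addresses, and Clang may emit them for ordinary struct and memory copies. Below is a minimal, standalone sketch (not taken from the Ladybird sources; the buffer and function names are illustrative) of the kind of access that only works with the alignment this header enforces on chunk storage:

#include <emmintrin.h> // SSE2 intrinsics

// chunk_storage stands in for a malloc chunk. Without alignas(16) (or the
// [[gnu::aligned(16)]] attribute used on ChunkedBlock::m_slot below), the
// aligned load would be undefined behavior and typically crashes.
alignas(16) static unsigned char chunk_storage[64];

__m128i load_first_16_bytes()
{
    // _mm_load_si128 requires a 16-byte-aligned pointer;
    // _mm_loadu_si128 is the unaligned alternative.
    return _mm_load_si128(reinterpret_cast<__m128i const*>(chunk_storage));
}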
/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/IntrusiveList.h>
#include <AK/Types.h>

// Magic values stored in CommonHeader::m_magic to identify the block type.
#define MAGIC_PAGE_HEADER 0x42657274     // 'Bert'
#define MAGIC_BIGALLOC_HEADER 0x42697267 // 'Birg'
// Patterns used to scrub newly allocated and freed memory, to help catch
// use of uninitialized or already-freed data.
#define MALLOC_SCRUB_BYTE 0xdc
#define FREE_SCRUB_BYTE 0xed

// Rounds x up to the next multiple of PAGE_SIZE.
#define PAGE_ROUND_UP(x) ((((size_t)(x)) + PAGE_SIZE - 1) & (~(PAGE_SIZE - 1)))

// Chunk size classes served by ChunkedBlock, terminated by a 0 sentinel.
// Every class is a multiple of 16 so chunks stay 16-byte aligned.
static constexpr unsigned short size_classes[] = { 16, 32, 64, 128, 256, 496, 1008, 2032, 4080, 8176, 16368, 32752, 0 };
static constexpr size_t num_size_classes = (sizeof(size_classes) / sizeof(unsigned short)) - 1;

consteval bool check_size_classes_alignment()
{
    for (size_t i = 0; i < num_size_classes; i++) {
        if ((size_classes[i] % 16) != 0)
            return false;
    }
    return true;
}
static_assert(check_size_classes_alignment());

// Header shared by both kinds of heap block.
struct CommonHeader {
    size_t m_magic;
    size_t m_size;
};

// Used for allocations too large for any size class: a single header
// followed directly by the allocation itself.
struct BigAllocationBlock : public CommonHeader {
    BigAllocationBlock(size_t size)
    {
        m_magic = MAGIC_BIGALLOC_HEADER;
        m_size = size;
    }
    unsigned char* m_slot[0];
};

// Intrusive singly-linked list node stored inside each free chunk.
struct FreelistEntry {
    FreelistEntry* next;
};

// A 64 KiB block carved into fixed-size chunks of m_size bytes each.
struct ChunkedBlock : public CommonHeader {

    static constexpr size_t block_size = 64 * KiB;
    // Mask that rounds an address down to a block_size boundary.
    static constexpr size_t block_mask = ~(block_size - 1);

    ChunkedBlock(size_t bytes_per_chunk)
    {
        m_magic = MAGIC_PAGE_HEADER;
        m_size = bytes_per_chunk;
        m_free_chunks = chunk_capacity();
    }

    IntrusiveListNode<ChunkedBlock> m_list_node;
    // Chunks at or past this index have never been handed out yet;
    // the freelist is populated lazily.
    size_t m_next_lazy_freelist_index { 0 };
    FreelistEntry* m_freelist { nullptr };
    size_t m_free_chunks { 0 };
    // Chunk storage begins here. The 16-byte alignment is required because
    // Clang may emit SSE instructions that assume it (see the commit message above).
    [[gnu::aligned(16)]] unsigned char m_slot[0];

    void* chunk(size_t index)
    {
        return &m_slot[index * m_size];
    }
    bool is_full() const { return m_free_chunks == 0; }
    size_t bytes_per_chunk() const { return m_size; }
    size_t free_chunks() const { return m_free_chunks; }
    size_t used_chunks() const { return chunk_capacity() - m_free_chunks; }
    size_t chunk_capacity() const { return (block_size - sizeof(ChunkedBlock)) / m_size; }

    using List = IntrusiveList<ChunkedBlock, RawPtr<ChunkedBlock>, &ChunkedBlock::m_list_node>;
};
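A minimal usage sketch, assuming this header is included. size_class_for and chunks_per_block are hypothetical helpers for illustration only, not part of the allocator itself: the first maps a requested size to the smallest size class that fits, the second computes how many chunks a 64 KiB ChunkedBlock of a given class can hold.

// Hypothetical helper: smallest size class that can hold `size`, or 0 if the
// request is too large for any ChunkedBlock and must use a BigAllocationBlock.
static inline size_t size_class_for(size_t size)
{
    for (size_t i = 0; i < num_size_classes; ++i) {
        if (size <= size_classes[i])
            return size_classes[i];
    }
    return 0;
}

// Hypothetical helper: chunk capacity of a block for a given class, computed
// without instantiating a ChunkedBlock (block_size is a static constexpr).
static constexpr size_t chunks_per_block(size_t bytes_per_chunk)
{
    return (ChunkedBlock::block_size - sizeof(ChunkedBlock)) / bytes_per_chunk;
}

// Even the largest class leaves room for at least one chunk per block.
static_assert(chunks_per_block(32752) >= 1);

Note that the larger classes sit just below powers of two (496 = 512 - 16, 1008 = 1024 - 16, and so on), so each remains a multiple of 16 and passes the consteval alignment check in the header.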