2020-01-18 11:38:21 +03:00
|
|
|
/*
|
|
|
|
* Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
|
|
|
|
* All rights reserved.
|
|
|
|
*
|
|
|
|
* Redistribution and use in source and binary forms, with or without
|
|
|
|
* modification, are permitted provided that the following conditions are met:
|
|
|
|
*
|
|
|
|
* 1. Redistributions of source code must retain the above copyright notice, this
|
|
|
|
* list of conditions and the following disclaimer.
|
|
|
|
*
|
|
|
|
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
|
|
* this list of conditions and the following disclaimer in the documentation
|
|
|
|
* and/or other materials provided with the distribution.
|
|
|
|
*
|
|
|
|
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
|
|
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
|
|
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
|
|
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
|
|
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
|
|
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
|
|
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
|
|
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
|
|
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
|
|
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
*/
|
|
|
|
|
2020-01-19 15:18:27 +03:00
|
|
|
#include <AK/BinarySearch.h>
|
2019-06-07 12:43:58 +03:00
|
|
|
#include <AK/QuickSort.h>
|
2020-01-18 01:05:37 +03:00
|
|
|
#include <Kernel/Random.h>
|
2020-01-19 15:18:27 +03:00
|
|
|
#include <Kernel/Thread.h>
|
2020-02-09 17:47:15 +03:00
|
|
|
#include <Kernel/VM/RangeAllocator.h>
|
2019-05-17 04:40:15 +03:00
|
|
|
|
2019-05-17 05:02:29 +03:00
|
|
|
//#define VRA_DEBUG
|
2019-09-30 18:21:45 +03:00
|
|
|
#define VM_GUARD_PAGES
|
2019-05-17 05:02:29 +03:00
|
|
|
|
2020-02-16 03:27:42 +03:00
|
|
|
namespace Kernel {
|
|
|
|
|
2020-01-18 01:05:37 +03:00
|
|
|
// Default construction: no total range and no available ranges until
// initialize_with_range() or initialize_from_parent() is called.
RangeAllocator::RangeAllocator() = default;
|
|
|
|
|
|
|
|
// Set up this allocator to manage the virtual address range [base, base+size).
// Initially the entire range is a single free extent.
void RangeAllocator::initialize_with_range(VirtualAddress base, size_t size)
{
    Range whole_range(base, size);
    m_total_range = whole_range;
    m_available_ranges.append(whole_range);
#ifdef VRA_DEBUG
    dump();
#endif
}
|
|
|
|
|
2020-01-18 01:05:37 +03:00
|
|
|
void RangeAllocator::initialize_from_parent(const RangeAllocator& parent_allocator)
|
2019-05-22 14:24:28 +03:00
|
|
|
{
|
2020-01-19 15:18:27 +03:00
|
|
|
m_total_range = parent_allocator.m_total_range;
|
2020-01-18 01:05:37 +03:00
|
|
|
m_available_ranges = parent_allocator.m_available_ranges;
|
2019-05-22 14:24:28 +03:00
|
|
|
}
|
|
|
|
|
2019-05-17 04:40:15 +03:00
|
|
|
// Nothing to release explicitly; the Vector of free ranges cleans itself up.
RangeAllocator::~RangeAllocator() = default;
|
|
|
|
|
|
|
|
void RangeAllocator::dump() const
|
|
|
|
{
|
|
|
|
dbgprintf("RangeAllocator{%p}\n", this);
|
|
|
|
for (auto& range : m_available_ranges) {
|
|
|
|
dbgprintf(" %x -> %x\n", range.base().get(), range.end().get() - 1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Remove `taken` (assumed to lie within *this) from this range and return
// what is left: zero pieces if `taken` covers the whole range, otherwise
// the sub-range before it and/or the sub-range after it.
Vector<Range, 2> Range::carve(const Range& taken)
{
    if (taken == *this)
        return {};

    Vector<Range, 2> leftovers;
    // Piece preceding the carved-out region, if any.
    if (base() < taken.base())
        leftovers.append({ base(), taken.base().get() - base().get() });
    // Piece following the carved-out region, if any.
    if (end() > taken.end())
        leftovers.append({ taken.end(), end().get() - taken.end().get() });
#ifdef VRA_DEBUG
    dbgprintf("VRA: carve: take %x-%x from %x-%x\n",
        taken.base().get(), taken.end().get() - 1,
        base().get(), end().get() - 1);
    for (auto& piece : leftovers)
        dbgprintf("        %x-%x\n", piece.base().get(), piece.end().get() - 1);
#endif
    return leftovers;
}
|
|
|
|
|
|
|
|
void RangeAllocator::carve_at_index(int index, const Range& range)
|
|
|
|
{
|
|
|
|
auto remaining_parts = m_available_ranges[index].carve(range);
|
|
|
|
ASSERT(remaining_parts.size() >= 1);
|
|
|
|
m_available_ranges[index] = remaining_parts[0];
|
|
|
|
if (remaining_parts.size() == 2)
|
|
|
|
m_available_ranges.insert(index + 1, move(remaining_parts[1]));
|
|
|
|
}
|
|
|
|
|
2020-02-16 14:55:56 +03:00
|
|
|
// Allocate a range of `size` bytes at any free address, aligned to `alignment`.
// Returns an empty (invalid) Range on failure or when size is 0.
// NOTE(review): with VM_GUARD_PAGES, candidate ranges are sized to include a
// guard page on each side, but only the `size`-byte allocated range is carved
// out of the free list — the padding appears to remain allocatable; confirm
// this is the intended guard-page semantics.
Range RangeAllocator::allocate_anywhere(size_t size, size_t alignment)
{
    if (!size)
        return {};

#ifdef VM_GUARD_PAGES
    // NOTE: We pad VM allocations with a guard page on each side.
    size_t effective_size = size + PAGE_SIZE * 2;
    size_t offset_from_effective_base = PAGE_SIZE;
#else
    size_t effective_size = size;
    size_t offset_from_effective_base = 0;
#endif

    // First-fit scan over the (sorted) free list.
    for (int i = 0; i < m_available_ranges.size(); ++i) {
        auto& available_range = m_available_ranges[i];
        // FIXME: This check is probably excluding some valid candidates when using a large alignment.
        if (available_range.size() < (effective_size + alignment))
            continue;

        // Skip past the leading guard page, then round up to the requested alignment.
        // NOTE(review): assumes `alignment` is a power of two — confirm at callers.
        uintptr_t initial_base = available_range.base().offset(offset_from_effective_base).get();
        uintptr_t aligned_base = round_up_to_power_of_two(initial_base, alignment);

        Range allocated_range(VirtualAddress(aligned_base), size);
        if (available_range == allocated_range) {
            // Perfect fit: the whole free range is consumed, no carving needed.
#ifdef VRA_DEBUG
            dbgprintf("VRA: Allocated perfect-fit anywhere(%zu, %zu): %x\n", size, alignment, allocated_range.base().get());
#endif
            m_available_ranges.remove(i);
            return allocated_range;
        }
        // Partial fit: split the free range around the allocation.
        carve_at_index(i, allocated_range);
#ifdef VRA_DEBUG
        dbgprintf("VRA: Allocated anywhere(%zu, %zu): %x\n", size, alignment, allocated_range.base().get());
        dump();
#endif
        return allocated_range;
    }
    kprintf("VRA: Failed to allocate anywhere: %zu, %zu\n", size, alignment);
    return {};
}
|
|
|
|
|
2019-06-07 13:56:50 +03:00
|
|
|
// Allocate exactly the range [base, base+size), if it is entirely free.
// Returns an empty (invalid) Range on failure or when size is 0.
Range RangeAllocator::allocate_specific(VirtualAddress base, size_t size)
{
    if (!size)
        return {};

    Range allocated_range(base, size);
    // Scan the (sorted) free list for a range that fully contains the request.
    for (int i = 0; i < m_available_ranges.size(); ++i) {
        auto& available_range = m_available_ranges[i];
        if (!available_range.contains(base, size))
            continue;
        if (available_range == allocated_range) {
            // Exact match: consume the whole free range.
            m_available_ranges.remove(i);
            return allocated_range;
        }
        // Split the free range around the requested sub-range.
        carve_at_index(i, allocated_range);
#ifdef VRA_DEBUG
        // Print the allocated base, not available_range: carve_at_index() may
        // insert into m_available_ranges and reallocate its storage, which
        // would leave `available_range` dangling here.
        dbgprintf("VRA: Allocated specific(%u): %x\n", size, allocated_range.base().get());
        dump();
#endif
        return allocated_range;
    }
    kprintf("VRA: Failed to allocate specific range: %x(%u)\n", base.get(), size);
    return {};
}
|
|
|
|
|
|
|
|
// Return `range` to the free list, coalescing with adjacent free ranges
// where possible so the list stays sorted and maximally merged.
void RangeAllocator::deallocate(Range range)
{
    ASSERT(m_total_range.contains(range));
    ASSERT(range.size());
    ASSERT(range.base() < range.end());

#ifdef VRA_DEBUG
    dbgprintf("VRA: Deallocate: %x(%u)\n", range.base().get(), range.size());
    dump();
#endif

    ASSERT(!m_available_ranges.is_empty());

    // Look for an existing free range that ends exactly where `range` begins
    // (the comparator is zero when a.base == b.end, i.e. ranges are adjacent).
    // NOTE(review): the comparator subtracts two address values and returns the
    // difference, presumably narrowed to the comparator's result type — large
    // address differences could truncate/wrap and misorder the search; verify
    // against binary_search's expected comparator contract.
    int nearby_index = 0;
    auto* existing_range = binary_search(
        m_available_ranges.data(), m_available_ranges.size(), range, [](auto& a, auto& b) {
            return a.base().get() - b.end().get();
        },
        &nearby_index);

    int inserted_index = 0;
    if (existing_range) {
        // Merge with the preceding free range by simply growing it.
        existing_range->m_size += range.size();
        inserted_index = nearby_index;
    } else {
        // No adjacent predecessor: insert as a new free range, keeping the
        // list sorted by base address.
        m_available_ranges.insert_before_matching(
            Range(range), [&](auto& entry) {
                return entry.base() >= range.end();
            },
            nearby_index, &inserted_index);
    }

    if (inserted_index < (m_available_ranges.size() - 1)) {
        // We already merged with previous. Try to merge with next.
        auto& inserted_range = m_available_ranges[inserted_index];
        auto& next_range = m_available_ranges[inserted_index + 1];
        if (inserted_range.end() == next_range.base()) {
            // Absorb the next range and drop its now-redundant entry.
            inserted_range.m_size += next_range.size();
            m_available_ranges.remove(inserted_index + 1);
            return;
        }
    }
#ifdef VRA_DEBUG
    dbgprintf("VRA: After deallocate\n");
    dump();
#endif
}
|
2020-02-16 03:27:42 +03:00
|
|
|
|
|
|
|
}
|