2020-01-18 11:38:21 +03:00
|
|
|
/*
|
|
|
|
* Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
|
|
|
|
* All rights reserved.
|
|
|
|
*
|
|
|
|
* Redistribution and use in source and binary forms, with or without
|
|
|
|
* modification, are permitted provided that the following conditions are met:
|
|
|
|
*
|
|
|
|
* 1. Redistributions of source code must retain the above copyright notice, this
|
|
|
|
* list of conditions and the following disclaimer.
|
|
|
|
*
|
|
|
|
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
|
|
* this list of conditions and the following disclaimer in the documentation
|
|
|
|
* and/or other materials provided with the distribution.
|
|
|
|
*
|
|
|
|
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
|
|
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
|
|
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
|
|
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
|
|
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
|
|
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
|
|
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
|
|
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
|
|
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
|
|
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
*/
|
|
|
|
|
2018-10-10 12:53:07 +03:00
|
|
|
#pragma once
|
|
|
|
|
2019-07-04 08:05:58 +03:00
|
|
|
#include <AK/NonnullRefPtr.h>
|
2021-01-11 02:29:28 +03:00
|
|
|
#include <AK/OwnPtr.h>
|
2019-06-21 19:58:45 +03:00
|
|
|
#include <AK/RefCounted.h>
|
2019-07-04 08:05:58 +03:00
|
|
|
#include <AK/RefPtr.h>
|
2020-07-27 15:15:37 +03:00
|
|
|
#include <AK/Span.h>
|
2019-09-11 20:35:14 +03:00
|
|
|
#include <AK/StdLibExtras.h>
|
|
|
|
#include <AK/Types.h>
|
2019-03-16 15:12:13 +03:00
|
|
|
#include <AK/kmalloc.h>
|
2018-10-10 12:53:07 +03:00
|
|
|
|
|
|
|
namespace AK {
|
|
|
|
|
2019-06-21 16:29:31 +03:00
|
|
|
// Ref-counted heap storage backing ByteBuffer. Owns a single kmalloc'd
// allocation (m_data) of m_size bytes, freed on destruction via clear().
// Not directly constructible -- use the static factory functions.
class ByteBufferImpl : public RefCounted<ByteBufferImpl> {
public:
    // Factory functions; definitions appear out-of-line further down.
    static NonnullRefPtr<ByteBufferImpl> create_uninitialized(size_t size);
    static NonnullRefPtr<ByteBufferImpl> create_zeroed(size_t);
    static NonnullRefPtr<ByteBufferImpl> copy(const void*, size_t);

    ByteBufferImpl() = delete;
    ~ByteBufferImpl() { clear(); }

    // Frees the allocation, if any. NOTE: m_size is deliberately left as-is,
    // matching the destructor-only usage of this function.
    void clear()
    {
        if (m_data) {
            kfree(m_data);
            m_data = nullptr;
        }
    }

    // Bounds-checked element access.
    u8& operator[](size_t index)
    {
        ASSERT(index < m_size);
        return m_data[index];
    }
    const u8& operator[](size_t index) const
    {
        ASSERT(index < m_size);
        return m_data[index];
    }

    bool is_empty() const { return m_size == 0; }
    size_t size() const { return m_size; }

    u8* data() { return m_data; }
    const u8* data() const { return m_data; }

    Bytes bytes() { return { m_data, m_size }; }
    ReadonlyBytes bytes() const { return { m_data, m_size }; }

    Span<u8> span() { return { m_data, m_size }; }
    Span<const u8> span() const { return { m_data, m_size }; }

    // Unchecked pointer arithmetic into the buffer.
    u8* offset_pointer(int offset) { return m_data + offset; }
    const u8* offset_pointer(int offset) const { return m_data + offset; }

    // One-past-the-end pointer.
    void* end_pointer() { return m_data + m_size; }
    const void* end_pointer() const { return m_data + m_size; }

    // NOTE: trim() does not reallocate; it only shrinks the visible size.
    void trim(size_t size)
    {
        ASSERT(size <= m_size);
        m_size = size;
    }

    // Reallocates to `size` bytes, preserving contents (defined out-of-line).
    void grow(size_t size);

private:
    explicit ByteBufferImpl(size_t);
    ByteBufferImpl(const void*, size_t);

    u8* m_data { nullptr };
    size_t m_size { 0 };
};
|
|
|
|
|
2018-10-10 12:53:07 +03:00
|
|
|
class ByteBuffer {
|
|
|
|
public:
|
2021-01-11 02:29:28 +03:00
|
|
|
ByteBuffer() = default;
|
2018-10-10 12:53:07 +03:00
|
|
|
ByteBuffer(const ByteBuffer& other)
|
2019-07-11 16:45:11 +03:00
|
|
|
: m_impl(other.m_impl)
|
2018-10-10 12:53:07 +03:00
|
|
|
{
|
|
|
|
}
|
|
|
|
ByteBuffer(ByteBuffer&& other)
|
2018-10-16 13:10:01 +03:00
|
|
|
: m_impl(move(other.m_impl))
|
2018-10-10 12:53:07 +03:00
|
|
|
{
|
|
|
|
}
|
|
|
|
ByteBuffer& operator=(ByteBuffer&& other)
|
|
|
|
{
|
|
|
|
if (this != &other)
|
2018-10-16 13:10:01 +03:00
|
|
|
m_impl = move(other.m_impl);
|
2018-10-10 12:53:07 +03:00
|
|
|
return *this;
|
|
|
|
}
|
2018-10-27 01:14:24 +03:00
|
|
|
ByteBuffer& operator=(const ByteBuffer& other)
|
|
|
|
{
|
2019-07-11 16:45:11 +03:00
|
|
|
if (this != &other)
|
|
|
|
m_impl = other.m_impl;
|
2018-10-27 01:14:24 +03:00
|
|
|
return *this;
|
|
|
|
}
|
2018-10-10 12:53:07 +03:00
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
static ByteBuffer create_uninitialized(size_t size) { return ByteBuffer(ByteBufferImpl::create_uninitialized(size)); }
|
|
|
|
static ByteBuffer create_zeroed(size_t size) { return ByteBuffer(ByteBufferImpl::create_zeroed(size)); }
|
|
|
|
static ByteBuffer copy(const void* data, size_t size) { return ByteBuffer(ByteBufferImpl::copy(data, size)); }
|
2020-12-25 18:21:59 +03:00
|
|
|
static ByteBuffer copy(ReadonlyBytes bytes) { return ByteBuffer(ByteBufferImpl::copy(bytes.data(), bytes.size())); }
|
2018-10-10 12:53:07 +03:00
|
|
|
|
|
|
|
~ByteBuffer() { clear(); }
|
|
|
|
void clear() { m_impl = nullptr; }
|
|
|
|
|
2018-12-21 04:10:45 +03:00
|
|
|
operator bool() const { return !is_null(); }
|
|
|
|
bool operator!() const { return is_null(); }
|
|
|
|
bool is_null() const { return m_impl == nullptr; }
|
2018-10-10 12:53:07 +03:00
|
|
|
|
2020-08-22 17:10:19 +03:00
|
|
|
// Disable default implementations that would use surprising integer promotion.
|
|
|
|
bool operator==(const ByteBuffer& other) const;
|
|
|
|
bool operator!=(const ByteBuffer& other) const { return !(*this == other); }
|
|
|
|
bool operator<=(const ByteBuffer& other) const = delete;
|
|
|
|
bool operator>=(const ByteBuffer& other) const = delete;
|
|
|
|
bool operator<(const ByteBuffer& other) const = delete;
|
|
|
|
bool operator>(const ByteBuffer& other) const = delete;
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
u8& operator[](size_t i)
|
2019-05-28 12:53:16 +03:00
|
|
|
{
|
|
|
|
ASSERT(m_impl);
|
|
|
|
return (*m_impl)[i];
|
|
|
|
}
|
2020-02-20 14:54:15 +03:00
|
|
|
u8 operator[](size_t i) const
|
2019-05-28 12:53:16 +03:00
|
|
|
{
|
|
|
|
ASSERT(m_impl);
|
|
|
|
return (*m_impl)[i];
|
|
|
|
}
|
2018-12-21 04:10:45 +03:00
|
|
|
bool is_empty() const { return !m_impl || m_impl->is_empty(); }
|
2020-02-20 14:54:15 +03:00
|
|
|
size_t size() const { return m_impl ? m_impl->size() : 0; }
|
2018-10-10 12:53:07 +03:00
|
|
|
|
2019-09-30 09:57:01 +03:00
|
|
|
u8* data() { return m_impl ? m_impl->data() : nullptr; }
|
|
|
|
const u8* data() const { return m_impl ? m_impl->data() : nullptr; }
|
2018-10-10 12:53:07 +03:00
|
|
|
|
2021-01-11 02:29:28 +03:00
|
|
|
Bytes bytes()
|
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->bytes();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
|
|
|
ReadonlyBytes bytes() const
|
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->bytes();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
2020-07-27 15:15:37 +03:00
|
|
|
|
2021-01-11 02:29:28 +03:00
|
|
|
Span<u8> span()
|
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->span();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
|
|
|
Span<const u8> span() const
|
|
|
|
{
|
|
|
|
if (m_impl) {
|
|
|
|
return m_impl->span();
|
|
|
|
}
|
|
|
|
return {};
|
|
|
|
}
|
2020-12-31 02:35:15 +03:00
|
|
|
|
2019-07-03 22:17:35 +03:00
|
|
|
u8* offset_pointer(int offset) { return m_impl ? m_impl->offset_pointer(offset) : nullptr; }
|
|
|
|
const u8* offset_pointer(int offset) const { return m_impl ? m_impl->offset_pointer(offset) : nullptr; }
|
2018-10-10 12:53:07 +03:00
|
|
|
|
2019-03-18 16:38:30 +03:00
|
|
|
void* end_pointer() { return m_impl ? m_impl->end_pointer() : nullptr; }
|
2018-12-21 04:10:45 +03:00
|
|
|
const void* end_pointer() const { return m_impl ? m_impl->end_pointer() : nullptr; }
|
2018-10-10 12:53:07 +03:00
|
|
|
|
2019-04-25 23:05:53 +03:00
|
|
|
ByteBuffer isolated_copy() const
|
|
|
|
{
|
|
|
|
if (!m_impl)
|
2019-05-28 12:53:16 +03:00
|
|
|
return {};
|
2019-09-30 09:57:01 +03:00
|
|
|
return copy(m_impl->data(), m_impl->size());
|
2019-04-25 23:05:53 +03:00
|
|
|
}
|
|
|
|
|
2018-10-10 12:53:07 +03:00
|
|
|
// NOTE: trim() does not reallocate.
|
2020-02-20 14:54:15 +03:00
|
|
|
void trim(size_t size)
|
2018-10-10 12:53:07 +03:00
|
|
|
{
|
|
|
|
if (m_impl)
|
|
|
|
m_impl->trim(size);
|
|
|
|
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
ByteBuffer slice(size_t offset, size_t size) const
|
2018-10-10 12:53:07 +03:00
|
|
|
{
|
2018-12-21 04:10:45 +03:00
|
|
|
if (is_null())
|
2019-05-28 12:53:16 +03:00
|
|
|
return {};
|
2020-04-27 20:23:39 +03:00
|
|
|
|
|
|
|
// I cannot hand you a slice I don't have
|
|
|
|
ASSERT(offset + size <= this->size());
|
|
|
|
|
2018-12-21 04:10:45 +03:00
|
|
|
return copy(offset_pointer(offset), size);
|
2018-10-10 12:53:07 +03:00
|
|
|
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
void grow(size_t size)
|
2019-01-18 05:27:51 +03:00
|
|
|
{
|
|
|
|
if (!m_impl)
|
2019-03-16 15:12:13 +03:00
|
|
|
m_impl = ByteBufferImpl::create_uninitialized(size);
|
2019-01-18 05:27:51 +03:00
|
|
|
else
|
|
|
|
m_impl->grow(size);
|
|
|
|
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
void append(const void* data, size_t data_size)
|
2019-03-18 16:38:30 +03:00
|
|
|
{
|
2020-04-08 18:04:37 +03:00
|
|
|
if (data_size == 0)
|
|
|
|
return;
|
|
|
|
ASSERT(data != nullptr);
|
2019-03-18 16:38:30 +03:00
|
|
|
int old_size = size();
|
|
|
|
grow(size() + data_size);
|
2020-03-08 13:57:24 +03:00
|
|
|
__builtin_memcpy(this->data() + old_size, data, data_size);
|
2019-03-18 16:38:30 +03:00
|
|
|
}
|
|
|
|
|
2020-11-06 11:09:51 +03:00
|
|
|
void operator+=(const ByteBuffer& other)
|
|
|
|
{
|
|
|
|
append(other.data(), other.size());
|
|
|
|
}
|
|
|
|
|
2020-04-03 04:52:31 +03:00
|
|
|
void overwrite(size_t offset, const void* data, size_t data_size)
|
|
|
|
{
|
|
|
|
// make sure we're not told to write past the end
|
2020-04-27 20:23:39 +03:00
|
|
|
ASSERT(offset + data_size <= size());
|
2020-04-03 04:52:31 +03:00
|
|
|
__builtin_memcpy(this->data() + offset, data, data_size);
|
|
|
|
}
|
|
|
|
|
2020-08-15 19:38:24 +03:00
|
|
|
operator Bytes() { return bytes(); }
|
|
|
|
operator ReadonlyBytes() const { return bytes(); }
|
|
|
|
|
2018-10-10 12:53:07 +03:00
|
|
|
private:
|
2019-06-21 19:37:47 +03:00
|
|
|
explicit ByteBuffer(RefPtr<ByteBufferImpl>&& impl)
|
2018-10-16 13:10:01 +03:00
|
|
|
: m_impl(move(impl))
|
2018-10-10 12:53:07 +03:00
|
|
|
{
|
|
|
|
}
|
|
|
|
|
2019-06-21 19:37:47 +03:00
|
|
|
RefPtr<ByteBufferImpl> m_impl;
|
2018-10-10 12:53:07 +03:00
|
|
|
};
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
// Allocates `size` uninitialized bytes; a zero-size buffer stays unallocated.
inline ByteBufferImpl::ByteBufferImpl(size_t size)
    : m_size(size)
{
    if (size == 0)
        return;
    m_data = static_cast<u8*>(kmalloc(size));
}
|
|
|
|
|
2020-12-19 20:23:34 +03:00
|
|
|
// Allocates `size` bytes and copies them from `data`; a zero-size buffer
// stays unallocated and `data` is not dereferenced.
inline ByteBufferImpl::ByteBufferImpl(const void* data, size_t size)
    : m_size(size)
{
    if (size == 0)
        return;
    m_data = static_cast<u8*>(kmalloc(size));
    __builtin_memcpy(m_data, data, size);
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
// Reallocates the buffer to `size` bytes, copying the old contents over.
// Growing must strictly increase the size; since that guarantees size >= 1,
// the former `if (size == 0)` shrink-to-empty branch was unreachable dead
// code and has been removed.
inline void ByteBufferImpl::grow(size_t size)
{
    ASSERT(size > m_size);
    u8* new_data = static_cast<u8*>(kmalloc(size));
    // When the buffer was empty, m_size is 0 and this copies nothing.
    __builtin_memcpy(new_data, m_data, m_size);
    u8* old_data = m_data;
    m_data = new_data;
    m_size = size;
    if (old_data)
        kfree(old_data);
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
// Creates a buffer of `size` bytes with indeterminate contents.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_uninitialized(size_t size)
{
    auto* impl = new ByteBufferImpl(size);
    return ::adopt(*impl);
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
// Creates a buffer of `size` bytes, all set to zero.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::create_zeroed(size_t size)
{
    auto buffer = create_uninitialized(size);
    if (size != 0)
        __builtin_memset(buffer->data(), 0, size);
    return buffer;
}
|
|
|
|
|
2020-02-20 14:54:15 +03:00
|
|
|
// Creates a buffer holding a copy of the `size` bytes at `data`.
inline NonnullRefPtr<ByteBufferImpl> ByteBufferImpl::copy(const void* data, size_t size)
{
    auto* impl = new ByteBufferImpl(data, size);
    return ::adopt(*impl);
}
|
|
|
|
|
2019-09-11 20:35:14 +03:00
|
|
|
// Writes the raw bytes of `value` to `stream` (no formatting or hex dump).
// Uses reinterpret_cast instead of the old C-style cast for the u8* -> char*
// pointer conversion.
inline const LogStream& operator<<(const LogStream& stream, const ByteBuffer& value)
{
    stream.write(reinterpret_cast<const char*>(value.data()), value.size());
    return stream;
}
|
|
|
|
|
2018-10-10 12:53:07 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
using AK::ByteBuffer;
|