// Copyright 2020 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
#pragma once
|
|
|
|
|
2021-05-28 02:15:23 +02:00
|
|
|
#include <atomic>
#include <limits>
#include <memory>

#include "common/assert.h"
#include "common/common_funcs.h"
#include "common/common_types.h"
|
|
|
|
|
2021-02-13 01:21:12 +01:00
|
|
|
namespace Kernel {
|
2020-04-05 20:41:04 +02:00
|
|
|
|
2021-05-21 03:15:59 +02:00
|
|
|
class KernelCore;
|
2020-04-05 20:41:04 +02:00
|
|
|
|
2021-05-28 02:15:23 +02:00
|
|
|
namespace impl {
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
class KSlabHeapImpl {
|
2022-02-02 18:59:36 +01:00
|
|
|
YUZU_NON_COPYABLE(KSlabHeapImpl);
|
|
|
|
YUZU_NON_MOVEABLE(KSlabHeapImpl);
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
public:
|
2021-05-28 02:15:23 +02:00
|
|
|
struct Node {
|
|
|
|
Node* next{};
|
|
|
|
};
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
public:
|
2021-05-28 02:15:23 +02:00
|
|
|
constexpr KSlabHeapImpl() = default;
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
void Initialize() {
|
|
|
|
ASSERT(m_head == nullptr);
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
Node* GetHead() const {
|
2022-03-12 01:29:53 +01:00
|
|
|
return m_head;
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
void* Allocate() {
|
2022-03-12 01:29:53 +01:00
|
|
|
Node* ret = m_head.load();
|
2021-05-28 02:15:23 +02:00
|
|
|
|
|
|
|
do {
|
|
|
|
if (ret == nullptr) {
|
|
|
|
break;
|
|
|
|
}
|
2022-03-12 01:29:53 +01:00
|
|
|
} while (!m_head.compare_exchange_weak(ret, ret->next));
|
2021-05-28 02:15:23 +02:00
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
void Free(void* obj) {
|
|
|
|
Node* node = static_cast<Node*>(obj);
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
Node* cur_head = m_head.load();
|
2021-05-28 02:15:23 +02:00
|
|
|
do {
|
|
|
|
node->next = cur_head;
|
2022-03-12 01:29:53 +01:00
|
|
|
} while (!m_head.compare_exchange_weak(cur_head, node));
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
2022-03-12 01:29:53 +01:00
|
|
|
std::atomic<Node*> m_head{};
|
2021-05-28 02:15:23 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
} // namespace impl
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
template <bool SupportDynamicExpansion>
|
|
|
|
class KSlabHeapBase : protected impl::KSlabHeapImpl {
|
2022-02-02 18:59:36 +01:00
|
|
|
YUZU_NON_COPYABLE(KSlabHeapBase);
|
|
|
|
YUZU_NON_MOVEABLE(KSlabHeapBase);
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
private:
|
|
|
|
size_t m_obj_size{};
|
|
|
|
uintptr_t m_peak{};
|
|
|
|
uintptr_t m_start{};
|
|
|
|
uintptr_t m_end{};
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
private:
|
|
|
|
void UpdatePeakImpl(uintptr_t obj) {
|
|
|
|
static_assert(std::atomic_ref<uintptr_t>::is_always_lock_free);
|
|
|
|
std::atomic_ref<uintptr_t> peak_ref(m_peak);
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
const uintptr_t alloc_peak = obj + this->GetObjectSize();
|
|
|
|
uintptr_t cur_peak = m_peak;
|
|
|
|
do {
|
|
|
|
if (alloc_peak <= cur_peak) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
} while (!peak_ref.compare_exchange_strong(cur_peak, alloc_peak));
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
public:
|
|
|
|
constexpr KSlabHeapBase() = default;
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
bool Contains(uintptr_t address) const {
|
|
|
|
return m_start <= address && address < m_end;
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
void Initialize(size_t obj_size, void* memory, size_t memory_size) {
|
|
|
|
// Ensure we don't initialize a slab using null memory.
|
|
|
|
ASSERT(memory != nullptr);
|
|
|
|
|
|
|
|
// Set our object size.
|
|
|
|
m_obj_size = obj_size;
|
|
|
|
|
|
|
|
// Initialize the base allocator.
|
|
|
|
KSlabHeapImpl::Initialize();
|
|
|
|
|
|
|
|
// Set our tracking variables.
|
|
|
|
const size_t num_obj = (memory_size / obj_size);
|
|
|
|
m_start = reinterpret_cast<uintptr_t>(memory);
|
|
|
|
m_end = m_start + num_obj * obj_size;
|
|
|
|
m_peak = m_start;
|
|
|
|
|
|
|
|
// Free the objects.
|
|
|
|
u8* cur = reinterpret_cast<u8*>(m_end);
|
|
|
|
|
|
|
|
for (size_t i = 0; i < num_obj; i++) {
|
|
|
|
cur -= obj_size;
|
|
|
|
KSlabHeapImpl::Free(cur);
|
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
size_t GetSlabHeapSize() const {
|
|
|
|
return (m_end - m_start) / this->GetObjectSize();
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
size_t GetObjectSize() const {
|
|
|
|
return m_obj_size;
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
void* Allocate() {
|
|
|
|
void* obj = KSlabHeapImpl::Allocate();
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
return obj;
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
void Free(void* obj) {
|
|
|
|
// Don't allow freeing an object that wasn't allocated from this heap.
|
|
|
|
const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj));
|
|
|
|
ASSERT(contained);
|
|
|
|
KSlabHeapImpl::Free(obj);
|
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
size_t GetObjectIndex(const void* obj) const {
|
|
|
|
if constexpr (SupportDynamicExpansion) {
|
|
|
|
if (!this->Contains(reinterpret_cast<uintptr_t>(obj))) {
|
|
|
|
return std::numeric_limits<size_t>::max();
|
|
|
|
}
|
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize();
|
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
size_t GetPeakIndex() const {
|
|
|
|
return this->GetObjectIndex(reinterpret_cast<const void*>(m_peak));
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
uintptr_t GetSlabHeapAddress() const {
|
|
|
|
return m_start;
|
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
size_t GetNumRemaining() const {
|
|
|
|
// Only calculate the number of remaining objects under debug configuration.
|
|
|
|
return 0;
|
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
};
|
2020-04-05 20:41:04 +02:00
|
|
|
|
|
|
|
template <typename T>
|
2022-03-12 01:29:53 +01:00
|
|
|
class KSlabHeap final : public KSlabHeapBase<false> {
|
|
|
|
private:
|
|
|
|
using BaseHeap = KSlabHeapBase<false>;
|
2020-04-05 20:41:04 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
public:
|
|
|
|
constexpr KSlabHeap() = default;
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
void Initialize(void* memory, size_t memory_size) {
|
|
|
|
BaseHeap::Initialize(sizeof(T), memory, memory_size);
|
2020-04-05 20:41:04 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
T* Allocate() {
|
2022-03-12 01:29:53 +01:00
|
|
|
T* obj = static_cast<T*>(BaseHeap::Allocate());
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
if (obj != nullptr) [[likely]] {
|
|
|
|
std::construct_at(obj);
|
|
|
|
}
|
|
|
|
return obj;
|
2020-04-05 20:41:04 +02:00
|
|
|
}
|
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
T* Allocate(KernelCore& kernel) {
|
|
|
|
T* obj = static_cast<T*>(BaseHeap::Allocate());
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
if (obj != nullptr) [[likely]] {
|
|
|
|
std::construct_at(obj, kernel);
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
2022-03-12 01:29:53 +01:00
|
|
|
return obj;
|
2021-04-04 04:11:46 +02:00
|
|
|
}
|
|
|
|
|
2020-04-05 20:41:04 +02:00
|
|
|
void Free(T* obj) {
|
2022-03-12 01:29:53 +01:00
|
|
|
BaseHeap::Free(obj);
|
2020-04-05 20:41:04 +02:00
|
|
|
}
|
2021-05-28 02:15:23 +02:00
|
|
|
|
2022-03-12 01:29:53 +01:00
|
|
|
size_t GetObjectIndex(const T* obj) const {
|
|
|
|
return BaseHeap::GetObjectIndex(obj);
|
2021-05-28 02:15:23 +02:00
|
|
|
}
|
2020-04-05 20:41:04 +02:00
|
|
|
};
|
|
|
|
|
2021-02-13 01:21:12 +01:00
|
|
|
} // namespace Kernel
|