author     bunnei <bunneidev@gmail.com>    2020-04-05 20:41:04 +0200
committer  bunnei <bunneidev@gmail.com>    2020-04-17 06:59:29 +0200
commit     d364e7cf09ee8de31610bc606e69ded6bfa94c18 (patch)
tree       ce20a4a2818f6d082c00a4e0602d1fbb98deb193 /src/core
parent     kernel: memory: Add AddressSpaceInfo class, for managing the memory address space. (diff)
Diffstat (limited to 'src/core')
-rw-r--r--  src/core/CMakeLists.txt                 |   1
-rw-r--r--  src/core/hle/kernel/memory/slab_heap.h  | 161
2 files changed, 162 insertions, 0 deletions
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index e98091cba..82efb9e21 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -156,6 +156,7 @@ add_library(core STATIC
hle/kernel/kernel.h
hle/kernel/memory/address_space_info.cpp
hle/kernel/memory/address_space_info.h
+ hle/kernel/memory/slab_heap.h
hle/kernel/mutex.cpp
hle/kernel/mutex.h
hle/kernel/object.cpp
diff --git a/src/core/hle/kernel/memory/slab_heap.h b/src/core/hle/kernel/memory/slab_heap.h
new file mode 100644
index 000000000..ca334384b
--- /dev/null
+++ b/src/core/hle/kernel/memory/slab_heap.h
@@ -0,0 +1,161 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <atomic>
+
+#include "common/assert.h"
+#include "common/common_funcs.h"
+#include "common/common_types.h"
+
+namespace Kernel::Memory {
+
+namespace impl {
+
+class SlabHeapImpl final : NonCopyable {
+public:
+ struct Node {
+ Node* next{};
+ };
+
+ constexpr SlabHeapImpl() = default;
+
+ void Initialize(std::size_t size) {
+ ASSERT(head == nullptr);
+ obj_size = size;
+ }
+
+ constexpr std::size_t GetObjectSize() const {
+ return obj_size;
+ }
+
+ Node* GetHead() const {
+ return head;
+ }
+
+ void* Allocate() {
+ Node* ret = head.load();
+
+ do {
+ if (ret == nullptr) {
+ break;
+ }
+ } while (!head.compare_exchange_weak(ret, ret->next));
+
+ return ret;
+ }
+
+ void Free(void* obj) {
+ Node* node = reinterpret_cast<Node*>(obj);
+
+ Node* cur_head = head.load();
+ do {
+ node->next = cur_head;
+ } while (!head.compare_exchange_weak(cur_head, node));
+ }
+
+private:
+ std::atomic<Node*> head{};
+ std::size_t obj_size{};
+};
+
+} // namespace impl
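
The class above is a lock-free intrusive free list (a Treiber stack): Allocate() pops the head node with a compare-and-swap retry loop, and Free() pushes the object back as the new head, reusing the object's own storage for the Node link. Below is a minimal standalone sketch of the same compare-exchange pattern; FreeList, FreeNode, Pop, and Push are illustrative names, not part of this commit:

#include <atomic>

struct FreeNode {
    FreeNode* next{};
};

class FreeList {
public:
    // Pop: try to swing head from the observed node to its successor;
    // compare_exchange_weak reloads the observed value into old_head on
    // failure, so the loop simply retries with fresh data.
    FreeNode* Pop() {
        FreeNode* old_head = head.load();
        while (old_head != nullptr &&
               !head.compare_exchange_weak(old_head, old_head->next)) {
        }
        return old_head;
    }

    // Push: link the node in front of the observed head, then try to
    // publish it; retry with the refreshed head until the CAS succeeds.
    void Push(FreeNode* node) {
        FreeNode* old_head = head.load();
        do {
            node->next = old_head;
        } while (!head.compare_exchange_weak(old_head, node));
    }

private:
    std::atomic<FreeNode*> head{};
};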
+
+class SlabHeapBase : NonCopyable {
+public:
+ constexpr SlabHeapBase() = default;
+
+ constexpr bool Contains(uintptr_t addr) const {
+ return start <= addr && addr < end;
+ }
+
+ constexpr std::size_t GetSlabHeapSize() const {
+ return (end - start) / GetObjectSize();
+ }
+
+ constexpr std::size_t GetObjectSize() const {
+ return impl.GetObjectSize();
+ }
+
+ constexpr uintptr_t GetSlabHeapAddress() const {
+ return start;
+ }
+
+ std::size_t GetObjectIndexImpl(const void* obj) const {
+ return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
+ }
+
+ std::size_t GetPeakIndex() const {
+ return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
+ }
+
+ void* AllocateImpl() {
+ return impl.Allocate();
+ }
+
+ void FreeImpl(void* obj) {
+ // Don't allow freeing an object that wasn't allocated from this heap
+ ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
+ impl.Free(obj);
+ }
+
+ void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
+ // Ensure we don't initialize a slab using null memory
+ ASSERT(memory != nullptr);
+
+ // Initialize the base allocator
+ impl.Initialize(obj_size);
+
+ // Set our tracking variables
+ const std::size_t num_obj = (memory_size / obj_size);
+ start = reinterpret_cast<uintptr_t>(memory);
+ end = start + num_obj * obj_size;
+ peak = start;
+
+ // Free the objects
+ u8* cur = reinterpret_cast<u8*>(end);
+
+ for (std::size_t i{}; i < num_obj; i++) {
+ cur -= obj_size;
+ impl.Free(cur);
+ }
+ }
+
+private:
+ using Impl = impl::SlabHeapImpl;
+
+ Impl impl;
+ uintptr_t peak{};
+ uintptr_t start{};
+ uintptr_t end{};
+};
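
InitializeImpl() carves the supplied buffer into memory_size / obj_size fixed-size slots, records the [start, end) range used by Contains() and the index queries, and then walks backwards from end, pushing each slot onto the free list. A worked sketch of that arithmetic, with illustrative values not taken from the commit:

// Assume obj_size = 0x40 (64-byte objects) and a 4 KiB buffer at 0x1000:
//   num_obj = 0x1000 / 0x40 = 64
//   start   = 0x1000
//   end     = 0x1000 + 64 * 0x40 = 0x2000
// The loop frees 0x1FC0, 0x1F80, ..., 0x1000 in that order, so the
// lowest slot ends up at the head of the list and is handed out first.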
+
+template <typename T>
+class SlabHeap final : public SlabHeapBase {
+public:
+ constexpr SlabHeap() : SlabHeapBase() {}
+
+ void Initialize(void* memory, std::size_t memory_size) {
+ InitializeImpl(sizeof(T), memory, memory_size);
+ }
+
+ T* Allocate() {
+ T* obj = reinterpret_cast<T*>(AllocateImpl());
+ if (obj != nullptr) {
+ new (obj) T();
+ }
+ return obj;
+ }
+
+ void Free(T* obj) {
+ FreeImpl(obj);
+ }
+
+ constexpr std::size_t GetObjectIndex(const T* obj) const {
+ return GetObjectIndexImpl(obj);
+ }
+};
+
+} // namespace Kernel::Memory
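
Taken together, a caller reserves a backing buffer once, hands it to SlabHeap<T>, and then services fixed-size allocations from it. A hedged usage sketch follows; the Page type and the std::vector backing store are invented for illustration (in the kernel the storage would come from the emulated system's memory, not a vector):

#include <cstddef>
#include <vector>

#include "core/hle/kernel/memory/slab_heap.h"

// Illustrative payload type. T must be at least pointer-sized, since a
// freed object's storage is reused as the free-list Node link.
struct Page {
    unsigned char data[0x1000];
};

int main() {
    std::vector<unsigned char> backing(0x100000);  // 1 MiB of slab storage

    Kernel::Memory::SlabHeap<Page> heap;
    heap.Initialize(backing.data(), backing.size());

    Page* page = heap.Allocate();  // pops a slot, placement-news a Page
    if (page != nullptr) {
        heap.Free(page);           // pushes the slot back onto the free list
    }
}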