From d364e7cf09ee8de31610bc606e69ded6bfa94c18 Mon Sep 17 00:00:00 2001
From: bunnei
Date: Sun, 5 Apr 2020 14:41:04 -0400
Subject: kernel: memory: Add SlabHeap class, for managing memory heaps.

- This will be used for TLS pages, among other things.
---
 src/core/hle/kernel/memory/slab_heap.h | 161 +++++++++++++++++++++++++++++++++
 1 file changed, 161 insertions(+)
 create mode 100644 src/core/hle/kernel/memory/slab_heap.h

diff --git a/src/core/hle/kernel/memory/slab_heap.h b/src/core/hle/kernel/memory/slab_heap.h
new file mode 100644
index 000000000..ca334384b
--- /dev/null
+++ b/src/core/hle/kernel/memory/slab_heap.h
@@ -0,0 +1,161 @@
+// Copyright 2020 yuzu Emulator Project
+// Licensed under GPLv2 or any later version
+// Refer to the license.txt file included.
+
+#pragma once
+
+#include <atomic>
+
+#include "common/assert.h"
+#include "common/common_funcs.h"
+#include "common/common_types.h"
+
+namespace Kernel::Memory {
+
+namespace impl {
+
+class SlabHeapImpl final : NonCopyable {
+public:
+    struct Node {
+        Node* next{};
+    };
+
+    constexpr SlabHeapImpl() = default;
+
+    void Initialize(std::size_t size) {
+        ASSERT(head == nullptr);
+        obj_size = size;
+    }
+
+    constexpr std::size_t GetObjectSize() const {
+        return obj_size;
+    }
+
+    Node* GetHead() const {
+        return head;
+    }
+
+    void* Allocate() {
+        Node* ret = head.load();
+
+        do {
+            if (ret == nullptr) {
+                break;
+            }
+        } while (!head.compare_exchange_weak(ret, ret->next));
+
+        return ret;
+    }
+
+    void Free(void* obj) {
+        Node* node = reinterpret_cast<Node*>(obj);
+
+        Node* cur_head = head.load();
+        do {
+            node->next = cur_head;
+        } while (!head.compare_exchange_weak(cur_head, node));
+    }
+
+private:
+    std::atomic<Node*> head{};
+    std::size_t obj_size{};
+};
+
+} // namespace impl
+
+class SlabHeapBase : NonCopyable {
+public:
+    constexpr SlabHeapBase() = default;
+
+    constexpr bool Contains(uintptr_t addr) const {
+        return start <= addr && addr < end;
+    }
+
+    constexpr std::size_t GetSlabHeapSize() const {
+        return (end - start) / GetObjectSize();
+    }
+
+    constexpr std::size_t GetObjectSize() const {
+        return impl.GetObjectSize();
+    }
+
+    constexpr uintptr_t GetSlabHeapAddress() const {
+        return start;
+    }
+
+    std::size_t GetObjectIndexImpl(const void* obj) const {
+        return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
+    }
+
+    std::size_t GetPeakIndex() const {
+        return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
+    }
+
+    void* AllocateImpl() {
+        return impl.Allocate();
+    }
+
+    void FreeImpl(void* obj) {
+        // Don't allow freeing an object that wasn't allocated from this heap
+        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
+        impl.Free(obj);
+    }
+
+    void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
+        // Ensure we don't initialize a slab using null memory
+        ASSERT(memory != nullptr);
+
+        // Initialize the base allocator
+        impl.Initialize(obj_size);
+
+        // Set our tracking variables
+        const std::size_t num_obj = (memory_size / obj_size);
+        start = reinterpret_cast<uintptr_t>(memory);
+        end = start + num_obj * obj_size;
+        peak = start;
+
+        // Free the objects
+        u8* cur = reinterpret_cast<u8*>(end);
+
+        for (std::size_t i{}; i < num_obj; i++) {
+            cur -= obj_size;
+            impl.Free(cur);
+        }
+    }
+
+private:
+    using Impl = impl::SlabHeapImpl;
+
+    Impl impl;
+    uintptr_t peak{};
+    uintptr_t start{};
+    uintptr_t end{};
+};
+
+template <typename T>
+class SlabHeap final : public SlabHeapBase {
+public:
+    constexpr SlabHeap() : SlabHeapBase() {}
+
+    void Initialize(void* memory, std::size_t memory_size) {
+        InitializeImpl(sizeof(T), memory, memory_size);
+    }
+
+    T* Allocate() {
+        T* obj = reinterpret_cast<T*>(AllocateImpl());
+        if (obj != nullptr) {
+            new (obj) T();
+        }
+        return obj;
+    }
+
+    void Free(T* obj) {
+        FreeImpl(obj);
+    }
+
+    constexpr std::size_t GetObjectIndex(const T* obj) const {
+        return GetObjectIndexImpl(obj);
+    }
+};
+
+} // namespace Kernel::Memory
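
For context, here is a minimal usage sketch of the allocator this patch adds. It is not part of the commit: the TlsPage type, the 16-slot backing buffer, and the host-side std::vector are all hypothetical stand-ins for whatever kernel memory region the heap would actually be handed.

// Hypothetical usage sketch, not part of this patch.
#include <cstdint>
#include <vector>

#include "core/hle/kernel/memory/slab_heap.h"

// Hypothetical fixed-size object type; TLS pages are one intended user.
struct TlsPage {
    std::uint8_t data[0x1000];
};

int main() {
    // Hypothetical backing storage with room for 16 objects. In the emulator
    // this would be a region of guest memory, not a host-side vector.
    std::vector<std::uint8_t> backing(sizeof(TlsPage) * 16);

    Kernel::Memory::SlabHeap<TlsPage> heap;
    heap.Initialize(backing.data(), backing.size());

    // Allocate() pops a slot off the free list and placement-news a T into
    // it; it returns nullptr once all 16 slots are in use.
    TlsPage* page = heap.Allocate();
    if (page != nullptr) {
        heap.Free(page); // Free() asserts the pointer came from this heap
    }
    return 0;
}

Internally, Allocate() and Free() maintain the free list as a lock-free Treiber stack via compare_exchange_weak, so the heap can be shared across threads without taking a lock.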