
hle: kernel: Move slab heap management to KernelCore.

Author: bunnei
Date:   2021-04-09 22:10:14 -07:00
Parent: ab704acab8
Commit: b6156e735c

7 changed files with 112 additions and 70 deletions
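The change, in one sentence: per-type slab heaps stop being `static inline` members of each kernel object class and become members of KernelCore, reached through a templated accessor. Below is a minimal, self-contained sketch of that pattern under stand-in names; SlabHeapSketch is a new/delete placeholder, not yuzu's real KSlabHeap, and the two registered types are illustrative only.

// Minimal sketch of the pattern applied by this commit, using stand-in types.
// SlabHeapSketch is only a placeholder for the real fixed-region slab allocator.
#include <type_traits>

struct Process {};
struct KThread {};

template <typename T>
class SlabHeapSketch {
public:
    T* Allocate() {
        return new T(); // the real heap hands out fixed-size slots from a preallocated region
    }
    void Free(T* obj) {
        delete obj; // the real heap returns the slot to its free list
    }
};

class KernelCore {
public:
    // Mirrors the new KernelCore::SlabHeap<T>() accessor: one heap per object type,
    // selected at compile time with if constexpr.
    template <typename T>
    SlabHeapSketch<T>& SlabHeap() {
        if constexpr (std::is_same_v<T, Process>) {
            return slab_heap_process;
        } else {
            static_assert(std::is_same_v<T, KThread>, "no slab heap registered for this type");
            return slab_heap_thread;
        }
    }

private:
    SlabHeapSketch<Process> slab_heap_process;
    SlabHeapSketch<KThread> slab_heap_thread;
};

int main() {
    KernelCore kernel;
    KThread* thread = kernel.SlabHeap<KThread>().Allocate();
    kernel.SlabHeap<KThread>().Free(thread);
}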

View file

@@ -185,11 +185,11 @@ KThread* KConditionVariable::SignalImpl(KThread* thread) {
             thread->Wakeup();
         } else {
             // Get the previous owner.
-            KThread* owner_thread =
-                kernel.CurrentProcess()->GetHandleTable()
-                    .GetObjectWithoutPseudoHandle<KThread>(
-                        static_cast<Handle>(prev_tag & ~Svc::HandleWaitMask))
-                    .ReleasePointerUnsafe();
+            KThread* owner_thread = kernel.CurrentProcess()
+                                        ->GetHandleTable()
+                                        .GetObjectWithoutPseudoHandle<KThread>(
+                                            static_cast<Handle>(prev_tag & ~Svc::HandleWaitMask))
+                                        .ReleasePointerUnsafe();

            if (owner_thread) {
                // Add the thread as a waiter on the owner.
@@ -214,7 +214,7 @@ void KConditionVariable::Signal(u64 cv_key, s32 count) {
    // Prepare for signaling.
    constexpr int MaxThreads = 16;

-    KLinkedList<KThread> thread_list;
+    KLinkedList<KThread> thread_list{kernel};
    std::array<KThread*, MaxThreads> thread_array;
    s32 num_to_close{};
@@ -254,7 +254,8 @@ void KConditionVariable::Signal(u64 cv_key, s32 count) {
    }

    // Close threads in the list.
-    for (auto it = thread_list.begin(); it != thread_list.end(); it = thread_list.erase(it)) {
+    for (auto it = thread_list.begin(); it != thread_list.end();
+         it = thread_list.erase(kernel, it)) {
        (*it).Close();
    }
}

View file

@@ -21,6 +21,10 @@ void KEvent::Initialize(std::string&& name_) {
    // writable events are closed this object will be destroyed.
    Open();

+    //// Create our sub events.
+    //KAutoObject::Create(readable_event.get());
+    //KAutoObject::Create(writable_event.get());
+
    // Create our sub events.
    readable_event = std::make_shared<KReadableEvent>(kernel, name_ + ":Readable");
    writable_event = std::make_shared<KWritableEvent>(kernel, name_ + ":Writable");

View file

@@ -11,6 +11,8 @@
 namespace Kernel {

+class KernelCore;
+
 class KLinkedListNode : public boost::intrusive::list_base_hook<>,
                         public KSlabAllocated<KLinkedListNode> {
 private:
@@ -118,11 +120,11 @@
    };

public:
-    constexpr KLinkedList() : BaseList() {}
+    constexpr KLinkedList(KernelCore& kernel_) : BaseList(), kernel{kernel_} {}

    ~KLinkedList() {
        // Erase all elements.
-        for (auto it = this->begin(); it != this->end(); it = this->erase(it)) {
+        for (auto it = this->begin(); it != this->end(); it = this->erase(kernel, it)) {
        }

        // Ensure we succeeded.
@@ -199,7 +201,7 @@
    }

    iterator insert(const_iterator pos, reference ref) {
-        KLinkedListNode* node = KLinkedListNode::Allocate();
+        KLinkedListNode* node = KLinkedListNode::Allocate(kernel);
        ASSERT(node != nullptr);
        node->Initialize(std::addressof(ref));
        return iterator(BaseList::insert(pos.m_base_it, *node));
@@ -221,13 +223,16 @@
        this->erase(this->begin());
    }

-    iterator erase(const iterator pos) {
+    iterator erase(KernelCore& kernel, const iterator pos) {
        KLinkedListNode* freed_node = std::addressof(*pos.m_base_it);
        iterator ret = iterator(BaseList::erase(pos.m_base_it));
-        KLinkedListNode::Free(freed_node);
+        KLinkedListNode::Free(kernel, freed_node);

        return ret;
    }
+
+private:
+    KernelCore& kernel;
 };

 } // namespace Kernel

View file

@@ -617,7 +617,9 @@ KScheduler::KScheduler(Core::System& system, s32 core_id) : system(system), core
    state.highest_priority_thread = nullptr;
}

-KScheduler::~KScheduler() = default;
+KScheduler::~KScheduler() {
+    idle_thread->Close();
+}

KThread* KScheduler::GetCurrentThread() const {
    if (auto result = current_thread.load(); result) {

View file

@@ -97,6 +97,7 @@
    void FreeImpl(void* obj) {
        // Don't allow freeing an object that wasn't allocated from this heap
        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
+
        impl.Free(obj);
    }

View file

@@ -11,9 +11,10 @@
 #include <vector>
 #include "core/arm/cpu_interrupt_handler.h"
 #include "core/hardware_properties.h"
+#include "core/hle/kernel/k_auto_object.h"
+#include "core/hle/kernel/k_slab_heap.h"
 #include "core/hle/kernel/memory_types.h"
 #include "core/hle/kernel/object.h"
-#include "core/hle/kernel/k_auto_object.h"

 namespace Core {
 class CPUInterruptHandler;
@@ -32,6 +33,8 @@ class ClientPort;
 class GlobalSchedulerContext;
 class HandleTable;
 class KAutoObjectWithListContainer;
+class KEvent;
+class KLinkedListNode;
 class KMemoryManager;
 class KResourceLimit;
 class KScheduler;
@@ -231,9 +234,10 @@
    /**
     * Creates an HLE service thread, which are used to execute service routines asynchronously.
-     * While these are allocated per ServerSession, these need to be owned and managed outside of
-     * ServerSession to avoid a circular dependency.
-     * @param name String name for the ServerSession creating this thread, used for debug purposes.
+     * While these are allocated per ServerSession, these need to be owned and managed outside
+     * of ServerSession to avoid a circular dependency.
+     * @param name String name for the ServerSession creating this thread, used for debug
+     * purposes.
     * @returns The a weak pointer newly created service thread.
     */
    std::weak_ptr<Kernel::ServiceThread> CreateServiceThread(const std::string& name);
@@ -252,6 +256,22 @@
    Core::System& System();
    const Core::System& System() const;

+    /// Gets the slab heap for the specified kernel object type.
+    template <typename T>
+    KSlabHeap<T>& SlabHeap() {
+        if constexpr (std::is_same_v<T, Process>) {
+            return slab_heap_Process;
+        } else if constexpr (std::is_same_v<T, KThread>) {
+            return slab_heap_KThread;
+        } else if constexpr (std::is_same_v<T, KEvent>) {
+            return slab_heap_KEvent;
+        } else if constexpr (std::is_same_v<T, KSharedMemory>) {
+            return slab_heap_KSharedMemory;
+        } else if constexpr (std::is_same_v<T, KLinkedListNode>) {
+            return slab_heap_KLinkedListNode;
+        }
+    }
+
private:
    friend class Object;
    friend class Process;
@@ -277,7 +297,15 @@
    struct Impl;
    std::unique_ptr<Impl> impl;

    bool exception_exited{};
+
+private:
+    KSlabHeap<Process> slab_heap_Process;
+    KSlabHeap<KThread> slab_heap_KThread;
+    KSlabHeap<KEvent> slab_heap_KEvent;
+    KSlabHeap<KSharedMemory> slab_heap_KSharedMemory;
+    KSlabHeap<KLinkedListNode> slab_heap_KLinkedListNode;
+
};

} // namespace Kernel

View file

@@ -20,44 +20,44 @@ namespace Kernel {

 template <class Derived>
 class KSlabAllocated {
-private:
-    static inline KSlabHeap<Derived> s_slab_heap;
-
 public:
     constexpr KSlabAllocated() = default;

-    size_t GetSlabIndex() const {
-        return s_slab_heap.GetIndex(static_cast<const Derived*>(this));
+    size_t GetSlabIndex(KernelCore& kernel) const {
+        return kernel.SlabHeap<Derived>().GetIndex(static_cast<const Derived*>(this));
     }

 public:
-    static void InitializeSlabHeap(void* memory, size_t memory_size) {
-        s_slab_heap.Initialize(memory, memory_size);
+    static void InitializeSlabHeap(KernelCore& kernel, void* memory, size_t memory_size) {
+        kernel.SlabHeap<Derived>().Initialize(memory, memory_size);
     }

-    static Derived* Allocate() {
-        return s_slab_heap.Allocate();
+    static Derived* Allocate(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().Allocate();
     }

-    static void Free(Derived* obj) {
-        s_slab_heap.Free(obj);
+    static void Free(KernelCore& kernel, Derived* obj) {
+        kernel.SlabHeap<Derived>().Free(obj);
     }

-    static size_t GetObjectSize() {
-        return s_slab_heap.GetObjectSize();
+    static size_t GetObjectSize(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetObjectSize();
     }

-    static size_t GetSlabHeapSize() {
-        return s_slab_heap.GetSlabHeapSize();
+    static size_t GetSlabHeapSize(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetSlabHeapSize();
     }

-    static size_t GetPeakIndex() {
-        return s_slab_heap.GetPeakIndex();
+    static size_t GetPeakIndex(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetPeakIndex();
     }

-    static uintptr_t GetSlabHeapAddress() {
-        return s_slab_heap.GetSlabHeapAddress();
+    static uintptr_t GetSlabHeapAddress(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetSlabHeapAddress();
     }

-    static size_t GetNumRemaining() {
-        return s_slab_heap.GetNumRemaining();
+    static size_t GetNumRemaining(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetNumRemaining();
     }
 };
@@ -66,43 +66,38 @@ class KAutoObjectWithSlabHeapAndContainer : public Base {
     static_assert(std::is_base_of<KAutoObjectWithList, Base>::value);

 private:
-    static inline KSlabHeap<Derived> s_slab_heap;
-    KernelCore& m_kernel;
-
-private:
-    static Derived* Allocate() {
-        return s_slab_heap.Allocate();
+    static Derived* Allocate(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().Allocate();
     }

     static Derived* AllocateWithKernel(KernelCore& kernel) {
-        return s_slab_heap.AllocateWithKernel(kernel);
+        return kernel.SlabHeap<Derived>().AllocateWithKernel(kernel);
     }

-    static void Free(Derived* obj) {
-        s_slab_heap.Free(obj);
+    static void Free(KernelCore& kernel, Derived* obj) {
+        kernel.SlabHeap<Derived>().Free(obj);
     }

 public:
     class ListAccessor : public KAutoObjectWithListContainer::ListAccessor {
     public:
-        ListAccessor()
-            : KAutoObjectWithListContainer::ListAccessor(m_kernel.ObjectListContainer()) {}
+        ListAccessor() : KAutoObjectWithListContainer::ListAccessor(kernel.ObjectListContainer()) {}
         ~ListAccessor() = default;
     };

 public:
-    KAutoObjectWithSlabHeapAndContainer(KernelCore& kernel) : Base(kernel), m_kernel(kernel) {}
+    KAutoObjectWithSlabHeapAndContainer(KernelCore& kernel_) : Base(kernel_), kernel(kernel_) {}
     virtual ~KAutoObjectWithSlabHeapAndContainer() {}

     virtual void Destroy() override {
         const bool is_initialized = this->IsInitialized();
         uintptr_t arg = 0;
         if (is_initialized) {
-            m_kernel.ObjectListContainer().Unregister(this);
+            kernel.ObjectListContainer().Unregister(this);
             arg = this->GetPostDestroyArgument();
             this->Finalize();
         }
-        Free(static_cast<Derived*>(this));
+        Free(kernel, static_cast<Derived*>(this));
         if (is_initialized) {
             Derived::PostDestroy(arg);
         }
@@ -116,12 +111,12 @@
     }

     size_t GetSlabIndex() const {
-        return s_slab_heap.GetObjectIndex(static_cast<const Derived*>(this));
+        return SlabHeap<Derived>(kernel).GetObjectIndex(static_cast<const Derived*>(this));
     }

 public:
     static void InitializeSlabHeap(KernelCore& kernel, void* memory, size_t memory_size) {
-        s_slab_heap.Initialize(memory, memory_size);
+        kernel.SlabHeap<Derived>().Initialize(memory, memory_size);
         kernel.ObjectListContainer().Initialize();
     }
@@ -145,22 +140,28 @@
         return kernel.ObjectListContainer().Register(obj);
     }

-    static size_t GetObjectSize() {
-        return s_slab_heap.GetObjectSize();
+    static size_t GetObjectSize(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetObjectSize();
     }

-    static size_t GetSlabHeapSize() {
-        return s_slab_heap.GetSlabHeapSize();
+    static size_t GetSlabHeapSize(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetSlabHeapSize();
     }

-    static size_t GetPeakIndex() {
-        return s_slab_heap.GetPeakIndex();
+    static size_t GetPeakIndex(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetPeakIndex();
     }

-    static uintptr_t GetSlabHeapAddress() {
-        return s_slab_heap.GetSlabHeapAddress();
+    static uintptr_t GetSlabHeapAddress(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetSlabHeapAddress();
     }

-    static size_t GetNumRemaining() {
-        return s_slab_heap.GetNumRemaining();
+    static size_t GetNumRemaining(KernelCore& kernel) {
+        return kernel.SlabHeap<Derived>().GetNumRemaining();
     }
+
+protected:
+    KernelCore& kernel;
 };

 } // namespace Kernel
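The slab_helpers.h hunk above is the knock-on effect: every static helper that used to reach for s_slab_heap now takes a KernelCore& and routes through its per-type heap. Below is a hedged continuation of the earlier stand-in sketch (same placeholder KernelCore/SlabHeapSketch, not yuzu's real classes) showing that shape and how call sites change.

// Continuation of the stand-in sketch above (placeholder types, not yuzu's KSlabAllocated).
// The static helpers carry no state of their own; they only forward to the heap owned by
// whichever KernelCore the caller passes in.
template <class Derived>
class SlabAllocatedSketch {
public:
    static Derived* Allocate(KernelCore& kernel) {
        return kernel.SlabHeap<Derived>().Allocate();
    }
    static void Free(KernelCore& kernel, Derived* obj) {
        kernel.SlabHeap<Derived>().Free(obj);
    }
};

// Call sites change accordingly, as seen in the k_condition_variable and k_linked_list
// hunks above, e.g.:
//   auto* node = KLinkedListNode::Allocate(kernel);    // was KLinkedListNode::Allocate()
//   KLinkedListNode::Free(kernel, node);               // was KLinkedListNode::Free(node)

The trade-off is explicit plumbing of the kernel reference through call sites in exchange for dropping mutable static state, so the heaps' lifetime follows the emulated kernel instance rather than the process.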