diff --git a/libraries/libstratosphere/include/stratosphere.hpp b/libraries/libstratosphere/include/stratosphere.hpp
index 0217bb5f3..fa107b909 100644
--- a/libraries/libstratosphere/include/stratosphere.hpp
+++ b/libraries/libstratosphere/include/stratosphere.hpp
@@ -26,6 +26,7 @@
#include "stratosphere/ams.hpp"
#include "stratosphere/os.hpp"
#include "stratosphere/dd.hpp"
+#include "stratosphere/lmem.hpp"
/* Lots of things depend on NCM, for Program IDs. */
#include "stratosphere/ncm.hpp"
diff --git a/libraries/libstratosphere/include/stratosphere/lmem.hpp b/libraries/libstratosphere/include/stratosphere/lmem.hpp
new file mode 100644
index 000000000..18b8f8d45
--- /dev/null
+++ b/libraries/libstratosphere/include/stratosphere/lmem.hpp
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#pragma once
+
+#include "lmem/lmem_common.hpp"
+#include "lmem/lmem_exp_heap.hpp"
diff --git a/libraries/libstratosphere/include/stratosphere/lmem/impl/lmem_impl_common.hpp b/libraries/libstratosphere/include/stratosphere/lmem/impl/lmem_impl_common.hpp
new file mode 100644
index 000000000..10eb3a69a
--- /dev/null
+++ b/libraries/libstratosphere/include/stratosphere/lmem/impl/lmem_impl_common.hpp
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+#include <vapours.hpp>
+#include "../../os.hpp"
+
+namespace ams::lmem::impl {
+
+ /* NOTE: Nintendo does not use util::IntrusiveListNode. */
+ /* They seem to manually manage linked list pointers. */
+ /* This is pretty gross, so we're going to use util::IntrusiveListNode. */
+
+ struct ExpHeapMemoryBlockHead {
+ u16 magic;
+ u32 attributes;
+ size_t block_size;
+ util::IntrusiveListNode list_node;
+ };
+ static_assert(std::is_trivially_destructible<ExpHeapMemoryBlockHead>::value);
+
+ using ExpHeapMemoryBlockList = typename util::IntrusiveListMemberTraits<&ExpHeapMemoryBlockHead::list_node>::ListType;
+
+ struct ExpHeapHead {
+ ExpHeapMemoryBlockList free_list;
+ ExpHeapMemoryBlockList used_list;
+ u16 group_id;
+ u16 mode;
+ bool use_alignment_margins;
+ char pad[3];
+ };
+ static_assert(sizeof(ExpHeapHead) == 0x28);
+ static_assert(std::is_trivially_destructible<ExpHeapHead>::value);
+
+ struct FrameHeapHead {
+ void *next_block_head;
+ void *next_block_tail;
+ };
+ static_assert(sizeof(FrameHeapHead) == 0x10);
+ static_assert(std::is_trivially_destructible<FrameHeapHead>::value);
+
+ struct UnitHead {
+ UnitHead *next;
+ };
+
+ struct UnitHeapList {
+ UnitHead *head;
+ };
+
+ struct UnitHeapHead {
+ UnitHeapList free_list;
+ size_t unit_size;
+ s32 alignment;
+ s32 num_units;
+ };
+ static_assert(sizeof(UnitHeapHead) == 0x18);
+ static_assert(std::is_trivially_destructible<UnitHeapHead>::value);
+
+ union ImplementationHeapHead {
+ ExpHeapHead exp_heap_head;
+ FrameHeapHead frame_heap_head;
+ UnitHeapHead unit_heap_head;
+ };
+
+ struct HeapHead {
+ u32 magic;
+ util::IntrusiveListNode list_node;
+ typename util::IntrusiveListMemberTraits<&HeapHead::list_node>::ListType child_list;
+ void *heap_start;
+ void *heap_end;
+ os::Mutex mutex;
+ u8 option;
+ ImplementationHeapHead impl_head;
+ };
+ static_assert(std::is_trivially_destructible<HeapHead>::value);
+
+}
diff --git a/libraries/libstratosphere/include/stratosphere/lmem/lmem_common.hpp b/libraries/libstratosphere/include/stratosphere/lmem/lmem_common.hpp
new file mode 100644
index 000000000..090fa9fa4
--- /dev/null
+++ b/libraries/libstratosphere/include/stratosphere/lmem/lmem_common.hpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+#include <vapours.hpp>
+#include "impl/lmem_impl_common.hpp"
+
+namespace ams::lmem {
+
+ enum CreateOption {
+ CreateOption_None = (0),
+ CreateOption_ZeroClear = (1 << 0),
+ CreateOption_DebugFill = (1 << 1),
+ CreateOption_ThreadSafe = (1 << 2),
+ };
+
+ enum FillType {
+ FillType_Unallocated,
+ FillType_Allocated,
+ FillType_Freed,
+ FillType_Count,
+ };
+
+ namespace impl {
+
+ struct HeapHead;
+
+ }
+
+ using HeapHandle = impl::HeapHead *;
+
+ using HeapCommonHead = impl::HeapHead;
+
+ struct MemoryRange {
+ uintptr_t address;
+ size_t size;
+ };
+
+ constexpr inline s32 DefaultAlignment = 0x8;
+
+ /* Common API. */
+ u32 GetDebugFillValue(FillType fill_type);
+ void SetDebugFillValue(FillType fill_type, u32 value);
+
+ size_t GetTotalSize(HeapHandle handle);
+ void *GetStartAddress(HeapHandle handle);
+ bool ContainsAddress(HeapHandle handle, const void *address);
+
+}
diff --git a/libraries/libstratosphere/include/stratosphere/lmem/lmem_exp_heap.hpp b/libraries/libstratosphere/include/stratosphere/lmem/lmem_exp_heap.hpp
new file mode 100644
index 000000000..9660ca976
--- /dev/null
+++ b/libraries/libstratosphere/include/stratosphere/lmem/lmem_exp_heap.hpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+#include <vapours.hpp>
+#include "lmem_common.hpp"
+
+namespace ams::lmem {
+
+ enum AllocationMode {
+ AllocationMode_FirstFit,
+ AllocationMode_BestFit,
+ };
+
+ enum AllocationDirection {
+ AllocationDirection_Front,
+ AllocationDirection_Back,
+ };
+
+ using HeapVisitor = void (*)(void *block, HeapHandle handle, uintptr_t user_data);
+
+ HeapHandle CreateExpHeap(void *address, size_t size, u32 option);
+ void DestroyExpHeap(HeapHandle handle);
+ MemoryRange AdjustExpHeap(HeapHandle handle);
+
+ void *AllocateFromExpHeap(HeapHandle handle, size_t size);
+ void *AllocateFromExpHeap(HeapHandle handle, size_t size, s32 alignment);
+ void FreeToExpHeap(HeapHandle handle, void *block);
+
+ size_t ResizeExpHeapMemoryBlock(HeapHandle handle, void *block, size_t size);
+
+ size_t GetExpHeapTotalFreeSize(HeapHandle handle);
+ size_t GetExpHeapAllocatableSize(HeapHandle handle, s32 alignment);
+
+ AllocationMode GetExpHeapAllocationMode(HeapHandle handle);
+ AllocationMode SetExpHeapAllocationMode(HeapHandle handle, AllocationMode new_mode);
+
+ bool GetExpHeapUseMarginsOfAlignment(HeapHandle handle);
+ bool SetExpHeapUseMarginsOfAlignment(HeapHandle handle, bool use_margins);
+
+ u16 GetExpHeapGroupId(HeapHandle handle);
+ u16 SetExpHeapGroupId(HeapHandle handle, u16 group_id);
+
+ size_t GetExpHeapMemoryBlockSize(const void *memory_block);
+ u16 GetExpHeapMemoryBlockGroupId(const void *memory_block);
+ AllocationDirection GetExpHeapMemoryBlockAllocationDirection(const void *memory_block);
+
+ void VisitExpHeapAllocatedBlocks(HeapHandle handle, HeapVisitor visitor, uintptr_t user_data);
+
+}
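For reference, a minimal usage sketch of the expanded-heap API declared above; the backing buffer, its size, and the allocation parameters are illustrative assumptions rather than part of this change:

    #include <stratosphere.hpp>

    namespace {

        /* Hypothetical caller-provided storage backing the heap. */
        alignas(8) u8 g_heap_buffer[0x10000];

    }

    void ExpHeapExample() {
        /* Create a thread-safe expanded heap over the buffer. */
        ams::lmem::HeapHandle heap = ams::lmem::CreateExpHeap(g_heap_buffer, sizeof(g_heap_buffer), ams::lmem::CreateOption_ThreadSafe);
        AMS_ASSERT(heap != nullptr);

        /* Allocate 0x100 bytes aligned to 0x10 from the front of the heap (a negative alignment would allocate from the back). */
        void *block = ams::lmem::AllocateFromExpHeap(heap, 0x100, 0x10);
        AMS_ASSERT(block != nullptr);

        /* ... use the allocation ... */

        /* Free the allocation and tear down the heap. */
        ams::lmem::FreeToExpHeap(heap, block);
        ams::lmem::DestroyExpHeap(heap);
    }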
diff --git a/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.cpp b/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.cpp
new file mode 100644
index 000000000..3b58dd4ae
--- /dev/null
+++ b/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.cpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#include <stratosphere.hpp>
+#include "lmem_impl_common_heap.hpp"
+
+namespace ams::lmem::impl {
+
+ namespace {
+
+ u32 g_fill_values[FillType_Count] = {
+ 0xC3C3C3C3, /* FillType_Unallocated */
+ 0xF3F3F3F3, /* FillType_Allocated */
+ 0xD3D3D3D3, /* FillType_Freed */
+ };
+
+ }
+
+ void InitializeHeapHead(HeapHead *out, u32 magic, void *start, void *end, u32 option) {
+ /* Call member constructors. */
+ new (&out->list_node) util::IntrusiveListNode;
+ new (&out->child_list) decltype(out->child_list);
+
+ /* Only initialize mutex if option requires it. */
+ if (option & CreateOption_ThreadSafe) {
+ static_assert(std::is_trivially_destructible<os::Mutex>::value);
+ new (&out->mutex) os::Mutex;
+ }
+
+ /* Set fields. */
+ out->magic = magic;
+ out->heap_start = start;
+ out->heap_end = end;
+ out->option = static_cast<u8>(option);
+
+ /* Fill memory with pattern if needed. */
+ FillUnallocatedMemory(out, start, GetPointerDifference(start, end));
+ }
+
+ void FinalizeHeap(HeapHead *heap) {
+ /* Nothing actually needs to be done here. */
+ }
+
+ bool ContainsAddress(HeapHandle handle, const void *address) {
+ const uintptr_t uptr_handle = reinterpret_cast<uintptr_t>(handle);
+ const uintptr_t uptr_start = reinterpret_cast<uintptr_t>(handle->heap_start);
+ const uintptr_t uptr_end = reinterpret_cast<uintptr_t>(handle->heap_end);
+ const uintptr_t uptr_addr = reinterpret_cast<uintptr_t>(address);
+
+ if (uptr_start - sizeof(HeapHead) == uptr_handle) {
+ /* The heap head is at the start of the managed memory. */
+ return uptr_handle <= uptr_addr && uptr_addr < uptr_end;
+ } else if (uptr_handle == uptr_end) {
+ /* The heap head is at the end of the managed memory. */
+ return uptr_start <= uptr_addr && uptr_addr < uptr_end + sizeof(HeapHead);
+ } else {
+ /* Heap head is somewhere unrelated to managed memory. */
+ return uptr_start <= uptr_addr && uptr_addr < uptr_end;
+ }
+ }
+
+ size_t GetHeapTotalSize(HeapHandle handle) {
+ const uintptr_t uptr_start = reinterpret_cast<uintptr_t>(handle->heap_start);
+ const uintptr_t uptr_end = reinterpret_cast<uintptr_t>(handle->heap_end);
+
+ if (ContainsAddress(handle, reinterpret_cast<const void *>(handle))) {
+ /* The heap metadata is contained within the heap, either before or after. */
+ return static_cast<size_t>(uptr_end - uptr_start + sizeof(HeapHead));
+ } else {
+ /* The heap metadata is not contained within the heap. */
+ return static_cast<size_t>(uptr_end - uptr_start);
+ }
+ }
+
+ u32 GetDebugFillValue(FillType type) {
+ return g_fill_values[type];
+ }
+
+ u32 SetDebugFillValue(FillType type, u32 value) {
+ const u32 old_value = g_fill_values[type];
+ g_fill_values[type] = value;
+ return old_value;
+ }
+
+
+}
\ No newline at end of file
diff --git a/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.hpp b/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.hpp
new file mode 100644
index 000000000..ba698be94
--- /dev/null
+++ b/libraries/libstratosphere/source/lmem/impl/lmem_impl_common_heap.hpp
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#pragma once
+#include <stratosphere.hpp>
+
+namespace ams::lmem::impl {
+
+ constexpr inline u32 ExpHeapMagic = util::ReverseFourCC<'E','X','P','H'>::Code;
+ constexpr inline u32 FrameHeapMagic = util::ReverseFourCC<'F','R','M','H'>::Code;
+ constexpr inline u32 UnitHeapMagic = util::ReverseFourCC<'U','N','T','H'>::Code;
+
+ class ScopedHeapLock {
+ NON_COPYABLE(ScopedHeapLock);
+ NON_MOVEABLE(ScopedHeapLock);
+ private:
+ HeapHandle handle;
+ public:
+ explicit ScopedHeapLock(HeapHandle h) : handle(h) {
+ if (this->handle->option & CreateOption_ThreadSafe) {
+ this->handle->mutex.Lock();
+ }
+ }
+
+ ~ScopedHeapLock() {
+ if (this->handle->option & CreateOption_ThreadSafe) {
+ this->handle->mutex.Unlock();
+ }
+ }
+ };
+
+ constexpr inline MemoryRange MakeMemoryRange(void *address, size_t size) {
+ return MemoryRange{ .address = reinterpret_cast<uintptr_t>(address), .size = size };
+ }
+
+ constexpr inline void *GetHeapStartAddress(HeapHandle handle) {
+ return handle->heap_start;
+ }
+
+ constexpr inline size_t GetPointerDifference(const void *start, const void *end) {
+ return reinterpret_cast<uintptr_t>(end) - reinterpret_cast<uintptr_t>(start);
+ }
+
+ constexpr inline size_t GetPointerDifference(uintptr_t start, uintptr_t end) {
+ return end - start;
+ }
+
+
+ void InitializeHeapHead(HeapHead *out, u32 magic, void *start, void *end, u32 option);
+ void FinalizeHeap(HeapHead *heap);
+ bool ContainsAddress(HeapHandle handle, const void *address);
+ size_t GetHeapTotalSize(HeapHandle handle);
+
+ u32 GetDebugFillValue(FillType type);
+ u32 SetDebugFillValue(FillType type, u32 value);
+
+ inline void FillMemory(void *dst, u32 fill_value, size_t size) {
+ /* All heap blocks must be at least 32-bit aligned. */
+ /* AMS_ASSERT(util::IsAligned(dst, 4)); */
+ /* AMS_ASSERT(util::IsAligned(size, 4)); */
+ for (size_t i = 0; i < size / sizeof(fill_value); i++) {
+ reinterpret_cast<u32 *>(dst)[i] = fill_value;
+ }
+ }
+
+ inline void FillUnallocatedMemory(HeapHead *heap, void *address, size_t size) {
+ if (heap->option & CreateOption_DebugFill) {
+ FillMemory(address, impl::GetDebugFillValue(FillType_Unallocated), size);
+ }
+ }
+
+ inline void FillAllocatedMemory(HeapHead *heap, void *address, size_t size) {
+ if (heap->option & CreateOption_ZeroClear) {
+ FillMemory(address, 0, size);
+ } else if (heap->option & CreateOption_DebugFill) {
+ FillMemory(address, impl::GetDebugFillValue(FillType_Allocated), size);
+ }
+ }
+
+ inline void FillFreedMemory(HeapHead *heap, void *address, size_t size) {
+ if (heap->option & CreateOption_DebugFill) {
+ FillMemory(address, impl::GetDebugFillValue(FillType_Freed), size);
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/libraries/libstratosphere/source/lmem/impl/lmem_impl_exp_heap.cpp b/libraries/libstratosphere/source/lmem/impl/lmem_impl_exp_heap.cpp
new file mode 100644
index 000000000..3b168682f
--- /dev/null
+++ b/libraries/libstratosphere/source/lmem/impl/lmem_impl_exp_heap.cpp
@@ -0,0 +1,641 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#include <stratosphere.hpp>
+#include "lmem_impl_exp_heap.hpp"
+
+namespace ams::lmem::impl {
+
+ namespace {
+
+ constexpr u16 FreeBlockMagic = 0x4652; /* FR */
+ constexpr u16 UsedBlockMagic = 0x5544; /* UD */
+
+ constexpr u16 DefaultGroupId = 0x00;
+ constexpr u16 MaxGroupId = 0xFF;
+
+ constexpr size_t MinimumAlignment = 4;
+ constexpr size_t MaximumPaddingAlignment = 0x80;
+
+ constexpr AllocationMode DefaultAllocationMode = AllocationMode_FirstFit;
+
+ constexpr size_t MinimumFreeBlockSize = 4;
+
+ struct MemoryRegion {
+ void *start;
+ void *end;
+ };
+
+ constexpr inline bool IsValidHeapHandle(HeapHandle handle) {
+ return handle->magic == ExpHeapMagic;
+ }
+
+ constexpr inline ExpHeapHead *GetExpHeapHead(HeapHead *heap_head) {
+ return &heap_head->impl_head.exp_heap_head;
+ }
+
+ constexpr inline const ExpHeapHead *GetExpHeapHead(const HeapHead *heap_head) {
+ return &heap_head->impl_head.exp_heap_head;
+ }
+
+ constexpr inline HeapHead *GetHeapHead(ExpHeapHead *exp_heap_head) {
+ return util::GetParentPointer<&HeapHead::impl_head>(util::GetParentPointer<&ImplementationHeapHead::exp_heap_head>(exp_heap_head));
+ }
+
+ constexpr inline const HeapHead *GetHeapHead(const ExpHeapHead *exp_heap_head) {
+ return util::GetParentPointer<&HeapHead::impl_head>(util::GetParentPointer<&ImplementationHeapHead::exp_heap_head>(exp_heap_head));
+ }
+
+ constexpr inline void *GetExpHeapMemoryStart(ExpHeapHead *exp_heap_head) {
+ return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(exp_heap_head) + sizeof(ImplementationHeapHead));
+ }
+
+ constexpr inline void *GetMemoryBlockStart(ExpHeapMemoryBlockHead *head) {
+ return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(head) + sizeof(*head));
+ }
+
+ constexpr inline const void *GetMemoryBlockStart(const ExpHeapMemoryBlockHead *head) {
+ return reinterpret_cast<const void *>(reinterpret_cast<uintptr_t>(head) + sizeof(*head));
+ }
+
+ constexpr inline void *GetMemoryBlockEnd(ExpHeapMemoryBlockHead *head) {
+ return reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(GetMemoryBlockStart(head)) + head->block_size);
+ }
+
+ constexpr inline const void *GetMemoryBlockEnd(const ExpHeapMemoryBlockHead *head) {
+ return reinterpret_cast<const void *>(reinterpret_cast<uintptr_t>(GetMemoryBlockStart(head)) + head->block_size);
+ }
+
+ constexpr inline ExpHeapMemoryBlockHead *GetHeadForMemoryBlock(const void *block) {
+ return reinterpret_cast<ExpHeapMemoryBlockHead *>(reinterpret_cast<uintptr_t>(block) - sizeof(ExpHeapMemoryBlockHead));
+ }
+
+ constexpr inline bool IsValidUsedMemoryBlock(const HeapHead *heap, const void *block) {
+ /* Block must fall within the heap range. */
+ if (heap != nullptr) {
+ if (block < heap->heap_start || heap->heap_end <= block) {
+ return false;
+ }
+ }
+
+ /* Block magic must be used. */
+ const ExpHeapMemoryBlockHead *head = GetHeadForMemoryBlock(block);
+ if (head->magic != UsedBlockMagic) {
+ return false;
+ }
+
+ /* End of block must remain within the heap range. */
+ if (heap != nullptr) {
+ if (reinterpret_cast<uintptr_t>(block) + head->block_size > reinterpret_cast<uintptr_t>(heap->heap_end)) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ constexpr inline u16 GetMemoryBlockAlignmentPadding(const ExpHeapMemoryBlockHead *block_head) {
+ return static_cast<u16>((block_head->attributes >> 8) & 0x7F);
+ }
+
+ inline void SetMemoryBlockAlignmentPadding(ExpHeapMemoryBlockHead *block_head, u16 padding) {
+ block_head->attributes &= ~static_cast<decltype(block_head->attributes)>(0x7F << 8);
+ block_head->attributes |= static_cast<decltype(block_head->attributes)>(padding & 0x7F) << 8;
+ }
+
+ constexpr inline u16 GetMemoryBlockGroupId(const ExpHeapMemoryBlockHead *block_head) {
+ return static_cast<u16>(block_head->attributes & 0xFF);
+ }
+
+ inline void SetMemoryBlockGroupId(ExpHeapMemoryBlockHead *block_head, u16 group_id) {
+ block_head->attributes &= ~static_cast<decltype(block_head->attributes)>(0xFF);
+ block_head->attributes |= static_cast<decltype(block_head->attributes)>(group_id & 0xFF);
+ }
+
+ constexpr inline AllocationDirection GetMemoryBlockAllocationDirection(const ExpHeapMemoryBlockHead *block_head) {
+ return static_cast<AllocationDirection>((block_head->attributes >> 15) & 1);
+ }
+
+ inline void SetMemoryBlockAllocationDirection(ExpHeapMemoryBlockHead *block_head, AllocationDirection dir) {
+ block_head->attributes &= ~static_cast<decltype(block_head->attributes)>(0x8000);
+ block_head->attributes |= static_cast<decltype(block_head->attributes)>(dir) << 15;
+ }
+
+ inline void GetMemoryBlockRegion(MemoryRegion *out, ExpHeapMemoryBlockHead *head) {
+ out->start = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(head) - GetMemoryBlockAlignmentPadding(head));
+ out->end = GetMemoryBlockEnd(head);
+ }
+
+ constexpr inline AllocationMode GetAllocationModeImpl(const ExpHeapHead *head) {
+ return static_cast<AllocationMode>(head->mode);
+ }
+
+ inline void SetAllocationModeImpl(ExpHeapHead *head, AllocationMode mode) {
+ head->mode = mode;
+ }
+
+ inline ExpHeapMemoryBlockHead *InitializeMemoryBlock(const MemoryRegion &region, u16 magic) {
+ ExpHeapMemoryBlockHead *block = reinterpret_cast<ExpHeapMemoryBlockHead *>(region.start);
+
+ /* Ensure all member constructors are called. */
+ new (block) ExpHeapMemoryBlockHead;
+
+ block->magic = magic;
+ block->attributes = 0;
+ block->block_size = GetPointerDifference(GetMemoryBlockStart(block), region.end);
+
+ return block;
+ }
+
+ inline ExpHeapMemoryBlockHead *InitializeFreeMemoryBlock(const MemoryRegion &region) {
+ return InitializeMemoryBlock(region, FreeBlockMagic);
+ }
+
+ inline ExpHeapMemoryBlockHead *InitializeUsedMemoryBlock(const MemoryRegion &region) {
+ return InitializeMemoryBlock(region, UsedBlockMagic);
+ }
+
+ HeapHead *InitializeExpHeap(void *start, void *end, u32 option) {
+ HeapHead *heap_head = reinterpret_cast<HeapHead *>(start);
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(heap_head);
+
+ /* Initialize the parent heap. */
+ InitializeHeapHead(heap_head, ExpHeapMagic, GetExpHeapMemoryStart(exp_heap_head), end, option);
+
+ /* Call exp heap member constructors. */
+ new (&exp_heap_head->free_list) ExpHeapMemoryBlockList;
+ new (&exp_heap_head->used_list) ExpHeapMemoryBlockList;
+
+ /* Set exp heap fields. */
+ exp_heap_head->group_id = DefaultGroupId;
+ exp_heap_head->use_alignment_margins = false;
+ SetAllocationModeImpl(exp_heap_head, DefaultAllocationMode);
+
+ /* Initialize memory block. */
+ {
+ MemoryRegion region{ .start = heap_head->heap_start, .end = heap_head->heap_end, };
+ exp_heap_head->free_list.push_back(*InitializeFreeMemoryBlock(region));
+ }
+
+ return heap_head;
+ }
+
+ bool CoalesceFreedRegion(ExpHeapHead *head, const MemoryRegion *region) {
+ auto prev_free_block_it = head->free_list.end();
+ MemoryRegion free_region = *region;
+
+ /* Locate the block. */
+ for (auto it = head->free_list.begin(); it != head->free_list.end(); it++) {
+ ExpHeapMemoryBlockHead *cur_free_block = &*it;
+
+ if (cur_free_block < region->start) {
+ prev_free_block_it = it;
+ continue;
+ }
+
+ /* Coalesce block after, if possible. */
+ if (cur_free_block == region->end) {
+ free_region.end = GetMemoryBlockEnd(cur_free_block);
+ it = head->free_list.erase(it);
+
+ /* Fill the memory with a pattern, for debug. */
+ FillUnallocatedMemory(GetHeapHead(head), cur_free_block, sizeof(ExpHeapMemoryBlockHead));
+ }
+
+ break;
+ }
+
+ /* We'll want to insert after the previous free block. */
+ auto insertion_it = head->free_list.begin();
+ if (prev_free_block_it != head->free_list.end()) {
+ /* There's a previous free block, so we want to insert as the next iterator. */
+ if (GetMemoryBlockEnd(&*prev_free_block_it) == region->start) {
+ /* We can coalesce, so do so. */
+ free_region.start = &*prev_free_block_it;
+ insertion_it = head->free_list.erase(prev_free_block_it);
+ } else {
+ /* We can't coalesce, so just select the next iterator. */
+ insertion_it = (++prev_free_block_it);
+ }
+ }
+
+ /* Ensure region is big enough for a block. */
+ /* NOTE: Nintendo does not check against minimum block size here, only header size. */
+ /* We will check against minimum block size, to avoid the creation of zero-size blocks. */
+ if (GetPointerDifference(free_region.start, free_region.end) < sizeof(ExpHeapMemoryBlockHead) + MinimumFreeBlockSize) {
+ return false;
+ }
+
+ /* Fill the memory with a pattern, for debug. */
+ FillFreedMemory(GetHeapHead(head), free_region.start, GetPointerDifference(free_region.start, free_region.end));
+
+ /* Insert the new memory block. */
+ head->free_list.insert(insertion_it, *InitializeFreeMemoryBlock(free_region));
+
+ return true;
+ }
+
+ void *ConvertFreeBlockToUsedBlock(ExpHeapHead *head, ExpHeapMemoryBlockHead *block_head, void *block, size_t size, AllocationDirection direction) {
+ /* Calculate freed memory regions. */
+ MemoryRegion free_region_front;
+ GetMemoryBlockRegion(&free_region_front, block_head);
+ MemoryRegion free_region_back{ .start = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(block) + size), .end = free_region_front.end, };
+
+ /* Adjust end of head region. */
+ free_region_front.end = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(block) - sizeof(ExpHeapMemoryBlockHead));
+
+ /* Remove the old block. */
+ auto old_block_it = head->free_list.erase(head->free_list.iterator_to(*block_head));
+
+ /* If the front margins are big enough (and we're allowed to do so), make a new block. */
+ if ((GetPointerDifference(free_region_front.start, free_region_front.end) < sizeof(ExpHeapMemoryBlockHead) + MinimumFreeBlockSize) ||
+ (direction == AllocationDirection_Front && !head->use_alignment_margins && GetPointerDifference(free_region_front.start, free_region_front.end) < MaximumPaddingAlignment)) {
+ /* There isn't enough space for a new block, or else we're not allowed to make one. */
+ free_region_front.end = free_region_front.start;
+ } else {
+ /* Make a new block! */
+ head->free_list.insert(old_block_it, *InitializeFreeMemoryBlock(free_region_front));
+ }
+
+ /* If the back margins are big enough (and we're allowed to do so), make a new block. */
+ if ((GetPointerDifference(free_region_back.start, free_region_back.end) < sizeof(ExpHeapMemoryBlockHead) + MinimumFreeBlockSize) ||
+ (direction == AllocationDirection_Back && !head->use_alignment_margins && GetPointerDifference(free_region_back.start, free_region_back.end) < MaximumPaddingAlignment)) {
+ /* There isn't enough space for a new block, or else we're not allowed to make one. */
+ free_region_back.end = free_region_back.start;
+ } else {
+ /* Make a new block! */
+ head->free_list.insert(old_block_it, *InitializeFreeMemoryBlock(free_region_back));
+ }
+
+ /* Fill the memory with a pattern, for debug. */
+ FillAllocatedMemory(GetHeapHead(head), free_region_front.end, GetPointerDifference(free_region_front.end, free_region_back.start));
+
+ {
+ /* Create the used block */
+ MemoryRegion used_region{ .start = free_region_front.end, .end = free_region_back.start };
+
+ ExpHeapMemoryBlockHead *used_block = InitializeUsedMemoryBlock(used_region);
+
+ /* Insert it into the used list. */
+ head->used_list.push_back(*used_block);
+ SetMemoryBlockAllocationDirection(used_block, direction);
+ SetMemoryBlockAlignmentPadding(used_block, static_cast<u16>(GetPointerDifference(free_region_front.end, used_block)));
+ SetMemoryBlockGroupId(used_block, head->group_id);
+ }
+
+ return block;
+ }
+
+ void *AllocateFromHead(HeapHead *heap, size_t size, s32 alignment) {
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(heap);
+
+ const bool is_first_fit = GetAllocationModeImpl(exp_heap_head) == AllocationMode_FirstFit;
+
+ /* Choose a block. */
+ ExpHeapMemoryBlockHead *found_block_head = nullptr;
+ void *found_block = nullptr;
+ size_t best_size = std::numeric_limits<size_t>::max();
+
+ for (auto it = exp_heap_head->free_list.begin(); it != exp_heap_head->free_list.end(); it++) {
+ const uintptr_t absolute_block_start = reinterpret_cast<uintptr_t>(GetMemoryBlockStart(&*it));
+ const uintptr_t block_start = util::AlignUp(absolute_block_start, alignment);
+ const size_t block_offset = block_start - absolute_block_start;
+
+ if (it->block_size >= size + block_offset && best_size > it->block_size) {
+ found_block_head = &*it;
+ found_block = reinterpret_cast<void *>(block_start);
+ best_size = it->block_size;
+
+ if (is_first_fit || best_size == size) {
+ break;
+ }
+ }
+ }
+
+ /* If we didn't find a block, return nullptr. */
+ if (found_block_head == nullptr) {
+ return nullptr;
+ }
+
+ return ConvertFreeBlockToUsedBlock(exp_heap_head, found_block_head, found_block, size, AllocationDirection_Front);
+ }
+
+ void *AllocateFromTail(HeapHead *heap, size_t size, s32 alignment) {
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(heap);
+
+ const bool is_first_fit = GetAllocationModeImpl(exp_heap_head) == AllocationMode_FirstFit;
+
+ /* Choose a block. */
+ ExpHeapMemoryBlockHead *found_block_head = nullptr;
+ void *found_block = nullptr;
+ size_t best_size = std::numeric_limits<size_t>::max();
+
+ for (auto it = exp_heap_head->free_list.rbegin(); it != exp_heap_head->free_list.rend(); it++) {
+ const uintptr_t absolute_block_start = reinterpret_cast<uintptr_t>(GetMemoryBlockStart(&*it));
+ const uintptr_t block_start = util::AlignUp(absolute_block_start, alignment);
+ const size_t block_offset = block_start - absolute_block_start;
+
+ if (it->block_size >= size + block_offset && best_size > it->block_size) {
+ found_block_head = &*it;
+ found_block = reinterpret_cast<void *>(block_start);
+ best_size = it->block_size;
+
+ if (is_first_fit || best_size == size) {
+ break;
+ }
+ }
+ }
+
+ /* If we didn't find a block, return nullptr. */
+ if (found_block_head == nullptr) {
+ return nullptr;
+ }
+
+ return ConvertFreeBlockToUsedBlock(exp_heap_head, found_block_head, found_block, size, AllocationDirection_Back);
+ }
+
+ }
+
+ HeapHandle CreateExpHeap(void *address, size_t size, u32 option) {
+ const uintptr_t uptr_end = util::AlignDown(reinterpret_cast<uintptr_t>(address) + size, MinimumAlignment);
+ const uintptr_t uptr_start = util::AlignUp(reinterpret_cast<uintptr_t>(address), MinimumAlignment);
+
+ if (uptr_start > uptr_end || GetPointerDifference(uptr_start, uptr_end) < sizeof(ExpHeapMemoryBlockHead) + MinimumFreeBlockSize) {
+ return nullptr;
+ }
+
+ return InitializeExpHeap(reinterpret_cast<void *>(uptr_start), reinterpret_cast<void *>(uptr_end), option);
+ }
+
+ void DestroyExpHeap(HeapHandle handle) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ FinalizeHeap(handle);
+ }
+
+ MemoryRange AdjustExpHeap(HeapHandle handle) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ HeapHead *heap_head = handle;
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(heap_head);
+
+ /* If there's no free blocks, we can't do anything. */
+ if (exp_heap_head->free_list.empty()) {
+ return MakeMemoryRange(handle->heap_end, 0);
+ }
+
+ /* Get the memory block end, make sure it really is the last block. */
+ ExpHeapMemoryBlockHead *block = &exp_heap_head->free_list.back();
+ void * const block_start = GetMemoryBlockStart(block);
+ const size_t block_size = block->block_size;
+ void * const block_end = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(block_start) + block_size);
+
+ if (block_end != handle->heap_end) {
+ return MakeMemoryRange(handle->heap_end, 0);
+ }
+
+ /* Remove the memory block. */
+ exp_heap_head->free_list.pop_back();
+
+ const size_t freed_size = block_size + sizeof(ExpHeapMemoryBlockHead);
+ heap_head->heap_end = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(heap_head->heap_end) - freed_size);
+ return MakeMemoryRange(heap_head->heap_end, freed_size);
+ }
+
+ void *AllocateFromExpHeap(HeapHandle handle, size_t size, s32 alignment) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ /* Fix up alignments less than 4. */
+ if (alignment == 1 || alignment == 2) {
+ alignment = 4;
+ } else if (alignment == -1 || alignment == -2) {
+ alignment = -4;
+ }
+
+ /* Ensure the alignment is valid. */
+ const s32 abs_alignment = std::abs(alignment);
+ AMS_ASSERT((abs_alignment & (abs_alignment - 1)) == 0);
+ AMS_ASSERT(MinimumAlignment <= static_cast<size_t>(abs_alignment));
+
+ /* Fix size to be correctly aligned. */
+ if (size == 0) {
+ size = 1;
+ }
+ size = util::AlignUp(size, MinimumAlignment);
+
+ /* Allocate a memory block. */
+ void *allocated_memory = nullptr;
+ if (alignment >= 0) {
+ allocated_memory = AllocateFromHead(handle, size, alignment);
+ } else {
+ allocated_memory = AllocateFromTail(handle, size, -alignment);
+ }
+
+ return allocated_memory;
+ }
+
+ void FreeToExpHeap(HeapHandle handle, void *mem_block) {
+ /* Ensure this is actually a valid heap and a valid memory block we allocated. */
+ AMS_ASSERT(IsValidHeapHandle(handle));
+ AMS_ASSERT(IsValidUsedMemoryBlock(handle, mem_block));
+
+ /* TODO: Nintendo does not allow FreeToExpHeap(nullptr). Should we? */
+
+ /* Get block pointers. */
+ HeapHead *heap_head = handle;
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(heap_head);
+ ExpHeapMemoryBlockHead *block = GetHeadForMemoryBlock(mem_block);
+ MemoryRegion region;
+
+ /* Erase the heap from the used list, and coalesce it with adjacent blocks. */
+ GetMemoryBlockRegion(&region, block);
+ exp_heap_head->used_list.erase(exp_heap_head->used_list.iterator_to(*block));
+ AMS_ASSERT(CoalesceFreedRegion(exp_heap_head, &region));
+ }
+
+ size_t ResizeExpHeapMemoryBlock(HeapHandle handle, void *mem_block, size_t size) {
+ /* Ensure this is actually a valid heap and a valid memory block we allocated. */
+ AMS_ASSERT(IsValidHeapHandle(handle));
+ AMS_ASSERT(IsValidUsedMemoryBlock(handle, mem_block));
+
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(handle);
+ ExpHeapMemoryBlockHead *block_head = GetHeadForMemoryBlock(mem_block);
+ const size_t original_block_size = block_head->block_size;
+
+ /* It's possible that there's no actual resizing being done. */
+ size = util::AlignUp(size, MinimumAlignment);
+ if (size == original_block_size) {
+ return size;
+ }
+
+ /* We're resizing one way or the other. */
+ if (size > original_block_size) {
+ /* We want to try to make the block bigger. */
+
+ /* Find the free block after this one. */
+ void * const cur_block_end = GetMemoryBlockEnd(block_head);
+ ExpHeapMemoryBlockHead *next_block_head = nullptr;
+
+ for (auto it = exp_heap_head->free_list.begin(); it != exp_heap_head->free_list.end(); it++) {
+ if (&*it == cur_block_end) {
+ next_block_head = &*it;
+ break;
+ }
+ }
+
+ /* If we can't get a big enough allocation using the next block, give up. */
+ if (next_block_head == nullptr || size > original_block_size + sizeof(ExpHeapMemoryBlockHead) + next_block_head->block_size) {
+ return 0;
+ }
+
+ /* Grow the block to encompass the next block. */
+ {
+ /* Get block region. */
+ MemoryRegion new_free_region;
+ GetMemoryBlockRegion(&new_free_region, next_block_head);
+
+ /* Remove the next block from the free list. */
+ auto insertion_it = exp_heap_head->free_list.erase(exp_heap_head->free_list.iterator_to(*next_block_head));
+
+ /* Figure out the new block extents. */
+ void *old_start = new_free_region.start;
+ new_free_region.start = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(mem_block) + size);
+
+ /* Only maintain the new free region as a memory block candidate if it can hold a header. */
+ /* NOTE: Nintendo does not check against minimum block size here, only header size. */
+ /* We will check against minimum block size, to avoid the creation of zero-size blocks. */
+ if (GetPointerDifference(new_free_region.start, new_free_region.end) < sizeof(ExpHeapMemoryBlockHead) + MinimumFreeBlockSize) {
+ new_free_region.start = new_free_region.end;
+ }
+
+ /* Adjust block sizes. */
+ block_head->block_size = GetPointerDifference(mem_block, new_free_region.start);
+ if (GetPointerDifference(new_free_region.start, new_free_region.end) >= sizeof(ExpHeapMemoryBlockHead) + MinimumFreeBlockSize) {
+ exp_heap_head->free_list.insert(insertion_it, *InitializeFreeMemoryBlock(new_free_region));
+ }
+
+ /* Fill the memory with a pattern, for debug. */
+ FillAllocatedMemory(GetHeapHead(exp_heap_head), old_start, GetPointerDifference(old_start, new_free_region.start));
+ }
+ } else {
+ /* We're shrinking the block. Nice and easy. */
+ MemoryRegion new_free_region{ .start = reinterpret_cast<void *>(reinterpret_cast<uintptr_t>(mem_block) + size), .end = GetMemoryBlockEnd(block_head) };
+
+ /* Try to free the new memory. */
+ block_head->block_size = size;
+ if (!CoalesceFreedRegion(exp_heap_head, &new_free_region)) {
+ /* We didn't shrink the block successfully, so restore the size. */
+ block_head->block_size = original_block_size;
+ }
+ }
+
+ return block_head->block_size;
+ }
+
+ size_t GetExpHeapTotalFreeSize(HeapHandle handle) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ size_t total_size = 0;
+ for (const auto &it : GetExpHeapHead(handle)->free_list) {
+ total_size += it.block_size;
+ }
+ return total_size;
+ }
+
+ size_t GetExpHeapAllocatableSize(HeapHandle handle, s32 alignment) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ /* Ensure alignment is positive. */
+ alignment = std::abs(alignment);
+
+ size_t max_size = std::numeric_limits<size_t>::min();
+ size_t min_offset = std::numeric_limits<size_t>::max();
+ for (const auto &it : GetExpHeapHead(handle)->free_list) {
+ const uintptr_t absolute_block_start = reinterpret_cast<uintptr_t>(GetMemoryBlockStart(&it));
+ const uintptr_t block_start = util::AlignUp(absolute_block_start, alignment);
+ const uintptr_t block_end = reinterpret_cast<uintptr_t>(GetMemoryBlockEnd(&it));
+
+ if (block_start < block_end) {
+ const size_t block_size = GetPointerDifference(block_start, block_end);
+ const size_t offset = GetPointerDifference(absolute_block_start, block_start);
+
+ if (block_size > max_size || (block_size == max_size && offset < min_offset)) {
+ max_size = block_size;
+ min_offset = offset;
+ }
+ }
+ }
+
+ return max_size;
+ }
+
+ AllocationMode GetExpHeapAllocationMode(HeapHandle handle) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ return GetAllocationModeImpl(GetExpHeapHead(handle));
+ }
+
+ AllocationMode SetExpHeapAllocationMode(HeapHandle handle, AllocationMode new_mode) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(handle);
+ const AllocationMode old_mode = GetAllocationModeImpl(exp_heap_head);
+ SetAllocationModeImpl(exp_heap_head, new_mode);
+ return old_mode;
+ }
+
+ u16 GetExpHeapGroupId(HeapHandle handle) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ return GetExpHeapHead(handle)->group_id;
+ }
+
+ u16 SetExpHeapGroupId(HeapHandle handle, u16 group_id) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+ AMS_ASSERT(group_id <= MaxGroupId);
+
+ ExpHeapHead *exp_heap_head = GetExpHeapHead(handle);
+ const u16 old_group_id = exp_heap_head->group_id;
+ exp_heap_head->group_id = group_id;
+ return old_group_id;
+ }
+
+ void VisitExpHeapAllocatedBlocks(HeapHandle handle, HeapVisitor visitor, uintptr_t user_data) {
+ AMS_ASSERT(IsValidHeapHandle(handle));
+
+ for (auto &it : GetExpHeapHead(handle)->used_list) {
+ (*visitor)(GetMemoryBlockStart(&it), handle, user_data);
+ }
+ }
+
+ size_t GetExpHeapMemoryBlockSize(const void *memory_block) {
+ AMS_ASSERT(IsValidUsedMemoryBlock(nullptr, memory_block));
+
+ return GetHeadForMemoryBlock(memory_block)->block_size;
+ }
+
+ u16 GetExpHeapMemoryBlockGroupId(const void *memory_block) {
+ AMS_ASSERT(IsValidUsedMemoryBlock(nullptr, memory_block));
+
+ return GetMemoryBlockGroupId(GetHeadForMemoryBlock(memory_block));
+ }
+
+ AllocationDirection GetExpHeapMemoryBlockAllocationDirection(const void *memory_block) {
+ AMS_ASSERT(IsValidUsedMemoryBlock(nullptr, memory_block));
+
+ return GetMemoryBlockAllocationDirection(GetHeadForMemoryBlock(memory_block));
+ }
+
+}
diff --git a/libraries/libstratosphere/source/lmem/impl/lmem_impl_exp_heap.hpp b/libraries/libstratosphere/source/lmem/impl/lmem_impl_exp_heap.hpp
new file mode 100644
index 000000000..4522a5344
--- /dev/null
+++ b/libraries/libstratosphere/source/lmem/impl/lmem_impl_exp_heap.hpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#pragma once
+#include <stratosphere.hpp>
+#include "lmem_impl_common_heap.hpp"
+
+namespace ams::lmem::impl {
+
+ HeapHandle CreateExpHeap(void *address, size_t size, u32 option);
+ void DestroyExpHeap(HeapHandle handle);
+ MemoryRange AdjustExpHeap(HeapHandle handle);
+
+ void *AllocateFromExpHeap(HeapHandle handle, size_t size, s32 alignment);
+ void FreeToExpHeap(HeapHandle handle, void *block);
+
+ size_t ResizeExpHeapMemoryBlock(HeapHandle handle, void *block, size_t size);
+
+ size_t GetExpHeapTotalFreeSize(HeapHandle handle);
+ size_t GetExpHeapAllocatableSize(HeapHandle handle, s32 alignment);
+
+ AllocationMode GetExpHeapAllocationMode(HeapHandle handle);
+ AllocationMode SetExpHeapAllocationMode(HeapHandle handle, AllocationMode new_mode);
+
+ bool GetExpHeapUseMarginsOfAlignment(HeapHandle handle);
+ bool SetExpHeapUseMarginsOfAlignment(HeapHandle handle, bool use_margins);
+
+ u16 GetExpHeapGroupId(HeapHandle handle);
+ u16 SetExpHeapGroupId(HeapHandle handle, u16 group_id);
+
+ size_t GetExpHeapMemoryBlockSize(const void *memory_block);
+ u16 GetExpHeapMemoryBlockGroupId(const void *memory_block);
+ AllocationDirection GetExpHeapMemoryBlockAllocationDirection(const void *memory_block);
+
+ void VisitExpHeapAllocatedBlocks(HeapHandle handle, HeapVisitor visitor, uintptr_t user_data);
+
+}
\ No newline at end of file
diff --git a/libraries/libstratosphere/source/lmem/lmem_common.cpp b/libraries/libstratosphere/source/lmem/lmem_common.cpp
new file mode 100644
index 000000000..7d00f3b6a
--- /dev/null
+++ b/libraries/libstratosphere/source/lmem/lmem_common.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#include <stratosphere.hpp>
+#include "impl/lmem_impl_common_heap.hpp"
+
+namespace ams::lmem {
+
+ u32 GetDebugFillValue(FillType fill_type) {
+ return impl::GetDebugFillValue(fill_type);
+ }
+
+ void SetDebugFillValue(FillType fill_type, u32 value) {
+ impl::SetDebugFillValue(fill_type, value);
+ }
+
+ size_t GetTotalSize(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetHeapTotalSize(handle);
+ }
+
+ void *GetStartAddress(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetHeapStartAddress(handle);
+ }
+
+ bool ContainsAddress(HeapHandle handle, const void *address) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::ContainsAddress(handle, address);
+ }
+
+}
diff --git a/libraries/libstratosphere/source/lmem/lmem_exp_heap.cpp b/libraries/libstratosphere/source/lmem/lmem_exp_heap.cpp
new file mode 100644
index 000000000..5d8e8352f
--- /dev/null
+++ b/libraries/libstratosphere/source/lmem/lmem_exp_heap.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2018-2019 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#include <stratosphere.hpp>
+#include "impl/lmem_impl_exp_heap.hpp"
+
+namespace ams::lmem {
+
+ HeapHandle CreateExpHeap(void *address, size_t size, u32 option) {
+ return impl::CreateExpHeap(address, size, option);
+ }
+
+ void DestroyExpHeap(HeapHandle handle) {
+ impl::DestroyExpHeap(handle);
+ }
+
+ MemoryRange AdjustExpHeap(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::AdjustExpHeap(handle);
+ }
+
+ void *AllocateFromExpHeap(HeapHandle handle, size_t size) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::AllocateFromExpHeap(handle, size, DefaultAlignment);
+ }
+
+ void *AllocateFromExpHeap(HeapHandle handle, size_t size, s32 alignment) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::AllocateFromExpHeap(handle, size, alignment);
+ }
+
+ void FreeToExpHeap(HeapHandle handle, void *block) {
+ impl::ScopedHeapLock lk(handle);
+ impl::FreeToExpHeap(handle, block);
+ }
+
+ size_t ResizeExpHeapMemoryBlock(HeapHandle handle, void *block, size_t size) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::ResizeExpHeapMemoryBlock(handle, block, size);
+ }
+
+ size_t GetExpHeapTotalFreeSize(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetExpHeapTotalFreeSize(handle);
+ }
+
+ size_t GetExpHeapAllocatableSize(HeapHandle handle, s32 alignment) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetExpHeapAllocatableSize(handle, alignment);
+ }
+
+ AllocationMode GetExpHeapAllocationMode(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetExpHeapAllocationMode(handle);
+ }
+
+ AllocationMode SetExpHeapAllocationMode(HeapHandle handle, AllocationMode new_mode) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::SetExpHeapAllocationMode(handle, new_mode);
+ }
+
+ bool GetExpHeapUseMarginsOfAlignment(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetExpHeapUseMarginsOfAlignment(handle);
+ }
+
+ bool SetExpHeapUseMarginsOfAlignment(HeapHandle handle, bool use_margins) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::SetExpHeapUseMarginsOfAlignment(handle, use_margins);
+ }
+
+ u16 GetExpHeapGroupId(HeapHandle handle) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::GetExpHeapGroupId(handle);
+ }
+
+ u16 SetExpHeapGroupId(HeapHandle handle, u16 group_id) {
+ impl::ScopedHeapLock lk(handle);
+ return impl::SetExpHeapGroupId(handle, group_id);
+ }
+
+ size_t GetExpHeapMemoryBlockSize(const void *memory_block) {
+ return impl::GetExpHeapMemoryBlockSize(memory_block);
+ }
+
+ u16 GetExpHeapMemoryBlockGroupId(const void *memory_block) {
+ return impl::GetExpHeapMemoryBlockGroupId(memory_block);
+ }
+
+ AllocationDirection GetExpHeapMemoryBlockAllocationDirection(const void *memory_block) {
+ return impl::GetExpHeapMemoryBlockAllocationDirection(memory_block);
+ }
+
+ void VisitExpHeapAllocatedBlocks(HeapHandle handle, HeapVisitor visitor, uintptr_t user_data) {
+ impl::ScopedHeapLock lk(handle);
+ impl::VisitExpHeapAllocatedBlocks(handle, visitor, user_data);
+ }
+
+}
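Note: GetExpHeapUseMarginsOfAlignment and SetExpHeapUseMarginsOfAlignment are declared in lmem_impl_exp_heap.hpp and forwarded to above, but their impl-side definitions are not visible in this hunk. A sketch of definitions consistent with ExpHeapHead::use_alignment_margins, which would live alongside the other definitions in lmem_impl_exp_heap.cpp where IsValidHeapHandle and GetExpHeapHead are in scope (an assumption, not necessarily the change's actual implementation):

    namespace ams::lmem::impl {

        bool GetExpHeapUseMarginsOfAlignment(HeapHandle handle) {
            AMS_ASSERT(IsValidHeapHandle(handle));

            /* Report whether leftover alignment margins are kept as free blocks. */
            return GetExpHeapHead(handle)->use_alignment_margins;
        }

        bool SetExpHeapUseMarginsOfAlignment(HeapHandle handle, bool use_margins) {
            AMS_ASSERT(IsValidHeapHandle(handle));

            /* Swap in the new setting, returning the previous one. */
            ExpHeapHead *exp_heap_head = GetExpHeapHead(handle);
            const bool old_value = exp_heap_head->use_alignment_margins;
            exp_heap_head->use_alignment_margins = use_margins;
            return old_value;
        }

    }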