/* Source: Atmosphere/libraries/libmesosphere/source/kern_k_memory_layout.cpp
 * (mirror of https://github.com/Atmosphere-NX/Atmosphere.git)
 */

/*
* Copyright (c) 2018-2020 Atmosphère-NX
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <mesosphere.hpp>
namespace ams::kern {
2020-08-03 20:06:24 +01:00
namespace {
class KMemoryRegionAllocator {
NON_COPYABLE(KMemoryRegionAllocator);
NON_MOVEABLE(KMemoryRegionAllocator);
public:
static constexpr size_t MaxMemoryRegions = 1000;
private:
KMemoryRegion region_heap[MaxMemoryRegions];
size_t num_regions;
public:
constexpr ALWAYS_INLINE KMemoryRegionAllocator() : region_heap(), num_regions() { /* ... */ }
public:
template<typename... Args>
ALWAYS_INLINE KMemoryRegion *Allocate(Args&&... args) {
/* Ensure we stay within the bounds of our heap. */
MESOSPHERE_INIT_ABORT_UNLESS(this->num_regions < MaxMemoryRegions);
/* Create the new region. */
KMemoryRegion *region = std::addressof(this->region_heap[this->num_regions++]);
new (region) KMemoryRegion(std::forward<Args>(args)...);
return region;
}
};
constinit KMemoryRegionAllocator g_memory_region_allocator;
template<typename... Args>
ALWAYS_INLINE KMemoryRegion *AllocateRegion(Args&&... args) {
return g_memory_region_allocator.Allocate(std::forward<Args>(args)...);
}
}
void KMemoryRegionTree::InsertDirectly(uintptr_t address, uintptr_t last_address, u32 attr, u32 type_id) {
this->insert(*AllocateRegion(address, last_address, attr, type_id));
2020-08-03 20:06:24 +01:00
}
2020-02-05 22:16:56 +00:00
bool KMemoryRegionTree::Insert(uintptr_t address, size_t size, u32 type_id, u32 new_attr, u32 old_attr) {
/* Locate the memory region that contains the address. */
2020-08-03 20:06:24 +01:00
KMemoryRegion *found = this->FindModifiable(address);
/* We require that the old attr is correct. */
2020-08-03 20:06:24 +01:00
if (found->GetAttributes() != old_attr) {
return false;
}
2020-02-05 22:16:56 +00:00
/* We further require that the region can be split from the old region. */
const uintptr_t inserted_region_end = address + size;
const uintptr_t inserted_region_last = inserted_region_end - 1;
2020-08-03 20:06:24 +01:00
if (found->GetLastAddress() < inserted_region_last) {
return false;
}
/* Further, we require that the type id is a valid transformation. */
2020-08-03 20:06:24 +01:00
if (!found->CanDerive(type_id)) {
return false;
}
2020-02-05 22:16:56 +00:00
/* Cache information from the region before we remove it. */
2020-08-03 20:06:24 +01:00
const uintptr_t old_address = found->GetAddress();
const uintptr_t old_last = found->GetLastAddress();
2020-08-03 20:06:24 +01:00
const uintptr_t old_pair = found->GetPairAddress();
const u32 old_type = found->GetType();
2020-02-05 22:16:56 +00:00
/* Erase the existing region from the tree. */
2020-08-03 20:06:24 +01:00
this->erase(this->iterator_to(*found));
2020-08-03 20:06:24 +01:00
/* Insert the new region into the tree. */
if (old_address == address) {
/* Reuse the old object for the new region, if we can. */
found->Reset(address, inserted_region_last, old_pair, new_attr, type_id);
2020-08-03 20:06:24 +01:00
this->insert(*found);
} else {
/* If we can't re-use, adjust the old region. */
found->Reset(old_address, address - 1, old_pair, old_attr, old_type);
2020-08-03 20:06:24 +01:00
this->insert(*found);
/* Insert a new region for the split. */
const uintptr_t new_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (address - old_address) : old_pair;
this->insert(*AllocateRegion(address, inserted_region_last, new_pair, new_attr, type_id));
2020-08-03 20:06:24 +01:00
}
2020-02-05 22:16:56 +00:00
/* If we need to insert a region after the region, do so. */
if (old_last != inserted_region_last) {
const uintptr_t after_pair = (old_pair != std::numeric_limits<uintptr_t>::max()) ? old_pair + (inserted_region_end - old_address) : old_pair;
this->insert(*AllocateRegion(inserted_region_end, old_last, after_pair, old_attr, old_type));
}
return true;
}
2020-02-05 22:16:56 +00:00
KVirtualAddress KMemoryRegionTree::GetRandomAlignedRegion(size_t size, size_t alignment, u32 type_id) {
/* We want to find the total extents of the type id. */
const auto extents = this->GetDerivedRegionExtents(type_id);
/* Ensure that our alignment is correct. */
2020-02-06 09:05:35 +00:00
MESOSPHERE_INIT_ABORT_UNLESS(util::IsAligned(extents.GetAddress(), alignment));
2020-02-06 09:05:35 +00:00
const uintptr_t first_address = extents.GetAddress();
const uintptr_t last_address = extents.GetLastAddress();
const uintptr_t first_index = first_address / alignment;
const uintptr_t last_index = last_address / alignment;
while (true) {
const uintptr_t candidate = KSystemControl::Init::GenerateRandomRange(first_index, last_index) * alignment;
/* Ensure that the candidate doesn't overflow with the size. */
if (!(candidate < candidate + size)) {
continue;
}
const uintptr_t candidate_last = candidate + size - 1;
/* Ensure that the candidate fits within the region. */
if (candidate_last > last_address) {
continue;
}
2020-08-03 20:06:24 +01:00
/* Locate the candidate region, and ensure it fits and has the correct type id. */
if (const auto &candidate_region = *this->Find(candidate); !(candidate_last <= candidate_region.GetLastAddress() && candidate_region.GetType() == type_id)) {
continue;
}
return candidate;
}
}
2020-02-05 22:16:56 +00:00
void KMemoryLayout::InitializeLinearMemoryRegionTrees(KPhysicalAddress aligned_linear_phys_start, KVirtualAddress linear_virtual_start) {
/* Set static differences. */
s_linear_phys_to_virt_diff = GetInteger(linear_virtual_start) - GetInteger(aligned_linear_phys_start);
s_linear_virt_to_phys_diff = GetInteger(aligned_linear_phys_start) - GetInteger(linear_virtual_start);
/* Initialize linear trees. */
2020-02-05 22:16:56 +00:00
for (auto &region : GetPhysicalMemoryRegionTree()) {
2020-08-03 20:06:24 +01:00
if (region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
GetPhysicalLinearMemoryRegionTree().InsertDirectly(region.GetAddress(), region.GetLastAddress(), region.GetAttributes(), region.GetType());
}
}
2020-02-05 22:16:56 +00:00
for (auto &region : GetVirtualMemoryRegionTree()) {
2020-08-03 20:06:24 +01:00
if (region.IsDerivedFrom(KMemoryRegionType_Dram)) {
GetVirtualLinearMemoryRegionTree().InsertDirectly(region.GetAddress(), region.GetLastAddress(), region.GetAttributes(), region.GetType());
}
}
}
size_t KMemoryLayout::GetResourceRegionSizeForInit() {
    /* Start from the base kernel resource size; add the additional slab heap
     * when the system control layer wants an increased thread resource limit. */
    size_t resource_region_size = KernelResourceSize;
    if (KSystemControl::Init::ShouldIncreaseThreadResourceLimit()) {
        resource_region_size += KernelSlabHeapAdditionalSize;
    }

    /* 10.0.0 reduced the slab heap gaps by 64K; older firmware keeps the
     * deprecated (larger) gap size, so account for the difference. */
    if (kern::GetTargetFirmware() < ams::TargetFirmware_10_0_0) {
        resource_region_size += KernelSlabHeapGapsSizeDeprecated - KernelSlabHeapGapsSize;
    }

    return resource_region_size;
}
}