
Merge pull request #11155 from liamwhite/memory3

memory: check page against address space size
liamwhite 2023-07-28 09:29:21 -04:00 committed by GitHub
commit 689dc4a17b
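The new AddressSpaceContains helper rejects two failure modes at once: accesses whose end address wraps past the top of the 64-bit range, and accesses that land beyond the guest address space. Below is a minimal standalone sketch of the same check, not yuzu code: the 39-bit width and the names Contains/kAddressSpaceBits are assumptions for illustration, whereas the real helper reads the width from the page table.

#include <cstdint>
#include <cstdio>

// Illustration only: mirrors the shape of the AddressSpaceContains helper added
// in the diff below. kAddressSpaceBits is an assumed width for this sketch.
constexpr std::uint64_t kAddressSpaceBits = 39;

bool Contains(std::uint64_t addr, std::uint64_t size) {
    const std::uint64_t max_addr = 1ULL << kAddressSpaceBits;
    // addr + size >= addr rules out unsigned wraparound of the end address;
    // addr + size <= max_addr keeps the whole range inside the address space.
    return addr + size >= addr && addr + size <= max_addr;
}

int main() {
    std::printf("%d\n", Contains(0x1000, 0x2000)); // 1: inside the space
    std::printf("%d\n", Contains(1ULL << 40, 16)); // 0: beyond a 39-bit space
    std::printf("%d\n", Contains(~0ULL - 8, 64));  // 0: end address wraps around
}

The hunks below apply this check before any page-table work in the block-walk, debug-region, cached-region, and pointer-lookup paths.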


@@ -24,6 +24,16 @@
 namespace Core::Memory {
 
+namespace {
+
+bool AddressSpaceContains(const Common::PageTable& table, const Common::ProcessAddress addr,
+                          const std::size_t size) {
+    const Common::ProcessAddress max_addr = 1ULL << table.GetAddressSpaceBits();
+    return addr + size >= addr && addr + size <= max_addr;
+}
+
+} // namespace
+
 // Implementation class used to keep the specifics of the memory subsystem hidden
 // from outside classes. This also allows modification to the internals of the memory
 // subsystem without needing to rebuild all files that make use of the memory interface.
@@ -191,6 +201,11 @@ struct Memory::Impl {
         std::size_t page_offset = addr & YUZU_PAGEMASK;
         bool user_accessible = true;
 
+        if (!AddressSpaceContains(page_table, addr, size)) [[unlikely]] {
+            on_unmapped(size, addr);
+            return false;
+        }
+
         while (remaining_size) {
             const std::size_t copy_amount =
                 std::min(static_cast<std::size_t>(YUZU_PAGESIZE) - page_offset, remaining_size);
@@ -421,7 +436,7 @@ struct Memory::Impl {
     }
 
     void MarkRegionDebug(u64 vaddr, u64 size, bool debug) {
-        if (vaddr == 0) {
+        if (vaddr == 0 || !AddressSpaceContains(*current_page_table, vaddr, size)) {
             return;
         }
@@ -478,7 +493,7 @@ struct Memory::Impl {
     }
 
    void RasterizerMarkRegionCached(u64 vaddr, u64 size, bool cached) {
-        if (vaddr == 0) {
+        if (vaddr == 0 || !AddressSpaceContains(*current_page_table, vaddr, size)) {
             return;
         }
@@ -615,7 +630,7 @@ struct Memory::Impl {
        // AARCH64 masks the upper 16 bit of all memory accesses
        vaddr = vaddr & 0xffffffffffffULL;
-       if (vaddr >= 1uLL << current_page_table->GetAddressSpaceBits()) {
+       if (!AddressSpaceContains(*current_page_table, vaddr, 1)) [[unlikely]] {
            on_unmapped();
            return nullptr;
        }
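The last hunk replaces the inline comparison against the address-space size with the shared helper in the pointer-lookup path. A standalone sketch of that flow, again not yuzu code: LookupPointer and the 39-bit width are hypothetical names and values chosen for illustration.

#include <cstdint>
#include <cstdio>

constexpr std::uint64_t kAddressSpaceBits = 39; // assumed width for illustration

bool AddressSpaceContains(std::uint64_t addr, std::uint64_t size) {
    const std::uint64_t max_addr = 1ULL << kAddressSpaceBits;
    return addr + size >= addr && addr + size <= max_addr;
}

// Hypothetical lookup: strip the AArch64 pointer tag, then validate the address
// before doing any page-table work, mirroring the pattern in the last hunk.
const void* LookupPointer(std::uint64_t vaddr) {
    vaddr &= 0xffffffffffffULL;            // AArch64 ignores the upper 16 bits
    if (!AddressSpaceContains(vaddr, 1)) { // out-of-range guest address
        std::puts("unmapped access");
        return nullptr;
    }
    // A real implementation would translate vaddr through the page table here.
    return reinterpret_cast<const void*>(vaddr);
}

int main() {
    LookupPointer(0xFFFF000000001000ULL); // tag stripped; address accepted
    LookupPointer(0x0000800000000000ULL); // 47-bit address rejected in a 39-bit space
}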