Mirror of https://github.com/Atmosphere-NX/Atmosphere.git
kern: implement KUserPointer (and test with QueryMemory) in advance of svc dev
This commit is contained in:
parent 9f9593e05f
commit efae01c165
17 changed files with 460 additions and 72 deletions
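The core of this change is the KUserPointer<T> wrapper (kern_svc_k_user_pointer.hpp below): SVC handlers receive user pointers wrapped as KUserPointer<T *> (output) or KUserPointer<const T *> (input) and can only move data through CopyTo/CopyFrom-style calls, which route through the fault-safe UserspaceAccess routines instead of dereferencing the raw pointer. The following stand-alone sketch models only that shape; it substitutes std::memcpy for the unprivileged copy routines, uses made-up names (KUserPointerSketch, MemoryInfoLike), and keeps just the const/non-const split, so it is an illustration of the idea rather than the kernel's actual implementation.

#include <cstddef>
#include <cstdio>
#include <cstring>
#include <type_traits>

/* Simplified stand-in for UserspaceAccess: the real routines use unprivileged
   loads/stores and report a faulting user address by returning false. */
struct FakeUserspaceAccess {
    static bool CopyMemoryFromUser(void *dst, const void *src, size_t size) { std::memcpy(dst, src, size); return true; }
    static bool CopyMemoryToUser(void *dst, const void *src, size_t size)   { std::memcpy(dst, src, size); return true; }
};

template<typename T, typename = void>
class KUserPointerSketch;

/* Input pointer: the kernel may only copy *from* user memory. */
template<typename T>
class KUserPointerSketch<T, std::enable_if_t<std::is_const_v<std::remove_pointer_t<T>>>> {
    private:
        T ptr;
    public:
        constexpr explicit KUserPointerSketch(T p) : ptr(p) {}
        bool CopyTo(std::remove_const_t<std::remove_pointer_t<T>> *out) const {
            return FakeUserspaceAccess::CopyMemoryFromUser(out, this->ptr, sizeof(*out));
        }
};

/* Output pointer: the kernel may only copy *to* user memory. */
template<typename T>
class KUserPointerSketch<T, std::enable_if_t<!std::is_const_v<std::remove_pointer_t<T>>>> {
    private:
        T ptr;
    public:
        constexpr explicit KUserPointerSketch(T p) : ptr(p) {}
        bool CopyFrom(const std::remove_pointer_t<T> *in) const {
            return FakeUserspaceAccess::CopyMemoryToUser(this->ptr, in, sizeof(*in));
        }
};

struct MemoryInfoLike { unsigned long addr; unsigned long size; };   /* hypothetical output struct */

int main() {
    MemoryInfoLike user_buffer{};                            /* stands in for a userspace destination */
    KUserPointerSketch<MemoryInfoLike *> out(&user_buffer);  /* what an SVC handler would receive */
    const MemoryInfoLike info{0x8000000, 0x1000};
    out.CopyFrom(&info);                                     /* the only way to write through the wrapper */
    std::printf("addr=0x%lx size=0x%lx\n", user_buffer.addr, user_buffer.size);
    return 0;
}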
@@ -16,7 +16,7 @@
#pragma once
#include <vapours.hpp>
#include <mesosphere/arch/arm64/kern_cpu_system_registers.hpp>
#include <mesosphere/arch/arm64/kern_userspace_memory_access.hpp>
#include <mesosphere/kern_select_userspace_memory_access.hpp>

namespace ams::kern::arch::arm64::cpu {
@@ -52,6 +52,10 @@ namespace ams::kern::arch::arm64 {
return this->page_table.SetMaxHeapSize(size);
}

Result QueryInfo(KMemoryInfo *out_info, ams::svc::PageInfo *out_page_info, KProcessAddress addr) const {
return this->page_table.QueryInfo(out_info, out_page_info, addr);
}

Result MapIo(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm) {
return this->page_table.MapIo(phys_addr, size, perm);
}
@@ -84,7 +88,7 @@ namespace ams::kern::arch::arm64 {
return this->page_table.GetPhysicalAddress(out, address);
}

bool CanContain(KProcessAddress addr, size_t size) const { return this->page_table.CanContain(addr, size); }
bool Contains(KProcessAddress addr, size_t size) const { return this->page_table.Contains(addr, size); }
bool CanContain(KProcessAddress addr, size_t size, KMemoryState state) const { return this->page_table.CanContain(addr, size, state); }

KProcessAddress GetAddressSpaceStart() const { return this->page_table.GetAddressSpaceStart(); }
@@ -18,13 +18,40 @@

namespace ams::kern::arch::arm64 {

void UserspaceMemoryAccessFunctionAreaBegin();
void UserspaceAccessFunctionAreaBegin();

bool StoreDataCache(uintptr_t start, uintptr_t end);
bool FlushDataCache(uintptr_t start, uintptr_t end);
bool InvalidateDataCache(uintptr_t start, uintptr_t end);
bool InvalidateInstructionCache(uintptr_t start, uintptr_t end);
class UserspaceAccess {
public:
static bool CopyMemoryFromUser(void *dst, const void *src, size_t size);
static bool CopyMemoryFromUserAligned32Bit(void *dst, const void *src, size_t size);
static bool CopyMemoryFromUserAligned64Bit(void *dst, const void *src, size_t size);
static bool CopyMemoryFromUserSize32Bit(void *dst, const void *src);
static s32 CopyStringFromUser(void *dst, const void *src, size_t size);

void UserspaceMemoryAccessFunctionAreaEnd();
static bool CopyMemoryToUser(void *dst, const void *src, size_t size);
static bool CopyMemoryToUserAligned32Bit(void *dst, const void *src, size_t size);
static bool CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size);
static bool CopyMemoryToUserSize32Bit(void *dst, const void *src);
static s32 CopyStringToUser(void *dst, const void *src, size_t size);

static bool ClearMemory(void *dst, size_t size);
static bool ClearMemoryAligned32Bit(void *dst, size_t size);
static bool ClearMemorySize32Bit(void *dst);

static bool StoreDataCache(uintptr_t start, uintptr_t end);
static bool FlushDataCache(uintptr_t start, uintptr_t end);
static bool InvalidateDataCache(uintptr_t start, uintptr_t end);
static bool InvalidateInstructionCache(uintptr_t start, uintptr_t end);

static bool ReadIoMemory32Bit(void *dst, const void *src, size_t size);
static bool ReadIoMemory16Bit(void *dst, const void *src, size_t size);
static bool ReadIoMemory8Bit(void *dst, const void *src, size_t size);
static bool WriteIoMemory32Bit(void *dst, const void *src, size_t size);
static bool WriteIoMemory16Bit(void *dst, const void *src, size_t size);
static bool WriteIoMemory8Bit(void *dst, const void *src, size_t size);
};

void UserspaceAccessFunctionAreaEnd();

}
@@ -221,6 +221,9 @@ namespace ams::kern {
constexpr ALWAYS_INLINE void Reset(T *o) {
KScopedAutoObject(o).Swap(*this);
}

constexpr ALWAYS_INLINE bool IsNull() const { return this->obj == nullptr; }
constexpr ALWAYS_INLINE bool IsNotNull() const { return this->obj != nullptr; }
};
@@ -26,6 +26,9 @@ namespace ams::kern {
return util::BitPack32{handle};
}

class KProcess;
class KThread;

class KHandleTable {
NON_COPYABLE(KHandleTable);
NON_MOVEABLE(KHandleTable);
@@ -125,6 +128,19 @@ namespace ams::kern {
template<typename T = KAutoObject>
ALWAYS_INLINE KScopedAutoObject<T> GetObject(ams::svc::Handle handle) const {
MESOSPHERE_ASSERT_THIS();

/* Handle pseudo-handles. */
if constexpr (std::is_same<T, KProcess>::value) {
if (handle == ams::svc::PseudoHandle::CurrentProcess) {
return GetCurrentProcessPointer();
}
} else if constexpr (std::is_same<T, KThread>::value) {
if (handle == ams::svc::PseudoHandle::CurrentThread) {
return GetCurrentThreadPointer();
}
}

/* Lock and look up in table. */
KScopedDisableDispatch dd;
KScopedSpinLock lk(this->lock);
@@ -139,6 +155,21 @@ namespace ams::kern {
ALWAYS_INLINE KScopedAutoObject<T> GetObjectForIpc(ams::svc::Handle handle) const {
static_assert(!std::is_base_of<KInterruptEvent, T>::value);

/* Handle pseudo-handles. */
if constexpr (std::is_same<T, KProcess>::value) {
if (handle == ams::svc::PseudoHandle::CurrentProcess) {
return GetCurrentProcessPointer();
}
} else if constexpr (std::is_same<T, KThread>::value) {
if (handle == ams::svc::PseudoHandle::CurrentThread) {
return GetCurrentThreadPointer();
}
}

/* Lock and look up in table. */
KScopedDisableDispatch dd;
KScopedSpinLock lk(this->lock);

KAutoObject *obj = this->GetObjectImpl(handle);
if (obj->DynamicCast<KInterruptEvent *>() != nullptr) {
return nullptr;
@@ -172,11 +172,11 @@ namespace ams::kern {
constexpr bool IsKernel() const { return this->is_kernel; }
constexpr bool IsAslrEnabled() const { return this->enable_aslr; }

constexpr bool CanContain(KProcessAddress addr) const {
constexpr bool Contains(KProcessAddress addr) const {
return this->address_space_start <= addr && addr <= this->address_space_end - 1;
}

constexpr bool CanContain(KProcessAddress addr, size_t size) const {
constexpr bool Contains(KProcessAddress addr, size_t size) const {
return this->address_space_start <= addr && addr < addr + size && addr + size - 1 <= this->address_space_end - 1;
}
@@ -249,6 +249,7 @@ namespace ams::kern {
Result SetProcessMemoryPermission(KProcessAddress addr, size_t size, ams::svc::MemoryPermission perm);
Result SetHeapSize(KProcessAddress *out, size_t size);
Result SetMaxHeapSize(size_t size);
Result QueryInfo(KMemoryInfo *out_info, ams::svc::PageInfo *out_page_info, KProcessAddress addr) const;
Result MapIo(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm);
Result MapStatic(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm);
Result MapRegion(KMemoryRegionType region_type, KMemoryPermission perm);
@@ -123,6 +123,8 @@ namespace ams::kern {

Result Initialize(const ams::svc::CreateProcessParameter &params, const KPageGroup &pg, const u32 *caps, s32 num_caps, KResourceLimit *res_limit, KMemoryManager::Pool pool);

constexpr const char *GetName() const { return this->name; }

constexpr u64 GetProcessId() const { return this->process_id; }

constexpr u64 GetCoreMask() const { return this->capabilities.GetCoreMask(); }
@@ -0,0 +1,31 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <mesosphere/kern_common.hpp>

#ifdef ATMOSPHERE_ARCH_ARM64
#include <mesosphere/arch/arm64/kern_userspace_memory_access.hpp>

namespace ams::kern {

using ams::kern::arch::arm64::UserspaceAccess;

}

#else
#error "Unknown architecture for CPU"
#endif
@@ -16,18 +16,187 @@
#pragma once
#include <vapours.hpp>
#include <mesosphere/svc/kern_svc_results.hpp>
#include <mesosphere/kern_select_userspace_memory_access.hpp>

namespace ams::kern::svc {

/* TODO: Actually implement this type. */
template<typename T>
struct KUserPointer : impl::KUserPointerTag {
namespace impl {

/* TODO: C++20
template<typename T>
concept Pointer = std::is_pointer<T>::value;

template<typename T>
concept NonConstPointer = Pointer<T> && !std::is_const<typename std::remove_pointer<T>::type>::value;

template<typename T>
concept ConstPointer = Pointer<T> && std::is_const<typename std::remove_pointer<T>::type>::value;

template<typename T, size_t N>
concept AlignedNPointer = Pointer<T> && alignof(typename std::remove_pointer<T>::type) >= N && util::IsAligned(sizeof(typename std::remove_pointer<T>::type), N);

template<typename T>
concept Aligned8Pointer = AlignedNPointer<T, sizeof(u8)>;

template<typename T>
concept Aligned16Pointer = AlignedNPointer<T, sizeof(u16)> && Aligned8<T>;

template<typename T>
concept Aligned32Pointer = AlignedNPointer<T, sizeof(u32)> && Aligned16<T>;

template<typename T>
concept Aligned64Pointer = AlignedNPointer<T, sizeof(u64)> && Aligned32<T>;
*/

template<typename T>
constexpr inline bool IsPointer = std::is_pointer<T>::value;

template<typename T>
constexpr inline bool IsConstPointer = IsPointer<T> && std::is_const<typename std::remove_pointer<T>::type>::value;

template<typename T>
constexpr inline bool IsNonConstPointer = IsPointer<T> && !std::is_const<typename std::remove_pointer<T>::type>::value;

template<typename T, size_t N>
constexpr inline bool IsAlignedNPointer = IsPointer<T> && alignof(typename std::remove_pointer<T>::type) >= N && util::IsAligned(sizeof(typename std::remove_pointer<T>::type), N);

template<typename _T, typename = void> /* requires Aligned8Pointer<_T> */
class KUserPointerImplTraits {
static_assert(IsAlignedNPointer<_T, sizeof(u8)>);
public:
using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
public:
static Result CopyFromUserspace(void *dst, const void *src, size_t size) {
R_UNLESS(UserspaceAccess::CopyMemoryFromUser(dst, src, size), svc::ResultInvalidPointer());
return ResultSuccess();
}

static Result CopyToUserspace(void *dst, const void *src, size_t size) {
R_UNLESS(UserspaceAccess::CopyMemoryToUser(dst, src, size), svc::ResultInvalidPointer());
return ResultSuccess();
}
};

template<typename _T> /* requires Aligned32Pointer<_T> */
class KUserPointerImplTraits<_T, typename std::enable_if<IsAlignedNPointer<_T, sizeof(u32)> && !IsAlignedNPointer<_T, sizeof(u64)>>::type> {
static_assert(IsAlignedNPointer<_T, sizeof(u32)>);
public:
using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
public:
static Result CopyFromUserspace(void *dst, const void *src, size_t size) {
R_UNLESS(UserspaceAccess::CopyMemoryFromUserAligned32Bit(dst, src, size), svc::ResultInvalidPointer());
return ResultSuccess();
}

static Result CopyToUserspace(void *dst, const void *src, size_t size) {
R_UNLESS(UserspaceAccess::CopyMemoryToUserAligned32Bit(dst, src, size), svc::ResultInvalidPointer());
return ResultSuccess();
}
};

template<typename _T> /* requires Aligned64Pointer<_T> */
class KUserPointerImplTraits<_T, typename std::enable_if<IsAlignedNPointer<_T, sizeof(u64)>>::type> {
static_assert(IsAlignedNPointer<_T, sizeof(u64)>);
public:
using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
public:
static Result CopyFromUserspace(void *dst, const void *src, size_t size) {
R_UNLESS(UserspaceAccess::CopyMemoryFromUserAligned64Bit(dst, src, size), svc::ResultInvalidPointer());
return ResultSuccess();
}

static Result CopyToUserspace(void *dst, const void *src, size_t size) {
R_UNLESS(UserspaceAccess::CopyMemoryToUserAligned64Bit(dst, src, size), svc::ResultInvalidPointer());
return ResultSuccess();
}
};

template<typename _T> /* requires Aligned8Pointer<_T> */
class KUserPointerImpl : impl::KUserPointerTag {
private:
using Traits = KUserPointerImplTraits<_T>;
protected:
using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
private:
_T *ptr;
private:
Result CopyToImpl(void *p, size_t size) const {
return Traits::CopyFromUserspace(p, this->ptr, size);
}

Result CopyFromImpl(const void *p, size_t size) const {
return Traits::CopyToUserspace(this->ptr, p, size);
}
protected:
Result CopyTo(T *p) const { return this->CopyToImpl(p, sizeof(*p)); }
Result CopyFrom(const T *p) const { return this->CopyFromImpl(p, sizeof(*p)); }

Result CopyArrayElementTo(T *p, size_t index) const { return Traits::CopyFromUserspace(p, this->ptr + index, sizeof(*p)); }
Result CopyArrayElementFrom(const T *p, size_t index) const { return Traits::CopyToUserspace(this->ptr + index, p, sizeof(*p)); }

Result CopyArrayTo(T *arr, size_t count) const { return this->CopyToImpl(arr, sizeof(*arr) * count); }
Result CopyArrayFrom(const T *arr, size_t count) const { return this->CopyFromImpl(arr, sizeof(*arr) * count); }

constexpr bool IsNull() const { return this->ptr == nullptr; }
};

template<>
class KUserPointerImpl<const char *> : impl::KUserPointerTag {
private:
using Traits = KUserPointerImplTraits<const char *>;
protected:
using T = char;
private:
const char *ptr;
protected:
Result CopyStringTo(char *dst, size_t size) const {
static_assert(sizeof(char) == 1);
R_UNLESS(UserspaceAccess::CopyStringFromUser(dst, this->ptr, size) > 0, svc::ResultInvalidPointer());
return ResultSuccess();
}

Result CopyArrayElementTo(char *dst, size_t index) const {
return Traits::CopyFromUserspace(dst, this->ptr + index, sizeof(*dst));
}

constexpr bool IsNull() const { return this->ptr == nullptr; }
};

}

template<typename T, typename = void>
class KUserPointer;

template<typename T> /* requires impl::ConstPointer<T> */
struct KUserPointer<T, typename std::enable_if<impl::IsConstPointer<T>>::type> : public impl::KUserPointerImpl<T> {
public:
static_assert(std::is_pointer<T>::value);
static constexpr bool IsInput = std::is_const<typename std::remove_pointer<T>::type>::value;
private:
T pointer;
static constexpr bool IsInput = true;
public:
using impl::KUserPointerImpl<T>::CopyTo;
using impl::KUserPointerImpl<T>::CopyArrayElementTo;
using impl::KUserPointerImpl<T>::CopyArrayTo;
using impl::KUserPointerImpl<T>::IsNull;
};

template<typename T> /* requires impl::NonConstPointer<T> */
struct KUserPointer<T, typename std::enable_if<impl::IsNonConstPointer<T>>::type> : public impl::KUserPointerImpl<T> {
public:
static constexpr bool IsInput = false;
public:
using impl::KUserPointerImpl<T>::CopyFrom;
using impl::KUserPointerImpl<T>::CopyArrayElementFrom;
using impl::KUserPointerImpl<T>::CopyArrayFrom;
using impl::KUserPointerImpl<T>::IsNull;
};

template<>
struct KUserPointer<const char *> : public impl::KUserPointerImpl<const char *> {
public:
static constexpr bool IsInput = true;
public:
using impl::KUserPointerImpl<const char *>::CopyStringTo;
using impl::KUserPointerImpl<const char *>::CopyArrayElementTo;
using impl::KUserPointerImpl<const char *>::IsNull;
};

}
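The KUserPointerImplTraits selection above keys purely off the pointee's alignment and size: a type that is 8-byte aligned with a size that is a multiple of 8 is copied through the Aligned64Bit routines, a 4-byte-aligned type through the Aligned32Bit routines, and everything else byte-wise. Below is a stand-alone approximation of that predicate; it uses a local IsAligned helper in place of util::IsAligned and made-up example structs, so it is illustrative rather than the kernel's own code.

#include <cstddef>
#include <cstdint>
#include <type_traits>

/* Local stand-in for util::IsAligned. */
constexpr bool IsAligned(size_t value, size_t align) { return (value % align) == 0; }

/* Mirrors the shape of impl::IsAlignedNPointer from the header above. */
template<typename T, size_t N>
constexpr bool IsAlignedNPointer = std::is_pointer<T>::value
                                && alignof(std::remove_pointer_t<T>) >= N
                                && IsAligned(sizeof(std::remove_pointer_t<T>), N);

struct MemoryInfoLike { uint64_t addr; uint64_t size; uint32_t state; uint32_t attr; };  /* 8-byte aligned, sizeof == 24 */
struct PackedLike     { uint8_t a; uint8_t b; uint8_t c; };                              /* 1-byte aligned, sizeof == 3  */

/* An 8-byte-aligned struct would select the Aligned64Bit traits... */
static_assert(IsAlignedNPointer<MemoryInfoLike *, sizeof(uint64_t)>);
/* ...while a byte-packed struct falls back to the plain byte-wise copies. */
static_assert(!IsAlignedNPointer<PackedLike *, sizeof(uint32_t)>);
static_assert(IsAlignedNPointer<PackedLike *, sizeof(uint8_t)>);

int main() { return 0; }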
@@ -294,7 +294,7 @@ namespace ams::kern::arch::arm64::cpu {
ALWAYS_INLINE Result InvalidateDataCacheRange(uintptr_t start, uintptr_t end) {
MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
R_UNLESS(arm64::InvalidateDataCache(start, end), svc::ResultInvalidCurrentMemory());
R_UNLESS(UserspaceAccess::InvalidateDataCache(start, end), svc::ResultInvalidCurrentMemory());
DataSynchronizationBarrier();
return ResultSuccess();
}
@@ -302,7 +302,7 @@ namespace ams::kern::arch::arm64::cpu {
ALWAYS_INLINE Result StoreDataCacheRange(uintptr_t start, uintptr_t end) {
MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
R_UNLESS(arm64::StoreDataCache(start, end), svc::ResultInvalidCurrentMemory());
R_UNLESS(UserspaceAccess::StoreDataCache(start, end), svc::ResultInvalidCurrentMemory());
DataSynchronizationBarrier();
return ResultSuccess();
}
@@ -310,7 +310,7 @@ namespace ams::kern::arch::arm64::cpu {
ALWAYS_INLINE Result FlushDataCacheRange(uintptr_t start, uintptr_t end) {
MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
R_UNLESS(arm64::FlushDataCache(start, end), svc::ResultInvalidCurrentMemory());
R_UNLESS(UserspaceAccess::FlushDataCache(start, end), svc::ResultInvalidCurrentMemory());
DataSynchronizationBarrier();
return ResultSuccess();
}
@@ -318,7 +318,7 @@ namespace ams::kern::arch::arm64::cpu {
ALWAYS_INLINE Result InvalidateInstructionCacheRange(uintptr_t start, uintptr_t end) {
MESOSPHERE_ASSERT(util::IsAligned(start, InstructionCacheLineSize));
MESOSPHERE_ASSERT(util::IsAligned(end, InstructionCacheLineSize));
R_UNLESS(arm64::InvalidateInstructionCache(start, end), svc::ResultInvalidCurrentMemory());
R_UNLESS(UserspaceAccess::InvalidateInstructionCache(start, end), svc::ResultInvalidCurrentMemory());
EnsureInstructionConsistency();
return ResultSuccess();
}
@@ -14,20 +14,58 @@
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/* ams::kern::arch::arm64::UserspaceMemoryAccessFunctionAreaBegin() */
.section .text._ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, "ax", %progbits
.global _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
.type _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, %function
_ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv:
/* ams::kern::arch::arm64::UserspaceAccessFunctionAreaBegin() */
.section .text._ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, "ax", %progbits
.global _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv
.type _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, %function
_ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv:
/* NOTE: This is not a real function, and only exists as a label for safety. */

/* ================ All Userspace Memory Functions after this line. ================ */
/* ================ All Userspace Access Functions after this line. ================ */

/* ams::kern::arch::arm64::StoreDataCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6414StoreDataCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6414StoreDataCacheEmm
.type _ZN3ams4kern4arch5arm6414StoreDataCacheEmm, %function
_ZN3ams4kern4arch5arm6414StoreDataCacheEmm:
/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size) */
.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm
.type _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, %function
_ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm:
/* Check if there are 0x40 bytes to copy */
cmp x2, #0x3F
b.ls 1f
ldp x4, x5, [x1, #0x00]
ldp x6, x7, [x1, #0x10]
ldp x8, x9, [x1, #0x20]
ldp x10, x11, [x1, #0x30]
sttr x4, [x0, #0x00]
sttr x5, [x0, #0x08]
sttr x6, [x0, #0x10]
sttr x7, [x0, #0x18]
sttr x8, [x0, #0x20]
sttr x9, [x0, #0x28]
sttr x10, [x0, #0x30]
sttr x11, [x0, #0x38]
add x0, x0, #0x40
add x1, x1, #0x40
sub x2, x2, #0x40
b _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm

1:  /* We have less than 0x40 bytes to copy. */
cmp x2, #0x0
b.eq 2f
ldr x4, [x1], #0x8
sttr x4, [x0]
add x0, x0, #0x8
sub x2, x2, #0x8
b 1b

2:  /* We're done. */
mov x0, #1
ret
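For readers not fluent in AArch64: CopyMemoryToUserAligned64Bit above copies 0x40-byte blocks with ldp/sttr pairs and then drains an 8-byte tail, where sttr is the unprivileged store, so a bad user address faults and is handled by the EL1 exception handler (the last hunk in this commit checks whether elr_el1 falls inside the UserspaceAccess function area). Below is a rough C++ rendering of the same loop structure, ignoring the fault-recovery path and assuming 8-byte-aligned pointers and a size that is a multiple of 8.

#include <cstddef>
#include <cstdint>

/* Illustrative only: plain stores here, whereas the assembly uses sttr so that a
   faulting user address unwinds through the exception handler instead of crashing. */
bool CopyMemoryToUserAligned64BitSketch(void *dst, const void *src, size_t size) {
    uint64_t *d = static_cast<uint64_t *>(dst);
    const uint64_t *s = static_cast<const uint64_t *>(src);
    while (size > 0x3F) {                             /* cmp x2, #0x3F; b.ls 1f */
        for (int i = 0; i < 8; ++i) { d[i] = s[i]; }  /* four ldp + eight sttr   */
        d += 8; s += 8; size -= 0x40;
    }
    while (size != 0) {                               /* 8-byte tail loop at label 1 */
        *d++ = *s++;
        size -= 8;
    }
    return true;                                      /* mov x0, #1; ret */
}

int main() { return 0; }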
/* ams::kern::arch::arm64::UserspaceAccess::StoreDataCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm
.type _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, %function
_ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm:
/* Check if we have any work to do. */
cmp x1, x0
b.eq 2f

@@ -42,11 +80,11 @@ _ZN3ams4kern4arch5arm6414StoreDataCacheEmm:
mov x0, #1
ret

/* ams::kern::arch::arm64::FlushDataCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6414FlushDataCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6414FlushDataCacheEmm
.type _ZN3ams4kern4arch5arm6414FlushDataCacheEmm, %function
_ZN3ams4kern4arch5arm6414FlushDataCacheEmm:
/* ams::kern::arch::arm64::UserspaceAccess::FlushDataCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm
.type _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, %function
_ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm:
/* Check if we have any work to do. */
cmp x1, x0
b.eq 2f

@@ -61,11 +99,11 @@ _ZN3ams4kern4arch5arm6414FlushDataCacheEmm:
mov x0, #1
ret

/* ams::kern::arch::arm64::InvalidateDataCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm
.type _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm, %function
_ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm:
/* ams::kern::arch::arm64::UserspaceAccess::InvalidateDataCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm
.type _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, %function
_ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm:
/* Check if we have any work to do. */
cmp x1, x0
b.eq 2f

@@ -80,11 +118,11 @@ _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm:
mov x0, #1
ret

/* ams::kern::arch::arm64::InvalidateInstructionCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm
.type _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm, %function
_ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm:
/* ams::kern::arch::arm64::UserspaceAccess::InvalidateInstructionCache(uintptr_t start, uintptr_t end) */
.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, "ax", %progbits
.global _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm
.type _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, %function
_ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm:
/* Check if we have any work to do. */
cmp x1, x0
b.eq 2f

@@ -99,11 +137,11 @@ _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm:
mov x0, #1
ret

/* ================ All Userspace Memory Functions before this line. ================ */
/* ================ All Userspace Access Functions before this line. ================ */

/* ams::kern::arch::arm64::UserspaceMemoryAccessFunctionAreaEnd() */
.section .text._ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv, "ax", %progbits
.global _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv
.type _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv, %function
_ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv:
/* ams::kern::arch::arm64::UserspaceAccessFunctionAreaEnd() */
.section .text._ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, "ax", %progbits
.global _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv
.type _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, %function
_ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv:
/* NOTE: This is not a real function, and only exists as a label for safety. */
@@ -765,6 +765,29 @@ namespace ams::kern {
return ResultSuccess();
}

Result KPageTableBase::QueryInfo(KMemoryInfo *out_info, ams::svc::PageInfo *out_page_info, KProcessAddress addr) const {
/* If the address is invalid, create a fake block. */
if (!this->Contains(addr, 1)) {
*out_info = {
.address = GetInteger(this->address_space_end),
.size = 0 - GetInteger(this->address_space_end),
.state = static_cast<KMemoryState>(ams::svc::MemoryState_Inaccessible),
.perm = KMemoryPermission_None,
.attribute = KMemoryAttribute_None,
.original_perm = KMemoryPermission_None,
.ipc_lock_count = 0,
.device_use_count = 0,
};
out_page_info->flags = 0;

return ResultSuccess();
}

/* Otherwise, lock the table and query. */
KScopedLightLock lk(this->general_lock);
return this->QueryInfoImpl(out_info, out_page_info, addr);
}

Result KPageTableBase::MapIo(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm) {
MESOSPHERE_ASSERT(util::IsAligned(GetInteger(phys_addr), PageSize));
MESOSPHERE_ASSERT(util::IsAligned(size, PageSize));
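One detail of QueryInfo worth spelling out: for an address outside the table, the fabricated block starts at address_space_end and its size is computed as 0 - address_space_end. Since the arithmetic is unsigned, that wraps around to exactly the number of bytes between the end of the address space and the top of the 64-bit range, so the fake Inaccessible block covers everything above the address space. A quick stand-alone check of that arithmetic (the example end address is made up for illustration):

#include <cstdint>
#include <cstdio>

int main() {
    /* Hypothetical 39-bit address space end, purely for illustration. */
    const uint64_t address_space_end = UINT64_C(1) << 39;
    const uint64_t fake_block_size   = 0 - address_space_end;   /* unsigned wraparound */
    /* fake_block_size == 2^64 - address_space_end, i.e. the block spans
       [address_space_end, 2^64). */
    std::printf("size = 0x%016llx\n", static_cast<unsigned long long>(fake_block_size));
    return 0;
}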
@@ -43,10 +43,6 @@ namespace ams::kern::svc {
MESOSPHERE_PANIC("Stubbed SvcUnmapMemory64 was called.");
}

Result QueryMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
MESOSPHERE_PANIC("Stubbed SvcQueryMemory64 was called.");
}

/* ============================= 64From32 ABI ============================= */

Result SetMemoryPermission64From32(ams::svc::Address address, ams::svc::Size size, ams::svc::MemoryPermission perm) {
@@ -65,8 +61,4 @@ namespace ams::kern::svc {
MESOSPHERE_PANIC("Stubbed SvcUnmapMemory64From32 was called.");
}

Result QueryMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
MESOSPHERE_PANIC("Stubbed SvcQueryMemory64From32 was called.");
}

}
@@ -39,10 +39,6 @@ namespace ams::kern::svc {
MESOSPHERE_PANIC("Stubbed SvcUnmapProcessMemory64 was called.");
}

Result QueryProcessMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64 was called.");
}

Result MapProcessCodeMemory64(ams::svc::Handle process_handle, uint64_t dst_address, uint64_t src_address, uint64_t size) {
MESOSPHERE_PANIC("Stubbed SvcMapProcessCodeMemory64 was called.");
}
@@ -65,10 +61,6 @@ namespace ams::kern::svc {
MESOSPHERE_PANIC("Stubbed SvcUnmapProcessMemory64From32 was called.");
}

Result QueryProcessMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64From32 was called.");
}

Result MapProcessCodeMemory64From32(ams::svc::Handle process_handle, uint64_t dst_address, uint64_t src_address, uint64_t size) {
MESOSPHERE_PANIC("Stubbed SvcMapProcessCodeMemory64From32 was called.");
}
libraries/libmesosphere/source/svc/kern_svc_query_memory.cpp (new file, 75 lines)
@@ -0,0 +1,75 @@
/*
 * Copyright (c) 2018-2020 Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#include <mesosphere.hpp>

namespace ams::kern::svc {

/* ============================= Common ============================= */

namespace {

Result QueryProcessMemory(ams::svc::MemoryInfo *out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uintptr_t address) {
MESOSPHERE_LOG("%s: QueryProcessMemory(0x%08x, 0x%zx) was called\n", GetCurrentProcess().GetName(), process_handle, address);

/* Get the process. */
KScopedAutoObject process = GetCurrentProcess().GetHandleTable().GetObject<KProcess>(process_handle);
R_UNLESS(process.IsNotNull(), svc::ResultInvalidHandle());

/* Query the mapping's info. */
KMemoryInfo info;
R_TRY(process->GetPageTable().QueryInfo(std::addressof(info), out_page_info, address));

/* Write output. */
*out_memory_info = info.GetSvcMemoryInfo();
return ResultSuccess();
}

Result QueryMemory(ams::svc::MemoryInfo *out_memory_info, ams::svc::PageInfo *out_page_info, uintptr_t address) {
/* Query memory is just QueryProcessMemory on the current process. */
return QueryProcessMemory(out_memory_info, out_page_info, ams::svc::PseudoHandle::CurrentProcess, address);
}

}

/* ============================= 64 ABI ============================= */

Result QueryMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
/* Get an ams::svc::MemoryInfo for the region. */
ams::svc::MemoryInfo info = {};
R_TRY(QueryMemory(std::addressof(info), out_page_info, address));

/* Try to copy to userspace. In the 64-bit case, ams::svc::lp64::MemoryInfo is the same as ams::svc::MemoryInfo. */
static_assert(sizeof(ams::svc::MemoryInfo) == sizeof(ams::svc::lp64::MemoryInfo));
R_TRY(out_memory_info.CopyFrom(std::addressof(info)));

return ResultSuccess();
}

Result QueryProcessMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64 was called.");
}

/* ============================= 64From32 ABI ============================= */

Result QueryMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
MESOSPHERE_PANIC("Stubbed SvcQueryMemory64From32 was called.");
}

Result QueryProcessMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64From32 was called.");
}

}
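For readers unfamiliar with the result macros used throughout these handlers: R_UNLESS(cond, result) returns the given result early when the condition fails, and R_TRY(expr) propagates a failing Result from a callee, which is why QueryMemory64 reads as a straight-line sequence of checks. Below is a rough stand-alone model of that control flow; Atmosphere's real Result type and macros are more involved, and the result value used here is made up.

#include <cstdio>

struct Result {
    int value;
    constexpr bool IsSuccess() const { return value == 0; }
};
constexpr Result ResultSuccess()       { return {0}; }
constexpr Result ResultInvalidHandle() { return {1}; }   /* placeholder value, not the real code */

/* Simplified stand-ins for the kernel's R_UNLESS / R_TRY macros. */
#define R_UNLESS(cond, res) do { if (!(cond)) { return (res); } } while (0)
#define R_TRY(expr)         do { const Result r_ = (expr); if (!r_.IsSuccess()) { return r_; } } while (0)

Result LookUpProcess(int handle) { return handle == 1 ? ResultSuccess() : ResultInvalidHandle(); }

Result QueryProcessMemoryLike(int process_handle) {
    R_UNLESS(process_handle != 0, ResultInvalidHandle());  /* reject an obviously bad handle */
    R_TRY(LookUpProcess(process_handle));                  /* propagate a failed lookup */
    return ResultSuccess();
}

int main() {
    std::printf("ok=%d bad=%d\n", QueryProcessMemoryLike(1).value, QueryProcessMemoryLike(2).value);
    return 0;
}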
@@ -30,7 +30,7 @@ namespace ams::svc {

static constexpr size_t MaxWaitSynchronizationHandleCount = 0x40;

enum class PseudoHandle : Handle {
enum PseudoHandle : Handle {
CurrentThread = 0xFFFF8000,
CurrentProcess = 0xFFFF8001,
};
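The PseudoHandle change from enum class to a plain enum with a fixed underlying type is presumably what lets the new handle-table code compare raw handles against PseudoHandle::CurrentProcess / CurrentThread directly: a scoped enum would not convert implicitly to Handle, while an unscoped enum still supports the qualified names. A minimal stand-alone illustration (Handle aliased to uint32_t here as an assumption):

#include <cstdint>

using Handle = uint32_t;

/* Unscoped enum with a fixed underlying type: enumerators are still usable as
   PseudoHandle::CurrentProcess, but they convert implicitly to Handle. */
enum PseudoHandle : Handle {
    CurrentThread  = 0xFFFF8000,
    CurrentProcess = 0xFFFF8001,
};

constexpr bool IsPseudoProcessHandle(Handle handle) {
    /* With `enum class PseudoHandle` this comparison would need a static_cast. */
    return handle == PseudoHandle::CurrentProcess;
}

static_assert(IsPseudoProcessHandle(0xFFFF8001));
static_assert(!IsPseudoProcessHandle(0x12345678));

int main() { return 0; }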
@@ -324,10 +324,10 @@ _ZN3ams4kern4arch5arm6430EL1SynchronousExceptionHandlerEv:

/* Data abort. Check if it was from trying to access userspace memory. */
mrs x1, elr_el1
adr x0, _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
adr x0, _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv
cmp x1, x0
b.lo 3f
adr x0, _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv
adr x0, _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv
cmp x1, x0
b.hs 3f