/*
 * Copyright (c) Atmosphère-NX
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#pragma once
#include <vapours.hpp>
#include <mesosphere/arch/arm64/kern_cpu_system_registers.hpp>
#include <mesosphere/kern_select_userspace_memory_access.hpp>

namespace ams::kern::arch::arm64::cpu {

    #if defined(ATMOSPHERE_CPU_ARM_CORTEX_A57) || defined(ATMOSPHERE_CPU_ARM_CORTEX_A53)
    constexpr inline size_t InstructionCacheLineSize = 0x40;
    constexpr inline size_t DataCacheLineSize        = 0x40;
    constexpr inline size_t NumPerformanceCounters   = 6;
    #else
    #error "Unknown CPU for cache line sizes"
    #endif

    #if defined(ATMOSPHERE_BOARD_NINTENDO_NX)
    constexpr inline size_t NumCores = 4;
    #elif defined(ATMOSPHERE_BOARD_QEMU_VIRT)
    constexpr inline size_t NumCores = 4;
    #else
    #error "Unknown Board for cpu::NumCores"
    #endif

    constexpr inline u32 El0Aarch64PsrMask = 0xF0000000;
    constexpr inline u32 El0Aarch32PsrMask = 0xFE0FFE20;
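
    /* The masks above cover the PSTATE/CPSR bits that EL0 is permitted to
     * modify; for AArch64 that is just the NZCV flags (bits 31:28). */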

    /* Initialization. */
    NOINLINE void InitializeInterruptThreads(s32 core_id);

    /* Helpers for managing memory state. */
    ALWAYS_INLINE void DataSynchronizationBarrier() {
        __asm__ __volatile__("dsb sy" ::: "memory");
    }

    ALWAYS_INLINE void DataSynchronizationBarrierInnerShareable() {
        __asm__ __volatile__("dsb ish" ::: "memory");
    }

    ALWAYS_INLINE void DataSynchronizationBarrierInnerShareableStore() {
        __asm__ __volatile__("dsb ishst" ::: "memory");
    }

    ALWAYS_INLINE void DataMemoryBarrier() {
        __asm__ __volatile__("dmb sy" ::: "memory");
    }

    ALWAYS_INLINE void DataMemoryBarrierInnerShareable() {
        __asm__ __volatile__("dmb ish" ::: "memory");
    }

    ALWAYS_INLINE void DataMemoryBarrierInnerShareableStore() {
        __asm__ __volatile__("dmb ishst" ::: "memory");
    }

    ALWAYS_INLINE void InstructionMemoryBarrier() {
        __asm__ __volatile__("isb" ::: "memory");
    }

    ALWAYS_INLINE void EnsureInstructionConsistency() {
        DataSynchronizationBarrierInnerShareable();
        InstructionMemoryBarrier();
    }

    ALWAYS_INLINE void EnsureInstructionConsistencyFullSystem() {
        DataSynchronizationBarrier();
        InstructionMemoryBarrier();
    }
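
    /* Usage sketch (illustrative only; s_payload and s_ready are hypothetical
     * names): publishing data to another core orders the payload store before
     * the flag store with a store-only inner-shareable barrier:
     *
     *     s_payload = value;
     *     DataMemoryBarrierInnerShareableStore();
     *     s_ready = 1;
     */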

    ALWAYS_INLINE void Yield() {
        __asm__ __volatile__("yield" ::: "memory");
    }

    ALWAYS_INLINE void SwitchProcess(u64 ttbr, u32 proc_id) {
        SetTtbr0El1(ttbr);
        ContextIdRegisterAccessor(0).SetProcId(proc_id).Store();
        InstructionMemoryBarrier();
    }
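
    /* The trailing isb in SwitchProcess is a context synchronization event,
     * ensuring the new TTBR0_EL1/CONTEXTIDR_EL1 values take effect before any
     * subsequent translation or instruction fetch. */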

    /* Performance counter helpers. */
    ALWAYS_INLINE u64 GetCycleCounter() {
        return cpu::GetPmcCntrEl0();
    }

    ALWAYS_INLINE u32 GetPerformanceCounter(s32 n) {
        u64 counter = 0;
        if (n < static_cast<s32>(NumPerformanceCounters)) {
            switch (n) {
                case 0:
                    counter = cpu::GetPmevCntr0El0();
                    break;
                case 1:
                    counter = cpu::GetPmevCntr1El0();
                    break;
                case 2:
                    counter = cpu::GetPmevCntr2El0();
                    break;
                case 3:
                    counter = cpu::GetPmevCntr3El0();
                    break;
                case 4:
                    counter = cpu::GetPmevCntr4El0();
                    break;
                case 5:
                    counter = cpu::GetPmevCntr5El0();
                    break;
                default:
                    break;
            }
        }
        return static_cast<u32>(counter);
    }
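
    /* Usage sketch (illustrative; DoWork is a hypothetical stand-in), assuming
     * the cycle counter was previously enabled via the PMU control registers:
     *
     *     const u64 start = GetCycleCounter();
     *     DoWork();
     *     const u64 elapsed = GetCycleCounter() - start;
     */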

    /* Helper for address access. */
    ALWAYS_INLINE bool GetPhysicalAddressWritable(KPhysicalAddress *out, KVirtualAddress addr, bool privileged = false) {
        const uintptr_t va = GetInteger(addr);

        /* Perform a stage 1 translation for a write, as EL1 or as EL0. */
        if (privileged) {
            __asm__ __volatile__("at s1e1w, %[va]" :: [va]"r"(va) : "memory");
        } else {
            __asm__ __volatile__("at s1e0w, %[va]" :: [va]"r"(va) : "memory");
        }
        InstructionMemoryBarrier();

        u64 par = GetParEl1();

        /* PAR_EL1.F (bit 0) set means the translation aborted. */
        if (par & 0x1) {
            return false;
        }

        /* Combine PAR_EL1.PA (bits 47:12) with the page offset. */
        if (out) {
            *out = KPhysicalAddress((par & 0xFFFFFFFFF000ull) | (va & 0xFFFull));
        }
        return true;
    }

    ALWAYS_INLINE bool GetPhysicalAddressReadable(KPhysicalAddress *out, KVirtualAddress addr, bool privileged = false) {
        const uintptr_t va = GetInteger(addr);

        if (privileged) {
            __asm__ __volatile__("at s1e1r, %[va]" :: [va]"r"(va) : "memory");
        } else {
            __asm__ __volatile__("at s1e0r, %[va]" :: [va]"r"(va) : "memory");
        }
        InstructionMemoryBarrier();

        u64 par = GetParEl1();

        if (par & 0x1) {
            return false;
        }

        if (out) {
            *out = KPhysicalAddress((par & 0xFFFFFFFFF000ull) | (va & 0xFFFull));
        }
        return true;
    }
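
    /* Usage sketch (illustrative; user_address is a hypothetical argument):
     * probing a user pointer before the kernel dereferences it:
     *
     *     KPhysicalAddress phys_addr = Null<KPhysicalAddress>;
     *     if (!GetPhysicalAddressReadable(std::addressof(phys_addr), user_address)) {
     *         return svc::ResultInvalidCurrentMemory();
     *     }
     */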

    ALWAYS_INLINE bool CanAccessAtomic(KProcessAddress addr, bool privileged = false) {
        const uintptr_t va = GetInteger(addr);

        if (privileged) {
            __asm__ __volatile__("at s1e1w, %[va]" :: [va]"r"(va) : "memory");
        } else {
            __asm__ __volatile__("at s1e0w, %[va]" :: [va]"r"(va) : "memory");
        }
        InstructionMemoryBarrier();

        u64 par = GetParEl1();

        if (par & 0x1) {
            return false;
        }

        /* PAR_EL1.ATTR (bits 63:56) == 0xFF denotes Normal, Write-Back,
         * Read/Write-allocate memory, which supports exclusive accesses. */
        return (par >> (BITSIZEOF(par) - BITSIZEOF(u8))) == 0xFF;
    }
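
    /* The attribute check above matters because exclusive (ldaxr/stlxr)
     * accesses to memory that is not write-back cacheable are not guaranteed
     * to succeed, so callers verify the mapping before touching user atomics. */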

    ALWAYS_INLINE void StoreDataCacheForInitArguments(const void *addr, size_t size) {
        /* Clean (dc cvac) each data cache line in the range to the point of coherency. */
        const uintptr_t start = util::AlignDown(reinterpret_cast<uintptr_t>(addr), DataCacheLineSize);
        for (size_t stored = 0; stored < size; stored += cpu::DataCacheLineSize) {
            __asm__ __volatile__("dc cvac, %[cur]" :: [cur]"r"(start + stored) : "memory");
        }
        DataSynchronizationBarrier();
    }

    /* Synchronization helpers. */
    NOINLINE void SynchronizeAllCores();
    void SynchronizeCores(u64 core_mask);

    /* Cache management helpers. */
    void StoreCacheForInit(void *addr, size_t size);

    void FlushEntireDataCache();

    Result InvalidateDataCache(void *addr, size_t size);
    Result StoreDataCache(const void *addr, size_t size);
    Result FlushDataCache(const void *addr, size_t size);

    void InvalidateEntireInstructionCache();

    void ClearPageToZeroImpl(void *);
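
    /* Usage sketch (illustrative; code and code_size are hypothetical): making
     * freshly written instructions visible to the instruction stream follows a
     * clean-then-invalidate discipline:
     *
     *     R_TRY(StoreDataCache(code, code_size));
     *     InvalidateEntireInstructionCache();
     *     EnsureInstructionConsistency();
     */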

    ALWAYS_INLINE void ClearPageToZero(void * const page) {
        MESOSPHERE_ASSERT(util::IsAligned(reinterpret_cast<uintptr_t>(page), PageSize));
        MESOSPHERE_ASSERT(page != nullptr);

        ClearPageToZeroImpl(page);
    }
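
    /* ClearPageToZeroImpl is defined out of line (likely in assembly, e.g.
     * using dc zva); the alignment assert above keeps such a cache-line
     * granular fast path valid. */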

    ALWAYS_INLINE void InvalidateTlbByAsid(u32 asid) {
        /* TLBI ASIDE1IS takes the ASID in bits 63:48 of its operand. */
        const u64 value = (static_cast<u64>(asid) << 48);
        __asm__ __volatile__("tlbi aside1is, %[value]" :: [value]"r"(value) : "memory");
        EnsureInstructionConsistency();
    }

    ALWAYS_INLINE void InvalidateTlbByAsidAndVa(u32 asid, KProcessAddress virt_addr) {
        /* Note: ASIDE1IS decodes only the ASID field of its operand, so this
         * conservatively invalidates every entry for the ASID rather than just
         * the entry for virt_addr. */
        const u64 value = (static_cast<u64>(asid) << 48) | ((GetInteger(virt_addr) >> 12) & 0xFFFFFFFFFFFul);
        __asm__ __volatile__("tlbi aside1is, %[value]" :: [value]"r"(value) : "memory");
        EnsureInstructionConsistency();
    }

    ALWAYS_INLINE void InvalidateEntireTlb() {
        __asm__ __volatile__("tlbi vmalle1is" ::: "memory");
        EnsureInstructionConsistency();
    }

    ALWAYS_INLINE void InvalidateEntireTlbDataOnly() {
        __asm__ __volatile__("tlbi vmalle1is" ::: "memory");
        DataSynchronizationBarrierInnerShareable();
    }

    ALWAYS_INLINE void InvalidateTlbByVaDataOnly(KProcessAddress virt_addr) {
        const u64 value = ((GetInteger(virt_addr) >> 12) & 0xFFFFFFFFFFFul);
        __asm__ __volatile__("tlbi vaae1is, %[value]" :: [value]"r"(value) : "memory");
        DataSynchronizationBarrierInnerShareable();
    }
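
    /* The DataOnly variants above end with a dsb but no isb; they suffice when
     * no already-fetched instruction depends on the invalidated mappings. */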

    ALWAYS_INLINE uintptr_t GetCurrentThreadPointerValue() {
        /* x18 is reserved (the kernel is built with x18 fixed) to hold the
         * current thread pointer. */
        register uintptr_t x18 asm("x18");
        __asm__ __volatile__("" : [x18]"=r"(x18));
        return x18;
    }

    ALWAYS_INLINE void SetCurrentThreadPointerValue(uintptr_t value) {
        register uintptr_t x18 asm("x18") = value;
        __asm__ __volatile__("":: [x18]"r"(x18));
    }

    ALWAYS_INLINE void SetExceptionThreadStackTop(uintptr_t top) {
        /* cntv_cval_el0 is repurposed here as scratch storage for the
         * exception stack top. */
        cpu::SetCntvCvalEl0(top);
    }

    ALWAYS_INLINE void SwitchThreadLocalRegion(uintptr_t tlr) {
        cpu::SetTpidrRoEl0(tlr);
    }

}