Mirror of https://github.com/Atmosphere-NX/Atmosphere.git

svc: revert codegen changes

Michael Scire 2020-03-10 05:57:18 -07:00
parent f556db8c89
commit 884844bc23


@@ -36,15 +36,6 @@ namespace ams::svc::codegen::impl {
        HANDLER(56, ## __VA_ARGS__); HANDLER(57, ## __VA_ARGS__); HANDLER(58, ## __VA_ARGS__); HANDLER(59, ## __VA_ARGS__); \
        HANDLER(60, ## __VA_ARGS__); HANDLER(61, ## __VA_ARGS__); HANDLER(62, ## __VA_ARGS__); HANDLER(63, ## __VA_ARGS__);
-    #define SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, ...) \
-        HANDLER( 0, ## __VA_ARGS__); HANDLER( 1, ## __VA_ARGS__); HANDLER( 2, ## __VA_ARGS__); HANDLER( 3, ## __VA_ARGS__); \
-        HANDLER( 4, ## __VA_ARGS__); HANDLER( 5, ## __VA_ARGS__); HANDLER( 6, ## __VA_ARGS__); HANDLER( 7, ## __VA_ARGS__);
-    #define SVC_CODEGEN_FOR_I_J_FROM_0_TO_8(HANDLER, ...) \
-        SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 0, ## __VA_ARGS__); SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 1, ## __VA_ARGS__); \
-        SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 2, ## __VA_ARGS__); SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 3, ## __VA_ARGS__); \
-        SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 4, ## __VA_ARGS__); SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 5, ## __VA_ARGS__); \
-        SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 6, ## __VA_ARGS__); SVC_CODEGEN_FOR_I_FROM_0_TO_8(HANDLER, 7, ## __VA_ARGS__);
    class Aarch64CodeGenerator {
        private:
@@ -195,50 +186,29 @@ namespace ams::svc::codegen::impl {
            template<size_t Dst, size_t Src>
            static ALWAYS_INLINE void MoveRegister() {
-                #define SVC_CODEGEN_HANDLER(dst) \
-                    if constexpr (dst == Dst) { \
-                        __asm__ __volatile__("mov x" #dst ", x%c[src]" :: [src]"i"(Src) : "x" #dst, "memory"); \
-                    }
-                SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER)
-                #undef SVC_CODEGEN_HANDLER
+                __asm__ __volatile__("mov x%c[dst], x%c[src]" :: [dst]"i"(Dst), [src]"i"(Src) : "memory");
            }
            template<size_t Reg, size_t Offset, size_t Size>
            static ALWAYS_INLINE void LoadFromStack() {
-                #define SVC_CODEGEN_HANDLER(reg, pfix) \
-                    if constexpr (reg == Reg) { \
-                        __asm__ __volatile__("ldr " #pfix #reg", [sp, %c[offset]]" :: [offset]"i"(Offset) : #pfix #reg, "memory"); \
-                    }
                if constexpr (Size == 4) {
-                    SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER, w)
+                    __asm__ __volatile__("ldr w%c[r], [sp, %c[offset]]" :: [r]"i"(Reg), [offset]"i"(Offset) : "memory");
                } else if constexpr (Size == 8) {
-                    SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER, r)
+                    __asm__ __volatile__("ldr x%c[r], [sp, %c[offset]]" :: [r]"i"(Reg), [offset]"i"(Offset) : "memory");
                } else {
                    static_assert(Size != Size);
                }
-                #undef SVC_CODEGEN_HANDLER
            }
            template<size_t Reg0, size_t Reg1, size_t Offset, size_t Size>
            static ALWAYS_INLINE void LoadPairFromStack() {
-                #define SVC_CODEGEN_HANDLER(r0, r1, pfix) \
-                    if constexpr (r0 == Reg0 && r1 == Reg1) { \
-                        __asm__ __volatile__("ldp " #pfix #r0 ", " #pfix #r1 ", [sp, %c[offset]]" :: [offset]"i"(Offset) : #pfix #r0, #pfix #r1, "memory"); \
-                    }
                if constexpr (Size == 4) {
-                    SVC_CODEGEN_FOR_I_J_FROM_0_TO_8(SVC_CODEGEN_HANDLER, w)
+                    __asm__ __volatile__("ldp w%c[r0], w%c[r1], [sp, %c[offset]]" :: [r0]"i"(Reg0), [r1]"i"(Reg1), [offset]"i"(Offset) : "memory");
                } else if constexpr (Size == 8) {
-                    SVC_CODEGEN_FOR_I_J_FROM_0_TO_8(SVC_CODEGEN_HANDLER, x)
+                    __asm__ __volatile__("ldp x%c[r0], x%c[r1], [sp, %c[offset]]" :: [r0]"i"(Reg0), [r1]"i"(Reg1), [offset]"i"(Offset) : "memory");
                } else {
                    static_assert(Size != Size);
                }
-                #undef SVC_CODEGEN_HANDLER
            }
            template<size_t Reg, size_t Offset, size_t Size>
@@ -265,14 +235,7 @@ namespace ams::svc::codegen::impl {
            template<size_t Dst, size_t Low, size_t High>
            static ALWAYS_INLINE void Pack() {
-                #define SVC_CODEGEN_HANDLER(dst) \
-                    if constexpr (dst == Dst) { \
-                        __asm__ __volatile__("orr x" #dst ", x%c[low], x%c[high], lsl#32" :: [low]"i"(Low), [high]"i"(High) : "x" #dst, "memory"); \
-                    }
-                SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER)
-                #undef SVC_CODEGEN_HANDLER
+                __asm__ __volatile__("orr x%c[dst], x%c[low], x%c[high], lsl #32" :: [dst]"i"(Dst), [low]"i"(Low), [high]"i"(High) : "memory");
            }
            template<size_t Low, size_t High, size_t Src>
@@ -281,38 +244,15 @@ namespace ams::svc::codegen::impl {
                    MoveRegister<Src, Low>();
                }
-                #define SVC_CODEGEN_HANDLER(h) \
-                    if constexpr (h == High) { \
-                        __asm__ __volatile__("lsr x" #h ", x%c[src], #32" :: [src]"i"(Src) : "x" #h, "memory"); \
-                    }
-                SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER)
-                #undef SVC_CODEGEN_HANDLER
+                __asm__ __volatile__("lsr x%c[high], x%c[src], #32" :: [high]"i"(High), [src]"i"(Src) : "memory");
            }
            template<size_t Dst, size_t Offset>
            static ALWAYS_INLINE void LoadStackAddress() {
                if constexpr (Offset > 0) {
-                    #define SVC_CODEGEN_HANDLER(dst) \
-                        if constexpr (dst == Dst) { \
-                            __asm__ __volatile__("add x" #dst ", sp, %c[offset]" :: [offset]"i"(Offset) : "x" #dst, "memory"); \
-                        }
-                    SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER)
-                    #undef SVC_CODEGEN_HANDLER
+                    __asm__ __volatile__("add x%c[dst], sp, %c[offset]" :: [dst]"i"(Dst), [offset]"i"(Offset) : "memory");
                } else if constexpr (Offset == 0) {
-                    #define SVC_CODEGEN_HANDLER(dst) \
-                        if constexpr (dst == Dst) { \
-                            __asm__ __volatile__("mov x" #dst ", sp" ::: "x" #dst, "memory"); \
-                        }
-                    SVC_CODEGEN_FOR_I_FROM_0_TO_8(SVC_CODEGEN_HANDLER)
-                    #undef SVC_CODEGEN_HANDLER
-                } else {
-                    static_assert(Offset != Offset);
+                    __asm__ __volatile__("mov x%c[dst], sp" :: [dst]"i"(Dst) : "memory");
                }
            }
    };
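
The restored form relies on GCC/Clang extended inline assembly: the register index is passed through an "i" (immediate) constraint and spliced into the instruction text with the %c operand modifier, which prints the constant without the usual immediate punctuation, so for example MoveRegister<3, 5>() assembles to "mov x3, x5". The removed form reached the same result by stringizing a literal index ("mov x" #dst ...) inside an if constexpr chain expanded by the SVC_CODEGEN_FOR_I_FROM_0_TO_8 helper macro. Below is a minimal standalone sketch of the constraint-based technique; it is not taken from the repository, it assumes an AArch64 target built with a GCC- or Clang-compatible compiler, and the names (MoveReg) are illustrative only.

// Sketch only (hypothetical, not repository code): demonstrates splicing a
// compile-time register number into AArch64 inline assembly via the "i"
// constraint and the %c operand modifier.
#include <cstddef>

template<std::size_t Dst, std::size_t Src>
inline void MoveReg() {
    static_assert(Dst < 8 && Src < 8, "sketch limited to argument registers x0-x7");
    // %c[dst] and %c[src] print the bare constants, so the template string
    // becomes e.g. "mov x3, x5" for MoveReg<3, 5>(). As in the restored
    // repository code, only "memory" is listed as a clobber; the surrounding
    // SVC codegen manages register contents explicitly.
    __asm__ __volatile__("mov x%c[dst], x%c[src]"
                         :: [dst]"i"(Dst), [src]"i"(Src)
                         : "memory");
}

// Usage: MoveReg<3, 5>();  // expected to emit: mov x3, x5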