using ChocolArm64.Events;
using ChocolArm64.Translation;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;

namespace ChocolArm64.State
{
    public class CpuThreadState
    {
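        //Log2 of the exclusive reservation granule and DC ZVA block sizes,
        //in words (2^4 words = 64 bytes); consistent with the values exposed
        //through CtrEl0 and DczidEl0 below.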
        internal const int ErgSizeLog2 = 4;
        internal const int DczSizeLog2 = 4;

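        //Number of (weighted) instructions to execute between interrupt checks
        //in Synchronize.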
        private const int MinInstForCheck = 4000000;

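        //Guest integer (X) and SIMD/floating-point (V) register files.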
        public ulong X0, X1, X2, X3, X4, X5, X6, X7,
                     X8, X9, X10, X11, X12, X13, X14, X15,
                     X16, X17, X18, X19, X20, X21, X22, X23,
                     X24, X25, X26, X27, X28, X29, X30, X31;

        public Vector128<float> V0, V1, V2, V3, V4, V5, V6, V7,
                                V8, V9, V10, V11, V12, V13, V14, V15,
                                V16, V17, V18, V19, V20, V21, V22, V23,
                                V24, V25, V26, V27, V28, V29, V30, V31;

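        //Execution state: when Aarch32 is set the thread runs in AArch32 mode
        //(ARM or Thumb, depending on the Thumb flag), otherwise it runs in
        //AArch64 mode. See GetExecutionMode below.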
        public bool Aarch32;
        public bool Thumb;
        public bool BigEndian;

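        //NZCV condition flags, exposed in packed form through the Psr property.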
        public bool Overflow;
        public bool Carry;
        public bool Zero;
        public bool Negative;

        public int ElrHyp;

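        //While true the thread keeps executing guest code; Synchronize returns
        //this flag so translated code can stop when it is cleared.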
        public bool Running { get; set; }

        private bool _interrupted;

        private int _syncCount;

        public long TpidrEl0 { get; set; }
        public long Tpidr { get; set; }

        public int Fpcr { get; set; }
        public int Fpsr { get; set; }

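        //NZCV flags packed into their PSTATE bit positions.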
        public int Psr
        {
            get
            {
                return (Negative ? (int)PState.NMask : 0) |
                       (Zero     ? (int)PState.ZMask : 0) |
                       (Carry    ? (int)PState.CMask : 0) |
                       (Overflow ? (int)PState.VMask : 0);
            }
        }

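        //Values reported to the guest for CTR_EL0 and DCZID_EL0 (64-byte cache
        //lines, reservation granule and DC ZVA block size).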
        public uint CtrEl0 => 0x8444c004;
        public uint DczidEl0 => 0x00000004;

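        //Guest counter-timer: CntpctEl0 converts the elapsed host Stopwatch time
        //into guest ticks at the configured counter frequency (CntfrqEl0).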
        public ulong CntfrqEl0 { get; set; }
        public ulong CntpctEl0
        {
            get
            {
                double ticks = _tickCounter.ElapsedTicks * _hostTickFreq;

                return (ulong)(ticks * CntfrqEl0);
            }
        }

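        //Events used to signal interrupts, BRK, SVC calls and undefined
        //instructions back to the host (see CheckInterrupt and the On* methods below).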
        public event EventHandler<EventArgs> Interrupt;
        public event EventHandler<InstExceptionEventArgs> Break;
        public event EventHandler<InstExceptionEventArgs> SvcCall;
        public event EventHandler<InstUndefinedEventArgs> Undefined;

        private static Stopwatch _tickCounter;
        private static double _hostTickFreq;

        internal Translator CurrentTranslator;

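        //Local exclusive monitor state used by load/store exclusive instructions:
        //the reserved (granule-aligned) address and the values associated with
        //the current reservation.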
        private ulong _exclusiveAddress;

        internal ulong ExclusiveValueLow { get; set; }
        internal ulong ExclusiveValueHigh { get; set; }

        public CpuThreadState()
        {
            ClearExclusiveAddress();
        }

        static CpuThreadState()
        {
            _hostTickFreq = 1.0 / Stopwatch.Frequency;

            _tickCounter = new Stopwatch();

            _tickCounter.Start();
        }

        internal void SetExclusiveAddress(ulong address)
        {
            _exclusiveAddress = GetMaskedExclusiveAddress(address);
        }

        internal bool CheckExclusiveAddress(ulong address)
        {
            return GetMaskedExclusiveAddress(address) == _exclusiveAddress;
        }

        internal void ClearExclusiveAddress()
        {
            _exclusiveAddress = ulong.MaxValue;
        }

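        //The reservation granule is 2^ErgSizeLog2 words of 4 bytes (64 bytes);
        //exclusive addresses are compared granule-aligned.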
        private ulong GetMaskedExclusiveAddress(ulong address)
        {
            return address & ~((4UL << ErgSizeLog2) - 1);
        }

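        //Called from translated code; bbWeight is expected to reflect the number
        //of instructions in the executed block.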
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal bool Synchronize(int bbWeight)
        {
            //Firing an interrupt frequently is expensive, so we only
            //do it after a given number of instructions have executed.
            _syncCount += bbWeight;

            if (_syncCount >= MinInstForCheck)
            {
                CheckInterrupt();
            }

            return Running;
        }

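        //Flags a pending interrupt; it is delivered on the next interrupt check.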
        internal void RequestInterrupt()
        {
            _interrupted = true;
        }

        [MethodImpl(MethodImplOptions.NoInlining)]
        private void CheckInterrupt()
        {
            _syncCount = 0;

            if (_interrupted)
            {
                _interrupted = false;

                Interrupt?.Invoke(this, EventArgs.Empty);
            }
        }

        internal void OnBreak(long position, int imm)
        {
            Break?.Invoke(this, new InstExceptionEventArgs(position, imm));
        }

        internal void OnSvcCall(long position, int imm)
        {
            SvcCall?.Invoke(this, new InstExceptionEventArgs(position, imm));
        }

        internal void OnUndefined(long position, int rawOpCode)
        {
            Undefined?.Invoke(this, new InstUndefinedEventArgs(position, rawOpCode));
        }

        internal ExecutionMode GetExecutionMode()
        {
            if (!Aarch32)
            {
                return ExecutionMode.Aarch64;
            }
            else
            {
                return Thumb ? ExecutionMode.Aarch32Thumb : ExecutionMode.Aarch32Arm;
            }
        }

        internal bool GetFpcrFlag(Fpcr flag)
        {
            return (Fpcr & (1 << (int)flag)) != 0;
        }

        internal void SetFpsrFlag(Fpsr flag)
        {
            Fpsr |= 1 << (int)flag;
        }

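        //Extracts the rounding mode from the RMode field of FPCR.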
        internal RoundMode FPRoundingMode()
        {
            return (RoundMode)((Fpcr >> (int)State.Fpcr.RMode) & 3);
        }
    }
}