2018-01-06 21:15:25 +00:00
|
|
|
/* This file is part of the dynarmic project.
|
|
|
|
* Copyright (c) 2016 MerryMage
|
|
|
|
* This software may be used and distributed according to the terms of the GNU
|
|
|
|
* General Public License version 2 or any later version.
|
|
|
|
*/
|
|
|
|
|
2018-01-08 17:26:49 +00:00
|
|
|
#include <cstring>
|
2018-01-06 21:15:25 +00:00
|
|
|
#include <memory>
|
|
|
|
|
|
|
|
#include <boost/icl/interval_set.hpp>
|
|
|
|
|
|
|
|
#include "backend_x64/a64_emit_x64.h"
|
|
|
|
#include "backend_x64/a64_jitstate.h"
|
|
|
|
#include "backend_x64/block_of_code.h"
|
2018-01-12 17:31:21 +00:00
|
|
|
#include "backend_x64/devirtualize.h"
|
2018-01-06 21:15:25 +00:00
|
|
|
#include "backend_x64/jitstate_info.h"
|
|
|
|
#include "common/assert.h"
|
|
|
|
#include "common/scope_exit.h"
|
2018-02-18 11:20:43 +00:00
|
|
|
#include "common/llvm_disassemble.h"
|
2018-01-06 21:15:25 +00:00
|
|
|
#include "dynarmic/A64/a64.h"
|
|
|
|
#include "frontend/A64/translate/translate.h"
|
|
|
|
#include "frontend/ir/basic_block.h"
|
|
|
|
#include "ir_opt/passes.h"
|
|
|
|
|
2018-01-26 13:51:48 +00:00
|
|
|
namespace Dynarmic::A64 {
|
2018-01-06 21:15:25 +00:00
|
|
|
|
|
|
|
using namespace BackendX64;
|
|
|
|
|
|
|
|
// Assembles the trio of callbacks that BlockOfCode invokes from generated code:
// block lookup/dispatch, tick accounting, and remaining-tick queries.
static RunCodeCallbacks GenRunCodeCallbacks(A64::UserCallbacks* cb, CodePtr (*LookupBlock)(void* lookup_block_arg), void* arg) {
    // Dispatch callback: LookupBlock is called with `arg` (the Jit::Impl*) as its sole argument.
    auto lookup_block_cb = std::make_unique<ArgCallback>(LookupBlock, reinterpret_cast<u64>(arg));
    // Timing callbacks are devirtualized member-function thunks on the user's callback object.
    auto add_ticks_cb = std::make_unique<ArgCallback>(DEVIRT(cb, &A64::UserCallbacks::AddTicks));
    auto ticks_remaining_cb = std::make_unique<ArgCallback>(DEVIRT(cb, &A64::UserCallbacks::GetTicksRemaining));
    return RunCodeCallbacks{std::move(lookup_block_cb), std::move(add_ticks_cb), std::move(ticks_remaining_cb)};
}
|
|
|
|
|
|
|
|
struct Jit::Impl final {
|
|
|
|
public:
|
|
|
|
explicit Impl(UserConfig conf)
|
|
|
|
: conf(conf)
|
|
|
|
, block_of_code(GenRunCodeCallbacks(conf.callbacks, &GetCurrentBlockThunk, this), JitStateInfo{jit_state})
|
2018-02-03 14:28:57 +00:00
|
|
|
, emitter(block_of_code, conf)
|
2018-02-12 20:49:52 +00:00
|
|
|
{
|
|
|
|
ASSERT(conf.page_table_address_space_bits >= 12 && conf.page_table_address_space_bits <= 64);
|
|
|
|
}
|
2018-01-06 21:15:25 +00:00
|
|
|
|
|
|
|
~Impl() = default;
|
|
|
|
|
|
|
|
void Run() {
|
|
|
|
ASSERT(!is_executing);
|
|
|
|
is_executing = true;
|
2018-01-28 17:57:02 +00:00
|
|
|
SCOPE_EXIT { this->is_executing = false; };
|
2018-01-06 21:15:25 +00:00
|
|
|
jit_state.halt_requested = false;
|
|
|
|
|
2018-01-07 13:56:32 +00:00
|
|
|
// TODO: Check code alignment
|
2018-01-26 22:54:03 +00:00
|
|
|
|
|
|
|
const u32 new_rsb_ptr = (jit_state.rsb_ptr - 1) & A64JitState::RSBPtrMask;
|
|
|
|
if (jit_state.GetUniqueHash() == jit_state.rsb_location_descriptors[new_rsb_ptr]) {
|
|
|
|
jit_state.rsb_ptr = new_rsb_ptr;
|
|
|
|
block_of_code.RunCodeFrom(&jit_state, reinterpret_cast<CodePtr>(jit_state.rsb_codeptrs[new_rsb_ptr]));
|
|
|
|
} else {
|
|
|
|
block_of_code.RunCode(&jit_state);
|
|
|
|
}
|
2018-01-06 21:15:25 +00:00
|
|
|
|
|
|
|
PerformRequestedCacheInvalidation();
|
|
|
|
}
|
|
|
|
|
|
|
|
void ClearCache() {
|
|
|
|
invalidate_entire_cache = true;
|
|
|
|
RequestCacheInvalidation();
|
|
|
|
}
|
|
|
|
|
|
|
|
void InvalidateCacheRange(u64 start_address, size_t length) {
|
|
|
|
const auto end_address = static_cast<u64>(start_address + length - 1);
|
|
|
|
const auto range = boost::icl::discrete_interval<u64>::closed(start_address, end_address);
|
|
|
|
invalid_cache_ranges.add(range);
|
|
|
|
RequestCacheInvalidation();
|
|
|
|
}
|
|
|
|
|
|
|
|
void Reset() {
|
|
|
|
ASSERT(!is_executing);
|
|
|
|
jit_state = {};
|
|
|
|
}
|
|
|
|
|
|
|
|
void HaltExecution() {
|
|
|
|
jit_state.halt_requested = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
u64 GetSP() const {
|
|
|
|
return jit_state.sp;
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetSP(u64 value) {
|
|
|
|
jit_state.sp = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
u64 GetPC() const {
|
|
|
|
return jit_state.pc;
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetPC(u64 value) {
|
|
|
|
jit_state.pc = value;
|
|
|
|
}
|
|
|
|
|
|
|
|
u64 GetRegister(size_t index) const {
|
|
|
|
if (index == 31)
|
|
|
|
return GetSP();
|
|
|
|
return jit_state.reg.at(index);
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetRegister(size_t index, u64 value) {
|
|
|
|
if (index == 31)
|
|
|
|
return SetSP(value);
|
|
|
|
jit_state.reg.at(index) = value;
|
|
|
|
}
|
|
|
|
|
2018-01-08 17:26:49 +00:00
|
|
|
std::array<u64, 31> GetRegisters() const {
|
|
|
|
return jit_state.reg;
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetRegisters(const std::array<u64, 31>& value) {
|
|
|
|
jit_state.reg = value;
|
|
|
|
}
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
Vector GetVector(size_t index) const {
|
|
|
|
return {jit_state.vec.at(index * 2), jit_state.vec.at(index * 2 + 1)};
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetVector(size_t index, Vector value) {
|
2018-01-08 17:26:49 +00:00
|
|
|
jit_state.vec.at(index * 2) = value[0];
|
|
|
|
jit_state.vec.at(index * 2 + 1) = value[1];
|
|
|
|
}
|
|
|
|
|
2018-01-24 15:52:49 +00:00
|
|
|
std::array<Vector, 32> GetVectors() const {
|
|
|
|
std::array<Vector, 32> ret;
|
2018-01-08 17:26:49 +00:00
|
|
|
static_assert(sizeof(ret) == sizeof(jit_state.vec));
|
|
|
|
std::memcpy(ret.data(), jit_state.vec.data(), sizeof(jit_state.vec));
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2018-01-24 15:52:49 +00:00
|
|
|
void SetVectors(const std::array<Vector, 32>& value) {
|
2018-01-08 17:26:49 +00:00
|
|
|
static_assert(sizeof(value) == sizeof(jit_state.vec));
|
|
|
|
std::memcpy(jit_state.vec.data(), value.data(), sizeof(jit_state.vec));
|
2018-01-06 21:15:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
u32 GetFpcr() const {
|
|
|
|
return jit_state.GetFpcr();
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetFpcr(u32 value) {
|
|
|
|
jit_state.SetFpcr(value);
|
|
|
|
}
|
|
|
|
|
2018-02-20 17:38:29 +00:00
|
|
|
u32 GetFpsr() const {
|
|
|
|
return jit_state.GetFpsr();
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetFpsr(u32 value) {
|
|
|
|
jit_state.SetFpsr(value);
|
|
|
|
}
|
|
|
|
|
2018-01-07 14:46:35 +00:00
|
|
|
u32 GetPstate() const {
|
|
|
|
return jit_state.GetPstate();
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetPstate(u32 value) {
|
|
|
|
jit_state.SetPstate(value);
|
|
|
|
}
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
bool IsExecuting() const {
|
|
|
|
return is_executing;
|
|
|
|
}
|
|
|
|
|
2018-01-28 17:56:26 +00:00
|
|
|
std::string Disassemble() const {
|
2018-02-18 11:20:43 +00:00
|
|
|
return Common::DisassembleX64(block_of_code.GetCodeBegin(), block_of_code.getCurr());
|
2018-01-28 17:56:26 +00:00
|
|
|
}
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
private:
|
|
|
|
static CodePtr GetCurrentBlockThunk(void* thisptr) {
|
2018-05-01 17:34:53 +01:00
|
|
|
Jit::Impl* this_ = static_cast<Jit::Impl*>(thisptr);
|
2018-01-06 21:15:25 +00:00
|
|
|
return this_->GetCurrentBlock();
|
|
|
|
}
|
|
|
|
|
|
|
|
CodePtr GetCurrentBlock() {
|
|
|
|
IR::LocationDescriptor current_location{jit_state.GetUniqueHash()};
|
|
|
|
|
|
|
|
if (auto block = emitter.GetBasicBlock(current_location))
|
|
|
|
return block->entrypoint;
|
|
|
|
|
|
|
|
constexpr size_t MINIMUM_REMAINING_CODESIZE = 1 * 1024 * 1024;
|
|
|
|
if (block_of_code.SpaceRemaining() < MINIMUM_REMAINING_CODESIZE) {
|
|
|
|
// Immediately evacuate cache
|
|
|
|
invalidate_entire_cache = true;
|
|
|
|
PerformRequestedCacheInvalidation();
|
|
|
|
}
|
|
|
|
|
|
|
|
// JIT Compile
|
|
|
|
IR::Block ir_block = A64::Translate(A64::LocationDescriptor{current_location}, [this](u64 vaddr) { return conf.callbacks->MemoryReadCode(vaddr); });
|
2018-02-11 22:53:46 +00:00
|
|
|
Optimization::A64CallbackConfigPass(ir_block, conf);
|
2018-01-26 23:37:54 +00:00
|
|
|
Optimization::A64GetSetElimination(ir_block);
|
2018-01-06 21:15:25 +00:00
|
|
|
Optimization::DeadCodeElimination(ir_block);
|
2018-01-13 21:51:13 +00:00
|
|
|
Optimization::A64MergeInterpretBlocksPass(ir_block, conf.callbacks);
|
2018-01-07 12:52:12 +00:00
|
|
|
// printf("%s\n", IR::DumpBlock(ir_block).c_str());
|
2018-01-06 21:15:25 +00:00
|
|
|
Optimization::VerificationPass(ir_block);
|
|
|
|
return emitter.Emit(ir_block).entrypoint;
|
|
|
|
}
|
|
|
|
|
|
|
|
void RequestCacheInvalidation() {
|
|
|
|
if (is_executing) {
|
|
|
|
jit_state.halt_requested = true;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
PerformRequestedCacheInvalidation();
|
|
|
|
}
|
|
|
|
|
|
|
|
void PerformRequestedCacheInvalidation() {
|
|
|
|
if (!invalidate_entire_cache && invalid_cache_ranges.empty()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
jit_state.ResetRSB();
|
|
|
|
if (invalidate_entire_cache) {
|
|
|
|
block_of_code.ClearCache();
|
|
|
|
emitter.ClearCache();
|
|
|
|
} else {
|
|
|
|
emitter.InvalidateCacheRanges(invalid_cache_ranges);
|
|
|
|
}
|
|
|
|
invalid_cache_ranges.clear();
|
|
|
|
invalidate_entire_cache = false;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool is_executing = false;
|
|
|
|
|
|
|
|
UserConfig conf;
|
|
|
|
A64JitState jit_state;
|
|
|
|
BlockOfCode block_of_code;
|
|
|
|
A64EmitX64 emitter;
|
|
|
|
|
|
|
|
bool invalidate_entire_cache = false;
|
|
|
|
boost::icl::interval_set<u64> invalid_cache_ranges;
|
|
|
|
};
|
|
|
|
|
|
|
|
// Pimpl: all JIT state lives behind Jit::Impl.
Jit::Jit(UserConfig conf)
    : impl(std::make_unique<Jit::Impl>(conf)) {}

// Out-of-line so ~unique_ptr<Impl> is instantiated where Impl is a complete type.
Jit::~Jit() = default;
|
|
|
|
|
|
|
|
// Execution-control API: thin forwarders into the pimpl.

void Jit::Run() {
    impl->Run();
}

void Jit::ClearCache() {
    impl->ClearCache();
}

void Jit::InvalidateCacheRange(u64 start_address, size_t length) {
    impl->InvalidateCacheRange(start_address, length);
}

void Jit::Reset() {
    impl->Reset();
}

void Jit::HaltExecution() {
    impl->HaltExecution();
}
|
|
|
|
|
|
|
|
// Guest register/vector state accessors: thin forwarders into the pimpl.

u64 Jit::GetSP() const {
    return impl->GetSP();
}

void Jit::SetSP(u64 value) {
    impl->SetSP(value);
}

u64 Jit::GetPC() const {
    return impl->GetPC();
}

void Jit::SetPC(u64 value) {
    impl->SetPC(value);
}

// Note: the Impl treats index 31 as SP for these two accessors.
u64 Jit::GetRegister(size_t index) const {
    return impl->GetRegister(index);
}

void Jit::SetRegister(size_t index, u64 value) {
    impl->SetRegister(index, value);
}

std::array<u64, 31> Jit::GetRegisters() const {
    return impl->GetRegisters();
}

void Jit::SetRegisters(const std::array<u64, 31>& value) {
    impl->SetRegisters(value);
}

Vector Jit::GetVector(size_t index) const {
    return impl->GetVector(index);
}

void Jit::SetVector(size_t index, Vector value) {
    impl->SetVector(index, value);
}

std::array<Vector, 32> Jit::GetVectors() const {
    return impl->GetVectors();
}

void Jit::SetVectors(const std::array<Vector, 32>& value) {
    impl->SetVectors(value);
}
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
// Floating-point control/status, PSTATE and miscellaneous queries:
// thin forwarders into the pimpl.

u32 Jit::GetFpcr() const {
    return impl->GetFpcr();
}

void Jit::SetFpcr(u32 value) {
    impl->SetFpcr(value);
}

u32 Jit::GetFpsr() const {
    return impl->GetFpsr();
}

void Jit::SetFpsr(u32 value) {
    impl->SetFpsr(value);
}

u32 Jit::GetPstate() const {
    return impl->GetPstate();
}

void Jit::SetPstate(u32 value) {
    impl->SetPstate(value);
}

bool Jit::IsExecuting() const {
    return impl->IsExecuting();
}

std::string Jit::Disassemble() const {
    return impl->Disassemble();
}
|
|
|
|
|
2018-01-26 13:51:48 +00:00
|
|
|
} // namespace Dynarmic::A64
|