2016-07-04 10:22:11 +01:00
|
|
|
/* This file is part of the dynarmic project.
|
|
|
|
* Copyright (c) 2016 MerryMage
|
|
|
|
* This software may be used and distributed according to the terms of the GNU
|
|
|
|
* General Public License version 2 or any later version.
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <algorithm>
#include <memory>
|
|
|
|
|
2017-12-05 21:34:40 +00:00
|
|
|
#include <boost/icl/interval_set.hpp>
|
2016-08-25 23:41:31 +01:00
|
|
|
#include <fmt/format.h>
|
|
|
|
|
2018-08-14 19:13:47 +01:00
|
|
|
#include "backend/x64/a32_emit_x64.h"
|
|
|
|
#include "backend/x64/a32_jitstate.h"
|
|
|
|
#include "backend/x64/block_of_code.h"
|
|
|
|
#include "backend/x64/callback.h"
|
|
|
|
#include "backend/x64/devirtualize.h"
|
|
|
|
#include "backend/x64/jitstate_info.h"
|
2016-07-04 10:22:11 +01:00
|
|
|
#include "common/assert.h"
|
|
|
|
#include "common/common_types.h"
|
2018-02-18 11:20:43 +00:00
|
|
|
#include "common/llvm_disassemble.h"
|
2016-07-04 10:22:11 +01:00
|
|
|
#include "common/scope_exit.h"
|
2018-01-04 21:12:02 +00:00
|
|
|
#include "dynarmic/A32/a32.h"
|
|
|
|
#include "dynarmic/A32/context.h"
|
2018-01-01 15:23:56 +00:00
|
|
|
#include "frontend/A32/translate/translate.h"
|
2016-09-03 21:48:03 +01:00
|
|
|
#include "frontend/ir/basic_block.h"
|
2016-09-05 11:54:09 +01:00
|
|
|
#include "frontend/ir/location_descriptor.h"
|
2016-07-21 21:48:45 +01:00
|
|
|
#include "ir_opt/passes.h"
|
2016-07-04 10:22:11 +01:00
|
|
|
|
2018-01-26 13:51:48 +00:00
|
|
|
namespace Dynarmic::A32 {
|
2016-07-04 10:22:11 +01:00
|
|
|
|
|
|
|
using namespace BackendX64;
|
|
|
|
|
2018-01-27 22:36:55 +00:00
|
|
|
/// Assembles the callback bundle the dispatcher invokes around generated code:
/// block lookup, tick accounting, and remaining-tick queries, all routed to the
/// user-supplied A32::UserCallbacks instance.
/// @param cb          User callback interface; AddTicks/GetTicksRemaining are devirtualized.
/// @param LookupBlock Resolver the dispatcher calls to find the next block of code.
/// @param arg         Opaque pointer passed through to LookupBlock.
static RunCodeCallbacks GenRunCodeCallbacks(A32::UserCallbacks* cb, CodePtr (*LookupBlock)(void* lookup_block_arg), void* arg) {
    auto lookup_block = std::make_unique<ArgCallback>(LookupBlock, reinterpret_cast<u64>(arg));
    auto add_ticks = std::make_unique<ArgCallback>(Devirtualize<&A32::UserCallbacks::AddTicks>(cb));
    auto get_ticks_remaining = std::make_unique<ArgCallback>(Devirtualize<&A32::UserCallbacks::GetTicksRemaining>(cb));
    return RunCodeCallbacks{std::move(lookup_block), std::move(add_ticks), std::move(get_ticks_remaining)};
}
|
|
|
|
|
2016-07-04 10:22:11 +01:00
|
|
|
/// Private implementation of Jit (pimpl). Owns the guest state, the code cache,
/// and the emitter, and coordinates cache invalidation with execution.
struct Jit::Impl {
    Impl(Jit* jit, A32::UserConfig config)
        : block_of_code(GenRunCodeCallbacks(config.callbacks, &GetCurrentBlock, this), JitStateInfo{jit_state})
        , emitter(block_of_code, config, jit)
        , config(config)
        , jit_interface(jit)
    {}

    A32JitState jit_state;       // Guest CPU state visible to generated code.
    BlockOfCode block_of_code;   // Executable code buffer + dispatcher.
    A32EmitX64 emitter;          // Translates IR blocks into host x64 code.

    const A32::UserConfig config;

    // Requests made during execution to invalidate the cache are queued up here.
    // The generation counter lets saved Contexts detect that their cached RSB
    // entries may be stale (see Jit::LoadContext).
    size_t invalid_cache_generation = 0;
    boost::icl::interval_set<u32> invalid_cache_ranges;
    bool invalidate_entire_cache = false;

    /// Enters generated code. Fast path: if the top RSB slot matches the current
    /// location hash, jump straight to its cached code pointer instead of going
    /// through the dispatcher.
    void Execute() {
        const u32 new_rsb_ptr = (jit_state.rsb_ptr - 1) & A32JitState::RSBPtrMask;
        if (jit_state.GetUniqueHash() == jit_state.rsb_location_descriptors[new_rsb_ptr]) {
            jit_state.rsb_ptr = new_rsb_ptr;
            block_of_code.RunCodeFrom(&jit_state, reinterpret_cast<CodePtr>(jit_state.rsb_codeptrs[new_rsb_ptr]));
        } else {
            block_of_code.RunCode(&jit_state);
        }
    }

    /// Returns a human-readable x64 disassembly of the compiled block for
    /// `descriptor`, compiling it first if necessary (via GetBasicBlock).
    std::string Disassemble(const IR::LocationDescriptor& descriptor) {
        auto block = GetBasicBlock(descriptor);
        std::string result = fmt::format("address: {}\nsize: {} bytes\n", block.entrypoint, block.size);
        result += Common::DisassembleX64(block.entrypoint, reinterpret_cast<const char*>(block.entrypoint) + block.size);
        return result;
    }

    /// Applies any queued invalidation. Must not run while generated code is
    /// executing (callers ensure this via RequestCacheInvalidation).
    void PerformCacheInvalidation() {
        if (invalidate_entire_cache) {
            jit_state.ResetRSB();
            block_of_code.ClearCache();
            emitter.ClearCache();

            invalid_cache_ranges.clear();
            invalidate_entire_cache = false;
            invalid_cache_generation++;
            return;
        }

        if (invalid_cache_ranges.empty()) {
            return;
        }

        jit_state.ResetRSB();
        emitter.InvalidateCacheRanges(invalid_cache_ranges);
        invalid_cache_ranges.clear();
        invalid_cache_generation++;
    }

    /// Invalidates now if the JIT is idle; otherwise asks the running code to
    /// halt so Jit::Run performs the invalidation after execution returns.
    void RequestCacheInvalidation() {
        if (jit_interface->is_executing) {
            jit_state.halt_requested = true;
            return;
        }

        PerformCacheInvalidation();
    }

private:
    Jit* jit_interface;  // Back-pointer to the public interface (non-owning).

    /// Dispatcher callback: compile (or fetch) the block at the current guest
    /// PC/CPSR/FPSCR and return its entry point. `this_voidptr` is the Impl*.
    static CodePtr GetCurrentBlock(void* this_voidptr) {
        Jit::Impl& this_ = *static_cast<Jit::Impl*>(this_voidptr);
        A32JitState& jit_state = this_.jit_state;

        u32 pc = jit_state.Reg[15];  // R15 is the guest program counter.
        A32::PSR cpsr{jit_state.Cpsr()};
        A32::FPSCR fpscr{jit_state.fpcr_mode};
        A32::LocationDescriptor descriptor{pc, cpsr, fpscr};

        return this_.GetBasicBlock(descriptor).entrypoint;
    }

    /// Returns the compiled block for `descriptor`, translating and emitting it
    /// on a cache miss. Flushes the entire cache first if the code buffer is
    /// nearly full, so emission always has room.
    A32EmitX64::BlockDescriptor GetBasicBlock(IR::LocationDescriptor descriptor) {
        auto block = emitter.GetBasicBlock(descriptor);
        if (block)
            return *block;

        constexpr size_t MINIMUM_REMAINING_CODESIZE = 1 * 1024 * 1024;
        if (block_of_code.SpaceRemaining() < MINIMUM_REMAINING_CODESIZE) {
            invalidate_entire_cache = true;
            PerformCacheInvalidation();
        }

        // Translate guest instructions to IR, then run the optimization
        // pipeline before emitting host code.
        IR::Block ir_block = A32::Translate(A32::LocationDescriptor{descriptor}, [this](u32 vaddr) { return config.callbacks->MemoryReadCode(vaddr); }, {config.define_unpredictable_behaviour});
        Optimization::A32GetSetElimination(ir_block);
        Optimization::DeadCodeElimination(ir_block);
        Optimization::A32ConstantMemoryReads(ir_block, config.callbacks);
        Optimization::ConstantPropagation(ir_block);
        Optimization::DeadCodeElimination(ir_block);
        Optimization::VerificationPass(ir_block);
        return emitter.Emit(ir_block);
    }
};
|
|
|
|
|
2018-01-27 22:36:55 +00:00
|
|
|
/// Constructs the JIT; all mutable state lives behind the pimpl.
Jit::Jit(UserConfig config) : impl(std::make_unique<Impl>(this, config)) {}

/// Defined out-of-line so the unique_ptr<Impl> destructor sees the complete Impl type.
Jit::~Jit() = default;
|
|
|
|
|
2018-01-04 21:12:02 +00:00
|
|
|
/// Runs guest code until it halts (ticks exhausted or halt requested).
/// Not reentrant: asserts if called while already executing.
void Jit::Run() {
    ASSERT(!is_executing);
    is_executing = true;
    // Ensure the flag is cleared even if execution throws.
    SCOPE_EXIT { this->is_executing = false; };

    // Clear any halt left over from a previous run/invalidation request.
    impl->jit_state.halt_requested = false;

    impl->Execute();

    // Invalidations requested during execution were queued; apply them now.
    impl->PerformCacheInvalidation();
}
|
|
|
|
|
2016-09-01 09:47:09 +01:00
|
|
|
/// Discards all compiled code. If the JIT is currently executing, the flush is
/// deferred until execution returns (via RequestCacheInvalidation).
void Jit::ClearCache() {
    // Flag must be set before the request so a deferred invalidation sees it.
    impl->invalidate_entire_cache = true;
    impl->RequestCacheInvalidation();
}
|
2016-09-02 10:58:37 +01:00
|
|
|
|
2017-02-16 18:18:29 +00:00
|
|
|
void Jit::InvalidateCacheRange(std::uint32_t start_address, std::size_t length) {
|
2017-12-07 20:26:46 +00:00
|
|
|
impl->invalid_cache_ranges.add(boost::icl::discrete_interval<u32>::closed(start_address, static_cast<u32>(start_address + length - 1)));
|
2017-09-11 00:09:52 +01:00
|
|
|
impl->RequestCacheInvalidation();
|
2016-07-04 10:22:11 +01:00
|
|
|
}
|
|
|
|
|
2016-08-09 22:45:54 +01:00
|
|
|
/// Resets all guest CPU state to power-on defaults. The code cache is kept.
/// Must not be called while executing.
void Jit::Reset() {
    ASSERT(!is_executing);
    impl->jit_state = {};
}
|
|
|
|
|
2016-07-04 10:22:11 +01:00
|
|
|
/// Asks the currently-running generated code to stop at the next check point.
/// Safe to call from a callback invoked during execution.
void Jit::HaltExecution() {
    impl->jit_state.halt_requested = true;
}
|
|
|
|
|
|
|
|
std::array<u32, 16>& Jit::Regs() {
|
|
|
|
return impl->jit_state.Reg;
|
|
|
|
}
|
2016-08-25 01:01:42 +01:00
|
|
|
const std::array<u32, 16>& Jit::Regs() const {
|
2016-07-04 10:22:11 +01:00
|
|
|
return impl->jit_state.Reg;
|
|
|
|
}
|
|
|
|
|
2016-08-05 18:54:19 +01:00
|
|
|
std::array<u32, 64>& Jit::ExtRegs() {
|
|
|
|
return impl->jit_state.ExtReg;
|
|
|
|
}
|
|
|
|
|
2016-08-25 01:01:42 +01:00
|
|
|
const std::array<u32, 64>& Jit::ExtRegs() const {
|
2016-08-05 18:54:19 +01:00
|
|
|
return impl->jit_state.ExtReg;
|
|
|
|
}
|
|
|
|
|
2017-12-02 13:55:04 +00:00
|
|
|
u32 Jit::Cpsr() const {
|
|
|
|
return impl->jit_state.Cpsr();
|
2016-07-04 10:22:11 +01:00
|
|
|
}
|
2016-08-05 18:54:19 +01:00
|
|
|
|
2017-12-02 13:55:04 +00:00
|
|
|
void Jit::SetCpsr(u32 value) {
|
|
|
|
return impl->jit_state.SetCpsr(value);
|
2016-07-04 10:22:11 +01:00
|
|
|
}
|
|
|
|
|
2016-08-05 18:54:19 +01:00
|
|
|
u32 Jit::Fpscr() const {
|
|
|
|
return impl->jit_state.Fpscr();
|
|
|
|
}
|
|
|
|
|
2017-12-02 13:55:04 +00:00
|
|
|
void Jit::SetFpscr(u32 value) {
|
2016-08-05 18:54:19 +01:00
|
|
|
return impl->jit_state.SetFpscr(value);
|
|
|
|
}
|
|
|
|
|
2017-12-03 18:25:40 +00:00
|
|
|
/// Convenience overload: snapshots the current guest state into a fresh
/// Context and returns it by value.
Context Jit::SaveContext() const {
    Context snapshot;
    this->SaveContext(snapshot);
    return snapshot;
}
|
|
|
|
|
|
|
|
/// Private implementation of Context: a snapshot of guest CPU state plus the
/// cache generation it was taken at, so LoadContext can tell whether the
/// snapshot's RSB entries may reference since-invalidated code.
struct Context::Impl {
    A32JitState jit_state;
    size_t invalid_cache_generation;
};
|
|
|
|
|
|
|
|
// Default-constructed contexts start with an empty (reset) RSB so they never
// point at stale code.
Context::Context() : impl(std::make_unique<Context::Impl>()) { impl->jit_state.ResetRSB(); }
Context::~Context() = default;
// Deep-copy / move through the pimpl; assignment reuses the existing Impl.
Context::Context(const Context& ctx) : impl(std::make_unique<Context::Impl>(*ctx.impl)) {}
Context::Context(Context&& ctx) noexcept : impl(std::move(ctx.impl)) {}
Context& Context::operator=(const Context& ctx) {
    *impl = *ctx.impl;
    return *this;
}
Context& Context::operator=(Context&& ctx) noexcept {
    impl = std::move(ctx.impl);
    return *this;
}
|
|
|
|
|
|
|
|
/// View and modify the snapshot's general-purpose registers.
std::array<std::uint32_t, 16>& Context::Regs() {
    return impl->jit_state.Reg;
}
const std::array<std::uint32_t, 16>& Context::Regs() const {
    return impl->jit_state.Reg;
}
/// View and modify the snapshot's extension registers.
std::array<std::uint32_t, 64>& Context::ExtRegs() {
    return impl->jit_state.ExtReg;
}
const std::array<std::uint32_t, 64>& Context::ExtRegs() const {
    return impl->jit_state.ExtReg;
}

/// View and modify CPSR.
std::uint32_t Context::Cpsr() const {
    return impl->jit_state.Cpsr();
}
void Context::SetCpsr(std::uint32_t value) {
    impl->jit_state.SetCpsr(value);
}

/// View and modify FPSCR.
std::uint32_t Context::Fpscr() const {
    return impl->jit_state.Fpscr();
}
void Context::SetFpscr(std::uint32_t value) {
    return impl->jit_state.SetFpscr(value);
}
|
|
|
|
|
2018-01-01 22:49:17 +00:00
|
|
|
/// Copies the guest-visible parts of one jit state into another.
/// The RSB (cached block-return predictions) is copied only when it is still
/// valid; otherwise it is reset so stale code pointers are never followed.
/// @param dest      State to overwrite.
/// @param src       State to copy from.
/// @param reset_rsb True if src's RSB may reference invalidated code.
void TransferJitState(A32JitState& dest, const A32JitState& src, bool reset_rsb) {
    // CPSR is stored split across several fields; copy each component.
    dest.cpsr_ge = src.cpsr_ge;
    dest.cpsr_et = src.cpsr_et;
    dest.cpsr_q = src.cpsr_q;
    dest.cpsr_nzcv = src.cpsr_nzcv;
    dest.cpsr_jaifm = src.cpsr_jaifm;
    dest.Reg = src.Reg;
    dest.ExtReg = src.ExtReg;
    dest.guest_MXCSR = src.guest_MXCSR;
    dest.fpcr_mode = src.fpcr_mode;
    dest.fpsr_nzcv = src.fpsr_nzcv;
    if (reset_rsb) {
        dest.ResetRSB();
    } else {
        dest.rsb_ptr = src.rsb_ptr;
        dest.rsb_location_descriptors = src.rsb_location_descriptors;
        dest.rsb_codeptrs = src.rsb_codeptrs;
    }
}
|
|
|
|
|
|
|
|
/// Snapshots the current guest state into ctx, including the RSB (valid at
/// save time) and the cache generation so a later load can detect staleness.
void Jit::SaveContext(Context& ctx) const {
    TransferJitState(ctx.impl->jit_state, impl->jit_state, false);
    ctx.impl->invalid_cache_generation = impl->invalid_cache_generation;
}
|
|
|
|
|
|
|
|
/// Restores a previously-saved guest state. If the code cache has been
/// invalidated since the snapshot was taken, its RSB entries may point at
/// freed code, so the RSB is reset instead of copied.
void Jit::LoadContext(const Context& ctx) {
    const bool rsb_is_stale = ctx.impl->invalid_cache_generation != impl->invalid_cache_generation;
    TransferJitState(impl->jit_state, ctx.impl->jit_state, rsb_is_stale);
}
|
|
|
|
|
2016-09-05 11:54:09 +01:00
|
|
|
/// Returns the host x64 disassembly of the compiled block for `descriptor`
/// (compiling it on demand); forwards to Impl::Disassemble.
std::string Jit::Disassemble(const IR::LocationDescriptor& descriptor) {
    return impl->Disassemble(descriptor);
}
|
|
|
|
|
2018-01-26 13:51:48 +00:00
|
|
|
} // namespace Dynarmic::A32
|