// dynarmic/src/backend/x64/emit_x64.cpp

/* This file is part of the dynarmic project.
 * Copyright (c) 2016 MerryMage
 * This software may be used and distributed according to the terms of the GNU
 * General Public License version 2 or any later version.
 */

#include <limits>
#include <optional>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>

#include "backend/x64/block_of_code.h"
#include "backend/x64/emit_x64.h"
#include "backend/x64/perf_map.h"
#include "common/assert.h"
#include "common/bit_util.h"
#include "common/common_types.h"
#include "common/scope_exit.h"
#include "common/variant_util.h"
#include "frontend/ir/basic_block.h"
#include "frontend/ir/microinstruction.h"
#include "frontend/ir/opcodes.h"
// TODO: Have ARM flags in host flags and not have them use up GPR registers unless necessary.
// TODO: Actually implement that proper instruction selector you've always wanted to sweetheart.

namespace Dynarmic::BackendX64 {

using namespace Xbyak::util;

EmitContext::EmitContext(RegAlloc& reg_alloc, IR::Block& block)
    : reg_alloc(reg_alloc), block(block) {}

void EmitContext::EraseInstruction(IR::Inst* inst) {
    block.Instructions().erase(inst);
    inst->ClearArgs();
}

EmitX64::EmitX64(BlockOfCode& code)
    : code(code) {}

EmitX64::~EmitX64() = default;

std::optional<EmitX64::BlockDescriptor> EmitX64::GetBasicBlock(IR::LocationDescriptor descriptor) const {
    const auto iter = block_descriptors.find(descriptor);
    if (iter == block_descriptors.end()) {
        return std::nullopt;
    }
    return iter->second;
}

void EmitX64::EmitVoid(EmitContext&, IR::Inst*) {
}

void EmitX64::EmitBreakpoint(EmitContext&, IR::Inst*) {
    code.int3();
}

void EmitX64::EmitIdentity(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    if (!args[0].IsImmediate()) {
        ctx.reg_alloc.DefineValue(inst, args[0]);
    }
}

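// The return stack buffer (RSB) is a small ring buffer of (location descriptor,
// host code pointer) pairs kept in the JIT state. Call-like terminals push an
// entry here so the dispatcher can later predict a return target and jump
// straight to already-compiled host code instead of doing a full block lookup.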
void EmitX64::PushRSBHelper(Xbyak::Reg64 loc_desc_reg, Xbyak::Reg64 index_reg, IR::LocationDescriptor target) {
    using namespace Xbyak::util;

    const auto iter = block_descriptors.find(target);
    CodePtr target_code_ptr = iter != block_descriptors.end()
                              ? iter->second.entrypoint
                              : code.GetReturnFromRunCodeAddress();

    code.mov(index_reg.cvt32(), dword[r15 + code.GetJitStateInfo().offsetof_rsb_ptr]);

    code.mov(loc_desc_reg, target.Value());

    // Record this site so Patch()/Unpatch() can repoint the mov rcx, imm64
    // once the target block is (re)compiled or invalidated.
    patch_information[target].mov_rcx.emplace_back(code.getCurr());
    EmitPatchMovRcx(target_code_ptr);

    code.mov(qword[r15 + index_reg * 8 + code.GetJitStateInfo().offsetof_rsb_location_descriptors], loc_desc_reg);
    code.mov(qword[r15 + index_reg * 8 + code.GetJitStateInfo().offsetof_rsb_codeptrs], rcx);

    // Advance the ring-buffer write pointer, wrapping via the mask.
    code.add(index_reg.cvt32(), 1);
    code.and_(index_reg.cvt32(), u32(code.GetJitStateInfo().rsb_ptr_mask));
    code.mov(dword[r15 + code.GetJitStateInfo().offsetof_rsb_ptr], index_reg.cvt32());
}

void EmitX64::EmitPushRSB(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    ASSERT(args[0].IsImmediate());
    const u64 unique_hash_of_target = args[0].GetImmediateU64();

    ctx.reg_alloc.ScratchGpr({HostLoc::RCX});
    const Xbyak::Reg64 loc_desc_reg = ctx.reg_alloc.ScratchGpr();
    const Xbyak::Reg64 index_reg = ctx.reg_alloc.ScratchGpr();

    PushRSBHelper(loc_desc_reg, index_reg, IR::LocationDescriptor{unique_hash_of_target});
}

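// GetCarryFromOp, GetOverflowFromOp, etc. are pseudo-ops: they are consumed
// inline by the emitter of the instruction that produced the flags, so control
// should never reach these stubs. Hitting one indicates an emitter bug.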
void EmitX64::EmitGetCarryFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}

void EmitX64::EmitGetOverflowFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}

void EmitX64::EmitGetGEFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}

void EmitX64::EmitGetUpperFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}

void EmitX64::EmitGetLowerFromOp(EmitContext&, IR::Inst*) {
    ASSERT_MSG(false, "should never happen");
}

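// The NZCV value produced below uses the x64 lahf/seto layout: after the cmp,
// lahf copies SF:ZF:0:AF:0:PF:1:CF into AH and seto writes OF into AL, so
// N lands in bit 15, Z in bit 14, C in bit 8 and V in bit 0 of AX.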
void EmitX64::EmitGetNZCVFromOp(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);

    const int bitsize = [&]{
        switch (args[0].GetType()) {
        case IR::Type::U8:
            return 8;
        case IR::Type::U16:
            return 16;
        case IR::Type::U32:
            return 32;
        case IR::Type::U64:
            return 64;
        default:
            UNREACHABLE();
            return 0;
        }
    }();

    const Xbyak::Reg64 nzcv = ctx.reg_alloc.ScratchGpr({HostLoc::RAX});
    const Xbyak::Reg value = ctx.reg_alloc.UseGpr(args[0]).changeBit(bitsize);
    code.cmp(value, 0);
    code.lahf();
    code.seto(code.al);
    ctx.reg_alloc.DefineValue(inst, nzcv);
}

void EmitX64::EmitNZCVFromPackedFlags(EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);

    if (args[0].IsImmediate()) {
        const Xbyak::Reg32 nzcv = ctx.reg_alloc.ScratchGpr().cvt32();
        u32 value = 0;
        value |= Common::Bit<31>(args[0].GetImmediateU32()) ? (1 << 15) : 0;
        value |= Common::Bit<30>(args[0].GetImmediateU32()) ? (1 << 14) : 0;
        value |= Common::Bit<29>(args[0].GetImmediateU32()) ? (1 << 8) : 0;
        value |= Common::Bit<28>(args[0].GetImmediateU32()) ? (1 << 0) : 0;
        code.mov(nzcv, value);
        ctx.reg_alloc.DefineValue(inst, nzcv);
    } else {
        const Xbyak::Reg32 nzcv = ctx.reg_alloc.UseScratchGpr(args[0]).cvt32();
        // TODO: Optimize
        // Bring NZCV down to bits 3..0, then use a multiply to scatter the four
        // bits into the lahf/seto layout: 0x1081 = (1 << 12) | (1 << 7) | 1
        // maps N (bit 3) to bit 15, Z (bit 2) to bit 14, C (bit 1) to bit 8 and
        // V (bit 0) to bit 0. Stray product bits land only in positions that
        // consumers of this format ignore; the and_ below cleans up the low byte.
        code.shr(nzcv, 28);
        code.imul(nzcv, nzcv, 0b00010000'10000001);
        code.and_(nzcv.cvt8(), 1);
        ctx.reg_alloc.DefineValue(inst, nzcv);
    }
}

void EmitX64::EmitAddCycles(size_t cycles) {
    ASSERT(cycles < std::numeric_limits<u32>::max());
    code.sub(qword[r15 + code.GetJitStateInfo().offsetof_cycles_remaining], static_cast<u32>(cycles));
}

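// Emits a compare-and-branch on the emulated NZCV flags. The returned label is
// the branch target for the case where `cond` holds; fallthrough means the
// condition failed. The caller is responsible for binding the label.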
Xbyak::Label EmitX64::EmitCond(IR::Cond cond) {
    Xbyak::Label label;

    const Xbyak::Reg32 cpsr = eax;
    code.mov(cpsr, dword[r15 + code.GetJitStateInfo().offsetof_cpsr_nzcv]);

    constexpr size_t n_shift = 31;
    constexpr size_t z_shift = 30;
    constexpr size_t c_shift = 29;
    constexpr size_t v_shift = 28;
    constexpr u32 n_mask = 1u << n_shift;
    constexpr u32 z_mask = 1u << z_shift;
    constexpr u32 c_mask = 1u << c_shift;
    constexpr u32 v_mask = 1u << v_shift;

    switch (cond) {
    case IR::Cond::EQ: //z
        code.test(cpsr, z_mask);
        code.jnz(label);
        break;
    case IR::Cond::NE: //!z
        code.test(cpsr, z_mask);
        code.jz(label);
        break;
    case IR::Cond::CS: //c
        code.test(cpsr, c_mask);
        code.jnz(label);
        break;
    case IR::Cond::CC: //!c
        code.test(cpsr, c_mask);
        code.jz(label);
        break;
    case IR::Cond::MI: //n
        code.test(cpsr, n_mask);
        code.jnz(label);
        break;
    case IR::Cond::PL: //!n
        code.test(cpsr, n_mask);
        code.jz(label);
        break;
    case IR::Cond::VS: //v
        code.test(cpsr, v_mask);
        code.jnz(label);
        break;
    case IR::Cond::VC: //!v
        code.test(cpsr, v_mask);
        code.jz(label);
        break;
    case IR::Cond::HI: { //c & !z
        code.and_(cpsr, z_mask | c_mask);
        code.cmp(cpsr, c_mask);
        code.je(label);
        break;
    }
    case IR::Cond::LS: { //!c | z
        code.and_(cpsr, z_mask | c_mask);
        code.cmp(cpsr, c_mask);
        code.jne(label);
        break;
    }
    case IR::Cond::GE: { // n == v
        // n == v holds iff the two flag bits are both clear or both set.
        code.and_(cpsr, n_mask | v_mask);
        code.jz(label);
        code.cmp(cpsr, n_mask | v_mask);
        code.je(label);
        break;
    }
    case IR::Cond::LT: { // n != v
        Xbyak::Label fail;
        code.and_(cpsr, n_mask | v_mask);
        code.jz(fail);
        code.cmp(cpsr, n_mask | v_mask);
        code.jne(label);
        code.L(fail);
        break;
    }
    case IR::Cond::GT: { // !z & (n == v)
        // Shift n, v and z down to bit 0, then compute (n ^ v) | z there:
        // GT holds when that bit is clear.
        const Xbyak::Reg32 tmp1 = ebx;
        const Xbyak::Reg32 tmp2 = esi;
        code.mov(tmp1, cpsr);
        code.mov(tmp2, cpsr);
        code.shr(tmp1, n_shift);
        code.shr(tmp2, v_shift);
        code.shr(cpsr, z_shift);
        code.xor_(tmp1, tmp2);
        code.or_(tmp1, cpsr);
        code.test(tmp1, 1);
        code.jz(label);
        break;
    }
    case IR::Cond::LE: { // z | (n != v)
        // Same bit juggling as GT; LE holds when (n ^ v) | z is set.
        const Xbyak::Reg32 tmp1 = ebx;
        const Xbyak::Reg32 tmp2 = esi;
        code.mov(tmp1, cpsr);
        code.mov(tmp2, cpsr);
        code.shr(tmp1, n_shift);
        code.shr(tmp2, v_shift);
        code.shr(cpsr, z_shift);
        code.xor_(tmp1, tmp2);
        code.or_(tmp1, cpsr);
        code.test(tmp1, 1);
        code.jnz(label);
        break;
    }
    default:
        ASSERT_MSG(false, "Unknown cond {}", static_cast<size_t>(cond));
        break;
    }

    return label;
}

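// For a conditional block, emit the prelude that skips the block body when the
// condition fails: charge the condition-failed cycle count and link to the
// condition-failed location, then bind the pass label at the body entry.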
void EmitX64::EmitCondPrelude(const IR::Block& block) {
    if (block.GetCondition() == IR::Cond::AL) {
        ASSERT(!block.HasConditionFailedLocation());
        return;
    }

    ASSERT(block.HasConditionFailedLocation());

    Xbyak::Label pass = EmitCond(block.GetCondition());
    EmitAddCycles(block.ConditionFailedCycleCount());
    EmitTerminal(IR::Term::LinkBlock{block.ConditionFailedLocation()}, block.Location());
    code.L(pass);
}

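// Publishes a freshly compiled block: registers it with the perf map for
// profilers, backpatches any earlier links that were waiting on this location,
// and records its descriptor for later lookup and invalidation.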
EmitX64::BlockDescriptor EmitX64::RegisterBlock(const IR::LocationDescriptor& descriptor, CodePtr entrypoint, size_t size) {
    PerfMapRegister(entrypoint, code.getCurr(), LocationDescriptorToFriendlyName(descriptor));
    Patch(descriptor, entrypoint);

    BlockDescriptor block_desc{entrypoint, size};
    block_descriptors.emplace(descriptor.Value(), block_desc);
    return block_desc;
}

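// Dispatches on the terminal variant at compile time; each concrete terminal
// type is handled by a matching EmitTerminalImpl overload in the derived,
// architecture-specific emitter.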
void EmitX64::EmitTerminal(IR::Terminal terminal, IR::LocationDescriptor initial_location) {
    Common::VisitVariant<void>(terminal, [this, &initial_location](auto x) {
        using T = std::decay_t<decltype(x)>;
        if constexpr (!std::is_same_v<T, IR::Term::Invalid>) {
            this->EmitTerminalImpl(x, initial_location);
        } else {
            ASSERT_MSG(false, "Invalid terminal");
        }
    });
}

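// Rewrites every recorded patch site that targets `target_desc` so it points at
// `target_code_ptr`. The architecture-specific EmitPatch* overrides are expected
// to treat a null target as "not compiled" and fall back to the dispatcher,
// which is what makes Unpatch below work.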
void EmitX64::Patch(const IR::LocationDescriptor& target_desc, CodePtr target_code_ptr) {
    const CodePtr save_code_ptr = code.getCurr();
    const PatchInformation& patch_info = patch_information[target_desc];

    for (CodePtr location : patch_info.jg) {
        code.SetCodePtr(location);
        EmitPatchJg(target_desc, target_code_ptr);
    }

    for (CodePtr location : patch_info.jmp) {
        code.SetCodePtr(location);
        EmitPatchJmp(target_desc, target_code_ptr);
    }

    for (CodePtr location : patch_info.mov_rcx) {
        code.SetCodePtr(location);
        EmitPatchMovRcx(target_code_ptr);
    }

    code.SetCodePtr(save_code_ptr);
}

void EmitX64::Unpatch(const IR::LocationDescriptor& target_desc) {
    Patch(target_desc, nullptr);
}

void EmitX64::ClearCache() {
    block_descriptors.clear();
    patch_information.clear();
    PerfMapClear();
}

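// On platforms that enforce W^X, the code buffer must be made writable before
// unpatching; EnableWriting/DisableWriting toggle this, with SCOPE_EXIT
// guaranteeing that execute protection is restored on every path out.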
void EmitX64::InvalidateBasicBlocks(const std::unordered_set<IR::LocationDescriptor>& locations) {
    code.EnableWriting();
    SCOPE_EXIT { code.DisableWriting(); };

    for (const auto& descriptor : locations) {
        const auto it = block_descriptors.find(descriptor);
        if (it == block_descriptors.end()) {
            continue;
        }

        if (patch_information.count(descriptor)) {
            Unpatch(descriptor);
        }
        block_descriptors.erase(it);
    }
}

} // namespace Dynarmic::BackendX64