2018-01-06 21:15:25 +00:00
|
|
|
/* This file is part of the dynarmic project.
|
|
|
|
* Copyright (c) 2016 MerryMage
|
|
|
|
* This software may be used and distributed according to the terms of the GNU
|
|
|
|
* General Public License version 2 or any later version.
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <unordered_map>
|
|
|
|
#include <unordered_set>
|
|
|
|
|
2018-01-27 23:42:30 +00:00
|
|
|
#include <fmt/ostream.h>
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
#include "backend_x64/a64_emit_x64.h"
|
|
|
|
#include "backend_x64/a64_jitstate.h"
|
|
|
|
#include "backend_x64/abi.h"
|
|
|
|
#include "backend_x64/block_of_code.h"
|
2018-01-08 18:33:42 +00:00
|
|
|
#include "backend_x64/devirtualize.h"
|
2018-01-06 21:15:25 +00:00
|
|
|
#include "backend_x64/emit_x64.h"
|
|
|
|
#include "common/address_range.h"
|
|
|
|
#include "common/assert.h"
|
|
|
|
#include "common/bit_util.h"
|
|
|
|
#include "common/common_types.h"
|
|
|
|
#include "common/variant_util.h"
|
|
|
|
#include "frontend/A64/location_descriptor.h"
|
|
|
|
#include "frontend/A64/types.h"
|
|
|
|
#include "frontend/ir/basic_block.h"
|
|
|
|
#include "frontend/ir/microinstruction.h"
|
|
|
|
#include "frontend/ir/opcodes.h"
|
|
|
|
|
|
|
|
// TODO: Have ARM flags in host flags and not have them use up GPR registers unless necessary.
|
|
|
|
// TODO: Actually implement that proper instruction selector you've always wanted to sweetheart.
|
|
|
|
|
2018-01-26 13:51:48 +00:00
|
|
|
namespace Dynarmic::BackendX64 {
|
2018-01-06 21:15:25 +00:00
|
|
|
|
|
|
|
using namespace Xbyak::util;
|
|
|
|
|
|
|
|
// Per-block emission context: pairs the register allocator with the IR block
// currently being compiled.
A64EmitContext::A64EmitContext(RegAlloc& reg_alloc, IR::Block& block)
    : EmitContext(reg_alloc, block) {}
|
|
|
|
|
|
|
|
// Returns the block's location as an A64-specific location descriptor.
A64::LocationDescriptor A64EmitContext::Location() const {
    const A64::LocationDescriptor descriptor{block.Location()};
    return descriptor;
}
|
|
|
|
|
|
|
|
bool A64EmitContext::FPSCR_RoundTowardsZero() const {
|
|
|
|
return Location().FPCR().RMode() != A64::FPCR::RoundingMode::TowardsZero;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool A64EmitContext::FPSCR_FTZ() const {
|
|
|
|
return Location().FPCR().FZ();
|
|
|
|
}
|
|
|
|
|
|
|
|
bool A64EmitContext::FPSCR_DN() const {
|
|
|
|
return Location().FPCR().DN();
|
|
|
|
}
|
|
|
|
|
2018-02-03 14:28:57 +00:00
|
|
|
A64EmitX64::A64EmitX64(BlockOfCode& code, A64::UserConfig conf)
|
2018-01-06 21:15:25 +00:00
|
|
|
: EmitX64(code), conf(conf)
|
|
|
|
{
|
2018-02-03 14:28:57 +00:00
|
|
|
code.PreludeComplete();
|
2018-01-06 21:15:25 +00:00
|
|
|
}
|
|
|
|
|
2018-01-24 02:11:07 +00:00
|
|
|
// Out-of-line defaulted destructor.
A64EmitX64::~A64EmitX64() = default;
|
2018-01-06 21:15:25 +00:00
|
|
|
|
|
|
|
// Compiles one IR basic block into host x64 code.
//
// Emits the conditional prelude, dispatches every IR instruction to its
// opcode-specific Emit* member function, then emits cycle accounting and the
// block terminal. Registers the finished block in block_descriptors (keyed by
// the location's unique hash) and in block_ranges (keyed by guest PC range,
// for cache invalidation). Returns the descriptor of the emitted block.
A64EmitX64::BlockDescriptor A64EmitX64::Emit(IR::Block& block) {
    code.align();
    const u8* const entrypoint = code.getCurr();

    // Start emitting.
    EmitCondPrelude(block);

    RegAlloc reg_alloc{code, A64JitState::SpillCount, SpillToOpArg<A64JitState>};
    A64EmitContext ctx{reg_alloc, block};

    for (auto iter = block.begin(); iter != block.end(); ++iter) {
        IR::Inst* inst = &*iter;

        // Call the relevant Emit* member function.
        // The case list is generated from the master opcode table; A32-only
        // opcodes are deliberately excluded (A32OPC expands to nothing).
        switch (inst->GetOpcode()) {

#define OPCODE(name, type, ...)                                            \
        case IR::Opcode::name:                                             \
            A64EmitX64::Emit##name(ctx, inst);                             \
            break;
#define A32OPC(...)
#define A64OPC(name, type, ...)                                            \
        case IR::Opcode::A64##name:                                        \
            A64EmitX64::EmitA64##name(ctx, inst);                          \
            break;
#include "frontend/ir/opcodes.inc"
#undef OPCODE
#undef A32OPC
#undef A64OPC

        default:
            ASSERT_MSG(false, "Invalid opcode: {}", inst->GetOpcode());
            break;
        }

        // Values whose last use was this instruction are released here.
        ctx.reg_alloc.EndOfAllocScope();
    }

    reg_alloc.AssertNoMoreUses();

    EmitAddCycles(block.CycleCount());
    EmitX64::EmitTerminal(block.GetTerminal(), block.Location());
    // Trap if control flow ever falls through past the terminal.
    code.int3();

    const A64::LocationDescriptor descriptor{block.Location()};
    // Back-patch any earlier blocks that link to this location.
    Patch(descriptor, entrypoint);

    const size_t size = static_cast<size_t>(code.getCurr() - entrypoint);
    const A64::LocationDescriptor end_location{block.EndLocation()};
    // Closed interval of guest PCs covered by this block (end PC is exclusive,
    // hence the -1).
    const auto range = boost::icl::discrete_interval<u64>::closed(descriptor.PC(), end_location.PC() - 1);
    A64EmitX64::BlockDescriptor block_desc{entrypoint, size};
    block_descriptors.emplace(descriptor.UniqueHash(), block_desc);
    block_ranges.AddRange(range, descriptor);

    return block_desc;
}
|
|
|
|
|
2018-01-23 19:16:39 +00:00
|
|
|
// Discards all compiled blocks, including the PC-range bookkeeping used for
// selective invalidation.
void A64EmitX64::ClearCache() {
    EmitX64::ClearCache();
    block_ranges.ClearCache();
}
|
|
|
|
|
|
|
|
// Invalidates every compiled block whose guest PC range intersects `ranges`.
void A64EmitX64::InvalidateCacheRanges(const boost::icl::interval_set<u64>& ranges) {
    InvalidateBasicBlocks(block_ranges.InvalidateRanges(ranges));
}
|
|
|
|
|
2018-01-07 16:33:02 +00:00
|
|
|
// Stores the low byte of the operand into JitState::check_bit
// (consumed later by the CheckBit terminal).
void A64EmitX64::EmitA64SetCheckBit(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    const Xbyak::Reg8 value = ctx.reg_alloc.UseGpr(args[0]).cvt8();
    code.mov(code.byte[r15 + offsetof(A64JitState, check_bit)], value);
}
|
|
|
|
|
2018-01-07 11:31:20 +00:00
|
|
|
// Extracts the carry flag (bit 29 of the packed CPSR_nzcv word) into a GPR.
void A64EmitX64::EmitA64GetCFlag(A64EmitContext& ctx, IR::Inst* inst) {
    const Xbyak::Reg32 flags = ctx.reg_alloc.ScratchGpr().cvt32();
    code.mov(flags, dword[r15 + offsetof(A64JitState, CPSR_nzcv)]);
    // Bring bit 29 (C) down to bit 0 and discard everything else.
    code.shr(flags, 29);
    code.and_(flags, 1);
    ctx.reg_alloc.DefineValue(inst, flags);
}
|
|
|
|
|
|
|
|
// Packs the guest NZCV value into bits 31..28 of JitState::CPSR_nzcv.
void A64EmitX64::EmitA64SetNZCV(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    Xbyak::Reg32 to_store = ctx.reg_alloc.UseScratchGpr(args[0]).cvt32();
    // Bit-scatter trick: mask the relevant input bits, then a single multiply
    // by a constant replicates them so that after the shift the four flag bits
    // land in positions 31..28; the final mask drops everything else.
    code.and_(to_store, 0b11000001'00000001);
    code.imul(to_store, to_store, 0b00010000'00100001);
    code.shl(to_store, 16);
    code.and_(to_store, 0xF0000000);
    code.mov(dword[r15 + offsetof(A64JitState, CPSR_nzcv)], to_store);
}
|
|
|
|
|
2018-01-07 00:11:57 +00:00
|
|
|
// Loads the low 32 bits of a guest general-purpose register.
// Guest registers are stored as u64 slots in JitState::reg.
void A64EmitX64::EmitA64GetW(A64EmitContext& ctx, IR::Inst* inst) {
    const A64::Reg reg = inst->GetArg(0).GetA64RegRef();

    const Xbyak::Reg32 value = ctx.reg_alloc.ScratchGpr().cvt32();
    code.mov(value, dword[r15 + offsetof(A64JitState, reg) + sizeof(u64) * static_cast<size_t>(reg)]);
    ctx.reg_alloc.DefineValue(inst, value);
}
|
|
|
|
|
|
|
|
// Loads the full 64 bits of a guest general-purpose register.
void A64EmitX64::EmitA64GetX(A64EmitContext& ctx, IR::Inst* inst) {
    const A64::Reg reg = inst->GetArg(0).GetA64RegRef();

    const Xbyak::Reg64 value = ctx.reg_alloc.ScratchGpr();
    code.mov(value, qword[r15 + offsetof(A64JitState, reg) + sizeof(u64) * static_cast<size_t>(reg)]);
    ctx.reg_alloc.DefineValue(inst, value);
}
|
|
|
|
|
2018-01-26 18:35:19 +00:00
|
|
|
// Loads the S (low 32-bit) lane of a guest vector register.
// Vector registers occupy two u64 slots each in JitState::vec.
void A64EmitX64::EmitA64GetS(A64EmitContext& ctx, IR::Inst* inst) {
    A64::Vec vec = inst->GetArg(0).GetA64VecRef();
    auto addr = qword[r15 + offsetof(A64JitState, vec) + sizeof(u64) * 2 * static_cast<size_t>(vec)];

    Xbyak::Xmm result = ctx.reg_alloc.ScratchXmm();
    // movd reads only 32 bits from the (qword-typed) address and zeroes the
    // upper lanes of the destination xmm.
    code.movd(result, addr);
    ctx.reg_alloc.DefineValue(inst, result);
}
|
|
|
|
|
2018-01-21 17:45:43 +00:00
|
|
|
// Loads the D (low 64-bit) lane of a guest vector register.
// movq zeroes the upper 64 bits of the destination xmm.
void A64EmitX64::EmitA64GetD(A64EmitContext& ctx, IR::Inst* inst) {
    const A64::Vec vec = inst->GetArg(0).GetA64VecRef();
    const auto vec_addr = qword[r15 + offsetof(A64JitState, vec) + sizeof(u64) * 2 * static_cast<size_t>(vec)];

    const Xbyak::Xmm value = ctx.reg_alloc.ScratchXmm();
    code.movq(value, vec_addr);
    ctx.reg_alloc.DefineValue(inst, value);
}
|
|
|
|
|
|
|
|
// Loads a full 128-bit guest vector (Q) register.
// JitState::vec slots are 16-byte aligned, so the aligned movaps is safe.
void A64EmitX64::EmitA64GetQ(A64EmitContext& ctx, IR::Inst* inst) {
    const A64::Vec vec = inst->GetArg(0).GetA64VecRef();
    const auto vec_addr = xword[r15 + offsetof(A64JitState, vec) + sizeof(u64) * 2 * static_cast<size_t>(vec)];

    const Xbyak::Xmm value = ctx.reg_alloc.ScratchXmm();
    code.movaps(value, vec_addr);
    ctx.reg_alloc.DefineValue(inst, value);
}
|
|
|
|
|
2018-01-07 11:31:20 +00:00
|
|
|
// Loads the guest stack pointer.
void A64EmitX64::EmitA64GetSP(A64EmitContext& ctx, IR::Inst* inst) {
    const Xbyak::Reg64 value = ctx.reg_alloc.ScratchGpr();
    code.mov(value, qword[r15 + offsetof(A64JitState, sp)]);
    ctx.reg_alloc.DefineValue(inst, value);
}
|
|
|
|
|
2018-01-07 00:11:57 +00:00
|
|
|
// Writes a 32-bit value to a guest general-purpose register, zero-extending
// to the full 64-bit storage slot (AArch64 W-register write semantics).
void A64EmitX64::EmitA64SetW(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    A64::Reg reg = inst->GetArg(0).GetA64RegRef();
    auto addr = qword[r15 + offsetof(A64JitState, reg) + sizeof(u64) * static_cast<size_t>(reg)];
    if (args[1].FitsInImmediateS32()) {
        // NOTE(review): a sign-extended 32-bit immediate store is used here;
        // presumably the immediate's upper bits are known-zero in practice —
        // confirm against how FitsInImmediateS32 is used by callers.
        code.mov(addr, args[1].GetImmediateS32());
    } else {
        // TODO: zext tracking, xmm variant
        Xbyak::Reg64 to_store = ctx.reg_alloc.UseScratchGpr(args[1]);
        // A 32-bit mov to itself implicitly zeroes the upper 32 bits.
        code.mov(to_store.cvt32(), to_store.cvt32());
        code.mov(addr, to_store);
    }
}
|
|
|
|
|
|
|
|
// Writes a 64-bit value to a guest general-purpose register.
// Three paths: small immediate (store directly, sign-extended), value already
// resident in an xmm (movq avoids a GPR round-trip), or plain GPR store.
void A64EmitX64::EmitA64SetX(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    A64::Reg reg = inst->GetArg(0).GetA64RegRef();
    auto addr = qword[r15 + offsetof(A64JitState, reg) + sizeof(u64) * static_cast<size_t>(reg)];
    if (args[1].FitsInImmediateS32()) {
        // x64 sign-extends the 32-bit immediate to 64 bits on store.
        code.mov(addr, args[1].GetImmediateS32());
    } else if (args[1].IsInXmm()) {
        Xbyak::Xmm to_store = ctx.reg_alloc.UseXmm(args[1]);
        code.movq(addr, to_store);
    } else {
        Xbyak::Reg64 to_store = ctx.reg_alloc.UseGpr(args[1]);
        code.mov(addr, to_store);
    }
}
|
|
|
|
|
2018-01-26 18:34:22 +00:00
|
|
|
// Writes the S (low 32-bit) lane of a guest vector register, zeroing the
// remaining 96 bits of the 128-bit slot.
void A64EmitX64::EmitA64SetS(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    A64::Vec vec = inst->GetArg(0).GetA64VecRef();
    auto addr = xword[r15 + offsetof(A64JitState, vec) + sizeof(u64) * 2 * static_cast<size_t>(vec)];

    Xbyak::Xmm to_store = ctx.reg_alloc.UseXmm(args[1]);
    Xbyak::Xmm tmp = ctx.reg_alloc.ScratchXmm();
    // TODO: Optimize
    // Build {value, 0, 0, 0} in tmp: movss copies only the low lane, so the
    // preceding pxor supplies the zeros; then store all 128 bits at once.
    code.pxor(tmp, tmp);
    code.movss(tmp, to_store);
    code.movaps(addr, tmp);
}
|
|
|
|
|
2018-01-21 17:45:43 +00:00
|
|
|
// Writes the D (low 64-bit) lane of a guest vector register, zeroing the
// upper 64 bits of the 128-bit slot.
void A64EmitX64::EmitA64SetD(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    A64::Vec vec = inst->GetArg(0).GetA64VecRef();
    auto addr = xword[r15 + offsetof(A64JitState, vec) + sizeof(u64) * 2 * static_cast<size_t>(vec)];

    Xbyak::Xmm to_store = ctx.reg_alloc.UseScratchXmm(args[1]);
    // reg-to-self movq zeroes bits 127..64 of the xmm.
    code.movq(to_store, to_store); // TODO: Remove when able
    code.movaps(addr, to_store);
}
|
|
|
|
|
|
|
|
// Writes a full 128-bit guest vector (Q) register.
void A64EmitX64::EmitA64SetQ(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    const A64::Vec vec = inst->GetArg(0).GetA64VecRef();
    const auto vec_addr = xword[r15 + offsetof(A64JitState, vec) + sizeof(u64) * 2 * static_cast<size_t>(vec)];

    const Xbyak::Xmm value = ctx.reg_alloc.UseXmm(args[1]);
    code.movaps(vec_addr, value);
}
|
|
|
|
|
2018-01-07 11:31:20 +00:00
|
|
|
// Writes the guest stack pointer. Same three store strategies as EmitA64SetX:
// small immediate, xmm-resident value, or GPR.
void A64EmitX64::EmitA64SetSP(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    auto addr = qword[r15 + offsetof(A64JitState, sp)];
    if (args[0].FitsInImmediateS32()) {
        // x64 sign-extends the 32-bit immediate to 64 bits on store.
        code.mov(addr, args[0].GetImmediateS32());
    } else if (args[0].IsInXmm()) {
        Xbyak::Xmm to_store = ctx.reg_alloc.UseXmm(args[0]);
        code.movq(addr, to_store);
    } else {
        Xbyak::Reg64 to_store = ctx.reg_alloc.UseGpr(args[0]);
        code.mov(addr, to_store);
    }
}
|
|
|
|
|
2018-01-07 13:56:32 +00:00
|
|
|
// Writes the guest program counter. Same three store strategies as
// EmitA64SetX: small immediate, xmm-resident value, or GPR.
void A64EmitX64::EmitA64SetPC(A64EmitContext& ctx, IR::Inst* inst) {
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    auto addr = qword[r15 + offsetof(A64JitState, pc)];
    if (args[0].FitsInImmediateS32()) {
        // x64 sign-extends the 32-bit immediate to 64 bits on store.
        code.mov(addr, args[0].GetImmediateS32());
    } else if (args[0].IsInXmm()) {
        Xbyak::Xmm to_store = ctx.reg_alloc.UseXmm(args[0]);
        code.movq(addr, to_store);
    } else {
        Xbyak::Reg64 to_store = ctx.reg_alloc.UseGpr(args[0]);
        code.mov(addr, to_store);
    }
}
|
|
|
|
|
2018-01-08 22:03:03 +00:00
|
|
|
// Emits a call to the user's CallSVC callback with the (compile-time constant)
// SVC immediate as its argument.
void A64EmitX64::EmitA64CallSupervisor(A64EmitContext& ctx, IR::Inst* inst) {
    // Spill/arrange registers for a host call before loading the argument.
    ctx.reg_alloc.HostCall(nullptr);
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    ASSERT(args[0].IsImmediate());
    u32 imm = args[0].GetImmediateU32();
    DEVIRT(conf.callbacks, &A64::UserCallbacks::CallSVC).EmitCall(code, [&](Xbyak::Reg64 param1) {
        code.mov(param1.cvt32(), imm);
    });
}
|
|
|
|
|
2018-01-13 17:54:29 +00:00
|
|
|
// Emits a call to the user's ExceptionRaised callback with the faulting PC
// and the exception code, both compile-time constants.
void A64EmitX64::EmitA64ExceptionRaised(A64EmitContext& ctx, IR::Inst* inst) {
    ctx.reg_alloc.HostCall(nullptr);
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    ASSERT(args[0].IsImmediate() && args[1].IsImmediate());
    u64 pc = args[0].GetImmediateU64();
    u64 exception = args[1].GetImmediateU64();
    DEVIRT(conf.callbacks, &A64::UserCallbacks::ExceptionRaised).EmitCall(code, [&](Xbyak::Reg64 param1, Xbyak::Reg64 param2) {
        code.mov(param1, pc);
        code.mov(param2, exception);
    });
}
|
|
|
|
|
2018-01-10 01:13:23 +00:00
|
|
|
// Emits a call to the user's MemoryRead8 callback.
// The lambda runs before the call instruction is emitted: HostCall moves the
// vaddr operand into the second parameter register (the first is `this`) and
// directs the callback's return value to this instruction's result.
void A64EmitX64::EmitA64ReadMemory8(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryRead8).EmitCall(code, [&](Xbyak::Reg64 vaddr) {
        ASSERT(vaddr == code.ABI_PARAM2);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(inst, {}, args[0]);
    });
}
|
|
|
|
|
|
|
|
// Emits a call to the user's MemoryRead16 callback.
// See EmitA64ReadMemory8 for the HostCall/devirtualizer interplay.
void A64EmitX64::EmitA64ReadMemory16(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryRead16).EmitCall(code, [&](Xbyak::Reg64 vaddr) {
        ASSERT(vaddr == code.ABI_PARAM2);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(inst, {}, args[0]);
    });
}
|
|
|
|
|
|
|
|
// Emits a call to the user's MemoryRead32 callback.
// See EmitA64ReadMemory8 for the HostCall/devirtualizer interplay.
void A64EmitX64::EmitA64ReadMemory32(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryRead32).EmitCall(code, [&](Xbyak::Reg64 vaddr) {
        ASSERT(vaddr == code.ABI_PARAM2);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(inst, {}, args[0]);
    });
}
|
|
|
|
|
|
|
|
// Emits a call to the user's MemoryRead64 callback.
// See EmitA64ReadMemory8 for the HostCall/devirtualizer interplay.
void A64EmitX64::EmitA64ReadMemory64(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryRead64).EmitCall(code, [&](Xbyak::Reg64 vaddr) {
        ASSERT(vaddr == code.ABI_PARAM2);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(inst, {}, args[0]);
    });
}
|
|
|
|
|
2018-01-24 15:55:59 +00:00
|
|
|
// Emits a call to the user's MemoryRead128 callback.
//
// Windows x64: 128-bit values are returned via a hidden pointer, so carve the
// result slot out of the shadow space and pass its address as the (shifted)
// first parameter; reload the value into an xmm afterwards.
// System V: the value comes back in the RDX:RAX register pair and is packed
// into an xmm (pinsrq on SSE4.1, punpcklqdq otherwise).
void A64EmitX64::EmitA64ReadMemory128(A64EmitContext& ctx, IR::Inst* inst) {
#ifdef _WIN32
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);

    // The 16-byte result buffer lives in the caller-provided shadow space.
    static_assert(ABI_SHADOW_SPACE >= 16);
    ctx.reg_alloc.HostCall(nullptr, {}, {}, args[0]);
    code.lea(code.ABI_PARAM2, ptr[rsp]);
    code.sub(rsp, ABI_SHADOW_SPACE);

    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryRead128).EmitCall(code, [&](Xbyak::Reg64 return_value, Xbyak::Reg64 vaddr) {
        ASSERT(return_value == code.ABI_PARAM2 && vaddr == code.ABI_PARAM3);
    });

    Xbyak::Xmm result = xmm0;
    code.movups(result, xword[code.ABI_RETURN]);
    code.add(rsp, ABI_SHADOW_SPACE);

    ctx.reg_alloc.DefineValue(inst, result);
#else
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryRead128).EmitCall(code, [&](Xbyak::Reg64 vaddr) {
        ASSERT(vaddr == code.ABI_PARAM2);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(nullptr, {}, args[0]);
    });
    Xbyak::Xmm result = xmm0;
    if (code.DoesCpuSupport(Xbyak::util::Cpu::tSSE41)) {
        code.movq(result, code.ABI_RETURN);
        code.pinsrq(result, code.ABI_RETURN2, 1);
    } else {
        // Pre-SSE4.1 fallback: combine the two halves with an unpack.
        Xbyak::Xmm tmp = xmm1;
        code.movq(result, code.ABI_RETURN);
        code.movq(tmp, code.ABI_RETURN2);
        code.punpcklqdq(result, tmp);
    }
    ctx.reg_alloc.DefineValue(inst, result);
#endif
}
|
|
|
|
|
2018-01-10 01:13:23 +00:00
|
|
|
// Emits a call to the user's MemoryWrite8 callback (vaddr, value).
// HostCall inside the lambda arranges both operands into the parameter
// registers the devirtualizer hands us (PARAM1 is reserved for `this`).
void A64EmitX64::EmitA64WriteMemory8(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryWrite8).EmitCall(code, [&](Xbyak::Reg64 vaddr, Xbyak::Reg64 value) {
        ASSERT(vaddr == code.ABI_PARAM2 && value == code.ABI_PARAM3);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(nullptr, {}, args[0], args[1]);
    });
}
|
|
|
|
|
|
|
|
// Emits a call to the user's MemoryWrite16 callback (vaddr, value).
// See EmitA64WriteMemory8 for the calling arrangement.
void A64EmitX64::EmitA64WriteMemory16(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryWrite16).EmitCall(code, [&](Xbyak::Reg64 vaddr, Xbyak::Reg64 value) {
        ASSERT(vaddr == code.ABI_PARAM2 && value == code.ABI_PARAM3);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(nullptr, {}, args[0], args[1]);
    });
}
|
|
|
|
|
|
|
|
// Emits a call to the user's MemoryWrite32 callback (vaddr, value).
// See EmitA64WriteMemory8 for the calling arrangement.
void A64EmitX64::EmitA64WriteMemory32(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryWrite32).EmitCall(code, [&](Xbyak::Reg64 vaddr, Xbyak::Reg64 value) {
        ASSERT(vaddr == code.ABI_PARAM2 && value == code.ABI_PARAM3);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(nullptr, {}, args[0], args[1]);
    });
}
|
|
|
|
|
|
|
|
// Emits a call to the user's MemoryWrite64 callback (vaddr, value).
// See EmitA64WriteMemory8 for the calling arrangement.
void A64EmitX64::EmitA64WriteMemory64(A64EmitContext& ctx, IR::Inst* inst) {
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryWrite64).EmitCall(code, [&](Xbyak::Reg64 vaddr, Xbyak::Reg64 value) {
        ASSERT(vaddr == code.ABI_PARAM2 && value == code.ABI_PARAM3);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.HostCall(nullptr, {}, args[0], args[1]);
    });
}
|
|
|
|
|
2018-01-24 15:55:59 +00:00
|
|
|
// Emits a call to the user's MemoryWrite128 callback.
//
// Windows x64: 128-bit arguments are passed by pointer, so the value is
// spilled to the shadow space and its address passed as the third parameter.
// System V: the value is split across two 64-bit parameter registers
// (pextrq on SSE4.1, punpckhqdq fallback otherwise).
void A64EmitX64::EmitA64WriteMemory128(A64EmitContext& ctx, IR::Inst* inst) {
#ifdef _WIN32
    auto args = ctx.reg_alloc.GetArgumentInfo(inst);

    // The 16-byte argument buffer lives in the caller-provided shadow space.
    static_assert(ABI_SHADOW_SPACE >= 16);
    ctx.reg_alloc.Use(args[0], ABI_PARAM2);
    Xbyak::Xmm xmm_value = ctx.reg_alloc.UseXmm(args[1]);
    ctx.reg_alloc.EndOfAllocScope();
    ctx.reg_alloc.HostCall(nullptr);
    code.lea(code.ABI_PARAM3, ptr[rsp]);
    code.sub(rsp, ABI_SHADOW_SPACE);
    code.movaps(xword[code.ABI_PARAM3], xmm_value);

    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryWrite128).EmitCall(code, [&](Xbyak::Reg64 vaddr, Xbyak::Reg64 value_ptr) {
        ASSERT(vaddr == code.ABI_PARAM2 && value_ptr == code.ABI_PARAM3);
    });

    code.add(rsp, ABI_SHADOW_SPACE);
#else
    DEVIRT(conf.callbacks, &A64::UserCallbacks::MemoryWrite128).EmitCall(code, [&](Xbyak::Reg64 vaddr, Xbyak::Reg64 value0, Xbyak::Reg64 value1) {
        ASSERT(vaddr == code.ABI_PARAM2 && value0 == code.ABI_PARAM3 && value1 == code.ABI_PARAM4);
        auto args = ctx.reg_alloc.GetArgumentInfo(inst);
        ctx.reg_alloc.Use(args[0], ABI_PARAM2);
        // Reserve the two parameter registers before extracting the halves.
        ctx.reg_alloc.ScratchGpr({ABI_PARAM3});
        ctx.reg_alloc.ScratchGpr({ABI_PARAM4});
        if (code.DoesCpuSupport(Xbyak::util::Cpu::tSSE41)) {
            Xbyak::Xmm xmm_value = ctx.reg_alloc.UseXmm(args[1]);
            code.movq(code.ABI_PARAM3, xmm_value);
            code.pextrq(code.ABI_PARAM4, xmm_value, 1);
        } else {
            // Pre-SSE4.1 fallback clobbers the xmm, hence UseScratchXmm.
            Xbyak::Xmm xmm_value = ctx.reg_alloc.UseScratchXmm(args[1]);
            code.movq(code.ABI_PARAM3, xmm_value);
            code.punpckhqdq(xmm_value, xmm_value);
            code.movq(code.ABI_PARAM4, xmm_value);
        }
        ctx.reg_alloc.EndOfAllocScope();
        ctx.reg_alloc.HostCall(nullptr);
    });
#endif
}
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
// Terminal: fall back to the user's interpreter for `num_instructions`
// starting at `terminal.next`, then return to the dispatcher.
void A64EmitX64::EmitTerminalImpl(IR::Term::Interpret terminal, IR::LocationDescriptor) {
    // Restore the guest's MXCSR before leaving JITted code.
    code.SwitchMxcsrOnExit();
    DEVIRT(conf.callbacks, &A64::UserCallbacks::InterpreterFallback).EmitCall(code, [&](Xbyak::Reg64 param1, Xbyak::Reg64 param2) {
        code.mov(param1, A64::LocationDescriptor{terminal.next}.PC());
        // Keep JitState::pc in sync so the interpreter sees the right PC.
        code.mov(qword[r15 + offsetof(A64JitState, pc)], param1);
        code.mov(param2.cvt32(), terminal.num_instructions);
    });
    code.ReturnFromRunCode(true); // TODO: Check cycles
}
|
|
|
|
|
|
|
|
// Terminal: hand control back to the dispatcher loop.
void A64EmitX64::EmitTerminalImpl(IR::Term::ReturnToDispatch, IR::LocationDescriptor) {
    code.ReturnFromRunCode();
}
|
|
|
|
|
|
|
|
// Terminal: direct link to the next block, guarded by the cycle counter.
// Emits a patchable jg that is taken while cycles remain; the patch site is
// recorded so the jump can be retargeted when the successor is (re)compiled.
void A64EmitX64::EmitTerminalImpl(IR::Term::LinkBlock terminal, IR::LocationDescriptor) {
    code.cmp(qword[r15 + offsetof(A64JitState, cycles_remaining)], 0);

    patch_information[terminal.next].jg.emplace_back(code.getCurr());
    if (auto next_bb = GetBasicBlock(terminal.next)) {
        EmitPatchJg(terminal.next, next_bb->entrypoint);
    } else {
        EmitPatchJg(terminal.next);
    }
    // Fallthrough (cycles exhausted): record the next PC and force a return.
    code.mov(rax, A64::LocationDescriptor{terminal.next}.PC());
    code.mov(qword[r15 + offsetof(A64JitState, pc)], rax);
    code.ForceReturnFromRunCode();
}
|
|
|
|
|
|
|
|
// Terminal: unconditional direct link to the next block (no cycle check).
// The jmp site is recorded for later repatching, like LinkBlock's jg.
void A64EmitX64::EmitTerminalImpl(IR::Term::LinkBlockFast terminal, IR::LocationDescriptor) {
    patch_information[terminal.next].jmp.emplace_back(code.getCurr());
    if (auto next_bb = GetBasicBlock(terminal.next)) {
        EmitPatchJmp(terminal.next, next_bb->entrypoint);
    } else {
        EmitPatchJmp(terminal.next);
    }
}
|
|
|
|
|
2018-01-27 22:44:17 +00:00
|
|
|
// Terminal: return-stack-buffer pop. Recomputes the current location's unique
// hash from JitState (pc | fpcr-bits << 37), pops the top RSB entry, and jumps
// straight to the cached code pointer when the hashes match; otherwise falls
// back to the dispatcher.
//
// Bug fix: the FPCR hash component was shifted with `code.shl(ebx, 37)`. x86
// masks the shift count to 5 bits for 32-bit operands, so this actually
// shifted by 5 and could never reproduce the `<< 37` packing this code must
// share with A64::LocationDescriptor::UniqueHash — RSB lookups would compare
// against a malformed hash. The shift must operate on the full 64-bit
// register (the preceding 32-bit mov/and already zeroed rbx's upper bits).
void A64EmitX64::EmitTerminalImpl(IR::Term::PopRSBHint, IR::LocationDescriptor) {
    // This calculation has to match up with A64::LocationDescriptor::UniqueHash
    // TODO: Optimization is available here based on known state of FPSCR_mode and CPSR_et.
    code.mov(rcx, qword[r15 + offsetof(A64JitState, pc)]);
    code.mov(ebx, dword[r15 + offsetof(A64JitState, fpcr)]);
    code.and_(ebx, A64::LocationDescriptor::FPCR_MASK);
    code.shl(rbx, 37);  // 64-bit shift: a 32-bit shl would mask the count to 5.
    code.or_(rbx, rcx);

    // Pop: decrement rsb_ptr modulo the ring size.
    code.mov(eax, dword[r15 + offsetof(A64JitState, rsb_ptr)]);
    code.sub(eax, 1);
    code.and_(eax, u32(A64JitState::RSBPtrMask));
    code.mov(dword[r15 + offsetof(A64JitState, rsb_ptr)], eax);
    // Miss: location hash differs — return to the dispatcher.
    code.cmp(rbx, qword[r15 + offsetof(A64JitState, rsb_location_descriptors) + rax * sizeof(u64)]);
    code.jne(code.GetReturnFromRunCodeAddress());
    // Hit: jump directly to the cached host code pointer.
    code.mov(rax, qword[r15 + offsetof(A64JitState, rsb_codeptrs) + rax * sizeof(u64)]);
    code.jmp(rax);
}
|
|
|
|
|
|
|
|
// Terminal: conditional split. AL/NV always take the then-branch; otherwise
// emit the condition test, the else-terminal on the fallthrough path, and the
// then-terminal behind the pass label.
void A64EmitX64::EmitTerminalImpl(IR::Term::If terminal, IR::LocationDescriptor initial_location) {
    switch (terminal.if_) {
    case IR::Cond::AL:
    case IR::Cond::NV:
        EmitTerminal(terminal.then_, initial_location);
        break;
    default:
        Xbyak::Label pass = EmitCond(terminal.if_);
        EmitTerminal(terminal.else_, initial_location);
        code.L(pass);
        EmitTerminal(terminal.then_, initial_location);
        break;
    }
}
|
|
|
|
|
2018-01-07 16:33:02 +00:00
|
|
|
// Terminal: branch on JitState::check_bit (set earlier via A64SetCheckBit).
// Non-zero takes the then-terminal; zero takes the else-terminal.
void A64EmitX64::EmitTerminalImpl(IR::Term::CheckBit terminal, IR::LocationDescriptor initial_location) {
    Xbyak::Label fail;
    code.cmp(code.byte[r15 + offsetof(A64JitState, check_bit)], u8(0));
    code.jz(fail);
    EmitTerminal(terminal.then_, initial_location);
    code.L(fail);
    EmitTerminal(terminal.else_, initial_location);
}
|
|
|
|
|
2018-01-06 21:15:25 +00:00
|
|
|
// Terminal: honour an asynchronous halt request before continuing to the
// else-terminal.
void A64EmitX64::EmitTerminalImpl(IR::Term::CheckHalt terminal, IR::LocationDescriptor initial_location) {
    code.cmp(code.byte[r15 + offsetof(A64JitState, halt_requested)], u8(0));
    code.jne(code.GetForceReturnFromRunCodeAddress());
    EmitTerminal(terminal.else_, initial_location);
}
|
|
|
|
|
2018-01-07 00:11:57 +00:00
|
|
|
// Emits the patchable jg used by LinkBlock terminals. When the target block
// is compiled, jumps straight to it; otherwise stores the target PC and jg's
// to the dispatcher. The site is padded to a fixed size so it can be
// overwritten in place later.
void A64EmitX64::EmitPatchJg(const IR::LocationDescriptor& target_desc, CodePtr target_code_ptr) {
    const CodePtr patch_location = code.getCurr();
    if (target_code_ptr) {
        code.jg(target_code_ptr);
    } else {
        code.mov(rax, A64::LocationDescriptor{target_desc}.PC());
        code.mov(qword[r15 + offsetof(A64JitState, pc)], rax);
        code.jg(code.GetReturnFromRunCodeAddress());
    }
    // Both variants must occupy the same number of bytes for repatching.
    code.EnsurePatchLocationSize(patch_location, 30); // TODO: Reduce size
}
|
|
|
|
|
2018-01-07 00:11:57 +00:00
|
|
|
// Emits the patchable jmp used by LinkBlockFast terminals. Same fixed-size
// patch-site contract as EmitPatchJg, but unconditional.
void A64EmitX64::EmitPatchJmp(const IR::LocationDescriptor& target_desc, CodePtr target_code_ptr) {
    const CodePtr patch_location = code.getCurr();
    if (target_code_ptr) {
        code.jmp(target_code_ptr);
    } else {
        code.mov(rax, A64::LocationDescriptor{target_desc}.PC());
        code.mov(qword[r15 + offsetof(A64JitState, pc)], rax);
        code.jmp(code.GetReturnFromRunCodeAddress());
    }
    // Both variants must occupy the same number of bytes for repatching.
    code.EnsurePatchLocationSize(patch_location, 30); // TODO: Reduce size
}
|
|
|
|
|
|
|
|
// Emits a patchable `mov rcx, imm64` holding a host code pointer (used by the
// RSB mechanism). A null target defaults to the return-from-run-code thunk.
// Padded to 10 bytes — the fixed size of a 64-bit mov-immediate — so it can
// be rewritten in place.
void A64EmitX64::EmitPatchMovRcx(CodePtr target_code_ptr) {
    if (!target_code_ptr) {
        target_code_ptr = code.GetReturnFromRunCodeAddress();
    }
    const CodePtr patch_location = code.getCurr();
    code.mov(code.rcx, reinterpret_cast<u64>(target_code_ptr));
    code.EnsurePatchLocationSize(patch_location, 10);
}
|
|
|
|
|
2018-01-26 13:51:48 +00:00
|
|
|
} // namespace Dynarmic::BackendX64
|