/* This file is part of the dynarmic project.
 * Copyright (c) 2016 MerryMage
 * This software may be used and distributed according to the terms of the GNU
 * General Public License version 2 or any later version.
 */

#include <cstring>
#include <limits>

#include <xbyak.h>

#include "backend_x64/a32_jitstate.h"
#include "backend_x64/abi.h"
#include "backend_x64/block_of_code.h"
#include "common/assert.h"

namespace Dynarmic {
namespace BackendX64 {

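// Host calling-convention registers: the Windows x64 ABI when _WIN32 is defined,
// the System V AMD64 ABI otherwise.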
#ifdef _WIN32
const Xbyak::Reg64 BlockOfCode::ABI_RETURN = Xbyak::util::rax;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM1 = Xbyak::util::rcx;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM2 = Xbyak::util::rdx;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM3 = Xbyak::util::r8;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM4 = Xbyak::util::r9;
#else
const Xbyak::Reg64 BlockOfCode::ABI_RETURN = Xbyak::util::rax;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM1 = Xbyak::util::rdi;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM2 = Xbyak::util::rsi;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM3 = Xbyak::util::rdx;
const Xbyak::Reg64 BlockOfCode::ABI_PARAM4 = Xbyak::util::rcx;
#endif

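// All emitted code lives in one 128 MiB buffer. Normal ("near") code grows from the
// start of the buffer; code emitted via SwitchToFarCode() grows from FAR_CODE_OFFSET
// bytes in, so infrequently taken paths can be kept out of the main code stream.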
constexpr size_t TOTAL_CODE_SIZE = 128 * 1024 * 1024;
constexpr size_t FAR_CODE_OFFSET = 100 * 1024 * 1024;

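// Emits the run-code prelude at construction time and registers this code block
// with the platform exception handler.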
BlockOfCode::BlockOfCode(RunCodeCallbacks cb, JitStateInfo jsi)
    : Xbyak::CodeGenerator(TOTAL_CODE_SIZE)
    , cb(std::move(cb))
    , jsi(jsi)
    , constant_pool(this, 256)
{
    GenRunCode();
    exception_handler.Register(this);
}

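// Called once the prelude has been emitted: records where translated code may begin
// (near_code_begin), derives the start of the far region, and clears the cache.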
void BlockOfCode::PreludeComplete() {
    prelude_complete = true;
    near_code_begin = getCurr();
    far_code_begin = getCurr() + FAR_CODE_OFFSET;
    ClearCache();
}

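// Invalidates all translated code by rewinding both emission pointers to the start
// of their respective regions; the prelude itself is preserved.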
void BlockOfCode::ClearCache() {
    ASSERT(prelude_complete);
    in_far_code = false;
    near_code_ptr = near_code_begin;
    far_code_ptr = far_code_begin;
    SetCodePtr(near_code_begin);
}

size_t BlockOfCode::SpaceRemaining() const {
    ASSERT(prelude_complete);
    // This function provides an underestimate of near-code-size but that's okay.
    // (Why? The maximum size of near code should be measured from near_code_begin, not top_.)
    // These are offsets from Xbyak::CodeArray::top_.
    std::size_t far_code_offset, near_code_offset;
    if (in_far_code) {
        near_code_offset = static_cast<const u8*>(near_code_ptr) - getCode();
        far_code_offset = getCurr() - getCode();
    } else {
        near_code_offset = getCurr() - getCode();
        far_code_offset = static_cast<const u8*>(far_code_ptr) - getCode();
    }
    if (far_code_offset > TOTAL_CODE_SIZE)
        return 0;
    if (near_code_offset > FAR_CODE_OFFSET)
        return 0;
    return std::min(TOTAL_CODE_SIZE - far_code_offset, FAR_CODE_OFFSET - near_code_offset);
}

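// Entry points into emitted code. RunCode enters the dispatcher loop; RunCodeFrom
// begins execution at a specific translated block.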
void BlockOfCode::RunCode(void* jit_state) const {
    run_code(jit_state);
}

void BlockOfCode::RunCodeFrom(void* jit_state, CodePtr code_ptr) const {
    run_code_from(jit_state, code_ptr);
}

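// Emits a jump to the appropriate "return from run code" thunk. The thunk index is a
// bitmask of MXCSR_ALREADY_EXITED and FORCE_RETURN, matching the table built in GenRunCode().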
void BlockOfCode::ReturnFromRunCode(bool mxcsr_already_exited) {
    size_t index = 0;
    if (mxcsr_already_exited)
        index |= MXCSR_ALREADY_EXITED;
    jmp(return_from_run_code[index]);
}

void BlockOfCode::ForceReturnFromRunCode(bool mxcsr_already_exited) {
    size_t index = FORCE_RETURN;
    if (mxcsr_already_exited)
        index |= MXCSR_ALREADY_EXITED;
    jmp(return_from_run_code[index]);
}

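// Emits the prelude: the run_code/run_code_from entry thunks and the four
// return-from-run-code thunks indexed by MXCSR_ALREADY_EXITED and FORCE_RETURN.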
void BlockOfCode::GenRunCode() {
    Xbyak::Label loop, enter_mxcsr_then_loop;

    // run_code_from: ABI_PARAM1 = jit_state, ABI_PARAM2 = code pointer to enter at.
    align();
    run_code_from = getCurr<RunCodeFromFuncType>();

    ABI_PushCalleeSaveRegistersAndAdjustStack(this);

    mov(r15, ABI_PARAM1);
    mov(r14, ABI_PARAM2); // save temporarily in non-volatile register

    cb.GetTicksRemaining->EmitCall(this);
    mov(qword[r15 + jsi.offsetof_cycles_to_run], ABI_RETURN);
    mov(qword[r15 + jsi.offsetof_cycles_remaining], ABI_RETURN);

    SwitchMxcsrOnEntry();
    jmp(r14);

    // run_code: ABI_PARAM1 = jit_state; blocks are dispatched via LookupBlock.
    align();
    run_code = getCurr<RunCodeFuncType>();

    // This serves two purposes:
    // 1. It saves all the registers we as a callee need to save.
    // 2. It aligns the stack so that the code the JIT emits can assume
    //    that the stack is appropriately aligned for CALLs.
    ABI_PushCalleeSaveRegistersAndAdjustStack(this);

    mov(r15, ABI_PARAM1);

    cb.GetTicksRemaining->EmitCall(this);
    mov(qword[r15 + jsi.offsetof_cycles_to_run], ABI_RETURN);
    mov(qword[r15 + jsi.offsetof_cycles_remaining], ABI_RETURN);

    L(enter_mxcsr_then_loop);
    SwitchMxcsrOnEntry();
    L(loop);
    cb.LookupBlock->EmitCall(this);

    jmp(ABI_RETURN);

    // Return from run code variants
    const auto emit_return_from_run_code = [this, &loop, &enter_mxcsr_then_loop](bool mxcsr_already_exited, bool force_return){
        if (!force_return) {
            cmp(qword[r15 + jsi.offsetof_cycles_remaining], 0);
            jg(mxcsr_already_exited ? enter_mxcsr_then_loop : loop);
        }

        if (!mxcsr_already_exited) {
            SwitchMxcsrOnExit();
        }

        cb.AddTicks->EmitCall(this, [this](Xbyak::Reg64 param1) {
            mov(param1, qword[r15 + jsi.offsetof_cycles_to_run]);
            sub(param1, qword[r15 + jsi.offsetof_cycles_remaining]);
        });

        ABI_PopCalleeSaveRegistersAndAdjustStack(this);
        ret();
    };

    align();
    return_from_run_code[0] = getCurr<const void*>();
    emit_return_from_run_code(false, false);

    align();
    return_from_run_code[MXCSR_ALREADY_EXITED] = getCurr<const void*>();
    emit_return_from_run_code(true, false);

    align();
    return_from_run_code[FORCE_RETURN] = getCurr<const void*>();
    emit_return_from_run_code(false, true);

    align();
    return_from_run_code[MXCSR_ALREADY_EXITED | FORCE_RETURN] = getCurr<const void*>();
    emit_return_from_run_code(true, true);
}

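// Swaps the host and guest MXCSR values stored in the JIT state, so that emitted code
// runs with the guest's floating-point control/status settings (and vice versa on exit).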
void BlockOfCode::SwitchMxcsrOnEntry() {
    stmxcsr(dword[r15 + jsi.offsetof_save_host_MXCSR]);
    ldmxcsr(dword[r15 + jsi.offsetof_guest_MXCSR]);
}

void BlockOfCode::SwitchMxcsrOnExit() {
    stmxcsr(dword[r15 + jsi.offsetof_guest_MXCSR]);
    ldmxcsr(dword[r15 + jsi.offsetof_save_host_MXCSR]);
}

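// Returns an address that refers to the given 64-bit constant in this block's constant pool.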
Xbyak::Address BlockOfCode::MConst(u64 constant) {
    return constant_pool.GetConstant(constant);
}

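// Temporarily redirects emission into the far region (and back again). The current
// position in the other region is saved so emission can resume where it left off.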
void BlockOfCode::SwitchToFarCode() {
    ASSERT(prelude_complete);
    ASSERT(!in_far_code);
    in_far_code = true;
    near_code_ptr = getCurr();
    SetCodePtr(far_code_ptr);

    ASSERT_MSG(near_code_ptr < far_code_begin, "Near code has overwritten far code!");
}

void BlockOfCode::SwitchToNearCode() {
    ASSERT(prelude_complete);
    ASSERT(in_far_code);
    in_far_code = false;
    far_code_ptr = getCurr();
    SetCodePtr(near_code_ptr);
}

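// Reserves alloc_size zero-initialised bytes at the current emission point and advances
// past them; throws if the code buffer would overflow.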
void* BlockOfCode::AllocateFromCodeSpace(size_t alloc_size) {
    if (size_ + alloc_size >= maxSize_) {
        throw Xbyak::Error(Xbyak::ERR_CODE_IS_TOO_BIG);
    }

    void* ret = getCurr<void*>();
    size_ += alloc_size;
    memset(ret, 0, alloc_size);
    return ret;
}

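// Moves the emission point to an absolute position within the code buffer.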
void BlockOfCode::SetCodePtr(CodePtr code_ptr) {
    // The "size" defines where top_, the insertion point, is.
    size_t required_size = reinterpret_cast<const u8*>(code_ptr) - getCode();
    setSize(required_size);
}

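// Pads a patchable region with NOPs so that it always occupies exactly `size` bytes,
// allowing it to be safely rewritten later.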
void BlockOfCode::EnsurePatchLocationSize(CodePtr begin, size_t size) {
    size_t current_size = getCurr<const u8*>() - reinterpret_cast<const u8*>(begin);
    ASSERT(current_size <= size);
    nop(size - current_size);
}

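// Queries host CPU feature support (e.g. SSE4.1 or AVX) via xbyak's CPUID wrapper.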
bool BlockOfCode::DoesCpuSupport(Xbyak::util::Cpu::Type type) const {
    return cpu_info.has(type);
}

} // namespace BackendX64
} // namespace Dynarmic