backend/x64: Rename namespace BackendX64 -> Backend::X64
parent f569d7913c
commit 325808949f
42 changed files with 86 additions and 86 deletions
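The rename is mechanical: the flat namespace component BackendX64 becomes the nested Backend::X64 (C++17 nested-namespace definition syntax), and every opening declaration, closing comment, and using-directive is updated to match. A minimal sketch of the pattern as it recurs throughout the diff below (the class name is illustrative only):

// Before
namespace Dynarmic::BackendX64 {
class BlockOfCode;
} // namespace Dynarmic::BackendX64

// After (C++17 nested namespace definition)
namespace Dynarmic::Backend::X64 {
class BlockOfCode;
} // namespace Dynarmic::Backend::X64

// Call sites adjust their using-directives the same way:
// using namespace BackendX64;   ->   using namespace Backend::X64;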
@@ -34,7 +34,7 @@
 // TODO: Have ARM flags in host flags and not have them use up GPR registers unless necessary.
 // TODO: Actually implement that proper instruction selector you've always wanted to sweetheart.

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -1380,4 +1380,4 @@ void A32EmitX64::EmitPatchMovRcx(CodePtr target_code_ptr) {
 code.EnsurePatchLocationSize(patch_location, 10);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -17,7 +17,7 @@
 #include "frontend/A32/location_descriptor.h"
 #include "frontend/ir/terminal.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class RegAlloc;

@@ -101,4 +101,4 @@ protected:
 void EmitPatchMovRcx(CodePtr target_code_ptr = nullptr) override;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -29,7 +29,7 @@

 namespace Dynarmic::A32 {

-using namespace BackendX64;
+using namespace Backend::X64;

 static RunCodeCallbacks GenRunCodeCallbacks(A32::UserCallbacks* cb, CodePtr (*LookupBlock)(void* lookup_block_arg), void* arg) {
 return RunCodeCallbacks{
@@ -11,7 +11,7 @@
 #include "common/common_types.h"
 #include "frontend/A32/location_descriptor.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 /**
 * CPSR Bits
@@ -202,4 +202,4 @@ void A32JitState::SetFpscr(u32 FPSCR) {
 }
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -12,7 +12,7 @@

 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class BlockOfCode;

@@ -109,4 +109,4 @@ struct A32JitState {

 using CodePtr = const void*;

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -30,7 +30,7 @@
 // TODO: Have ARM flags in host flags and not have them use up GPR registers unless necessary.
 // TODO: Actually implement that proper instruction selector you've always wanted to sweetheart.

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -1244,4 +1244,4 @@ void A64EmitX64::EmitPatchMovRcx(CodePtr target_code_ptr) {
 code.EnsurePatchLocationSize(patch_location, 10);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -18,7 +18,7 @@
 #include "frontend/A64/location_descriptor.h"
 #include "frontend/ir/terminal.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class RegAlloc;

@@ -106,4 +106,4 @@ protected:
 void EmitPatchMovRcx(CodePtr target_code_ptr = nullptr) override;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -24,7 +24,7 @@

 namespace Dynarmic::A64 {

-using namespace BackendX64;
+using namespace Backend::X64;

 static RunCodeCallbacks GenRunCodeCallbacks(A64::UserCallbacks* cb, CodePtr (*LookupBlock)(void* lookup_block_arg), void* arg) {
 return RunCodeCallbacks{
@@ -8,7 +8,7 @@
 #include "common/bit_util.h"
 #include "frontend/A64/location_descriptor.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 /**
 * Comparing MXCSR and FPCR
@@ -108,4 +108,4 @@ void A64JitState::SetFpsr(u32 value) {
 fpsr_exc = value & 0x9F;
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -13,7 +13,7 @@
 #include "common/common_types.h"
 #include "frontend/A64/location_descriptor.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class BlockOfCode;

@@ -93,4 +93,4 @@ struct A64JitState {

 using CodePtr = const void*;

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -24,7 +24,7 @@
 #include "common/common_types.h"
 #include "common/iterator_util.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 constexpr size_t GPR_SIZE = 8;
 constexpr size_t XMM_SIZE = 16;
@@ -150,4 +150,4 @@ void ABI_PopCallerSaveRegistersAndAdjustStackExcept(BlockOfCode& code, HostLoc e
 ABI_PopRegistersAndAdjustStack(code, 0, regs);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -9,7 +9,7 @@

 #include "backend/x64/hostloc.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class BlockOfCode;

@@ -121,4 +121,4 @@ void ABI_PopCallerSaveRegistersAndAdjustStack(BlockOfCode& code, size_t frame_si
 void ABI_PushCallerSaveRegistersAndAdjustStackExcept(BlockOfCode& code, HostLoc exception);
 void ABI_PopCallerSaveRegistersAndAdjustStackExcept(BlockOfCode& code, HostLoc exception);

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -21,7 +21,7 @@
 #include <sys/mman.h>
 #endif

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 #ifdef _WIN32
 const Xbyak::Reg64 BlockOfCode::ABI_RETURN = Xbyak::util::rax;
@@ -317,4 +317,4 @@ bool BlockOfCode::DoesCpuSupport([[maybe_unused]] Xbyak::util::Cpu::Type type) c
 #endif
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -19,7 +19,7 @@
 #include "common/cast_util.h"
 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using CodePtr = const void*;

@@ -178,4 +178,4 @@ private:
 Xbyak::util::Cpu cpu_info;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -12,7 +12,7 @@
 #include "backend/x64/block_range_information.h"
 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 template <typename ProgramCounterType>
 void BlockRangeInformation<ProgramCounterType>::AddRange(boost::icl::discrete_interval<ProgramCounterType> range, IR::LocationDescriptor location) {
@@ -42,4 +42,4 @@ std::unordered_set<IR::LocationDescriptor> BlockRangeInformation<ProgramCounterT
 template class BlockRangeInformation<u32>;
 template class BlockRangeInformation<u64>;

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -14,7 +14,7 @@

 #include "frontend/ir/location_descriptor.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 template <typename ProgramCounterType>
 class BlockRangeInformation {
@@ -27,4 +27,4 @@ private:
 boost::icl::interval_map<ProgramCounterType, std::set<IR::LocationDescriptor>> block_ranges;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -7,7 +7,7 @@
 #include "backend/x64/callback.h"
 #include "backend/x64/block_of_code.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 Callback::~Callback() = default;

@@ -38,4 +38,4 @@ void ArgCallback::EmitCallWithReturnPointer(BlockOfCode& code, std::function<voi
 code.CallFunction(fn);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -13,7 +13,7 @@

 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using RegList = std::vector<Xbyak::Reg64>;

@@ -52,4 +52,4 @@ private:
 u64 arg;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -10,7 +10,7 @@
 #include "backend/x64/constant_pool.h"
 #include "common/assert.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 ConstantPool::ConstantPool(BlockOfCode& code, size_t size) : code(code), pool_size(size) {
 code.int3();
@@ -32,4 +32,4 @@ Xbyak::Address ConstantPool::GetConstant(const Xbyak::AddressFrame& frame, u64 l
 return frame[code.rip + iter->second];
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -13,7 +13,7 @@

 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class BlockOfCode;

@@ -38,4 +38,4 @@ private:
 u8* current_pool_ptr;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -16,7 +16,7 @@
 #include "common/common_types.h"

 namespace Dynarmic {
-namespace BackendX64 {
+namespace Backend::X64 {

 namespace impl {

@@ -78,5 +78,5 @@ ArgCallback Devirtualize(mp::class_type<decltype(mfp)>* this_) {
 #endif
 }

-} // namespace BackendX64
+} // namespace Backend::X64
 } // namespace Dynarmic
@@ -21,7 +21,7 @@
 // TODO: Have ARM flags in host flags and not have them use up GPR registers unless necessary.
 // TODO: Actually implement that proper instruction selector you've always wanted to sweetheart.

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -365,4 +365,4 @@ void EmitX64::InvalidateBasicBlocks(const std::unordered_set<IR::LocationDescrip
 }
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -27,7 +27,7 @@ class Block;
 class Inst;
 } // namespace Dynarmic::IR

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class BlockOfCode;

@@ -118,4 +118,4 @@ protected:
 std::unordered_map<IR::LocationDescriptor, PatchInformation> patch_information;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -11,7 +11,7 @@
 #include "common/crypto/aes.h"
 #include "frontend/ir/microinstruction.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;
 namespace AES = Common::Crypto::AES;
@@ -73,4 +73,4 @@ void EmitX64::EmitAESMixColumns(EmitContext& ctx, IR::Inst* inst) {
 EmitAESFunction(args, ctx, code, inst, AES::MixColumns);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -12,7 +12,7 @@
 #include "common/crypto/crc32.h"
 #include "frontend/ir/microinstruction.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;
 namespace CRC32 = Common::Crypto::CRC32;
@@ -72,4 +72,4 @@ void EmitX64::EmitCRC32ISO64(EmitContext& ctx, IR::Inst* inst) {
 EmitCRC32ISO(code, ctx, inst, 64);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -15,7 +15,7 @@
 #include "frontend/ir/microinstruction.h"
 #include "frontend/ir/opcodes.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -214,11 +214,11 @@ static void EmitExtractRegister(BlockOfCode& code, EmitContext& ctx, IR::Inst* i
 ctx.reg_alloc.DefineValue(inst, result);
 }

-void EmitX64::EmitExtractRegister32(Dynarmic::BackendX64::EmitContext& ctx, IR::Inst* inst) {
+void EmitX64::EmitExtractRegister32(Dynarmic::Backend::X64::EmitContext& ctx, IR::Inst* inst) {
 EmitExtractRegister(code, ctx, inst, 32);
 }

-void EmitX64::EmitExtractRegister64(Dynarmic::BackendX64::EmitContext& ctx, IR::Inst* inst) {
+void EmitX64::EmitExtractRegister64(Dynarmic::Backend::X64::EmitContext& ctx, IR::Inst* inst) {
 EmitExtractRegister(code, ctx, inst, 64);
 }

@@ -1520,4 +1520,4 @@ void EmitX64::EmitMinUnsigned64(EmitContext& ctx, IR::Inst* inst) {
 ctx.reg_alloc.DefineValue(inst, y);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -30,7 +30,7 @@
 #include "frontend/ir/basic_block.h"
 #include "frontend/ir/microinstruction.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -1562,4 +1562,4 @@ void EmitX64::EmitFPFixedU64ToSingle(EmitContext& ctx, IR::Inst* inst) {

 ctx.reg_alloc.DefineValue(inst, result);
 }
-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -9,7 +9,7 @@
 #include "frontend/ir/microinstruction.h"
 #include "frontend/ir/opcodes.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -698,4 +698,4 @@ void EmitX64::EmitPackedSelect(EmitContext& ctx, IR::Inst* inst) {
 }
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -17,7 +17,7 @@
 #include "frontend/ir/microinstruction.h"
 #include "frontend/ir/opcodes.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -321,4 +321,4 @@ void EmitX64::EmitUnsignedSaturation(EmitContext& ctx, IR::Inst* inst) {
 ctx.reg_alloc.DefineValue(inst, result);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -9,7 +9,7 @@
 #include "common/crypto/sm4.h"
 #include "frontend/ir/microinstruction.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 void EmitX64::EmitSM4AccessSubstitutionBox(EmitContext& ctx, IR::Inst* inst) {
 auto args = ctx.reg_alloc.GetArgumentInfo(inst);
@@ -18,4 +18,4 @@ void EmitX64::EmitSM4AccessSubstitutionBox(EmitContext& ctx, IR::Inst* inst) {
 code.CallFunction(&Common::Crypto::SM4::AccessSubstitutionBox);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -22,7 +22,7 @@
 #include "frontend/ir/microinstruction.h"
 #include "frontend/ir/opcodes.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -4496,4 +4496,4 @@ void EmitX64::EmitZeroVector(EmitContext& ctx, IR::Inst* inst) {
 ctx.reg_alloc.DefineValue(inst, a);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -30,7 +30,7 @@
 #include "frontend/ir/basic_block.h"
 #include "frontend/ir/microinstruction.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 using namespace Xbyak::util;

@@ -1519,4 +1519,4 @@ void EmitX64::EmitFPVectorToUnsignedFixed64(EmitContext& ctx, IR::Inst* inst) {
 EmitFPVectorToFixed<64, true>(code, ctx, inst);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -6,7 +6,7 @@

 #include "backend/x64/block_of_code.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 struct BlockOfCode::ExceptionHandler::Impl final {
 };
@@ -18,4 +18,4 @@ void BlockOfCode::ExceptionHandler::Register(BlockOfCode&) {
 // Do nothing
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -68,7 +68,7 @@ struct UNWIND_INFO {
 // With Flags == 0 there are no additional fields.
 };

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 struct PrologueInformation {
 std::vector<UNWIND_CODE> unwind_code;
@@ -197,4 +197,4 @@ void BlockOfCode::ExceptionHandler::Register(BlockOfCode& code) {
 impl = std::make_unique<Impl>(rfuncs, code.getCode());
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -8,7 +8,7 @@

 #include "backend/x64/hostloc.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 Xbyak::Reg64 HostLocToReg64(HostLoc loc) {
 ASSERT(HostLocIsGPR(loc));
@@ -20,4 +20,4 @@ Xbyak::Xmm HostLocToXmm(HostLoc loc) {
 return Xbyak::Xmm(static_cast<int>(loc) - static_cast<int>(HostLoc::XMM0));
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -10,7 +10,7 @@
 #include "common/assert.h"
 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 enum class HostLoc {
 // Ordering of the registers is intentional. See also: HostLocToX64.
@@ -122,4 +122,4 @@ Xbyak::Address SpillToOpArg(HostLoc loc) {
 return JitStateType::GetSpillLocationFromIndex(i);
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -8,7 +8,7 @@

 #include <cstddef>

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 struct JitStateInfo {
 template <typename JitStateType>
@@ -39,4 +39,4 @@ struct JitStateInfo {
 const size_t offsetof_fpsr_qc;
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -10,7 +10,7 @@

 #include "common/assert.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 struct OpArg {
 OpArg() : type(Type::Operand), inner_operand() {}
@@ -75,4 +75,4 @@ private:
 };
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -19,7 +19,7 @@

 #include "common/common_types.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 namespace {
 std::mutex mutex;
@@ -72,11 +72,11 @@ void PerfMapClear() {
 OpenFile();
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64

 #else

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 namespace detail {
 void PerfMapRegister(const void*, const void*, std::string_view) {}
@@ -84,6 +84,6 @@ void PerfMapRegister(const void*, const void*, std::string_view) {}

 void PerfMapClear() {}

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64

 #endif
@@ -10,7 +10,7 @@

 #include "common/cast_util.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 namespace detail {
 void PerfMapRegister(const void* start, const void* end, std::string_view friendly_name);
@@ -23,4 +23,4 @@ void PerfMapRegister(T start, const void* end, std::string_view friendly_name) {

 void PerfMapClear();

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -15,7 +15,7 @@
 #include "backend/x64/reg_alloc.h"
 #include "common/assert.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 #define MAYBE_AVX(OPCODE, ...) \
 [&] { \
@@ -681,4 +681,4 @@ void RegAlloc::EmitExchange(HostLoc a, HostLoc b) {
 }
 }

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64
@@ -22,7 +22,7 @@
 #include "frontend/ir/microinstruction.h"
 #include "frontend/ir/value.h"

-namespace Dynarmic::BackendX64 {
+namespace Dynarmic::Backend::X64 {

 class RegAlloc;

@@ -162,4 +162,4 @@ private:
 void EmitExchange(HostLoc a, HostLoc b);
 };

-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::Backend::X64