Implement memory aborts
Commit: b6ddeeea0f (parent: 285e617e35)
16 changed files with 217 additions and 116 deletions

Summary: this commit introduces a HaltReason::MemoryAbort flag and an opt-in check_halt_on_memory_access config option. When enabled, the JIT emits a halt check after every guest data memory access, so a host memory callback can raise MemoryAbort; the backend then writes the aborting instruction's PC back to guest state and returns from the run loop. To carry that PC, every A32/A64 memory IR opcode gains a leading U64 location-descriptor operand.
@@ -110,6 +110,7 @@ protected:
     FakeCall FastmemCallback(u64 rip);
 
     // Memory access helpers
+    void EmitCheckMemoryAbort(A32EmitContext& ctx, IR::Inst* inst, Xbyak::Label* end = nullptr);
     template<std::size_t bitsize, auto callback>
     void EmitMemoryRead(A32EmitContext& ctx, IR::Inst* inst);
     template<std::size_t bitsize, auto callback>
@@ -235,4 +235,32 @@ void A32EmitX64::EmitA32ExclusiveWriteMemory64(A32EmitContext& ctx, IR::Inst* inst) {
     }
 }
 
+void A32EmitX64::EmitCheckMemoryAbort(A32EmitContext& ctx, IR::Inst* inst, Xbyak::Label* end) {
+    if (!conf.check_halt_on_memory_access) {
+        return;
+    }
+
+    const A32::LocationDescriptor current_location{IR::LocationDescriptor{inst->GetArg(0).GetU64()}};
+
+    code.test(dword[r15 + offsetof(A32JitState, halt_reason)], static_cast<u32>(HaltReason::MemoryAbort));
+
+    Xbyak::Label memory_abort;
+
+    if (!end) {
+        code.jnz(memory_abort, code.T_NEAR);
+        code.SwitchToFarCode();
+    } else {
+        code.jz(*end, code.T_NEAR);
+    }
+
+    code.L(memory_abort);
+    EmitSetUpperLocationDescriptor(current_location, ctx.Location());
+    code.mov(dword[r15 + offsetof(A32JitState, Reg) + sizeof(u32) * 15], current_location.PC());
+    code.ForceReturnFromRunCode();
+
+    if (!end) {
+        code.SwitchToNearCode();
+    }
+}
+
 } // namespace Dynarmic::Backend::X64
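
In C++ terms, the helper above emits a check roughly equivalent to the following sketch (illustrative only, not part of the commit; the real output is generated x64, and the abort path is placed in far code via SwitchToFarCode):

    // Rough C++ equivalent of the emitted check:
    if (jit_state.halt_reason & static_cast<u32>(HaltReason::MemoryAbort)) {
        jit_state.Reg[15] = current_location.PC();  // write back the guest PC
        ForceReturnFromRunCode();                   // unwind to the dispatcher
    }
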
@@ -173,7 +173,7 @@ private:
     IR::Block ir_block = A32::Translate(A32::LocationDescriptor{descriptor}, conf.callbacks, {conf.arch_version, conf.define_unpredictable_behaviour, conf.hook_hint_instructions});
     Optimization::PolyfillPass(ir_block, polyfill_options);
-    if (conf.HasOptimization(OptimizationFlag::GetSetElimination)) {
+    if (conf.HasOptimization(OptimizationFlag::GetSetElimination) && !conf.check_halt_on_memory_access) {
         Optimization::A32GetSetElimination(ir_block);
         Optimization::DeadCodeElimination(ir_block);
     }
 
@@ -108,6 +108,7 @@ protected:
     FakeCall FastmemCallback(u64 rip);
 
     // Memory access helpers
+    void EmitCheckMemoryAbort(A64EmitContext& ctx, IR::Inst* inst, Xbyak::Label* end = nullptr);
     template<std::size_t bitsize, auto callback>
     void EmitMemoryRead(A64EmitContext& ctx, IR::Inst* inst);
     template<std::size_t bitsize, auto callback>
@@ -407,4 +407,32 @@ void A64EmitX64::EmitA64ExclusiveWriteMemory128(A64EmitContext& ctx, IR::Inst* inst) {
     }
 }
 
+void A64EmitX64::EmitCheckMemoryAbort(A64EmitContext&, IR::Inst* inst, Xbyak::Label* end) {
+    if (!conf.check_halt_on_memory_access) {
+        return;
+    }
+
+    const A64::LocationDescriptor current_location{IR::LocationDescriptor{inst->GetArg(0).GetU64()}};
+
+    code.test(dword[r15 + offsetof(A64JitState, halt_reason)], static_cast<u32>(HaltReason::MemoryAbort));
+
+    Xbyak::Label memory_abort;
+
+    if (!end) {
+        code.jnz(memory_abort, code.T_NEAR);
+        code.SwitchToFarCode();
+    } else {
+        code.jz(*end, code.T_NEAR);
+    }
+
+    code.L(memory_abort);
+    code.mov(rax, current_location.PC());
+    code.mov(qword[r15 + offsetof(A64JitState, pc)], rax);
+    code.ForceReturnFromRunCode();
+
+    if (!end) {
+        code.SwitchToNearCode();
+    }
+}
+
 } // namespace Dynarmic::Backend::X64
@@ -272,7 +272,7 @@ private:
                                            {conf.define_unpredictable_behaviour, conf.wall_clock_cntpct});
     Optimization::PolyfillPass(ir_block, polyfill_options);
     Optimization::A64CallbackConfigPass(ir_block, conf);
-    if (conf.HasOptimization(OptimizationFlag::GetSetElimination)) {
+    if (conf.HasOptimization(OptimizationFlag::GetSetElimination) && !conf.check_halt_on_memory_access) {
         Optimization::A64GetSetElimination(ir_block);
         Optimization::DeadCodeElimination(ir_block);
     }
@@ -52,26 +52,27 @@ FakeCall AxxEmitX64::FastmemCallback(u64 rip_) {
 template<std::size_t bitsize, auto callback>
 void AxxEmitX64::EmitMemoryRead(AxxEmitContext& ctx, IR::Inst* inst) {
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    const bool ordered = IsOrdered(args[1].GetImmediateAccType());
+    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
     const auto fastmem_marker = ShouldFastmem(ctx, inst);
 
     if (!conf.page_table && !fastmem_marker) {
         // Neither fastmem nor page table: Use callbacks
         if constexpr (bitsize == 128) {
-            ctx.reg_alloc.HostCall(nullptr, {}, args[0]);
+            ctx.reg_alloc.HostCall(nullptr, {}, args[1]);
             if (ordered) {
                 code.mfence();
             }
             code.CallFunction(memory_read_128);
             ctx.reg_alloc.DefineValue(inst, xmm1);
         } else {
-            ctx.reg_alloc.HostCall(inst, {}, args[0]);
+            ctx.reg_alloc.HostCall(inst, {}, args[1]);
             if (ordered) {
                 code.mfence();
             }
             Devirtualize<callback>(conf.callbacks).EmitCall(code);
             code.ZeroExtendFrom(bitsize, code.ABI_RETURN);
         }
+        EmitCheckMemoryAbort(ctx, inst);
         return;
     }
 
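
The off-by-one churn through the emitters follows from the new IR layout: every memory opcode now carries the location descriptor as operand 0 (see the opcode table further down), so all argument indices shift up by one. As the hunks above show, a read's operands are now:

    auto args = ctx.reg_alloc.GetArgumentInfo(inst);
    // args[0]: U64 location-descriptor immediate (new in this commit)
    // args[1]: virtual address
    // args[2]: access type immediate (writes additionally carry the value,
    //          shifting AccType to args[3])
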
@@ -83,20 +84,24 @@ void AxxEmitX64::EmitMemoryRead(AxxEmitContext& ctx, IR::Inst* inst) {
         ctx.reg_alloc.ScratchGpr(HostLoc::RDX);
     }
 
-    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[0]);
+    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[1]);
     const int value_idx = bitsize == 128 ? ctx.reg_alloc.ScratchXmm().getIdx() : ctx.reg_alloc.ScratchGpr().getIdx();
 
     const auto wrapped_fn = read_fallbacks[std::make_tuple(ordered, bitsize, vaddr.getIdx(), value_idx)];
 
     Xbyak::Label abort, end;
-    bool require_abort_handling = false;
 
     if (fastmem_marker) {
         // Use fastmem
+        bool require_abort_handling;
         const auto src_ptr = EmitFastmemVAddr(code, ctx, abort, vaddr, require_abort_handling);
 
         const auto location = EmitReadMemoryMov<bitsize>(code, value_idx, src_ptr, ordered);
 
+        code.SwitchToFarCode();
+        code.L(abort);
+        code.call(wrapped_fn);
+
         fastmem_patch_info.emplace(
             mcl::bit_cast<u64>(location),
             FastmemPatchInfo{
@@ -105,22 +110,24 @@ void AxxEmitX64::EmitMemoryRead(AxxEmitContext& ctx, IR::Inst* inst) {
                 *fastmem_marker,
                 conf.recompile_on_fastmem_failure,
             });
+
+        EmitCheckMemoryAbort(ctx, inst, &end);
+        code.jmp(end, code.T_NEAR);
+        code.SwitchToNearCode();
     } else {
         // Use page table
         ASSERT(conf.page_table);
         const auto src_ptr = EmitVAddrLookup(code, ctx, bitsize, abort, vaddr);
-        require_abort_handling = true;
         EmitReadMemoryMov<bitsize>(code, value_idx, src_ptr, ordered);
-    }
-    code.L(end);
 
-    if (require_abort_handling) {
         code.SwitchToFarCode();
         code.L(abort);
         code.call(wrapped_fn);
+        EmitCheckMemoryAbort(ctx, inst, &end);
         code.jmp(end, code.T_NEAR);
         code.SwitchToNearCode();
     }
+    code.L(end);
 
     if constexpr (bitsize == 128) {
         ctx.reg_alloc.DefineValue(inst, Xbyak::Xmm{value_idx});
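
Previously the abort fallback was a single far-code block emitted only when require_abort_handling was set; it is now emitted unconditionally inside each branch so that the halt check can follow the fallback call. A sketch of the resulting layout (comments only, not literal emitted code):

    //   near:  memory access          ; a faulting fastmem access is patched to jump to `abort`
    //   far:   abort:
    //            call wrapped_fn               ; slow-path access via callbacks
    //            test halt_reason, MemoryAbort ; EmitCheckMemoryAbort(ctx, inst, &end)
    //            jz end                        ; no abort requested: resume normally
    //            write back guest PC; force return from run-code
    //   near:  end:
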
@@ -132,24 +139,25 @@ void AxxEmitX64::EmitMemoryRead(AxxEmitContext& ctx, IR::Inst* inst) {
 template<std::size_t bitsize, auto callback>
 void AxxEmitX64::EmitMemoryWrite(AxxEmitContext& ctx, IR::Inst* inst) {
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
+    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
     const auto fastmem_marker = ShouldFastmem(ctx, inst);
 
     if (!conf.page_table && !fastmem_marker) {
         // Neither fastmem nor page table: Use callbacks
         if constexpr (bitsize == 128) {
-            ctx.reg_alloc.Use(args[0], ABI_PARAM2);
-            ctx.reg_alloc.Use(args[1], HostLoc::XMM1);
+            ctx.reg_alloc.Use(args[1], ABI_PARAM2);
+            ctx.reg_alloc.Use(args[2], HostLoc::XMM1);
             ctx.reg_alloc.EndOfAllocScope();
             ctx.reg_alloc.HostCall(nullptr);
             code.CallFunction(memory_write_128);
         } else {
-            ctx.reg_alloc.HostCall(nullptr, {}, args[0], args[1]);
+            ctx.reg_alloc.HostCall(nullptr, {}, args[1], args[2]);
             Devirtualize<callback>(conf.callbacks).EmitCall(code);
         }
         if (ordered) {
             code.mfence();
         }
+        EmitCheckMemoryAbort(ctx, inst);
         return;
     }
 
@@ -161,22 +169,26 @@ void AxxEmitX64::EmitMemoryWrite(AxxEmitContext& ctx, IR::Inst* inst) {
         ctx.reg_alloc.ScratchGpr(HostLoc::RDX);
     }
 
-    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[0]);
+    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[1]);
     const int value_idx = bitsize == 128
-                              ? ctx.reg_alloc.UseXmm(args[1]).getIdx()
-                              : (ordered ? ctx.reg_alloc.UseScratchGpr(args[1]).getIdx() : ctx.reg_alloc.UseGpr(args[1]).getIdx());
+                              ? ctx.reg_alloc.UseXmm(args[2]).getIdx()
+                              : (ordered ? ctx.reg_alloc.UseScratchGpr(args[2]).getIdx() : ctx.reg_alloc.UseGpr(args[2]).getIdx());
 
     const auto wrapped_fn = write_fallbacks[std::make_tuple(ordered, bitsize, vaddr.getIdx(), value_idx)];
 
     Xbyak::Label abort, end;
-    bool require_abort_handling = false;
 
     if (fastmem_marker) {
         // Use fastmem
+        bool require_abort_handling;
         const auto dest_ptr = EmitFastmemVAddr(code, ctx, abort, vaddr, require_abort_handling);
 
         const auto location = EmitWriteMemoryMov<bitsize>(code, dest_ptr, value_idx, ordered);
 
+        code.SwitchToFarCode();
+        code.L(abort);
+        code.call(wrapped_fn);
+
         fastmem_patch_info.emplace(
             mcl::bit_cast<u64>(location),
             FastmemPatchInfo{
@@ -185,34 +197,36 @@ void AxxEmitX64::EmitMemoryWrite(AxxEmitContext& ctx, IR::Inst* inst) {
                 *fastmem_marker,
                 conf.recompile_on_fastmem_failure,
             });
+
+        EmitCheckMemoryAbort(ctx, inst, &end);
+        code.jmp(end, code.T_NEAR);
+        code.SwitchToNearCode();
     } else {
         // Use page table
         ASSERT(conf.page_table);
         const auto dest_ptr = EmitVAddrLookup(code, ctx, bitsize, abort, vaddr);
-        require_abort_handling = true;
         EmitWriteMemoryMov<bitsize>(code, dest_ptr, value_idx, ordered);
-    }
-    code.L(end);
 
-    if (require_abort_handling) {
         code.SwitchToFarCode();
         code.L(abort);
         code.call(wrapped_fn);
+        EmitCheckMemoryAbort(ctx, inst, &end);
         code.jmp(end, code.T_NEAR);
         code.SwitchToNearCode();
     }
+    code.L(end);
 }
 
 template<std::size_t bitsize, auto callback>
 void AxxEmitX64::EmitExclusiveReadMemory(AxxEmitContext& ctx, IR::Inst* inst) {
     ASSERT(conf.global_monitor != nullptr);
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    const bool ordered = IsOrdered(args[1].GetImmediateAccType());
+    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
 
     if constexpr (bitsize != 128) {
         using T = mcl::unsigned_integer_of_size<bitsize>;
 
-        ctx.reg_alloc.HostCall(inst, {}, args[0]);
+        ctx.reg_alloc.HostCall(inst, {}, args[1]);
 
         code.mov(code.byte[r15 + offsetof(AxxJitState, exclusive_state)], u8(1));
         code.mov(code.ABI_PARAM1, reinterpret_cast<u64>(&conf));
@@ -228,7 +242,7 @@ void AxxEmitX64::EmitExclusiveReadMemory(AxxEmitContext& ctx, IR::Inst* inst) {
         code.ZeroExtendFrom(bitsize, code.ABI_RETURN);
     } else {
         const Xbyak::Xmm result = ctx.reg_alloc.ScratchXmm();
-        ctx.reg_alloc.Use(args[0], ABI_PARAM2);
+        ctx.reg_alloc.Use(args[1], ABI_PARAM2);
         ctx.reg_alloc.EndOfAllocScope();
         ctx.reg_alloc.HostCall(nullptr);
 
@@ -250,19 +264,21 @@ void AxxEmitX64::EmitExclusiveReadMemory(AxxEmitContext& ctx, IR::Inst* inst) {
 
         ctx.reg_alloc.DefineValue(inst, result);
     }
+
+    EmitCheckMemoryAbort(ctx, inst);
 }
 
 template<std::size_t bitsize, auto callback>
 void AxxEmitX64::EmitExclusiveWriteMemory(AxxEmitContext& ctx, IR::Inst* inst) {
     ASSERT(conf.global_monitor != nullptr);
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
-    const bool ordered = IsOrdered(args[2].GetImmediateAccType());
+    const bool ordered = IsOrdered(args[3].GetImmediateAccType());
 
     if constexpr (bitsize != 128) {
-        ctx.reg_alloc.HostCall(inst, {}, args[0], args[1]);
+        ctx.reg_alloc.HostCall(inst, {}, args[1], args[2]);
     } else {
-        ctx.reg_alloc.Use(args[0], ABI_PARAM2);
-        ctx.reg_alloc.Use(args[1], HostLoc::XMM1);
+        ctx.reg_alloc.Use(args[1], ABI_PARAM2);
+        ctx.reg_alloc.Use(args[2], HostLoc::XMM1);
         ctx.reg_alloc.EndOfAllocScope();
         ctx.reg_alloc.HostCall(inst);
     }
@@ -308,6 +324,8 @@ void AxxEmitX64::EmitExclusiveWriteMemory(AxxEmitContext& ctx, IR::Inst* inst) {
         ctx.reg_alloc.ReleaseStackSpace(16 + ABI_SHADOW_SPACE);
     }
     code.L(end);
+
+    EmitCheckMemoryAbort(ctx, inst);
 }
 
 template<std::size_t bitsize, auto callback>
@@ -329,7 +347,7 @@ void AxxEmitX64::EmitExclusiveReadMemoryInline(AxxEmitContext& ctx, IR::Inst* inst) {
         ctx.reg_alloc.ScratchGpr(HostLoc::RDX);
     }
 
-    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[0]);
+    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[1]);
     const int value_idx = bitsize == 128 ? ctx.reg_alloc.ScratchXmm().getIdx() : ctx.reg_alloc.ScratchGpr().getIdx();
     const Xbyak::Reg64 tmp = ctx.reg_alloc.ScratchGpr();
     const Xbyak::Reg64 tmp2 = ctx.reg_alloc.ScratchGpr();
@@ -383,6 +401,8 @@ void AxxEmitX64::EmitExclusiveReadMemoryInline(AxxEmitContext& ctx, IR::Inst* inst) {
     } else {
        ctx.reg_alloc.DefineValue(inst, Xbyak::Reg64{value_idx});
     }
+
+    EmitCheckMemoryAbort(ctx, inst);
 }
 
 template<std::size_t bitsize, auto callback>
@@ -402,13 +422,13 @@ void AxxEmitX64::EmitExclusiveWriteMemoryInline(AxxEmitContext& ctx, IR::Inst* inst) {
             ctx.reg_alloc.ScratchGpr(HostLoc::RBX);
             ctx.reg_alloc.ScratchGpr(HostLoc::RCX);
             ctx.reg_alloc.ScratchGpr(HostLoc::RDX);
-            return ctx.reg_alloc.UseXmm(args[1]);
+            return ctx.reg_alloc.UseXmm(args[2]);
         } else {
             ctx.reg_alloc.ScratchGpr(HostLoc::RAX);
-            return ctx.reg_alloc.UseGpr(args[1]);
+            return ctx.reg_alloc.UseGpr(args[2]);
         }
     }();
-    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[0]);
+    const Xbyak::Reg64 vaddr = ctx.reg_alloc.UseGpr(args[1]);
     const Xbyak::Reg32 status = ctx.reg_alloc.ScratchGpr().cvt32();
     const Xbyak::Reg64 tmp = ctx.reg_alloc.ScratchGpr();
 
|
@ -513,6 +533,8 @@ void AxxEmitX64::EmitExclusiveWriteMemoryInline(AxxEmitContext& ctx, IR::Inst* i
|
||||||
EmitExclusiveUnlock(code, conf, tmp, eax);
|
EmitExclusiveUnlock(code, conf, tmp, eax);
|
||||||
|
|
||||||
ctx.reg_alloc.DefineValue(inst, status);
|
ctx.reg_alloc.DefineValue(inst, status);
|
||||||
|
|
||||||
|
EmitCheckMemoryAbort(ctx, inst);
|
||||||
}
|
}
|
||||||
|
|
||||||
#undef AxxEmitX64
|
#undef AxxEmitX64
|
||||||
|
|
|
@@ -245,40 +245,40 @@ IR::UAny IREmitter::ReadMemory(size_t bitsize, const IR::U32& vaddr, IR::AccType acc_type) {
 }
 
 IR::U8 IREmitter::ReadMemory8(const IR::U32& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U8>(Opcode::A32ReadMemory8, vaddr, IR::Value{acc_type});
+    return Inst<IR::U8>(Opcode::A32ReadMemory8, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U16 IREmitter::ReadMemory16(const IR::U32& vaddr, IR::AccType acc_type) {
-    const auto value = Inst<IR::U16>(Opcode::A32ReadMemory16, vaddr, IR::Value{acc_type});
+    const auto value = Inst<IR::U16>(Opcode::A32ReadMemory16, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
     return current_location.EFlag() ? ByteReverseHalf(value) : value;
 }
 
 IR::U32 IREmitter::ReadMemory32(const IR::U32& vaddr, IR::AccType acc_type) {
-    const auto value = Inst<IR::U32>(Opcode::A32ReadMemory32, vaddr, IR::Value{acc_type});
+    const auto value = Inst<IR::U32>(Opcode::A32ReadMemory32, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
     return current_location.EFlag() ? ByteReverseWord(value) : value;
 }
 
 IR::U64 IREmitter::ReadMemory64(const IR::U32& vaddr, IR::AccType acc_type) {
-    const auto value = Inst<IR::U64>(Opcode::A32ReadMemory64, vaddr, IR::Value{acc_type});
+    const auto value = Inst<IR::U64>(Opcode::A32ReadMemory64, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
     return current_location.EFlag() ? ByteReverseDual(value) : value;
 }
 
 IR::U8 IREmitter::ExclusiveReadMemory8(const IR::U32& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U8>(Opcode::A32ExclusiveReadMemory8, vaddr, IR::Value{acc_type});
+    return Inst<IR::U8>(Opcode::A32ExclusiveReadMemory8, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U16 IREmitter::ExclusiveReadMemory16(const IR::U32& vaddr, IR::AccType acc_type) {
-    const auto value = Inst<IR::U16>(Opcode::A32ExclusiveReadMemory16, vaddr, IR::Value{acc_type});
+    const auto value = Inst<IR::U16>(Opcode::A32ExclusiveReadMemory16, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
     return current_location.EFlag() ? ByteReverseHalf(value) : value;
 }
 
 IR::U32 IREmitter::ExclusiveReadMemory32(const IR::U32& vaddr, IR::AccType acc_type) {
-    const auto value = Inst<IR::U32>(Opcode::A32ExclusiveReadMemory32, vaddr, IR::Value{acc_type});
+    const auto value = Inst<IR::U32>(Opcode::A32ExclusiveReadMemory32, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
     return current_location.EFlag() ? ByteReverseWord(value) : value;
 }
 
 std::pair<IR::U32, IR::U32> IREmitter::ExclusiveReadMemory64(const IR::U32& vaddr, IR::AccType acc_type) {
-    const auto value = Inst<IR::U64>(Opcode::A32ExclusiveReadMemory64, vaddr, IR::Value{acc_type});
+    const auto value = Inst<IR::U64>(Opcode::A32ExclusiveReadMemory64, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
     const auto lo = LeastSignificantWord(value);
     const auto hi = MostSignificantWord(value).result;
     if (current_location.EFlag()) {
@@ -303,55 +303,55 @@ void IREmitter::WriteMemory(size_t bitsize, const IR::U32& vaddr, const IR::UAny& value, IR::AccType acc_type) {
 }
 
 void IREmitter::WriteMemory8(const IR::U32& vaddr, const IR::U8& value, IR::AccType acc_type) {
-    Inst(Opcode::A32WriteMemory8, vaddr, value, IR::Value{acc_type});
+    Inst(Opcode::A32WriteMemory8, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 void IREmitter::WriteMemory16(const IR::U32& vaddr, const IR::U16& value, IR::AccType acc_type) {
     if (current_location.EFlag()) {
         const auto v = ByteReverseHalf(value);
-        Inst(Opcode::A32WriteMemory16, vaddr, v, IR::Value{acc_type});
+        Inst(Opcode::A32WriteMemory16, ImmCurrentLocationDescriptor(), vaddr, v, IR::Value{acc_type});
     } else {
-        Inst(Opcode::A32WriteMemory16, vaddr, value, IR::Value{acc_type});
+        Inst(Opcode::A32WriteMemory16, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
     }
 }
 
 void IREmitter::WriteMemory32(const IR::U32& vaddr, const IR::U32& value, IR::AccType acc_type) {
     if (current_location.EFlag()) {
         const auto v = ByteReverseWord(value);
-        Inst(Opcode::A32WriteMemory32, vaddr, v, IR::Value{acc_type});
+        Inst(Opcode::A32WriteMemory32, ImmCurrentLocationDescriptor(), vaddr, v, IR::Value{acc_type});
     } else {
-        Inst(Opcode::A32WriteMemory32, vaddr, value, IR::Value{acc_type});
+        Inst(Opcode::A32WriteMemory32, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
     }
 }
 
 void IREmitter::WriteMemory64(const IR::U32& vaddr, const IR::U64& value, IR::AccType acc_type) {
     if (current_location.EFlag()) {
         const auto v = ByteReverseDual(value);
-        Inst(Opcode::A32WriteMemory64, vaddr, v, IR::Value{acc_type});
+        Inst(Opcode::A32WriteMemory64, ImmCurrentLocationDescriptor(), vaddr, v, IR::Value{acc_type});
     } else {
-        Inst(Opcode::A32WriteMemory64, vaddr, value, IR::Value{acc_type});
+        Inst(Opcode::A32WriteMemory64, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
     }
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory8(const IR::U32& vaddr, const IR::U8& value, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory8, vaddr, value, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory8, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory16(const IR::U32& vaddr, const IR::U16& value, IR::AccType acc_type) {
     if (current_location.EFlag()) {
         const auto v = ByteReverseHalf(value);
-        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory16, vaddr, v, IR::Value{acc_type});
+        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory16, ImmCurrentLocationDescriptor(), vaddr, v, IR::Value{acc_type});
     } else {
-        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory16, vaddr, value, IR::Value{acc_type});
+        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory16, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
     }
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory32(const IR::U32& vaddr, const IR::U32& value, IR::AccType acc_type) {
     if (current_location.EFlag()) {
         const auto v = ByteReverseWord(value);
-        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory32, vaddr, v, IR::Value{acc_type});
+        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory32, ImmCurrentLocationDescriptor(), vaddr, v, IR::Value{acc_type});
     } else {
-        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory32, vaddr, value, IR::Value{acc_type});
+        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory32, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
     }
 }
 
@@ -359,9 +359,9 @@ IR::U32 IREmitter::ExclusiveWriteMemory64(const IR::U32& vaddr, const IR::U32& value_lo, const IR::U32& value_hi, IR::AccType acc_type) {
     if (current_location.EFlag()) {
         const auto vlo = ByteReverseWord(value_lo);
         const auto vhi = ByteReverseWord(value_hi);
-        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory64, vaddr, Pack2x32To1x64(vlo, vhi), IR::Value{acc_type});
+        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory64, ImmCurrentLocationDescriptor(), vaddr, Pack2x32To1x64(vlo, vhi), IR::Value{acc_type});
     } else {
-        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory64, vaddr, Pack2x32To1x64(value_lo, value_hi), IR::Value{acc_type});
+        return Inst<IR::U32>(Opcode::A32ExclusiveWriteMemory64, ImmCurrentLocationDescriptor(), vaddr, Pack2x32To1x64(value_lo, value_hi), IR::Value{acc_type});
     }
 }
 
@@ -439,4 +439,8 @@ void IREmitter::CoprocStoreWords(size_t coproc_no, bool two, bool long_transfer,
     Inst(Opcode::A32CoprocStoreWords, IR::Value(coproc_info), address);
 }
 
+IR::U64 IREmitter::ImmCurrentLocationDescriptor() {
+    return Imm64(IR::LocationDescriptor{current_location}.Value());
+}
+
 } // namespace Dynarmic::A32
@@ -110,6 +110,7 @@ public:
 
 private:
     enum ArchVersion arch_version;
+    IR::U64 ImmCurrentLocationDescriptor();
 };
 
 } // namespace Dynarmic::A32
@@ -107,83 +107,83 @@ void IREmitter::ClearExclusive() {
 }
 
 IR::U8 IREmitter::ReadMemory8(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U8>(Opcode::A64ReadMemory8, vaddr, IR::Value{acc_type});
+    return Inst<IR::U8>(Opcode::A64ReadMemory8, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U16 IREmitter::ReadMemory16(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U16>(Opcode::A64ReadMemory16, vaddr, IR::Value{acc_type});
+    return Inst<IR::U16>(Opcode::A64ReadMemory16, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ReadMemory32(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ReadMemory32, vaddr, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ReadMemory32, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U64 IREmitter::ReadMemory64(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U64>(Opcode::A64ReadMemory64, vaddr, IR::Value{acc_type});
+    return Inst<IR::U64>(Opcode::A64ReadMemory64, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U128 IREmitter::ReadMemory128(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U128>(Opcode::A64ReadMemory128, vaddr, IR::Value{acc_type});
+    return Inst<IR::U128>(Opcode::A64ReadMemory128, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U8 IREmitter::ExclusiveReadMemory8(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U8>(Opcode::A64ExclusiveReadMemory8, vaddr, IR::Value{acc_type});
+    return Inst<IR::U8>(Opcode::A64ExclusiveReadMemory8, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U16 IREmitter::ExclusiveReadMemory16(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U16>(Opcode::A64ExclusiveReadMemory16, vaddr, IR::Value{acc_type});
+    return Inst<IR::U16>(Opcode::A64ExclusiveReadMemory16, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveReadMemory32(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ExclusiveReadMemory32, vaddr, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ExclusiveReadMemory32, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U64 IREmitter::ExclusiveReadMemory64(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U64>(Opcode::A64ExclusiveReadMemory64, vaddr, IR::Value{acc_type});
+    return Inst<IR::U64>(Opcode::A64ExclusiveReadMemory64, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 IR::U128 IREmitter::ExclusiveReadMemory128(const IR::U64& vaddr, IR::AccType acc_type) {
-    return Inst<IR::U128>(Opcode::A64ExclusiveReadMemory128, vaddr, IR::Value{acc_type});
+    return Inst<IR::U128>(Opcode::A64ExclusiveReadMemory128, ImmCurrentLocationDescriptor(), vaddr, IR::Value{acc_type});
 }
 
 void IREmitter::WriteMemory8(const IR::U64& vaddr, const IR::U8& value, IR::AccType acc_type) {
-    Inst(Opcode::A64WriteMemory8, vaddr, value, IR::Value{acc_type});
+    Inst(Opcode::A64WriteMemory8, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 void IREmitter::WriteMemory16(const IR::U64& vaddr, const IR::U16& value, IR::AccType acc_type) {
-    Inst(Opcode::A64WriteMemory16, vaddr, value, IR::Value{acc_type});
+    Inst(Opcode::A64WriteMemory16, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 void IREmitter::WriteMemory32(const IR::U64& vaddr, const IR::U32& value, IR::AccType acc_type) {
-    Inst(Opcode::A64WriteMemory32, vaddr, value, IR::Value{acc_type});
+    Inst(Opcode::A64WriteMemory32, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 void IREmitter::WriteMemory64(const IR::U64& vaddr, const IR::U64& value, IR::AccType acc_type) {
-    Inst(Opcode::A64WriteMemory64, vaddr, value, IR::Value{acc_type});
+    Inst(Opcode::A64WriteMemory64, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 void IREmitter::WriteMemory128(const IR::U64& vaddr, const IR::U128& value, IR::AccType acc_type) {
-    Inst(Opcode::A64WriteMemory128, vaddr, value, IR::Value{acc_type});
+    Inst(Opcode::A64WriteMemory128, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory8(const IR::U64& vaddr, const IR::U8& value, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory8, vaddr, value, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory8, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory16(const IR::U64& vaddr, const IR::U16& value, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory16, vaddr, value, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory16, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory32(const IR::U64& vaddr, const IR::U32& value, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory32, vaddr, value, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory32, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory64(const IR::U64& vaddr, const IR::U64& value, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory64, vaddr, value, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory64, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::ExclusiveWriteMemory128(const IR::U64& vaddr, const IR::U128& value, IR::AccType acc_type) {
-    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory128, vaddr, value, IR::Value{acc_type});
+    return Inst<IR::U32>(Opcode::A64ExclusiveWriteMemory128, ImmCurrentLocationDescriptor(), vaddr, value, IR::Value{acc_type});
 }
 
 IR::U32 IREmitter::GetW(Reg reg) {
@@ -262,4 +262,8 @@ void IREmitter::SetPC(const IR::U64& value) {
     Inst(Opcode::A64SetPC, value);
 }
 
+IR::U64 IREmitter::ImmCurrentLocationDescriptor() {
+    return Imm64(IR::LocationDescriptor{*current_location}.Value());
+}
+
 } // namespace Dynarmic::A64
@@ -95,6 +95,9 @@ public:
     void SetFPCR(const IR::U32& value);
     void SetFPSR(const IR::U32& value);
     void SetPC(const IR::U64& value);
+
+private:
+    IR::U64 ImmCurrentLocationDescriptor();
 };
 
 } // namespace Dynarmic::A64
@@ -212,6 +212,10 @@ struct UserConfig {
     /// to avoid writting certain unnecessary code only needed for cycle timers.
     bool wall_clock_cntpct = false;
 
+    /// This allows accurately emulating protection fault handlers. If true, we check
+    /// for exit after every data memory access by the emulated program.
+    bool check_halt_on_memory_access = false;
+
     /// This option allows you to disable cycle counting. If this is set to false,
     /// AddTicks and GetTicksRemaining are never called, and no cycle counting is done.
     bool enable_cycle_counting = true;
@@ -277,6 +277,10 @@ struct UserConfig {
     /// to avoid writting certain unnecessary code only needed for cycle timers.
     bool wall_clock_cntpct = false;
 
+    /// This allows accurately emulating protection fault handlers. If true, we check
+    /// for exit after every data memory access by the emulated program.
+    bool check_halt_on_memory_access = false;
+
     /// This option allows you to disable cycle counting. If this is set to false,
     /// AddTicks and GetTicksRemaining are never called, and no cycle counting is done.
     bool enable_cycle_counting = true;
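
A minimal usage sketch, assuming a host that wants to model data aborts (the environment variable and the HaltExecution call are illustrative of the existing public interface, not part of this diff):

    Dynarmic::A64::UserConfig config;
    config.callbacks = &env;                    // host's UserCallbacks implementation
    config.check_halt_on_memory_access = true;  // enable the per-access halt checks added here

    // Inside a failing MemoryRead*/MemoryWrite* callback, the host can request:
    //     jit.HaltExecution(Dynarmic::HaltReason::MemoryAbort);
    // The JIT then writes the faulting instruction's PC back to guest state
    // and returns from the run loop.
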
@@ -12,6 +12,7 @@ namespace Dynarmic {
 enum class HaltReason : std::uint32_t {
     Step = 0x00000001,
     CacheInvalidation = 0x00000002,
+    MemoryAbort = 0x00000004,
     UserDefined1 = 0x01000000,
     UserDefined2 = 0x02000000,
     UserDefined3 = 0x04000000,
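
HaltReason is a bitmask, so the new bit composes with the existing ones; a host would test it in the value returned by Run(), for example (sketch, assuming the Has() helper defined alongside this enum):

    const Dynarmic::HaltReason reason = jit.Run();
    if (Has(reason, Dynarmic::HaltReason::MemoryAbort)) {
        // The guest PC already points at the aborting instruction; enter the
        // emulated fault handler, then resume with jit.Run().
    }
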
@@ -688,45 +688,45 @@ OPCODE(FPVectorToUnsignedFixed64, U128, U128
 
 // A32 Memory access
 A32OPC(ClearExclusive, Void, )
-A32OPC(ReadMemory8, U8, U32, AccType )
-A32OPC(ReadMemory16, U16, U32, AccType )
-A32OPC(ReadMemory32, U32, U32, AccType )
-A32OPC(ReadMemory64, U64, U32, AccType )
-A32OPC(ExclusiveReadMemory8, U8, U32, AccType )
-A32OPC(ExclusiveReadMemory16, U16, U32, AccType )
-A32OPC(ExclusiveReadMemory32, U32, U32, AccType )
-A32OPC(ExclusiveReadMemory64, U64, U32, AccType )
-A32OPC(WriteMemory8, Void, U32, U8, AccType )
-A32OPC(WriteMemory16, Void, U32, U16, AccType )
-A32OPC(WriteMemory32, Void, U32, U32, AccType )
-A32OPC(WriteMemory64, Void, U32, U64, AccType )
-A32OPC(ExclusiveWriteMemory8, U32, U32, U8, AccType )
-A32OPC(ExclusiveWriteMemory16, U32, U32, U16, AccType )
-A32OPC(ExclusiveWriteMemory32, U32, U32, U32, AccType )
-A32OPC(ExclusiveWriteMemory64, U32, U32, U64, AccType )
+A32OPC(ReadMemory8, U8, U64, U32, AccType )
+A32OPC(ReadMemory16, U16, U64, U32, AccType )
+A32OPC(ReadMemory32, U32, U64, U32, AccType )
+A32OPC(ReadMemory64, U64, U64, U32, AccType )
+A32OPC(ExclusiveReadMemory8, U8, U64, U32, AccType )
+A32OPC(ExclusiveReadMemory16, U16, U64, U32, AccType )
+A32OPC(ExclusiveReadMemory32, U32, U64, U32, AccType )
+A32OPC(ExclusiveReadMemory64, U64, U64, U32, AccType )
+A32OPC(WriteMemory8, Void, U64, U32, U8, AccType )
+A32OPC(WriteMemory16, Void, U64, U32, U16, AccType )
+A32OPC(WriteMemory32, Void, U64, U32, U32, AccType )
+A32OPC(WriteMemory64, Void, U64, U32, U64, AccType )
+A32OPC(ExclusiveWriteMemory8, U32, U64, U32, U8, AccType )
+A32OPC(ExclusiveWriteMemory16, U32, U64, U32, U16, AccType )
+A32OPC(ExclusiveWriteMemory32, U32, U64, U32, U32, AccType )
+A32OPC(ExclusiveWriteMemory64, U32, U64, U32, U64, AccType )
 
 // A64 Memory access
 A64OPC(ClearExclusive, Void, )
-A64OPC(ReadMemory8, U8, U64, AccType )
-A64OPC(ReadMemory16, U16, U64, AccType )
-A64OPC(ReadMemory32, U32, U64, AccType )
-A64OPC(ReadMemory64, U64, U64, AccType )
-A64OPC(ReadMemory128, U128, U64, AccType )
-A64OPC(ExclusiveReadMemory8, U8, U64, AccType )
-A64OPC(ExclusiveReadMemory16, U16, U64, AccType )
-A64OPC(ExclusiveReadMemory32, U32, U64, AccType )
-A64OPC(ExclusiveReadMemory64, U64, U64, AccType )
-A64OPC(ExclusiveReadMemory128, U128, U64, AccType )
-A64OPC(WriteMemory8, Void, U64, U8, AccType )
-A64OPC(WriteMemory16, Void, U64, U16, AccType )
-A64OPC(WriteMemory32, Void, U64, U32, AccType )
-A64OPC(WriteMemory64, Void, U64, U64, AccType )
-A64OPC(WriteMemory128, Void, U64, U128, AccType )
-A64OPC(ExclusiveWriteMemory8, U32, U64, U8, AccType )
-A64OPC(ExclusiveWriteMemory16, U32, U64, U16, AccType )
-A64OPC(ExclusiveWriteMemory32, U32, U64, U32, AccType )
-A64OPC(ExclusiveWriteMemory64, U32, U64, U64, AccType )
-A64OPC(ExclusiveWriteMemory128, U32, U64, U128, AccType )
+A64OPC(ReadMemory8, U8, U64, U64, AccType )
+A64OPC(ReadMemory16, U16, U64, U64, AccType )
+A64OPC(ReadMemory32, U32, U64, U64, AccType )
+A64OPC(ReadMemory64, U64, U64, U64, AccType )
+A64OPC(ReadMemory128, U128, U64, U64, AccType )
+A64OPC(ExclusiveReadMemory8, U8, U64, U64, AccType )
+A64OPC(ExclusiveReadMemory16, U16, U64, U64, AccType )
+A64OPC(ExclusiveReadMemory32, U32, U64, U64, AccType )
+A64OPC(ExclusiveReadMemory64, U64, U64, U64, AccType )
+A64OPC(ExclusiveReadMemory128, U128, U64, U64, AccType )
+A64OPC(WriteMemory8, Void, U64, U64, U8, AccType )
+A64OPC(WriteMemory16, Void, U64, U64, U16, AccType )
+A64OPC(WriteMemory32, Void, U64, U64, U32, AccType )
+A64OPC(WriteMemory64, Void, U64, U64, U64, AccType )
+A64OPC(WriteMemory128, Void, U64, U64, U128, AccType )
+A64OPC(ExclusiveWriteMemory8, U32, U64, U64, U8, AccType )
+A64OPC(ExclusiveWriteMemory16, U32, U64, U64, U16, AccType )
+A64OPC(ExclusiveWriteMemory32, U32, U64, U64, U32, AccType )
+A64OPC(ExclusiveWriteMemory64, U32, U64, U64, U64, AccType )
+A64OPC(ExclusiveWriteMemory128, U32, U64, U64, U128, AccType )
 
 // Coprocessor
 A32OPC(CoprocInternalOperation, Void, CoprocInfo )
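
The extra leading U64 operand on every memory opcode is the IR::LocationDescriptor of the accessing instruction, produced by ImmCurrentLocationDescriptor() in the frontends; the backend recovers it exactly as EmitCheckMemoryAbort does above:

    const A64::LocationDescriptor current_location{
        IR::LocationDescriptor{inst->GetArg(0).GetU64()}};
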
@@ -25,7 +25,7 @@ void A32ConstantMemoryReads(IR::Block& block, A32::UserCallbacks* cb) {
             break;
         }
 
-        const u32 vaddr = inst.GetArg(0).GetU32();
+        const u32 vaddr = inst.GetArg(1).GetU32();
         if (cb->IsReadOnlyMemory(vaddr)) {
             const u8 value_from_memory = cb->MemoryRead8(vaddr);
             inst.ReplaceUsesWith(IR::Value{value_from_memory});
@@ -37,7 +37,7 @@ void A32ConstantMemoryReads(IR::Block& block, A32::UserCallbacks* cb) {
             break;
         }
 
-        const u32 vaddr = inst.GetArg(0).GetU32();
+        const u32 vaddr = inst.GetArg(1).GetU32();
         if (cb->IsReadOnlyMemory(vaddr)) {
             const u16 value_from_memory = cb->MemoryRead16(vaddr);
             inst.ReplaceUsesWith(IR::Value{value_from_memory});
@@ -49,7 +49,7 @@ void A32ConstantMemoryReads(IR::Block& block, A32::UserCallbacks* cb) {
             break;
         }
 
-        const u32 vaddr = inst.GetArg(0).GetU32();
+        const u32 vaddr = inst.GetArg(1).GetU32();
         if (cb->IsReadOnlyMemory(vaddr)) {
             const u32 value_from_memory = cb->MemoryRead32(vaddr);
             inst.ReplaceUsesWith(IR::Value{value_from_memory});
@@ -61,7 +61,7 @@ void A32ConstantMemoryReads(IR::Block& block, A32::UserCallbacks* cb) {
             break;
         }
 
-        const u32 vaddr = inst.GetArg(0).GetU32();
+        const u32 vaddr = inst.GetArg(1).GetU32();
         if (cb->IsReadOnlyMemory(vaddr)) {
             const u64 value_from_memory = cb->MemoryRead64(vaddr);
             inst.ReplaceUsesWith(IR::Value{value_from_memory});