emit_x64_memory: Use deferred emits
parent 0d1e4fc4a8
commit dd60f4b7d8

4 changed files with 101 additions and 115 deletions
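This commit replaces the eager `SwitchToFarCode()`/`SwitchToNearCode()` pattern with deferred emits: each slow path is queued as a closure on `ctx.deferred_emits` and emitted after the main body of the block. Because a closure runs after the emitting function has returned, slow-path labels can no longer be stack-allocated `Xbyak::Label` locals; they become `SharedLabel`, a reference-counted label created by `GenSharedLabel()`. The supporting declarations are not part of this diff, so the following is only a minimal sketch of what the hunks appear to assume:

```cpp
// Sketch of the supporting declarations; they are not shown in this diff,
// so the exact definitions and placement here are assumptions.
#include <functional>
#include <memory>
#include <vector>
#include <xbyak/xbyak.h>

// A label that can outlive the scope that created it: shared_ptr copies are
// captured by value into deferred-emit closures.
using SharedLabel = std::shared_ptr<Xbyak::Label>;

inline SharedLabel GenSharedLabel() {
    return std::make_shared<Xbyak::Label>();
}

struct EmitContext {
    // Slow paths queued here are emitted after the fast path of the block.
    std::vector<std::function<void()>> deferred_emits;
};
```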
@@ -240,27 +240,20 @@ void A32EmitX64::EmitCheckMemoryAbort(A32EmitContext& ctx, IR::Inst* inst, Xbyak
         return;
     }
 
+    Xbyak::Label skip;
+
     const A32::LocationDescriptor current_location{IR::LocationDescriptor{inst->GetArg(0).GetU64()}};
 
     code.test(dword[r15 + offsetof(A32JitState, halt_reason)], static_cast<u32>(HaltReason::MemoryAbort));
-
-    Xbyak::Label memory_abort;
-
-    if (!end) {
-        code.jnz(memory_abort, code.T_NEAR);
-        code.SwitchToFarCode();
-    } else {
+    if (end) {
         code.jz(*end, code.T_NEAR);
+    } else {
+        code.jz(skip, code.T_NEAR);
     }
-
-    code.L(memory_abort);
     EmitSetUpperLocationDescriptor(current_location, ctx.Location());
     code.mov(dword[r15 + offsetof(A32JitState, Reg) + sizeof(u32) * 15], current_location.PC());
     code.ForceReturnFromRunCode();
-
-    if (!end) {
-        code.SwitchToNearCode();
-    }
+    code.L(skip);
 }
 
 } // namespace Dynarmic::Backend::X64
@@ -412,27 +412,20 @@ void A64EmitX64::EmitCheckMemoryAbort(A64EmitContext&, IR::Inst* inst, Xbyak::La
         return;
     }
 
+    Xbyak::Label skip;
+
     const A64::LocationDescriptor current_location{IR::LocationDescriptor{inst->GetArg(0).GetU64()}};
 
     code.test(dword[r15 + offsetof(A64JitState, halt_reason)], static_cast<u32>(HaltReason::MemoryAbort));
-
-    Xbyak::Label memory_abort;
-
-    if (!end) {
-        code.jnz(memory_abort, code.T_NEAR);
-        code.SwitchToFarCode();
-    } else {
+    if (end) {
         code.jz(*end, code.T_NEAR);
+    } else {
+        code.jz(skip, code.T_NEAR);
     }
-
-    code.L(memory_abort);
     code.mov(rax, current_location.PC());
     code.mov(qword[r15 + offsetof(A64JitState, pc)], rax);
     code.ForceReturnFromRunCode();
-
-    if (!end) {
-        code.SwitchToNearCode();
-    }
+    code.L(skip);
 }
 
 } // namespace Dynarmic::Backend::X64
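After this change, `EmitCheckMemoryAbort` emits its abort handler inline and hops over it with a local `skip` label when no caller-supplied `end` label exists; the far-code switching is gone because callers now invoke it from inside a deferred emit. For reference, here is the post-change A64 version assembled from the hunk above (the guard clause and the truncated signature are completed from context, so treat those two lines as inferred):

```cpp
void A64EmitX64::EmitCheckMemoryAbort(A64EmitContext&, IR::Inst* inst, Xbyak::Label* end) {
    if (!conf.check_halt_on_memory_access) {  // inferred guard; the hunk shows only its tail
        return;
    }

    Xbyak::Label skip;

    const A64::LocationDescriptor current_location{IR::LocationDescriptor{inst->GetArg(0).GetU64()}};

    // If no memory abort is pending, skip the return-to-dispatcher sequence.
    code.test(dword[r15 + offsetof(A64JitState, halt_reason)], static_cast<u32>(HaltReason::MemoryAbort));
    if (end) {
        code.jz(*end, code.T_NEAR);
    } else {
        code.jz(skip, code.T_NEAR);
    }
    // Abort path: record the faulting PC and leave the run loop.
    code.mov(rax, current_location.PC());
    code.mov(qword[r15 + offsetof(A64JitState, pc)], rax);
    code.ForceReturnFromRunCode();
    code.L(skip);
}
```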
@@ -89,45 +89,45 @@ void AxxEmitX64::EmitMemoryRead(AxxEmitContext& ctx, IR::Inst* inst) {
 
     const auto wrapped_fn = read_fallbacks[std::make_tuple(ordered, bitsize, vaddr.getIdx(), value_idx)];
 
-    Xbyak::Label abort, end;
+    SharedLabel abort = GenSharedLabel(), end = GenSharedLabel();
 
     if (fastmem_marker) {
         // Use fastmem
         bool require_abort_handling;
-        const auto src_ptr = EmitFastmemVAddr(code, ctx, abort, vaddr, require_abort_handling);
+        const auto src_ptr = EmitFastmemVAddr(code, ctx, *abort, vaddr, require_abort_handling);
 
         const auto location = EmitReadMemoryMov<bitsize>(code, value_idx, src_ptr, ordered);
 
-        code.SwitchToFarCode();
-        code.L(abort);
+        ctx.deferred_emits.emplace_back([=, this, &ctx] {
+            code.L(*abort);
             code.call(wrapped_fn);
 
             fastmem_patch_info.emplace(
                 mcl::bit_cast<u64>(location),
                 FastmemPatchInfo{
                     mcl::bit_cast<u64>(code.getCurr()),
                     mcl::bit_cast<u64>(wrapped_fn),
                     *fastmem_marker,
                     conf.recompile_on_fastmem_failure,
                 });
 
-        EmitCheckMemoryAbort(ctx, inst, &end);
-        code.jmp(end, code.T_NEAR);
-        code.SwitchToNearCode();
+            EmitCheckMemoryAbort(ctx, inst, end.get());
+            code.jmp(*end, code.T_NEAR);
+        });
     } else {
         // Use page table
         ASSERT(conf.page_table);
-        const auto src_ptr = EmitVAddrLookup(code, ctx, bitsize, abort, vaddr);
+        const auto src_ptr = EmitVAddrLookup(code, ctx, bitsize, *abort, vaddr);
         EmitReadMemoryMov<bitsize>(code, value_idx, src_ptr, ordered);
 
-        code.SwitchToFarCode();
-        code.L(abort);
+        ctx.deferred_emits.emplace_back([=, this, &ctx] {
+            code.L(*abort);
             code.call(wrapped_fn);
-        EmitCheckMemoryAbort(ctx, inst, &end);
-        code.jmp(end, code.T_NEAR);
-        code.SwitchToNearCode();
+            EmitCheckMemoryAbort(ctx, inst, end.get());
+            code.jmp(*end, code.T_NEAR);
+        });
     }
-    code.L(end);
+    code.L(*end);
 
     if constexpr (bitsize == 128) {
         ctx.reg_alloc.DefineValue(inst, Xbyak::Xmm{value_idx});
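The closures capture by value (`[=, this, &ctx]` here, `[=, this]` in the exclusive paths below), so the `SharedLabel` copies keep each `Xbyak::Label` alive until the closure runs; `ctx` is captured by reference only where `EmitCheckMemoryAbort` needs the live emit context. The diff does not show where `deferred_emits` is drained, but for the jumps to resolve, the block emitter presumably runs the queue after the block body, in the same far-code region the old code reached via `SwitchToFarCode()`. A hypothetical sketch of that drain site:

```cpp
// Hypothetical sketch of how the queued slow paths could be emitted once the
// block body is done. The function name and drain location are assumptions;
// only SwitchToFarCode/SwitchToNearCode and deferred_emits appear in the diff.
void EmitX64::EmitBlockFinalization(EmitContext& ctx) {
    if (!ctx.deferred_emits.empty()) {
        code.SwitchToFarCode();  // slow paths stay out-of-line, as before
        for (auto& deferred_emit : ctx.deferred_emits) {
            deferred_emit();     // places code.L(*abort), fallback call, jmp back
        }
        code.SwitchToNearCode();
    }
}
```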
@@ -176,45 +176,45 @@ void AxxEmitX64::EmitMemoryWrite(AxxEmitContext& ctx, IR::Inst* inst) {
 
     const auto wrapped_fn = write_fallbacks[std::make_tuple(ordered, bitsize, vaddr.getIdx(), value_idx)];
 
-    Xbyak::Label abort, end;
+    SharedLabel abort = GenSharedLabel(), end = GenSharedLabel();
 
     if (fastmem_marker) {
         // Use fastmem
         bool require_abort_handling;
-        const auto dest_ptr = EmitFastmemVAddr(code, ctx, abort, vaddr, require_abort_handling);
+        const auto dest_ptr = EmitFastmemVAddr(code, ctx, *abort, vaddr, require_abort_handling);
 
         const auto location = EmitWriteMemoryMov<bitsize>(code, dest_ptr, value_idx, ordered);
 
-        code.SwitchToFarCode();
-        code.L(abort);
+        ctx.deferred_emits.emplace_back([=, this, &ctx] {
+            code.L(*abort);
             code.call(wrapped_fn);
 
             fastmem_patch_info.emplace(
                 mcl::bit_cast<u64>(location),
                 FastmemPatchInfo{
                     mcl::bit_cast<u64>(code.getCurr()),
                     mcl::bit_cast<u64>(wrapped_fn),
                     *fastmem_marker,
                     conf.recompile_on_fastmem_failure,
                 });
 
-        EmitCheckMemoryAbort(ctx, inst, &end);
-        code.jmp(end, code.T_NEAR);
-        code.SwitchToNearCode();
+            EmitCheckMemoryAbort(ctx, inst, end.get());
+            code.jmp(*end, code.T_NEAR);
+        });
     } else {
         // Use page table
         ASSERT(conf.page_table);
-        const auto dest_ptr = EmitVAddrLookup(code, ctx, bitsize, abort, vaddr);
+        const auto dest_ptr = EmitVAddrLookup(code, ctx, bitsize, *abort, vaddr);
         EmitWriteMemoryMov<bitsize>(code, dest_ptr, value_idx, ordered);
 
-        code.SwitchToFarCode();
-        code.L(abort);
+        ctx.deferred_emits.emplace_back([=, this, &ctx] {
+            code.L(*abort);
             code.call(wrapped_fn);
-        EmitCheckMemoryAbort(ctx, inst, &end);
-        code.jmp(end, code.T_NEAR);
-        code.SwitchToNearCode();
+            EmitCheckMemoryAbort(ctx, inst, end.get());
+            code.jmp(*end, code.T_NEAR);
+        });
     }
-    code.L(end);
+    code.L(*end);
 }
 
 template<std::size_t bitsize, auto callback>
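The write path mirrors the read path. One timing subtlety: `fastmem_patch_info.emplace(...)` now executes when the deferred closure runs, so `code.getCurr()` records an address inside the deferred slow path, while `location` (the patchable fast-path mov) was computed earlier and captured by value. Condensed from the hunks above, with comments marking what is evaluated when:

```cpp
// Condensed from the hunks above: queue-time vs. deferred-emit-time values.
// `location`, `wrapped_fn`, `abort`, and `end` are all captured by value.
const auto location = EmitWriteMemoryMov<bitsize>(code, dest_ptr, value_idx, ordered);  // fast path; patchable

ctx.deferred_emits.emplace_back([=, this, &ctx] {
    code.L(*abort);                      // slow-path entry, emitted later
    code.call(wrapped_fn);               // fallback memory callback
    fastmem_patch_info.emplace(
        mcl::bit_cast<u64>(location),    // key: the fast-path mov captured earlier
        FastmemPatchInfo{
            mcl::bit_cast<u64>(code.getCurr()),  // evaluated at deferred-emit time
            mcl::bit_cast<u64>(wrapped_fn),
            *fastmem_marker,
            conf.recompile_on_fastmem_failure,
        });
    EmitCheckMemoryAbort(ctx, inst, end.get());
    code.jmp(*end, code.T_NEAR);         // resume after the fast path
});
```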
@@ -362,10 +362,10 @@ void AxxEmitX64::EmitExclusiveReadMemoryInline(AxxEmitContext& ctx, IR::Inst* in
 
     const auto fastmem_marker = ShouldFastmem(ctx, inst);
     if (fastmem_marker) {
-        Xbyak::Label abort, end;
+        SharedLabel abort = GenSharedLabel(), end = GenSharedLabel();
         bool require_abort_handling = false;
 
-        const auto src_ptr = EmitFastmemVAddr(code, ctx, abort, vaddr, require_abort_handling);
+        const auto src_ptr = EmitFastmemVAddr(code, ctx, *abort, vaddr, require_abort_handling);
 
         const auto location = EmitReadMemoryMov<bitsize>(code, value_idx, src_ptr, ordered);
 
@@ -378,14 +378,14 @@ void AxxEmitX64::EmitExclusiveReadMemoryInline(AxxEmitContext& ctx, IR::Inst* in
                 conf.recompile_on_exclusive_fastmem_failure,
             });
 
-        code.L(end);
+        code.L(*end);
 
         if (require_abort_handling) {
-            code.SwitchToFarCode();
-            code.L(abort);
+            ctx.deferred_emits.emplace_back([=, this] {
+                code.L(*abort);
                 code.call(wrapped_fn);
-            code.jmp(end, code.T_NEAR);
-            code.SwitchToNearCode();
+                code.jmp(*end, code.T_NEAR);
+            });
         }
     } else {
         code.call(wrapped_fn);
@@ -436,14 +436,14 @@ void AxxEmitX64::EmitExclusiveWriteMemoryInline(AxxEmitContext& ctx, IR::Inst* i
 
     EmitExclusiveLock(code, conf, tmp, eax);
 
-    Xbyak::Label end;
+    SharedLabel end = GenSharedLabel();
 
     code.mov(tmp, mcl::bit_cast<u64>(GetExclusiveMonitorAddressPointer(conf.global_monitor, conf.processor_id)));
     code.mov(status, u32(1));
     code.cmp(code.byte[r15 + offsetof(AxxJitState, exclusive_state)], u8(0));
-    code.je(end, code.T_NEAR);
+    code.je(*end, code.T_NEAR);
     code.cmp(qword[tmp], vaddr);
-    code.jne(end, code.T_NEAR);
+    code.jne(*end, code.T_NEAR);
 
     EmitExclusiveTestAndClear(code, conf, vaddr, tmp, rax);
 
@@ -468,10 +468,10 @@ void AxxEmitX64::EmitExclusiveWriteMemoryInline(AxxEmitContext& ctx, IR::Inst* i
 
     const auto fastmem_marker = ShouldFastmem(ctx, inst);
    if (fastmem_marker) {
-        Xbyak::Label abort;
+        SharedLabel abort = GenSharedLabel();
         bool require_abort_handling = false;
 
-        const auto dest_ptr = EmitFastmemVAddr(code, ctx, abort, vaddr, require_abort_handling, tmp);
+        const auto dest_ptr = EmitFastmemVAddr(code, ctx, *abort, vaddr, require_abort_handling, tmp);
 
         const auto location = code.getCurr();
 
@@ -503,24 +503,24 @@ void AxxEmitX64::EmitExclusiveWriteMemoryInline(AxxEmitContext& ctx, IR::Inst* i
 
         code.setnz(status.cvt8());
 
-        code.SwitchToFarCode();
-        code.L(abort);
+        ctx.deferred_emits.emplace_back([=, this] {
+            code.L(*abort);
             code.call(wrapped_fn);
 
             fastmem_patch_info.emplace(
                 mcl::bit_cast<u64>(location),
                 FastmemPatchInfo{
                     mcl::bit_cast<u64>(code.getCurr()),
                     mcl::bit_cast<u64>(wrapped_fn),
                     *fastmem_marker,
                     conf.recompile_on_exclusive_fastmem_failure,
                 });
 
             code.cmp(al, 0);
             code.setz(status.cvt8());
             code.movzx(status.cvt32(), status.cvt8());
-        code.jmp(end, code.T_NEAR);
-        code.SwitchToNearCode();
+            code.jmp(*end, code.T_NEAR);
+        });
     } else {
         code.call(wrapped_fn);
         code.cmp(al, 0);
@@ -528,7 +528,7 @@ void AxxEmitX64::EmitExclusiveWriteMemoryInline(AxxEmitContext& ctx, IR::Inst* i
         code.movzx(status.cvt32(), status.cvt8());
     }
 
-    code.L(end);
+    code.L(*end);
 
     EmitExclusiveUnlock(code, conf, tmp, eax);
 
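`EmitExclusiveWriteMemoryInline` shows why the shared labels matter: `end` is created near the top of the function, jumped to from several near-path branches, referenced again inside the deferred closure, and bound by `code.L(*end)` before that closure has run. A stack-allocated `Xbyak::Label` captured by reference would dangle once the emitting function returned; a `shared_ptr` copy inside the closure keeps it alive. In miniature (the helper below is hypothetical, built from the sketched `SharedLabel`/`EmitContext` types above):

```cpp
// Minimal illustration of the lifetime issue; EmitSomething is not from the diff.
void EmitSomething(EmitContext& ctx, BlockOfCode& code) {
    SharedLabel end = GenSharedLabel();  // refcount 1, owned by this frame

    code.je(*end, code.T_NEAR);          // near-path jump to the label

    ctx.deferred_emits.emplace_back([=, &code] {
        // The by-value copy of `end` captured here keeps the Xbyak::Label
        // alive even though EmitSomething's frame is gone when this runs.
        code.jmp(*end, code.T_NEAR);
    });

    code.L(*end);                        // label bound in the near path
}
```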
@@ -53,19 +53,19 @@ void EmitDetectMisalignedVAddr(BlockOfCode& code, EmitContext& ctx, size_t bitsi
 
     const u32 page_align_mask = static_cast<u32>(page_size - 1) & ~align_mask;
 
-    Xbyak::Label detect_boundary, resume;
+    SharedLabel detect_boundary = GenSharedLabel(), resume = GenSharedLabel();
 
-    code.jnz(detect_boundary, code.T_NEAR);
-    code.L(resume);
+    code.jnz(*detect_boundary, code.T_NEAR);
+    code.L(*resume);
 
-    code.SwitchToFarCode();
-    code.L(detect_boundary);
+    ctx.deferred_emits.emplace_back([=, &code] {
+        code.L(*detect_boundary);
         code.mov(tmp, vaddr);
         code.and_(tmp, page_align_mask);
         code.cmp(tmp, page_align_mask);
-        code.jne(resume, code.T_NEAR);
+        code.jne(*resume, code.T_NEAR);
         // NOTE: We expect to fallthrough into abort code here.
-    code.SwitchToNearCode();
+    });
 }
 
 template<typename EmitContext>
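Note the capture list here is `[=, &code]` rather than `[=, this]`: `EmitDetectMisalignedVAddr` is a free template function, so the code generator comes in as a reference parameter instead of a member. The queue-and-drain idiom itself can be exercised without Xbyak at all; here is a self-contained toy in standard C++ (all names invented for illustration) that mirrors the structure, with fast paths appending closures and a finalizer draining them after the main body:

```cpp
// Toy model of deferred emits in plain C++; all names are illustrative.
#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

using SharedTag = std::shared_ptr<std::string>;  // stand-in for SharedLabel

int main() {
    std::vector<std::function<void()>> deferred_emits;

    for (int i = 0; i < 3; ++i) {
        SharedTag abort = std::make_shared<std::string>("abort_" + std::to_string(i));
        std::cout << "fast path " << i << " (may jump to " << *abort << ")\n";
        // Capture by value: the shared_ptr copy outlives this loop iteration.
        deferred_emits.emplace_back([=] { std::cout << "slow path " << *abort << "\n"; });
    }

    // "Block finalization": emit every queued slow path after the main body.
    for (auto& emit : deferred_emits) {
        emit();
    }
}
```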