basic_block: Move all variables behind a public interface
parent 1d8432487d
commit 0e12fb6a56
9 changed files with 185 additions and 66 deletions
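For orientation, the pattern of this change is visible throughout the diff below: direct access to IR::Block's data members is replaced by calls through a new public interface. A minimal illustrative sketch (not part of the diff; the member and accessor names are taken from the hunks that follow):

    // Before: callers reach into IR::Block's data members directly.
    block.cycle_count++;
    if (block.cond != Arm::Cond::AL) { /* ... */ }

    // After: the same state is reached through accessor functions.
    block.CycleCount()++;
    if (block.GetCondition() != Arm::Cond::AL) { /* ... */ }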
@@ -57,7 +57,7 @@ static IR::Inst* FindUseWithOpcode(IR::Inst* inst, IR::Opcode opcode) {
 }
 
 static void EraseInstruction(IR::Block& block, IR::Inst* inst) {
-    block.instructions.erase(block.instructions.iterator_to(*inst));
+    block.Instructions().erase(block.Instructions().iterator_to(*inst));
 }
 
 EmitX64::BlockDescriptor EmitX64::Emit(const Arm::LocationDescriptor descriptor, Dynarmic::IR::Block& block) {
@@ -92,8 +92,8 @@ EmitX64::BlockDescriptor EmitX64::Emit(const Arm::LocationDescriptor descriptor,
         reg_alloc.EndOfAllocScope();
     }
 
-    EmitAddCycles(block.cycle_count);
-    EmitTerminal(block.terminal, block.location);
+    EmitAddCycles(block.CycleCount());
+    EmitTerminal(block.GetTerminal(), block.Location());
 
     reg_alloc.AssertNoMoreUses();
 
@@ -1227,15 +1227,15 @@ static void FPThreeOp32(BlockOfCode* code, RegAlloc& reg_alloc, IR::Block& block
     X64Reg operand = reg_alloc.UseRegister(b, any_xmm);
     X64Reg gpr_scratch = reg_alloc.ScratchRegister(any_gpr);
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero32(code, result, gpr_scratch);
         DenormalsAreZero32(code, operand, gpr_scratch);
     }
     (code->*fn)(result, R(operand));
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         FlushToZero32(code, result, gpr_scratch);
     }
-    if (block.location.FPSCR().DN()) {
+    if (block.Location().FPSCR().DN()) {
         DefaultNaN32(code, result);
     }
 }
@@ -1248,15 +1248,15 @@ static void FPThreeOp64(BlockOfCode* code, RegAlloc& reg_alloc, IR::Block& block
     X64Reg operand = reg_alloc.UseRegister(b, any_xmm);
     X64Reg gpr_scratch = reg_alloc.ScratchRegister(any_gpr);
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero64(code, result, gpr_scratch);
         DenormalsAreZero64(code, operand, gpr_scratch);
     }
     (code->*fn)(result, R(operand));
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         FlushToZero64(code, result, gpr_scratch);
     }
-    if (block.location.FPSCR().DN()) {
+    if (block.Location().FPSCR().DN()) {
         DefaultNaN64(code, result);
     }
 }
@@ -1267,14 +1267,14 @@ static void FPTwoOp32(BlockOfCode* code, RegAlloc& reg_alloc, IR::Block& block,
     X64Reg result = reg_alloc.UseDefRegister(a, inst, any_xmm);
     X64Reg gpr_scratch = reg_alloc.ScratchRegister(any_gpr);
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero32(code, result, gpr_scratch);
     }
     (code->*fn)(result, R(result));
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         FlushToZero32(code, result, gpr_scratch);
     }
-    if (block.location.FPSCR().DN()) {
+    if (block.Location().FPSCR().DN()) {
         DefaultNaN32(code, result);
     }
 }
@@ -1285,14 +1285,14 @@ static void FPTwoOp64(BlockOfCode* code, RegAlloc& reg_alloc, IR::Block& block,
     X64Reg result = reg_alloc.UseDefRegister(a, inst, any_xmm);
     X64Reg gpr_scratch = reg_alloc.ScratchRegister(any_gpr);
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero64(code, result, gpr_scratch);
     }
     (code->*fn)(result, R(result));
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         FlushToZero64(code, result, gpr_scratch);
     }
-    if (block.location.FPSCR().DN()) {
+    if (block.Location().FPSCR().DN()) {
         DefaultNaN64(code, result);
     }
 }
@@ -1403,14 +1403,14 @@ void EmitX64::EmitFPSingleToDouble(IR::Block& block, IR::Inst* inst) {
     X64Reg result = reg_alloc.UseDefRegister(a, inst, any_xmm);
     X64Reg gpr_scratch = reg_alloc.ScratchRegister(any_gpr);
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero32(code, result, gpr_scratch);
     }
     code->CVTSS2SD(result, R(result));
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         FlushToZero64(code, result, gpr_scratch);
     }
-    if (block.location.FPSCR().DN()) {
+    if (block.Location().FPSCR().DN()) {
         DefaultNaN64(code, result);
     }
 }
@@ -1421,14 +1421,14 @@ void EmitX64::EmitFPDoubleToSingle(IR::Block& block, IR::Inst* inst) {
     X64Reg result = reg_alloc.UseDefRegister(a, inst, any_xmm);
     X64Reg gpr_scratch = reg_alloc.ScratchRegister(any_gpr);
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero64(code, result, gpr_scratch);
     }
     code->CVTSD2SS(result, R(result));
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         FlushToZero32(code, result, gpr_scratch);
     }
-    if (block.location.FPSCR().DN()) {
+    if (block.Location().FPSCR().DN()) {
         DefaultNaN32(code, result);
     }
 }
@@ -1444,7 +1444,7 @@ void EmitX64::EmitFPSingleToS32(IR::Block& block, IR::Inst* inst) {
     // ARM saturates on conversion; this differs from x64 which returns a sentinel value.
     // Conversion to double is lossless, and allows for clamping.
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero32(code, from, gpr_scratch);
     }
     code->CVTSS2SD(from, R(from));
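The comment in the hunk above describes saturating float-to-s32 conversion by first widening losslessly to double and clamping. A standalone C++ sketch of that idea (illustrative only: the JIT emits SSE instructions such as CVTSS2SD rather than calling a helper like this, and the NaN and rounding details below are assumptions about the intended semantics):

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Widen to double (exact for any float), clamp to the s32 range, then truncate.
    // x64's CVTSS2SI would instead return the sentinel 0x80000000 on overflow.
    static std::int32_t SaturatingFloatToS32(float value) {
        double wide = static_cast<double>(value);      // lossless widening
        if (std::isnan(wide))
            return 0;                                  // assumed: NaN converts to 0
        wide = std::min(wide, 2147483647.0);           // clamp to INT32_MAX
        wide = std::max(wide, -2147483648.0);          // clamp to INT32_MIN
        return static_cast<std::int32_t>(wide);        // truncation (round toward zero)
    }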
@@ -1482,8 +1482,8 @@ void EmitX64::EmitFPSingleToU32(IR::Block& block, IR::Inst* inst) {
     //
     // FIXME: Inexact exception not correctly signalled with the below code
 
-    if (block.location.FPSCR().RMode() != Arm::FPSCR::RoundingMode::TowardsZero && !round_towards_zero) {
-        if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().RMode() != Arm::FPSCR::RoundingMode::TowardsZero && !round_towards_zero) {
+        if (block.Location().FPSCR().FTZ()) {
             DenormalsAreZero32(code, from, gpr_scratch);
         }
         code->CVTSS2SD(from, R(from));
@@ -1504,7 +1504,7 @@ void EmitX64::EmitFPSingleToU32(IR::Block& block, IR::Inst* inst) {
         X64Reg xmm_mask = reg_alloc.ScratchRegister(any_xmm);
         X64Reg gpr_mask = reg_alloc.ScratchRegister(any_gpr);
 
-        if (block.location.FPSCR().FTZ()) {
+        if (block.Location().FPSCR().FTZ()) {
             DenormalsAreZero32(code, from, gpr_scratch);
         }
         code->CVTSS2SD(from, R(from));
@@ -1540,7 +1540,7 @@ void EmitX64::EmitFPDoubleToS32(IR::Block& block, IR::Inst* inst) {
 
     // ARM saturates on conversion; this differs from x64 which returns a sentinel value.
 
-    if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().FTZ()) {
         DenormalsAreZero64(code, from, gpr_scratch);
     }
     // First time is to set flags
@@ -1575,8 +1575,8 @@ void EmitX64::EmitFPDoubleToU32(IR::Block& block, IR::Inst* inst) {
     // TODO: Use VCVTPD2UDQ when AVX512VL is available.
     // FIXME: Inexact exception not correctly signalled with the below code
 
-    if (block.location.FPSCR().RMode() != Arm::FPSCR::RoundingMode::TowardsZero && !round_towards_zero) {
-        if (block.location.FPSCR().FTZ()) {
+    if (block.Location().FPSCR().RMode() != Arm::FPSCR::RoundingMode::TowardsZero && !round_towards_zero) {
+        if (block.Location().FPSCR().FTZ()) {
            DenormalsAreZero64(code, from, gpr_scratch);
         }
         ZeroIfNaN64(code, from);
@@ -1596,7 +1596,7 @@ void EmitX64::EmitFPDoubleToU32(IR::Block& block, IR::Inst* inst) {
         X64Reg xmm_mask = reg_alloc.ScratchRegister(any_xmm);
         X64Reg gpr_mask = reg_alloc.ScratchRegister(any_gpr);
 
-        if (block.location.FPSCR().FTZ()) {
+        if (block.Location().FPSCR().FTZ()) {
             DenormalsAreZero64(code, from, gpr_scratch);
         }
         ZeroIfNaN64(code, from);
@@ -1933,19 +1933,19 @@ static CCFlags EmitCond(BlockOfCode* code, Arm::Cond cond) {
 }
 
 void EmitX64::EmitCondPrelude(const IR::Block& block) {
-    if (block.cond == Arm::Cond::AL) {
-        ASSERT(!block.cond_failed.is_initialized());
+    if (block.GetCondition() == Arm::Cond::AL) {
+        ASSERT(!block.HasConditionFailedLocation());
         return;
     }
 
-    ASSERT(block.cond_failed.is_initialized());
+    ASSERT(block.HasConditionFailedLocation());
 
-    CCFlags cc = EmitCond(code, block.cond);
+    CCFlags cc = EmitCond(code, block.GetCondition());
 
     // TODO: Improve, maybe.
     auto fixup = code->J_CC(cc, true);
-    EmitAddCycles(block.cond_failed_cycle_count);
-    EmitTerminalLinkBlock(IR::Term::LinkBlock{block.cond_failed.get()}, block.location);
+    EmitAddCycles(block.ConditionFailedCycleCount());
+    EmitTerminalLinkBlock(IR::Term::LinkBlock{block.ConditionFailedLocation()}, block.Location());
     code->SetJumpTarget(fixup);
 }
@@ -4,15 +4,92 @@
  * General Public License version 2 or any later version.
  */
 
+#include <algorithm>
+#include <initializer_list>
 #include <map>
 #include <string>
 
+#include "common/assert.h"
 #include "common/string_util.h"
 #include "frontend/ir/basic_block.h"
+#include "frontend/ir/opcodes.h"
 
 namespace Dynarmic {
 namespace IR {
 
+void Block::AppendNewInst(Opcode opcode, std::initializer_list<IR::Value> args) {
+    IR::Inst* inst = new(instruction_alloc_pool->Alloc()) IR::Inst(opcode);
+    DEBUG_ASSERT(args.size() == inst->NumArgs());
+
+    std::for_each(args.begin(), args.end(), [&inst, index = size_t(0)](const auto& arg) mutable {
+        inst->SetArg(index, arg);
+        index++;
+    });
+
+    instructions.push_back(inst);
+}
+
+Arm::LocationDescriptor Block::Location() const {
+    return location;
+}
+
+Arm::Cond Block::GetCondition() const {
+    return cond;
+}
+
+void Block::SetCondition(Arm::Cond condition) {
+    cond = condition;
+}
+
+Arm::LocationDescriptor Block::ConditionFailedLocation() const {
+    return cond_failed.get();
+}
+
+void Block::SetConditionFailedLocation(Arm::LocationDescriptor location) {
+    cond_failed = location;
+}
+
+size_t& Block::ConditionFailedCycleCount() {
+    return cond_failed_cycle_count;
+}
+
+const size_t& Block::ConditionFailedCycleCount() const {
+    return cond_failed_cycle_count;
+}
+
+bool Block::HasConditionFailedLocation() const {
+    return cond_failed.is_initialized();
+}
+
+Block::InstructionList& Block::Instructions() {
+    return instructions;
+}
+
+const Block::InstructionList& Block::Instructions() const {
+    return instructions;
+}
+
+Terminal Block::GetTerminal() const {
+    return terminal;
+}
+
+void Block::SetTerminal(Terminal term) {
+    ASSERT_MSG(!HasTerminal(), "Terminal has already been set.");
+    terminal = term;
+}
+
+bool Block::HasTerminal() const {
+    return terminal.which() != 0;
+}
+
+size_t& Block::CycleCount() {
+    return cycle_count;
+}
+
+const size_t& Block::CycleCount() const {
+    return cycle_count;
+}
+
 static std::string LocDescToString(const Arm::LocationDescriptor& loc) {
     return Common::StringFromFormat("{%u,%s,%s,%u}",
                                     loc.PC(),
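Block::AppendNewInst above fills the new instruction's arguments with a std::for_each over the initializer_list, using an init-captured counter as the argument index. A self-contained analogue of that idiom (FakeInst is a hypothetical stand-in; only the traversal pattern is taken from the code above):

    #include <algorithm>
    #include <cstddef>
    #include <initializer_list>
    #include <vector>

    struct FakeInst {
        std::vector<int> args;
        void SetArg(std::size_t index, int value) { args.at(index) = value; }
    };

    // Mirrors the shape of Block::AppendNewInst's argument loop:
    // the lambda owns a mutable index that advances once per argument.
    static void SetArgs(FakeInst& inst, std::initializer_list<int> args) {
        inst.args.resize(args.size());
        std::for_each(args.begin(), args.end(), [&inst, index = std::size_t(0)](int arg) mutable {
            inst.SetArg(index, arg);
            index++;
        });
    }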
@@ -57,11 +134,11 @@ static std::string TerminalToString(const Terminal& terminal_variant) {
 std::string DumpBlock(const IR::Block& block) {
     std::string ret;
 
-    ret += Common::StringFromFormat("Block: location=%s\n", LocDescToString(block.location).c_str());
-    ret += Common::StringFromFormat("cycles=%zu", block.cycle_count);
-    ret += Common::StringFromFormat(", entry_cond=%s", Arm::CondToString(block.cond, true));
-    if (block.cond != Arm::Cond::AL) {
-        ret += Common::StringFromFormat(", cond_fail=%s", LocDescToString(block.cond_failed.get()).c_str());
+    ret += Common::StringFromFormat("Block: location=%s\n", LocDescToString(block.Location()).c_str());
+    ret += Common::StringFromFormat("cycles=%zu", block.CycleCount());
+    ret += Common::StringFromFormat(", entry_cond=%s", Arm::CondToString(block.GetCondition(), true));
+    if (block.GetCondition() != Arm::Cond::AL) {
+        ret += Common::StringFromFormat(", cond_fail=%s", LocDescToString(block.ConditionFailedLocation()).c_str());
     }
     ret += "\n";
 
@@ -119,7 +196,7 @@ std::string DumpBlock(const IR::Block& block) {
         inst_to_index[&inst] = index++;
     }
 
-    ret += "terminal = " + TerminalToString(block.terminal) + "\n";
+    ret += "terminal = " + TerminalToString(block.GetTerminal()) + "\n";
 
     return ret;
 }
@@ -6,6 +6,7 @@
 
 #pragma once
 
+#include <initializer_list>
 #include <memory>
 #include <string>
 
@@ -17,10 +18,13 @@
 #include "frontend/arm_types.h"
 #include "frontend/ir/microinstruction.h"
 #include "frontend/ir/terminal.h"
+#include "frontend/ir/value.h"
 
 namespace Dynarmic {
 namespace IR {
 
+enum class Opcode;
+
 /**
  * A basic block. It consists of zero or more instructions followed by exactly one terminal.
  * Note that this is a linear IR and not a pure tree-based IR: i.e.: there is an ordering to
@@ -63,6 +67,53 @@ public:
     const_reverse_iterator crbegin() const { return instructions.crbegin(); }
     const_reverse_iterator crend() const { return instructions.crend(); }
 
+    /**
+     * Appends a new instruction to this basic block,
+     * handling any allocations necessary to do so.
+     *
+     * @param op   Opcode representing the instruction to add.
+     * @param args A sequence of Value instances used as arguments for the instruction.
+     */
+    void AppendNewInst(Opcode op, std::initializer_list<Value> args);
+
+    /// Gets the starting location for this basic block.
+    Arm::LocationDescriptor Location() const;
+
+    /// Gets the condition required to pass in order to execute this block.
+    Arm::Cond GetCondition() const;
+    /// Sets the condition required to pass in order to execute this block.
+    void SetCondition(Arm::Cond condition);
+
+    /// Gets the location of the block to execute if the predicated condition fails.
+    Arm::LocationDescriptor ConditionFailedLocation() const;
+    /// Sets the location of the block to execute if the predicated condition fails.
+    void SetConditionFailedLocation(Arm::LocationDescriptor location);
+    /// Determines whether or not a predicated condition failure block is present.
+    bool HasConditionFailedLocation() const;
+
+    /// Gets a mutable reference to the condition failed cycle count.
+    size_t& ConditionFailedCycleCount();
+    /// Gets an immutable reference to the condition failed cycle count.
+    const size_t& ConditionFailedCycleCount() const;
+
+    /// Gets a mutable reference to the instruction list for this basic block.
+    InstructionList& Instructions();
+    /// Gets an immutable reference to the instruction list for this basic block.
+    const InstructionList& Instructions() const;
+
+    /// Gets the terminal instruction for this basic block.
+    Terminal GetTerminal() const;
+    /// Sets the terminal instruction for this basic block.
+    void SetTerminal(Terminal term);
+    /// Determines whether or not this basic block has a terminal instruction.
+    bool HasTerminal() const;
+
+    /// Gets a mutable reference to the cycle count for this basic block.
+    size_t& CycleCount();
+    /// Gets an immutable reference to the cycle count for this basic block.
+    const size_t& CycleCount() const;
+
 private:
     /// Description of the starting location of this block
     Arm::LocationDescriptor location;
     /// Conditional to pass in order to execute this block
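Illustrative only: how client code is expected to drive the interface declared above. The accessor names come from the header; the terminal type IR::Term::ReturnToDispatch and the include paths are assumptions based on the rest of this diff and dynarmic's tree layout, not part of the commit.

    #include "frontend/ir/basic_block.h"
    #include "frontend/ir/terminal.h"

    void SketchFillBlock(Dynarmic::IR::Block& block) {
        using namespace Dynarmic;

        block.SetCondition(Arm::Cond::AL);     // entry condition, previously block.cond
        block.CycleCount()++;                  // reference-returning accessor, so ++ still works

        if (!block.HasTerminal()) {
            // SetTerminal asserts that a terminal has not already been set.
            block.SetTerminal(IR::Term::ReturnToDispatch{});
        }
    }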
@@ -552,21 +552,12 @@ void IREmitter::Breakpoint() {
 }
 
 void IREmitter::SetTerm(const IR::Terminal& terminal) {
-    ASSERT_MSG(block.terminal.which() == 0, "Terminal has already been set.");
-    block.terminal = terminal;
+    block.SetTerminal(terminal);
 }
 
 IR::Value IREmitter::Inst(IR::Opcode op, std::initializer_list<IR::Value> args) {
-    IR::Inst* inst = new(block.instruction_alloc_pool->Alloc()) IR::Inst(op);
-    DEBUG_ASSERT(args.size() == inst->NumArgs());
-
-    std::for_each(args.begin(), args.end(), [&inst, op, index = size_t(0)](const auto& v) mutable {
-        inst->SetArg(index, v);
-        index++;
-    });
-
-    block.instructions.push_back(inst);
-    return IR::Value(inst);
+    block.AppendNewInst(op, args);
+    return IR::Value(&block.back());
 }
 
 } // namespace Arm
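IREmitter::Inst now delegates construction to Block::AppendNewInst and then wraps a pointer to the freshly appended instruction (block.back()) in a Value. A standalone analogue of that append-then-reference-back pattern (FakeBlock and FakeInst are hypothetical stand-ins, not dynarmic types):

    #include <list>
    #include <string>
    #include <utility>

    struct FakeInst {
        std::string op;
    };

    struct FakeBlock {
        std::list<FakeInst> instructions;

        void AppendNewInst(std::string op) { instructions.push_back(FakeInst{std::move(op)}); }
        FakeInst& back() { return instructions.back(); }
    };

    // Mirrors the new IREmitter::Inst body: append, then hand back a handle
    // to the element that was just appended (cf. return IR::Value(&block.back());).
    static FakeInst* EmitInst(FakeBlock& block, std::string op) {
        block.AppendNewInst(std::move(op));
        return &block.back();
    }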
@@ -48,7 +48,7 @@ IR::Block TranslateArm(LocationDescriptor descriptor, MemoryRead32FuncType memor
         }
 
         visitor.ir.current_location = visitor.ir.current_location.AdvancePC(4);
-        visitor.ir.block.cycle_count++;
+        visitor.ir.block.CycleCount()++;
     }
 
     if (visitor.cond_state == ConditionalState::Translating || visitor.cond_state == ConditionalState::Trailing) {
@@ -57,7 +57,7 @@ IR::Block TranslateArm(LocationDescriptor descriptor, MemoryRead32FuncType memor
         }
     }
 
-    ASSERT_MSG(visitor.ir.block.terminal.which() != 0, "Terminal has not been set");
+    ASSERT_MSG(visitor.ir.block.HasTerminal(), "Terminal has not been set");
 
     return std::move(visitor.ir.block);
 }
@@ -68,12 +68,12 @@ bool ArmTranslatorVisitor::ConditionPassed(Cond cond) {
     ASSERT_MSG(cond != Cond::NV, "NV conditional is obsolete");
 
     if (cond_state == ConditionalState::Translating) {
-        if (ir.block.cond_failed != ir.current_location || cond == Cond::AL) {
+        if (ir.block.ConditionFailedLocation() != ir.current_location || cond == Cond::AL) {
             cond_state = ConditionalState::Trailing;
         } else {
-            if (cond == ir.block.cond) {
-                ir.block.cond_failed = { ir.current_location.AdvancePC(4) };
-                ir.block.cond_failed_cycle_count++;
+            if (cond == ir.block.GetCondition()) {
+                ir.block.SetConditionFailedLocation({ ir.current_location.AdvancePC(4) });
+                ir.block.ConditionFailedCycleCount()++;
                 return true;
             }
 
@@ -102,9 +102,9 @@ bool ArmTranslatorVisitor::ConditionPassed(Cond cond) {
     // We'll emit one instruction, and set the block-entry conditional appropriately.
 
     cond_state = ConditionalState::Translating;
-    ir.block.cond = cond;
-    ir.block.cond_failed = { ir.current_location.AdvancePC(4) };
-    ir.block.cond_failed_cycle_count = 1;
+    ir.block.SetCondition(cond);
+    ir.block.SetConditionFailedLocation({ ir.current_location.AdvancePC(4) });
+    ir.block.ConditionFailedCycleCount() = 1;
     return true;
 }
@@ -886,7 +886,7 @@ IR::Block TranslateThumb(LocationDescriptor descriptor, MemoryRead32FuncType mem
 
         s32 advance_pc = (inst_size == ThumbInstSize::Thumb16) ? 2 : 4;
         visitor.ir.current_location = visitor.ir.current_location.AdvancePC(advance_pc);
-        visitor.ir.block.cycle_count++;
+        visitor.ir.block.CycleCount()++;
     }
 
     return std::move(visitor.ir.block);
@@ -25,7 +25,7 @@ void DeadCodeElimination(IR::Block& block) {
         --iter;
         if (!iter->HasUses() && !iter->MayHaveSideEffects()) {
            iter->Invalidate();
-            iter = block.instructions.erase(iter);
+            iter = block.Instructions().erase(iter);
         }
     } while (iter != block.begin());
 }
@@ -33,7 +33,7 @@ void GetSetElimination(IR::Block& block) {
     const auto do_set = [&block](RegisterInfo& info, IR::Value value, Iterator set_inst) {
         if (info.set_instruction_present) {
             info.last_set_instruction->Invalidate();
-            block.instructions.erase(info.last_set_instruction);
+            block.Instructions().erase(info.last_set_instruction);
         }
 
         info.register_value = value;
@@ -297,7 +297,7 @@ void FuzzJitArm(const size_t instruction_count, const size_t instructions_to_exe
             Dynarmic::Optimization::VerificationPass(ir_block);
             printf("\n\nIR:\n%s", Dynarmic::IR::DumpBlock(ir_block).c_str());
             printf("\n\nx86_64:\n%s", jit.Disassemble(descriptor).c_str());
-            num_insts += ir_block.cycle_count;
+            num_insts += ir_block.CycleCount();
         }
 
 #ifdef _MSC_VER