Merge branch 'simplify-reg-alloc'
commit 6396bd02f0

6 changed files with 1220 additions and 1317 deletions

File diff suppressed because it is too large
@@ -10,12 +10,12 @@ namespace Dynarmic {
 namespace BackendX64 {
 
 Xbyak::Reg64 HostLocToReg64(HostLoc loc) {
-    DEBUG_ASSERT(HostLocIsGPR(loc));
+    ASSERT(HostLocIsGPR(loc));
     return Xbyak::Reg64(static_cast<int>(loc));
 }
 
 Xbyak::Xmm HostLocToXmm(HostLoc loc) {
-    DEBUG_ASSERT(HostLocIsXMM(loc));
+    ASSERT(HostLocIsXMM(loc));
     return Xbyak::Xmm(static_cast<int>(loc) - static_cast<int>(HostLoc::XMM0));
 }
 
@@ -23,7 +23,7 @@ Xbyak::Address SpillToOpArg(HostLoc loc) {
     using namespace Xbyak::util;
 
     static_assert(std::is_same<decltype(JitState::Spill[0]), u64&>::value, "Spill must be u64");
-    DEBUG_ASSERT(HostLocIsSpill(loc));
+    ASSERT(HostLocIsSpill(loc));
 
     size_t i = static_cast<size_t>(loc) - static_cast<size_t>(HostLoc::FirstSpill);
     return qword[r15 + offsetof(JitState, Spill) + i * sizeof(u64)];
@@ -22,6 +22,8 @@ static u64 ImmediateToU64(const IR::Value& imm) {
         return u64(imm.GetU1());
     case IR::Type::U8:
         return u64(imm.GetU8());
+    case IR::Type::U16:
+        return u64(imm.GetU16());
     case IR::Type::U32:
         return u64(imm.GetU32());
     case IR::Type::U64:
@@ -31,253 +33,265 @@ static u64 ImmediateToU64(const IR::Value& imm) {
     }
 }
 
-static Xbyak::Reg HostLocToX64(HostLoc hostloc) {
-    if (HostLocIsGPR(hostloc)) {
-        DEBUG_ASSERT(hostloc != HostLoc::RSP && hostloc != HostLoc::R15);
-        return HostLocToReg64(hostloc);
-    }
-    if (HostLocIsXMM(hostloc)) {
-        return HostLocToXmm(hostloc);
-    }
-    ASSERT_MSG(false, "This should never happen.");
+static bool IsSameHostLocClass(HostLoc a, HostLoc b) {
+    return (HostLocIsGPR(a) && HostLocIsGPR(b))
+        || (HostLocIsXMM(a) && HostLocIsXMM(b))
+        || (HostLocIsSpill(a) && HostLocIsSpill(b));
 }
 
-HostLoc RegAlloc::DefHostLocReg(IR::Inst* def_inst, HostLocList desired_locations) {
-    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
-    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
-    HostLoc location = SelectARegister(desired_locations);
-    if (IsRegisterOccupied(location)) {
-        SpillRegister(location);
-    }
-    LocInfo(location).is_being_used = true;
-    LocInfo(location).def = def_inst;
-    DEBUG_ASSERT(LocInfo(location).IsDef());
-    return location;
-}
-void RegAlloc::RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst) {
-    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
-
-    if (use_inst.IsImmediate()) {
-        LoadImmediateIntoHostLocReg(use_inst, DefHostLocReg(def_inst, any_gpr));
-        return;
-    }
-
-    DEBUG_ASSERT_MSG(ValueLocation(use_inst.GetInst()), "use_inst must already be defined");
-    HostLoc location = *ValueLocation(use_inst.GetInst());
-    LocInfo(location).values.emplace_back(def_inst);
-    use_inst.GetInst()->DecrementRemainingUses();
-    DEBUG_ASSERT(LocInfo(location).IsIdle());
-}
-
-HostLoc RegAlloc::UseDefHostLocReg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations) {
-    if (!use_value.IsImmediate()) {
-        return UseDefHostLocReg(use_value.GetInst(), def_inst, desired_locations);
-    }
-
-    return LoadImmediateIntoHostLocReg(use_value, DefHostLocReg(def_inst, desired_locations));
-}
-
-HostLoc RegAlloc::UseDefHostLocReg(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations) {
-    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
-    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
-    DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
-
-    if (IsLastUse(use_inst)) {
-        HostLoc current_location = *ValueLocation(use_inst);
-        auto& loc_info = LocInfo(current_location);
-        if (loc_info.IsIdle()) {
-            loc_info.is_being_used = true;
-            loc_info.def = def_inst;
-            DEBUG_ASSERT(loc_info.IsUseDef());
-            if (HostLocIsSpill(current_location)) {
-                HostLoc new_location = SelectARegister(desired_locations);
-                if (IsRegisterOccupied(new_location)) {
-                    SpillRegister(new_location);
-                }
-                EmitMove(new_location, current_location);
-                LocInfo(new_location) = LocInfo(current_location);
-                LocInfo(current_location) = {};
-                return new_location;
-            } else {
-                return current_location;
-            }
-        }
-    }
-
-    bool is_floating_point = HostLocIsXMM(*desired_locations.begin());
-    if (is_floating_point) {
-        DEBUG_ASSERT(use_inst->GetType() == IR::Type::F32 || use_inst->GetType() == IR::Type::F64);
-    }
-    HostLoc use_reg = UseHostLocReg(use_inst, is_floating_point ? any_xmm : any_gpr);
-    HostLoc def_reg = DefHostLocReg(def_inst, desired_locations);
-    if (is_floating_point) {
-        code->movapd(HostLocToXmm(def_reg), HostLocToXmm(use_reg));
+static void EmitMove(BlockOfCode* code, HostLoc to, HostLoc from) {
+    if (HostLocIsXMM(to) && HostLocIsXMM(from)) {
+        code->movaps(HostLocToXmm(to), HostLocToXmm(from));
+    } else if (HostLocIsGPR(to) && HostLocIsGPR(from)) {
+        code->mov(HostLocToReg64(to), HostLocToReg64(from));
+    } else if (HostLocIsXMM(to) && HostLocIsGPR(from)) {
+        code->movq(HostLocToXmm(to), HostLocToReg64(from));
+    } else if (HostLocIsGPR(to) && HostLocIsXMM(from)) {
+        code->movq(HostLocToReg64(to), HostLocToXmm(from));
+    } else if (HostLocIsXMM(to) && HostLocIsSpill(from)) {
+        code->movsd(HostLocToXmm(to), SpillToOpArg(from));
+    } else if (HostLocIsSpill(to) && HostLocIsXMM(from)) {
+        code->movsd(SpillToOpArg(to), HostLocToXmm(from));
+    } else if (HostLocIsGPR(to) && HostLocIsSpill(from)) {
+        code->mov(HostLocToReg64(to), SpillToOpArg(from));
+    } else if (HostLocIsSpill(to) && HostLocIsGPR(from)) {
+        code->mov(SpillToOpArg(to), HostLocToReg64(from));
     } else {
-        code->mov(HostLocToReg64(def_reg), HostLocToReg64(use_reg));
+        ASSERT_MSG(false, "Invalid RegAlloc::EmitMove");
     }
-    return def_reg;
 }
 
-std::tuple<OpArg, HostLoc> RegAlloc::UseDefOpArgHostLocReg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations) {
-    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
-    DEBUG_ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
-    DEBUG_ASSERT_MSG(use_value.IsImmediate() || ValueLocation(use_value.GetInst()), "use_inst has not been defined");
+static void EmitExchange(BlockOfCode* code, HostLoc a, HostLoc b) {
+    if (HostLocIsGPR(a) && HostLocIsGPR(b)) {
+        code->xchg(HostLocToReg64(a), HostLocToReg64(b));
+    } else if (HostLocIsXMM(a) && HostLocIsXMM(b)) {
+        ASSERT_MSG(false, "Check your code: Exchanging XMM registers is unnecessary");
+    } else {
+        ASSERT_MSG(false, "Invalid RegAlloc::EmitExchange");
+    }
+}
 
-    if (!use_value.IsImmediate()) {
-        const IR::Inst* use_inst = use_value.GetInst();
+bool HostLocInfo::IsLocked() const {
+    return is_being_used;
+}
 
-        if (IsLastUse(use_inst)) {
-            HostLoc current_location = *ValueLocation(use_inst);
-            auto& loc_info = LocInfo(current_location);
-            if (!loc_info.IsIdle()) {
-                if (HostLocIsSpill(current_location)) {
-                    loc_info.is_being_used = true;
-                    DEBUG_ASSERT(loc_info.IsUse());
-                    return std::make_tuple(SpillToOpArg(current_location), DefHostLocReg(def_inst, desired_locations));
-                } else {
-                    loc_info.is_being_used = true;
-                    loc_info.def = def_inst;
-                    DEBUG_ASSERT(loc_info.IsUseDef());
-                    return std::make_tuple(HostLocToX64(current_location), current_location);
-                }
-            }
-        }
+bool HostLocInfo::IsEmpty() const {
+    return !is_being_used && values.empty();
+}
+bool HostLocInfo::IsLastUse() const {
+    return !is_being_used && std::all_of(values.begin(), values.end(), [](const auto& inst) { return !inst->HasUses(); });
+}
+bool HostLocInfo::ContainsValue(const IR::Inst* inst) const {
+    return std::find(values.begin(), values.end(), inst) != values.end();
+}
+void HostLocInfo::ReadLock() {
+    ASSERT(!is_scratch);
+    is_being_used = true;
+}
+
+void HostLocInfo::WriteLock() {
+    ASSERT(!is_being_used);
+    is_being_used = true;
+    is_scratch = true;
+}
+
+void HostLocInfo::AddValue(IR::Inst* inst) {
+    values.push_back(inst);
+}
+
+void HostLocInfo::EndOfAllocScope() {
+    const auto to_erase = std::remove_if(values.begin(), values.end(), [](const auto& inst) { return !inst->HasUses(); });
+    values.erase(to_erase, values.end());
+
+    is_being_used = false;
+    is_scratch = false;
+}
+
+IR::Type Argument::GetType() const {
+    return value.GetType();
+}
+
+bool Argument::IsImmediate() const {
+    return value.IsImmediate();
+}
+
+bool Argument::GetImmediateU1() const {
+    return value.GetU1();
+}
+
+u8 Argument::GetImmediateU8() const {
+    u64 imm = ImmediateToU64(value);
+    ASSERT(imm < 0x100);
+    return u8(imm);
+}
+
+u16 Argument::GetImmediateU16() const {
+    u64 imm = ImmediateToU64(value);
+    ASSERT(imm < 0x10000);
+    return u16(imm);
+}
+
+u32 Argument::GetImmediateU32() const {
+    u64 imm = ImmediateToU64(value);
+    ASSERT(imm < 0x100000000);
+    return u32(imm);
+}
+
+u64 Argument::GetImmediateU64() const {
+    return ImmediateToU64(value);
+}
+
+bool Argument::IsInGpr() const {
+    return HostLocIsGPR(*reg_alloc.ValueLocation(value.GetInst()));
+}
+
+bool Argument::IsInXmm() const {
+    return HostLocIsXMM(*reg_alloc.ValueLocation(value.GetInst()));
+}
+
+bool Argument::IsInMemory() const {
+    return HostLocIsSpill(*reg_alloc.ValueLocation(value.GetInst()));
+}
+
+std::array<Argument, 3> RegAlloc::GetArgumentInfo(IR::Inst* inst) {
+    std::array<Argument, 3> ret = { Argument{*this}, Argument{*this}, Argument{*this} };
+    for (size_t i = 0; i < inst->NumArgs(); i++) {
+        IR::Value arg = inst->GetArg(i);
+        ret[i].value = arg;
+        if (!arg.IsImmediate()) {
+            arg.GetInst()->DecrementRemainingUses();
         }
     }
-    OpArg use_oparg = UseOpArg(use_value, any_gpr);
-    HostLoc def_reg = DefHostLocReg(def_inst, desired_locations);
-    return std::make_tuple(use_oparg, def_reg);
+    return ret;
 }
 
-HostLoc RegAlloc::UseHostLocReg(IR::Value use_value, HostLocList desired_locations) {
-    if (!use_value.IsImmediate()) {
-        return UseHostLocReg(use_value.GetInst(), desired_locations);
-    }
-
-    return LoadImmediateIntoHostLocReg(use_value, ScratchHostLocReg(desired_locations));
+Xbyak::Reg64 RegAlloc::UseGpr(Argument& arg) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    return HostLocToReg64(UseImpl(arg.value, any_gpr));
 }
 
-HostLoc RegAlloc::UseHostLocReg(IR::Inst* use_inst, HostLocList desired_locations) {
-    HostLoc current_location;
-    bool was_being_used;
-    std::tie(current_location, was_being_used) = UseHostLoc(use_inst, desired_locations);
-
-    if (HostLocIsRegister(current_location)) {
-        return current_location;
-    } else if (HostLocIsSpill(current_location)) {
-        HostLoc new_location = SelectARegister(desired_locations);
-        if (IsRegisterOccupied(new_location)) {
-            SpillRegister(new_location);
-        }
-        EmitMove(new_location, current_location);
-        if (!was_being_used) {
-            LocInfo(new_location) = LocInfo(current_location);
-            LocInfo(current_location) = {};
-            DEBUG_ASSERT(LocInfo(new_location).IsUse());
-        } else {
-            LocInfo(new_location).is_being_used = true;
-            DEBUG_ASSERT(LocInfo(new_location).IsScratch());
-        }
-        return new_location;
-    }
-
-    ASSERT_MSG(false, "Unknown current_location type");
+Xbyak::Xmm RegAlloc::UseXmm(Argument& arg) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    return HostLocToXmm(UseImpl(arg.value, any_xmm));
 }
 
-OpArg RegAlloc::UseOpArg(IR::Value use_value, HostLocList desired_locations) {
+OpArg RegAlloc::UseOpArg(Argument& arg) {
+    return UseGpr(arg);
+}
 
+void RegAlloc::Use(Argument& arg, HostLoc host_loc) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    UseImpl(arg.value, {host_loc});
+}
+
+Xbyak::Reg64 RegAlloc::UseScratchGpr(Argument& arg) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    return HostLocToReg64(UseScratchImpl(arg.value, any_gpr));
+}
+
+Xbyak::Xmm RegAlloc::UseScratchXmm(Argument& arg) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    return HostLocToXmm(UseScratchImpl(arg.value, any_xmm));
+}
+
+void RegAlloc::UseScratch(Argument& arg, HostLoc host_loc) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    UseScratchImpl(arg.value, {host_loc});
+}
+
+void RegAlloc::DefineValue(IR::Inst* inst, const Xbyak::Reg& reg) {
+    ASSERT(reg.getKind() == Xbyak::Operand::XMM || reg.getKind() == Xbyak::Operand::REG);
+    HostLoc hostloc = static_cast<HostLoc>(reg.getIdx() + static_cast<size_t>(reg.getKind() == Xbyak::Operand::XMM ? HostLoc::XMM0 : HostLoc::RAX));
+    DefineValueImpl(inst, hostloc);
+}
+
+void RegAlloc::DefineValue(IR::Inst* inst, Argument& arg) {
+    ASSERT(!arg.allocated);
+    arg.allocated = true;
+    DefineValueImpl(inst, arg.value);
+}
+
+Xbyak::Reg64 RegAlloc::ScratchGpr(HostLocList desired_locations) {
+    return HostLocToReg64(ScratchImpl(desired_locations));
+}
+
+Xbyak::Xmm RegAlloc::ScratchXmm(HostLocList desired_locations) {
+    return HostLocToXmm(ScratchImpl(desired_locations));
+}
+
+HostLoc RegAlloc::UseImpl(IR::Value use_value, HostLocList desired_locations) {
     if (use_value.IsImmediate()) {
-        ASSERT_MSG(false, "UseOpArg does not support immediates");
-        return {}; // return a None
+        return LoadImmediate(use_value, ScratchImpl(desired_locations));
     }
 
     IR::Inst* use_inst = use_value.GetInst();
+    const HostLoc current_location = *ValueLocation(use_inst);
 
-    HostLoc current_location;
-    bool was_being_used;
-    std::tie(current_location, was_being_used) = UseHostLoc(use_inst, desired_locations);
-
-    if (HostLocIsRegister(current_location)) {
-        return HostLocToX64(current_location);
-    } else if (HostLocIsSpill(current_location)) {
-        return SpillToOpArg(current_location);
+    const bool can_use_current_location = std::find(desired_locations.begin(), desired_locations.end(), current_location) != desired_locations.end();
+    if (can_use_current_location) {
+        LocInfo(current_location).ReadLock();
+        return current_location;
     }
 
-    ASSERT_MSG(false, "Unknown current_location type");
+    if (LocInfo(current_location).IsLocked()) {
+        return UseScratchImpl(use_value, desired_locations);
+    }
+
+    const HostLoc destination_location = SelectARegister(desired_locations);
+    if (IsSameHostLocClass(destination_location, current_location)) {
+        Exchange(destination_location, current_location);
+    } else {
+        MoveOutOfTheWay(destination_location);
+        Move(destination_location, current_location);
+    }
+    LocInfo(destination_location).ReadLock();
+    return destination_location;
 }
 
-HostLoc RegAlloc::UseScratchHostLocReg(IR::Value use_value, HostLocList desired_locations) {
-    if (!use_value.IsImmediate()) {
-        return UseScratchHostLocReg(use_value.GetInst(), desired_locations);
+HostLoc RegAlloc::UseScratchImpl(IR::Value use_value, HostLocList desired_locations) {
+    if (use_value.IsImmediate()) {
+        return LoadImmediate(use_value, ScratchImpl(desired_locations));
     }
 
-    return LoadImmediateIntoHostLocReg(use_value, ScratchHostLocReg(desired_locations));
+    IR::Inst* use_inst = use_value.GetInst();
+    const HostLoc current_location = *ValueLocation(use_inst);
+
+    const bool can_use_current_location = std::find(desired_locations.begin(), desired_locations.end(), current_location) != desired_locations.end();
+    if (can_use_current_location && !LocInfo(current_location).IsLocked()) {
+        MoveOutOfTheWay(current_location);
+        LocInfo(current_location).WriteLock();
+        return current_location;
+    }
+
+    const HostLoc destination_location = SelectARegister(desired_locations);
+    MoveOutOfTheWay(destination_location);
+    CopyToScratch(destination_location, current_location);
+    LocInfo(destination_location).WriteLock();
+    return destination_location;
 }
 
-HostLoc RegAlloc::UseScratchHostLocReg(IR::Inst* use_inst, HostLocList desired_locations) {
-    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
-    DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
-    ASSERT_MSG(use_inst->HasUses(), "use_inst ran out of uses. (Use-d an IR::Inst* too many times)");
-
-    HostLoc current_location = *ValueLocation(use_inst);
-    HostLoc new_location = SelectARegister(desired_locations);
-    if (IsRegisterOccupied(new_location)) {
-        SpillRegister(new_location);
-    }
-
-    if (HostLocIsSpill(current_location)) {
-        EmitMove(new_location, current_location);
-        LocInfo(new_location).is_being_used = true;
-        use_inst->DecrementRemainingUses();
-        DEBUG_ASSERT(LocInfo(new_location).IsScratch());
-        return new_location;
-    } else if (HostLocIsRegister(current_location)) {
-        ASSERT(LocInfo(current_location).IsIdle()
-            || LocInfo(current_location).IsUse()
-            || LocInfo(current_location).IsUseDef());
-
-        if (current_location != new_location) {
-            EmitMove(new_location, current_location);
-        } else {
-            ASSERT(LocInfo(current_location).IsIdle());
-        }
-
-        LocInfo(new_location).is_being_used = true;
-        LocInfo(new_location).values.clear();
-        use_inst->DecrementRemainingUses();
-        DEBUG_ASSERT(LocInfo(new_location).IsScratch());
-        return new_location;
-    }
-
-    ASSERT_MSG(false, "Invalid current_location");
-}
-
-HostLoc RegAlloc::ScratchHostLocReg(HostLocList desired_locations) {
-    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
-
+HostLoc RegAlloc::ScratchImpl(HostLocList desired_locations) {
     HostLoc location = SelectARegister(desired_locations);
-    if (IsRegisterOccupied(location)) {
-        SpillRegister(location);
-    }
-
-    // Update state
-    LocInfo(location).is_being_used = true;
-
-    DEBUG_ASSERT(LocInfo(location).IsScratch());
+    MoveOutOfTheWay(location);
+    LocInfo(location).WriteLock();
     return location;
 }
 
-void RegAlloc::HostCall(IR::Inst* result_def, IR::Value arg0_use, IR::Value arg1_use, IR::Value arg2_use, IR::Value arg3_use) {
+void RegAlloc::HostCall(IR::Inst* result_def, boost::optional<Argument&> arg0, boost::optional<Argument&> arg1, boost::optional<Argument&> arg2, boost::optional<Argument&> arg3) {
     constexpr size_t args_count = 4;
     constexpr std::array<HostLoc, args_count> args_hostloc = { ABI_PARAM1, ABI_PARAM2, ABI_PARAM3, ABI_PARAM4 };
-    const std::array<IR::Value*, args_count> args = {&arg0_use, &arg1_use, &arg2_use, &arg3_use};
+    const std::array<boost::optional<Argument&>, args_count> args = { arg0, arg1, arg2, arg3 };
 
-    const static std::vector<HostLoc> other_caller_save = [args_hostloc](){
+    const static std::vector<HostLoc> other_caller_save = [args_hostloc]() {
         std::vector<HostLoc> ret(ABI_ALL_CALLER_SAVE.begin(), ABI_ALL_CALLER_SAVE.end());
 
         for (auto hostloc : args_hostloc)
@@ -286,33 +300,45 @@ void RegAlloc::HostCall(IR::Inst* result_def, IR::Value arg0_use, IR::Value arg1
         return ret;
     }();
 
-    // TODO: This works but almost certainly leads to suboptimal generated code.
+    ScratchGpr({ABI_RETURN});
 
     if (result_def) {
-        DefHostLocReg(result_def, {ABI_RETURN});
-    } else {
-        ScratchHostLocReg({ABI_RETURN});
+        DefineValueImpl(result_def, ABI_RETURN);
     }
 
     for (size_t i = 0; i < args_count; i++) {
-        if (!args[i]->IsEmpty()) {
-            UseScratchHostLocReg(*args[i], {args_hostloc[i]});
-        } else {
-            ScratchHostLocReg({args_hostloc[i]});
+        if (args[i]) {
+            UseScratch(*args[i], args_hostloc[i]);
+        }
+    }
 
+    for (size_t i = 0; i < args_count; i++) {
+        if (!args[i]) {
+            // TODO: Force spill
+            ScratchGpr({args_hostloc[i]});
         }
     }
 
     for (HostLoc caller_saved : other_caller_save) {
-        ScratchHostLocReg({caller_saved});
+        ScratchImpl({caller_saved});
     }
 }
 
+void RegAlloc::EndOfAllocScope() {
+    for (auto& iter : hostloc_info) {
+        iter.EndOfAllocScope();
+    }
+}
+
+void RegAlloc::AssertNoMoreUses() {
+    ASSERT(std::all_of(hostloc_info.begin(), hostloc_info.end(), [](const auto& i) { return i.IsEmpty(); }));
+}
 
 HostLoc RegAlloc::SelectARegister(HostLocList desired_locations) const {
     std::vector<HostLoc> candidates = desired_locations;
 
     // Find all locations that have not been allocated..
     auto allocated_locs = std::partition(candidates.begin(), candidates.end(), [this](auto loc){
-        return !this->IsRegisterAllocated(loc);
+        return !this->LocInfo(loc).IsLocked();
     });
     candidates.erase(allocated_locs, candidates.end());
     ASSERT_MSG(!candidates.empty(), "All candidate registers have already been allocated");
@@ -321,7 +347,7 @@ HostLoc RegAlloc::SelectARegister(HostLocList desired_locations) const {
     // TODO: Actually do LRU or something. Currently we just try to pick something without a value if possible.
 
     std::partition(candidates.begin(), candidates.end(), [this](auto loc){
-        return !this->IsRegisterOccupied(loc);
+        return this->LocInfo(loc).IsEmpty();
    });
 
     return candidates.front();
@@ -329,152 +355,33 @@ HostLoc RegAlloc::SelectARegister(HostLocList desired_locations) const {
 
 boost::optional<HostLoc> RegAlloc::ValueLocation(const IR::Inst* value) const {
     for (size_t i = 0; i < HostLocCount; i++)
-        for (const IR::Inst* v : hostloc_info[i].values)
-            if (v == value)
-                return boost::make_optional<HostLoc>(static_cast<HostLoc>(i));
+        if (hostloc_info[i].ContainsValue(value))
+            return boost::make_optional<HostLoc>(static_cast<HostLoc>(i));
 
     return boost::none;
 }
 
-bool RegAlloc::IsRegisterOccupied(HostLoc loc) const {
-    const auto& info = LocInfo(loc);
-    return !info.values.empty() || info.def;
+void RegAlloc::DefineValueImpl(IR::Inst* def_inst, HostLoc host_loc) {
+    ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
+    LocInfo(host_loc).AddValue(def_inst);
 }
 
-bool RegAlloc::IsRegisterAllocated(HostLoc loc) const {
-    return LocInfo(loc).is_being_used;
-}
+void RegAlloc::DefineValueImpl(IR::Inst* def_inst, const IR::Value& use_inst) {
+    ASSERT_MSG(!ValueLocation(def_inst), "def_inst has already been defined");
 
-bool RegAlloc::IsLastUse(const IR::Inst* inst) const {
-    if (inst->UseCount() > 1)
-        return false;
-    return LocInfo(*ValueLocation(inst)).values.size() == 1;
-}
+    if (use_inst.IsImmediate()) {
+        HostLoc location = ScratchImpl(any_gpr);
+        DefineValueImpl(def_inst, location);
+        LoadImmediate(use_inst, location);
+        return;
-
-void RegAlloc::SpillRegister(HostLoc loc) {
-    ASSERT_MSG(HostLocIsRegister(loc), "Only registers can be spilled");
-    ASSERT_MSG(IsRegisterOccupied(loc), "There is no need to spill unoccupied registers");
-    ASSERT_MSG(!IsRegisterAllocated(loc), "Registers that have been allocated must not be spilt");
-
-    HostLoc new_loc = FindFreeSpill();
-
-    EmitMove(new_loc, loc);
-
-    LocInfo(new_loc) = LocInfo(loc);
-    LocInfo(loc) = {};
-}
-
-HostLoc RegAlloc::FindFreeSpill() const {
-    for (size_t i = 0; i < SpillCount; i++)
-        if (!IsRegisterOccupied(HostLocSpill(i)))
-            return HostLocSpill(i);
-
-    ASSERT_MSG(false, "All spill locations are full");
-}
-
-void RegAlloc::EndOfAllocScope() {
-    for (auto& iter : hostloc_info) {
-        iter.is_being_used = false;
-        if (iter.def) {
-            iter.values.clear();
-            iter.values.emplace_back(iter.def);
-            iter.def = nullptr;
-        }
-        if (!iter.values.empty()) {
-            auto to_erase = std::remove_if(iter.values.begin(), iter.values.end(),
-                                           [](const auto& inst){ return !inst->HasUses(); });
-            iter.values.erase(to_erase, iter.values.end());
-        }
-    }
-}
-
-void RegAlloc::AssertNoMoreUses() {
-    ASSERT(std::all_of(hostloc_info.begin(), hostloc_info.end(), [](const auto& i){ return i.values.empty(); }));
-}
-
-void RegAlloc::Reset() {
-    hostloc_info.fill({});
-}
-
-void RegAlloc::EmitMove(HostLoc to, HostLoc from) {
-    if (HostLocIsXMM(to) && HostLocIsSpill(from)) {
-        code->movsd(HostLocToXmm(to), SpillToOpArg(from));
-    } else if (HostLocIsSpill(to) && HostLocIsXMM(from)) {
-        code->movsd(SpillToOpArg(to), HostLocToXmm(from));
-    } else if (HostLocIsXMM(to) && HostLocIsXMM(from)) {
-        code->movaps(HostLocToXmm(to), HostLocToXmm(from));
-    } else if (HostLocIsGPR(to) && HostLocIsSpill(from)) {
-        code->mov(HostLocToReg64(to), SpillToOpArg(from));
-    } else if (HostLocIsSpill(to) && HostLocIsGPR(from)) {
-        code->mov(SpillToOpArg(to), HostLocToReg64(from));
-    } else if (HostLocIsGPR(to) && HostLocIsGPR(from)){
-        code->mov(HostLocToReg64(to), HostLocToReg64(from));
-    } else {
-        ASSERT_MSG(false, "Invalid RegAlloc::EmitMove");
-    }
-}
-
-void RegAlloc::EmitExchange(HostLoc a, HostLoc b) {
-    if (HostLocIsGPR(a) && HostLocIsGPR(b)) {
-        code->xchg(HostLocToReg64(a), HostLocToReg64(b));
-    } else if (HostLocIsXMM(a) && HostLocIsXMM(b)) {
-        ASSERT_MSG(false, "Exchange is unnecessary for XMM registers");
-    } else {
-        ASSERT_MSG(false, "Invalid RegAlloc::EmitExchange");
-    }
-}
-
-std::tuple<HostLoc, bool> RegAlloc::UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations) {
-    DEBUG_ASSERT(std::all_of(desired_locations.begin(), desired_locations.end(), HostLocIsRegister));
-    DEBUG_ASSERT_MSG(ValueLocation(use_inst), "use_inst has not been defined");
-
-    HostLoc current_location = *ValueLocation(use_inst);
-    auto iter = std::find(desired_locations.begin(), desired_locations.end(), current_location);
-    if (iter != desired_locations.end()) {
-        if (LocInfo(current_location).IsDef()) {
-            HostLoc new_location = SelectARegister(desired_locations);
-            if (IsRegisterOccupied(new_location)) {
-                SpillRegister(new_location);
-            }
-            EmitMove(new_location, current_location);
-            LocInfo(new_location).is_being_used = true;
-            LocInfo(new_location).values.emplace_back(use_inst);
-            use_inst->DecrementRemainingUses();
-            DEBUG_ASSERT(LocInfo(new_location).IsUse());
-            return std::make_tuple(new_location, false);
-        } else {
-            bool was_being_used = LocInfo(current_location).is_being_used;
-            ASSERT(LocInfo(current_location).IsUse() || LocInfo(current_location).IsIdle());
-            LocInfo(current_location).is_being_used = true;
-            use_inst->DecrementRemainingUses();
-            DEBUG_ASSERT(LocInfo(current_location).IsUse());
-            return std::make_tuple(current_location, was_being_used);
-        }
     }
 
-    if (HostLocIsSpill(current_location)) {
-        bool was_being_used = LocInfo(current_location).is_being_used;
-        LocInfo(current_location).is_being_used = true;
-        use_inst->DecrementRemainingUses();
-        DEBUG_ASSERT(LocInfo(current_location).IsUse());
-        return std::make_tuple(current_location, was_being_used);
-    } else if (HostLocIsRegister(current_location)) {
-        HostLoc new_location = SelectARegister(desired_locations);
-        ASSERT(LocInfo(current_location).IsIdle());
-        EmitExchange(new_location, current_location);
-        std::swap(LocInfo(new_location), LocInfo(current_location));
-        LocInfo(new_location).is_being_used = true;
-        use_inst->DecrementRemainingUses();
-        DEBUG_ASSERT(LocInfo(new_location).IsUse());
-        return std::make_tuple(new_location, false);
-    }
-
-    ASSERT_MSG(false, "Invalid current_location");
-    return std::make_tuple(static_cast<HostLoc>(-1), false);
+    ASSERT_MSG(ValueLocation(use_inst.GetInst()), "use_inst must already be defined");
+    HostLoc location = *ValueLocation(use_inst.GetInst());
+    DefineValueImpl(def_inst, location);
 }
 
-HostLoc RegAlloc::LoadImmediateIntoHostLocReg(IR::Value imm, HostLoc host_loc) {
+HostLoc RegAlloc::LoadImmediate(IR::Value imm, HostLoc host_loc) {
     ASSERT_MSG(imm.IsImmediate(), "imm is not an immediate");
 
     Xbyak::Reg64 reg = HostLocToReg64(host_loc);
@@ -487,5 +394,76 @@ HostLoc RegAlloc::LoadImmediateIntoHostLocReg(IR::Value imm, HostLoc host_loc) {
     return host_loc;
 }
 
+void RegAlloc::Move(HostLoc to, HostLoc from) {
+    ASSERT(LocInfo(to).IsEmpty() && !LocInfo(from).IsLocked());
+
+    if (LocInfo(from).IsEmpty()) {
+        return;
+    }
+
+    LocInfo(to) = LocInfo(from);
+    LocInfo(from) = {};
+
+    EmitMove(code, to, from);
+}
+
+void RegAlloc::CopyToScratch(HostLoc to, HostLoc from) {
+    ASSERT(LocInfo(to).IsEmpty() && !LocInfo(from).IsEmpty());
+
+    EmitMove(code, to, from);
+}
+
+void RegAlloc::Exchange(HostLoc a, HostLoc b) {
+    ASSERT(!LocInfo(a).IsLocked() && !LocInfo(b).IsLocked());
+
+    if (LocInfo(a).IsEmpty()) {
+        Move(a, b);
+        return;
+    }
+
+    if (LocInfo(b).IsEmpty()) {
+        Move(b, a);
+        return;
+    }
+
+    std::swap(LocInfo(a), LocInfo(b));
+
+    EmitExchange(code, a, b);
+}
+
+void RegAlloc::MoveOutOfTheWay(HostLoc reg) {
+    ASSERT(!LocInfo(reg).IsLocked());
+    if (!LocInfo(reg).IsEmpty()) {
+        SpillRegister(reg);
+    }
+}
+
+void RegAlloc::SpillRegister(HostLoc loc) {
+    ASSERT_MSG(HostLocIsRegister(loc), "Only registers can be spilled");
+    ASSERT_MSG(!LocInfo(loc).IsEmpty(), "There is no need to spill unoccupied registers");
+    ASSERT_MSG(!LocInfo(loc).IsLocked(), "Registers that have been allocated must not be spilt");
+
+    HostLoc new_loc = FindFreeSpill();
+    Move(new_loc, loc);
+}
+
+HostLoc RegAlloc::FindFreeSpill() const {
+    for (size_t i = 0; i < SpillCount; i++)
+        if (LocInfo(HostLocSpill(i)).IsEmpty())
+            return HostLocSpill(i);
+
+    ASSERT_MSG(false, "All spill locations are full");
+}
+
+HostLocInfo& RegAlloc::LocInfo(HostLoc loc) {
+    ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
+    return hostloc_info[static_cast<size_t>(loc)];
+}
+
+const HostLocInfo& RegAlloc::LocInfo(HostLoc loc) const {
+    ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
+    return hostloc_info[static_cast<size_t>(loc)];
+}
+
 } // namespace BackendX64
 } // namespace Dynarmic
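As an aside, a minimal sketch of how an emitter routine might drive the reworked HostCall above; the handler name, its signature and the surrounding emitter class are illustrative assumptions, while GetArgumentInfo, HostCall and the ABI_RETURN/ABI_PARAM1 behaviour come from the code in this commit.

// Hypothetical emitter for a memory read; only the RegAlloc calls are from this commit.
void EmitX64::EmitReadMemory32(RegAlloc& reg_alloc, IR::Inst* inst) {
    auto args = reg_alloc.GetArgumentInfo(inst);

    // Defines the instruction's result in ABI_RETURN, forces args[0] into ABI_PARAM1,
    // and scratches the remaining parameter registers and other caller-saved registers.
    reg_alloc.HostCall(inst, args[0]);

    // The actual call to the user's read callback is emitted by the handler afterwards;
    // call emission is unchanged by this commit and omitted from the sketch.
}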
@@ -22,62 +22,78 @@
 namespace Dynarmic {
 namespace BackendX64 {
 
+class RegAlloc;
+
+struct HostLocInfo {
+public:
+    bool IsLocked() const;
+    bool IsEmpty() const;
+    bool IsLastUse() const;
+
+    bool ContainsValue(const IR::Inst* inst) const;
+
+    void ReadLock();
+    void WriteLock();
+
+    void AddValue(IR::Inst* inst);
+
+    void EndOfAllocScope();
+
+private:
+    std::vector<IR::Inst*> values;
+    bool is_being_used = false;
+    bool is_scratch = false;
+};
+
+struct Argument {
+public:
+    IR::Type GetType() const;
+    bool IsImmediate() const;
+
+    bool GetImmediateU1() const;
+    u8 GetImmediateU8() const;
+    u16 GetImmediateU16() const;
+    u32 GetImmediateU32() const;
+    u64 GetImmediateU64() const;
+
+    /// Is this value currently in a GPR?
+    bool IsInGpr() const;
+    /// Is this value currently in a XMM?
+    bool IsInXmm() const;
+    /// Is this value currently in memory?
+    bool IsInMemory() const;
+
+private:
+    friend class RegAlloc;
+    Argument(RegAlloc& reg_alloc) : reg_alloc(reg_alloc) {}
+
+    bool allocated = false;
+    RegAlloc& reg_alloc;
+    IR::Value value;
+};
+
 class RegAlloc final {
 public:
     explicit RegAlloc(BlockOfCode* code) : code(code) {}
 
-    /// Late-def
-    Xbyak::Reg64 DefGpr(IR::Inst* def_inst, HostLocList desired_locations = any_gpr) {
-        return HostLocToReg64(DefHostLocReg(def_inst, desired_locations));
-    }
-    Xbyak::Xmm DefXmm(IR::Inst* def_inst, HostLocList desired_locations = any_xmm) {
-        return HostLocToXmm(DefHostLocReg(def_inst, desired_locations));
-    }
-    void RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst);
-    /// Early-use, Late-def
-    Xbyak::Reg64 UseDefGpr(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_gpr) {
-        return HostLocToReg64(UseDefHostLocReg(use_value, def_inst, desired_locations));
-    }
-    Xbyak::Xmm UseDefXmm(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_xmm) {
-        return HostLocToXmm(UseDefHostLocReg(use_value, def_inst, desired_locations));
-    }
-    std::tuple<OpArg, Xbyak::Reg64> UseDefOpArgGpr(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_gpr) {
-        OpArg op;
-        HostLoc host_loc;
-        std::tie(op, host_loc) = UseDefOpArgHostLocReg(use_value, def_inst, desired_locations);
-        return std::make_tuple(op, HostLocToReg64(host_loc));
-    }
-    std::tuple<OpArg, Xbyak::Xmm> UseDefOpArgXmm(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_xmm) {
-        OpArg op;
-        HostLoc host_loc;
-        std::tie(op, host_loc) = UseDefOpArgHostLocReg(use_value, def_inst, desired_locations);
-        return std::make_tuple(op, HostLocToXmm(host_loc));
-    }
-    /// Early-use
-    Xbyak::Reg64 UseGpr(IR::Value use_value, HostLocList desired_locations = any_gpr) {
-        return HostLocToReg64(UseHostLocReg(use_value, desired_locations));
-    }
-    Xbyak::Xmm UseXmm(IR::Value use_value, HostLocList desired_locations = any_xmm) {
-        return HostLocToXmm(UseHostLocReg(use_value, desired_locations));
-    }
-    OpArg UseOpArg(IR::Value use_value, HostLocList desired_locations);
-    /// Early-use, Destroyed
-    Xbyak::Reg64 UseScratchGpr(IR::Value use_value, HostLocList desired_locations = any_gpr) {
-        return HostLocToReg64(UseScratchHostLocReg(use_value, desired_locations));
-    }
-    Xbyak::Xmm UseScratchXmm(IR::Value use_value, HostLocList desired_locations = any_xmm) {
-        return HostLocToXmm(UseScratchHostLocReg(use_value, desired_locations));
-    }
-    /// Early-def, Late-use, single-use
-    Xbyak::Reg64 ScratchGpr(HostLocList desired_locations = any_gpr) {
-        return HostLocToReg64(ScratchHostLocReg(desired_locations));
-    }
-    Xbyak::Xmm ScratchXmm(HostLocList desired_locations = any_xmm) {
-        return HostLocToXmm(ScratchHostLocReg(desired_locations));
-    }
-
-    /// Late-def for result register, Early-use for all arguments, Each value is placed into registers according to host ABI.
-    void HostCall(IR::Inst* result_def = nullptr, IR::Value arg0_use = {}, IR::Value arg1_use = {}, IR::Value arg2_use = {}, IR::Value arg3_use = {});
+    std::array<Argument, 3> GetArgumentInfo(IR::Inst* inst);
+
+    Xbyak::Reg64 UseGpr(Argument& arg);
+    Xbyak::Xmm UseXmm(Argument& arg);
+    OpArg UseOpArg(Argument& arg);
+    void Use(Argument& arg, HostLoc host_loc);
+
+    Xbyak::Reg64 UseScratchGpr(Argument& arg);
+    Xbyak::Xmm UseScratchXmm(Argument& arg);
+    void UseScratch(Argument& arg, HostLoc host_loc);
+
+    void DefineValue(IR::Inst* inst, const Xbyak::Reg& reg);
+    void DefineValue(IR::Inst* inst, Argument& arg);
+
+    Xbyak::Reg64 ScratchGpr(HostLocList desired_locations = any_gpr);
+    Xbyak::Xmm ScratchXmm(HostLocList desired_locations = any_xmm);
+
+    void HostCall(IR::Inst* result_def = nullptr, boost::optional<Argument&> arg0 = {}, boost::optional<Argument&> arg1 = {}, boost::optional<Argument&> arg2 = {}, boost::optional<Argument&> arg3 = {});
 
     // TODO: Values in host flags
 
@@ -85,65 +101,32 @@ public:
 
     void AssertNoMoreUses();
 
-    void Reset();
-
 private:
+    friend struct Argument;
+
     HostLoc SelectARegister(HostLocList desired_locations) const;
     boost::optional<HostLoc> ValueLocation(const IR::Inst* value) const;
-    bool IsRegisterOccupied(HostLoc loc) const;
-    bool IsRegisterAllocated(HostLoc loc) const;
-    bool IsLastUse(const IR::Inst* inst) const;
 
-    HostLoc DefHostLocReg(IR::Inst* def_inst, HostLocList desired_locations);
-    HostLoc UseDefHostLocReg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
-    HostLoc UseDefHostLocReg(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations);
-    std::tuple<OpArg, HostLoc> UseDefOpArgHostLocReg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
-    HostLoc UseHostLocReg(IR::Value use_value, HostLocList desired_locations);
-    HostLoc UseHostLocReg(IR::Inst* use_inst, HostLocList desired_locations);
-    std::tuple<HostLoc, bool> UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations);
-    HostLoc UseScratchHostLocReg(IR::Value use_value, HostLocList desired_locations);
-    HostLoc UseScratchHostLocReg(IR::Inst* use_inst, HostLocList desired_locations);
-    HostLoc ScratchHostLocReg(HostLocList desired_locations);
-
-    void EmitMove(HostLoc to, HostLoc from);
-    void EmitExchange(HostLoc a, HostLoc b);
-    HostLoc LoadImmediateIntoHostLocReg(IR::Value imm, HostLoc reg);
+    HostLoc UseImpl(IR::Value use_value, HostLocList desired_locations);
+    HostLoc UseScratchImpl(IR::Value use_value, HostLocList desired_locations);
+    HostLoc ScratchImpl(HostLocList desired_locations);
+    void DefineValueImpl(IR::Inst* def_inst, HostLoc host_loc);
+    void DefineValueImpl(IR::Inst* def_inst, const IR::Value& use_inst);
+
+    BlockOfCode* code = nullptr;
+    HostLoc LoadImmediate(IR::Value imm, HostLoc reg);
+    void Move(HostLoc to, HostLoc from);
+    void CopyToScratch(HostLoc to, HostLoc from);
+    void Exchange(HostLoc a, HostLoc b);
+    void MoveOutOfTheWay(HostLoc reg);
+
     void SpillRegister(HostLoc loc);
     HostLoc FindFreeSpill() const;
 
-    BlockOfCode* code = nullptr;
-
-    struct HostLocInfo {
-        std::vector<IR::Inst*> values; // early value
-        IR::Inst* def = nullptr; // late value
-        bool is_being_used = false;
-
-        bool IsIdle() const {
-            return !is_being_used;
-        }
-        bool IsScratch() const {
-            return is_being_used && !def && values.empty();
-        }
-        bool IsUse() const {
-            return is_being_used && !def && !values.empty();
-        }
-        bool IsDef() const {
-            return is_being_used && def && values.empty();
-        }
-        bool IsUseDef() const {
-            return is_being_used && def && !values.empty();
-        }
-    };
     std::array<HostLocInfo, HostLocCount> hostloc_info;
-    HostLocInfo& LocInfo(HostLoc loc) {
-        DEBUG_ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
-        return hostloc_info[static_cast<size_t>(loc)];
-    }
-    const HostLocInfo& LocInfo(HostLoc loc) const {
-        DEBUG_ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
-        return hostloc_info[static_cast<size_t>(loc)];
-    }
+    HostLocInfo& LocInfo(HostLoc loc);
+    const HostLocInfo& LocInfo(HostLoc loc) const;
 };
 
 } // namespace BackendX64
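For orientation, a sketch of the intended call pattern for the new public interface, assuming an emitter with access to a BlockOfCode* code and a RegAlloc; the handler name, its signature and the chosen instruction are hypothetical, but GetArgumentInfo, UseGpr, UseScratchGpr and DefineValue are the entry points declared above.

// Hypothetical handler showing the GetArgumentInfo -> Use* -> DefineValue flow.
void EmitX64::EmitAdd32(RegAlloc& reg_alloc, IR::Inst* inst) {
    auto args = reg_alloc.GetArgumentInfo(inst);             // query all arguments once

    Xbyak::Reg64 result = reg_alloc.UseScratchGpr(args[0]);  // write-locked copy of operand 0
    Xbyak::Reg64 operand = reg_alloc.UseGpr(args[1]);        // read-locked operand 1

    code->add(result.cvt32(), operand.cvt32());

    // Late-def: the instruction's value now lives in `result`.
    reg_alloc.DefineValue(inst, result);
}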
@@ -255,13 +255,13 @@ Inst* Inst::GetAssociatedPseudoOperation(Opcode opcode) {
     // This is faster than doing a search through the block.
     switch (opcode) {
     case IR::Opcode::GetCarryFromOp:
-        DEBUG_ASSERT(!carry_inst || carry_inst->GetOpcode() == Opcode::GetCarryFromOp);
+        ASSERT(!carry_inst || carry_inst->GetOpcode() == Opcode::GetCarryFromOp);
         return carry_inst;
     case IR::Opcode::GetOverflowFromOp:
-        DEBUG_ASSERT(!overflow_inst || overflow_inst->GetOpcode() == Opcode::GetOverflowFromOp);
+        ASSERT(!overflow_inst || overflow_inst->GetOpcode() == Opcode::GetOverflowFromOp);
         return overflow_inst;
     case IR::Opcode::GetGEFromOp:
-        DEBUG_ASSERT(!ge_inst || ge_inst->GetOpcode() == Opcode::GetGEFromOp);
+        ASSERT(!ge_inst || ge_inst->GetOpcode() == Opcode::GetGEFromOp);
         return ge_inst;
     default:
         break;
@@ -278,15 +278,15 @@ Type Inst::GetType() const {
 }
 
 Value Inst::GetArg(size_t index) const {
-    DEBUG_ASSERT(index < GetNumArgsOf(op));
-    DEBUG_ASSERT(!args[index].IsEmpty());
+    ASSERT(index < GetNumArgsOf(op));
+    ASSERT(!args[index].IsEmpty());
 
     return args[index];
 }
 
 void Inst::SetArg(size_t index, Value value) {
-    DEBUG_ASSERT(index < GetNumArgsOf(op));
-    DEBUG_ASSERT(AreTypesCompatible(value.GetType(), GetArgTypeOf(op, index)));
+    ASSERT(index < GetNumArgsOf(op));
+    ASSERT(AreTypesCompatible(value.GetType(), GetArgTypeOf(op, index)));
 
     if (!args[index].IsImmediate()) {
         UndoUse(args[index]);
@ -346,15 +346,15 @@ void Inst::UndoUse(const Value& value) {
|
||||||
|
|
||||||
switch (op){
|
switch (op){
|
||||||
case Opcode::GetCarryFromOp:
|
case Opcode::GetCarryFromOp:
|
||||||
DEBUG_ASSERT(value.GetInst()->carry_inst->GetOpcode() == Opcode::GetCarryFromOp);
|
ASSERT(value.GetInst()->carry_inst->GetOpcode() == Opcode::GetCarryFromOp);
|
||||||
value.GetInst()->carry_inst = nullptr;
|
value.GetInst()->carry_inst = nullptr;
|
||||||
break;
|
break;
|
||||||
case Opcode::GetOverflowFromOp:
|
case Opcode::GetOverflowFromOp:
|
||||||
DEBUG_ASSERT(value.GetInst()->overflow_inst->GetOpcode() == Opcode::GetOverflowFromOp);
|
ASSERT(value.GetInst()->overflow_inst->GetOpcode() == Opcode::GetOverflowFromOp);
|
||||||
value.GetInst()->overflow_inst = nullptr;
|
value.GetInst()->overflow_inst = nullptr;
|
||||||
break;
|
break;
|
||||||
case Opcode::GetGEFromOp:
|
case Opcode::GetGEFromOp:
|
||||||
DEBUG_ASSERT(value.GetInst()->ge_inst->GetOpcode() == Opcode::GetGEFromOp);
|
ASSERT(value.GetInst()->ge_inst->GetOpcode() == Opcode::GetGEFromOp);
|
||||||
value.GetInst()->ge_inst = nullptr;
|
value.GetInst()->ge_inst = nullptr;
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
|
|
|
@@ -69,59 +69,59 @@ Type Value::GetType() const {
 }
 
 Arm::Reg Value::GetRegRef() const {
-    DEBUG_ASSERT(type == Type::RegRef);
+    ASSERT(type == Type::RegRef);
     return inner.imm_regref;
 }
 
 Arm::ExtReg Value::GetExtRegRef() const {
-    DEBUG_ASSERT(type == Type::ExtRegRef);
+    ASSERT(type == Type::ExtRegRef);
     return inner.imm_extregref;
 }
 
 Inst* Value::GetInst() const {
-    DEBUG_ASSERT(type == Type::Opaque);
+    ASSERT(type == Type::Opaque);
     return inner.inst;
 }
 
 bool Value::GetU1() const {
     if (type == Type::Opaque && inner.inst->GetOpcode() == Opcode::Identity)
         return inner.inst->GetArg(0).GetU1();
-    DEBUG_ASSERT(type == Type::U1);
+    ASSERT(type == Type::U1);
     return inner.imm_u1;
 }
 
 u8 Value::GetU8() const {
     if (type == Type::Opaque && inner.inst->GetOpcode() == Opcode::Identity)
         return inner.inst->GetArg(0).GetU8();
-    DEBUG_ASSERT(type == Type::U8);
+    ASSERT(type == Type::U8);
     return inner.imm_u8;
 }
 
 u16 Value::GetU16() const {
     if (type == Type::Opaque && inner.inst->GetOpcode() == Opcode::Identity)
         return inner.inst->GetArg(0).GetU16();
-    DEBUG_ASSERT(type == Type::U16);
+    ASSERT(type == Type::U16);
     return inner.imm_u16;
 }
 
 u32 Value::GetU32() const {
     if (type == Type::Opaque && inner.inst->GetOpcode() == Opcode::Identity)
         return inner.inst->GetArg(0).GetU32();
-    DEBUG_ASSERT(type == Type::U32);
+    ASSERT(type == Type::U32);
     return inner.imm_u32;
 }
 
 u64 Value::GetU64() const {
     if (type == Type::Opaque && inner.inst->GetOpcode() == Opcode::Identity)
         return inner.inst->GetArg(0).GetU64();
-    DEBUG_ASSERT(type == Type::U64);
+    ASSERT(type == Type::U64);
     return inner.imm_u64;
 }
 
 std::array<u8, 8> Value::GetCoprocInfo() const {
     if (type == Type::Opaque && inner.inst->GetOpcode() == Opcode::Identity)
         return inner.inst->GetArg(0).GetCoprocInfo();
-    DEBUG_ASSERT(type == Type::CoprocInfo);
+    ASSERT(type == Type::CoprocInfo);
     return inner.imm_coproc;
 }