dynarmic/src/backend_x64/reg_alloc.h

/* This file is part of the dynarmic project.
 * Copyright (c) 2016 MerryMage
 * This software may be used and distributed according to the terms of the GNU
 * General Public License version 2 or any later version.
 */
#pragma once

#include <algorithm>
#include <array>
#include <tuple>
#include <vector>

#include <boost/optional.hpp>
#include <xbyak.h>

#include "backend_x64/block_of_code.h"
#include "backend_x64/hostloc.h"
#include "backend_x64/oparg.h"
#include "common/assert.h"
#include "common/common_types.h"
#include "frontend/ir/microinstruction.h"
#include "frontend/ir/value.h"

namespace Dynarmic {
namespace BackendX64 {
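// Bookkeeping for a single host location (a host register or spill slot, as
// enumerated in hostloc.h). The states distinguished by the predicates below,
// as a summary of this code rather than documentation of anything new:
//   Empty:    not locked; holds no values and has no pending def.
//   Scratch:  locked as a temporary; holds no IR values.
//   Use:      locked; holds values being read by the current instruction.
//   Def:      locked; the current instruction's result will be written here.
//   UseDef:   locked; read from now, and the result will also land here.
// EndOfAllocScope() discards values with no remaining uses, promotes a pending
// def into `values`, and unlocks the location for the next instruction.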
struct HostLocInfo {
public:
    bool IsIdle() const {
        return !is_being_used;
    }
    bool IsLocked() const {
        return is_being_used;
    }
    bool IsEmpty() const {
        return !is_being_used && !def && values.empty();
    }
    bool IsScratch() const {
        return is_being_used && !def && values.empty();
    }
    bool IsUse() const {
        return is_being_used && !def && !values.empty();
    }
    bool IsDef() const {
        return is_being_used && def && values.empty();
    }
    bool IsUseDef() const {
        return is_being_used && def && !values.empty();
    }

    bool ContainsValue(const IR::Inst* inst) const {
        return std::find(values.begin(), values.end(), inst) != values.end();
    }

    void Lock() {
        is_being_used = true;
    }

    void AddValue(IR::Inst* inst) {
        values.push_back(inst);
    }

    void Def(IR::Inst* inst) {
        ASSERT(!def);
        def = inst;
    }

    void EndOfAllocScope() {
        const auto to_erase = std::remove_if(values.begin(), values.end(), [](const auto& inst){ return !inst->HasUses(); });
        values.erase(to_erase, values.end());

        if (def) {
            ASSERT(values.empty());
            AddValue(def);
            def = nullptr;
        }

        is_being_used = false;
    }

private:
    std::vector<IR::Inst*> values; // early value
    IR::Inst* def = nullptr; // late value
    bool is_being_used = false;
};
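// Register allocator used by the x64 emitter. Terminology used in the comments
// below, as inferred from this interface rather than separate documentation:
//   Early-use:   the argument's value is read before the result is written.
//   Late-def:    the result is written after all arguments have been read, so a
//                def may share a host register with one of its uses (UseDef*).
//   UseScratch:  the argument is loaded and its register may then be clobbered.
//   Scratch:     a temporary register for the current instruction only.
// The *Gpr/*Xmm wrappers merely convert the chosen HostLoc into the
// corresponding Xbyak register type.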
class RegAlloc final {
public:
    explicit RegAlloc(BlockOfCode* code) : code(code) {}

    /// Late-def
    Xbyak::Reg64 DefGpr(IR::Inst* def_inst, HostLocList desired_locations = any_gpr) {
        return HostLocToReg64(DefHostLocReg(def_inst, desired_locations));
    }
    Xbyak::Xmm DefXmm(IR::Inst* def_inst, HostLocList desired_locations = any_xmm) {
        return HostLocToXmm(DefHostLocReg(def_inst, desired_locations));
    }
    void RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst);

    /// Early-use, Late-def
    Xbyak::Reg64 UseDefGpr(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_gpr) {
        return HostLocToReg64(UseDefHostLocReg(use_value, def_inst, desired_locations));
    }
    Xbyak::Xmm UseDefXmm(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_xmm) {
        return HostLocToXmm(UseDefHostLocReg(use_value, def_inst, desired_locations));
    }
    std::tuple<OpArg, Xbyak::Reg64> UseDefOpArgGpr(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_gpr) {
        OpArg op;
        HostLoc host_loc;
        std::tie(op, host_loc) = UseDefOpArgHostLocReg(use_value, def_inst, desired_locations);
        return std::make_tuple(op, HostLocToReg64(host_loc));
    }
    std::tuple<OpArg, Xbyak::Xmm> UseDefOpArgXmm(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations = any_xmm) {
        OpArg op;
        HostLoc host_loc;
        std::tie(op, host_loc) = UseDefOpArgHostLocReg(use_value, def_inst, desired_locations);
        return std::make_tuple(op, HostLocToXmm(host_loc));
    }

    /// Early-use
    Xbyak::Reg64 UseGpr(IR::Value use_value, HostLocList desired_locations = any_gpr) {
        return HostLocToReg64(UseHostLocReg(use_value, desired_locations));
    }
    Xbyak::Xmm UseXmm(IR::Value use_value, HostLocList desired_locations = any_xmm) {
        return HostLocToXmm(UseHostLocReg(use_value, desired_locations));
    }
    OpArg UseOpArg(IR::Value use_value, HostLocList desired_locations);

    /// Early-use, Destroyed
    Xbyak::Reg64 UseScratchGpr(IR::Value use_value, HostLocList desired_locations = any_gpr) {
        return HostLocToReg64(UseScratchHostLocReg(use_value, desired_locations));
    }
    Xbyak::Xmm UseScratchXmm(IR::Value use_value, HostLocList desired_locations = any_xmm) {
        return HostLocToXmm(UseScratchHostLocReg(use_value, desired_locations));
    }

    /// Early-def, Late-use, single-use
    Xbyak::Reg64 ScratchGpr(HostLocList desired_locations = any_gpr) {
        return HostLocToReg64(ScratchHostLocReg(desired_locations));
    }
    Xbyak::Xmm ScratchXmm(HostLocList desired_locations = any_xmm) {
        return HostLocToXmm(ScratchHostLocReg(desired_locations));
    }
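    // Illustrative sketch of how an emitter routine might drive this interface
    // (a hypothetical function, not taken from emit_x64; GetArg(...) is assumed
    // to be the IR operand accessor):
    //
    //     void EmitAdd(RegAlloc& reg_alloc, BlockOfCode* code, IR::Inst* inst) {
    //         Xbyak::Reg64 result = reg_alloc.UseDefGpr(inst->GetArg(0), inst); // arg0 read early, result written late
    //         Xbyak::Reg64 addend = reg_alloc.UseGpr(inst->GetArg(1));          // arg1 is only read
    //         code->add(result, addend);
    //     }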
    /// Late-def for result register, Early-use for all arguments. Each value is placed into a register according to the host ABI.
    void HostCall(IR::Inst* result_def = nullptr, IR::Value arg0_use = {}, IR::Value arg1_use = {}, IR::Value arg2_use = {}, IR::Value arg3_use = {});
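    // For example, a hypothetical call to a two-argument C++ helper might be set
    // up with:
    //
    //     reg_alloc.HostCall(inst, inst->GetArg(0), inst->GetArg(1));
    //
    // which presumably defs `inst` in the ABI return register and moves the two
    // arguments into the first two ABI parameter registers before the call.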
    // TODO: Values in host flags

    void EndOfAllocScope();

    void AssertNoMoreUses();

    void Reset();
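    // Expected call pattern (an inference from these names, not a documented
    // contract): Reset() at the start of a basic block, EndOfAllocScope() after
    // each IR instruction once its uses and defs have been processed, and
    // AssertNoMoreUses() once emission of the block has finished.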
private:
    HostLoc SelectARegister(HostLocList desired_locations) const;
    boost::optional<HostLoc> ValueLocation(const IR::Inst* value) const;
    bool IsRegisterOccupied(HostLoc loc) const;
    bool IsRegisterAllocated(HostLoc loc) const;
    bool IsLastUse(const IR::Inst* inst) const;

    HostLoc DefHostLocReg(IR::Inst* def_inst, HostLocList desired_locations);
    HostLoc UseDefHostLocReg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
    HostLoc UseDefHostLocReg(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations);
    std::tuple<OpArg, HostLoc> UseDefOpArgHostLocReg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
    HostLoc UseHostLocReg(IR::Value use_value, HostLocList desired_locations);
    HostLoc UseHostLocReg(IR::Inst* use_inst, HostLocList desired_locations);
    std::tuple<HostLoc, bool> UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations);
    HostLoc UseScratchHostLocReg(IR::Value use_value, HostLocList desired_locations);
    HostLoc UseScratchHostLocReg(IR::Inst* use_inst, HostLocList desired_locations);
    HostLoc ScratchHostLocReg(HostLocList desired_locations);

    void EmitMove(HostLoc to, HostLoc from);
    void EmitExchange(HostLoc a, HostLoc b);
    HostLoc LoadImmediateIntoHostLocReg(IR::Value imm, HostLoc reg);

    void SpillRegister(HostLoc loc);
    HostLoc FindFreeSpill() const;

    BlockOfCode* code = nullptr;
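    // One HostLocInfo entry per HostLoc. LocInfo() refuses RSP and R15: RSP is
    // the host stack pointer and R15 appears to be reserved by the backend, so
    // neither location is tracked here.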
    std::array<HostLocInfo, HostLocCount> hostloc_info;

    HostLocInfo& LocInfo(HostLoc loc) {
        DEBUG_ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
        return hostloc_info[static_cast<size_t>(loc)];
    }
    const HostLocInfo& LocInfo(HostLoc loc) const {
        DEBUG_ASSERT(loc != HostLoc::RSP && loc != HostLoc::R15);
        return hostloc_info[static_cast<size_t>(loc)];
    }
};
} // namespace BackendX64
} // namespace Dynarmic