dynarmic/src/backend_x64/reg_alloc.h

/* This file is part of the dynarmic project.
* Copyright (c) 2016 MerryMage
* This software may be used and distributed according to the terms of the GNU
* General Public License version 2 or any later version.
*/

#pragma once

#include <array>
#include <initializer_list>
#include <map>
#include <tuple>
#include <vector>

#include <boost/optional.hpp>

#include "backend_x64/block_of_code.h"
#include "backend_x64/jitstate.h"
#include "common/assert.h"
#include "common/common_types.h"
#include "common/x64/emitter.h"
#include "frontend/ir/microinstruction.h"
#include "frontend/ir/value.h"

namespace Dynarmic {
namespace BackendX64 {

enum class HostLoc {
    // Ordering of the registers is intentional. See also: HostLocToX64.
    RAX, RCX, RDX, RBX, RSP, RBP, RSI, RDI, R8, R9, R10, R11, R12, R13, R14,
    XMM0, XMM1, XMM2, XMM3, XMM4, XMM5, XMM6, XMM7,
    XMM8, XMM9, XMM10, XMM11, XMM12, XMM13, XMM14, XMM15,
    CF, PF, AF, ZF, SF, OF,
    FirstSpill,
};

constexpr size_t HostLocCount = static_cast<size_t>(HostLoc::FirstSpill) + SpillCount;

enum class HostLocState {
    Idle, Def, Use, Scratch
};

inline bool HostLocIsGPR(HostLoc reg) {
    return reg >= HostLoc::RAX && reg <= HostLoc::R14;
}

inline bool HostLocIsXMM(HostLoc reg) {
    return reg >= HostLoc::XMM0 && reg <= HostLoc::XMM15;
}

inline bool HostLocIsRegister(HostLoc reg) {
    return HostLocIsGPR(reg) || HostLocIsXMM(reg);
}

inline bool HostLocIsFlag(HostLoc reg) {
    return reg >= HostLoc::CF && reg <= HostLoc::OF;
}

inline HostLoc HostLocSpill(size_t i) {
    ASSERT_MSG(i < SpillCount, "Invalid spill");
    return static_cast<HostLoc>(static_cast<int>(HostLoc::FirstSpill) + i);
}

inline bool HostLocIsSpill(HostLoc reg) {
    return reg >= HostLoc::FirstSpill && reg <= HostLocSpill(SpillCount - 1);
}
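
// Illustrative only (not part of the original header): spill slots are addressed
// by index relative to HostLoc::FirstSpill. For example:
//
//     HostLoc slot = HostLocSpill(0);  // the first spill location
//     // HostLocIsSpill(slot) == true, HostLocIsRegister(slot) == false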

using HostLocList = std::initializer_list<HostLoc>;

const HostLocList any_gpr = {
    HostLoc::RAX,
    HostLoc::RBX,
    HostLoc::RCX,
    HostLoc::RDX,
    HostLoc::RSI,
    HostLoc::RDI,
    HostLoc::RBP,
    HostLoc::R8,
    HostLoc::R9,
    HostLoc::R10,
    HostLoc::R11,
    HostLoc::R12,
    HostLoc::R13,
    HostLoc::R14,
};

const HostLocList any_xmm = {
    HostLoc::XMM0,
    HostLoc::XMM1,
    HostLoc::XMM2,
    HostLoc::XMM3,
    HostLoc::XMM4,
    HostLoc::XMM5,
    HostLoc::XMM6,
    HostLoc::XMM7,
    HostLoc::XMM8,
    HostLoc::XMM9,
    HostLoc::XMM10,
    HostLoc::XMM11,
    HostLoc::XMM12,
    HostLoc::XMM13,
    HostLoc::XMM14,
    HostLoc::XMM15,
};
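
// Note (illustrative, not from the original source): these lists are meant to be
// passed as the desired_locations argument of the RegAlloc methods below. A
// narrower list pins a value to specific registers, e.g.
//
//     const HostLocList rax_only = { HostLoc::RAX };  // hypothetical example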

class RegAlloc final {
public:
    RegAlloc(BlockOfCode* code) : code(code) {}

    /// Late-def
    Gen::X64Reg DefRegister(IR::Inst* def_inst, HostLocList desired_locations);
    void RegisterAddDef(IR::Inst* def_inst, const IR::Value& use_inst);

    /// Early-use, Late-def
    Gen::X64Reg UseDefRegister(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);
    Gen::X64Reg UseDefRegister(IR::Inst* use_inst, IR::Inst* def_inst, HostLocList desired_locations);
    std::tuple<Gen::OpArg, Gen::X64Reg> UseDefOpArg(IR::Value use_value, IR::Inst* def_inst, HostLocList desired_locations);

    /// Early-use
    Gen::X64Reg UseRegister(IR::Value use_value, HostLocList desired_locations);
    Gen::X64Reg UseRegister(IR::Inst* use_inst, HostLocList desired_locations);
    Gen::OpArg UseOpArg(IR::Value use_value, HostLocList desired_locations);

    /// Early-use, Destroyed
    Gen::X64Reg UseScratchRegister(IR::Value use_value, HostLocList desired_locations);
    Gen::X64Reg UseScratchRegister(IR::Inst* use_inst, HostLocList desired_locations);

    /// Early-def, Late-use, single-use
    Gen::X64Reg ScratchRegister(HostLocList desired_locations);

    /// Late-def for the result register, Early-use for all arguments.
    /// Each value is placed into a register according to the host ABI.
    void HostCall(IR::Inst* result_def = nullptr, IR::Value arg0_use = {}, IR::Value arg1_use = {}, IR::Value arg2_use = {}, IR::Value arg3_use = {});
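
    // Illustrative only: a call site might look like
    //     reg_alloc.HostCall(inst, inst->GetArg(0), inst->GetArg(1));
    // which requests the arguments in the ABI argument registers and defines
    // inst's result in the ABI return register (exact behaviour is determined
    // by the implementation in reg_alloc.cpp).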

    // TODO: Values in host flags

    void DecrementRemainingUses(IR::Inst* value);

    void EndOfAllocScope();
    void AssertNoMoreUses();
    void Reset();

private:
    HostLoc SelectARegister(HostLocList desired_locations) const;
    boost::optional<HostLoc> ValueLocation(IR::Inst* value) const;
    bool IsRegisterOccupied(HostLoc loc) const;
    bool IsRegisterAllocated(HostLoc loc) const;
    bool IsLastUse(IR::Inst* inst) const;

    std::tuple<HostLoc, bool> UseHostLoc(IR::Inst* use_inst, HostLocList desired_locations);

    void EmitMove(HostLoc to, HostLoc from);
    void EmitExchange(HostLoc a, HostLoc b);
    Gen::X64Reg LoadImmediateIntoRegister(IR::Value imm, Gen::X64Reg reg);

    void SpillRegister(HostLoc loc);
    HostLoc FindFreeSpill() const;

    BlockOfCode* code = nullptr;

    struct HostLocInfo {
        std::vector<IR::Inst*> values; // early values
        IR::Inst* def = nullptr;       // late value
        bool is_being_used = false;

        bool IsIdle() const {
            return !is_being_used;
        }
        bool IsScratch() const {
            return is_being_used && !def && values.empty();
        }
        bool IsUse() const {
            return is_being_used && !def && !values.empty();
        }
        bool IsDef() const {
            return is_being_used && def && values.empty();
        }
        bool IsUseDef() const {
            return is_being_used && def && !values.empty();
        }
    };
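
    // Note (illustrative, not from the original source): these predicates mirror
    // HostLocState above -- IsIdle() corresponds to Idle, IsScratch() to Scratch,
    // IsUse() to Use and IsDef() to Def, while IsUseDef() describes a location
    // holding both an early use and a late def for the same instruction.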

    std::array<HostLocInfo, HostLocCount> hostloc_info;

    HostLocInfo& LocInfo(HostLoc loc) {
        return hostloc_info[static_cast<size_t>(loc)];
    }
    const HostLocInfo& LocInfo(HostLoc loc) const {
        return hostloc_info[static_cast<size_t>(loc)];
    }
};
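
// Illustrative sketch only (not part of this header): an emitter for a simple
// two-operand IR instruction might drive RegAlloc roughly like this, assuming
// an IR::Inst* `inst` whose first argument is reused as the destination:
//
//     Gen::X64Reg result = reg_alloc.UseDefRegister(inst->GetArg(0), inst, any_gpr);
//     Gen::OpArg operand = reg_alloc.UseOpArg(inst->GetArg(1), any_gpr);
//     code->ADD(32, Gen::R(result), operand);
//
// UseDefRegister ties the early use of the first argument and the late def of
// the result to the same register, while UseOpArg may hand the second operand
// back wherever it currently lives (for example, a register or a spill slot).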
} // namespace BackendX64
} // namespace Dynarmic