[X86] Rename X86MemoryFoldTableEntry to X86FoldTableEntry, NFCI
Because it's used for entries that fold a load, a store, or a broadcast.
parent d20cc3ef4c
commit bafa51c8a5
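
Editorial note: before reading the diff, here is a minimal standalone sketch (not part of the commit) of what a fold-table entry encodes: a register-form opcode mapped to a memory/broadcast-form opcode, plus flags saying whether the entry folds a load, a store, or a broadcast and which operand index it applies to. The FoldTableEntry name and the flag values below are illustrative stand-ins for X86FoldTableEntry and the real TB_* encoding.

#include <cstdint>
#include <cstdio>

// Illustrative flag bits; the real TB_* constants use a different encoding.
enum : uint16_t {
  TB_INDEX_MASK   = 0x7,     // which operand the memory form replaces
  TB_FOLDED_LOAD  = 1 << 3,  // entry folds a load
  TB_FOLDED_STORE = 1 << 4,  // entry folds a store
  TB_FOLDED_BCAST = 1 << 5,  // entry folds a broadcast load
};

// Illustrative stand-in for X86FoldTableEntry: register-form opcode mapped to
// the memory/broadcast-form opcode, plus flags describing the fold.
struct FoldTableEntry {
  unsigned KeyOp;
  unsigned DstOp;
  uint16_t Flags;
};

int main() {
  FoldTableEntry E{/*KeyOp=*/100, /*DstOp=*/200,
                   static_cast<uint16_t>(2 | TB_FOLDED_LOAD | TB_FOLDED_BCAST)};
  std::printf("operand %u, load=%d store=%d bcast=%d\n",
              unsigned(E.Flags & TB_INDEX_MASK), !!(E.Flags & TB_FOLDED_LOAD),
              !!(E.Flags & TB_FOLDED_STORE), !!(E.Flags & TB_FOLDED_BCAST));
}
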
@@ -357,14 +357,14 @@ bool X86FixupVectorConstantsPass::processInstruction(MachineFunction &MF,
   unsigned OpBcst32 = 0, OpBcst64 = 0;
   unsigned OpNoBcst32 = 0, OpNoBcst64 = 0;
   if (OpSrc32) {
-    if (const X86MemoryFoldTableEntry *Mem2Bcst =
+    if (const X86FoldTableEntry *Mem2Bcst =
             llvm::lookupBroadcastFoldTable(OpSrc32, 32)) {
       OpBcst32 = Mem2Bcst->DstOp;
       OpNoBcst32 = Mem2Bcst->Flags & TB_INDEX_MASK;
     }
   }
   if (OpSrc64) {
-    if (const X86MemoryFoldTableEntry *Mem2Bcst =
+    if (const X86FoldTableEntry *Mem2Bcst =
             llvm::lookupBroadcastFoldTable(OpSrc64, 64)) {
       OpBcst64 = Mem2Bcst->DstOp;
       OpNoBcst64 = Mem2Bcst->Flags & TB_INDEX_MASK;
@@ -23,7 +23,7 @@ using namespace llvm;
 // are currently emitted in X86GenInstrInfo.inc in alphabetical order. Which
 // makes sorting these tables a simple matter of alphabetizing the table.
 #include "X86GenFoldTables.inc"
-static const X86MemoryFoldTableEntry BroadcastTable2[] = {
+static const X86FoldTableEntry BroadcastTable2[] = {
   { X86::VADDPDZ128rr, X86::VADDPDZ128rmb, TB_BCAST_SD },
   { X86::VADDPDZ256rr, X86::VADDPDZ256rmb, TB_BCAST_SD },
   { X86::VADDPDZrr, X86::VADDPDZrmb, TB_BCAST_SD },
@@ -200,7 +200,7 @@ static const X86MemoryFoldTableEntry BroadcastTable2[] = {
   { X86::VXORPSZrr, X86::VXORPSZrmb, TB_BCAST_SS },
 };
 
-static const X86MemoryFoldTableEntry BroadcastTable3[] = {
+static const X86FoldTableEntry BroadcastTable3[] = {
   { X86::VFMADD132PDZ128r, X86::VFMADD132PDZ128mb, TB_BCAST_SD },
   { X86::VFMADD132PDZ256r, X86::VFMADD132PDZ256mb, TB_BCAST_SD },
   { X86::VFMADD132PDZr, X86::VFMADD132PDZmb, TB_BCAST_SD },
@@ -319,7 +319,7 @@ static const X86MemoryFoldTableEntry BroadcastTable3[] = {
 
 // Table to map instructions safe to broadcast using a different width from the
 // element width.
-static const X86MemoryFoldTableEntry BroadcastSizeTable2[] = {
+static const X86FoldTableEntry BroadcastSizeTable2[] = {
   { X86::VANDNPDZ128rr, X86::VANDNPSZ128rmb, TB_BCAST_SS },
   { X86::VANDNPDZ256rr, X86::VANDNPSZ256rmb, TB_BCAST_SS },
   { X86::VANDNPDZrr, X86::VANDNPSZrmb, TB_BCAST_SS },
@@ -370,7 +370,7 @@ static const X86MemoryFoldTableEntry BroadcastSizeTable2[] = {
   { X86::VXORPSZrr, X86::VXORPDZrmb, TB_BCAST_SD },
 };
 
-static const X86MemoryFoldTableEntry BroadcastSizeTable3[] = {
+static const X86FoldTableEntry BroadcastSizeTable3[] = {
   { X86::VPTERNLOGDZ128rri, X86::VPTERNLOGQZ128rmbi, TB_BCAST_Q },
   { X86::VPTERNLOGDZ256rri, X86::VPTERNLOGQZ256rmbi, TB_BCAST_Q },
   { X86::VPTERNLOGDZrri, X86::VPTERNLOGQZrmbi, TB_BCAST_Q },
@@ -379,8 +379,8 @@ static const X86MemoryFoldTableEntry BroadcastSizeTable3[] = {
   { X86::VPTERNLOGQZrri, X86::VPTERNLOGDZrmbi, TB_BCAST_D },
 };
 
-static const X86MemoryFoldTableEntry *
-lookupFoldTableImpl(ArrayRef<X86MemoryFoldTableEntry> Table, unsigned RegOp) {
+static const X86FoldTableEntry *
+lookupFoldTableImpl(ArrayRef<X86FoldTableEntry> Table, unsigned RegOp) {
 #ifndef NDEBUG
 #define CHECK_SORTED_UNIQUE(TABLE) \
   assert(llvm::is_sorted(TABLE) && #TABLE " is not sorted"); \
@@ -405,21 +405,21 @@ lookupFoldTableImpl(ArrayRef<X86MemoryFoldTableEntry> Table, unsigned RegOp) {
   }
 #endif
 
-  const X86MemoryFoldTableEntry *Data = llvm::lower_bound(Table, RegOp);
+  const X86FoldTableEntry *Data = llvm::lower_bound(Table, RegOp);
   if (Data != Table.end() && Data->KeyOp == RegOp &&
       !(Data->Flags & TB_NO_FORWARD))
     return Data;
   return nullptr;
 }
 
-const X86MemoryFoldTableEntry *
+const X86FoldTableEntry *
 llvm::lookupTwoAddrFoldTable(unsigned RegOp) {
   return lookupFoldTableImpl(Table2Addr, RegOp);
 }
 
-const X86MemoryFoldTableEntry *
+const X86FoldTableEntry *
 llvm::lookupFoldTable(unsigned RegOp, unsigned OpNum) {
-  ArrayRef<X86MemoryFoldTableEntry> FoldTable;
+  ArrayRef<X86FoldTableEntry> FoldTable;
   if (OpNum == 0)
     FoldTable = ArrayRef(Table0);
   else if (OpNum == 1)
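
Editorial aside: the lookup in the hunk above is a plain binary search over a table kept sorted by KeyOp. A minimal standalone sketch of that pattern follows; the FoldTableEntry type, opcode numbers, and helper name are illustrative stand-ins, not the real LLVM code.

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

// Illustrative stand-in for X86FoldTableEntry.
struct FoldTableEntry {
  unsigned KeyOp;  // register-form opcode, the sort key
  unsigned DstOp;  // memory-form opcode
  uint16_t Flags;
  friend bool operator<(const FoldTableEntry &LHS, const FoldTableEntry &RHS) {
    return LHS.KeyOp < RHS.KeyOp;
  }
  // Heterogeneous compare so lower_bound can search by opcode directly.
  friend bool operator<(const FoldTableEntry &TE, unsigned Opcode) {
    return TE.KeyOp < Opcode;
  }
};

// Mirrors the shape of lookupFoldTableImpl: assert the table is sorted, then
// binary-search it by opcode.
static const FoldTableEntry *lookupImpl(const std::vector<FoldTableEntry> &Table,
                                        unsigned RegOp) {
  assert(std::is_sorted(Table.begin(), Table.end()) && "table is not sorted");
  auto I = std::lower_bound(Table.begin(), Table.end(), RegOp);
  if (I != Table.end() && I->KeyOp == RegOp)
    return &*I;
  return nullptr;
}

int main() {
  std::vector<FoldTableEntry> Table = {{10, 110, 0}, {20, 120, 0}, {30, 130, 0}};
  if (const FoldTableEntry *E = lookupImpl(Table, 20))
    std::printf("memory-form opcode: %u\n", E->DstOp);
}
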
@@ -442,39 +442,39 @@ namespace {
 // function scope static variable to lazily init the unfolding table.
 struct X86MemUnfoldTable {
   // Stores memory unfolding tables entries sorted by opcode.
-  std::vector<X86MemoryFoldTableEntry> Table;
+  std::vector<X86FoldTableEntry> Table;
 
   X86MemUnfoldTable() {
-    for (const X86MemoryFoldTableEntry &Entry : Table2Addr)
+    for (const X86FoldTableEntry &Entry : Table2Addr)
       // Index 0, folded load and store, no alignment requirement.
       addTableEntry(Entry, TB_INDEX_0 | TB_FOLDED_LOAD | TB_FOLDED_STORE);
 
-    for (const X86MemoryFoldTableEntry &Entry : Table0)
+    for (const X86FoldTableEntry &Entry : Table0)
       // Index 0, mix of loads and stores.
      addTableEntry(Entry, TB_INDEX_0);
 
-    for (const X86MemoryFoldTableEntry &Entry : Table1)
+    for (const X86FoldTableEntry &Entry : Table1)
       // Index 1, folded load
      addTableEntry(Entry, TB_INDEX_1 | TB_FOLDED_LOAD);
 
-    for (const X86MemoryFoldTableEntry &Entry : Table2)
+    for (const X86FoldTableEntry &Entry : Table2)
       // Index 2, folded load
      addTableEntry(Entry, TB_INDEX_2 | TB_FOLDED_LOAD);
 
-    for (const X86MemoryFoldTableEntry &Entry : Table3)
+    for (const X86FoldTableEntry &Entry : Table3)
       // Index 3, folded load
      addTableEntry(Entry, TB_INDEX_3 | TB_FOLDED_LOAD);
 
-    for (const X86MemoryFoldTableEntry &Entry : Table4)
+    for (const X86FoldTableEntry &Entry : Table4)
       // Index 4, folded load
      addTableEntry(Entry, TB_INDEX_4 | TB_FOLDED_LOAD);
 
     // Broadcast tables.
-    for (const X86MemoryFoldTableEntry &Entry : BroadcastTable2)
+    for (const X86FoldTableEntry &Entry : BroadcastTable2)
       // Index 2, folded broadcast
      addTableEntry(Entry, TB_INDEX_2 | TB_FOLDED_LOAD | TB_FOLDED_BCAST);
 
-    for (const X86MemoryFoldTableEntry &Entry : BroadcastTable3)
+    for (const X86FoldTableEntry &Entry : BroadcastTable3)
       // Index 3, folded broadcast
      addTableEntry(Entry, TB_INDEX_3 | TB_FOLDED_LOAD | TB_FOLDED_BCAST);
 
@@ -486,7 +486,7 @@ struct X86MemUnfoldTable {
            "Memory unfolding table is not unique!");
   }
 
-  void addTableEntry(const X86MemoryFoldTableEntry &Entry,
+  void addTableEntry(const X86FoldTableEntry &Entry,
                      uint16_t ExtraFlags) {
     // NOTE: This swaps the KeyOp and DstOp in the table so we can sort it.
     if ((Entry.Flags & TB_NO_REVERSE) == 0)
@@ -496,7 +496,7 @@ struct X86MemUnfoldTable {
 };
 }
 
-const X86MemoryFoldTableEntry *
+const X86FoldTableEntry *
 llvm::lookupUnfoldTable(unsigned MemOp) {
   static X86MemUnfoldTable MemUnfoldTable;
   auto &Table = MemUnfoldTable.Table;
@@ -510,26 +510,26 @@ namespace {
 
 // This class stores the memory -> broadcast folding tables. It is instantiated
 // as a function scope static variable to lazily init the folding table.
-struct X86MemBroadcastFoldTable {
+struct X86BroadcastFoldTable {
   // Stores memory broadcast folding tables entries sorted by opcode.
-  std::vector<X86MemoryFoldTableEntry> Table;
+  std::vector<X86FoldTableEntry> Table;
 
-  X86MemBroadcastFoldTable() {
+  X86BroadcastFoldTable() {
     // Broadcast tables.
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastTable2) {
+    for (const X86FoldTableEntry &Reg2Bcst : BroadcastTable2) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
-      if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 2)) {
+      if (const X86FoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 2)) {
         unsigned MemOp = Reg2Mem->DstOp;
         uint16_t Flags = Reg2Mem->Flags | Reg2Bcst.Flags | TB_INDEX_2 |
                          TB_FOLDED_LOAD | TB_FOLDED_BCAST;
         Table.push_back({MemOp, BcstOp, Flags});
       }
     }
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastSizeTable2) {
+    for (const X86FoldTableEntry &Reg2Bcst : BroadcastSizeTable2) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
-      if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 2)) {
+      if (const X86FoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 2)) {
         unsigned MemOp = Reg2Mem->DstOp;
         uint16_t Flags = Reg2Mem->Flags | Reg2Bcst.Flags | TB_INDEX_2 |
                          TB_FOLDED_LOAD | TB_FOLDED_BCAST;
@@ -537,20 +537,20 @@ struct X86MemBroadcastFoldTable {
       }
     }
 
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastTable3) {
+    for (const X86FoldTableEntry &Reg2Bcst : BroadcastTable3) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
-      if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 3)) {
+      if (const X86FoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 3)) {
         unsigned MemOp = Reg2Mem->DstOp;
         uint16_t Flags = Reg2Mem->Flags | Reg2Bcst.Flags | TB_INDEX_3 |
                          TB_FOLDED_LOAD | TB_FOLDED_BCAST;
         Table.push_back({MemOp, BcstOp, Flags});
       }
     }
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastSizeTable3) {
+    for (const X86FoldTableEntry &Reg2Bcst : BroadcastSizeTable3) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
-      if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 3)) {
+      if (const X86FoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 3)) {
         unsigned MemOp = Reg2Mem->DstOp;
         uint16_t Flags = Reg2Mem->Flags | Reg2Bcst.Flags | TB_INDEX_3 |
                          TB_FOLDED_LOAD | TB_FOLDED_BCAST;
@@ -564,7 +564,7 @@ struct X86MemBroadcastFoldTable {
 };
 } // namespace
 
-static bool matchBroadcastSize(const X86MemoryFoldTableEntry &Entry,
+static bool matchBroadcastSize(const X86FoldTableEntry &Entry,
                                unsigned BroadcastBits) {
   switch (Entry.Flags & TB_BCAST_MASK) {
   case TB_BCAST_SD:
@@ -577,10 +577,10 @@ static bool matchBroadcastSize(const X86MemoryFoldTableEntry &Entry,
   return false;
 }
 
-const X86MemoryFoldTableEntry *
+const X86FoldTableEntry *
 llvm::lookupBroadcastFoldTable(unsigned MemOp, unsigned BroadcastBits) {
-  static X86MemBroadcastFoldTable MemBroadcastFoldTable;
-  auto &Table = MemBroadcastFoldTable.Table;
+  static X86BroadcastFoldTable BroadcastFoldTable;
+  auto &Table = BroadcastFoldTable.Table;
   for (auto I = llvm::lower_bound(Table, MemOp);
        I != Table.end() && I->KeyOp == MemOp; ++I) {
     if (matchBroadcastSize(*I, BroadcastBits))
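
One more editorial sketch, to show the shape of the broadcast lookup above: the table is built lazily inside a function-scope static, may hold several entries for the same opcode, and the caller walks the equal-key range from lower_bound until the broadcast width matches (the role matchBroadcastSize plays in the real code). Names, opcode numbers, and the width-in-Flags convention below are invented for the example.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct FoldTableEntry {
  unsigned KeyOp;
  unsigned DstOp;
  uint16_t Flags;  // here: the broadcast element width in bits, for simplicity
  friend bool operator<(const FoldTableEntry &LHS, const FoldTableEntry &RHS) {
    return LHS.KeyOp < RHS.KeyOp;
  }
  friend bool operator<(const FoldTableEntry &TE, unsigned Opcode) {
    return TE.KeyOp < Opcode;
  }
};

// Lazy function-scope static table, possibly with duplicate keys, filtered by
// broadcast width while scanning the equal-key range.
static const FoldTableEntry *lookupBroadcast(unsigned MemOp, unsigned Bits) {
  static const std::vector<FoldTableEntry> Table = [] {
    std::vector<FoldTableEntry> T = {{10, 210, 32}, {10, 211, 64}, {20, 220, 32}};
    std::sort(T.begin(), T.end());  // keep the table sorted for lower_bound
    return T;
  }();
  for (auto I = std::lower_bound(Table.begin(), Table.end(), MemOp);
       I != Table.end() && I->KeyOp == MemOp; ++I)
    if (I->Flags == Bits)           // stand-in for matchBroadcastSize()
      return &*I;
  return nullptr;
}

int main() {
  if (const FoldTableEntry *E = lookupBroadcast(10, 64))
    std::printf("broadcast opcode: %u\n", E->DstOp);
}
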
@@ -20,37 +20,37 @@ namespace llvm {
 
 // This struct is used for both the folding and unfold tables. They KeyOp
 // is used to determine the sorting order.
-struct X86MemoryFoldTableEntry {
+struct X86FoldTableEntry {
   unsigned KeyOp;
   unsigned DstOp;
   uint16_t Flags;
 
-  bool operator<(const X86MemoryFoldTableEntry &RHS) const {
+  bool operator<(const X86FoldTableEntry &RHS) const {
     return KeyOp < RHS.KeyOp;
   }
-  bool operator==(const X86MemoryFoldTableEntry &RHS) const {
+  bool operator==(const X86FoldTableEntry &RHS) const {
     return KeyOp == RHS.KeyOp;
   }
-  friend bool operator<(const X86MemoryFoldTableEntry &TE, unsigned Opcode) {
+  friend bool operator<(const X86FoldTableEntry &TE, unsigned Opcode) {
     return TE.KeyOp < Opcode;
   }
 };
 
 // Look up the memory folding table entry for folding a load and a store into
 // operand 0.
-const X86MemoryFoldTableEntry *lookupTwoAddrFoldTable(unsigned RegOp);
+const X86FoldTableEntry *lookupTwoAddrFoldTable(unsigned RegOp);
 
 // Look up the memory folding table entry for folding a load or store with
 // operand OpNum.
-const X86MemoryFoldTableEntry *lookupFoldTable(unsigned RegOp, unsigned OpNum);
+const X86FoldTableEntry *lookupFoldTable(unsigned RegOp, unsigned OpNum);
 
 // Look up the memory unfolding table entry for this instruction.
-const X86MemoryFoldTableEntry *lookupUnfoldTable(unsigned MemOp);
+const X86FoldTableEntry *lookupUnfoldTable(unsigned MemOp);
 
-// Look up the broadcast memory folding table entry for this instruction from
+// Look up the broadcast folding table entry for this instruction from
 // the regular memory instruction.
-const X86MemoryFoldTableEntry *lookupBroadcastFoldTable(unsigned MemOp,
-                                                        unsigned BroadcastBits);
+const X86FoldTableEntry *lookupBroadcastFoldTable(unsigned MemOp,
+                                                  unsigned BroadcastBits);
 
 } // namespace llvm
 
@@ -6596,7 +6596,7 @@ MachineInstr *X86InstrInfo::foldMemoryOperandImpl(
           MF, MI, OpNum, MOs, InsertPt, Size, Alignment))
     return CustomMI;
 
-  const X86MemoryFoldTableEntry *I = nullptr;
+  const X86FoldTableEntry *I = nullptr;
 
   // Folding a memory location into the two-address part of a two-address
   // instruction is different than folding it other places. It requires
@@ -7304,7 +7304,7 @@ extractStoreMMOs(ArrayRef<MachineMemOperand *> MMOs, MachineFunction &MF) {
   return StoreMMOs;
 }
 
-static unsigned getBroadcastOpcode(const X86MemoryFoldTableEntry *I,
+static unsigned getBroadcastOpcode(const X86FoldTableEntry *I,
                                    const TargetRegisterClass *RC,
                                    const X86Subtarget &STI) {
   assert(STI.hasAVX512() && "Expected at least AVX512!");
@@ -7352,7 +7352,7 @@ static unsigned getBroadcastOpcode(const X86MemoryFoldTableEntry *I,
 bool X86InstrInfo::unfoldMemoryOperand(
     MachineFunction &MF, MachineInstr &MI, unsigned Reg, bool UnfoldLoad,
     bool UnfoldStore, SmallVectorImpl<MachineInstr *> &NewMIs) const {
-  const X86MemoryFoldTableEntry *I = lookupUnfoldTable(MI.getOpcode());
+  const X86FoldTableEntry *I = lookupUnfoldTable(MI.getOpcode());
   if (I == nullptr)
     return false;
   unsigned Opc = I->DstOp;
@@ -7494,7 +7494,7 @@ X86InstrInfo::unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
   if (!N->isMachineOpcode())
     return false;
 
-  const X86MemoryFoldTableEntry *I = lookupUnfoldTable(N->getMachineOpcode());
+  const X86FoldTableEntry *I = lookupUnfoldTable(N->getMachineOpcode());
   if (I == nullptr)
     return false;
   unsigned Opc = I->DstOp;
@@ -7617,7 +7617,7 @@ X86InstrInfo::unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
 unsigned X86InstrInfo::getOpcodeAfterMemoryUnfold(unsigned Opc,
                                       bool UnfoldLoad, bool UnfoldStore,
                                       unsigned *LoadRegIndex) const {
-  const X86MemoryFoldTableEntry *I = lookupUnfoldTable(Opc);
+  const X86FoldTableEntry *I = lookupUnfoldTable(Opc);
   if (I == nullptr)
     return 0;
   bool FoldedLoad = I->Flags & TB_FOLDED_LOAD;
@@ -1,4 +1,4 @@
-static const X86MemoryFoldTableEntry Table2Addr[] = {
+static const X86FoldTableEntry Table2Addr[] = {
     {X86::ADD16ri_DB, X86::ADD16mi, TB_NO_REVERSE},
     {X86::ADD16rr_DB, X86::ADD16mr, TB_NO_REVERSE},
     {X86::ADD32ri_DB, X86::ADD32mi, TB_NO_REVERSE},
@@ -214,7 +214,7 @@ static const X86MemoryFoldTableEntry Table2Addr[] = {
     {X86::XOR8rr, X86::XOR8mr, TB_NO_REVERSE},
 };
 
-static const X86MemoryFoldTableEntry Table0[] = {
+static const X86FoldTableEntry Table0[] = {
     {X86::BT16ri8, X86::BT16mi8, TB_FOLDED_LOAD},
     {X86::BT32ri8, X86::BT32mi8, TB_FOLDED_LOAD},
     {X86::BT64ri8, X86::BT64mi8, TB_FOLDED_LOAD},
@@ -407,7 +407,7 @@ static const X86MemoryFoldTableEntry Table0[] = {
     {X86::VPMOVWBZrr, X86::VPMOVWBZmr, TB_FOLDED_STORE},
 };
 
-static const X86MemoryFoldTableEntry Table1[] = {
+static const X86FoldTableEntry Table1[] = {
     {X86::AESIMCrr, X86::AESIMCrm, TB_ALIGN_16},
     {X86::AESKEYGENASSIST128rr, X86::AESKEYGENASSIST128rm, TB_ALIGN_16},
    {X86::BEXTR32rr, X86::BEXTR32rm, 0},
@@ -1294,7 +1294,7 @@ static const X86MemoryFoldTableEntry Table1[] = {
     {X86::VUCOMISSrr_Int, X86::VUCOMISSrm_Int, TB_NO_REVERSE},
 };
 
-static const X86MemoryFoldTableEntry Table2[] = {
+static const X86FoldTableEntry Table2[] = {
     {X86::ADD16rr_DB, X86::ADD16rm, TB_NO_REVERSE},
     {X86::ADD32rr_DB, X86::ADD32rm, TB_NO_REVERSE},
     {X86::ADD64rr_DB, X86::ADD64rm, TB_NO_REVERSE},
@@ -3251,7 +3251,7 @@ static const X86MemoryFoldTableEntry Table2[] = {
     {X86::XORPSrr, X86::XORPSrm, TB_ALIGN_16},
 };
 
-static const X86MemoryFoldTableEntry Table3[] = {
+static const X86FoldTableEntry Table3[] = {
     {X86::VADDPDZ128rrkz, X86::VADDPDZ128rmkz, 0},
     {X86::VADDPDZ256rrkz, X86::VADDPDZ256rmkz, 0},
     {X86::VADDPDZrrkz, X86::VADDPDZrmkz, 0},
@@ -4861,7 +4861,7 @@ static const X86MemoryFoldTableEntry Table3[] = {
     {X86::VXORPSZrrkz, X86::VXORPSZrmkz, 0},
 };
 
-static const X86MemoryFoldTableEntry Table4[] = {
+static const X86FoldTableEntry Table4[] = {
     {X86::VADDPDZ128rrk, X86::VADDPDZ128rmk, 0},
     {X86::VADDPDZ256rrk, X86::VADDPDZ256rmk, 0},
     {X86::VADDPDZrrk, X86::VADDPDZrmk, 0},
@@ -174,10 +174,10 @@ private:
                       unsigned FoldedIdx, bool isManual);
 
   // Print the given table as a static const C++ array of type
-  // X86MemoryFoldTableEntry.
+  // X86FoldTableEntry.
   void printTable(const FoldTable &Table, StringRef TableName,
                   formatted_raw_ostream &OS) {
-    OS << "static const X86MemoryFoldTableEntry " << TableName << "[] = {\n";
+    OS << "static const X86FoldTableEntry " << TableName << "[] = {\n";
 
     for (auto &E : Table)
       E.second.print(OS);