Skip to content

Commit 50ead2e

Browse files
committed
[X86][AsmParser] Avoid duplicated code in MatchAndEmit(ATT/Intel)Instruction, NFC
VEXEncoding_* enumerators are renamed to OpcodePrefix_*. This is in preparation for the upcoming pseudo rex/rex2 prefix support.
1 parent 4f8c961 commit 50ead2e

File tree

1 file changed

+83
-117
lines changed

1 file changed

+83
-117
lines changed

llvm/lib/Target/X86/AsmParser/X86AsmParser.cpp

Lines changed: 83 additions & 117 deletions
Original file line number | Diff line number | Diff line change
@@ -88,15 +88,15 @@ class X86AsmParser : public MCTargetAsmParser {
8888
bool Code16GCC;
8989
unsigned ForcedDataPrefix = 0;
9090

91-
enum VEXEncoding {
92-
VEXEncoding_Default,
93-
VEXEncoding_VEX,
94-
VEXEncoding_VEX2,
95-
VEXEncoding_VEX3,
96-
VEXEncoding_EVEX,
91+
enum OpcodePrefix {
92+
OpcodePrefix_Default,
93+
OpcodePrefix_VEX,
94+
OpcodePrefix_VEX2,
95+
OpcodePrefix_VEX3,
96+
OpcodePrefix_EVEX,
9797
};
9898

99-
VEXEncoding ForcedVEXEncoding = VEXEncoding_Default;
99+
OpcodePrefix ForcedOpcodePrefix = OpcodePrefix_Default;
100100

101101
enum DispEncoding {
102102
DispEncoding_Default,
@@ -1197,12 +1197,11 @@ class X86AsmParser : public MCTargetAsmParser {
11971197
bool ErrorMissingFeature(SMLoc IDLoc, const FeatureBitset &MissingFeatures,
11981198
bool MatchingInlineAsm);
11991199

1200-
bool MatchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode,
1200+
bool matchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode, MCInst &Inst,
12011201
OperandVector &Operands, MCStreamer &Out,
1202-
uint64_t &ErrorInfo,
1203-
bool MatchingInlineAsm);
1202+
uint64_t &ErrorInfo, bool MatchingInlineAsm);
12041203

1205-
bool MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
1204+
bool matchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode, MCInst &Inst,
12061205
OperandVector &Operands, MCStreamer &Out,
12071206
uint64_t &ErrorInfo,
12081207
bool MatchingInlineAsm);
@@ -3186,7 +3185,7 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
31863185
InstInfo = &Info;
31873186

31883187
// Reset the forced VEX encoding.
3189-
ForcedVEXEncoding = VEXEncoding_Default;
3188+
ForcedOpcodePrefix = OpcodePrefix_Default;
31903189
ForcedDispEncoding = DispEncoding_Default;
31913190
UseApxExtendedReg = false;
31923191
ForcedNoFlag = false;
@@ -3203,13 +3202,13 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
32033202
Parser.Lex(); // Eat curly.
32043203

32053204
if (Prefix == "vex")
3206-
ForcedVEXEncoding = VEXEncoding_VEX;
3205+
ForcedOpcodePrefix = OpcodePrefix_VEX;
32073206
else if (Prefix == "vex2")
3208-
ForcedVEXEncoding = VEXEncoding_VEX2;
3207+
ForcedOpcodePrefix = OpcodePrefix_VEX2;
32093208
else if (Prefix == "vex3")
3210-
ForcedVEXEncoding = VEXEncoding_VEX3;
3209+
ForcedOpcodePrefix = OpcodePrefix_VEX3;
32113210
else if (Prefix == "evex")
3212-
ForcedVEXEncoding = VEXEncoding_EVEX;
3211+
ForcedOpcodePrefix = OpcodePrefix_EVEX;
32133212
else if (Prefix == "disp8")
32143213
ForcedDispEncoding = DispEncoding_Disp8;
32153214
else if (Prefix == "disp32")
@@ -3235,15 +3234,15 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
32353234
// Parse MASM style pseudo prefixes.
32363235
if (isParsingMSInlineAsm()) {
32373236
if (Name.equals_insensitive("vex"))
3238-
ForcedVEXEncoding = VEXEncoding_VEX;
3237+
ForcedOpcodePrefix = OpcodePrefix_VEX;
32393238
else if (Name.equals_insensitive("vex2"))
3240-
ForcedVEXEncoding = VEXEncoding_VEX2;
3239+
ForcedOpcodePrefix = OpcodePrefix_VEX2;
32413240
else if (Name.equals_insensitive("vex3"))
3242-
ForcedVEXEncoding = VEXEncoding_VEX3;
3241+
ForcedOpcodePrefix = OpcodePrefix_VEX3;
32433242
else if (Name.equals_insensitive("evex"))
3244-
ForcedVEXEncoding = VEXEncoding_EVEX;
3243+
ForcedOpcodePrefix = OpcodePrefix_EVEX;
32453244

3246-
if (ForcedVEXEncoding != VEXEncoding_Default) {
3245+
if (ForcedOpcodePrefix != OpcodePrefix_Default) {
32473246
if (getLexer().isNot(AsmToken::Identifier))
32483247
return Error(Parser.getTok().getLoc(), "Expected identifier");
32493248
// FIXME: The mnemonic won't match correctly if its not in lower case.
@@ -3741,7 +3740,7 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
37413740
}
37423741

37433742
bool X86AsmParser::processInstruction(MCInst &Inst, const OperandVector &Ops) {
3744-
if (ForcedVEXEncoding != VEXEncoding_VEX3 &&
3743+
if (ForcedOpcodePrefix != OpcodePrefix_VEX3 &&
37453744
X86::optimizeInstFromVEX3ToVEX2(Inst, MII.get(Inst.getOpcode())))
37463745
return true;
37473746

@@ -4002,15 +4001,55 @@ void X86AsmParser::emitInstruction(MCInst &Inst, OperandVector &Operands,
40024001
applyLVILoadHardeningMitigation(Inst, Out);
40034002
}
40044003

4004+
static unsigned getPrefixes(OperandVector &Operands) {
4005+
unsigned Result = 0;
4006+
X86Operand &Prefix = static_cast<X86Operand &>(*Operands.back());
4007+
if (Prefix.isPrefix()) {
4008+
Result = Prefix.getPrefix();
4009+
Operands.pop_back();
4010+
}
4011+
return Result;
4012+
}
4013+
40054014
bool X86AsmParser::MatchAndEmitInstruction(SMLoc IDLoc, unsigned &Opcode,
40064015
OperandVector &Operands,
40074016
MCStreamer &Out, uint64_t &ErrorInfo,
40084017
bool MatchingInlineAsm) {
4009-
if (isParsingIntelSyntax())
4010-
return MatchAndEmitIntelInstruction(IDLoc, Opcode, Operands, Out, ErrorInfo,
4011-
MatchingInlineAsm);
4012-
return MatchAndEmitATTInstruction(IDLoc, Opcode, Operands, Out, ErrorInfo,
4013-
MatchingInlineAsm);
4018+
assert(!Operands.empty() && "Unexpect empty operand list!");
4019+
assert((*Operands[0]).isToken() && "Leading operand should always be a mnemonic!");
4020+
4021+
// First, handle aliases that expand to multiple instructions.
4022+
MatchFPUWaitAlias(IDLoc, static_cast<X86Operand &>(*Operands[0]), Operands,
4023+
Out, MatchingInlineAsm);
4024+
unsigned Prefixes = getPrefixes(Operands);
4025+
4026+
MCInst Inst;
4027+
4028+
// If VEX/EVEX encoding is forced, we need to pass the USE_* flag to the
4029+
// encoder and printer.
4030+
if (ForcedOpcodePrefix == OpcodePrefix_VEX)
4031+
Prefixes |= X86::IP_USE_VEX;
4032+
else if (ForcedOpcodePrefix == OpcodePrefix_VEX2)
4033+
Prefixes |= X86::IP_USE_VEX2;
4034+
else if (ForcedOpcodePrefix == OpcodePrefix_VEX3)
4035+
Prefixes |= X86::IP_USE_VEX3;
4036+
else if (ForcedOpcodePrefix == OpcodePrefix_EVEX)
4037+
Prefixes |= X86::IP_USE_EVEX;
4038+
4039+
// Set encoded flags for {disp8} and {disp32}.
4040+
if (ForcedDispEncoding == DispEncoding_Disp8)
4041+
Prefixes |= X86::IP_USE_DISP8;
4042+
else if (ForcedDispEncoding == DispEncoding_Disp32)
4043+
Prefixes |= X86::IP_USE_DISP32;
4044+
4045+
if (Prefixes)
4046+
Inst.setFlags(Prefixes);
4047+
4048+
return isParsingIntelSyntax()
4049+
? matchAndEmitIntelInstruction(IDLoc, Opcode, Inst, Operands, Out,
4050+
ErrorInfo, MatchingInlineAsm)
4051+
: matchAndEmitATTInstruction(IDLoc, Opcode, Inst, Operands, Out,
4052+
ErrorInfo, MatchingInlineAsm);
40144053
}
40154054

40164055
void X86AsmParser::MatchFPUWaitAlias(SMLoc IDLoc, X86Operand &Op,
@@ -4053,16 +4092,6 @@ bool X86AsmParser::ErrorMissingFeature(SMLoc IDLoc,
40534092
return Error(IDLoc, OS.str(), SMRange(), MatchingInlineAsm);
40544093
}
40554094

4056-
static unsigned getPrefixes(OperandVector &Operands) {
4057-
unsigned Result = 0;
4058-
X86Operand &Prefix = static_cast<X86Operand &>(*Operands.back());
4059-
if (Prefix.isPrefix()) {
4060-
Result = Prefix.getPrefix();
4061-
Operands.pop_back();
4062-
}
4063-
return Result;
4064-
}
4065-
40664095
unsigned X86AsmParser::checkTargetMatchPredicate(MCInst &Inst) {
40674096
unsigned Opc = Inst.getOpcode();
40684097
const MCInstrDesc &MCID = MII.get(Opc);
@@ -4072,63 +4101,31 @@ unsigned X86AsmParser::checkTargetMatchPredicate(MCInst &Inst) {
40724101
if (ForcedNoFlag == !(MCID.TSFlags & X86II::EVEX_NF) && !X86::isCFCMOVCC(Opc))
40734102
return Match_Unsupported;
40744103

4075-
if (ForcedVEXEncoding == VEXEncoding_EVEX &&
4104+
if (ForcedOpcodePrefix == OpcodePrefix_EVEX &&
40764105
(MCID.TSFlags & X86II::EncodingMask) != X86II::EVEX)
40774106
return Match_Unsupported;
40784107

4079-
if ((ForcedVEXEncoding == VEXEncoding_VEX ||
4080-
ForcedVEXEncoding == VEXEncoding_VEX2 ||
4081-
ForcedVEXEncoding == VEXEncoding_VEX3) &&
4108+
if ((ForcedOpcodePrefix == OpcodePrefix_VEX ||
4109+
ForcedOpcodePrefix == OpcodePrefix_VEX2 ||
4110+
ForcedOpcodePrefix == OpcodePrefix_VEX3) &&
40824111
(MCID.TSFlags & X86II::EncodingMask) != X86II::VEX)
40834112
return Match_Unsupported;
40844113

40854114
if ((MCID.TSFlags & X86II::ExplicitOpPrefixMask) ==
40864115
X86II::ExplicitVEXPrefix &&
4087-
(ForcedVEXEncoding != VEXEncoding_VEX &&
4088-
ForcedVEXEncoding != VEXEncoding_VEX2 &&
4089-
ForcedVEXEncoding != VEXEncoding_VEX3))
4116+
(ForcedOpcodePrefix != OpcodePrefix_VEX &&
4117+
ForcedOpcodePrefix != OpcodePrefix_VEX2 &&
4118+
ForcedOpcodePrefix != OpcodePrefix_VEX3))
40904119
return Match_Unsupported;
40914120

40924121
return Match_Success;
40934122
}
40944123

4095-
bool X86AsmParser::MatchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode,
4096-
OperandVector &Operands,
4097-
MCStreamer &Out,
4098-
uint64_t &ErrorInfo,
4099-
bool MatchingInlineAsm) {
4100-
assert(!Operands.empty() && "Unexpect empty operand list!");
4101-
assert((*Operands[0]).isToken() && "Leading operand should always be a mnemonic!");
4102-
SMRange EmptyRange = std::nullopt;
4103-
4104-
// First, handle aliases that expand to multiple instructions.
4105-
MatchFPUWaitAlias(IDLoc, static_cast<X86Operand &>(*Operands[0]), Operands,
4106-
Out, MatchingInlineAsm);
4124+
bool X86AsmParser::matchAndEmitATTInstruction(
4125+
SMLoc IDLoc, unsigned &Opcode, MCInst &Inst, OperandVector &Operands,
4126+
MCStreamer &Out, uint64_t &ErrorInfo, bool MatchingInlineAsm) {
41074127
X86Operand &Op = static_cast<X86Operand &>(*Operands[0]);
4108-
unsigned Prefixes = getPrefixes(Operands);
4109-
4110-
MCInst Inst;
4111-
4112-
// If VEX/EVEX encoding is forced, we need to pass the USE_* flag to the
4113-
// encoder and printer.
4114-
if (ForcedVEXEncoding == VEXEncoding_VEX)
4115-
Prefixes |= X86::IP_USE_VEX;
4116-
else if (ForcedVEXEncoding == VEXEncoding_VEX2)
4117-
Prefixes |= X86::IP_USE_VEX2;
4118-
else if (ForcedVEXEncoding == VEXEncoding_VEX3)
4119-
Prefixes |= X86::IP_USE_VEX3;
4120-
else if (ForcedVEXEncoding == VEXEncoding_EVEX)
4121-
Prefixes |= X86::IP_USE_EVEX;
4122-
4123-
// Set encoded flags for {disp8} and {disp32}.
4124-
if (ForcedDispEncoding == DispEncoding_Disp8)
4125-
Prefixes |= X86::IP_USE_DISP8;
4126-
else if (ForcedDispEncoding == DispEncoding_Disp32)
4127-
Prefixes |= X86::IP_USE_DISP32;
4128-
4129-
if (Prefixes)
4130-
Inst.setFlags(Prefixes);
4131-
4128+
SMRange EmptyRange = std::nullopt;
41324129
// In 16-bit mode, if data32 is specified, temporarily switch to 32-bit mode
41334130
// when matching the instruction.
41344131
if (ForcedDataPrefix == X86::Is32Bit)
@@ -4350,44 +4347,11 @@ bool X86AsmParser::MatchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode,
43504347
return true;
43514348
}
43524349

4353-
bool X86AsmParser::MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
4354-
OperandVector &Operands,
4355-
MCStreamer &Out,
4356-
uint64_t &ErrorInfo,
4357-
bool MatchingInlineAsm) {
4358-
assert(!Operands.empty() && "Unexpect empty operand list!");
4359-
assert((*Operands[0]).isToken() && "Leading operand should always be a mnemonic!");
4360-
StringRef Mnemonic = (static_cast<X86Operand &>(*Operands[0])).getToken();
4361-
SMRange EmptyRange = std::nullopt;
4362-
StringRef Base = (static_cast<X86Operand &>(*Operands[0])).getToken();
4363-
unsigned Prefixes = getPrefixes(Operands);
4364-
4365-
// First, handle aliases that expand to multiple instructions.
4366-
MatchFPUWaitAlias(IDLoc, static_cast<X86Operand &>(*Operands[0]), Operands, Out, MatchingInlineAsm);
4350+
bool X86AsmParser::matchAndEmitIntelInstruction(
4351+
SMLoc IDLoc, unsigned &Opcode, MCInst &Inst, OperandVector &Operands,
4352+
MCStreamer &Out, uint64_t &ErrorInfo, bool MatchingInlineAsm) {
43674353
X86Operand &Op = static_cast<X86Operand &>(*Operands[0]);
4368-
4369-
MCInst Inst;
4370-
4371-
// If VEX/EVEX encoding is forced, we need to pass the USE_* flag to the
4372-
// encoder and printer.
4373-
if (ForcedVEXEncoding == VEXEncoding_VEX)
4374-
Prefixes |= X86::IP_USE_VEX;
4375-
else if (ForcedVEXEncoding == VEXEncoding_VEX2)
4376-
Prefixes |= X86::IP_USE_VEX2;
4377-
else if (ForcedVEXEncoding == VEXEncoding_VEX3)
4378-
Prefixes |= X86::IP_USE_VEX3;
4379-
else if (ForcedVEXEncoding == VEXEncoding_EVEX)
4380-
Prefixes |= X86::IP_USE_EVEX;
4381-
4382-
// Set encoded flags for {disp8} and {disp32}.
4383-
if (ForcedDispEncoding == DispEncoding_Disp8)
4384-
Prefixes |= X86::IP_USE_DISP8;
4385-
else if (ForcedDispEncoding == DispEncoding_Disp32)
4386-
Prefixes |= X86::IP_USE_DISP32;
4387-
4388-
if (Prefixes)
4389-
Inst.setFlags(Prefixes);
4390-
4354+
SMRange EmptyRange = std::nullopt;
43914355
// Find one unsized memory operand, if present.
43924356
X86Operand *UnsizedMemOp = nullptr;
43934357
for (const auto &Op : Operands) {
@@ -4402,6 +4366,7 @@ bool X86AsmParser::MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
44024366

44034367
// Allow some instructions to have implicitly pointer-sized operands. This is
44044368
// compatible with gas.
4369+
StringRef Mnemonic = (static_cast<X86Operand &>(*Operands[0])).getToken();
44054370
if (UnsizedMemOp) {
44064371
static const char *const PtrSizedInstrs[] = {"call", "jmp", "push"};
44074372
for (const char *Instr : PtrSizedInstrs) {
@@ -4415,6 +4380,7 @@ bool X86AsmParser::MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
44154380
SmallVector<unsigned, 8> Match;
44164381
FeatureBitset ErrorInfoMissingFeatures;
44174382
FeatureBitset MissingFeatures;
4383+
StringRef Base = (static_cast<X86Operand &>(*Operands[0])).getToken();
44184384

44194385
// If unsized push has immediate operand we should default the default pointer
44204386
// size for the size.

0 commit comments

Comments
 (0)