@@ -88,15 +88,15 @@ class X86AsmParser : public MCTargetAsmParser {
   bool Code16GCC;
   unsigned ForcedDataPrefix = 0;

-  enum VEXEncoding {
-    VEXEncoding_Default,
-    VEXEncoding_VEX,
-    VEXEncoding_VEX2,
-    VEXEncoding_VEX3,
-    VEXEncoding_EVEX,
+  enum OpcodePrefix {
+    OpcodePrefix_Default,
+    OpcodePrefix_VEX,
+    OpcodePrefix_VEX2,
+    OpcodePrefix_VEX3,
+    OpcodePrefix_EVEX,
   };

-  VEXEncoding ForcedVEXEncoding = VEXEncoding_Default;
+  OpcodePrefix ForcedOpcodePrefix = OpcodePrefix_Default;

   enum DispEncoding {
     DispEncoding_Default,
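For context, these enumerators model the explicit-encoding pseudo-prefixes the parser accepts in assembly source. A minimal AT&T-syntax sketch (the `vpaddd` operands are illustrative choices, not taken from this patch):

```asm
{vex}  vpaddd %xmm1, %xmm2, %xmm3   # request a VEX prefix (2- or 3-byte form)
{vex2} vpaddd %xmm1, %xmm2, %xmm3   # force the 2-byte VEX prefix
{vex3} vpaddd %xmm1, %xmm2, %xmm3   # force the 3-byte VEX prefix
{evex} vpaddd %xmm1, %xmm2, %xmm3   # force an EVEX prefix
```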
@@ -1197,12 +1197,11 @@ class X86AsmParser : public MCTargetAsmParser {
   bool ErrorMissingFeature(SMLoc IDLoc, const FeatureBitset &MissingFeatures,
                            bool MatchingInlineAsm);

-  bool MatchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode,
+  bool matchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode, MCInst &Inst,
                                   OperandVector &Operands, MCStreamer &Out,
-                                  uint64_t &ErrorInfo,
-                                  bool MatchingInlineAsm);
+                                  uint64_t &ErrorInfo, bool MatchingInlineAsm);

-  bool MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
+  bool matchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode, MCInst &Inst,
                                     OperandVector &Operands, MCStreamer &Out,
                                     uint64_t &ErrorInfo,
                                     bool MatchingInlineAsm);
@@ -3186,7 +3185,7 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
   InstInfo = &Info;

   // Reset the forced VEX encoding.
-  ForcedVEXEncoding = VEXEncoding_Default;
+  ForcedOpcodePrefix = OpcodePrefix_Default;
   ForcedDispEncoding = DispEncoding_Default;
   UseApxExtendedReg = false;
   ForcedNoFlag = false;
@@ -3203,13 +3202,13 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
     Parser.Lex(); // Eat curly.

     if (Prefix == "vex")
-      ForcedVEXEncoding = VEXEncoding_VEX;
+      ForcedOpcodePrefix = OpcodePrefix_VEX;
     else if (Prefix == "vex2")
-      ForcedVEXEncoding = VEXEncoding_VEX2;
+      ForcedOpcodePrefix = OpcodePrefix_VEX2;
     else if (Prefix == "vex3")
-      ForcedVEXEncoding = VEXEncoding_VEX3;
+      ForcedOpcodePrefix = OpcodePrefix_VEX3;
     else if (Prefix == "evex")
-      ForcedVEXEncoding = VEXEncoding_EVEX;
+      ForcedOpcodePrefix = OpcodePrefix_EVEX;
     else if (Prefix == "disp8")
       ForcedDispEncoding = DispEncoding_Disp8;
     else if (Prefix == "disp32")
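The same brace syntax also selects a displacement width, which is what `ForcedDispEncoding` records. A small AT&T-syntax illustration (operands again chosen for the example):

```asm
{disp8}  movl %eax, (%rbx)    # encode the displacement as an 8-bit field (disp8 of 0)
{disp32} movl %eax, 4(%rbx)   # widen the displacement to a 32-bit field
```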
@@ -3235,15 +3234,15 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
   // Parse MASM style pseudo prefixes.
   if (isParsingMSInlineAsm()) {
     if (Name.equals_insensitive("vex"))
-      ForcedVEXEncoding = VEXEncoding_VEX;
+      ForcedOpcodePrefix = OpcodePrefix_VEX;
     else if (Name.equals_insensitive("vex2"))
-      ForcedVEXEncoding = VEXEncoding_VEX2;
+      ForcedOpcodePrefix = OpcodePrefix_VEX2;
     else if (Name.equals_insensitive("vex3"))
-      ForcedVEXEncoding = VEXEncoding_VEX3;
+      ForcedOpcodePrefix = OpcodePrefix_VEX3;
     else if (Name.equals_insensitive("evex"))
-      ForcedVEXEncoding = VEXEncoding_EVEX;
+      ForcedOpcodePrefix = OpcodePrefix_EVEX;

-    if (ForcedVEXEncoding != VEXEncoding_Default) {
+    if (ForcedOpcodePrefix != OpcodePrefix_Default) {
       if (getLexer().isNot(AsmToken::Identifier))
         return Error(Parser.getTok().getLoc(), "Expected identifier");
       // FIXME: The mnemonic won't match correctly if its not in lower case.
@@ -3741,7 +3740,7 @@ bool X86AsmParser::ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
 }

 bool X86AsmParser::processInstruction(MCInst &Inst, const OperandVector &Ops) {
-  if (ForcedVEXEncoding != VEXEncoding_VEX3 &&
+  if (ForcedOpcodePrefix != OpcodePrefix_VEX3 &&
       X86::optimizeInstFromVEX3ToVEX2(Inst, MII.get(Inst.getOpcode())))
     return true;

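The effect of that guard, sketched in AT&T syntax (encoding bytes shown for illustration): without a forced prefix the parser shrinks eligible 3-byte VEX forms to the 2-byte form, while `{vex3}` suppresses the optimization.

```asm
vpaddd %xmm1, %xmm2, %xmm3          # default: shrunk to the 2-byte VEX prefix (C5 ...)
{vex3} vpaddd %xmm1, %xmm2, %xmm3   # the 3-byte VEX prefix (C4 ...) is preserved
```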
@@ -4002,15 +4001,55 @@ void X86AsmParser::emitInstruction(MCInst &Inst, OperandVector &Operands,
     applyLVILoadHardeningMitigation(Inst, Out);
 }

+static unsigned getPrefixes(OperandVector &Operands) {
+  unsigned Result = 0;
+  X86Operand &Prefix = static_cast<X86Operand &>(*Operands.back());
+  if (Prefix.isPrefix()) {
+    Result = Prefix.getPrefix();
+    Operands.pop_back();
+  }
+  return Result;
+}
+
 bool X86AsmParser::MatchAndEmitInstruction(SMLoc IDLoc, unsigned &Opcode,
                                            OperandVector &Operands,
                                            MCStreamer &Out, uint64_t &ErrorInfo,
                                            bool MatchingInlineAsm) {
-  if (isParsingIntelSyntax())
-    return MatchAndEmitIntelInstruction(IDLoc, Opcode, Operands, Out, ErrorInfo,
-                                        MatchingInlineAsm);
-  return MatchAndEmitATTInstruction(IDLoc, Opcode, Operands, Out, ErrorInfo,
-                                    MatchingInlineAsm);
+  assert(!Operands.empty() && "Unexpect empty operand list!");
+  assert((*Operands[0]).isToken() && "Leading operand should always be a mnemonic!");
+
+  // First, handle aliases that expand to multiple instructions.
+  MatchFPUWaitAlias(IDLoc, static_cast<X86Operand &>(*Operands[0]), Operands,
+                    Out, MatchingInlineAsm);
+  unsigned Prefixes = getPrefixes(Operands);
+
+  MCInst Inst;
+
+  // If VEX/EVEX encoding is forced, we need to pass the USE_* flag to the
+  // encoder and printer.
+  if (ForcedOpcodePrefix == OpcodePrefix_VEX)
+    Prefixes |= X86::IP_USE_VEX;
+  else if (ForcedOpcodePrefix == OpcodePrefix_VEX2)
+    Prefixes |= X86::IP_USE_VEX2;
+  else if (ForcedOpcodePrefix == OpcodePrefix_VEX3)
+    Prefixes |= X86::IP_USE_VEX3;
+  else if (ForcedOpcodePrefix == OpcodePrefix_EVEX)
+    Prefixes |= X86::IP_USE_EVEX;
+
+  // Set encoded flags for {disp8} and {disp32}.
+  if (ForcedDispEncoding == DispEncoding_Disp8)
+    Prefixes |= X86::IP_USE_DISP8;
+  else if (ForcedDispEncoding == DispEncoding_Disp32)
+    Prefixes |= X86::IP_USE_DISP32;
+
+  if (Prefixes)
+    Inst.setFlags(Prefixes);
+
+  return isParsingIntelSyntax()
+             ? matchAndEmitIntelInstruction(IDLoc, Opcode, Inst, Operands, Out,
+                                            ErrorInfo, MatchingInlineAsm)
+             : matchAndEmitATTInstruction(IDLoc, Opcode, Inst, Operands, Out,
+                                          ErrorInfo, MatchingInlineAsm);
 }

 void X86AsmParser::MatchFPUWaitAlias(SMLoc IDLoc, X86Operand &Op,
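Because the prefix-flag setup now runs once before the dialect dispatch, both syntaxes take the same path. A hedged Intel-syntax counterpart to the earlier AT&T examples (exact accepted spelling may depend on the assembler dialect settings):

```asm
{evex}   vaddps xmm1, xmm2, xmm3    ; forced EVEX encoding, same flag path as AT&T
{disp32} mov eax, dword ptr [ebx]   ; forced 32-bit displacement
```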
@@ -4053,16 +4092,6 @@ bool X86AsmParser::ErrorMissingFeature(SMLoc IDLoc,
   return Error(IDLoc, OS.str(), SMRange(), MatchingInlineAsm);
 }

-static unsigned getPrefixes(OperandVector &Operands) {
-  unsigned Result = 0;
-  X86Operand &Prefix = static_cast<X86Operand &>(*Operands.back());
-  if (Prefix.isPrefix()) {
-    Result = Prefix.getPrefix();
-    Operands.pop_back();
-  }
-  return Result;
-}
-
 unsigned X86AsmParser::checkTargetMatchPredicate(MCInst &Inst) {
   unsigned Opc = Inst.getOpcode();
   const MCInstrDesc &MCID = MII.get(Opc);
@@ -4072,63 +4101,31 @@ unsigned X86AsmParser::checkTargetMatchPredicate(MCInst &Inst) {
   if (ForcedNoFlag == !(MCID.TSFlags & X86II::EVEX_NF) && !X86::isCFCMOVCC(Opc))
     return Match_Unsupported;

-  if (ForcedVEXEncoding == VEXEncoding_EVEX &&
+  if (ForcedOpcodePrefix == OpcodePrefix_EVEX &&
       (MCID.TSFlags & X86II::EncodingMask) != X86II::EVEX)
     return Match_Unsupported;

-  if ((ForcedVEXEncoding == VEXEncoding_VEX ||
-       ForcedVEXEncoding == VEXEncoding_VEX2 ||
-       ForcedVEXEncoding == VEXEncoding_VEX3) &&
+  if ((ForcedOpcodePrefix == OpcodePrefix_VEX ||
+       ForcedOpcodePrefix == OpcodePrefix_VEX2 ||
+       ForcedOpcodePrefix == OpcodePrefix_VEX3) &&
       (MCID.TSFlags & X86II::EncodingMask) != X86II::VEX)
     return Match_Unsupported;

   if ((MCID.TSFlags & X86II::ExplicitOpPrefixMask) ==
           X86II::ExplicitVEXPrefix &&
-      (ForcedVEXEncoding != VEXEncoding_VEX &&
-       ForcedVEXEncoding != VEXEncoding_VEX2 &&
-       ForcedVEXEncoding != VEXEncoding_VEX3))
+      (ForcedOpcodePrefix != OpcodePrefix_VEX &&
+       ForcedOpcodePrefix != OpcodePrefix_VEX2 &&
+       ForcedOpcodePrefix != OpcodePrefix_VEX3))
     return Match_Unsupported;

   return Match_Success;
 }

-bool X86AsmParser::MatchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode,
-                                              OperandVector &Operands,
-                                              MCStreamer &Out,
-                                              uint64_t &ErrorInfo,
-                                              bool MatchingInlineAsm) {
-  assert(!Operands.empty() && "Unexpect empty operand list!");
-  assert((*Operands[0]).isToken() && "Leading operand should always be a mnemonic!");
-  SMRange EmptyRange = std::nullopt;
-
-  // First, handle aliases that expand to multiple instructions.
-  MatchFPUWaitAlias(IDLoc, static_cast<X86Operand &>(*Operands[0]), Operands,
-                    Out, MatchingInlineAsm);
+bool X86AsmParser::matchAndEmitATTInstruction(
+    SMLoc IDLoc, unsigned &Opcode, MCInst &Inst, OperandVector &Operands,
+    MCStreamer &Out, uint64_t &ErrorInfo, bool MatchingInlineAsm) {
   X86Operand &Op = static_cast<X86Operand &>(*Operands[0]);
-  unsigned Prefixes = getPrefixes(Operands);
-
-  MCInst Inst;
-
-  // If VEX/EVEX encoding is forced, we need to pass the USE_* flag to the
-  // encoder and printer.
-  if (ForcedVEXEncoding == VEXEncoding_VEX)
-    Prefixes |= X86::IP_USE_VEX;
-  else if (ForcedVEXEncoding == VEXEncoding_VEX2)
-    Prefixes |= X86::IP_USE_VEX2;
-  else if (ForcedVEXEncoding == VEXEncoding_VEX3)
-    Prefixes |= X86::IP_USE_VEX3;
-  else if (ForcedVEXEncoding == VEXEncoding_EVEX)
-    Prefixes |= X86::IP_USE_EVEX;
-
-  // Set encoded flags for {disp8} and {disp32}.
-  if (ForcedDispEncoding == DispEncoding_Disp8)
-    Prefixes |= X86::IP_USE_DISP8;
-  else if (ForcedDispEncoding == DispEncoding_Disp32)
-    Prefixes |= X86::IP_USE_DISP32;
-
-  if (Prefixes)
-    Inst.setFlags(Prefixes);
-
+  SMRange EmptyRange = std::nullopt;
   // In 16-bit mode, if data32 is specified, temporarily switch to 32-bit mode
   // when matching the instruction.
   if (ForcedDataPrefix == X86::Is32Bit)
@@ -4350,44 +4347,11 @@ bool X86AsmParser::MatchAndEmitATTInstruction(SMLoc IDLoc, unsigned &Opcode,
     return true;
 }

-bool X86AsmParser::MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
-                                                OperandVector &Operands,
-                                                MCStreamer &Out,
-                                                uint64_t &ErrorInfo,
-                                                bool MatchingInlineAsm) {
-  assert(!Operands.empty() && "Unexpect empty operand list!");
-  assert((*Operands[0]).isToken() && "Leading operand should always be a mnemonic!");
-  StringRef Mnemonic = (static_cast<X86Operand &>(*Operands[0])).getToken();
-  SMRange EmptyRange = std::nullopt;
-  StringRef Base = (static_cast<X86Operand &>(*Operands[0])).getToken();
-  unsigned Prefixes = getPrefixes(Operands);
-
-  // First, handle aliases that expand to multiple instructions.
-  MatchFPUWaitAlias(IDLoc, static_cast<X86Operand &>(*Operands[0]), Operands, Out, MatchingInlineAsm);
+bool X86AsmParser::matchAndEmitIntelInstruction(
+    SMLoc IDLoc, unsigned &Opcode, MCInst &Inst, OperandVector &Operands,
+    MCStreamer &Out, uint64_t &ErrorInfo, bool MatchingInlineAsm) {
   X86Operand &Op = static_cast<X86Operand &>(*Operands[0]);
-
-  MCInst Inst;
-
-  // If VEX/EVEX encoding is forced, we need to pass the USE_* flag to the
-  // encoder and printer.
-  if (ForcedVEXEncoding == VEXEncoding_VEX)
-    Prefixes |= X86::IP_USE_VEX;
-  else if (ForcedVEXEncoding == VEXEncoding_VEX2)
-    Prefixes |= X86::IP_USE_VEX2;
-  else if (ForcedVEXEncoding == VEXEncoding_VEX3)
-    Prefixes |= X86::IP_USE_VEX3;
-  else if (ForcedVEXEncoding == VEXEncoding_EVEX)
-    Prefixes |= X86::IP_USE_EVEX;
-
-  // Set encoded flags for {disp8} and {disp32}.
-  if (ForcedDispEncoding == DispEncoding_Disp8)
-    Prefixes |= X86::IP_USE_DISP8;
-  else if (ForcedDispEncoding == DispEncoding_Disp32)
-    Prefixes |= X86::IP_USE_DISP32;
-
-  if (Prefixes)
-    Inst.setFlags(Prefixes);
-
+  SMRange EmptyRange = std::nullopt;
   // Find one unsized memory operand, if present.
   X86Operand *UnsizedMemOp = nullptr;
   for (const auto &Op : Operands) {
@@ -4402,6 +4366,7 @@ bool X86AsmParser::MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,

   // Allow some instructions to have implicitly pointer-sized operands. This is
   // compatible with gas.
+  StringRef Mnemonic = (static_cast<X86Operand &>(*Operands[0])).getToken();
   if (UnsizedMemOp) {
     static const char *const PtrSizedInstrs[] = {"call", "jmp", "push"};
     for (const char *Instr : PtrSizedInstrs) {
@@ -4415,6 +4380,7 @@ bool X86AsmParser::MatchAndEmitIntelInstruction(SMLoc IDLoc, unsigned &Opcode,
   SmallVector<unsigned, 8> Match;
   FeatureBitset ErrorInfoMissingFeatures;
   FeatureBitset MissingFeatures;
+  StringRef Base = (static_cast<X86Operand &>(*Operands[0])).getToken();

   // If unsized push has immediate operand we should default the default pointer
   // size for the size.