@@ -23,7 +23,7 @@ using namespace llvm;
 // are currently emitted in X86GenInstrInfo.inc in alphabetical order. Which
 // makes sorting these tables a simple matter of alphabetizing the table.
 #include "X86GenFoldTables.inc"
-static const X86MemoryFoldTableEntry BroadcastFoldTable2[] = {
+static const X86MemoryFoldTableEntry BroadcastTable2[] = {
   { X86::VADDPDZ128rr, X86::VADDPDZ128rmb, TB_BCAST_SD },
   { X86::VADDPDZ256rr, X86::VADDPDZ256rmb, TB_BCAST_SD },
   { X86::VADDPDZrr, X86::VADDPDZrmb, TB_BCAST_SD },
@@ -200,7 +200,7 @@ static const X86MemoryFoldTableEntry BroadcastFoldTable2[] = {
   { X86::VXORPSZrr, X86::VXORPSZrmb, TB_BCAST_SS },
 };

-static const X86MemoryFoldTableEntry BroadcastFoldTable3[] = {
+static const X86MemoryFoldTableEntry BroadcastTable3[] = {
   { X86::VFMADD132PDZ128r, X86::VFMADD132PDZ128mb, TB_BCAST_SD },
   { X86::VFMADD132PDZ256r, X86::VFMADD132PDZ256mb, TB_BCAST_SD },
   { X86::VFMADD132PDZr, X86::VFMADD132PDZmb, TB_BCAST_SD },
@@ -319,7 +319,7 @@ static const X86MemoryFoldTableEntry BroadcastFoldTable3[] = {

 // Table to map instructions safe to broadcast using a different width from the
 // element width.
-static const X86MemoryFoldTableEntry BroadcastSizeFoldTable2[] = {
+static const X86MemoryFoldTableEntry BroadcastSizeTable2[] = {
   { X86::VANDNPDZ128rr, X86::VANDNPSZ128rmb, TB_BCAST_SS },
   { X86::VANDNPDZ256rr, X86::VANDNPSZ256rmb, TB_BCAST_SS },
   { X86::VANDNPDZrr, X86::VANDNPSZrmb, TB_BCAST_SS },
@@ -370,7 +370,7 @@ static const X86MemoryFoldTableEntry BroadcastSizeFoldTable2[] = {
   { X86::VXORPSZrr, X86::VXORPDZrmb, TB_BCAST_SD },
 };

-static const X86MemoryFoldTableEntry BroadcastSizeFoldTable3[] = {
+static const X86MemoryFoldTableEntry BroadcastSizeTable3[] = {
   { X86::VPTERNLOGDZ128rri, X86::VPTERNLOGQZ128rmbi, TB_BCAST_Q },
   { X86::VPTERNLOGDZ256rri, X86::VPTERNLOGQZ256rmbi, TB_BCAST_Q },
   { X86::VPTERNLOGDZrri, X86::VPTERNLOGQZrmbi, TB_BCAST_Q },
@@ -391,16 +391,16 @@ lookupFoldTableImpl(ArrayRef<X86MemoryFoldTableEntry> Table, unsigned RegOp) {
   // Make sure the tables are sorted.
   static std::atomic<bool> FoldTablesChecked(false);
   if (!FoldTablesChecked.load(std::memory_order_relaxed)) {
-    CHECK_SORTED_UNIQUE(MemoryFoldTable2Addr)
-    CHECK_SORTED_UNIQUE(MemoryFoldTable0)
-    CHECK_SORTED_UNIQUE(MemoryFoldTable1)
-    CHECK_SORTED_UNIQUE(MemoryFoldTable2)
-    CHECK_SORTED_UNIQUE(MemoryFoldTable3)
-    CHECK_SORTED_UNIQUE(MemoryFoldTable4)
-    CHECK_SORTED_UNIQUE(BroadcastFoldTable2)
-    CHECK_SORTED_UNIQUE(BroadcastFoldTable3)
-    CHECK_SORTED_UNIQUE(BroadcastSizeFoldTable2)
-    CHECK_SORTED_UNIQUE(BroadcastSizeFoldTable3)
+    CHECK_SORTED_UNIQUE(Table2Addr)
+    CHECK_SORTED_UNIQUE(Table0)
+    CHECK_SORTED_UNIQUE(Table1)
+    CHECK_SORTED_UNIQUE(Table2)
+    CHECK_SORTED_UNIQUE(Table3)
+    CHECK_SORTED_UNIQUE(Table4)
+    CHECK_SORTED_UNIQUE(BroadcastTable2)
+    CHECK_SORTED_UNIQUE(BroadcastTable3)
+    CHECK_SORTED_UNIQUE(BroadcastSizeTable2)
+    CHECK_SORTED_UNIQUE(BroadcastSizeTable3)
     FoldTablesChecked.store(true, std::memory_order_relaxed);
   }
 #endif
@@ -414,22 +414,22 @@ lookupFoldTableImpl(ArrayRef<X86MemoryFoldTableEntry> Table, unsigned RegOp) {

 const X86MemoryFoldTableEntry *
 llvm::lookupTwoAddrFoldTable(unsigned RegOp) {
-  return lookupFoldTableImpl(MemoryFoldTable2Addr, RegOp);
+  return lookupFoldTableImpl(Table2Addr, RegOp);
 }

 const X86MemoryFoldTableEntry *
 llvm::lookupFoldTable(unsigned RegOp, unsigned OpNum) {
   ArrayRef<X86MemoryFoldTableEntry> FoldTable;
   if (OpNum == 0)
-    FoldTable = ArrayRef(MemoryFoldTable0);
+    FoldTable = ArrayRef(Table0);
   else if (OpNum == 1)
-    FoldTable = ArrayRef(MemoryFoldTable1);
+    FoldTable = ArrayRef(Table1);
   else if (OpNum == 2)
-    FoldTable = ArrayRef(MemoryFoldTable2);
+    FoldTable = ArrayRef(Table2);
   else if (OpNum == 3)
-    FoldTable = ArrayRef(MemoryFoldTable3);
+    FoldTable = ArrayRef(Table3);
   else if (OpNum == 4)
-    FoldTable = ArrayRef(MemoryFoldTable4);
+    FoldTable = ArrayRef(Table4);
   else
     return nullptr;

@@ -445,36 +445,36 @@ struct X86MemUnfoldTable {
   std::vector<X86MemoryFoldTableEntry> Table;

   X86MemUnfoldTable() {
-    for (const X86MemoryFoldTableEntry &Entry : MemoryFoldTable2Addr)
+    for (const X86MemoryFoldTableEntry &Entry : Table2Addr)
       // Index 0, folded load and store, no alignment requirement.
       addTableEntry(Entry, TB_INDEX_0 | TB_FOLDED_LOAD | TB_FOLDED_STORE);

-    for (const X86MemoryFoldTableEntry &Entry : MemoryFoldTable0)
+    for (const X86MemoryFoldTableEntry &Entry : Table0)
       // Index 0, mix of loads and stores.
       addTableEntry(Entry, TB_INDEX_0);

-    for (const X86MemoryFoldTableEntry &Entry : MemoryFoldTable1)
+    for (const X86MemoryFoldTableEntry &Entry : Table1)
       // Index 1, folded load
       addTableEntry(Entry, TB_INDEX_1 | TB_FOLDED_LOAD);

-    for (const X86MemoryFoldTableEntry &Entry : MemoryFoldTable2)
+    for (const X86MemoryFoldTableEntry &Entry : Table2)
       // Index 2, folded load
       addTableEntry(Entry, TB_INDEX_2 | TB_FOLDED_LOAD);

-    for (const X86MemoryFoldTableEntry &Entry : MemoryFoldTable3)
+    for (const X86MemoryFoldTableEntry &Entry : Table3)
       // Index 3, folded load
       addTableEntry(Entry, TB_INDEX_3 | TB_FOLDED_LOAD);

-    for (const X86MemoryFoldTableEntry &Entry : MemoryFoldTable4)
+    for (const X86MemoryFoldTableEntry &Entry : Table4)
       // Index 4, folded load
       addTableEntry(Entry, TB_INDEX_4 | TB_FOLDED_LOAD);

     // Broadcast tables.
-    for (const X86MemoryFoldTableEntry &Entry : BroadcastFoldTable2)
+    for (const X86MemoryFoldTableEntry &Entry : BroadcastTable2)
       // Index 2, folded broadcast
       addTableEntry(Entry, TB_INDEX_2 | TB_FOLDED_LOAD | TB_FOLDED_BCAST);

-    for (const X86MemoryFoldTableEntry &Entry : BroadcastFoldTable3)
+    for (const X86MemoryFoldTableEntry &Entry : BroadcastTable3)
       // Index 3, folded broadcast
       addTableEntry(Entry, TB_INDEX_3 | TB_FOLDED_LOAD | TB_FOLDED_BCAST);

@@ -516,7 +516,7 @@ struct X86MemBroadcastFoldTable {

   X86MemBroadcastFoldTable() {
     // Broadcast tables.
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastFoldTable2) {
+    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastTable2) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
       if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 2)) {
@@ -526,7 +526,7 @@ struct X86MemBroadcastFoldTable {
         Table.push_back({MemOp, BcstOp, Flags});
       }
     }
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastSizeFoldTable2) {
+    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastSizeTable2) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
       if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 2)) {
@@ -537,7 +537,7 @@ struct X86MemBroadcastFoldTable {
       }
     }

-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastFoldTable3) {
+    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastTable3) {
       unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
       if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 3)) {
@@ -547,7 +547,7 @@ struct X86MemBroadcastFoldTable {
         Table.push_back({MemOp, BcstOp, Flags});
       }
     }
-    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastSizeFoldTable3) {
+    for (const X86MemoryFoldTableEntry &Reg2Bcst : BroadcastSizeTable3) {
      unsigned RegOp = Reg2Bcst.KeyOp;
       unsigned BcstOp = Reg2Bcst.DstOp;
       if (const X86MemoryFoldTableEntry *Reg2Mem = lookupFoldTable(RegOp, 3)) {
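For orientation only, not part of the patch: a minimal sketch of how a caller might query the renamed tables through llvm::lookupFoldTable shown in the diff above. The header name is an assumption; the DstOp field is taken from the entries visible in the diff.

// Hypothetical usage sketch, not from the patch. Assumes the header that
// declares X86MemoryFoldTableEntry and lookupFoldTable is available under
// this name in the X86 target directory.
#include "X86InstrFoldTables.h"

using namespace llvm;

// Return the memory-form opcode that folds a load into operand 2 of RegOp
// (e.g. the *rm counterpart of an rr instruction), or 0 if the tables have
// no entry for that operand index.
static unsigned memoryFormForOperand2(unsigned RegOp) {
  if (const X86MemoryFoldTableEntry *Entry = lookupFoldTable(RegOp, /*OpNum=*/2))
    return Entry->DstOp; // DstOp holds the folded (memory) opcode
  return 0;              // no fold available for operand 2
}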