@@ -378,10 +378,9 @@ define i64 @PR59897(i1 %X1_2) {
 
 define i16 @rev_xor_lhs_rev16(i16 %a, i16 %b) #0 {
 ; CHECK-LABEL: @rev_xor_lhs_rev16(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call i16 @llvm.bitreverse.i16(i16 [[A:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = xor i16 [[TMP1]], [[B:%.*]]
-; CHECK-NEXT:    [[TMP3:%.*]] = tail call i16 @llvm.bitreverse.i16(i16 [[TMP2]])
-; CHECK-NEXT:    ret i16 [[TMP3]]
+; CHECK-NEXT:    [[TMP1:%.*]] = call i16 @llvm.bitreverse.i16(i16 [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = xor i16 [[TMP1]], [[A:%.*]]
+; CHECK-NEXT:    ret i16 [[TMP2]]
 ;
   %1 = tail call i16 @llvm.bitreverse.i16(i16 %a)
   %2 = xor i16 %1, %b
@@ -391,10 +390,9 @@ define i16 @rev_xor_lhs_rev16(i16 %a, i16 %b) #0 {
 
 define i32 @rev_and_rhs_rev32(i32 %a, i32 %b) #0 {
 ; CHECK-LABEL: @rev_and_rhs_rev32(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call i32 @llvm.bitreverse.i32(i32 [[B:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = and i32 [[TMP1]], [[A:%.*]]
-; CHECK-NEXT:    [[TMP3:%.*]] = tail call i32 @llvm.bitreverse.i32(i32 [[TMP2]])
-; CHECK-NEXT:    ret i32 [[TMP3]]
+; CHECK-NEXT:    [[TMP1:%.*]] = call i32 @llvm.bitreverse.i32(i32 [[A:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = and i32 [[TMP1]], [[B:%.*]]
+; CHECK-NEXT:    ret i32 [[TMP2]]
 ;
   %1 = tail call i32 @llvm.bitreverse.i32(i32 %b)
   %2 = and i32 %a, %1
@@ -404,10 +402,9 @@ define i32 @rev_and_rhs_rev32(i32 %a, i32 %b) #0 {
 
 define i32 @rev_or_rhs_rev32(i32 %a, i32 %b) #0 {
 ; CHECK-LABEL: @rev_or_rhs_rev32(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call i32 @llvm.bitreverse.i32(i32 [[B:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = or i32 [[TMP1]], [[A:%.*]]
-; CHECK-NEXT:    [[TMP3:%.*]] = tail call i32 @llvm.bitreverse.i32(i32 [[TMP2]])
-; CHECK-NEXT:    ret i32 [[TMP3]]
+; CHECK-NEXT:    [[TMP1:%.*]] = call i32 @llvm.bitreverse.i32(i32 [[A:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = or i32 [[TMP1]], [[B:%.*]]
+; CHECK-NEXT:    ret i32 [[TMP2]]
 ;
   %1 = tail call i32 @llvm.bitreverse.i32(i32 %b)
   %2 = or i32 %a, %1
@@ -417,10 +414,9 @@ define i32 @rev_or_rhs_rev32(i32 %a, i32 %b) #0 {
 
 define i64 @rev_or_rhs_rev64(i64 %a, i64 %b) #0 {
 ; CHECK-LABEL: @rev_or_rhs_rev64(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[B:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = or i64 [[TMP1]], [[A:%.*]]
-; CHECK-NEXT:    [[TMP3:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[TMP2]])
-; CHECK-NEXT:    ret i64 [[TMP3]]
+; CHECK-NEXT:    [[TMP1:%.*]] = call i64 @llvm.bitreverse.i64(i64 [[A:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = or i64 [[TMP1]], [[B:%.*]]
+; CHECK-NEXT:    ret i64 [[TMP2]]
 ;
   %1 = tail call i64 @llvm.bitreverse.i64(i64 %b)
   %2 = or i64 %a, %1
@@ -430,10 +426,9 @@ define i64 @rev_or_rhs_rev64(i64 %a, i64 %b) #0 {
 
 define i64 @rev_xor_rhs_rev64(i64 %a, i64 %b) #0 {
 ; CHECK-LABEL: @rev_xor_rhs_rev64(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[B:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = xor i64 [[TMP1]], [[A:%.*]]
-; CHECK-NEXT:    [[TMP3:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[TMP2]])
-; CHECK-NEXT:    ret i64 [[TMP3]]
+; CHECK-NEXT:    [[TMP1:%.*]] = call i64 @llvm.bitreverse.i64(i64 [[A:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = xor i64 [[TMP1]], [[B:%.*]]
+; CHECK-NEXT:    ret i64 [[TMP2]]
 ;
   %1 = tail call i64 @llvm.bitreverse.i64(i64 %b)
   %2 = xor i64 %a, %1
@@ -443,10 +438,9 @@ define i64 @rev_xor_rhs_rev64(i64 %a, i64 %b) #0 {
 
 define <2 x i32> @rev_xor_rhs_i32vec(<2 x i32> %a, <2 x i32> %b) #0 {
 ; CHECK-LABEL: @rev_xor_rhs_i32vec(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call <2 x i32> @llvm.bitreverse.v2i32(<2 x i32> [[B:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = xor <2 x i32> [[TMP1]], [[A:%.*]]
-; CHECK-NEXT:    [[TMP3:%.*]] = tail call <2 x i32> @llvm.bitreverse.v2i32(<2 x i32> [[TMP2]])
-; CHECK-NEXT:    ret <2 x i32> [[TMP3]]
+; CHECK-NEXT:    [[TMP1:%.*]] = call <2 x i32> @llvm.bitreverse.v2i32(<2 x i32> [[A:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = xor <2 x i32> [[TMP1]], [[B:%.*]]
+; CHECK-NEXT:    ret <2 x i32> [[TMP2]]
 ;
   %1 = tail call <2 x i32> @llvm.bitreverse.v2i32(<2 x i32> %b)
   %2 = xor <2 x i32> %a, %1
@@ -486,11 +480,8 @@ define i64 @rev_and_rhs_rev64_multiuse2(i64 %a, i64 %b) #0 {
 
 define i64 @rev_all_operand64(i64 %a, i64 %b) #0 {
 ; CHECK-LABEL: @rev_all_operand64(
-; CHECK-NEXT:    [[TMP1:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[A:%.*]])
-; CHECK-NEXT:    [[TMP2:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[B:%.*]])
-; CHECK-NEXT:    [[TMP3:%.*]] = and i64 [[TMP1]], [[TMP2]]
-; CHECK-NEXT:    [[TMP4:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[TMP3]])
-; CHECK-NEXT:    ret i64 [[TMP4]]
+; CHECK-NEXT:    [[TMP1:%.*]] = and i64 [[A:%.*]], [[B:%.*]]
+; CHECK-NEXT:    ret i64 [[TMP1]]
 ;
   %1 = tail call i64 @llvm.bitreverse.i64(i64 %a)
   %2 = tail call i64 @llvm.bitreverse.i64(i64 %b)
@@ -503,11 +494,10 @@ define i64 @rev_all_operand64_multiuse_both(i64 %a, i64 %b) #0 {
 ; CHECK-LABEL: @rev_all_operand64_multiuse_both(
 ; CHECK-NEXT:    [[TMP1:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[A:%.*]])
 ; CHECK-NEXT:    [[TMP2:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[B:%.*]])
-; CHECK-NEXT:    [[TMP3:%.*]] = and i64 [[TMP1]], [[TMP2]]
-; CHECK-NEXT:    [[TMP4:%.*]] = tail call i64 @llvm.bitreverse.i64(i64 [[TMP3]])
+; CHECK-NEXT:    [[TMP3:%.*]] = and i64 [[A]], [[B]]
 ; CHECK-NEXT:    call void @use_i64(i64 [[TMP1]])
 ; CHECK-NEXT:    call void @use_i64(i64 [[TMP2]])
-; CHECK-NEXT:    ret i64 [[TMP4]]
+; CHECK-NEXT:    ret i64 [[TMP3]]
 ;
   %1 = tail call i64 @llvm.bitreverse.i64(i64 %a)
   %2 = tail call i64 @llvm.bitreverse.i64(i64 %b)