@@ -820,7 +820,7 @@ define <16 x i8> @unsigned_sat_variable_v16i8_using_min(<16 x i8> %x, <16 x i8>
820
820
; AVX512-LABEL: unsigned_sat_variable_v16i8_using_min:
821
821
; AVX512: # %bb.0:
822
822
; AVX512-NEXT: vmovdqa %xmm1, %xmm2
823
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm2
823
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm2 = ~xmm2
824
824
; AVX512-NEXT: vpminub %xmm2, %xmm0, %xmm0
825
825
; AVX512-NEXT: vpaddb %xmm1, %xmm0, %xmm0
826
826
; AVX512-NEXT: retq
@@ -875,10 +875,10 @@ define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_notval(<16 x i8> %x, <16
875
875
; AVX512: # %bb.0:
876
876
; AVX512-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
877
877
; AVX512-NEXT: vpaddb %xmm1, %xmm0, %xmm3
878
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm1
878
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm1 = ~xmm1
879
879
; AVX512-NEXT: vpminub %xmm1, %xmm0, %xmm1
880
880
; AVX512-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
881
- ; AVX512-NEXT: vpternlogq $222, %xmm2, %xmm3, %xmm0
881
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm0 = xmm3 | (xmm0 ^ xmm2)
882
882
; AVX512-NEXT: retq
883
883
%noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
884
884
%a = add <16 x i8> %x, %y
@@ -917,7 +917,7 @@ define <8 x i16> @unsigned_sat_variable_v8i16_using_min(<8 x i16> %x, <8 x i16>
917
917
; AVX512-LABEL: unsigned_sat_variable_v8i16_using_min:
918
918
; AVX512: # %bb.0:
919
919
; AVX512-NEXT: vmovdqa %xmm1, %xmm2
920
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm2
920
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm2 = ~xmm2
921
921
; AVX512-NEXT: vpminuw %xmm2, %xmm0, %xmm0
922
922
; AVX512-NEXT: vpaddw %xmm1, %xmm0, %xmm0
923
923
; AVX512-NEXT: retq
@@ -982,10 +982,10 @@ define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_notval(<8 x i16> %x, <8
982
982
; AVX512: # %bb.0:
983
983
; AVX512-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
984
984
; AVX512-NEXT: vpaddw %xmm1, %xmm0, %xmm3
985
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm1
985
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm1 = ~xmm1
986
986
; AVX512-NEXT: vpminuw %xmm1, %xmm0, %xmm1
987
987
; AVX512-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
988
- ; AVX512-NEXT: vpternlogq $222, %xmm2, %xmm3, %xmm0
988
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm0 = xmm3 | (xmm0 ^ xmm2)
989
989
; AVX512-NEXT: retq
990
990
%noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
991
991
%a = add <8 x i16> %x, %y
@@ -1029,7 +1029,7 @@ define <4 x i32> @unsigned_sat_variable_v4i32_using_min(<4 x i32> %x, <4 x i32>
1029
1029
; AVX512-LABEL: unsigned_sat_variable_v4i32_using_min:
1030
1030
; AVX512: # %bb.0:
1031
1031
; AVX512-NEXT: vmovdqa %xmm1, %xmm2
1032
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm2
1032
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm2 = ~xmm2
1033
1033
; AVX512-NEXT: vpminud %xmm2, %xmm0, %xmm0
1034
1034
; AVX512-NEXT: vpaddd %xmm1, %xmm0, %xmm0
1035
1035
; AVX512-NEXT: retq
@@ -1070,7 +1070,7 @@ define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_sum(<4 x i32> %x, <4 x i
1070
1070
; AVX512-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
1071
1071
; AVX512: # %bb.0:
1072
1072
; AVX512-NEXT: vmovdqa %xmm1, %xmm2
1073
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm2
1073
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm2 = ~xmm2
1074
1074
; AVX512-NEXT: vpminud %xmm2, %xmm0, %xmm0
1075
1075
; AVX512-NEXT: vpaddd %xmm1, %xmm0, %xmm0
1076
1076
; AVX512-NEXT: retq
@@ -1117,7 +1117,7 @@ define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_notval(<4 x i32> %x, <4
1117
1117
; AVX512-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
1118
1118
; AVX512: # %bb.0:
1119
1119
; AVX512-NEXT: vmovdqa %xmm1, %xmm3
1120
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm3
1120
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm3 = ~xmm3
1121
1121
; AVX512-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1122
1122
; AVX512-NEXT: vpcmpleud %xmm3, %xmm0, %k1
1123
1123
; AVX512-NEXT: vpaddd %xmm1, %xmm0, %xmm2 {%k1}
@@ -1202,7 +1202,7 @@ define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64>
1202
1202
; AVX512-LABEL: unsigned_sat_variable_v2i64_using_min:
1203
1203
; AVX512: # %bb.0:
1204
1204
; AVX512-NEXT: vmovdqa %xmm1, %xmm2
1205
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm2
1205
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm2 = ~xmm2
1206
1206
; AVX512-NEXT: vpminuq %xmm2, %xmm0, %xmm0
1207
1207
; AVX512-NEXT: vpaddq %xmm1, %xmm0, %xmm0
1208
1208
; AVX512-NEXT: retq
@@ -1272,7 +1272,7 @@ define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i
1272
1272
; AVX512-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
1273
1273
; AVX512: # %bb.0:
1274
1274
; AVX512-NEXT: vmovdqa %xmm1, %xmm2
1275
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm2
1275
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm2 = ~xmm2
1276
1276
; AVX512-NEXT: vpminuq %xmm2, %xmm0, %xmm0
1277
1277
; AVX512-NEXT: vpaddq %xmm1, %xmm0, %xmm0
1278
1278
; AVX512-NEXT: retq
@@ -1339,7 +1339,7 @@ define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_notval(<2 x i64> %x, <2
1339
1339
; AVX512-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
1340
1340
; AVX512: # %bb.0:
1341
1341
; AVX512-NEXT: vmovdqa %xmm1, %xmm3
1342
- ; AVX512-NEXT: vpternlogq $15, %xmm1, %xmm1, %xmm3
1342
+ ; AVX512-NEXT: vpternlogq {{.*#+}} xmm3 = ~xmm3
1343
1343
; AVX512-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1344
1344
; AVX512-NEXT: vpcmpleuq %xmm3, %xmm0, %k1
1345
1345
; AVX512-NEXT: vpaddq %xmm1, %xmm0, %xmm2 {%k1}
0 commit comments