@@ -7,7 +7,7 @@ define <16 x i8> @avg_v16i8_mask(<16 x i8> %a, <16 x i8> %b, <16 x i8> %src, i16
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgb %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
; AVX512F-NEXT: vpblendvb %xmm1, %xmm0, %xmm2, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -35,7 +35,7 @@ define <16 x i8> @avg_v16i8_maskz(<16 x i8> %a, <16 x i8> %b, i16 %mask) nounwin
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgb %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
; AVX512F-NEXT: vpand %xmm0, %xmm1, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -64,9 +64,9 @@ define <32 x i8> @avg_v32i8_mask(<32 x i8> %a, <32 x i8> %b, <32 x i8> %src, i32
; AVX512F-NEXT: shrl $16, %edi
; AVX512F-NEXT: vpavgb %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k2
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
- ; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k2} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm1
; AVX512F-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
@@ -96,9 +96,9 @@ define <32 x i8> @avg_v32i8_maskz(<32 x i8> %a, <32 x i8> %b, i32 %mask) nounwin
; AVX512F-NEXT: shrl $16, %edi
; AVX512F-NEXT: vpavgb %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k2
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
- ; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k2} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm2, %xmm2
; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512F-NEXT: vpand %ymm0, %ymm1, %ymm0
@@ -137,18 +137,18 @@ define <64 x i8> @avg_v64i8_mask(<64 x i8> %a, <64 x i8> %b, <64 x i8> %src, i64
; AVX512F-NEXT: kmovw %ecx, %k2
; AVX512F-NEXT: kmovw %eax, %k3
; AVX512F-NEXT: kmovw %edi, %k4
- ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k4} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm0 {%k4} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
- ; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k3} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k3} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
- ; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
- ; AVX512F-NEXT: vpternlogd $255, %zmm4, %zmm4, %zmm4 {%k2} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm4 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm4, %xmm4
; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
- ; AVX512F-NEXT: vpternlogq $202, %zmm2, %zmm1, %zmm0
+ ; AVX512F-NEXT: vpternlogq {{.*#+}} zmm0 = zmm2 ^ (zmm0 & (zmm1 ^ zmm2))
; AVX512F-NEXT: retq
;
; AVX512BWVL-LABEL: avg_v64i8_mask:
@@ -185,14 +185,14 @@ define <64 x i8> @avg_v64i8_maskz(<64 x i8> %a, <64 x i8> %b, i64 %mask) nounwin
; AVX512F-NEXT: kmovw %ecx, %k2
; AVX512F-NEXT: kmovw %eax, %k3
; AVX512F-NEXT: kmovw %edi, %k4
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k4} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k4} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
- ; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k3} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k3} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm2, %xmm2
; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
- ; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm2, %xmm2
- ; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k2} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
@@ -220,7 +220,7 @@ define <8 x i16> @avg_v8i16_mask(<8 x i16> %a, <8 x i16> %b, <8 x i16> %src, i8
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpblendvb %xmm1, %xmm0, %xmm2, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -248,7 +248,7 @@ define <8 x i16> @avg_v8i16_maskz(<8 x i16> %a, <8 x i16> %b, i8 %mask) nounwind
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpand %xmm0, %xmm1, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -275,7 +275,7 @@ define <16 x i16> @avg_v16i16_mask(<16 x i16> %a, <16 x i16> %b, <16 x i16> %src
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k1
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
; AVX512F-NEXT: retq
@@ -302,7 +302,7 @@ define <16 x i16> @avg_v16i16_maskz(<16 x i16> %a, <16 x i16> %b, i16 %mask) nou
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k1
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpand %ymm0, %ymm1, %ymm0
; AVX512F-NEXT: retq
@@ -334,12 +334,12 @@ define <32 x i16> @avg_v32i16_mask(<32 x i16> %a, <32 x i16> %b, <32 x i16> %src
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm1
; AVX512F-NEXT: kmovw %edi, %k2
- ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm0 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm0, %ymm0
- ; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k2} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm3, %ymm3
; AVX512F-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
- ; AVX512F-NEXT: vpternlogq $202, %zmm2, %zmm1, %zmm0
+ ; AVX512F-NEXT: vpternlogq {{.*#+}} zmm0 = zmm2 ^ (zmm0 & (zmm1 ^ zmm2))
; AVX512F-NEXT: retq
;
; AVX512BWVL-LABEL: avg_v32i16_mask:
@@ -370,9 +370,9 @@ define <32 x i16> @avg_v32i16_maskz(<32 x i16> %a, <32 x i16> %b, i32 %mask) nou
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512F-NEXT: kmovw %edi, %k2
- ; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
- ; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k2} {z}
+ ; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm2, %ymm2
; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
; AVX512F-NEXT: vpandq %zmm0, %zmm1, %zmm0