@@ -178,7 +178,7 @@ define void @test5(half %x, ptr %y) {
;
; X86-LABEL: test5:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: vmovsh %xmm0, (%eax)
; X86-NEXT: retl
@@ -189,13 +189,13 @@ define void @test5(half %x, ptr %y) {
define half @test7(ptr %x) {
; X64-LABEL: test7:
; X64: # %bb.0:
- ; X64-NEXT: vmovsh (%rdi), %xmm0
+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X64-NEXT: retq
;
; X86-LABEL: test7:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
- ; X86-NEXT: vmovsh (%eax), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%y = load i16, ptr %x
%res = bitcast i16 %y to half
@@ -253,13 +253,13 @@ define <32 x i16> @test10c(ptr %x) {
define <8 x half> @test11(ptr %x) {
; X64-LABEL: test11:
; X64: # %bb.0:
- ; X64-NEXT: vmovsh (%rdi), %xmm0
+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X64-NEXT: retq
;
; X86-LABEL: test11:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
- ; X86-NEXT: vmovsh (%eax), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%y = load half, ptr %x, align 2
%res = insertelement <8 x half> zeroinitializer, half %y, i32 0
@@ -269,13 +269,13 @@ define <8 x half> @test11(ptr %x) {
define <16 x half> @test11b(ptr %x) {
; X64-LABEL: test11b:
; X64: # %bb.0:
- ; X64-NEXT: vmovsh (%rdi), %xmm0
+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X64-NEXT: retq
;
; X86-LABEL: test11b:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
- ; X86-NEXT: vmovsh (%eax), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%y = load half, ptr %x, align 2
%res = insertelement <16 x half> zeroinitializer, half %y, i32 0
@@ -285,13 +285,13 @@ define <16 x half> @test11b(ptr %x) {
define <32 x half> @test11c(ptr %x) {
; X64-LABEL: test11c:
; X64: # %bb.0:
- ; X64-NEXT: vmovsh (%rdi), %xmm0
+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X64-NEXT: retq
;
; X86-LABEL: test11c:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
- ; X86-NEXT: vmovsh (%eax), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%y = load half, ptr %x, align 2
%res = insertelement <32 x half> zeroinitializer, half %y, i32 0
@@ -307,7 +307,7 @@ define <8 x half> @test14(half %x) {
;
; X86-LABEL: test14:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%res = insertelement <8 x half> zeroinitializer, half %x, i32 0
ret <8 x half> %res
@@ -322,7 +322,7 @@ define <16 x half> @test14b(half %x) {
;
; X86-LABEL: test14b:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%res = insertelement <16 x half> zeroinitializer, half %x, i32 0
ret <16 x half> %res
@@ -337,7 +337,7 @@ define <32 x half> @test14c(half %x) {
;
; X86-LABEL: test14c:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%res = insertelement <32 x half> zeroinitializer, half %x, i32 0
ret <32 x half> %res
@@ -1253,7 +1253,7 @@ define half @test_movw2(i16 %x) {
;
; X86-LABEL: test_movw2:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: retl
%res = bitcast i16 %x to half
ret half %res
@@ -1358,7 +1358,7 @@ define half @extract_f16_8(<32 x half> %x, i64 %idx) nounwind {
; X64-NEXT: subq $128, %rsp
; X64-NEXT: andl $31, %edi
; X64-NEXT: vmovaps %zmm0, (%rsp)
- ; X64-NEXT: vmovsh (%rsp,%rdi,2), %xmm0
+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X64-NEXT: movq %rbp, %rsp
; X64-NEXT: popq %rbp
; X64-NEXT: vzeroupper
@@ -1373,7 +1373,7 @@ define half @extract_f16_8(<32 x half> %x, i64 %idx) nounwind {
; X86-NEXT: movl 8(%ebp), %eax
; X86-NEXT: andl $31, %eax
; X86-NEXT: vmovaps %zmm0, (%esp)
- ; X86-NEXT: vmovsh (%esp,%eax,2), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: movl %ebp, %esp
; X86-NEXT: popl %ebp
; X86-NEXT: vzeroupper
@@ -1392,7 +1392,7 @@ define half @extract_f16_9(<64 x half> %x, i64 %idx) nounwind {
; X64-NEXT: andl $63, %edi
; X64-NEXT: vmovaps %zmm1, {{[0-9]+}}(%rsp)
; X64-NEXT: vmovaps %zmm0, (%rsp)
- ; X64-NEXT: vmovsh (%rsp,%rdi,2), %xmm0
+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X64-NEXT: movq %rbp, %rsp
; X64-NEXT: popq %rbp
; X64-NEXT: vzeroupper
@@ -1408,7 +1408,7 @@ define half @extract_f16_9(<64 x half> %x, i64 %idx) nounwind {
; X86-NEXT: andl $63, %eax
; X86-NEXT: vmovaps %zmm1, {{[0-9]+}}(%esp)
; X86-NEXT: vmovaps %zmm0, (%esp)
- ; X86-NEXT: vmovsh (%esp,%eax,2), %xmm0
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: movl %ebp, %esp
; X86-NEXT: popl %ebp
; X86-NEXT: vzeroupper
@@ -1797,11 +1797,11 @@ define <8 x half> @build_vector_xxxxuuuu(half %a0, half %a1, half %a2, half %a3)
;
; X86-LABEL: build_vector_xxxxuuuu:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; X86-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],xmm1[0],zero,zero
; X86-NEXT: retl
@@ -1823,11 +1823,11 @@ define <8 x half> @build_vector_uuuuxxxx(half %a0, half %a1, half %a2, half %a3)
;
; X86-LABEL: build_vector_uuuuxxxx:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; X86-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X86-NEXT: vpbroadcastq %xmm0, %xmm0
@@ -1853,18 +1853,18 @@ define <8 x half> @build_vector_xxxxxxxx(half %a0, half %a1, half %a2, half %a3,
;
; X86-LABEL: build_vector_xxxxxxxx:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; X86-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm3
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm3 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; X86-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
; X86-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
@@ -1895,18 +1895,18 @@ define <16 x half> @build_vector_xxxxuuuuuuuuxxxx(half %a0, half %a1, half %a2,
;
; X86-LABEL: build_vector_xxxxuuuuuuuuxxxx:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; X86-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm2
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm3
+ ; X86-NEXT: vmovsh {{.*#+}} xmm2 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm3 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; X86-NEXT: vinsertps {{.*#+}} xmm1 = xmm1[0],xmm2[0],zero,zero
; X86-NEXT: vpbroadcastq %xmm0, %xmm0
@@ -2006,10 +2006,10 @@ define <8 x half> @test21(half %a, half %b, half %c) nounwind {
;
; X86-LABEL: test21:
; X86: # %bb.0:
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm0
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = mem[0],zero,zero,zero,zero,zero,zero,zero
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
- ; X86-NEXT: vmovsh {{[0-9]+}}(%esp), %xmm1
+ ; X86-NEXT: vmovsh {{.*#+}} xmm1 = mem[0],zero,zero,zero,zero,zero,zero,zero
; X86-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X86-NEXT: vpxor %xmm1, %xmm1, %xmm1
; X86-NEXT: vpbroadcastw %xmm1, %xmm1