 ;
 ; XVABSD_[B/H/W/D]
 ;
-define <32 x i8> @xvabsd_b(<32 x i8> %a, <32 x i8> %b) #0 {
+define <32 x i8> @xvabsd_b(<32 x i8> %a, <32 x i8> %b) {
 ; CHECK-LABEL: xvabsd_b:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.b $xr2, $xr0, $xr1
@@ -21,7 +21,7 @@ define <32 x i8> @xvabsd_b(<32 x i8> %a, <32 x i8> %b) #0 {
   ret <32 x i8> %trunc
 }

-define <16 x i16> @xvabsd_h(<16 x i16> %a, <16 x i16> %b) #0 {
+define <16 x i16> @xvabsd_h(<16 x i16> %a, <16 x i16> %b) {
 ; CHECK-LABEL: xvabsd_h:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.h $xr2, $xr0, $xr1
@@ -36,7 +36,7 @@ define <16 x i16> @xvabsd_h(<16 x i16> %a, <16 x i16> %b) #0 {
   ret <16 x i16> %trunc
 }

-define <8 x i32> @xvabsd_w(<8 x i32> %a, <8 x i32> %b) #0 {
+define <8 x i32> @xvabsd_w(<8 x i32> %a, <8 x i32> %b) {
 ; CHECK-LABEL: xvabsd_w:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.w $xr2, $xr0, $xr1
@@ -51,7 +51,7 @@ define <8 x i32> @xvabsd_w(<8 x i32> %a, <8 x i32> %b) #0 {
   ret <8 x i32> %trunc
 }

-define <4 x i64> @xvabsd_d(<4 x i64> %a, <4 x i64> %b) #0 {
+define <4 x i64> @xvabsd_d(<4 x i64> %a, <4 x i64> %b) {
 ; CHECK-LABEL: xvabsd_d:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.d $xr2, $xr0, $xr1
@@ -70,7 +70,7 @@ define <4 x i64> @xvabsd_d(<4 x i64> %a, <4 x i64> %b) #0 {
 ; XVABSD_[B/H/W/D]U
 ;

-define <32 x i8> @xvabsd_bu(<32 x i8> %a, <32 x i8> %b) #0 {
+define <32 x i8> @xvabsd_bu(<32 x i8> %a, <32 x i8> %b) {
 ; CHECK-LABEL: xvabsd_bu:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.bu $xr2, $xr0, $xr1
@@ -85,7 +85,7 @@ define <32 x i8> @xvabsd_bu(<32 x i8> %a, <32 x i8> %b) #0 {
   ret <32 x i8> %trunc
 }

-define <16 x i16> @xvabsd_hu(<16 x i16> %a, <16 x i16> %b) #0 {
+define <16 x i16> @xvabsd_hu(<16 x i16> %a, <16 x i16> %b) {
 ; CHECK-LABEL: xvabsd_hu:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.hu $xr2, $xr0, $xr1
@@ -100,7 +100,7 @@ define <16 x i16> @xvabsd_hu(<16 x i16> %a, <16 x i16> %b) #0 {
   ret <16 x i16> %trunc
 }

-define <8 x i32> @xvabsd_wu(<8 x i32> %a, <8 x i32> %b) #0 {
+define <8 x i32> @xvabsd_wu(<8 x i32> %a, <8 x i32> %b) {
 ; CHECK-LABEL: xvabsd_wu:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.wu $xr2, $xr0, $xr1
@@ -115,7 +115,7 @@ define <8 x i32> @xvabsd_wu(<8 x i32> %a, <8 x i32> %b) #0 {
   ret <8 x i32> %trunc
 }

-define <4 x i64> @xvabsd_du(<4 x i64> %a, <4 x i64> %b) #0 {
+define <4 x i64> @xvabsd_du(<4 x i64> %a, <4 x i64> %b) {
 ; CHECK-LABEL: xvabsd_du:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvmin.du $xr2, $xr0, $xr1
@@ -130,7 +130,7 @@ define <4 x i64> @xvabsd_du(<4 x i64> %a, <4 x i64> %b) #0 {
   ret <4 x i64> %trunc
 }

-define <32 x i8> @xvabsd_v32i8_nsw(<32 x i8> %a, <32 x i8> %b) #0 {
+define <32 x i8> @xvabsd_v32i8_nsw(<32 x i8> %a, <32 x i8> %b) {
 ; CHECK-LABEL: xvabsd_v32i8_nsw:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvsub.b $xr0, $xr0, $xr1
@@ -142,7 +142,7 @@ define <32 x i8> @xvabsd_v32i8_nsw(<32 x i8> %a, <32 x i8> %b) #0 {
   ret <32 x i8> %abs
 }

-define <16 x i16> @xvabsd_v16i16_nsw(<16 x i16> %a, <16 x i16> %b) #0 {
+define <16 x i16> @xvabsd_v16i16_nsw(<16 x i16> %a, <16 x i16> %b) {
 ; CHECK-LABEL: xvabsd_v16i16_nsw:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvsub.h $xr0, $xr0, $xr1
@@ -154,7 +154,7 @@ define <16 x i16> @xvabsd_v16i16_nsw(<16 x i16> %a, <16 x i16> %b) #0 {
   ret <16 x i16> %abs
 }

-define <8 x i32> @xvabsd_v8i32_nsw(<8 x i32> %a, <8 x i32> %b) #0 {
+define <8 x i32> @xvabsd_v8i32_nsw(<8 x i32> %a, <8 x i32> %b) {
 ; CHECK-LABEL: xvabsd_v8i32_nsw:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvsub.w $xr0, $xr0, $xr1
@@ -166,7 +166,7 @@ define <8 x i32> @xvabsd_v8i32_nsw(<8 x i32> %a, <8 x i32> %b) #0 {
   ret <8 x i32> %abs
 }

-define <4 x i64> @xvabsd_v4i64_nsw(<4 x i64> %a, <4 x i64> %b) #0 {
+define <4 x i64> @xvabsd_v4i64_nsw(<4 x i64> %a, <4 x i64> %b) {
 ; CHECK-LABEL: xvabsd_v4i64_nsw:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: xvsub.d $xr0, $xr0, $xr1
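
Note (not part of the diff): the signed xvabsd_* tests above exercise the usual widen/subtract/abs/truncate absolute-difference idiom, and the CHECK lines show the LoongArch LASX backend starting its lowering with xvmin. The function bodies are elided from the hunks shown, so the sketch below is only an assumed illustration of that idiom; the function name @abds_sketch and the chosen element widths are hypothetical, not the exact test IR.

; Illustrative sketch of the signed absolute-difference pattern: sign-extend
; so the subtraction cannot wrap, take the absolute value, then truncate back.
; The xvabsd_b CHECK lines above show the first instruction of the lowering
; (xvmin.b) that this kind of IR is matched to.
define <32 x i8> @abds_sketch(<32 x i8> %a, <32 x i8> %b) {
  %a.ext = sext <32 x i8> %a to <32 x i16>
  %b.ext = sext <32 x i8> %b to <32 x i16>
  %sub = sub <32 x i16> %a.ext, %b.ext
  %abs = call <32 x i16> @llvm.abs.v32i16(<32 x i16> %sub, i1 true)
  %trunc = trunc <32 x i16> %abs to <32 x i8>
  ret <32 x i8> %trunc
}
declare <32 x i16> @llvm.abs.v32i16(<32 x i16>, i1)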