; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s
+; RUN: llc < %s -mtriple=i686-linux | FileCheck %s --check-prefixes=X86
+; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64

declare void @use(i8)

define i8 @add_and_xor(i8 %x, i8 %y) {
-; CHECK-LABEL: add_and_xor:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: orl %esi, %eax
-; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor:
+; X86: # %bb.0:
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: orb {{[0-9]+}}(%esp), %al
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor:
+; X64: # %bb.0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: orl %esi, %eax
+; X64-NEXT: # kill: def $al killed $al killed $eax
+; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

define i8 @add_and_xor_wrong_const(i8 %x, i8 %y) {
-; CHECK-LABEL: add_and_xor_wrong_const:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: xorb $-2, %al
-; CHECK-NEXT: andb %sil, %al
-; CHECK-NEXT: addb %dil, %al
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_wrong_const:
+; X86: # %bb.0:
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl %ecx, %eax
+; X86-NEXT: xorb $-2, %al
+; X86-NEXT: andb {{[0-9]+}}(%esp), %al
+; X86-NEXT: addb %cl, %al
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_wrong_const:
+; X64: # %bb.0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: xorb $-2, %al
+; X64-NEXT: andb %sil, %al
+; X64-NEXT: addb %dil, %al
+; X64-NEXT: retq
  %xor = xor i8 %x, -2
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

define i8 @add_and_xor_wrong_op(i8 %x, i8 %y, i8 %z) {
-; CHECK-LABEL: add_and_xor_wrong_op:
-; CHECK: # %bb.0:
-; CHECK-NEXT: # kill: def $edx killed $edx def $rdx
-; CHECK-NEXT: # kill: def $edi killed $edi def $rdi
-; CHECK-NEXT: notb %dl
-; CHECK-NEXT: andb %sil, %dl
-; CHECK-NEXT: leal (%rdx,%rdi), %eax
-; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_wrong_op:
+; X86: # %bb.0:
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: notb %al
+; X86-NEXT: andb {{[0-9]+}}(%esp), %al
+; X86-NEXT: addb {{[0-9]+}}(%esp), %al
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_wrong_op:
+; X64: # %bb.0:
+; X64-NEXT: # kill: def $edx killed $edx def $rdx
+; X64-NEXT: # kill: def $edi killed $edi def $rdi
+; X64-NEXT: notb %dl
+; X64-NEXT: andb %sil, %dl
+; X64-NEXT: leal (%rdx,%rdi), %eax
+; X64-NEXT: # kill: def $al killed $al killed $eax
+; X64-NEXT: retq
  %xor = xor i8 %z, -1
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

define i8 @add_and_xor_commuted1(i8 %x, i8 %y) {
-; CHECK-LABEL: add_and_xor_commuted1:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: orl %esi, %eax
-; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_commuted1:
+; X86: # %bb.0:
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: orb {{[0-9]+}}(%esp), %al
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_commuted1:
+; X64: # %bb.0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: orl %esi, %eax
+; X64-NEXT: # kill: def $al killed $al killed $eax
+; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %y, %xor
  %add = add i8 %and, %x
  ret i8 %add
}

define i8 @add_and_xor_commuted2(i8 %x, i8 %y) {
-; CHECK-LABEL: add_and_xor_commuted2:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: orl %esi, %eax
-; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_commuted2:
+; X86: # %bb.0:
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: orb {{[0-9]+}}(%esp), %al
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_commuted2:
+; X64: # %bb.0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: orl %esi, %eax
+; X64-NEXT: # kill: def $al killed $al killed $eax
+; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %xor, %y
  %add = add i8 %x, %and
  ret i8 %add
}

define i8 @add_and_xor_commuted3(i8 %x, i8 %y) {
-; CHECK-LABEL: add_and_xor_commuted3:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: orl %esi, %eax
-; CHECK-NEXT: # kill: def $al killed $al killed $eax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_commuted3:
+; X86: # %bb.0:
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: orb {{[0-9]+}}(%esp), %al
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_commuted3:
+; X64: # %bb.0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: orl %esi, %eax
+; X64-NEXT: # kill: def $al killed $al killed $eax
+; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %y, %xor
  %add = add i8 %x, %and
  ret i8 %add
}

define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
-; CHECK-LABEL: add_and_xor_extra_use:
-; CHECK: # %bb.0:
-; CHECK-NEXT: pushq %rbp
-; CHECK-NEXT: pushq %r14
-; CHECK-NEXT: pushq %rbx
-; CHECK-NEXT: movl %esi, %ebx
-; CHECK-NEXT: movl %edi, %ebp
-; CHECK-NEXT: movl %ebp, %eax
-; CHECK-NEXT: notb %al
-; CHECK-NEXT: movzbl %al, %r14d
-; CHECK-NEXT: movl %r14d, %edi
-; CHECK-NEXT: callq use@PLT
-; CHECK-NEXT: andb %bl, %r14b
-; CHECK-NEXT: movzbl %r14b, %edi
-; CHECK-NEXT: callq use@PLT
-; CHECK-NEXT: orb %bpl, %bl
-; CHECK-NEXT: movl %ebx, %eax
-; CHECK-NEXT: popq %rbx
-; CHECK-NEXT: popq %r14
-; CHECK-NEXT: popq %rbp
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_extra_use:
+; X86: # %bb.0:
+; X86-NEXT: pushl %ebx
+; X86-NEXT: subl $8, %esp
+; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ebx
+; X86-NEXT: movb {{[0-9]+}}(%esp), %bh
+; X86-NEXT: notb %bh
+; X86-NEXT: movzbl %bh, %eax
+; X86-NEXT: movl %eax, (%esp)
+; X86-NEXT: calll use@PLT
+; X86-NEXT: andb %bl, %bh
+; X86-NEXT: movzbl %bh, %eax
+; X86-NEXT: movl %eax, (%esp)
+; X86-NEXT: calll use@PLT
+; X86-NEXT: orb {{[0-9]+}}(%esp), %bl
+; X86-NEXT: movl %ebx, %eax
+; X86-NEXT: addl $8, %esp
+; X86-NEXT: popl %ebx
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_extra_use:
+; X64: # %bb.0:
+; X64-NEXT: pushq %rbp
+; X64-NEXT: pushq %r14
+; X64-NEXT: pushq %rbx
+; X64-NEXT: movl %esi, %ebx
+; X64-NEXT: movl %edi, %ebp
+; X64-NEXT: movl %ebp, %eax
+; X64-NEXT: notb %al
+; X64-NEXT: movzbl %al, %r14d
+; X64-NEXT: movl %r14d, %edi
+; X64-NEXT: callq use@PLT
+; X64-NEXT: andb %bl, %r14b
+; X64-NEXT: movzbl %r14b, %edi
+; X64-NEXT: callq use@PLT
+; X64-NEXT: orb %bpl, %bl
+; X64-NEXT: movl %ebx, %eax
+; X64-NEXT: popq %rbx
+; X64-NEXT: popq %r14
+; X64-NEXT: popq %rbp
+; X64-NEXT: retq
  %xor = xor i8 %x, -1
  call void @use(i8 %xor)
  %and = and i8 %xor, %y
@@ -116,36 +178,60 @@ define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
}

define i64 @add_and_xor_const(i64 %x) {
-; CHECK-LABEL: add_and_xor_const:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movq %rdi, %rax
-; CHECK-NEXT: orq $1, %rax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_const:
+; X86: # %bb.0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: orl $1, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_const:
+; X64: # %bb.0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: orq $1, %rax
+; X64-NEXT: retq
  %xor = xor i64 %x, -1
  %and = and i64 %xor, 1
  %add = add i64 %and, %x
  ret i64 %add
}

define i64 @add_and_xor_const_wrong_op(i64 %x, i64 %y) {
-; CHECK-LABEL: add_and_xor_const_wrong_op:
-; CHECK: # %bb.0:
-; CHECK-NEXT: notl %esi
-; CHECK-NEXT: andl $1, %esi
-; CHECK-NEXT: leaq (%rsi,%rdi), %rax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_const_wrong_op:
+; X86: # %bb.0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: notl %eax
+; X86-NEXT: andl $1, %eax
+; X86-NEXT: addl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: adcl $0, %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_const_wrong_op:
+; X64: # %bb.0:
+; X64-NEXT: notl %esi
+; X64-NEXT: andl $1, %esi
+; X64-NEXT: leaq (%rsi,%rdi), %rax
+; X64-NEXT: retq
  %xor = xor i64 %y, -1
  %and = and i64 %xor, 1
  %add = add i64 %and, %x
  ret i64 %add
}

define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
-; CHECK-LABEL: add_and_xor_const_explicit_trunc:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movq %rdi, %rax
-; CHECK-NEXT: orq $1, %rax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_const_explicit_trunc:
+; X86: # %bb.0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: orl $1, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_const_explicit_trunc:
+; X64: # %bb.0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: orq $1, %rax
+; X64-NEXT: retq
  %trunc = trunc i64 %x to i32
  %xor = xor i32 %trunc, -1
  %ext = sext i32 %xor to i64
@@ -155,15 +241,27 @@ define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
}

define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
-; CHECK-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movl %edi, %eax
-; CHECK-NEXT: notl %eax
-; CHECK-NEXT: movslq %eax, %rcx
-; CHECK-NEXT: movabsq $4294967297, %rax # imm = 0x100000001
-; CHECK-NEXT: andq %rcx, %rax
-; CHECK-NEXT: addq %rdi, %rax
-; CHECK-NEXT: retq
+; X86-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
+; X86: # %bb.0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT: movl %ecx, %eax
+; X86-NEXT: notl %eax
+; X86-NEXT: movl %eax, %edx
+; X86-NEXT: shrl $31, %edx
+; X86-NEXT: andl $1, %eax
+; X86-NEXT: addl %ecx, %eax
+; X86-NEXT: adcl {{[0-9]+}}(%esp), %edx
+; X86-NEXT: retl
+;
+; X64-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
+; X64: # %bb.0:
+; X64-NEXT: movl %edi, %eax
+; X64-NEXT: notl %eax
+; X64-NEXT: movslq %eax, %rcx
+; X64-NEXT: movabsq $4294967297, %rax # imm = 0x100000001
+; X64-NEXT: andq %rcx, %rax
+; X64-NEXT: addq %rdi, %rax
+; X64-NEXT: retq
  %trunc = trunc i64 %x to i32
  %xor = xor i32 %trunc, -1
  %ext = sext i32 %xor to i64
@@ -173,11 +271,17 @@ define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
}

define ptr @gep_and_xor(ptr %a, i64 %m) {
-; CHECK-LABEL: gep_and_xor:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movq %rdi, %rax
-; CHECK-NEXT: orq %rsi, %rax
-; CHECK-NEXT: retq
+; X86-LABEL: gep_and_xor:
+; X86: # %bb.0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: orl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: gep_and_xor:
+; X64: # %bb.0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: orq %rsi, %rax
+; X64-NEXT: retq
  %old = ptrtoint ptr %a to i64
  %old.not = and i64 %old, %m
  %offset = xor i64 %old.not, %m
@@ -186,11 +290,17 @@ define ptr @gep_and_xor(ptr %a, i64 %m) {
}

define ptr @gep_and_xor_const(ptr %a) {
-; CHECK-LABEL: gep_and_xor_const:
-; CHECK: # %bb.0:
-; CHECK-NEXT: movq %rdi, %rax
-; CHECK-NEXT: orq $1, %rax
-; CHECK-NEXT: retq
+; X86-LABEL: gep_and_xor_const:
+; X86: # %bb.0:
+; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT: orl $1, %eax
+; X86-NEXT: retl
+;
+; X64-LABEL: gep_and_xor_const:
+; X64: # %bb.0:
+; X64-NEXT: movq %rdi, %rax
+; X64-NEXT: orq $1, %rax
+; X64-NEXT: retq
  %old = ptrtoint ptr %a to i64
  %old.not = and i64 %old, 1
  %offset = xor i64 %old.not, 1