Skip to content

Commit 4ead589

Browse files
committed
[X86] add-and-not.ll - add 32-bit test coverage
1 parent de71056 commit 4ead589

File tree

1 file changed

+207
-97
lines changed

1 file changed

+207
-97
lines changed

llvm/test/CodeGen/X86/add-and-not.ll

Lines changed: 207 additions & 97 deletions
Original file line number | Diff line number | Diff line change
@@ -1,112 +1,174 @@
11
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2-
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s
2+
; RUN: llc < %s -mtriple=i686-linux | FileCheck %s --check-prefixes=X86
3+
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64
34

45
declare void @use(i8)
56

67
define i8 @add_and_xor(i8 %x, i8 %y) {
7-
; CHECK-LABEL: add_and_xor:
8-
; CHECK: # %bb.0:
9-
; CHECK-NEXT: movl %edi, %eax
10-
; CHECK-NEXT: orl %esi, %eax
11-
; CHECK-NEXT: # kill: def $al killed $al killed $eax
12-
; CHECK-NEXT: retq
8+
; X86-LABEL: add_and_xor:
9+
; X86: # %bb.0:
10+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
11+
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
12+
; X86-NEXT: retl
13+
;
14+
; X64-LABEL: add_and_xor:
15+
; X64: # %bb.0:
16+
; X64-NEXT: movl %edi, %eax
17+
; X64-NEXT: orl %esi, %eax
18+
; X64-NEXT: # kill: def $al killed $al killed $eax
19+
; X64-NEXT: retq
1320
%xor = xor i8 %x, -1
1421
%and = and i8 %xor, %y
1522
%add = add i8 %and, %x
1623
ret i8 %add
1724
}
1825

1926
define i8 @add_and_xor_wrong_const(i8 %x, i8 %y) {
20-
; CHECK-LABEL: add_and_xor_wrong_const:
21-
; CHECK: # %bb.0:
22-
; CHECK-NEXT: movl %edi, %eax
23-
; CHECK-NEXT: xorb $-2, %al
24-
; CHECK-NEXT: andb %sil, %al
25-
; CHECK-NEXT: addb %dil, %al
26-
; CHECK-NEXT: retq
27+
; X86-LABEL: add_and_xor_wrong_const:
28+
; X86: # %bb.0:
29+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
30+
; X86-NEXT: movl %ecx, %eax
31+
; X86-NEXT: xorb $-2, %al
32+
; X86-NEXT: andb {{[0-9]+}}(%esp), %al
33+
; X86-NEXT: addb %cl, %al
34+
; X86-NEXT: retl
35+
;
36+
; X64-LABEL: add_and_xor_wrong_const:
37+
; X64: # %bb.0:
38+
; X64-NEXT: movl %edi, %eax
39+
; X64-NEXT: xorb $-2, %al
40+
; X64-NEXT: andb %sil, %al
41+
; X64-NEXT: addb %dil, %al
42+
; X64-NEXT: retq
2743
%xor = xor i8 %x, -2
2844
%and = and i8 %xor, %y
2945
%add = add i8 %and, %x
3046
ret i8 %add
3147
}
3248

3349
define i8 @add_and_xor_wrong_op(i8 %x, i8 %y, i8 %z) {
34-
; CHECK-LABEL: add_and_xor_wrong_op:
35-
; CHECK: # %bb.0:
36-
; CHECK-NEXT: # kill: def $edx killed $edx def $rdx
37-
; CHECK-NEXT: # kill: def $edi killed $edi def $rdi
38-
; CHECK-NEXT: notb %dl
39-
; CHECK-NEXT: andb %sil, %dl
40-
; CHECK-NEXT: leal (%rdx,%rdi), %eax
41-
; CHECK-NEXT: # kill: def $al killed $al killed $eax
42-
; CHECK-NEXT: retq
50+
; X86-LABEL: add_and_xor_wrong_op:
51+
; X86: # %bb.0:
52+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
53+
; X86-NEXT: notb %al
54+
; X86-NEXT: andb {{[0-9]+}}(%esp), %al
55+
; X86-NEXT: addb {{[0-9]+}}(%esp), %al
56+
; X86-NEXT: retl
57+
;
58+
; X64-LABEL: add_and_xor_wrong_op:
59+
; X64: # %bb.0:
60+
; X64-NEXT: # kill: def $edx killed $edx def $rdx
61+
; X64-NEXT: # kill: def $edi killed $edi def $rdi
62+
; X64-NEXT: notb %dl
63+
; X64-NEXT: andb %sil, %dl
64+
; X64-NEXT: leal (%rdx,%rdi), %eax
65+
; X64-NEXT: # kill: def $al killed $al killed $eax
66+
; X64-NEXT: retq
4367
%xor = xor i8 %z, -1
4468
%and = and i8 %xor, %y
4569
%add = add i8 %and, %x
4670
ret i8 %add
4771
}
4872

4973
define i8 @add_and_xor_commuted1(i8 %x, i8 %y) {
50-
; CHECK-LABEL: add_and_xor_commuted1:
51-
; CHECK: # %bb.0:
52-
; CHECK-NEXT: movl %edi, %eax
53-
; CHECK-NEXT: orl %esi, %eax
54-
; CHECK-NEXT: # kill: def $al killed $al killed $eax
55-
; CHECK-NEXT: retq
74+
; X86-LABEL: add_and_xor_commuted1:
75+
; X86: # %bb.0:
76+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
77+
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
78+
; X86-NEXT: retl
79+
;
80+
; X64-LABEL: add_and_xor_commuted1:
81+
; X64: # %bb.0:
82+
; X64-NEXT: movl %edi, %eax
83+
; X64-NEXT: orl %esi, %eax
84+
; X64-NEXT: # kill: def $al killed $al killed $eax
85+
; X64-NEXT: retq
5686
%xor = xor i8 %x, -1
5787
%and = and i8 %y, %xor
5888
%add = add i8 %and, %x
5989
ret i8 %add
6090
}
6191

6292
define i8 @add_and_xor_commuted2(i8 %x, i8 %y) {
63-
; CHECK-LABEL: add_and_xor_commuted2:
64-
; CHECK: # %bb.0:
65-
; CHECK-NEXT: movl %edi, %eax
66-
; CHECK-NEXT: orl %esi, %eax
67-
; CHECK-NEXT: # kill: def $al killed $al killed $eax
68-
; CHECK-NEXT: retq
93+
; X86-LABEL: add_and_xor_commuted2:
94+
; X86: # %bb.0:
95+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
96+
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
97+
; X86-NEXT: retl
98+
;
99+
; X64-LABEL: add_and_xor_commuted2:
100+
; X64: # %bb.0:
101+
; X64-NEXT: movl %edi, %eax
102+
; X64-NEXT: orl %esi, %eax
103+
; X64-NEXT: # kill: def $al killed $al killed $eax
104+
; X64-NEXT: retq
69105
%xor = xor i8 %x, -1
70106
%and = and i8 %xor, %y
71107
%add = add i8 %x, %and
72108
ret i8 %add
73109
}
74110

75111
define i8 @add_and_xor_commuted3(i8 %x, i8 %y) {
76-
; CHECK-LABEL: add_and_xor_commuted3:
77-
; CHECK: # %bb.0:
78-
; CHECK-NEXT: movl %edi, %eax
79-
; CHECK-NEXT: orl %esi, %eax
80-
; CHECK-NEXT: # kill: def $al killed $al killed $eax
81-
; CHECK-NEXT: retq
112+
; X86-LABEL: add_and_xor_commuted3:
113+
; X86: # %bb.0:
114+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
115+
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
116+
; X86-NEXT: retl
117+
;
118+
; X64-LABEL: add_and_xor_commuted3:
119+
; X64: # %bb.0:
120+
; X64-NEXT: movl %edi, %eax
121+
; X64-NEXT: orl %esi, %eax
122+
; X64-NEXT: # kill: def $al killed $al killed $eax
123+
; X64-NEXT: retq
82124
%xor = xor i8 %x, -1
83125
%and = and i8 %y, %xor
84126
%add = add i8 %x, %and
85127
ret i8 %add
86128
}
87129

88130
define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
89-
; CHECK-LABEL: add_and_xor_extra_use:
90-
; CHECK: # %bb.0:
91-
; CHECK-NEXT: pushq %rbp
92-
; CHECK-NEXT: pushq %r14
93-
; CHECK-NEXT: pushq %rbx
94-
; CHECK-NEXT: movl %esi, %ebx
95-
; CHECK-NEXT: movl %edi, %ebp
96-
; CHECK-NEXT: movl %ebp, %eax
97-
; CHECK-NEXT: notb %al
98-
; CHECK-NEXT: movzbl %al, %r14d
99-
; CHECK-NEXT: movl %r14d, %edi
100-
; CHECK-NEXT: callq use@PLT
101-
; CHECK-NEXT: andb %bl, %r14b
102-
; CHECK-NEXT: movzbl %r14b, %edi
103-
; CHECK-NEXT: callq use@PLT
104-
; CHECK-NEXT: orb %bpl, %bl
105-
; CHECK-NEXT: movl %ebx, %eax
106-
; CHECK-NEXT: popq %rbx
107-
; CHECK-NEXT: popq %r14
108-
; CHECK-NEXT: popq %rbp
109-
; CHECK-NEXT: retq
131+
; X86-LABEL: add_and_xor_extra_use:
132+
; X86: # %bb.0:
133+
; X86-NEXT: pushl %ebx
134+
; X86-NEXT: subl $8, %esp
135+
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ebx
136+
; X86-NEXT: movb {{[0-9]+}}(%esp), %bh
137+
; X86-NEXT: notb %bh
138+
; X86-NEXT: movzbl %bh, %eax
139+
; X86-NEXT: movl %eax, (%esp)
140+
; X86-NEXT: calll use@PLT
141+
; X86-NEXT: andb %bl, %bh
142+
; X86-NEXT: movzbl %bh, %eax
143+
; X86-NEXT: movl %eax, (%esp)
144+
; X86-NEXT: calll use@PLT
145+
; X86-NEXT: orb {{[0-9]+}}(%esp), %bl
146+
; X86-NEXT: movl %ebx, %eax
147+
; X86-NEXT: addl $8, %esp
148+
; X86-NEXT: popl %ebx
149+
; X86-NEXT: retl
150+
;
151+
; X64-LABEL: add_and_xor_extra_use:
152+
; X64: # %bb.0:
153+
; X64-NEXT: pushq %rbp
154+
; X64-NEXT: pushq %r14
155+
; X64-NEXT: pushq %rbx
156+
; X64-NEXT: movl %esi, %ebx
157+
; X64-NEXT: movl %edi, %ebp
158+
; X64-NEXT: movl %ebp, %eax
159+
; X64-NEXT: notb %al
160+
; X64-NEXT: movzbl %al, %r14d
161+
; X64-NEXT: movl %r14d, %edi
162+
; X64-NEXT: callq use@PLT
163+
; X64-NEXT: andb %bl, %r14b
164+
; X64-NEXT: movzbl %r14b, %edi
165+
; X64-NEXT: callq use@PLT
166+
; X64-NEXT: orb %bpl, %bl
167+
; X64-NEXT: movl %ebx, %eax
168+
; X64-NEXT: popq %rbx
169+
; X64-NEXT: popq %r14
170+
; X64-NEXT: popq %rbp
171+
; X64-NEXT: retq
110172
%xor = xor i8 %x, -1
111173
call void @use(i8 %xor)
112174
%and = and i8 %xor, %y
@@ -116,36 +178,60 @@ define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
116178
}
117179

118180
define i64 @add_and_xor_const(i64 %x) {
119-
; CHECK-LABEL: add_and_xor_const:
120-
; CHECK: # %bb.0:
121-
; CHECK-NEXT: movq %rdi, %rax
122-
; CHECK-NEXT: orq $1, %rax
123-
; CHECK-NEXT: retq
181+
; X86-LABEL: add_and_xor_const:
182+
; X86: # %bb.0:
183+
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
184+
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
185+
; X86-NEXT: orl $1, %eax
186+
; X86-NEXT: retl
187+
;
188+
; X64-LABEL: add_and_xor_const:
189+
; X64: # %bb.0:
190+
; X64-NEXT: movq %rdi, %rax
191+
; X64-NEXT: orq $1, %rax
192+
; X64-NEXT: retq
124193
%xor = xor i64 %x, -1
125194
%and = and i64 %xor, 1
126195
%add = add i64 %and, %x
127196
ret i64 %add
128197
}
129198

130199
define i64 @add_and_xor_const_wrong_op(i64 %x, i64 %y) {
131-
; CHECK-LABEL: add_and_xor_const_wrong_op:
132-
; CHECK: # %bb.0:
133-
; CHECK-NEXT: notl %esi
134-
; CHECK-NEXT: andl $1, %esi
135-
; CHECK-NEXT: leaq (%rsi,%rdi), %rax
136-
; CHECK-NEXT: retq
200+
; X86-LABEL: add_and_xor_const_wrong_op:
201+
; X86: # %bb.0:
202+
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
203+
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
204+
; X86-NEXT: notl %eax
205+
; X86-NEXT: andl $1, %eax
206+
; X86-NEXT: addl {{[0-9]+}}(%esp), %eax
207+
; X86-NEXT: adcl $0, %edx
208+
; X86-NEXT: retl
209+
;
210+
; X64-LABEL: add_and_xor_const_wrong_op:
211+
; X64: # %bb.0:
212+
; X64-NEXT: notl %esi
213+
; X64-NEXT: andl $1, %esi
214+
; X64-NEXT: leaq (%rsi,%rdi), %rax
215+
; X64-NEXT: retq
137216
%xor = xor i64 %y, -1
138217
%and = and i64 %xor, 1
139218
%add = add i64 %and, %x
140219
ret i64 %add
141220
}
142221

143222
define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
144-
; CHECK-LABEL: add_and_xor_const_explicit_trunc:
145-
; CHECK: # %bb.0:
146-
; CHECK-NEXT: movq %rdi, %rax
147-
; CHECK-NEXT: orq $1, %rax
148-
; CHECK-NEXT: retq
223+
; X86-LABEL: add_and_xor_const_explicit_trunc:
224+
; X86: # %bb.0:
225+
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
226+
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
227+
; X86-NEXT: orl $1, %eax
228+
; X86-NEXT: retl
229+
;
230+
; X64-LABEL: add_and_xor_const_explicit_trunc:
231+
; X64: # %bb.0:
232+
; X64-NEXT: movq %rdi, %rax
233+
; X64-NEXT: orq $1, %rax
234+
; X64-NEXT: retq
149235
%trunc = trunc i64 %x to i32
150236
%xor = xor i32 %trunc, -1
151237
%ext = sext i32 %xor to i64
@@ -155,15 +241,27 @@ define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
155241
}
156242

157243
define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
158-
; CHECK-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
159-
; CHECK: # %bb.0:
160-
; CHECK-NEXT: movl %edi, %eax
161-
; CHECK-NEXT: notl %eax
162-
; CHECK-NEXT: movslq %eax, %rcx
163-
; CHECK-NEXT: movabsq $4294967297, %rax # imm = 0x100000001
164-
; CHECK-NEXT: andq %rcx, %rax
165-
; CHECK-NEXT: addq %rdi, %rax
166-
; CHECK-NEXT: retq
244+
; X86-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
245+
; X86: # %bb.0:
246+
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
247+
; X86-NEXT: movl %ecx, %eax
248+
; X86-NEXT: notl %eax
249+
; X86-NEXT: movl %eax, %edx
250+
; X86-NEXT: shrl $31, %edx
251+
; X86-NEXT: andl $1, %eax
252+
; X86-NEXT: addl %ecx, %eax
253+
; X86-NEXT: adcl {{[0-9]+}}(%esp), %edx
254+
; X86-NEXT: retl
255+
;
256+
; X64-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
257+
; X64: # %bb.0:
258+
; X64-NEXT: movl %edi, %eax
259+
; X64-NEXT: notl %eax
260+
; X64-NEXT: movslq %eax, %rcx
261+
; X64-NEXT: movabsq $4294967297, %rax # imm = 0x100000001
262+
; X64-NEXT: andq %rcx, %rax
263+
; X64-NEXT: addq %rdi, %rax
264+
; X64-NEXT: retq
167265
%trunc = trunc i64 %x to i32
168266
%xor = xor i32 %trunc, -1
169267
%ext = sext i32 %xor to i64
@@ -173,11 +271,17 @@ define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
173271
}
174272

175273
define ptr @gep_and_xor(ptr %a, i64 %m) {
176-
; CHECK-LABEL: gep_and_xor:
177-
; CHECK: # %bb.0:
178-
; CHECK-NEXT: movq %rdi, %rax
179-
; CHECK-NEXT: orq %rsi, %rax
180-
; CHECK-NEXT: retq
274+
; X86-LABEL: gep_and_xor:
275+
; X86: # %bb.0:
276+
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
277+
; X86-NEXT: orl {{[0-9]+}}(%esp), %eax
278+
; X86-NEXT: retl
279+
;
280+
; X64-LABEL: gep_and_xor:
281+
; X64: # %bb.0:
282+
; X64-NEXT: movq %rdi, %rax
283+
; X64-NEXT: orq %rsi, %rax
284+
; X64-NEXT: retq
181285
%old = ptrtoint ptr %a to i64
182286
%old.not = and i64 %old, %m
183287
%offset = xor i64 %old.not, %m
@@ -186,11 +290,17 @@ define ptr @gep_and_xor(ptr %a, i64 %m) {
186290
}
187291

188292
define ptr @gep_and_xor_const(ptr %a) {
189-
; CHECK-LABEL: gep_and_xor_const:
190-
; CHECK: # %bb.0:
191-
; CHECK-NEXT: movq %rdi, %rax
192-
; CHECK-NEXT: orq $1, %rax
193-
; CHECK-NEXT: retq
293+
; X86-LABEL: gep_and_xor_const:
294+
; X86: # %bb.0:
295+
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
296+
; X86-NEXT: orl $1, %eax
297+
; X86-NEXT: retl
298+
;
299+
; X64-LABEL: gep_and_xor_const:
300+
; X64: # %bb.0:
301+
; X64-NEXT: movq %rdi, %rax
302+
; X64-NEXT: orq $1, %rax
303+
; X64-NEXT: retq
194304
%old = ptrtoint ptr %a to i64
195305
%old.not = and i64 %old, 1
196306
%offset = xor i64 %old.not, 1

0 commit comments

Comments (0)