7
7
declare void @use (i32 )
8
8
9
9
define i32 @fold_and_xor_neg_v1_32 (i32 %x , i32 %y ) {
10
- ; X86-LABEL: fold_and_xor_neg_v1_32:
11
- ; X86: # %bb.0:
12
- ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
13
- ; X86-NEXT: movl %ecx, %eax
14
- ; X86-NEXT: negl %eax
15
- ; X86-NEXT: xorl %ecx, %eax
16
- ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
17
- ; X86-NEXT: retl
10
+ ; X86-NOBMI-LABEL: fold_and_xor_neg_v1_32:
11
+ ; X86-NOBMI: # %bb.0:
12
+ ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
13
+ ; X86-NOBMI-NEXT: movl %ecx, %eax
14
+ ; X86-NOBMI-NEXT: negl %eax
15
+ ; X86-NOBMI-NEXT: xorl %ecx, %eax
16
+ ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
17
+ ; X86-NOBMI-NEXT: retl
18
18
;
19
- ; X64-LABEL: fold_and_xor_neg_v1_32:
20
- ; X64: # %bb.0:
21
- ; X64-NEXT: movl %edi, %eax
22
- ; X64-NEXT: negl %eax
23
- ; X64-NEXT: xorl %edi, %eax
24
- ; X64-NEXT: andl %esi, %eax
25
- ; X64-NEXT: retq
19
+ ; X86-BMI-LABEL: fold_and_xor_neg_v1_32:
20
+ ; X86-BMI: # %bb.0:
21
+ ; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
22
+ ; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
23
+ ; X86-BMI-NEXT: retl
24
+ ;
25
+ ; X64-NOBMI-LABEL: fold_and_xor_neg_v1_32:
26
+ ; X64-NOBMI: # %bb.0:
27
+ ; X64-NOBMI-NEXT: movl %edi, %eax
28
+ ; X64-NOBMI-NEXT: negl %eax
29
+ ; X64-NOBMI-NEXT: xorl %edi, %eax
30
+ ; X64-NOBMI-NEXT: andl %esi, %eax
31
+ ; X64-NOBMI-NEXT: retq
32
+ ;
33
+ ; X64-BMI-LABEL: fold_and_xor_neg_v1_32:
34
+ ; X64-BMI: # %bb.0:
35
+ ; X64-BMI-NEXT: blsmskl %edi, %eax
36
+ ; X64-BMI-NEXT: andnl %esi, %eax, %eax
37
+ ; X64-BMI-NEXT: retq
26
38
%neg = sub i32 0 , %x
27
39
%xor = xor i32 %x , %neg
28
40
%and = and i32 %xor , %y
29
41
ret i32 %and
30
42
}
31
43
32
44
define i32 @fold_and_xor_neg_v2_32 (i32 %x , i32 %y ) {
33
- ; X86-LABEL: fold_and_xor_neg_v2_32:
34
- ; X86: # %bb.0:
35
- ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
36
- ; X86-NEXT: movl %ecx, %eax
37
- ; X86-NEXT: negl %eax
38
- ; X86-NEXT: xorl %ecx, %eax
39
- ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
40
- ; X86-NEXT: retl
45
+ ; X86-NOBMI-LABEL: fold_and_xor_neg_v2_32:
46
+ ; X86-NOBMI: # %bb.0:
47
+ ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
48
+ ; X86-NOBMI-NEXT: movl %ecx, %eax
49
+ ; X86-NOBMI-NEXT: negl %eax
50
+ ; X86-NOBMI-NEXT: xorl %ecx, %eax
51
+ ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
52
+ ; X86-NOBMI-NEXT: retl
41
53
;
42
- ; X64-LABEL: fold_and_xor_neg_v2_32:
43
- ; X64: # %bb.0:
44
- ; X64-NEXT: movl %edi, %eax
45
- ; X64-NEXT: negl %eax
46
- ; X64-NEXT: xorl %edi, %eax
47
- ; X64-NEXT: andl %esi, %eax
48
- ; X64-NEXT: retq
54
+ ; X86-BMI-LABEL: fold_and_xor_neg_v2_32:
55
+ ; X86-BMI: # %bb.0:
56
+ ; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
57
+ ; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
58
+ ; X86-BMI-NEXT: retl
59
+ ;
60
+ ; X64-NOBMI-LABEL: fold_and_xor_neg_v2_32:
61
+ ; X64-NOBMI: # %bb.0:
62
+ ; X64-NOBMI-NEXT: movl %edi, %eax
63
+ ; X64-NOBMI-NEXT: negl %eax
64
+ ; X64-NOBMI-NEXT: xorl %edi, %eax
65
+ ; X64-NOBMI-NEXT: andl %esi, %eax
66
+ ; X64-NOBMI-NEXT: retq
67
+ ;
68
+ ; X64-BMI-LABEL: fold_and_xor_neg_v2_32:
69
+ ; X64-BMI: # %bb.0:
70
+ ; X64-BMI-NEXT: blsmskl %edi, %eax
71
+ ; X64-BMI-NEXT: andnl %esi, %eax, %eax
72
+ ; X64-BMI-NEXT: retq
49
73
%neg = sub i32 0 , %x
50
74
%xor = xor i32 %x , %neg
51
75
%and = and i32 %y , %xor
52
76
ret i32 %and
53
77
}
54
78
55
79
define i32 @fold_and_xor_neg_v3_32 (i32 %x , i32 %y ) {
56
- ; X86-LABEL: fold_and_xor_neg_v3_32:
57
- ; X86: # %bb.0:
58
- ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
59
- ; X86-NEXT: movl %ecx, %eax
60
- ; X86-NEXT: negl %eax
61
- ; X86-NEXT: xorl %ecx, %eax
62
- ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
63
- ; X86-NEXT: retl
80
+ ; X86-NOBMI-LABEL: fold_and_xor_neg_v3_32:
81
+ ; X86-NOBMI: # %bb.0:
82
+ ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
83
+ ; X86-NOBMI-NEXT: movl %ecx, %eax
84
+ ; X86-NOBMI-NEXT: negl %eax
85
+ ; X86-NOBMI-NEXT: xorl %ecx, %eax
86
+ ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
87
+ ; X86-NOBMI-NEXT: retl
64
88
;
65
- ; X64-LABEL: fold_and_xor_neg_v3_32:
66
- ; X64: # %bb.0:
67
- ; X64-NEXT: movl %edi, %eax
68
- ; X64-NEXT: negl %eax
69
- ; X64-NEXT: xorl %edi, %eax
70
- ; X64-NEXT: andl %esi, %eax
71
- ; X64-NEXT: retq
89
+ ; X86-BMI-LABEL: fold_and_xor_neg_v3_32:
90
+ ; X86-BMI: # %bb.0:
91
+ ; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
92
+ ; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
93
+ ; X86-BMI-NEXT: retl
94
+ ;
95
+ ; X64-NOBMI-LABEL: fold_and_xor_neg_v3_32:
96
+ ; X64-NOBMI: # %bb.0:
97
+ ; X64-NOBMI-NEXT: movl %edi, %eax
98
+ ; X64-NOBMI-NEXT: negl %eax
99
+ ; X64-NOBMI-NEXT: xorl %edi, %eax
100
+ ; X64-NOBMI-NEXT: andl %esi, %eax
101
+ ; X64-NOBMI-NEXT: retq
102
+ ;
103
+ ; X64-BMI-LABEL: fold_and_xor_neg_v3_32:
104
+ ; X64-BMI: # %bb.0:
105
+ ; X64-BMI-NEXT: blsmskl %edi, %eax
106
+ ; X64-BMI-NEXT: andnl %esi, %eax, %eax
107
+ ; X64-BMI-NEXT: retq
72
108
%neg = sub i32 0 , %x
73
109
%xor = xor i32 %neg , %x
74
110
%and = and i32 %xor , %y
75
111
ret i32 %and
76
112
}
77
113
78
114
define i32 @fold_and_xor_neg_v4_32 (i32 %x , i32 %y ) {
79
- ; X86-LABEL: fold_and_xor_neg_v4_32:
80
- ; X86: # %bb.0:
81
- ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
82
- ; X86-NEXT: movl %ecx, %eax
83
- ; X86-NEXT: negl %eax
84
- ; X86-NEXT: xorl %ecx, %eax
85
- ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
86
- ; X86-NEXT: retl
115
+ ; X86-NOBMI-LABEL: fold_and_xor_neg_v4_32:
116
+ ; X86-NOBMI: # %bb.0:
117
+ ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
118
+ ; X86-NOBMI-NEXT: movl %ecx, %eax
119
+ ; X86-NOBMI-NEXT: negl %eax
120
+ ; X86-NOBMI-NEXT: xorl %ecx, %eax
121
+ ; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
122
+ ; X86-NOBMI-NEXT: retl
87
123
;
88
- ; X64-LABEL: fold_and_xor_neg_v4_32:
89
- ; X64: # %bb.0:
90
- ; X64-NEXT: movl %edi, %eax
91
- ; X64-NEXT: negl %eax
92
- ; X64-NEXT: xorl %edi, %eax
93
- ; X64-NEXT: andl %esi, %eax
94
- ; X64-NEXT: retq
124
+ ; X86-BMI-LABEL: fold_and_xor_neg_v4_32:
125
+ ; X86-BMI: # %bb.0:
126
+ ; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
127
+ ; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
128
+ ; X86-BMI-NEXT: retl
129
+ ;
130
+ ; X64-NOBMI-LABEL: fold_and_xor_neg_v4_32:
131
+ ; X64-NOBMI: # %bb.0:
132
+ ; X64-NOBMI-NEXT: movl %edi, %eax
133
+ ; X64-NOBMI-NEXT: negl %eax
134
+ ; X64-NOBMI-NEXT: xorl %edi, %eax
135
+ ; X64-NOBMI-NEXT: andl %esi, %eax
136
+ ; X64-NOBMI-NEXT: retq
137
+ ;
138
+ ; X64-BMI-LABEL: fold_and_xor_neg_v4_32:
139
+ ; X64-BMI: # %bb.0:
140
+ ; X64-BMI-NEXT: blsmskl %edi, %eax
141
+ ; X64-BMI-NEXT: andnl %esi, %eax, %eax
142
+ ; X64-BMI-NEXT: retq
95
143
%neg = sub i32 0 , %x
96
144
%xor = xor i32 %neg , %x
97
145
%and = and i32 %y , %xor
@@ -118,13 +166,19 @@ define i64 @fold_and_xor_neg_v1_64(i64 %x, i64 %y) {
118
166
; X86-NEXT: .cfi_def_cfa_offset 4
119
167
; X86-NEXT: retl
120
168
;
121
- ; X64-LABEL: fold_and_xor_neg_v1_64:
122
- ; X64: # %bb.0:
123
- ; X64-NEXT: movq %rdi, %rax
124
- ; X64-NEXT: negq %rax
125
- ; X64-NEXT: xorq %rdi, %rax
126
- ; X64-NEXT: andq %rsi, %rax
127
- ; X64-NEXT: retq
169
+ ; X64-NOBMI-LABEL: fold_and_xor_neg_v1_64:
170
+ ; X64-NOBMI: # %bb.0:
171
+ ; X64-NOBMI-NEXT: movq %rdi, %rax
172
+ ; X64-NOBMI-NEXT: negq %rax
173
+ ; X64-NOBMI-NEXT: xorq %rdi, %rax
174
+ ; X64-NOBMI-NEXT: andq %rsi, %rax
175
+ ; X64-NOBMI-NEXT: retq
176
+ ;
177
+ ; X64-BMI-LABEL: fold_and_xor_neg_v1_64:
178
+ ; X64-BMI: # %bb.0:
179
+ ; X64-BMI-NEXT: blsmskq %rdi, %rax
180
+ ; X64-BMI-NEXT: andnq %rsi, %rax, %rax
181
+ ; X64-BMI-NEXT: retq
128
182
%neg = sub i64 0 , %x
129
183
%xor = xor i64 %x , %neg
130
184
%and = and i64 %xor , %y
@@ -290,8 +344,3 @@ define i32 @fold_and_xor_neg_v1_32_no_blsmsk_negative(i32 %x, i32 %y, i32 %z) {
290
344
%and = and i32 %xor , %y
291
345
ret i32 %and
292
346
}
293
- ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
294
- ; X64-BMI: {{.*}}
295
- ; X64-NOBMI: {{.*}}
296
- ; X86-BMI: {{.*}}
297
- ; X86-NOBMI: {{.*}}
0 commit comments