@@ -33,60 +33,47 @@ extern void __add_wrong_size(void)
#define __X86_CASE_Q	-1		/* sizeof will never return -1 */
#endif

+ /*
+  * An exchange-type operation, which takes a value and a pointer, and
+  * returns the old value.
+  */
+ #define __xchg_op(ptr, arg, op, lock)					\
+ 	({								\
+ 		__typeof__ (*(ptr)) __ret = (arg);			\
+ 		switch (sizeof(*(ptr))) {				\
+ 		case __X86_CASE_B:					\
+ 			asm volatile (lock #op "b %b0, %1\n"		\
+ 				      : "+r" (__ret), "+m" (*(ptr))	\
+ 				      : : "memory", "cc");		\
+ 			break;						\
+ 		case __X86_CASE_W:					\
+ 			asm volatile (lock #op "w %w0, %1\n"		\
+ 				      : "+r" (__ret), "+m" (*(ptr))	\
+ 				      : : "memory", "cc");		\
+ 			break;						\
+ 		case __X86_CASE_L:					\
+ 			asm volatile (lock #op "l %0, %1\n"		\
+ 				      : "+r" (__ret), "+m" (*(ptr))	\
+ 				      : : "memory", "cc");		\
+ 			break;						\
+ 		case __X86_CASE_Q:					\
+ 			asm volatile (lock #op "q %q0, %1\n"		\
+ 				      : "+r" (__ret), "+m" (*(ptr))	\
+ 				      : : "memory", "cc");		\
+ 			break;						\
+ 		default:						\
+ 			__ ## op ## _wrong_size();			\
+ 		}							\
+ 		__ret;							\
+ 	})
+
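For reference, here is a rough user-space sketch (hypothetical names, assuming GCC inline asm on x86-64; not part of the patch) of what the __X86_CASE_L arm of __xchg_op expands to once op is xchg and the lock prefix string is empty:

	#include <stdint.h>

	/* Hypothetical analogue of __xchg_op's 4-byte case with op = xchg
	 * and an empty lock prefix (xchg implies LOCK on its own). */
	static inline uint32_t my_xchg32(volatile uint32_t *ptr, uint32_t val)
	{
		uint32_t ret = val;
		asm volatile("xchgl %0, %1\n"
			     : "+r" (ret), "+m" (*ptr)
			     : : "memory", "cc");
		return ret;	/* previous value of *ptr */
	}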
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
- #define __xchg(x, ptr, size)						\
- ({									\
- 	__typeof(*(ptr)) __x = (x);					\
- 	switch (size) {							\
- 	case __X86_CASE_B:						\
- 	{								\
- 		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
- 		asm volatile("xchgb %0,%1"				\
- 			     : "=q" (__x), "+m" (*__ptr)		\
- 			     : "0" (__x)				\
- 			     : "memory");				\
- 		break;							\
- 	}								\
- 	case __X86_CASE_W:						\
- 	{								\
- 		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
- 		asm volatile("xchgw %0,%1"				\
- 			     : "=r" (__x), "+m" (*__ptr)		\
- 			     : "0" (__x)				\
- 			     : "memory");				\
- 		break;							\
- 	}								\
- 	case __X86_CASE_L:						\
- 	{								\
- 		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
- 		asm volatile("xchgl %0,%1"				\
- 			     : "=r" (__x), "+m" (*__ptr)		\
- 			     : "0" (__x)				\
- 			     : "memory");				\
- 		break;							\
- 	}								\
- 	case __X86_CASE_Q:						\
- 	{								\
- 		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
- 		asm volatile("xchgq %0,%1"				\
- 			     : "=r" (__x), "+m" (*__ptr)		\
- 			     : "0" (__x)				\
- 			     : "memory");				\
- 		break;							\
- 	}								\
- 	default:							\
- 		__xchg_wrong_size();					\
- 	}								\
- 	__x;								\
- })
-
- #define xchg(ptr, v)							\
- 	__xchg((v), (ptr), sizeof(*ptr))
+ #define xchg(ptr, v)	__xchg_op((ptr), (v), xchg, "")
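As a usage illustration (an assumed caller, not part of this file): because xchg() returns the previous value, it works as a test-and-set primitive, for example in a toy spinlock:

	/* Hypothetical test-and-set lock built on xchg(); a return of 0
	 * means we flipped the lock word from free (0) to held (1). */
	static inline void example_spin_lock(unsigned int *lock)
	{
		while (xchg(lock, 1) != 0)
			cpu_relax();	/* spin until the old value was 0 */
	}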
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
@@ -167,36 +154,6 @@ extern void __add_wrong_size(void)
	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
#endif

- #define __xadd(ptr, inc, lock)					\
- 	({								\
- 		__typeof__ (*(ptr)) __ret = (inc);			\
- 		switch (sizeof(*(ptr))) {				\
- 		case __X86_CASE_B:					\
- 			asm volatile (lock "xaddb %b0, %1\n"		\
- 				      : "+r" (__ret), "+m" (*(ptr))	\
- 				      : : "memory", "cc");		\
- 			break;						\
- 		case __X86_CASE_W:					\
- 			asm volatile (lock "xaddw %w0, %1\n"		\
- 				      : "+r" (__ret), "+m" (*(ptr))	\
- 				      : : "memory", "cc");		\
- 			break;						\
- 		case __X86_CASE_L:					\
- 			asm volatile (lock "xaddl %0, %1\n"		\
- 				      : "+r" (__ret), "+m" (*(ptr))	\
- 				      : : "memory", "cc");		\
- 			break;						\
- 		case __X86_CASE_Q:					\
- 			asm volatile (lock "xaddq %q0, %1\n"		\
- 				      : "+r" (__ret), "+m" (*(ptr))	\
- 				      : : "memory", "cc");		\
- 			break;						\
- 		default:						\
- 			__xadd_wrong_size();				\
- 		}							\
- 		__ret;							\
- 	})
-
/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
@@ -205,6 +162,7 @@ extern void __add_wrong_size(void)
 * xadd_sync() is always locked
 * xadd_local() is never locked
 */
+ #define __xadd(ptr, inc, lock)	__xchg_op((ptr), (inc), xadd, lock)
#define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
#define xadd_sync(ptr, inc)	__xadd((ptr), (inc), "lock; ")
#define xadd_local(ptr, inc)	__xadd((ptr), (inc), "")
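As one assumed consumer (not in this patch), xadd()'s fetch-and-add semantics give an atomic post-increment along these lines:

	/* Hypothetical fetch-and-increment via xadd(): atomically adds 1
	 * to v->counter and returns the value it held beforehand. */
	static inline int example_fetch_inc(atomic_t *v)
	{
		return xadd(&v->counter, 1);
	}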