Commit 31a8394
x86: consolidate xchg and xadd macros
They both have a basic "put new value in location, return old value"
pattern, so they can use the same macro easily.

Signed-off-by: Jeremy Fitzhardinge <[email protected]>
1 parent 3d94ae0 commit 31a8394
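
Both instructions share the same shape: the new value goes into the memory
location, and the caller gets back whatever was there before. As a quick
illustration of that shared semantics, here is a minimal user-space sketch
using C11 atomics in place of the kernel's inline asm (variable names are
ours, purely illustrative, not from the patch):

#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
	_Atomic int v = 10;

	/* xchg-style: store 42, return the previous contents (10). */
	int old_xchg = atomic_exchange(&v, 42);

	/* xadd-style: add 5, return the value before the add (42). */
	int old_xadd = atomic_fetch_add(&v, 5);

	printf("%d %d %d\n", old_xchg, old_xadd, atomic_load(&v)); /* 10 42 47 */
	return 0;
}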

1 file changed: 36 additions, 78 deletions

arch/x86/include/asm/cmpxchg.h
@@ -33,60 +33,47 @@ extern void __add_wrong_size(void)
 #define __X86_CASE_Q	-1		/* sizeof will never return -1 */
 #endif
 
+/*
+ * An exchange-type operation, which takes a value and a pointer, and
+ * returns the old value.
+ */
+#define __xchg_op(ptr, arg, op, lock)					\
+	({								\
+		__typeof__ (*(ptr)) __ret = (arg);			\
+		switch (sizeof(*(ptr))) {				\
+		case __X86_CASE_B:					\
+			asm volatile (lock #op "b %b0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_W:					\
+			asm volatile (lock #op "w %w0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_L:					\
+			asm volatile (lock #op "l %0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_Q:					\
+			asm volatile (lock #op "q %q0, %1\n"		\
+				      : "+r" (__ret), "+m" (*(ptr))	\
+				      : : "memory", "cc");		\
+			break;						\
+		default:						\
+			__ ## op ## _wrong_size();			\
+		}							\
+		__ret;							\
+	})
+
 /*
  * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
  * Since this is generally used to protect other memory information, we
  * use "asm volatile" and "memory" clobbers to prevent gcc from moving
  * information around.
  */
-#define __xchg(x, ptr, size)						\
-({									\
-	__typeof(*(ptr)) __x = (x);					\
-	switch (size) {							\
-	case __X86_CASE_B:						\
-	{								\
-		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
-		asm volatile("xchgb %0,%1"				\
-			     : "=q" (__x), "+m" (*__ptr)		\
-			     : "0" (__x)				\
-			     : "memory");				\
-		break;							\
-	}								\
-	case __X86_CASE_W:						\
-	{								\
-		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
-		asm volatile("xchgw %0,%1"				\
-			     : "=r" (__x), "+m" (*__ptr)		\
-			     : "0" (__x)				\
-			     : "memory");				\
-		break;							\
-	}								\
-	case __X86_CASE_L:						\
-	{								\
-		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
-		asm volatile("xchgl %0,%1"				\
-			     : "=r" (__x), "+m" (*__ptr)		\
-			     : "0" (__x)				\
-			     : "memory");				\
-		break;							\
-	}								\
-	case __X86_CASE_Q:						\
-	{								\
-		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
-		asm volatile("xchgq %0,%1"				\
-			     : "=r" (__x), "+m" (*__ptr)		\
-			     : "0" (__x)				\
-			     : "memory");				\
-		break;							\
-	}								\
-	default:							\
-		__xchg_wrong_size();					\
-	}								\
-	__x;								\
-})
-
-#define xchg(ptr, v)							\
-	__xchg((v), (ptr), sizeof(*ptr))
+#define xchg(ptr, v)	__xchg_op((ptr), (v), xchg, "")
 
 /*
  * Atomic compare and exchange.  Compare OLD with MEM, if identical,
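
To see what the consolidated macro buys, consider a hand-expansion (ours,
not compiler output) of xchg(&val, new) for a 4-byte val: sizeof selects
the __X86_CASE_L arm, #op pastes the xchg token into the "xchgl" mnemonic,
and the lock argument is the empty string because xchg carries an implicit
lock on x86. Wrapped into a compilable helper whose name is ours, not the
kernel's:

/* Approximate hand-expansion of xchg(&val, new) for a 4-byte val
 * (the real macro uses a GNU statement expression instead). */
unsigned int xchg_u32(unsigned int *val, unsigned int new)
{
	unsigned int __ret = new;

	asm volatile ("xchgl %0, %1\n"
		      : "+r" (__ret), "+m" (*val)
		      : : "memory", "cc");
	return __ret;	/* the old value of *val */
}

An unsupported operand size falls through to __ ## op ## _wrong_size(),
here __xchg_wrong_size(), which is declared but never defined, so a bad
use shows up as a build failure rather than silently miscompiling.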
@@ -167,36 +154,6 @@ extern void __add_wrong_size(void)
 	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
 #endif
 
-#define __xadd(ptr, inc, lock)						\
-	({								\
-		__typeof__ (*(ptr)) __ret = (inc);			\
-		switch (sizeof(*(ptr))) {				\
-		case __X86_CASE_B:					\
-			asm volatile (lock "xaddb %b0, %1\n"		\
-				      : "+r" (__ret), "+m" (*(ptr))	\
-				      : : "memory", "cc");		\
-			break;						\
-		case __X86_CASE_W:					\
-			asm volatile (lock "xaddw %w0, %1\n"		\
-				      : "+r" (__ret), "+m" (*(ptr))	\
-				      : : "memory", "cc");		\
-			break;						\
-		case __X86_CASE_L:					\
-			asm volatile (lock "xaddl %0, %1\n"		\
-				      : "+r" (__ret), "+m" (*(ptr))	\
-				      : : "memory", "cc");		\
-			break;						\
-		case __X86_CASE_Q:					\
-			asm volatile (lock "xaddq %q0, %1\n"		\
-				      : "+r" (__ret), "+m" (*(ptr))	\
-				      : : "memory", "cc");		\
-			break;						\
-		default:						\
-			__xadd_wrong_size();				\
-		}							\
-		__ret;							\
-	})
-
 /*
  * xadd() adds "inc" to "*ptr" and atomically returns the previous
  * value of "*ptr".
@@ -205,6 +162,7 @@ extern void __add_wrong_size(void)
  * xadd_sync() is always locked
  * xadd_local() is never locked
  */
+#define __xadd(ptr, inc, lock)	__xchg_op((ptr), (inc), xadd, lock)
 #define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
 #define xadd_sync(ptr, inc)	__xadd((ptr), (inc), "lock; ")
 #define xadd_local(ptr, inc)	__xadd((ptr), (inc), "")
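
Since __xadd() now routes through __xchg_op(), the three flavours differ
only in the string threaded through as the lock argument. A hand-expanded
sketch (ours, under the assumption that LOCK_PREFIX reduces to a "lock;"
prefix on SMP kernels; on UP builds it is patched away):

/* Approximate hand-expansion of xadd(&ctr, 1) for a 4-byte ctr on SMP.
 * xadd_u32 is our illustrative name, not a kernel helper. */
unsigned int xadd_u32(unsigned int *ctr, unsigned int inc)
{
	unsigned int __ret = inc;

	asm volatile ("lock; xaddl %0, %1\n"
		      : "+r" (__ret), "+m" (*ctr)
		      : : "memory", "cc");
	return __ret;	/* *ctr's value before the add */
}

xadd_local() passes "" instead and so compiles to a bare xaddl, which is
atomic only against interrupts on the local CPU, not against other
processors.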
