Skip to content
This repository was archived by the owner on Nov 8, 2023. It is now read-only.

Commit d12157e

Browse files
mrutland-arm authored and Peter Zijlstra committed
locking/atomic: make atomic*_{cmp,}xchg optional
Most architectures define the atomic/atomic64 xchg and cmpxchg operations in terms of arch_xchg and arch_cmpxchg respectively. Add fallbacks for these cases and remove the trivial cases from arch code. On some architectures the existing definitions are kept as these are used to build other arch_atomic*() operations. Signed-off-by: Mark Rutland <[email protected]> Signed-off-by: Peter Zijlstra (Intel) <[email protected]> Reviewed-by: Kees Cook <[email protected]> Link: https://lore.kernel.org/r/[email protected]
1 parent a7bafa7 commit d12157e

File tree

23 files changed

+179
-265
lines changed

23 files changed

+179
-265
lines changed

arch/alpha/include/asm/atomic.h

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -200,16 +200,6 @@ ATOMIC_OPS(xor, xor)
200200
#undef ATOMIC_OP_RETURN
201201
#undef ATOMIC_OP
202202

203-
#define arch_atomic64_cmpxchg(v, old, new) \
204-
(arch_cmpxchg(&((v)->counter), old, new))
205-
#define arch_atomic64_xchg(v, new) \
206-
(arch_xchg(&((v)->counter), new))
207-
208-
#define arch_atomic_cmpxchg(v, old, new) \
209-
(arch_cmpxchg(&((v)->counter), old, new))
210-
#define arch_atomic_xchg(v, new) \
211-
(arch_xchg(&((v)->counter), new))
212-
213203
/**
214204
* arch_atomic_fetch_add_unless - add unless the number is a given value
215205
* @v: pointer of type atomic_t

arch/arc/include/asm/atomic.h

Lines changed: 0 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -22,30 +22,6 @@
2222
#include <asm/atomic-spinlock.h>
2323
#endif
2424

25-
#define arch_atomic_cmpxchg(v, o, n) \
26-
({ \
27-
arch_cmpxchg(&((v)->counter), (o), (n)); \
28-
})
29-
30-
#ifdef arch_cmpxchg_relaxed
31-
#define arch_atomic_cmpxchg_relaxed(v, o, n) \
32-
({ \
33-
arch_cmpxchg_relaxed(&((v)->counter), (o), (n)); \
34-
})
35-
#endif
36-
37-
#define arch_atomic_xchg(v, n) \
38-
({ \
39-
arch_xchg(&((v)->counter), (n)); \
40-
})
41-
42-
#ifdef arch_xchg_relaxed
43-
#define arch_atomic_xchg_relaxed(v, n) \
44-
({ \
45-
arch_xchg_relaxed(&((v)->counter), (n)); \
46-
})
47-
#endif
48-
4925
/*
5026
* 64-bit atomics
5127
*/

arch/arc/include/asm/atomic64-arcv2.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -159,6 +159,7 @@ arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)
159159

160160
return prev;
161161
}
162+
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
162163

163164
static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
164165
{
@@ -179,6 +180,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
179180

180181
return prev;
181182
}
183+
#define arch_atomic64_xchg arch_atomic64_xchg
182184

183185
/**
184186
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive

arch/arm/include/asm/atomic.h

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -210,6 +210,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
210210

211211
return ret;
212212
}
213+
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
213214

214215
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
215216

@@ -240,8 +241,6 @@ ATOMIC_OPS(xor, ^=, eor)
240241
#undef ATOMIC_OP_RETURN
241242
#undef ATOMIC_OP
242243

243-
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
244-
245244
#ifndef CONFIG_GENERIC_ATOMIC64
246245
typedef struct {
247246
s64 counter;

arch/arm64/include/asm/atomic.h

Lines changed: 0 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -142,24 +142,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
142142
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
143143
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
144144

145-
#define arch_atomic_xchg_relaxed(v, new) \
146-
arch_xchg_relaxed(&((v)->counter), (new))
147-
#define arch_atomic_xchg_acquire(v, new) \
148-
arch_xchg_acquire(&((v)->counter), (new))
149-
#define arch_atomic_xchg_release(v, new) \
150-
arch_xchg_release(&((v)->counter), (new))
151-
#define arch_atomic_xchg(v, new) \
152-
arch_xchg(&((v)->counter), (new))
153-
154-
#define arch_atomic_cmpxchg_relaxed(v, old, new) \
155-
arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
156-
#define arch_atomic_cmpxchg_acquire(v, old, new) \
157-
arch_cmpxchg_acquire(&((v)->counter), (old), (new))
158-
#define arch_atomic_cmpxchg_release(v, old, new) \
159-
arch_cmpxchg_release(&((v)->counter), (old), (new))
160-
#define arch_atomic_cmpxchg(v, old, new) \
161-
arch_cmpxchg(&((v)->counter), (old), (new))
162-
163145
#define arch_atomic_andnot arch_atomic_andnot
164146

165147
/*
@@ -209,16 +191,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
209191
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
210192
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
211193

212-
#define arch_atomic64_xchg_relaxed arch_atomic_xchg_relaxed
213-
#define arch_atomic64_xchg_acquire arch_atomic_xchg_acquire
214-
#define arch_atomic64_xchg_release arch_atomic_xchg_release
215-
#define arch_atomic64_xchg arch_atomic_xchg
216-
217-
#define arch_atomic64_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
218-
#define arch_atomic64_cmpxchg_acquire arch_atomic_cmpxchg_acquire
219-
#define arch_atomic64_cmpxchg_release arch_atomic_cmpxchg_release
220-
#define arch_atomic64_cmpxchg arch_atomic_cmpxchg
221-
222194
#define arch_atomic64_andnot arch_atomic64_andnot
223195

224196
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

arch/csky/include/asm/atomic.h

Lines changed: 0 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -195,41 +195,6 @@ arch_atomic_dec_if_positive(atomic_t *v)
195195
}
196196
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
197197

198-
#define ATOMIC_OP() \
199-
static __always_inline \
200-
int arch_atomic_xchg_relaxed(atomic_t *v, int n) \
201-
{ \
202-
return __xchg_relaxed(n, &(v->counter), 4); \
203-
} \
204-
static __always_inline \
205-
int arch_atomic_cmpxchg_relaxed(atomic_t *v, int o, int n) \
206-
{ \
207-
return __cmpxchg_relaxed(&(v->counter), o, n, 4); \
208-
} \
209-
static __always_inline \
210-
int arch_atomic_cmpxchg_acquire(atomic_t *v, int o, int n) \
211-
{ \
212-
return __cmpxchg_acquire(&(v->counter), o, n, 4); \
213-
} \
214-
static __always_inline \
215-
int arch_atomic_cmpxchg(atomic_t *v, int o, int n) \
216-
{ \
217-
return __cmpxchg(&(v->counter), o, n, 4); \
218-
}
219-
220-
#define ATOMIC_OPS() \
221-
ATOMIC_OP()
222-
223-
ATOMIC_OPS()
224-
225-
#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed
226-
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
227-
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
228-
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
229-
230-
#undef ATOMIC_OPS
231-
#undef ATOMIC_OP
232-
233198
#else
234199
#include <asm-generic/atomic.h>
235200
#endif

arch/hexagon/include/asm/atomic.h

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -36,12 +36,6 @@ static inline void arch_atomic_set(atomic_t *v, int new)
3636
*/
3737
#define arch_atomic_read(v) READ_ONCE((v)->counter)
3838

39-
#define arch_atomic_xchg(v, new) \
40-
(arch_xchg(&((v)->counter), (new)))
41-
42-
#define arch_atomic_cmpxchg(v, old, new) \
43-
(arch_cmpxchg(&((v)->counter), (old), (new)))
44-
4539
#define ATOMIC_OP(op) \
4640
static inline void arch_atomic_##op(int i, atomic_t *v) \
4741
{ \

arch/ia64/include/asm/atomic.h

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -207,13 +207,6 @@ ATOMIC64_FETCH_OP(xor, ^)
207207
#undef ATOMIC64_FETCH_OP
208208
#undef ATOMIC64_OP
209209

210-
#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), old, new))
211-
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
212-
213-
#define arch_atomic64_cmpxchg(v, old, new) \
214-
(arch_cmpxchg(&((v)->counter), old, new))
215-
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
216-
217210
#define arch_atomic_add(i,v) (void)arch_atomic_add_return((i), (v))
218211
#define arch_atomic_sub(i,v) (void)arch_atomic_sub_return((i), (v))
219212

arch/loongarch/include/asm/atomic.h

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -181,9 +181,6 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
181181
return result;
182182
}
183183

184-
#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
185-
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), (new)))
186-
187184
/*
188185
* arch_atomic_dec_if_positive - decrement by 1 if old value positive
189186
* @v: pointer of type atomic_t
@@ -342,10 +339,6 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
342339
return result;
343340
}
344341

345-
#define arch_atomic64_cmpxchg(v, o, n) \
346-
((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
347-
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), (new)))
348-
349342
/*
350343
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive
351344
* @v: pointer of type atomic64_t

arch/m68k/include/asm/atomic.h

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -158,12 +158,7 @@ static inline int arch_atomic_inc_and_test(atomic_t *v)
158158
}
159159
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
160160

161-
#ifdef CONFIG_RMW_INSNS
162-
163-
#define arch_atomic_cmpxchg(v, o, n) ((int)arch_cmpxchg(&((v)->counter), (o), (n)))
164-
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
165-
166-
#else /* !CONFIG_RMW_INSNS */
161+
#ifndef CONFIG_RMW_INSNS
167162

168163
static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
169164
{
@@ -177,6 +172,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
177172
local_irq_restore(flags);
178173
return prev;
179174
}
175+
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
180176

181177
static inline int arch_atomic_xchg(atomic_t *v, int new)
182178
{
@@ -189,6 +185,7 @@ static inline int arch_atomic_xchg(atomic_t *v, int new)
189185
local_irq_restore(flags);
190186
return prev;
191187
}
188+
#define arch_atomic_xchg arch_atomic_xchg
192189

193190
#endif /* !CONFIG_RMW_INSNS */
194191

arch/mips/include/asm/atomic.h

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -33,17 +33,6 @@ static __always_inline void arch_##pfx##_set(pfx##_t *v, type i) \
3333
{ \
3434
WRITE_ONCE(v->counter, i); \
3535
} \
36-
\
37-
static __always_inline type \
38-
arch_##pfx##_cmpxchg(pfx##_t *v, type o, type n) \
39-
{ \
40-
return arch_cmpxchg(&v->counter, o, n); \
41-
} \
42-
\
43-
static __always_inline type arch_##pfx##_xchg(pfx##_t *v, type n) \
44-
{ \
45-
return arch_xchg(&v->counter, n); \
46-
}
4736

4837
ATOMIC_OPS(atomic, int)
4938

arch/openrisc/include/asm/atomic.h

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,4 @@ static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
130130

131131
#include <asm/cmpxchg.h>
132132

133-
#define arch_atomic_xchg(ptr, v) (arch_xchg(&(ptr)->counter, (v)))
134-
#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (old), (new)))
135-
136133
#endif /* __ASM_OPENRISC_ATOMIC_H */

arch/parisc/include/asm/atomic.h

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -73,10 +73,6 @@ static __inline__ int arch_atomic_read(const atomic_t *v)
7373
return READ_ONCE((v)->counter);
7474
}
7575

76-
/* exported interface */
77-
#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
78-
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
79-
8076
#define ATOMIC_OP(op, c_op) \
8177
static __inline__ void arch_atomic_##op(int i, atomic_t *v) \
8278
{ \
@@ -218,11 +214,6 @@ arch_atomic64_read(const atomic64_t *v)
218214
return READ_ONCE((v)->counter);
219215
}
220216

221-
/* exported interface */
222-
#define arch_atomic64_cmpxchg(v, o, n) \
223-
((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
224-
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
225-
226217
#endif /* !CONFIG_64BIT */
227218

228219

arch/powerpc/include/asm/atomic.h

Lines changed: 0 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -126,18 +126,6 @@ ATOMIC_OPS(xor, xor, "", K)
126126
#undef ATOMIC_OP_RETURN_RELAXED
127127
#undef ATOMIC_OP
128128

129-
#define arch_atomic_cmpxchg(v, o, n) \
130-
(arch_cmpxchg(&((v)->counter), (o), (n)))
131-
#define arch_atomic_cmpxchg_relaxed(v, o, n) \
132-
arch_cmpxchg_relaxed(&((v)->counter), (o), (n))
133-
#define arch_atomic_cmpxchg_acquire(v, o, n) \
134-
arch_cmpxchg_acquire(&((v)->counter), (o), (n))
135-
136-
#define arch_atomic_xchg(v, new) \
137-
(arch_xchg(&((v)->counter), new))
138-
#define arch_atomic_xchg_relaxed(v, new) \
139-
arch_xchg_relaxed(&((v)->counter), (new))
140-
141129
/**
142130
* atomic_fetch_add_unless - add unless the number is a given value
143131
* @v: pointer of type atomic_t
@@ -396,18 +384,6 @@ static __inline__ s64 arch_atomic64_dec_if_positive(atomic64_t *v)
396384
}
397385
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
398386

399-
#define arch_atomic64_cmpxchg(v, o, n) \
400-
(arch_cmpxchg(&((v)->counter), (o), (n)))
401-
#define arch_atomic64_cmpxchg_relaxed(v, o, n) \
402-
arch_cmpxchg_relaxed(&((v)->counter), (o), (n))
403-
#define arch_atomic64_cmpxchg_acquire(v, o, n) \
404-
arch_cmpxchg_acquire(&((v)->counter), (o), (n))
405-
406-
#define arch_atomic64_xchg(v, new) \
407-
(arch_xchg(&((v)->counter), new))
408-
#define arch_atomic64_xchg_relaxed(v, new) \
409-
arch_xchg_relaxed(&((v)->counter), (new))
410-
411387
/**
412388
* atomic64_fetch_add_unless - add unless the number is a given value
413389
* @v: pointer of type atomic64_t

0 commit comments

Comments (0)