Skip to content

Commit f52609f

Browse files
author
Ingo Molnar
committed
Merge branch 'locking/arch-atomic' into locking/core, because it's ready for upstream
Signed-off-by: Ingo Molnar <[email protected]>
2 parents 20f9ed1 + 41b9e9f commit f52609f

File tree

65 files changed

+841
-1015
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

65 files changed

+841
-1015
lines changed

arch/alpha/include/asm/atomic.h

Lines changed: 27 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,13 @@
2929
* branch back to restart the operation.
3030
*/
3131

32-
#define ATOMIC_OP(op) \
32+
#define ATOMIC_OP(op, asm_op) \
3333
static __inline__ void atomic_##op(int i, atomic_t * v) \
3434
{ \
3535
unsigned long temp; \
3636
__asm__ __volatile__( \
3737
"1: ldl_l %0,%1\n" \
38-
" " #op "l %0,%2,%0\n" \
38+
" " #asm_op " %0,%2,%0\n" \
3939
" stl_c %0,%1\n" \
4040
" beq %0,2f\n" \
4141
".subsection 2\n" \
@@ -45,15 +45,15 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
4545
:"Ir" (i), "m" (v->counter)); \
4646
} \
4747

48-
#define ATOMIC_OP_RETURN(op) \
48+
#define ATOMIC_OP_RETURN(op, asm_op) \
4949
static inline int atomic_##op##_return(int i, atomic_t *v) \
5050
{ \
5151
long temp, result; \
5252
smp_mb(); \
5353
__asm__ __volatile__( \
5454
"1: ldl_l %0,%1\n" \
55-
" " #op "l %0,%3,%2\n" \
56-
" " #op "l %0,%3,%0\n" \
55+
" " #asm_op " %0,%3,%2\n" \
56+
" " #asm_op " %0,%3,%0\n" \
5757
" stl_c %0,%1\n" \
5858
" beq %0,2f\n" \
5959
".subsection 2\n" \
@@ -65,13 +65,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
6565
return result; \
6666
}
6767

68-
#define ATOMIC64_OP(op) \
68+
#define ATOMIC64_OP(op, asm_op) \
6969
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
7070
{ \
7171
unsigned long temp; \
7272
__asm__ __volatile__( \
7373
"1: ldq_l %0,%1\n" \
74-
" " #op "q %0,%2,%0\n" \
74+
" " #asm_op " %0,%2,%0\n" \
7575
" stq_c %0,%1\n" \
7676
" beq %0,2f\n" \
7777
".subsection 2\n" \
@@ -81,15 +81,15 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \
8181
:"Ir" (i), "m" (v->counter)); \
8282
} \
8383

84-
#define ATOMIC64_OP_RETURN(op) \
84+
#define ATOMIC64_OP_RETURN(op, asm_op) \
8585
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
8686
{ \
8787
long temp, result; \
8888
smp_mb(); \
8989
__asm__ __volatile__( \
9090
"1: ldq_l %0,%1\n" \
91-
" " #op "q %0,%3,%2\n" \
92-
" " #op "q %0,%3,%0\n" \
91+
" " #asm_op " %0,%3,%2\n" \
92+
" " #asm_op " %0,%3,%0\n" \
9393
" stq_c %0,%1\n" \
9494
" beq %0,2f\n" \
9595
".subsection 2\n" \
@@ -101,15 +101,27 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
101101
return result; \
102102
}
103103

104-
#define ATOMIC_OPS(opg) \
105-
ATOMIC_OP(opg) \
106-
ATOMIC_OP_RETURN(opg) \
107-
ATOMIC64_OP(opg) \
108-
ATOMIC64_OP_RETURN(opg)
104+
#define ATOMIC_OPS(op) \
105+
ATOMIC_OP(op, op##l) \
106+
ATOMIC_OP_RETURN(op, op##l) \
107+
ATOMIC64_OP(op, op##q) \
108+
ATOMIC64_OP_RETURN(op, op##q)
109109

110110
ATOMIC_OPS(add)
111111
ATOMIC_OPS(sub)
112112

113+
#define atomic_andnot atomic_andnot
114+
#define atomic64_andnot atomic64_andnot
115+
116+
ATOMIC_OP(and, and)
117+
ATOMIC_OP(andnot, bic)
118+
ATOMIC_OP(or, bis)
119+
ATOMIC_OP(xor, xor)
120+
ATOMIC64_OP(and, and)
121+
ATOMIC64_OP(andnot, bic)
122+
ATOMIC64_OP(or, bis)
123+
ATOMIC64_OP(xor, xor)
124+
113125
#undef ATOMIC_OPS
114126
#undef ATOMIC64_OP_RETURN
115127
#undef ATOMIC64_OP

arch/arc/include/asm/atomic.h

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,9 +143,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
143143

144144
ATOMIC_OPS(add, +=, add)
145145
ATOMIC_OPS(sub, -=, sub)
146-
ATOMIC_OP(and, &=, and)
147146

148-
#define atomic_clear_mask(mask, v) atomic_and(~(mask), (v))
147+
#define atomic_andnot atomic_andnot
148+
149+
ATOMIC_OP(and, &=, and)
150+
ATOMIC_OP(andnot, &= ~, bic)
151+
ATOMIC_OP(or, |=, or)
152+
ATOMIC_OP(xor, ^=, xor)
149153

150154
#undef ATOMIC_OPS
151155
#undef ATOMIC_OP_RETURN

arch/arm/include/asm/atomic.h

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -194,6 +194,13 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
194194
ATOMIC_OPS(add, +=, add)
195195
ATOMIC_OPS(sub, -=, sub)
196196

197+
#define atomic_andnot atomic_andnot
198+
199+
ATOMIC_OP(and, &=, and)
200+
ATOMIC_OP(andnot, &= ~, bic)
201+
ATOMIC_OP(or, |=, orr)
202+
ATOMIC_OP(xor, ^=, eor)
203+
197204
#undef ATOMIC_OPS
198205
#undef ATOMIC_OP_RETURN
199206
#undef ATOMIC_OP
@@ -321,6 +328,13 @@ static inline long long atomic64_##op##_return(long long i, atomic64_t *v) \
321328
ATOMIC64_OPS(add, adds, adc)
322329
ATOMIC64_OPS(sub, subs, sbc)
323330

331+
#define atomic64_andnot atomic64_andnot
332+
333+
ATOMIC64_OP(and, and, and)
334+
ATOMIC64_OP(andnot, bic, bic)
335+
ATOMIC64_OP(or, orr, orr)
336+
ATOMIC64_OP(xor, eor, eor)
337+
324338
#undef ATOMIC64_OPS
325339
#undef ATOMIC64_OP_RETURN
326340
#undef ATOMIC64_OP

arch/arm64/include/asm/atomic.h

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
8585
ATOMIC_OPS(add, add)
8686
ATOMIC_OPS(sub, sub)
8787

88+
#define atomic_andnot atomic_andnot
89+
90+
ATOMIC_OP(and, and)
91+
ATOMIC_OP(andnot, bic)
92+
ATOMIC_OP(or, orr)
93+
ATOMIC_OP(xor, eor)
94+
8895
#undef ATOMIC_OPS
8996
#undef ATOMIC_OP_RETURN
9097
#undef ATOMIC_OP
@@ -183,6 +190,13 @@ static inline long atomic64_##op##_return(long i, atomic64_t *v) \
183190
ATOMIC64_OPS(add, add)
184191
ATOMIC64_OPS(sub, sub)
185192

193+
#define atomic64_andnot atomic64_andnot
194+
195+
ATOMIC64_OP(and, and)
196+
ATOMIC64_OP(andnot, bic)
197+
ATOMIC64_OP(or, orr)
198+
ATOMIC64_OP(xor, eor)
199+
186200
#undef ATOMIC64_OPS
187201
#undef ATOMIC64_OP_RETURN
188202
#undef ATOMIC64_OP

arch/avr32/include/asm/atomic.h

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,18 @@ static inline int __atomic_##op##_return(int i, atomic_t *v) \
4444
ATOMIC_OP_RETURN(sub, sub, rKs21)
4545
ATOMIC_OP_RETURN(add, add, r)
4646

47+
#define ATOMIC_OP(op, asm_op) \
48+
ATOMIC_OP_RETURN(op, asm_op, r) \
49+
static inline void atomic_##op(int i, atomic_t *v) \
50+
{ \
51+
(void)__atomic_##op##_return(i, v); \
52+
}
53+
54+
ATOMIC_OP(and, and)
55+
ATOMIC_OP(or, or)
56+
ATOMIC_OP(xor, eor)
57+
58+
#undef ATOMIC_OP
4759
#undef ATOMIC_OP_RETURN
4860

4961
/*

arch/blackfin/include/asm/atomic.h

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,19 +16,21 @@
1616
#include <linux/types.h>
1717

1818
asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
19-
asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);
20-
asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);
21-
asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);
19+
asmlinkage int __raw_atomic_add_asm(volatile int *ptr, int value);
20+
21+
asmlinkage int __raw_atomic_and_asm(volatile int *ptr, int value);
22+
asmlinkage int __raw_atomic_or_asm(volatile int *ptr, int value);
2223
asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
2324
asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
2425

2526
#define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)
2627

27-
#define atomic_add_return(i, v) __raw_atomic_update_asm(&(v)->counter, i)
28-
#define atomic_sub_return(i, v) __raw_atomic_update_asm(&(v)->counter, -(i))
28+
#define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
29+
#define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))
2930

30-
#define atomic_clear_mask(m, v) __raw_atomic_clear_asm(&(v)->counter, m)
31-
#define atomic_set_mask(m, v) __raw_atomic_set_asm(&(v)->counter, m)
31+
#define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i)
32+
#define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
33+
#define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)
3234

3335
#endif
3436

arch/blackfin/kernel/bfin_ksyms.c

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -83,11 +83,12 @@ EXPORT_SYMBOL(insl);
8383
EXPORT_SYMBOL(insl_16);
8484

8585
#ifdef CONFIG_SMP
86-
EXPORT_SYMBOL(__raw_atomic_update_asm);
87-
EXPORT_SYMBOL(__raw_atomic_clear_asm);
88-
EXPORT_SYMBOL(__raw_atomic_set_asm);
86+
EXPORT_SYMBOL(__raw_atomic_add_asm);
87+
EXPORT_SYMBOL(__raw_atomic_and_asm);
88+
EXPORT_SYMBOL(__raw_atomic_or_asm);
8989
EXPORT_SYMBOL(__raw_atomic_xor_asm);
9090
EXPORT_SYMBOL(__raw_atomic_test_asm);
91+
9192
EXPORT_SYMBOL(__raw_xchg_1_asm);
9293
EXPORT_SYMBOL(__raw_xchg_2_asm);
9394
EXPORT_SYMBOL(__raw_xchg_4_asm);

arch/blackfin/mach-bf561/atomic.S

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -587,10 +587,10 @@ ENDPROC(___raw_write_unlock_asm)
587587
* r0 = ptr
588588
* r1 = value
589589
*
590-
* Add a signed value to a 32bit word and return the new value atomically.
590+
* ADD a signed value to a 32bit word and return the new value atomically.
591591
* Clobbers: r3:0, p1:0
592592
*/
593-
ENTRY(___raw_atomic_update_asm)
593+
ENTRY(___raw_atomic_add_asm)
594594
p1 = r0;
595595
r3 = r1;
596596
[--sp] = rets;
@@ -603,19 +603,19 @@ ENTRY(___raw_atomic_update_asm)
603603
r0 = r3;
604604
rets = [sp++];
605605
rts;
606-
ENDPROC(___raw_atomic_update_asm)
606+
ENDPROC(___raw_atomic_add_asm)
607607

608608
/*
609609
* r0 = ptr
610610
* r1 = mask
611611
*
612-
* Clear the mask bits from a 32bit word and return the old 32bit value
612+
* AND the mask bits from a 32bit word and return the old 32bit value
613613
* atomically.
614614
* Clobbers: r3:0, p1:0
615615
*/
616-
ENTRY(___raw_atomic_clear_asm)
616+
ENTRY(___raw_atomic_and_asm)
617617
p1 = r0;
618-
r3 = ~r1;
618+
r3 = r1;
619619
[--sp] = rets;
620620
call _get_core_lock;
621621
r2 = [p1];
@@ -627,17 +627,17 @@ ENTRY(___raw_atomic_clear_asm)
627627
r0 = r3;
628628
rets = [sp++];
629629
rts;
630-
ENDPROC(___raw_atomic_clear_asm)
630+
ENDPROC(___raw_atomic_and_asm)
631631

632632
/*
633633
* r0 = ptr
634634
* r1 = mask
635635
*
636-
* Set the mask bits into a 32bit word and return the old 32bit value
636+
* OR the mask bits into a 32bit word and return the old 32bit value
637637
* atomically.
638638
* Clobbers: r3:0, p1:0
639639
*/
640-
ENTRY(___raw_atomic_set_asm)
640+
ENTRY(___raw_atomic_or_asm)
641641
p1 = r0;
642642
r3 = r1;
643643
[--sp] = rets;
@@ -651,7 +651,7 @@ ENTRY(___raw_atomic_set_asm)
651651
r0 = r3;
652652
rets = [sp++];
653653
rts;
654-
ENDPROC(___raw_atomic_set_asm)
654+
ENDPROC(___raw_atomic_or_asm)
655655

656656
/*
657657
* r0 = ptr
@@ -787,7 +787,7 @@ ENTRY(___raw_bit_set_asm)
787787
r2 = r1;
788788
r1 = 1;
789789
r1 <<= r2;
790-
jump ___raw_atomic_set_asm
790+
jump ___raw_atomic_or_asm
791791
ENDPROC(___raw_bit_set_asm)
792792

793793
/*
@@ -798,10 +798,10 @@ ENDPROC(___raw_bit_set_asm)
798798
* Clobbers: r3:0, p1:0
799799
*/
800800
ENTRY(___raw_bit_clear_asm)
801-
r2 = r1;
802-
r1 = 1;
803-
r1 <<= r2;
804-
jump ___raw_atomic_clear_asm
801+
r2 = 1;
802+
r2 <<= r1;
803+
r1 = ~r2;
804+
jump ___raw_atomic_and_asm
805805
ENDPROC(___raw_bit_clear_asm)
806806

807807
/*

arch/blackfin/mach-common/smp.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -195,7 +195,7 @@ void send_ipi(const struct cpumask *cpumask, enum ipi_message_type msg)
195195
local_irq_save(flags);
196196
for_each_cpu(cpu, cpumask) {
197197
bfin_ipi_data = &per_cpu(bfin_ipi, cpu);
198-
atomic_set_mask((1 << msg), &bfin_ipi_data->bits);
198+
atomic_or((1 << msg), &bfin_ipi_data->bits);
199199
atomic_inc(&bfin_ipi_data->count);
200200
}
201201
local_irq_restore(flags);

0 commit comments

Comments
 (0)