 static inline void arch_atomic_##op(int i, atomic_t *v)               \
 {                                                                     \
         __asm__ __volatile__(                                         \
-        "am"#asm_op"_db.w" " $zero, %1, %0      \n"                   \
+        "am"#asm_op".w" " $zero, %1, %0         \n"                   \
         : "+ZB" (v->counter)                                          \
         : "r" (I)                                                     \
         : "memory");                                                  \
 }
 
-#define ATOMIC_OP_RETURN(op, I, asm_op, c_op)                          \
-static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
+#define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix)              \
+static inline int arch_atomic_##op##_return##suffix(int i, atomic_t *v) \
 {                                                                      \
         int result;                                                    \
                                                                        \
         __asm__ __volatile__(                                          \
-        "am"#asm_op"_db.w" " %1, %2, %0         \n"                    \
+        "am"#asm_op#mb".w" " %1, %2, %0         \n"                    \
         : "+ZB" (v->counter), "=&r" (result)                           \
         : "r" (I)                                                      \
         : "memory");                                                   \
                                                                        \
         return result c_op I;                                          \
 }
 
-#define ATOMIC_FETCH_OP(op, I, asm_op)                                 \
-static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
+#define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix)                     \
+static inline int arch_atomic_fetch_##op##suffix(int i, atomic_t *v)   \
 {                                                                      \
         int result;                                                    \
                                                                        \
         __asm__ __volatile__(                                          \
-        "am"#asm_op"_db.w" " %1, %2, %0         \n"                    \
+        "am"#asm_op#mb".w" " %1, %2, %0         \n"                    \
         : "+ZB" (v->counter), "=&r" (result)                           \
         : "r" (I)                                                      \
         : "memory");                                                   \
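For orientation (commentary, not part of the patch): hand-expanding the new ATOMIC_OP(add, i, add) shows what the barrier-free void op becomes. The kernel memory model treats value-returning-free RMW ops as unordered, which is why the _db suffix can be dropped here. A sketch, not literal preprocessor output:

static inline void arch_atomic_add(int i, atomic_t *v)
{
        /* "am" "add" ".w" concatenate to a plain amadd.w: atomic add
         * to v->counter, result discarded into $zero, no barrier. */
        __asm__ __volatile__(
        "amadd.w $zero, %1, %0  \n"
        : "+ZB" (v->counter)
        : "r" (i)
        : "memory");
}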
@@ -72,29 +72,53 @@ static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
 
 #define ATOMIC_OPS(op, I, asm_op, c_op)                                \
         ATOMIC_OP(op, I, asm_op)                                       \
-        ATOMIC_OP_RETURN(op, I, asm_op, c_op)                          \
-        ATOMIC_FETCH_OP(op, I, asm_op)
+        ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db, )                   \
+        ATOMIC_OP_RETURN(op, I, asm_op, c_op, , _relaxed)              \
+        ATOMIC_FETCH_OP(op, I, asm_op, _db, )                          \
+        ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)
 
 ATOMIC_OPS(add, i, add, +)
 ATOMIC_OPS(sub, -i, add, +)
 
+#define arch_atomic_add_return          arch_atomic_add_return
+#define arch_atomic_add_return_acquire  arch_atomic_add_return
+#define arch_atomic_add_return_release  arch_atomic_add_return
 #define arch_atomic_add_return_relaxed  arch_atomic_add_return_relaxed
+#define arch_atomic_sub_return          arch_atomic_sub_return
+#define arch_atomic_sub_return_acquire  arch_atomic_sub_return
+#define arch_atomic_sub_return_release  arch_atomic_sub_return
 #define arch_atomic_sub_return_relaxed  arch_atomic_sub_return_relaxed
+#define arch_atomic_fetch_add           arch_atomic_fetch_add
+#define arch_atomic_fetch_add_acquire   arch_atomic_fetch_add
+#define arch_atomic_fetch_add_release   arch_atomic_fetch_add
 #define arch_atomic_fetch_add_relaxed   arch_atomic_fetch_add_relaxed
+#define arch_atomic_fetch_sub           arch_atomic_fetch_sub
+#define arch_atomic_fetch_sub_acquire   arch_atomic_fetch_sub
+#define arch_atomic_fetch_sub_release   arch_atomic_fetch_sub
 #define arch_atomic_fetch_sub_relaxed   arch_atomic_fetch_sub_relaxed
 
 #undef ATOMIC_OPS
 
 #define ATOMIC_OPS(op, I, asm_op)                                      \
         ATOMIC_OP(op, I, asm_op)                                       \
-        ATOMIC_FETCH_OP(op, I, asm_op)
+        ATOMIC_FETCH_OP(op, I, asm_op, _db, )                          \
+        ATOMIC_FETCH_OP(op, I, asm_op, , _relaxed)
 
 ATOMIC_OPS(and, i, and)
 ATOMIC_OPS(or, i, or)
 ATOMIC_OPS(xor, i, xor)
 
+#define arch_atomic_fetch_and           arch_atomic_fetch_and
+#define arch_atomic_fetch_and_acquire   arch_atomic_fetch_and
+#define arch_atomic_fetch_and_release   arch_atomic_fetch_and
 #define arch_atomic_fetch_and_relaxed   arch_atomic_fetch_and_relaxed
+#define arch_atomic_fetch_or            arch_atomic_fetch_or
+#define arch_atomic_fetch_or_acquire    arch_atomic_fetch_or
+#define arch_atomic_fetch_or_release    arch_atomic_fetch_or
 #define arch_atomic_fetch_or_relaxed    arch_atomic_fetch_or_relaxed
+#define arch_atomic_fetch_xor           arch_atomic_fetch_xor
+#define arch_atomic_fetch_xor_acquire   arch_atomic_fetch_xor
+#define arch_atomic_fetch_xor_release   arch_atomic_fetch_xor
 #define arch_atomic_fetch_xor_relaxed   arch_atomic_fetch_xor_relaxed
 
 #undef ATOMIC_OPS
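The new mb/suffix parameters are easiest to read by hand-expanding the two ATOMIC_OP_RETURN instances that ATOMIC_OPS(add, i, add, +) now generates. The sketch below is illustrative, not literal preprocessor output:

static inline int arch_atomic_add_return(int i, atomic_t *v)
{
        int result;

        /* mb = _db: amadd_db.w is the fully-ordered AMO form */
        __asm__ __volatile__(
        "amadd_db.w %1, %2, %0  \n"
        : "+ZB" (v->counter), "=&r" (result)
        : "r" (i)
        : "memory");

        return result + i;
}

static inline int arch_atomic_add_return_relaxed(int i, atomic_t *v)
{
        int result;

        /* mb empty: plain amadd.w, atomic but unordered */
        __asm__ __volatile__(
        "amadd.w %1, %2, %0     \n"
        : "+ZB" (v->counter), "=&r" (result)
        : "r" (i)
        : "memory");

        return result + i;
}

The bare-name and _acquire/_release #defines then publish the _db expansion for all three: full ordering over-satisfies acquire and release, so the generic fallback layer never has to build those variants from _relaxed plus explicit barriers.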
@@ -172,32 +196,32 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
 static inline void arch_atomic64_##op(long i, atomic64_t *v)          \
 {                                                                     \
         __asm__ __volatile__(                                         \
-        "am"#asm_op"_db.d " " $zero, %1, %0     \n"                   \
+        "am"#asm_op".d " " $zero, %1, %0        \n"                   \
         : "+ZB" (v->counter)                                          \
         : "r" (I)                                                     \
         : "memory");                                                  \
 }
 
-#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op)                        \
-static inline long arch_atomic64_##op##_return_relaxed(long i, atomic64_t *v) \
+#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op, mb, suffix)            \
+static inline long arch_atomic64_##op##_return##suffix(long i, atomic64_t *v) \
 {                                                                      \
         long result;                                                   \
         __asm__ __volatile__(                                          \
-        "am"#asm_op"_db.d " " %1, %2, %0        \n"                    \
+        "am"#asm_op#mb".d " " %1, %2, %0        \n"                    \
         : "+ZB" (v->counter), "=&r" (result)                           \
         : "r" (I)                                                      \
         : "memory");                                                   \
                                                                        \
         return result c_op I;                                          \
 }
 
-#define ATOMIC64_FETCH_OP(op, I, asm_op)                               \
-static inline long arch_atomic64_fetch_##op##_relaxed(long i, atomic64_t *v) \
+#define ATOMIC64_FETCH_OP(op, I, asm_op, mb, suffix)                   \
+static inline long arch_atomic64_fetch_##op##suffix(long i, atomic64_t *v) \
 {                                                                      \
         long result;                                                   \
                                                                        \
         __asm__ __volatile__(                                          \
-        "am"#asm_op"_db.d " " %1, %2, %0        \n"                    \
+        "am"#asm_op#mb".d " " %1, %2, %0        \n"                    \
         : "+ZB" (v->counter), "=&r" (result)                           \
         : "r" (I)                                                      \
         : "memory");                                                   \
@@ -207,29 +231,53 @@ static inline long arch_atomic64_fetch_##op##_relaxed(long i, atomic64_t *v) \
 
 #define ATOMIC64_OPS(op, I, asm_op, c_op)                              \
         ATOMIC64_OP(op, I, asm_op)                                     \
-        ATOMIC64_OP_RETURN(op, I, asm_op, c_op)                        \
-        ATOMIC64_FETCH_OP(op, I, asm_op)
+        ATOMIC64_OP_RETURN(op, I, asm_op, c_op, _db, )                 \
+        ATOMIC64_OP_RETURN(op, I, asm_op, c_op, , _relaxed)            \
+        ATOMIC64_FETCH_OP(op, I, asm_op, _db, )                        \
+        ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)
 
 ATOMIC64_OPS(add, i, add, +)
 ATOMIC64_OPS(sub, -i, add, +)
 
+#define arch_atomic64_add_return                arch_atomic64_add_return
+#define arch_atomic64_add_return_acquire        arch_atomic64_add_return
+#define arch_atomic64_add_return_release        arch_atomic64_add_return
 #define arch_atomic64_add_return_relaxed        arch_atomic64_add_return_relaxed
+#define arch_atomic64_sub_return                arch_atomic64_sub_return
+#define arch_atomic64_sub_return_acquire        arch_atomic64_sub_return
+#define arch_atomic64_sub_return_release        arch_atomic64_sub_return
 #define arch_atomic64_sub_return_relaxed        arch_atomic64_sub_return_relaxed
+#define arch_atomic64_fetch_add                 arch_atomic64_fetch_add
+#define arch_atomic64_fetch_add_acquire         arch_atomic64_fetch_add
+#define arch_atomic64_fetch_add_release         arch_atomic64_fetch_add
 #define arch_atomic64_fetch_add_relaxed         arch_atomic64_fetch_add_relaxed
+#define arch_atomic64_fetch_sub                 arch_atomic64_fetch_sub
+#define arch_atomic64_fetch_sub_acquire         arch_atomic64_fetch_sub
+#define arch_atomic64_fetch_sub_release         arch_atomic64_fetch_sub
 #define arch_atomic64_fetch_sub_relaxed         arch_atomic64_fetch_sub_relaxed
 
 #undef ATOMIC64_OPS
 
 #define ATOMIC64_OPS(op, I, asm_op)                                    \
         ATOMIC64_OP(op, I, asm_op)                                     \
-        ATOMIC64_FETCH_OP(op, I, asm_op)
+        ATOMIC64_FETCH_OP(op, I, asm_op, _db, )                        \
+        ATOMIC64_FETCH_OP(op, I, asm_op, , _relaxed)
 
 ATOMIC64_OPS(and, i, and)
 ATOMIC64_OPS(or, i, or)
 ATOMIC64_OPS(xor, i, xor)
 
+#define arch_atomic64_fetch_and                 arch_atomic64_fetch_and
+#define arch_atomic64_fetch_and_acquire         arch_atomic64_fetch_and
+#define arch_atomic64_fetch_and_release         arch_atomic64_fetch_and
 #define arch_atomic64_fetch_and_relaxed         arch_atomic64_fetch_and_relaxed
+#define arch_atomic64_fetch_or                  arch_atomic64_fetch_or
+#define arch_atomic64_fetch_or_acquire          arch_atomic64_fetch_or
+#define arch_atomic64_fetch_or_release          arch_atomic64_fetch_or
 #define arch_atomic64_fetch_or_relaxed          arch_atomic64_fetch_or_relaxed
+#define arch_atomic64_fetch_xor                 arch_atomic64_fetch_xor
+#define arch_atomic64_fetch_xor_acquire         arch_atomic64_fetch_xor
+#define arch_atomic64_fetch_xor_release         arch_atomic64_fetch_xor
 #define arch_atomic64_fetch_xor_relaxed         arch_atomic64_fetch_xor_relaxed
 
 #undef ATOMIC64_OPS
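With both widths covered, every op now has a fully-ordered flavor (am*_db.w / am*_db.d) and a relaxed flavor (plain am*.w / am*.d). A caller-side sketch, assuming the usual mapping of the public atomic API onto these arch_ hooks; put_ref_and_count() is a hypothetical function for illustration only:

static int put_ref_and_count(atomic_t *refs, atomic_t *stats)
{
        /* atomic_fetch_sub() resolves to arch_atomic_fetch_sub(),
         * i.e. the fully-ordered amadd_db.w with a negated operand. */
        int old = atomic_fetch_sub(1, refs);

        /* Void RMW ops are unordered by definition, so atomic_inc()
         * lands on the barrier-free amadd.w. */
        atomic_inc(stats);

        /* An explicitly relaxed fetch op also stays barrier-free. */
        (void)atomic_fetch_add_relaxed(1, stats);

        return old;
}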