@@ -165,7 +165,7 @@ static inline unsigned int refcount_read(const refcount_t *r)
  *
  * Return: false if the passed refcount is 0, true otherwise
  */
-static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
+static inline __must_check bool __refcount_add_not_zero(int i, refcount_t *r, int *oldp)
 {
 	int old = refcount_read(r);
 
@@ -174,12 +174,20 @@ static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
 			break;
 	} while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i));
 
+	if (oldp)
+		*oldp = old;
+
 	if (unlikely(old < 0 || old + i < 0))
 		refcount_warn_saturate(r, REFCOUNT_ADD_NOT_ZERO_OVF);
 
 	return old;
 }
 
+static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
+{
+	return __refcount_add_not_zero(i, r, NULL);
+}
+
 /**
  * refcount_add - add a value to a refcount
  * @i: the value to add to the refcount
@@ -196,16 +204,24 @@ static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
  * cases, refcount_inc(), or one of its variants, should instead be used to
  * increment a reference count.
  */
-static inline void refcount_add(int i, refcount_t *r)
+static inline void __refcount_add(int i, refcount_t *r, int *oldp)
 {
 	int old = atomic_fetch_add_relaxed(i, &r->refs);
 
+	if (oldp)
+		*oldp = old;
+
 	if (unlikely(!old))
 		refcount_warn_saturate(r, REFCOUNT_ADD_UAF);
 	else if (unlikely(old < 0 || old + i < 0))
 		refcount_warn_saturate(r, REFCOUNT_ADD_OVF);
 }
 
+static inline void refcount_add(int i, refcount_t *r)
+{
+	__refcount_add(i, r, NULL);
+}
+
 /**
  * refcount_inc_not_zero - increment a refcount unless it is 0
  * @r: the refcount to increment
@@ -219,9 +235,14 @@ static inline void refcount_add(int i, refcount_t *r)
  *
  * Return: true if the increment was successful, false otherwise
  */
+static inline __must_check bool __refcount_inc_not_zero(refcount_t *r, int *oldp)
+{
+	return __refcount_add_not_zero(1, r, oldp);
+}
+
 static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
 {
-	return refcount_add_not_zero(1, r);
+	return __refcount_inc_not_zero(r, NULL);
 }
 
 /**
@@ -236,9 +257,14 @@ static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
  * Will WARN if the refcount is 0, as this represents a possible use-after-free
  * condition.
  */
+static inline void __refcount_inc(refcount_t *r, int *oldp)
+{
+	__refcount_add(1, r, oldp);
+}
+
 static inline void refcount_inc(refcount_t *r)
 {
-	refcount_add(1, r);
+	__refcount_inc(r, NULL);
 }
 
 /**
@@ -261,10 +287,13 @@ static inline void refcount_inc(refcount_t *r)
  *
  * Return: true if the resulting refcount is 0, false otherwise
  */
-static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
+static inline __must_check bool __refcount_sub_and_test(int i, refcount_t *r, int *oldp)
 {
 	int old = atomic_fetch_sub_release(i, &r->refs);
 
+	if (oldp)
+		*oldp = old;
+
 	if (old == i) {
 		smp_acquire__after_ctrl_dep();
 		return true;
@@ -276,6 +305,11 @@ static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
 	return false;
 }
 
+static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
+{
+	return __refcount_sub_and_test(i, r, NULL);
+}
+
 /**
  * refcount_dec_and_test - decrement a refcount and test if it is 0
  * @r: the refcount
@@ -289,9 +323,14 @@ static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
  *
  * Return: true if the resulting refcount is 0, false otherwise
  */
+static inline __must_check bool __refcount_dec_and_test(refcount_t *r, int *oldp)
+{
+	return __refcount_sub_and_test(1, r, oldp);
+}
+
 static inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
-	return refcount_sub_and_test(1, r);
+	return __refcount_dec_and_test(r, NULL);
 }
 
 /**
@@ -304,12 +343,22 @@ static inline __must_check bool refcount_dec_and_test(refcount_t *r)
  * Provides release memory ordering, such that prior loads and stores are done
  * before.
  */
-static inline void refcount_dec(refcount_t *r)
+static inline void __refcount_dec(refcount_t *r, int *oldp)
 {
-	if (unlikely(atomic_fetch_sub_release(1, &r->refs) <= 1))
+	int old = atomic_fetch_sub_release(1, &r->refs);
+
+	if (oldp)
+		*oldp = old;
+
+	if (unlikely(old <= 1))
 		refcount_warn_saturate(r, REFCOUNT_DEC_LEAK);
 }
 
+static inline void refcount_dec(refcount_t *r)
+{
+	__refcount_dec(r, NULL);
+}
+
 extern __must_check bool refcount_dec_if_one(refcount_t *r);
 extern __must_check bool refcount_dec_not_one(refcount_t *r);
 extern __must_check bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock);
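
The new __refcount_*() helpers above mirror the existing API, but additionally report, through the oldp pointer, the counter value observed before the atomic operation. What follows is a minimal usage sketch, not part of this commit: struct foo, foo_get(), foo_put() and the pr_debug() messages are hypothetical names, used only to illustrate how a caller might consume the reported old value, e.g. for debugging or tracing.

#include <linux/printk.h>
#include <linux/refcount.h>
#include <linux/slab.h>

struct foo {
	refcount_t refs;
	/* ... payload ... */
};

/* Take a reference unless the object is already dead (count == 0). */
static bool foo_get(struct foo *f)
{
	int old;

	if (!__refcount_inc_not_zero(&f->refs, &old))
		return false;

	pr_debug("foo_get: refcount was %d before increment\n", old);
	return true;
}

/* Drop a reference; free the object when the last one goes away. */
static void foo_put(struct foo *f)
{
	int old;

	if (__refcount_dec_and_test(&f->refs, &old)) {
		/* old == 1 here: this call released the final reference. */
		kfree(f);
		return;
	}

	pr_debug("foo_put: refcount was %d before decrement\n", old);
}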