Commit 9b6713c

chucklever authored and brauner committed
maple_tree: Add mtree_alloc_cyclic()
I need a cyclic allocator for the simple_offset implementation in fs/libfs.c.

Signed-off-by: Chuck Lever <[email protected]>
Link: https://lore.kernel.org/r/170820144179.6328.12838600511394432325.stgit@91.116.238.104.host.secureserver.net
Signed-off-by: Christian Brauner <[email protected]>
1 parent ecba88a commit 9b6713c
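
The commit message names the intended consumer: the simple_offset code in fs/libfs.c needs per-object IDs that keep increasing and only wrap around once the range is exhausted. A minimal, hypothetical sketch of such a caller using the API added below (struct my_ctx and the my_ctx_* helpers are illustrative names, not code from this commit or from fs/libfs.c):

#include <linux/maple_tree.h>

/* Illustrative context: one cyclic ID space per object. */
struct my_ctx {
	struct maple_tree	mt;
	unsigned long		next_id;
};

static void my_ctx_init(struct my_ctx *ctx)
{
	/* The cyclic allocator requires an allocation-range tree. */
	mt_init_flags(&ctx->mt, MT_FLAGS_ALLOC_RANGE);
	ctx->next_id = 1;
}

/*
 * Store @item and return its newly allocated ID via @id.  Returns 0 or 1
 * on success (1 means the ID space wrapped), or a negative errno.
 */
static int my_ctx_add(struct my_ctx *ctx, void *item, unsigned long *id)
{
	/* The search begins at ctx->next_id and wraps back to 1 if needed. */
	return mtree_alloc_cyclic(&ctx->mt, id, item, 1, ULONG_MAX,
				  &ctx->next_id, GFP_KERNEL);
}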

2 files changed: 100 additions & 0 deletions


include/linux/maple_tree.h

Lines changed: 7 additions & 0 deletions
@@ -171,6 +171,7 @@ enum maple_type {
 #define MT_FLAGS_LOCK_IRQ	0x100
 #define MT_FLAGS_LOCK_BH	0x200
 #define MT_FLAGS_LOCK_EXTERN	0x300
+#define MT_FLAGS_ALLOC_WRAPPED	0x0800
 
 #define MAPLE_HEIGHT_MAX	31
 
@@ -319,6 +320,9 @@ int mtree_insert_range(struct maple_tree *mt, unsigned long first,
 int mtree_alloc_range(struct maple_tree *mt, unsigned long *startp,
 		void *entry, unsigned long size, unsigned long min,
 		unsigned long max, gfp_t gfp);
+int mtree_alloc_cyclic(struct maple_tree *mt, unsigned long *startp,
+		void *entry, unsigned long range_lo, unsigned long range_hi,
+		unsigned long *next, gfp_t gfp);
 int mtree_alloc_rrange(struct maple_tree *mt, unsigned long *startp,
 		void *entry, unsigned long size, unsigned long min,
 		unsigned long max, gfp_t gfp);
@@ -499,6 +503,9 @@ void *mas_find_range(struct ma_state *mas, unsigned long max);
 void *mas_find_rev(struct ma_state *mas, unsigned long min);
 void *mas_find_range_rev(struct ma_state *mas, unsigned long max);
 int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp);
+int mas_alloc_cyclic(struct ma_state *mas, unsigned long *startp,
+		void *entry, unsigned long range_lo, unsigned long range_hi,
+		unsigned long *next, gfp_t gfp);
 
 bool mas_nomem(struct ma_state *mas, gfp_t gfp);
 void mas_pause(struct ma_state *mas);

lib/maple_tree.c

Lines changed: 93 additions & 0 deletions
@@ -4290,6 +4290,56 @@ static inline void *mas_insert(struct ma_state *mas, void *entry)
 
 }
 
+/**
+ * mas_alloc_cyclic() - Internal call to find somewhere to store an entry
+ * @mas: The maple state.
+ * @startp: Pointer to ID.
+ * @range_lo: Lower bound of range to search.
+ * @range_hi: Upper bound of range to search.
+ * @entry: The entry to store.
+ * @next: Pointer to next ID to allocate.
+ * @gfp: The GFP_FLAGS to use for allocations.
+ *
+ * Return: 0 if the allocation succeeded without wrapping, 1 if the
+ * allocation succeeded after wrapping, or -EBUSY if there are no
+ * free entries.
+ */
+int mas_alloc_cyclic(struct ma_state *mas, unsigned long *startp,
+		void *entry, unsigned long range_lo, unsigned long range_hi,
+		unsigned long *next, gfp_t gfp)
+{
+	unsigned long min = range_lo;
+	int ret = 0;
+
+	range_lo = max(min, *next);
+	ret = mas_empty_area(mas, range_lo, range_hi, 1);
+	if ((mas->tree->ma_flags & MT_FLAGS_ALLOC_WRAPPED) && ret == 0) {
+		mas->tree->ma_flags &= ~MT_FLAGS_ALLOC_WRAPPED;
+		ret = 1;
+	}
+	if (ret < 0 && range_lo > min) {
+		ret = mas_empty_area(mas, min, range_hi, 1);
+		if (ret == 0)
+			ret = 1;
+	}
+	if (ret < 0)
+		return ret;
+
+	do {
+		mas_insert(mas, entry);
+	} while (mas_nomem(mas, gfp));
+	if (mas_is_err(mas))
+		return xa_err(mas->node);
+
+	*startp = mas->index;
+	*next = *startp + 1;
+	if (*next == 0)
+		mas->tree->ma_flags |= MT_FLAGS_ALLOC_WRAPPED;
+
+	return ret;
+}
+EXPORT_SYMBOL(mas_alloc_cyclic);
+
 static __always_inline void mas_rewalk(struct ma_state *mas, unsigned long index)
 {
 retry:
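
mas_alloc_cyclic() is the advanced-interface half: the caller supplies its own ma_state and is responsible for locking, and the MT_FLAGS_ALLOC_WRAPPED flag, set above when *next overflows to 0, is what makes the next successful allocation return 1 before being cleared. A hypothetical locked caller (illustrative only; mtree_alloc_cyclic() below wraps this same pattern and adds validation) might look like:

/* Illustrative: allocate an ID while managing the tree lock ourselves. */
static int my_alloc_id_locked(struct maple_tree *mt, void *item,
			      unsigned long *id, unsigned long *next)
{
	int ret;

	MA_STATE(mas, mt, 0, 0);

	mtree_lock(mt);
	ret = mas_alloc_cyclic(&mas, id, item, 1, ULONG_MAX, next,
			       GFP_KERNEL);
	mtree_unlock(mt);

	/* ret == 1 means the search wrapped back to the low end of the range. */
	return ret < 0 ? ret : 0;
}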
@@ -6443,6 +6493,49 @@ int mtree_alloc_range(struct maple_tree *mt, unsigned long *startp,
 }
 EXPORT_SYMBOL(mtree_alloc_range);
 
+/**
+ * mtree_alloc_cyclic() - Find somewhere to store this entry in the tree.
+ * @mt: The maple tree.
+ * @startp: Pointer to ID.
+ * @range_lo: Lower bound of range to search.
+ * @range_hi: Upper bound of range to search.
+ * @entry: The entry to store.
+ * @next: Pointer to next ID to allocate.
+ * @gfp: The GFP_FLAGS to use for allocations.
+ *
+ * Finds an empty entry in @mt after @next, stores the new index into
+ * the @id pointer, stores the entry at that index, then updates @next.
+ *
+ * @mt must be initialized with the MT_FLAGS_ALLOC_RANGE flag.
+ *
+ * Context: Any context. Takes and releases the mt.lock. May sleep if
+ * the @gfp flags permit.
+ *
+ * Return: 0 if the allocation succeeded without wrapping, 1 if the
+ * allocation succeeded after wrapping, -ENOMEM if memory could not be
+ * allocated, -EINVAL if @mt cannot be used, or -EBUSY if there are no
+ * free entries.
+ */
+int mtree_alloc_cyclic(struct maple_tree *mt, unsigned long *startp,
+		void *entry, unsigned long range_lo, unsigned long range_hi,
+		unsigned long *next, gfp_t gfp)
+{
+	int ret;
+
+	MA_STATE(mas, mt, 0, 0);
+
+	if (!mt_is_alloc(mt))
+		return -EINVAL;
+	if (WARN_ON_ONCE(mt_is_reserved(entry)))
+		return -EINVAL;
+	mtree_lock(mt);
+	ret = mas_alloc_cyclic(&mas, startp, entry, range_lo, range_hi,
+			next, gfp);
+	mtree_unlock(mt);
+	return ret;
+}
+EXPORT_SYMBOL(mtree_alloc_cyclic);
+
 int mtree_alloc_rrange(struct maple_tree *mt, unsigned long *startp,
 		void *entry, unsigned long size, unsigned long min,
 		unsigned long max, gfp_t gfp)
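
At the mtree_ level the wrapper adds tree validation (-EINVAL for a tree created without MT_FLAGS_ALLOC_RANGE or for a reserved entry) and the locking, so a caller only has to interpret the result. Continuing the hypothetical my_ctx example from above (illustrative, not from this commit):

/* Illustrative: translate the allocator's result for a caller's own API. */
static int my_ctx_add_checked(struct my_ctx *ctx, void *item,
			      unsigned long *id)
{
	int ret;

	ret = mtree_alloc_cyclic(&ctx->mt, id, item, 1, ULONG_MAX,
				 &ctx->next_id, GFP_KERNEL);
	if (ret == -EBUSY)
		return -ENOSPC;	/* illustrative: every ID in the range is in use */
	if (ret < 0)
		return ret;	/* -ENOMEM or -EINVAL */
	return 0;		/* ret == 1 only means the ID space wrapped */
}

Note that erasing an entry with mtree_erase() does not rewind *next: because the search starts at max(range_lo, *next), a freed ID is handed out again only after the allocator wraps.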
