Skip to content

Commit 5a75034

Browse files
josefbacik authored and kdave committed
btrfs: do not panic if we can't allocate a prealloc extent state
We sometimes have to allocate new extent states when clearing or setting new bits in an extent io tree. Generally we preallocate this before taking the tree spin lock, but we can use this preallocated extent state sometimes and then need to try to do a GFP_ATOMIC allocation under the lock. Unfortunately sometimes this fails, and then we hit the BUG_ON() and bring the box down. This happens roughly 20 times a week in our fleet. However the vast majority of callers use GFP_NOFS, which means that if this GFP_ATOMIC allocation fails, we could simply drop the spin lock, go back and allocate a new extent state with our given gfp mask, and begin again from where we left off. For the remaining callers that do not use GFP_NOFS, they are generally using GFP_NOWAIT, which still allows for some reclaim. So allow these allocations to attempt to happen outside of the spin lock so we don't need to rely on GFP_ATOMIC allocations. This in essence creates an infinite loop for anything that isn't GFP_NOFS. To address this we may want to migrate to using mempools for extent states so that we will always have emergency reserves in order to make our allocations. Signed-off-by: Josef Bacik <[email protected]> Reviewed-by: David Sterba <[email protected]> Signed-off-by: David Sterba <[email protected]>
1 parent da2a071 commit 5a75034

File tree

1 file changed

+14
-8
lines changed

1 file changed

+14
-8
lines changed

fs/btrfs/extent-io-tree.c

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -572,7 +572,7 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
572572
if (bits & (EXTENT_LOCKED | EXTENT_BOUNDARY))
573573
clear = 1;
574574
again:
575-
if (!prealloc && gfpflags_allow_blocking(mask)) {
575+
if (!prealloc) {
576576
/*
577577
* Don't care for allocation failure here because we might end
578578
* up not needing the pre-allocated extent state at all, which
@@ -636,7 +636,8 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
636636

637637
if (state->start < start) {
638638
prealloc = alloc_extent_state_atomic(prealloc);
639-
BUG_ON(!prealloc);
639+
if (!prealloc)
640+
goto search_again;
640641
err = split_state(tree, state, prealloc, start);
641642
if (err)
642643
extent_io_tree_panic(tree, err);
@@ -657,7 +658,8 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
657658
*/
658659
if (state->start <= end && state->end > end) {
659660
prealloc = alloc_extent_state_atomic(prealloc);
660-
BUG_ON(!prealloc);
661+
if (!prealloc)
662+
goto search_again;
661663
err = split_state(tree, state, prealloc, end + 1);
662664
if (err)
663665
extent_io_tree_panic(tree, err);
@@ -987,7 +989,7 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
987989
else
988990
ASSERT(failed_start == NULL && failed_state == NULL);
989991
again:
990-
if (!prealloc && gfpflags_allow_blocking(mask)) {
992+
if (!prealloc) {
991993
/*
992994
* Don't care for allocation failure here because we might end
993995
* up not needing the pre-allocated extent state at all, which
@@ -1012,7 +1014,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
10121014
state = tree_search_for_insert(tree, start, &p, &parent);
10131015
if (!state) {
10141016
prealloc = alloc_extent_state_atomic(prealloc);
1015-
BUG_ON(!prealloc);
1017+
if (!prealloc)
1018+
goto search_again;
10161019
prealloc->start = start;
10171020
prealloc->end = end;
10181021
insert_state_fast(tree, prealloc, p, parent, bits, changeset);
@@ -1085,7 +1088,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
10851088
}
10861089

10871090
prealloc = alloc_extent_state_atomic(prealloc);
1088-
BUG_ON(!prealloc);
1091+
if (!prealloc)
1092+
goto search_again;
10891093
err = split_state(tree, state, prealloc, start);
10901094
if (err)
10911095
extent_io_tree_panic(tree, err);
@@ -1122,7 +1126,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
11221126
this_end = last_start - 1;
11231127

11241128
prealloc = alloc_extent_state_atomic(prealloc);
1125-
BUG_ON(!prealloc);
1129+
if (!prealloc)
1130+
goto search_again;
11261131

11271132
/*
11281133
* Avoid to free 'prealloc' if it can be merged with the later
@@ -1154,7 +1159,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
11541159
}
11551160

11561161
prealloc = alloc_extent_state_atomic(prealloc);
1157-
BUG_ON(!prealloc);
1162+
if (!prealloc)
1163+
goto search_again;
11581164
err = split_state(tree, state, prealloc, end + 1);
11591165
if (err)
11601166
extent_io_tree_panic(tree, err);

0 commit comments

Comments (0)