@@ -885,85 +885,93 @@ struct bkey_packed *bch2_bkey_prev_filter(struct btree *b,
 
 /* Insert */
 
+static void rw_aux_tree_insert_entry(struct btree *b,
+				     struct bset_tree *t,
+				     unsigned idx)
+{
+	EBUG_ON(!idx || idx > t->size);
+	struct bkey_packed *start = rw_aux_to_bkey(b, t, idx - 1);
+	struct bkey_packed *end = idx < t->size
+		? rw_aux_to_bkey(b, t, idx)
+		: btree_bkey_last(b, t);
+
+	if (t->size < bset_rw_tree_capacity(b, t) &&
+	    (void *) end - (void *) start > L1_CACHE_BYTES) {
+		struct bkey_packed *k = start;
+
+		while (1) {
+			k = bkey_p_next(k);
+			if (k == end)
+				break;
+
+			if ((void *) k - (void *) start >= L1_CACHE_BYTES) {
+				memmove(&rw_aux_tree(b, t)[idx + 1],
+					&rw_aux_tree(b, t)[idx],
+					(void *) &rw_aux_tree(b, t)[t->size] -
+					(void *) &rw_aux_tree(b, t)[idx]);
+				t->size++;
+				rw_aux_tree_set(b, t, idx, k);
+				break;
+			}
+		}
+	}
+}
+
 static void bch2_bset_fix_lookup_table(struct btree *b,
 				       struct bset_tree *t,
 				       struct bkey_packed *_where,
 				       unsigned clobber_u64s,
 				       unsigned new_u64s)
 {
 	int shift = new_u64s - clobber_u64s;
-	unsigned l, j, where = __btree_node_key_to_offset(b, _where);
+	unsigned idx, j, where = __btree_node_key_to_offset(b, _where);
 
 	EBUG_ON(bset_has_ro_aux_tree(t));
 
 	if (!bset_has_rw_aux_tree(t))
 		return;
 
+	if (where > rw_aux_tree(b, t)[t->size - 1].offset) {
+		rw_aux_tree_insert_entry(b, t, t->size);
+		goto verify;
+	}
+
 	/* returns first entry >= where */
-	l = rw_aux_tree_bsearch(b, t, where);
-
-	if (!l) /* never delete first entry */
-		l++;
-	else if (l < t->size &&
-		 where < t->end_offset &&
-		 rw_aux_tree(b, t)[l].offset == where)
-		rw_aux_tree_set(b, t, l++, _where);
-
-	/* l now > where */
-
-	for (j = l;
-	     j < t->size &&
-	     rw_aux_tree(b, t)[j].offset < where + clobber_u64s;
-	     j++)
-		;
-
-	if (j < t->size &&
-	    rw_aux_tree(b, t)[j].offset + shift ==
-	    rw_aux_tree(b, t)[l - 1].offset)
-		j++;
-
-	memmove(&rw_aux_tree(b, t)[l],
-		&rw_aux_tree(b, t)[j],
-		(void *) &rw_aux_tree(b, t)[t->size] -
-		(void *) &rw_aux_tree(b, t)[j]);
-	t->size -= j - l;
-
-	for (j = l; j < t->size; j++)
-		rw_aux_tree(b, t)[j].offset += shift;
+	idx = rw_aux_tree_bsearch(b, t, where);
+
+	if (rw_aux_tree(b, t)[idx].offset == where) {
+		if (!idx) { /* never delete first entry */
+			idx++;
+		} else if (where < t->end_offset) {
+			rw_aux_tree_set(b, t, idx++, _where);
+		} else {
+			EBUG_ON(where != t->end_offset);
+			rw_aux_tree_insert_entry(b, t, --t->size);
+			goto verify;
+		}
+	}
 
-	EBUG_ON(l < t->size &&
-		rw_aux_tree(b, t)[l].offset ==
-		rw_aux_tree(b, t)[l - 1].offset);
+	EBUG_ON(idx < t->size && rw_aux_tree(b, t)[idx].offset <= where);
+	if (idx < t->size &&
+	    rw_aux_tree(b, t)[idx].offset + shift ==
+	    rw_aux_tree(b, t)[idx - 1].offset) {
+		memmove(&rw_aux_tree(b, t)[idx],
+			&rw_aux_tree(b, t)[idx + 1],
+			(void *) &rw_aux_tree(b, t)[t->size] -
+			(void *) &rw_aux_tree(b, t)[idx + 1]);
+		t->size -= 1;
+	}
 
-	if (t->size < bset_rw_tree_capacity(b, t) &&
-	    (l < t->size
-	     ? rw_aux_tree(b, t)[l].offset
-	     : t->end_offset) -
-	    rw_aux_tree(b, t)[l - 1].offset >
-	    L1_CACHE_BYTES / sizeof(u64)) {
-		struct bkey_packed *start = rw_aux_to_bkey(b, t, l - 1);
-		struct bkey_packed *end = l < t->size
-			? rw_aux_to_bkey(b, t, l)
-			: btree_bkey_last(b, t);
-		struct bkey_packed *k = start;
+	for (j = idx; j < t->size; j++)
+		rw_aux_tree(b, t)[j].offset += shift;
 
-		while (1) {
-			k = bkey_p_next(k);
-			if (k == end)
-				break;
+	EBUG_ON(idx < t->size &&
+		rw_aux_tree(b, t)[idx].offset ==
+		rw_aux_tree(b, t)[idx - 1].offset);
 
-			if ((void *) k - (void *) start >= L1_CACHE_BYTES) {
-				memmove(&rw_aux_tree(b, t)[l + 1],
-					&rw_aux_tree(b, t)[l],
-					(void *) &rw_aux_tree(b, t)[t->size] -
-					(void *) &rw_aux_tree(b, t)[l]);
-				t->size++;
-				rw_aux_tree_set(b, t, l, k);
-				break;
-			}
-		}
-	}
+	rw_aux_tree_insert_entry(b, t, idx);
+
+verify:
 	bch2_bset_verify_rw_aux_tree(b, t);
 	bset_aux_tree_verify(b);
 }
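
The extracted helper centralizes the heuristic the old code open-coded in its final hunk: starting from the key at lookup-table entry `idx - 1`, walk keys forward and add a new entry once more than one L1 cache line of keys would otherwise go uncovered. As a reading aid only, here is a minimal standalone C model of that heuristic; the `toy_*` names, the flat length-prefixed key buffer, and `CACHE_LINE_BYTES` are invented stand-ins for the bcachefs types and are not part of this patch.

```c
/*
 * Toy model of the rw_aux_tree_insert_entry() heuristic above.
 * Keys live in a flat buffer as variable-length records whose first
 * byte is the record length; table entries are sorted byte offsets.
 * This assumes, as bkey_p_next() guarantees in the real code, that
 * walking record-by-record from one entry lands exactly on the next.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define CACHE_LINE_BYTES 64	/* stand-in for L1_CACHE_BYTES */

struct toy_table {
	unsigned	size;		/* entries currently in use */
	unsigned	capacity;	/* max entries that fit */
	uint32_t	offset[64];	/* sorted byte offsets of keys */
};

/* advance to the next record: first byte holds the record's length */
static inline uint32_t toy_next(const uint8_t *buf, uint32_t off)
{
	return off + buf[off];
}

/*
 * Insert a table entry at idx (idx >= 1, as the patch's EBUG_ON enforces)
 * if the span between entry idx - 1 and the next entry, or the end of
 * the buffer, exceeds one cache line.
 */
static void toy_insert_entry(struct toy_table *t, const uint8_t *buf,
			     uint32_t buf_end, unsigned idx)
{
	uint32_t start = t->offset[idx - 1];
	uint32_t end = idx < t->size ? t->offset[idx] : buf_end;

	if (t->size >= t->capacity || end - start <= CACHE_LINE_BYTES)
		return;

	for (uint32_t k = toy_next(buf, start); k != end; k = toy_next(buf, k)) {
		if (k - start >= CACHE_LINE_BYTES) {
			/* shift the tail up and record the new entry */
			memmove(&t->offset[idx + 1], &t->offset[idx],
				(t->size - idx) * sizeof(t->offset[0]));
			t->offset[idx] = k;
			t->size++;
			break;
		}
	}
}

int main(void)
{
	uint8_t buf[256];

	/* sixteen 16-byte records: each record's first byte is its length */
	for (uint32_t off = 0; off < sizeof(buf); off += 16)
		buf[off] = 16;

	struct toy_table t = { .size = 1, .capacity = 8, .offset = { 0 } };

	/* span from offset 0 to buffer end is 256 bytes > 64: adds an entry */
	toy_insert_entry(&t, buf, sizeof(buf), t.size);
	printf("entries: %u, new entry at offset %u\n", t.size, t.offset[1]);
	return 0;
}
```

As in the patch, the new entry lands on the first key boundary at or past the cache-line mark, so a lookup that binary-searches the table never has to scan much more than one cache line of keys linearly.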