Skip to content

Commit c0e9d73

Browse files
bahusoid and hazzik
authored and committed
Avoid recursive ProcessKey calls in BatchFetchQueue (#2016)
1 parent 109f085 commit c0e9d73

File tree

2 files changed

+115
-129
lines changed

2 files changed

+115
-129
lines changed

src/NHibernate/Async/Engine/BatchFetchQueue.cs

Lines changed: 105 additions & 119 deletions
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ internal async Task<object[]> GetCollectionBatchAsync(ICollectionPersister colle
7474
foreach (KeyValuePair<CollectionEntry, IPersistentCollection> me in map)
7575
{
7676
cancellationToken.ThrowIfCancellationRequested();
77-
if (await (ProcessKeyAsync(me)).ConfigureAwait(false))
77+
if (ProcessKey(me) ?? await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false))
7878
{
7979
return keys;
8080
}
@@ -107,7 +107,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
107107
{
108108
for (var j = 0; j < collectionKeys.Count; j++)
109109
{
110-
if (await (ProcessKeyAsync(collectionKeys[indexes[j]].Key)).ConfigureAwait(false))
110+
if (ProcessKey(collectionKeys[indexes[j]].Key) == true)
111111
{
112112
return true;
113113
}
@@ -118,7 +118,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
118118
var results = await (AreCachedAsync(collectionKeys, indexes, collectionPersister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false);
119119
for (var j = 0; j < results.Length; j++)
120120
{
121-
if (!results[j] && await (ProcessKeyAsync(collectionKeys[indexes[j]].Key, true)).ConfigureAwait(false))
121+
if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true) == true)
122122
{
123123
return true;
124124
}
@@ -132,91 +132,84 @@ async Task<bool> CheckCacheAndProcessResultAsync()
132132
return false;
133133
}
134134

135-
Task<bool> ProcessKeyAsync(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
135+
bool? ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
136136
{
137-
try
137+
var ce = me.Key;
138+
var collection = me.Value;
139+
if (ce.LoadedKey == null)
138140
{
139-
var ce = me.Key;
140-
var collection = me.Value;
141-
if (ce.LoadedKey == null)
142-
{
143-
// the LoadedKey of the CollectionEntry might be null as it might have been reset to null
144-
// (see for example Collections.ProcessDereferencedCollection()
145-
// and CollectionEntry.AfterAction())
146-
// though we clear the queue on flush, it seems like a good idea to guard
147-
// against potentially null LoadedKey:s
148-
return Task.FromResult<bool>(false);
149-
}
141+
// the LoadedKey of the CollectionEntry might be null as it might have been reset to null
142+
// (see for example Collections.ProcessDereferencedCollection()
143+
// and CollectionEntry.AfterAction())
144+
// though we clear the queue on flush, it seems like a good idea to guard
145+
// against potentially null LoadedKey:s
146+
return false;
147+
}
150148

151-
if (collection.WasInitialized)
152-
{
153-
log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
154-
return Task.FromResult<bool>(false);
155-
}
149+
if (collection.WasInitialized)
150+
{
151+
log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
152+
return false;
153+
}
156154

157-
if (checkForEnd && (index == map.Count || index >= keyIndex.Value + batchSize))
155+
if (checkForEnd && (index == map.Count || index >= keyIndex.Value + batchSize))
156+
{
157+
return true;
158+
}
159+
if (collectionPersister.KeyType.IsEqual(key, ce.LoadedKey, collectionPersister.Factory))
160+
{
161+
if (collectionEntries != null)
158162
{
159-
return Task.FromResult<bool>(true);
163+
collectionEntries[0] = ce;
160164
}
161-
if (collectionPersister.KeyType.IsEqual(key, ce.LoadedKey, collectionPersister.Factory))
165+
keyIndex = index;
166+
}
167+
else if (!checkCache || batchableCache == null)
168+
{
169+
if (index < map.Count && (!keyIndex.HasValue || index < keyIndex.Value))
162170
{
163-
if (collectionEntries != null)
164-
{
165-
collectionEntries[0] = ce;
166-
}
167-
keyIndex = index;
171+
collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
172+
return false;
168173
}
169-
else if (!checkCache || batchableCache == null)
170-
{
171-
if (index < map.Count && (!keyIndex.HasValue || index < keyIndex.Value))
172-
{
173-
collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
174-
return Task.FromResult<bool>(false);
175-
}
176174

177-
// No need to check "!checkCache || !IsCached(ce.LoadedKey, collectionPersister)":
178-
// "batchableCache == null" already means there is no cache, so IsCached can only yield false.
179-
// (This method is now removed.)
180-
if (collectionEntries != null)
181-
{
182-
collectionEntries[i] = ce;
183-
}
184-
keys[i++] = ce.LoadedKey;
185-
}
186-
else if (ignoreCache)
175+
// No need to check "!checkCache || !IsCached(ce.LoadedKey, collectionPersister)":
176+
// "batchableCache == null" already means there is no cache, so IsCached can only yield false.
177+
// (This method is now removed.)
178+
if (collectionEntries != null)
187179
{
188-
if (collectionEntries != null)
189-
{
190-
collectionEntries[i] = ce;
191-
}
192-
keys[i++] = ce.LoadedKey;
180+
collectionEntries[i] = ce;
193181
}
194-
else
182+
keys[i++] = ce.LoadedKey;
183+
}
184+
else if (ignoreCache)
185+
{
186+
if (collectionEntries != null)
195187
{
196-
collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
197-
// Check the cache only when we have collected as many keys as are needed to fill the batch,
198-
// that are after the demanded key.
199-
if (!keyIndex.HasValue || index < keyIndex.Value + batchSize)
200-
{
201-
return Task.FromResult<bool>(false);
202-
}
203-
return CheckCacheAndProcessResultAsync();
188+
collectionEntries[i] = ce;
204189
}
205-
if (i == batchSize)
190+
keys[i++] = ce.LoadedKey;
191+
}
192+
else
193+
{
194+
collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
195+
// Check the cache only when we have collected as many keys as are needed to fill the batch,
196+
// that are after the demanded key.
197+
if (!keyIndex.HasValue || index < keyIndex.Value + batchSize)
206198
{
207-
i = 1; // End of array, start filling again from start
208-
if (index == map.Count || keyIndex.HasValue)
209-
{
210-
checkForEnd = true;
211-
return Task.FromResult<bool>(index == map.Count || index >= keyIndex.Value + batchSize);
212-
}
199+
return false;
213200
}
214-
return Task.FromResult<bool>(false);
201+
return null;
215202
}
216-
catch (Exception ex)
203+
if (i == batchSize)
217204
{
218-
return Task.FromException<bool>(ex);
205+
i = 1; // End of array, start filling again from start
206+
if (index == map.Count || keyIndex.HasValue)
207+
{
208+
checkForEnd = true;
209+
return index == map.Count || index >= keyIndex.Value + batchSize;
210+
}
219211
}
212+
return false;
220213
}
221214
}
222215

@@ -273,7 +266,7 @@ internal async Task<object[]> GetEntityBatchAsync(IEntityPersister persister, ob
273266
foreach (var key in set)
274267
{
275268
cancellationToken.ThrowIfCancellationRequested();
276-
if (await (ProcessKeyAsync(key)).ConfigureAwait(false))
269+
if (ProcessKey(key) ?? await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false))
277270
{
278271
return ids;
279272
}
@@ -306,7 +299,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
306299
{
307300
for (var j = 0; j < entityKeys.Count; j++)
308301
{
309-
if (await (ProcessKeyAsync(entityKeys[indexes[j]].Key)).ConfigureAwait(false))
302+
if (ProcessKey(entityKeys[indexes[j]].Key) == true)
310303
{
311304
return true;
312305
}
@@ -317,7 +310,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
317310
var results = await (AreCachedAsync(entityKeys, indexes, persister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false);
318311
for (var j = 0; j < results.Length; j++)
319312
{
320-
if (!results[j] && await (ProcessKeyAsync(entityKeys[indexes[j]].Key, true)).ConfigureAwait(false))
313+
if (!results[j] && ProcessKey(entityKeys[indexes[j]].Key, true) == true)
321314
{
322315
return true;
323316
}
@@ -331,62 +324,55 @@ async Task<bool> CheckCacheAndProcessResultAsync()
331324
return false;
332325
}
333326

334-
Task<bool> ProcessKeyAsync(EntityKey key, bool ignoreCache = false)
327+
bool? ProcessKey(EntityKey key, bool ignoreCache = false)
335328
{
336-
try
329+
//TODO: this needn't exclude subclasses...
330+
if (checkForEnd && (index == set.Count || index >= idIndex.Value + batchSize))
337331
{
338-
//TODO: this needn't exclude subclasses...
339-
if (checkForEnd && (index == set.Count || index >= idIndex.Value + batchSize))
340-
{
341-
return Task.FromResult<bool>(true);
342-
}
343-
if (persister.IdentifierType.IsEqual(id, key.Identifier))
344-
{
345-
idIndex = index;
346-
}
347-
else if (!checkCache || batchableCache == null)
348-
{
349-
if (index < set.Count && (!idIndex.HasValue || index < idIndex.Value))
350-
{
351-
entityKeys.Add(new KeyValuePair<EntityKey, int>(key, index));
352-
return Task.FromResult<bool>(false);
353-
}
354-
355-
// No need to check "!checkCache || !IsCached(key, persister)": "batchableCache == null"
356-
// already means there is no cache, so IsCached can only yield false. (This method is now
357-
// removed.)
358-
ids[i++] = key.Identifier;
359-
}
360-
else if (ignoreCache)
361-
{
362-
ids[i++] = key.Identifier;
363-
}
364-
else
332+
return true;
333+
}
334+
if (persister.IdentifierType.IsEqual(id, key.Identifier))
335+
{
336+
idIndex = index;
337+
}
338+
else if (!checkCache || batchableCache == null)
339+
{
340+
if (index < set.Count && (!idIndex.HasValue || index < idIndex.Value))
365341
{
366342
entityKeys.Add(new KeyValuePair<EntityKey, int>(key, index));
367-
// Check the cache only when we have collected as many keys as are needed to fill the batch,
368-
// that are after the demanded key.
369-
if (!idIndex.HasValue || index < idIndex.Value + batchSize)
370-
{
371-
return Task.FromResult<bool>(false);
372-
}
373-
return CheckCacheAndProcessResultAsync();
343+
return false;
374344
}
375-
if (i == batchSize)
345+
346+
// No need to check "!checkCache || !IsCached(key, persister)": "batchableCache == null"
347+
// already means there is no cache, so IsCached can only yield false. (This method is now
348+
// removed.)
349+
ids[i++] = key.Identifier;
350+
}
351+
else if (ignoreCache)
352+
{
353+
ids[i++] = key.Identifier;
354+
}
355+
else
356+
{
357+
entityKeys.Add(new KeyValuePair<EntityKey, int>(key, index));
358+
// Check the cache only when we have collected as many keys as are needed to fill the batch,
359+
// that are after the demanded key.
360+
if (!idIndex.HasValue || index < idIndex.Value + batchSize)
376361
{
377-
i = 1; // End of array, start filling again from start
378-
if (index == set.Count || idIndex.HasValue)
379-
{
380-
checkForEnd = true;
381-
return Task.FromResult<bool>(index == set.Count || index >= idIndex.Value + batchSize);
382-
}
362+
return false;
383363
}
384-
return Task.FromResult<bool>(false);
364+
return null;
385365
}
386-
catch (Exception ex)
366+
if (i == batchSize)
387367
{
388-
return Task.FromException<bool>(ex);
368+
i = 1; // End of array, start filling again from start
369+
if (index == set.Count || idIndex.HasValue)
370+
{
371+
checkForEnd = true;
372+
return index == set.Count || index >= idIndex.Value + batchSize;
373+
}
389374
}
375+
return false;
390376
}
391377
}
392378

src/NHibernate/Engine/BatchFetchQueue.cs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -244,7 +244,7 @@ internal object[] GetCollectionBatch(ICollectionPersister collectionPersister, o
244244

245245
foreach (KeyValuePair<CollectionEntry, IPersistentCollection> me in map)
246246
{
247-
if (ProcessKey(me))
247+
if (ProcessKey(me) ?? CheckCacheAndProcessResult())
248248
{
249249
return keys;
250250
}
@@ -276,7 +276,7 @@ bool CheckCacheAndProcessResult()
276276
{
277277
for (var j = 0; j < collectionKeys.Count; j++)
278278
{
279-
if (ProcessKey(collectionKeys[indexes[j]].Key))
279+
if (ProcessKey(collectionKeys[indexes[j]].Key) == true)
280280
{
281281
return true;
282282
}
@@ -287,7 +287,7 @@ bool CheckCacheAndProcessResult()
287287
var results = AreCached(collectionKeys, indexes, collectionPersister, batchableCache, checkCache);
288288
for (var j = 0; j < results.Length; j++)
289289
{
290-
if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true))
290+
if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true) == true)
291291
{
292292
return true;
293293
}
@@ -301,7 +301,7 @@ bool CheckCacheAndProcessResult()
301301
return false;
302302
}
303303

304-
bool ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
304+
bool? ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
305305
{
306306
var ce = me.Key;
307307
var collection = me.Value;
@@ -367,7 +367,7 @@ bool ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ig
367367
{
368368
return false;
369369
}
370-
return CheckCacheAndProcessResult();
370+
return null;
371371
}
372372
if (i == batchSize)
373373
{
@@ -427,7 +427,7 @@ internal object[] GetEntityBatch(IEntityPersister persister, object id, int batc
427427

428428
foreach (var key in set)
429429
{
430-
if (ProcessKey(key))
430+
if (ProcessKey(key) ?? CheckCacheAndProcessResult())
431431
{
432432
return ids;
433433
}
@@ -459,7 +459,7 @@ bool CheckCacheAndProcessResult()
459459
{
460460
for (var j = 0; j < entityKeys.Count; j++)
461461
{
462-
if (ProcessKey(entityKeys[indexes[j]].Key))
462+
if (ProcessKey(entityKeys[indexes[j]].Key) == true)
463463
{
464464
return true;
465465
}
@@ -470,7 +470,7 @@ bool CheckCacheAndProcessResult()
470470
var results = AreCached(entityKeys, indexes, persister, batchableCache, checkCache);
471471
for (var j = 0; j < results.Length; j++)
472472
{
473-
if (!results[j] && ProcessKey(entityKeys[indexes[j]].Key, true))
473+
if (!results[j] && ProcessKey(entityKeys[indexes[j]].Key, true) == true)
474474
{
475475
return true;
476476
}
@@ -484,7 +484,7 @@ bool CheckCacheAndProcessResult()
484484
return false;
485485
}
486486

487-
bool ProcessKey(EntityKey key, bool ignoreCache = false)
487+
bool? ProcessKey(EntityKey key, bool ignoreCache = false)
488488
{
489489
//TODO: this needn't exclude subclasses...
490490
if (checkForEnd && (index == set.Count || index >= idIndex.Value + batchSize))
@@ -521,7 +521,7 @@ bool ProcessKey(EntityKey key, bool ignoreCache = false)
521521
{
522522
return false;
523523
}
524-
return CheckCacheAndProcessResult();
524+
return null;
525525
}
526526
if (i == batchSize)
527527
{

0 commit comments

Comments (0)