Batch fetch queue fix #1925

Merged
merged 8 commits on Dec 8, 2018
170 changes: 138 additions & 32 deletions src/NHibernate.Test/Async/CacheTest/BatchableCacheFixture.cs
@@ -219,21 +219,23 @@ public async Task MultipleGetReadOnlyTestAsync()
var persister = Sfi.GetEntityPersister(typeof(ReadOnly).FullName);
Assert.That(persister.Cache.Cache, Is.Not.Null);
Assert.That(persister.Cache.Cache, Is.TypeOf<BatchableCache>());
var ids = new List<int>();
int[] getIds;
int[] loadIds;

using (var s = Sfi.OpenSession())
using (var tx = s.BeginTransaction())
{
var items = await (s.Query<ReadOnly>().ToListAsync());
ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id));
loadIds = getIds = items.OrderBy(o => o.Id).Select(o => o.Id).ToArray();
await (tx.CommitAsync());
}
// Batch size 3
var parentTestCases = new List<Tuple<int, int[][], int[], Func<int, bool>>>
var parentTestCases = new List<Tuple<int[], int, int[][], int[], Func<int, bool>>>
{
// When the cache is empty, GetMultiple will be called two times: once in
// DefaultLoadEventListener and once in BatchingEntityLoader.
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
0,
new[]
{
@@ -245,7 +247,8 @@ public async Task MultipleGetReadOnlyTestAsync()
),
// When there are not enough uninitialized entities after the demanded one to fill the batch,
// the nearest ones before the demanded entity are added.
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
4,
new[]
{
@@ -255,7 +258,8 @@ public async Task MultipleGetReadOnlyTestAsync()
new[] {3, 4, 5},
null
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
5,
new[]
{
@@ -265,7 +269,8 @@ public async Task MultipleGetReadOnlyTestAsync()
new[] {3, 4, 5},
null
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
0,
new[]
{
@@ -274,7 +279,8 @@ public async Task MultipleGetReadOnlyTestAsync()
null,
(i) => i % 2 == 0 // Cache all even indexes before loading
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
1,
new[]
{
@@ -284,7 +290,8 @@ public async Task MultipleGetReadOnlyTestAsync()
new[] {1, 3, 5},
(i) => i % 2 == 0
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
5,
new[]
{
@@ -294,7 +301,8 @@ public async Task MultipleGetReadOnlyTestAsync()
new[] {1, 3, 5},
(i) => i % 2 == 0
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
0,
new[]
{
@@ -304,7 +312,8 @@ public async Task MultipleGetReadOnlyTestAsync()
new[] {0, 2, 4},
(i) => i % 2 != 0
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
4,
new[]
{
@@ -313,12 +322,56 @@ public async Task MultipleGetReadOnlyTestAsync()
},
new[] {0, 2, 4},
(i) => i % 2 != 0
),
// Test cases that load a different set of ids
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 0).ToArray(),
0,
new[]
{
new[] {0, 5, 4}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type
new[] {3, 4, 5}, // triggered by Load method of BatchingEntityLoader type
},
new[] {0, 4, 5},
null
),
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 4).ToArray(),
4,
new[]
{
new[] {4, 5, 3},
new[] {5, 3, 2},
},
new[] {3, 4, 5},
null
),
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 0).ToArray(),
0,
new[]
{
new[] {0, 5, 4} // 0 gets assembled and no further processing is done
},
null,
(i) => i % 2 == 0 // Cache all even indexes before loading
),
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 1).ToArray(),
1,
new[]
{
new[] {1, 5, 4}, // 4 gets assembled inside LoadFromSecondLevelCache
new[] {5, 3, 2}
},
new[] {1, 3, 5},
(i) => i % 2 == 0
)
};

foreach (var tuple in parentTestCases)
{
await (AssertMultipleCacheCallsAsync<ReadOnly>(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4));
await (AssertMultipleCacheCallsAsync<ReadOnly>(tuple.Item1, getIds, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5));
}
}

@@ -328,21 +381,23 @@ public async Task MultipleGetReadOnlyItemTestAsync()
var persister = Sfi.GetEntityPersister(typeof(ReadOnlyItem).FullName);
Assert.That(persister.Cache.Cache, Is.Not.Null);
Assert.That(persister.Cache.Cache, Is.TypeOf<BatchableCache>());
var ids = new List<int>();
int[] getIds;
int[] loadIds;

using (var s = Sfi.OpenSession())
using (var tx = s.BeginTransaction())
{
var items = await (s.Query<ReadOnlyItem>().Take(6).ToListAsync());
ids.AddRange(items.OrderBy(o => o.Id).Select(o => o.Id));
loadIds = getIds = items.OrderBy(o => o.Id).Select(o => o.Id).ToArray();
await (tx.CommitAsync());
}
// Batch size 4
var parentTestCases = new List<Tuple<int, int[][], int[], Func<int, bool>>>
var parentTestCases = new List<Tuple<int[], int, int[][], int[], Func<int, bool>>>
{
// When the cache is empty, GetMultiple will be called two times: once in
// DefaultLoadEventListener and once in BatchingEntityLoader.
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
0,
new[]
{
@@ -354,7 +409,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
),
// When there are not enough uninitialized entities after the demanded one to fill the batch,
// the nearest ones before the demanded entity are added.
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
4,
new[]
{
@@ -364,7 +420,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
new[] {2, 3, 4, 5},
null
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
5,
new[]
{
@@ -374,7 +431,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
new[] {2, 3, 4, 5},
null
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
0,
new[]
{
@@ -383,7 +441,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
null,
(i) => i % 2 == 0 // Cache all even indexes before loading
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
1,
new[]
{
@@ -393,7 +452,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
new[] {1, 3, 5},
(i) => i % 2 == 0
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
5,
new[]
{
@@ -403,7 +463,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
new[] {1, 3, 5},
(i) => i % 2 == 0
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
0,
new[]
{
@@ -413,7 +474,8 @@ public async Task MultipleGetReadOnlyItemTestAsync()
new[] {0, 2, 4},
(i) => i % 2 != 0
),
new Tuple<int, int[][], int[], Func<int, bool>>(
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds,
4,
new[]
{
@@ -422,12 +484,56 @@ public async Task MultipleGetReadOnlyItemTestAsync()
},
new[] {0, 2, 4},
(i) => i % 2 != 0
)
),
// Test cases that load a different set of ids
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 0).ToArray(),
0,
new[]
{
new[] {0, 5, 4, 3}, // triggered by LoadFromSecondLevelCache method of DefaultLoadEventListener type
new[] {5, 4, 3, 2}, // triggered by Load method of BatchingEntityLoader type
},
new[] {0, 5, 4, 3},
null
),
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 5).ToArray(),
5,
new[]
{
new[] {5, 4, 3, 2},
new[] {4, 3, 2, 1},
},
new[] {2, 3, 4, 5},
null
),
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 0).ToArray(),
0,
new[]
{
new[] {0, 5, 4, 3} // 0 gets assembled and no further processing is done
},
null,
(i) => i % 2 == 0 // Cache all even indexes before loading
),
new Tuple<int[], int, int[][], int[], Func<int, bool>>(
loadIds.Where((v, i) => i != 1).ToArray(),
1,
new[]
{
new[] {1, 5, 4, 3}, // 4 gets assembled inside LoadFromSecondLevelCache
new[] {5, 3, 2, 0}
},
new[] {1, 3, 5},
(i) => i % 2 == 0
),
};

foreach (var tuple in parentTestCases)
{
await (AssertMultipleCacheCallsAsync<ReadOnlyItem>(ids, tuple.Item1, tuple.Item2, tuple.Item3, tuple.Item4));
await (AssertMultipleCacheCallsAsync<ReadOnlyItem>(tuple.Item1, getIds, tuple.Item2, tuple.Item3, tuple.Item4, tuple.Item5));
}
}

@@ -764,7 +870,8 @@ public async Task QueryCacheTestAsync()
}
}

private async Task AssertMultipleCacheCallsAsync<TEntity>(List<int> ids, int idIndex, int[][] fetchedIdIndexes, int[] putIdIndexes, Func<int, bool> cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken))
private async Task AssertMultipleCacheCallsAsync<TEntity>(IEnumerable<int> loadIds, IReadOnlyList<int> getIds, int idIndex,
int[][] fetchedIdIndexes, int[] putIdIndexes, Func<int, bool> cacheBeforeLoadFn = null, CancellationToken cancellationToken = default(CancellationToken))
where TEntity : CacheEntity
{
var persister = Sfi.GetEntityPersister(typeof(TEntity).FullName);
@@ -776,7 +883,7 @@ public async Task QueryCacheTestAsync()
using (var s = Sfi.OpenSession())
using (var tx = s.BeginTransaction())
{
foreach (var id in ids.Where((o, i) => cacheBeforeLoadFn(i)))
foreach (var id in getIds.Where((o, i) => cacheBeforeLoadFn(i)))
{
await (s.GetAsync<TEntity>(id, cancellationToken));
}
Expand All @@ -788,12 +895,11 @@ public async Task QueryCacheTestAsync()
using (var tx = s.BeginTransaction())
{
cache.ClearStatistics();

foreach (var id in ids)
foreach (var id in loadIds)
{
await (s.LoadAsync<TEntity>(id, cancellationToken));
}
var item = await (s.GetAsync<TEntity>(ids[idIndex], cancellationToken));
var item = await (s.GetAsync<TEntity>(getIds[idIndex], cancellationToken));
Assert.That(item, Is.Not.Null);
Assert.That(cache.GetCalls, Has.Count.EqualTo(0));
Assert.That(cache.PutCalls, Has.Count.EqualTo(0));
@@ -807,14 +913,14 @@ public async Task QueryCacheTestAsync()
Assert.That(cache.PutMultipleCalls, Has.Count.EqualTo(1));
Assert.That(
cache.PutMultipleCalls[0].OfType<CacheKey>().Select(o => (int) o.Key),
Is.EquivalentTo(putIdIndexes.Select(o => ids[o])));
Is.EquivalentTo(putIdIndexes.Select(o => getIds[o])));
}

for (int i = 0; i < fetchedIdIndexes.GetLength(0); i++)
{
Assert.That(
cache.GetMultipleCalls[i].OfType<CacheKey>().Select(o => (int) o.Key),
Is.EquivalentTo(fetchedIdIndexes[i].Select(o => ids[o])));
Is.EquivalentTo(fetchedIdIndexes[i].Select(o => getIds[o])));
}

await (tx.CommitAsync(cancellationToken));
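
For readers skimming the flattened diff, the gist of the fixture change is that each test-case tuple now carries the ids to Load (Item1) separately from the shared getIds used for the final Get and for translating the expected index arrays, so a case can load only a subset of the ids and still assert which ids land in each GetMultiple batch and in the PutMultiple call. Below is a minimal sketch of one such case as it would sit inside the fixture's async test body; the concrete id values are hypothetical stand-ins (the real ones are read back from the database), while the index arrays mirror the first new ReadOnly case above.

```csharp
// Hypothetical ids standing in for the ones the fixture queries from the database.
int[] loadIds = { 10, 11, 12, 13, 14, 15 };
int[] getIds = loadIds;

// Item1: ids passed to ISession.Load (here: every id except the first).
// Item2: index into getIds of the entity fetched with Get afterwards.
// Item3: expected GetMultiple batches, expressed as indexes into getIds.
// Item4: expected PutMultiple indexes (null in the cases above where no batch put is expected).
// Item5: optional predicate selecting indexes to cache before loading.
var testCase = new Tuple<int[], int, int[][], int[], Func<int, bool>>(
	loadIds.Where((v, i) => i != 0).ToArray(),
	0,
	new[]
	{
		new[] { 0, 5, 4 }, // batch requested by DefaultLoadEventListener
		new[] { 3, 4, 5 }  // batch requested by BatchingEntityLoader
	},
	new[] { 0, 4, 5 },
	null);

await AssertMultipleCacheCallsAsync<ReadOnly>(
	testCase.Item1, getIds, testCase.Item2, testCase.Item3, testCase.Item4, testCase.Item5);
```

Keeping getIds separate from the loaded subset lets the expected index arrays stay stable across cases while only the ids actually loaded vary.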