Skip to content

Commit a00347b

Browse files
committed
[Sanitizer][RISCV][AArch64][Android] Adjust allocator tests
On 64-bit systems with small VMAs (e.g. 39-bit) we can't use `SizeClassAllocator64` parameterized with size class maps containing a large number of classes, as that will make the allocator region size too small (< 2^32). Several tests were already disabled for Android because of this. This patch provides the correct allocator configuration for RISC-V (riscv64), generalizes the gating condition for tests that can't be enabled for small VMA systems, and tweaks the tests that can be made compatible with those systems to enable them. Differential Revision: https://reviews.llvm.org/D97234
1 parent 1974065 commit a00347b

File tree

1 file changed

+30
-14
lines changed

1 file changed

+30
-14
lines changed

compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp

Lines changed: 30 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,17 @@ using namespace __sanitizer;
3636
#define SKIP_ON_SOLARIS_SPARCV9(x) x
3737
#endif
3838

39+
// On 64-bit systems with small virtual address spaces (e.g. 39-bit) we can't
40+
// use size class maps with a large number of classes, as that will make the
41+
// SizeClassAllocator64 region size too small (< 2^32).
42+
#if SANITIZER_ANDROID && defined(__aarch64__)
43+
#define ALLOCATOR64_SMALL_SIZE 1
44+
#elif SANITIZER_RISCV64
45+
#define ALLOCATOR64_SMALL_SIZE 1
46+
#else
47+
#define ALLOCATOR64_SMALL_SIZE 0
48+
#endif
49+
3950
// Too slow for debug build
4051
#if !SANITIZER_DEBUG
4152

@@ -53,6 +64,11 @@ static const uptr kAllocatorSpace = 0x3000000000ULL;
5364
static const uptr kAllocatorSize = 0x2000000000ULL;
5465
static const u64 kAddressSpaceSize = 1ULL << 39;
5566
typedef VeryCompactSizeClassMap SizeClassMap;
67+
#elif SANITIZER_RISCV64
68+
const uptr kAllocatorSpace = ~(uptr)0;
69+
const uptr kAllocatorSize = 0x2000000000ULL; // 128G.
70+
static const u64 kAddressSpaceSize = 1ULL << 38;
71+
typedef VeryDenseSizeClassMap SizeClassMap;
5672
#else
5773
static const uptr kAllocatorSpace = 0x700000000000ULL;
5874
static const uptr kAllocatorSize = 0x010000000000ULL; // 1T.
@@ -276,8 +292,7 @@ TEST(SanitizerCommon, SizeClassAllocator64Dynamic) {
276292
TestSizeClassAllocator<Allocator64Dynamic>();
277293
}
278294

279-
#if !SANITIZER_ANDROID
280-
//FIXME(kostyak): find values so that those work on Android as well.
295+
#if !ALLOCATOR64_SMALL_SIZE
281296
TEST(SanitizerCommon, SizeClassAllocator64Compact) {
282297
TestSizeClassAllocator<Allocator64Compact>();
283298
}
@@ -361,7 +376,7 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
361376
SizeClassAllocatorMetadataStress<Allocator64Dynamic>();
362377
}
363378

364-
#if !SANITIZER_ANDROID
379+
#if !ALLOCATOR64_SMALL_SIZE
365380
TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
366381
SizeClassAllocatorMetadataStress<Allocator64Compact>();
367382
}
@@ -408,7 +423,7 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
408423
SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
409424
1ULL << (SANITIZER_ANDROID ? 31 : 33));
410425
}
411-
#if !SANITIZER_ANDROID
426+
#if !ALLOCATOR64_SMALL_SIZE
412427
TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
413428
SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>(1ULL << 33);
414429
}
@@ -520,7 +535,7 @@ TEST(SanitizerCommon, LargeMmapAllocatorMapUnmapCallback) {
520535

521536
// Don't test OOM conditions on Win64 because it causes other tests on the same
522537
// machine to OOM.
523-
#if SANITIZER_CAN_USE_ALLOCATOR64 && !SANITIZER_WINDOWS64 && !SANITIZER_ANDROID
538+
#if SANITIZER_CAN_USE_ALLOCATOR64 && !SANITIZER_WINDOWS64
524539
TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
525540
Allocator64 a;
526541
a.Init(kReleaseToOSIntervalNever);
@@ -534,7 +549,8 @@ TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
534549
uint32_t chunks[kNumChunks];
535550
bool allocation_failed = false;
536551
for (int i = 0; i < 1000000; i++) {
537-
if (!a.GetFromAllocator(&stats, 52, chunks, kNumChunks)) {
552+
uptr class_id = a.kNumClasses - 1;
553+
if (!a.GetFromAllocator(&stats, class_id, chunks, kNumChunks)) {
538554
allocation_failed = true;
539555
break;
540556
}
@@ -698,7 +714,7 @@ TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
698714
TestCombinedAllocator<Allocator64Dynamic>();
699715
}
700716

701-
#if !SANITIZER_ANDROID
717+
#if !ALLOCATOR64_SMALL_SIZE
702718
TEST(SanitizerCommon, CombinedAllocator64Compact) {
703719
TestCombinedAllocator<Allocator64Compact>();
704720
}
@@ -759,7 +775,7 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
759775
TestSizeClassAllocatorLocalCache<Allocator64Dynamic>();
760776
}
761777

762-
#if !SANITIZER_ANDROID
778+
#if !ALLOCATOR64_SMALL_SIZE
763779
TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
764780
TestSizeClassAllocatorLocalCache<Allocator64Compact>();
765781
}
@@ -1016,8 +1032,8 @@ TEST(SanitizerCommon, LargeMmapAllocatorBlockBegin) {
10161032

10171033
// Don't test OOM conditions on Win64 because it causes other tests on the same
10181034
// machine to OOM.
1019-
#if SANITIZER_CAN_USE_ALLOCATOR64 && !SANITIZER_WINDOWS64 && !SANITIZER_ANDROID
1020-
typedef __sanitizer::SizeClassMap<3, 4, 8, 38, 128, 16> SpecialSizeClassMap;
1035+
#if SANITIZER_CAN_USE_ALLOCATOR64 && !SANITIZER_WINDOWS64
1036+
typedef __sanitizer::SizeClassMap<2, 22, 22, 34, 128, 16> SpecialSizeClassMap;
10211037
template <typename AddressSpaceViewTy = LocalAddressSpaceView>
10221038
struct AP64_SpecialSizeClassMap {
10231039
static const uptr kSpaceBeg = kAllocatorSpace;
@@ -1044,15 +1060,15 @@ TEST(SanitizerCommon, SizeClassAllocator64PopulateFreeListOOM) {
10441060
// ...one man is on a mission to overflow a region with a series of
10451061
// successive allocations.
10461062

1047-
const uptr kClassID = 107;
1063+
const uptr kClassID = ALLOCATOR64_SMALL_SIZE ? 18 : 24;
10481064
const uptr kAllocationSize = SpecialSizeClassMap::Size(kClassID);
10491065
ASSERT_LT(2 * kAllocationSize, kRegionSize);
10501066
ASSERT_GT(3 * kAllocationSize, kRegionSize);
10511067
EXPECT_NE(cache.Allocate(a, kClassID), nullptr);
10521068
EXPECT_NE(cache.Allocate(a, kClassID), nullptr);
10531069
EXPECT_EQ(cache.Allocate(a, kClassID), nullptr);
10541070

1055-
const uptr Class2 = 100;
1071+
const uptr Class2 = ALLOCATOR64_SMALL_SIZE ? 15 : 21;
10561072
const uptr Size2 = SpecialSizeClassMap::Size(Class2);
10571073
ASSERT_EQ(Size2 * 8, kRegionSize);
10581074
char *p[7];
@@ -1338,15 +1354,15 @@ TEST(SanitizerCommon, SizeClassAllocator64ReleaseFreeMemoryToOS) {
13381354
TestReleaseFreeMemoryToOS<Allocator64>();
13391355
}
13401356

1341-
#if !SANITIZER_ANDROID
1357+
#if !ALLOCATOR64_SMALL_SIZE
13421358
TEST(SanitizerCommon, SizeClassAllocator64CompactReleaseFreeMemoryToOS) {
13431359
TestReleaseFreeMemoryToOS<Allocator64Compact>();
13441360
}
13451361

13461362
TEST(SanitizerCommon, SizeClassAllocator64VeryCompactReleaseFreeMemoryToOS) {
13471363
TestReleaseFreeMemoryToOS<Allocator64VeryCompact>();
13481364
}
1349-
#endif // !SANITIZER_ANDROID
1365+
#endif // !ALLOCATOR64_SMALL_SIZE
13501366

13511367
#endif // SANITIZER_CAN_USE_ALLOCATOR64
13521368

0 commit comments

Comments
 (0)