11 files changed, 346 insertions, 38 deletions
diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp
index b58116e17b7..cb9a9292f7a 100644
--- a/compiler-rt/lib/asan/asan_allocator.cpp
+++ b/compiler-rt/lib/asan/asan_allocator.cpp
@@ -48,8 +48,6 @@ static u32 RZSize2Log(u32 rz_size) {
   return res;
 }

-static AsanAllocator &get_allocator();
-
 // The memory chunk allocated from the underlying allocator looks like this:
 // L L L L L L H H U U U U U U R R
 // L -- left redzone words (0 or more bytes)
@@ -113,7 +111,7 @@ enum {
 struct AsanChunk: ChunkBase {
   uptr Beg() { return reinterpret_cast<uptr>(this) + kChunkHeaderSize; }
   uptr UsedSize(bool locked_version = false) {
-    if (user_requested_size != SizeClassMap::kMaxSize)
+    if (user_requested_size != get_allocator().KMaxSize())
       return user_requested_size;
     return *reinterpret_cast<uptr *>(
         get_allocator().GetMetaData(AllocBeg(locked_version)));
@@ -430,7 +428,7 @@ struct Allocator {
     bool using_primary_allocator = true;
     // If we are allocating from the secondary allocator, there will be no
     // automatic right redzone, so add the right redzone manually.
-    if (!PrimaryAllocator::CanAllocate(needed_size, alignment)) {
+    if (!get_allocator().CanAllocate(needed_size, alignment)) {
       needed_size += rz_size;
       using_primary_allocator = false;
     }
@@ -499,7 +497,7 @@ struct Allocator {
       CHECK(allocator.FromPrimary(allocated));
     } else {
       CHECK(!allocator.FromPrimary(allocated));
-      m->user_requested_size = SizeClassMap::kMaxSize;
+      m->user_requested_size = get_allocator().KMaxSize();
       uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(allocated));
       meta[0] = size;
       meta[1] = chunk_beg;
@@ -524,10 +522,10 @@ struct Allocator {
     thread_stats.mallocs++;
     thread_stats.malloced += size;
     thread_stats.malloced_redzones += needed_size - size;
-    if (needed_size > SizeClassMap::kMaxSize)
+    if (needed_size > get_allocator().KMaxSize())
       thread_stats.malloc_large++;
     else
-      thread_stats.malloced_by_size[SizeClassMap::ClassID(needed_size)]++;
+      thread_stats.malloced_by_size[get_allocator().ClassID(needed_size)]++;

     void *res = reinterpret_cast<void *>(user_beg);
     if (can_fill && fl.max_malloc_fill_size) {
@@ -791,7 +789,7 @@ struct Allocator {

 static Allocator instance(LINKER_INITIALIZED);

-static AsanAllocator &get_allocator() {
+AsanAllocator &get_allocator() {
   return instance.allocator;
 }

diff --git a/compiler-rt/lib/asan/asan_allocator.h b/compiler-rt/lib/asan/asan_allocator.h
index b37d8ef4e8d..ef610fb823e 100644
--- a/compiler-rt/lib/asan/asan_allocator.h
+++ b/compiler-rt/lib/asan/asan_allocator.h
@@ -118,39 +118,76 @@ struct AsanMapUnmapCallback {
   void OnUnmap(uptr p, uptr size) const;
 };

-#if SANITIZER_CAN_USE_ALLOCATOR64
+#if defined(__aarch64__)
+// AArch64 supports 39, 42 and 48-bit VMA.
+const uptr kAllocatorSpace = ~(uptr)0;
+#if SANITIZER_ANDROID
+const uptr kAllocatorSize = 0x2000000000ULL; // 128G.
+typedef VeryCompactSizeClassMap SizeClassMap64;
+#else
+const uptr kAllocatorSize = 0x40000000000ULL; // 4T.
+typedef DefaultSizeClassMap SizeClassMap64;
+#endif
+
+template <typename AddressSpaceViewTy>
+struct AP64 { // Allocator64 parameters. Deliberately using a short name.
+  static const uptr kSpaceBeg = kAllocatorSpace;
+  static const uptr kSpaceSize = kAllocatorSize;
+  static const uptr kMetadataSize = 0;
+  typedef __asan::SizeClassMap64 SizeClassMap;
+  typedef AsanMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+  using AddressSpaceView = AddressSpaceViewTy;
+};
+template <typename AddressSpaceView>
+using Allocator64ASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
+using Allocator64 = Allocator64ASVT<LocalAddressSpaceView>;
+
+typedef CompactSizeClassMap SizeClassMap32;
+template <typename AddressSpaceViewTy>
+struct AP32 {
+  static const uptr kSpaceBeg = 0;
+  static const u64 kSpaceSize = SANITIZER_MMAP_RANGE_SIZE;
+  static const uptr kMetadataSize = 16;
+  typedef __asan::SizeClassMap32 SizeClassMap;
+  static const uptr kRegionSizeLog = 20;
+  using AddressSpaceView = AddressSpaceViewTy;
+  typedef AsanMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+};
+template <typename AddressSpaceView>
+using Allocator32ASVT = SizeClassAllocator32<AP32<AddressSpaceView>>;
+using Allocator32 = Allocator32ASVT<LocalAddressSpaceView>;
+using Allocator32or64 = RuntimeSelectAllocator<Allocator32, Allocator64>;
+
+static const uptr kMaxNumberOfSizeClasses =
+    SizeClassMap32::kNumClasses < SizeClassMap64::kNumClasses
+        ? SizeClassMap64::kNumClasses
+        : SizeClassMap32::kNumClasses;
+
+template <typename AddressSpaceView>
+using PrimaryAllocatorASVT =
+    RuntimeSelectAllocator<Allocator32ASVT<AddressSpaceView>,
+                           Allocator64ASVT<AddressSpaceView>>;
+#elif SANITIZER_CAN_USE_ALLOCATOR64
 # if SANITIZER_FUCHSIA
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize = 0x40000000000ULL; // 4T.
-typedef DefaultSizeClassMap SizeClassMap;
 # elif defined(__powerpc64__)
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize = 0x20000000000ULL; // 2T.
-typedef DefaultSizeClassMap SizeClassMap;
-# elif defined(__aarch64__) && SANITIZER_ANDROID
-// Android needs to support 39, 42 and 48 bit VMA.
-const uptr kAllocatorSpace = ~(uptr)0;
-const uptr kAllocatorSize = 0x2000000000ULL; // 128G.
-typedef VeryCompactSizeClassMap SizeClassMap;
-# elif defined(__aarch64__)
-// AArch64/SANITIZER_CAN_USE_ALLOCATOR64 is only for 42-bit VMA
-// so no need to different values for different VMA.
-const uptr kAllocatorSpace = 0x10000000000ULL;
-const uptr kAllocatorSize = 0x10000000000ULL; // 3T.
-typedef DefaultSizeClassMap SizeClassMap;
-#elif defined(__sparc__)
+# elif defined(__sparc__)
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize = 0x20000000000ULL; // 2T.
-typedef DefaultSizeClassMap SizeClassMap;
 # elif SANITIZER_WINDOWS
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize = 0x8000000000ULL; // 500G
-typedef DefaultSizeClassMap SizeClassMap;
 # else
 const uptr kAllocatorSpace = 0x600000000000ULL;
 const uptr kAllocatorSize = 0x40000000000ULL; // 4T.
-typedef DefaultSizeClassMap SizeClassMap;
 # endif
+typedef DefaultSizeClassMap SizeClassMap;
+static const uptr kMaxNumberOfSizeClasses = SizeClassMap::kNumClasses;
 template <typename AddressSpaceViewTy>
 struct AP64 { // Allocator64 parameters. Deliberately using a short name.
   static const uptr kSpaceBeg = kAllocatorSpace;
@@ -164,9 +201,9 @@ struct AP64 { // Allocator64 parameters. Deliberately using a short name.

 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #else // Fallback to SizeClassAllocator32.
 typedef CompactSizeClassMap SizeClassMap;
+static const uptr kMaxNumberOfSizeClasses = SizeClassMap::kNumClasses;
 template <typename AddressSpaceViewTy>
 struct AP32 {
   static const uptr kSpaceBeg = 0;
@@ -180,16 +217,14 @@ struct AP32 {
 };
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator32<AP32<AddressSpaceView> >;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #endif // SANITIZER_CAN_USE_ALLOCATOR64

-static const uptr kNumberOfSizeClasses = SizeClassMap::kNumClasses;
-
 template <typename AddressSpaceView>
 using AsanAllocatorASVT =
     CombinedAllocator<PrimaryAllocatorASVT<AddressSpaceView>>;
 using AsanAllocator = AsanAllocatorASVT<LocalAddressSpaceView>;
 using AllocatorCache = AsanAllocator::AllocatorCache;
+using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;

 struct AsanThreadLocalMallocStorage {
   uptr quarantine_cache[16];
@@ -226,5 +261,7 @@ void asan_mz_force_unlock();
 void PrintInternalAllocatorStats();
 void AsanSoftRssLimitExceededCallback(bool exceeded);

+AsanAllocator &get_allocator();
+
 } // namespace __asan
 #endif // ASAN_ALLOCATOR_H
diff --git a/compiler-rt/lib/asan/asan_stats.cpp b/compiler-rt/lib/asan/asan_stats.cpp
index bc4e8c15cc1..e4a1a390b2c 100644
--- a/compiler-rt/lib/asan/asan_stats.cpp
+++ b/compiler-rt/lib/asan/asan_stats.cpp
@@ -10,6 +10,7 @@
 //
 // Code related to statistics collected by AddressSanitizer.
 //===----------------------------------------------------------------------===//
+#include "asan_allocator.h"
 #include "asan_interceptors.h"
 #include "asan_internal.h"
 #include "asan_stats.h"
@@ -30,9 +31,9 @@ void AsanStats::Clear() {
 }

 static void PrintMallocStatsArray(const char *prefix,
-                                  uptr (&array)[kNumberOfSizeClasses]) {
+                                  uptr *array, uptr size) {
   Printf("%s", prefix);
-  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
+  for (uptr i = 0; i < size; i++) {
     if (!array[i]) continue;
     Printf("%zu:%zu; ", i, array[i]);
   }
@@ -50,7 +51,8 @@ void AsanStats::Print() {
          (mmaped-munmaped)>>20, mmaped>>20, munmaped>>20,
          mmaps, munmaps);

-  PrintMallocStatsArray("  mallocs by size class: ", malloced_by_size);
+  PrintMallocStatsArray("  mallocs by size class: ", malloced_by_size,
+                        get_allocator().KNumClasses());
   Printf("Stats: malloc large: %zu\n", malloc_large);
 }

diff --git a/compiler-rt/lib/asan/asan_stats.h b/compiler-rt/lib/asan/asan_stats.h
index d6da6534081..06ae089c858 100644
--- a/compiler-rt/lib/asan/asan_stats.h
+++ b/compiler-rt/lib/asan/asan_stats.h
@@ -38,7 +38,7 @@ struct AsanStats {
   uptr munmaps;
   uptr munmaped;
   uptr malloc_large;
-  uptr malloced_by_size[kNumberOfSizeClasses];
+  uptr malloced_by_size[kMaxNumberOfSizeClasses];

   // Ctor for global AsanStats (accumulated stats for dead threads).
   explicit AsanStats(LinkerInitialized) { }

diff --git a/compiler-rt/lib/lsan/lsan_allocator.h b/compiler-rt/lib/lsan/lsan_allocator.h
index e1397099767..590b6b1624c 100644
--- a/compiler-rt/lib/lsan/lsan_allocator.h
+++ b/compiler-rt/lib/lsan/lsan_allocator.h
@@ -49,8 +49,46 @@ struct ChunkMetadata {
   u32 stack_trace_id;
 };

-#if defined(__mips64) || defined(__aarch64__) || defined(__i386__) || \
-    defined(__arm__)
+#if defined(__aarch64__)
+template <typename AddressSpaceViewTy>
+struct AP32 {
+  static const uptr kSpaceBeg = 0;
+  static const u64 kSpaceSize = SANITIZER_MMAP_RANGE_SIZE;
+  static const uptr kMetadataSize = sizeof(ChunkMetadata);
+  typedef __sanitizer::CompactSizeClassMap SizeClassMap;
+  static const uptr kRegionSizeLog = 20;
+  using AddressSpaceView = AddressSpaceViewTy;
+  typedef NoOpMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+};
+
+const uptr kAllocatorSpace = 0x600000000000ULL;
+const uptr kAllocatorSize = 0x40000000000ULL; // 4T.
+
+template <typename AddressSpaceViewTy>
+struct AP64 { // Allocator64 parameters. Deliberately using a short name.
+  static const uptr kSpaceBeg = kAllocatorSpace;
+  static const uptr kSpaceSize = kAllocatorSize;
+  static const uptr kMetadataSize = sizeof(ChunkMetadata);
+  typedef DefaultSizeClassMap SizeClassMap;
+  typedef NoOpMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+  using AddressSpaceView = AddressSpaceViewTy;
+};
+
+template <typename AddressSpaceView>
+using Allocator32ASVT = SizeClassAllocator32<AP32<AddressSpaceView>>;
+template <typename AddressSpaceView>
+using Allocator64ASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
+
+using Allocator32 = Allocator32ASVT<LocalAddressSpaceView>;
+using Allocator64 = Allocator64ASVT<LocalAddressSpaceView>;
+
+template <typename AddressSpaceView>
+using PrimaryAllocatorASVT =
+    RuntimeSelectAllocator<Allocator32ASVT<AddressSpaceView>,
+                           Allocator64ASVT<AddressSpaceView>>;
+#elif defined(__mips64) || defined(__i386__) || defined(__arm__)
 template <typename AddressSpaceViewTy>
 struct AP32 {
   static const uptr kSpaceBeg = 0;
@@ -64,7 +102,6 @@ struct AP32 {
 };
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator32<AP32<AddressSpaceView>>;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #elif defined(__x86_64__) || defined(__powerpc64__)
 # if defined(__powerpc64__)
 const uptr kAllocatorSpace = 0xa0000000000ULL;
@@ -86,13 +123,13 @@ struct AP64 { // Allocator64 parameters. Deliberately using a short name.
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #endif

 template <typename AddressSpaceView>
 using AllocatorASVT = CombinedAllocator<PrimaryAllocatorASVT<AddressSpaceView>>;
 using Allocator = AllocatorASVT<LocalAddressSpaceView>;
 using AllocatorCache = Allocator::AllocatorCache;
+using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;

 Allocator::AllocatorCache *GetAllocatorCache();

diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
index 23d589888d3..4c9ec46578c 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
@@ -75,6 +75,7 @@ INLINE void RandomShuffle(T *a, u32 n, u32 *rand_state) {
 #include "sanitizer_allocator_local_cache.h"
 #include "sanitizer_allocator_secondary.h"
 #include "sanitizer_allocator_combined.h"
+#include "sanitizer_runtime_select_allocator.h"

 } // namespace __sanitizer

diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
index 33f89d6d499..c11d1f83fb5 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
@@ -41,6 +41,10 @@ class CombinedAllocator {
     secondary_.Init();
   }

+  bool CanAllocate(uptr size, uptr alignment) {
+    return primary_.CanAllocate(size, alignment);
+  }
+
   void *Allocate(AllocatorCache *cache, uptr size, uptr alignment) {
     // Returning 0 on malloc(0) may break a lot of code.
     if (size == 0)
@@ -194,6 +198,10 @@ class CombinedAllocator {
     secondary_.ForEachChunk(callback, arg);
   }

+  uptr KNumClasses() { return primary_.KNumClasses(); }
+  uptr KMaxSize() { return primary_.KMaxSize(); }
+  uptr ClassID(uptr size) { return primary_.ClassID(size); }
+
  private:
   PrimaryAllocator primary_;
   SecondaryAllocator secondary_;
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
index 3b1838b3985..9314ee8b448 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
@@ -271,6 +271,9 @@ class SizeClassAllocator32 {
   typedef SizeClassMap SizeClassMapT;
   static const uptr kNumClasses = SizeClassMap::kNumClasses;

+  static uptr KNumClasses() { return SizeClassMap::kNumClasses; }
+  static uptr KMaxSize() { return SizeClassMap::kMaxSize; }
+
  private:
   static const uptr kRegionSize = 1 << kRegionSizeLog;
   static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
index 90603280e7c..7ecac5057af 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
@@ -319,6 +319,9 @@ class SizeClassAllocator64 {
   static const uptr kNumClasses = SizeClassMap::kNumClasses;
   static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;

+  static uptr KNumClasses() { return SizeClassMap::kNumClasses; }
+  static uptr KMaxSize() { return SizeClassMap::kMaxSize; }
+
   // A packed array of counters. Each counter occupies 2^n bits, enough to
   // store counter's max_value.
   // Ctor will try to allocate the required buffer via
   // mapper->MapPackedCounterArrayBuffer and the caller is expected to check
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_runtime_select_allocator.h b/compiler-rt/lib/sanitizer_common/sanitizer_runtime_select_allocator.h
new file mode 100644
index 00000000000..3b9e3544598
--- /dev/null
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_runtime_select_allocator.h
@@ -0,0 +1,179 @@
+//===-- sanitizer_runtime_select_allocator.h --------------------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Select one of the two allocators at runtime.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef SANITIZER_RUNTIME_SELECT_ALLOCATOR_H
+#define SANITIZER_RUNTIME_SELECT_ALLOCATOR_H
+
+template <class Allocator1, class Allocator2>
+class RuntimeSelectAllocator {
+  Allocator1 a1;
+  Allocator2 a2;
+
+ public:
+  // Written once in Init() and fixed from then on. Static so tests can
+  // force a particular allocator via RuntimeSelectAllocator::UseAllocator1.
+  static bool UseAllocator1;
+
+  class RuntimeSelectAllocatorCache {
+    typename Allocator1::AllocatorCache a1;
+    typename Allocator2::AllocatorCache a2;
+
+   public:
+    void Init(AllocatorGlobalStats *s) {
+      if (UseAllocator1)
+        a1.Init(s);
+      else
+        a2.Init(s);
+    }
+
+    void *Allocate(RuntimeSelectAllocator *allocator, uptr class_id) {
+      if (UseAllocator1)
+        return a1.Allocate(&allocator->a1, class_id);
+      return a2.Allocate(&allocator->a2, class_id);
+    }
+
+    void Deallocate(RuntimeSelectAllocator *allocator, uptr class_id,
+                    void *p) {
+      if (UseAllocator1)
+        a1.Deallocate(&allocator->a1, class_id, p);
+      else
+        a2.Deallocate(&allocator->a2, class_id, p);
+    }
+
+    void Drain(RuntimeSelectAllocator *allocator) {
+      if (UseAllocator1)
+        a1.Drain(&allocator->a1);
+      else
+        a2.Drain(&allocator->a2);
+    }
+
+    void Destroy(RuntimeSelectAllocator *allocator, AllocatorGlobalStats *s) {
+      if (UseAllocator1)
+        a1.Destroy(&allocator->a1, s);
+      else
+        a2.Destroy(&allocator->a2, s);
+    }
+  };
+
+  using MapUnmapCallback = typename Allocator1::MapUnmapCallback;
+  using AddressSpaceView = typename Allocator1::AddressSpaceView;
+  using AllocatorCache = RuntimeSelectAllocatorCache;
+
+  void Init(s32 release_to_os_interval_ms) {
+    // Use the first allocator when the address
+    // space is too small for the 64-bit allocator.
+    UseAllocator1 = GetMaxVirtualAddress() < (((uptr)1ULL << 48) - 1);
+    if (UseAllocator1)
+      a1.Init(release_to_os_interval_ms);
+    else
+      a2.Init(release_to_os_interval_ms);
+  }
+
+  bool CanAllocate(uptr size, uptr alignment) {
+    if (UseAllocator1)
+      return Allocator1::CanAllocate(size, alignment);
+    return Allocator2::CanAllocate(size, alignment);
+  }
+
+  uptr ClassID(uptr size) {
+    if (UseAllocator1)
+      return Allocator1::ClassID(size);
+    return Allocator2::ClassID(size);
+  }
+
+  uptr KNumClasses() {
+    if (UseAllocator1)
+      return Allocator1::KNumClasses();
+    return Allocator2::KNumClasses();
+  }
+
+  uptr KMaxSize() {
+    if (UseAllocator1)
+      return Allocator1::KMaxSize();
+    return Allocator2::KMaxSize();
+  }
+
+  bool PointerIsMine(const void *p) {
+    if (UseAllocator1)
+      return a1.PointerIsMine(p);
+    return a2.PointerIsMine(p);
+  }
+
+  void *GetMetaData(const void *p) {
+    if (UseAllocator1)
+      return a1.GetMetaData(p);
+    return a2.GetMetaData(p);
+  }
+
+  uptr GetSizeClass(const void *p) {
+    if (UseAllocator1)
+      return a1.GetSizeClass(p);
+    return a2.GetSizeClass(p);
+  }
+
+  void ForEachChunk(ForEachChunkCallback callback, void *arg) {
+    if (UseAllocator1)
+      a1.ForEachChunk(callback, arg);
+    else
+      a2.ForEachChunk(callback, arg);
+  }
+
+  void TestOnlyUnmap() {
+    if (UseAllocator1)
+      a1.TestOnlyUnmap();
+    else
+      a2.TestOnlyUnmap();
+  }
+  void ForceLock() {
+    if (UseAllocator1)
+      a1.ForceLock();
+    else
+      a2.ForceLock();
+  }
+  void ForceUnlock() {
+    if (UseAllocator1)
+      a1.ForceUnlock();
+    else
+      a2.ForceUnlock();
+  }
+  void *GetBlockBegin(const void *p) {
+    if (UseAllocator1)
+      return a1.GetBlockBegin(p);
+    return a2.GetBlockBegin(p);
+  }
+  uptr GetActuallyAllocatedSize(void *p) {
+    if (UseAllocator1)
+      return a1.GetActuallyAllocatedSize(p);
+    return a2.GetActuallyAllocatedSize(p);
+  }
+  void SetReleaseToOSIntervalMs(s32 release_to_os_interval_ms) {
+    if (UseAllocator1)
+      a1.SetReleaseToOSIntervalMs(release_to_os_interval_ms);
+    else
+      a2.SetReleaseToOSIntervalMs(release_to_os_interval_ms);
+  }
+  s32 ReleaseToOSIntervalMs() const {
+    if (UseAllocator1)
+      return a1.ReleaseToOSIntervalMs();
+    return a2.ReleaseToOSIntervalMs();
+  }
+  void ForceReleaseToOS() {
+    if (UseAllocator1)
+      a1.ForceReleaseToOS();
+    else
+      a2.ForceReleaseToOS();
+  }
+  void PrintStats() {
+    if (UseAllocator1)
+      a1.PrintStats();
+    else
+      a2.PrintStats();
+  }
+};
+
+// Definition of the static selector flag.
+template <class Allocator1, class Allocator2>
+bool RuntimeSelectAllocator<Allocator1, Allocator2>::UseAllocator1;
+
+#endif // SANITIZER_RUNTIME_SELECT_ALLOCATOR_H
diff --git a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
index 1015a60b4a5..dc26a0a445f 100644
--- a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
+++ b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
@@ -160,6 +160,9 @@ using Allocator32CompactASVT =
     SizeClassAllocator32<AP32Compact<AddressSpaceView>>;
 using Allocator32Compact = Allocator32CompactASVT<LocalAddressSpaceView>;

+using Allocator32or64Compact =
+    RuntimeSelectAllocator<Allocator32Compact, Allocator64Compact>;
+
 template <class SizeClassMap>
 void TestSizeClassMap() {
   typedef SizeClassMap SCMap;
@@ -274,6 +277,13 @@ TEST(SanitizerCommon, SizeClassAllocator64Compact) {
   TestSizeClassAllocator<Allocator64Compact>();
 }

+TEST(SanitizerCommon, SizeClassAllocator32or64Compact) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestSizeClassAllocator<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestSizeClassAllocator<Allocator32or64Compact>();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64Dense) {
   TestSizeClassAllocator<Allocator64Dense>();
 }
@@ -357,6 +367,12 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactMetadataStress) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  SizeClassAllocatorMetadataStress<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  SizeClassAllocatorMetadataStress<Allocator32or64Compact>();
+}
 #endif
 #endif
@@ -404,6 +420,12 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>(1ULL << 33);
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactGetBlockBegin) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  SizeClassAllocatorGetBlockBeginStress<Allocator32or64Compact>(1ULL << 33);
+  Allocator32or64Compact::UseAllocator1 = true;
+  SizeClassAllocatorGetBlockBeginStress<Allocator32or64Compact>(1ULL << 33);
+}
 #endif
 TEST(SanitizerCommon, SizeClassAllocator64VeryCompactGetBlockBegin) {
   // Does not have > 4Gb for each class.
@@ -694,6 +716,12 @@ TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact>();
 }
+TEST(SanitizerCommon, CombinedRuntimeSelectAllocator) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestCombinedAllocator<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestCombinedAllocator<Allocator32or64Compact>();
+}
 #endif

 TEST(SanitizerCommon, CombinedAllocator64VeryCompact) {
@@ -755,6 +783,12 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Compact>();
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactLocalCache) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestSizeClassAllocatorLocalCache<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestSizeClassAllocatorLocalCache<Allocator32or64Compact>();
+}
 #endif
 TEST(SanitizerCommon, SizeClassAllocator64VeryCompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64VeryCompact>();
@@ -1333,6 +1367,12 @@ TEST(SanitizerCommon, SizeClassAllocator64ReleaseFreeMemoryToOS) {
 TEST(SanitizerCommon, SizeClassAllocator64CompactReleaseFreeMemoryToOS) {
   TestReleaseFreeMemoryToOS<Allocator64Compact>();
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactReleaseFreeMemoryToOS) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestReleaseFreeMemoryToOS<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestReleaseFreeMemoryToOS<Allocator32or64Compact>();
+}

 TEST(SanitizerCommon, SizeClassAllocator64VeryCompactReleaseFreeMemoryToOS) {
   TestReleaseFreeMemoryToOS<Allocator64VeryCompact>();
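
For reference, the dispatch pattern introduced by sanitizer_runtime_select_allocator.h reduces to: instantiate both candidate allocators side by side, fix one static flag at Init() from the detected address-space size, and branch on that flag in every forwarding method. The sketch below is a minimal, self-contained illustration of that pattern, not the patch itself; ToyAllocator32, ToyAllocator64, RuntimeSelect, and the toy KMaxSize() values are invented for the example.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Two stand-in allocators; the real patch plugs in SizeClassAllocator32
// and SizeClassAllocator64. The sizes below are arbitrary toy values.
struct ToyAllocator32 {
  static std::uintptr_t KMaxSize() { return 1 << 14; }
  void Init() { std::puts("selected: 32-bit-style allocator"); }
};

struct ToyAllocator64 {
  static std::uintptr_t KMaxSize() { return 1 << 17; }
  void Init() { std::puts("selected: 64-bit-style allocator"); }
};

template <class A1, class A2>
class RuntimeSelect {
  A1 a1;  // both allocators exist as members ...
  A2 a2;  // ... but only the selected one is initialized and used

 public:
  // Written once in Init(), read by every forwarding method.
  static bool UseAllocator1;

  void Init(std::uintptr_t max_user_address) {
    // Fall back to the first (32-bit-style) allocator when the address
    // space is smaller than 48 bits, mirroring the check in the patch.
    UseAllocator1 = max_user_address < (((std::uintptr_t)1 << 48) - 1);
    if (UseAllocator1)
      a1.Init();
    else
      a2.Init();
  }

  std::uintptr_t KMaxSize() {
    return UseAllocator1 ? A1::KMaxSize() : A2::KMaxSize();
  }
};

// Out-of-class definition of the static selector flag.
template <class A1, class A2>
bool RuntimeSelect<A1, A2>::UseAllocator1;

int main() {
  RuntimeSelect<ToyAllocator32, ToyAllocator64> alloc;
  alloc.Init(((std::uintptr_t)1 << 39) - 1);  // e.g. a 39-bit AArch64 VMA
  std::printf("largest size class: %zu bytes\n",
              (std::size_t)alloc.KMaxSize());
  return 0;
}

Because the flag is written once before any allocation and only read afterwards, the per-call branch is perfectly predictable; that is the cost the patch accepts for deferring the 32-bit/64-bit choice from compile time (SANITIZER_CAN_USE_ALLOCATOR64) to startup.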