author     Etienne Bergeron <etienneb@google.com>   2016-08-04 18:15:38 +0000
committer  Etienne Bergeron <etienneb@google.com>   2016-08-04 18:15:38 +0000
commit     27eb6d521e976e803ed2468778b0eafcd6b12ae4
tree       7328f4d4b36f3d0c266c47fcb02b503ef0ca7908
parent     80f2eec4b2ef99243200336354f6879a1098aa07
[compiler-rt] Fix memory allocator for dynamic address space
Summary:
The sanitizer allocators can work with a dynamic address space
(i.e. one specified as ~0ULL).
Unfortunately, the code was broken in GetMetadata and GetChunkIdx.
This patch moves the Win64 memory test to a dynamic
address space. There is an ongoing migration to move everything to a
dynamic address space on Windows.
For better coverage, the unit tests now exercise the dynamic
address space on the other platforms too.
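
The broken case is easiest to see in isolation: with a constant,
kRegionSize-aligned space begin, masking the low bits of a pointer
recovers its region begin, but a runtime-chosen base need not be
aligned, so the pointer must be rebased first. Below is a minimal
standalone sketch of that arithmetic (the names and constants are
made up for illustration and are not the allocator's real ones;
it assumes a 64-bit build):

#include <cassert>
#include <cstdint>

// Stand-ins for the allocator's types and constants (illustrative only).
typedef uintptr_t uptr;
static const uptr kRegionSize = 1 << 20;  // hypothetical region size

// Constant space begin: the space is kRegionSize-aligned, so masking
// the pointer's low bits recovers the region begin directly.
static uptr RegionBeginConstant(uptr p) {
  return p & ~(kRegionSize - 1);
}

// Dynamic space begin: the base is chosen at runtime and need not be
// kRegionSize-aligned, so compute the offset relative to space_beg,
// mask it, then rebase -- the same shape as the new GetRegionBegin.
static uptr RegionBeginDynamic(uptr p, uptr space_beg) {
  return ((p - space_beg) & ~(kRegionSize - 1)) + space_beg;
}

int main() {
  const uptr space_beg = 0x7f0000001000ULL;  // unaligned runtime base (made up)
  const uptr p = space_beg + 3 * kRegionSize + 42;  // a pointer in region 3
  assert(RegionBeginDynamic(p, space_beg) == space_beg + 3 * kRegionSize);
  // Masking alone lands at the wrong address when the base is unaligned;
  // this is the class of bug fixed in GetMetadata and GetChunkIdx.
  assert(RegionBeginConstant(p) != space_beg + 3 * kRegionSize);
  return 0;
}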
Reviewers: rnk, kcc
Subscribers: kubabrecka, dberris, llvm-commits, chrisha
Differential Revision: https://reviews.llvm.org/D23170
llvm-svn: 277745
3 files changed, 49 insertions(+), 5 deletions(-)
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
index 75496283752..c7c906bc375 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
@@ -94,6 +94,13 @@ class SizeClassAllocator64 {
     return P >= SpaceBeg() && P < SpaceEnd();
   }
 
+  uptr GetRegionBegin(const void *p) {
+    if (kUsingConstantSpaceBeg)
+      return reinterpret_cast<uptr>(p) & ~(kRegionSize - 1);
+    uptr space_beg = SpaceBeg();
+    return ((reinterpret_cast<uptr>(p) - space_beg) & ~(kRegionSize - 1)) + space_beg;
+  }
+
   uptr GetSizeClass(const void *p) {
     if (kUsingConstantSpaceBeg && (kSpaceBeg % kSpaceSize) == 0)
       return ((reinterpret_cast<uptr>(p)) / kRegionSize) % kNumClassesRounded;
@@ -106,7 +113,7 @@ class SizeClassAllocator64 {
     uptr size = SizeClassMap::Size(class_id);
     if (!size) return nullptr;
     uptr chunk_idx = GetChunkIdx((uptr)p, size);
-    uptr reg_beg = (uptr)p & ~(kRegionSize - 1);
+    uptr reg_beg = GetRegionBegin(p);
     uptr beg = chunk_idx * size;
     uptr next_beg = beg + size;
     if (class_id >= kNumClasses) return nullptr;
@@ -258,7 +265,10 @@ class SizeClassAllocator64 {
     return &regions[class_id];
   }
 
-  static uptr GetChunkIdx(uptr chunk, uptr size) {
+  uptr GetChunkIdx(uptr chunk, uptr size) {
+    if (!kUsingConstantSpaceBeg)
+      chunk -= SpaceBeg();
+
     uptr offset = chunk % kRegionSize;
     // Here we divide by a non-constant. This is costly.
     // size always fits into 32-bits. If the offset fits too, use 32-bit div.
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_win.cc b/compiler-rt/lib/sanitizer_common/sanitizer_win.cc
index cb4d7ddaa89..83481769ac7 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_win.cc
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_win.cc
@@ -221,8 +221,12 @@ void *MmapFixedNoAccess(uptr fixed_addr, uptr size, const char *name) {
 }
 
 void *MmapNoAccess(uptr size) {
-  // FIXME: unsupported.
-  return nullptr;
+  void *res = VirtualAlloc(nullptr, size, MEM_RESERVE, PAGE_NOACCESS);
+  if (res == 0)
+    Report("WARNING: %s failed to "
+           "mprotect %p (%zd) bytes (error code: %d)\n",
+           SanitizerToolName, size, size, GetLastError());
+  return res;
 }
 
 bool MprotectNoAccess(uptr addr, uptr size) {
diff --git a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cc b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
index 03f765ba040..cfb44a1afbf 100644
--- a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
+++ b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
@@ -30,7 +30,10 @@
 
 #if SANITIZER_CAN_USE_ALLOCATOR64
 #if SANITIZER_WINDOWS
-static const uptr kAllocatorSpace = 0x10000000000ULL;
+// On Windows 64-bit there is no easy way to find a large enough fixed address
+// space that is always available. Thus, a dynamically allocated address space
+// is used instead (i.e. ~(uptr)0).
+static const uptr kAllocatorSpace = ~(uptr)0;
 static const uptr kAllocatorSize = 0x10000000000ULL;  // 1T.
 static const u64 kAddressSpaceSize = 1ULL << 40;
 #else
@@ -41,6 +44,8 @@ static const u64 kAddressSpaceSize = 1ULL << 47;
 
 typedef SizeClassAllocator64<
   kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;
+typedef SizeClassAllocator64<
+  ~(uptr)0, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64Dynamic;
 
 typedef SizeClassAllocator64<
   kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
@@ -158,6 +163,10 @@ TEST(SanitizerCommon, SizeClassAllocator64) {
   TestSizeClassAllocator<Allocator64>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64Dynamic) {
+  TestSizeClassAllocator<Allocator64Dynamic>();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64Compact) {
   TestSizeClassAllocator<Allocator64Compact>();
 }
@@ -202,6 +211,10 @@ TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
+  SizeClassAllocatorMetadataStress<Allocator64Dynamic>();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
 }
@@ -238,6 +251,9 @@ void SizeClassAllocatorGetBlockBeginStress() {
 TEST(SanitizerCommon, SizeClassAllocator64GetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64>();
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
+  SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>();
+}
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>();
 }
@@ -484,6 +500,12 @@ TEST(SanitizerCommon, CombinedAllocator64) {
       SizeClassAllocatorLocalCache<Allocator64> > ();
 }
 
+TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
+  TestCombinedAllocator<Allocator64Dynamic,
+      LargeMmapAllocator<>,
+      SizeClassAllocatorLocalCache<Allocator64Dynamic> > ();
+}
+
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact,
       LargeMmapAllocator<>,
@@ -537,6 +559,11 @@ TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
       SizeClassAllocatorLocalCache<Allocator64> >();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
+  TestSizeClassAllocatorLocalCache<
+      SizeClassAllocatorLocalCache<Allocator64Dynamic> >();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<
       SizeClassAllocatorLocalCache<Allocator64Compact> >();
@@ -710,6 +737,9 @@ void TestSizeClassAllocatorIteration() {
 TEST(SanitizerCommon, SizeClassAllocator64Iteration) {
   TestSizeClassAllocatorIteration<Allocator64>();
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicIteration) {
+  TestSizeClassAllocatorIteration<Allocator64Dynamic>();
+}
 #endif
 
 TEST(SanitizerCommon, SizeClassAllocator32Iteration) {
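
The Windows half of the change above reserves address space without
committing it, so a large dynamic space costs no RAM up front. Here is
a minimal standalone sketch of that VirtualAlloc pattern (the size and
error handling are illustrative, not the sanitizer's; Windows-only):

#include <windows.h>
#include <cstdio>

int main() {
  // Reserve (but do not commit) 1 GiB of address space, as the patched
  // MmapNoAccess does: MEM_RESERVE + PAGE_NOACCESS backs nothing with
  // physical memory until pages are later committed.
  SIZE_T size = 1ULL << 30;
  void *res = VirtualAlloc(nullptr, size, MEM_RESERVE, PAGE_NOACCESS);
  if (res == nullptr) {
    std::fprintf(stderr, "VirtualAlloc failed (error code: %lu)\n",
                 GetLastError());
    return 1;
  }
  std::printf("reserved %zu bytes at %p\n", static_cast<size_t>(size), res);
  VirtualFree(res, 0, MEM_RELEASE);  // release the reservation
  return 0;
}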