-rw-r--r--  compiler-rt/lib/sanitizer_common/sanitizer_allocator.h                | 58
-rw-r--r--  compiler-rt/lib/sanitizer_common/sanitizer_posix_libcdep.cc           |  3
-rw-r--r--  compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_testlib.cc |  3
3 files changed, 21 insertions(+), 43 deletions(-)
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
index 344bf69d2e8..44d6fce3b29 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
@@ -297,10 +297,9 @@ typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
// SizeClassAllocator64 -- allocator for 64-bit address space.
//
-// Space: a portion of address space of kSpaceSize bytes starting at SpaceBeg.
-// If kSpaceBeg is ~0 then SpaceBeg is chosen dynamically my mmap.
-// Otherwise SpaceBeg=kSpaceBeg (fixed address).
-// kSpaceSize is a power of two.
+// Space: a portion of address space of kSpaceSize bytes starting at
+// a fixed address (kSpaceBeg). Both constants are powers of two and
+// kSpaceBeg is kSpaceSize-aligned.
// At the beginning the entire space is mprotect-ed, then small parts of it
// are mapped on demand.
//
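[Note, illustration only, not part of the patch] A minimal standalone sketch of the layout invariants the new comment states, using the testlib's kAllocatorSpace/kAllocatorSize values below as stand-ins for the kSpaceBeg/kSpaceSize template parameters:

#include <cstdint>

using uptr = uintptr_t;

// Stand-ins for the kSpaceBeg/kSpaceSize template parameters (values taken
// from the testlib hunk at the end of this diff).
constexpr uptr kSpaceBeg  = 0x600000000000ULL;
constexpr uptr kSpaceSize = 0x10000000000ULL;  // 1T, a power of two.

// The invariants the comment states: the size is a power of two and the
// start address is size-aligned, so the space occupies exactly one
// kSpaceSize-aligned window of the address space.
static_assert((kSpaceSize & (kSpaceSize - 1)) == 0,
              "kSpaceSize must be a power of two");
static_assert(kSpaceBeg % kSpaceSize == 0,
              "kSpaceBeg must be kSpaceSize-aligned");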
@@ -323,15 +322,9 @@ class SizeClassAllocator64 {
typedef SizeClassAllocatorLocalCache<ThisT> AllocatorCache;
void Init() {
- if (kUsingConstantSpaceBeg) {
- CHECK_EQ(kSpaceBeg,
- reinterpret_cast<uptr>(MmapNoAccess(kSpaceBeg, kSpaceSize)));
- } else {
- NonConstSpaceBeg = reinterpret_cast<uptr>(
- MmapNoAccess(0, kSpaceSize + AdditionalSize()));
- CHECK_NE(NonConstSpaceBeg, ~(uptr)0);
- }
- MapWithCallback(SpaceEnd(), AdditionalSize());
+ CHECK_EQ(kSpaceBeg,
+ reinterpret_cast<uptr>(MmapNoAccess(kSpaceBeg, kSpaceSize)));
+ MapWithCallback(kSpaceEnd, AdditionalSize());
}
void MapWithCallback(uptr beg, uptr size) {
@@ -367,18 +360,12 @@ class SizeClassAllocator64 {
region->n_freed += b->count;
}
- bool PointerIsMine(const void *p) {
- uptr P = reinterpret_cast<uptr>(p);
- if (kUsingConstantSpaceBeg && (kSpaceBeg % kSpaceSize) == 0)
- return P / kSpaceSize == kSpaceBeg / kSpaceSize;
- return P >= SpaceBeg() && P < SpaceEnd();
+ static bool PointerIsMine(const void *p) {
+ return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize;
}
- uptr GetSizeClass(const void *p) {
- if (kUsingConstantSpaceBeg && (kSpaceBeg % kSpaceSize) == 0)
- return ((reinterpret_cast<uptr>(p)) / kRegionSize) % kNumClassesRounded;
- return ((reinterpret_cast<uptr>(p) - SpaceBeg()) / kRegionSize) %
- kNumClassesRounded;
+ static uptr GetSizeClass(const void *p) {
+ return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
}
void *GetBlockBegin(const void *p) {
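[Note] A standalone sketch (same stand-in constants as above) of why the single-division membership test is equivalent to the old SpaceBeg()/SpaceEnd() range check once kSpaceBeg is kSpaceSize-aligned:

#include <cassert>
#include <cstdint>
#include <initializer_list>

using uptr = uintptr_t;

constexpr uptr kSpaceBeg  = 0x600000000000ULL;  // fixed, kSpaceSize-aligned
constexpr uptr kSpaceSize = 0x10000000000ULL;   // power of two

// With kSpaceBeg % kSpaceSize == 0, dividing by kSpaceSize maps the whole
// [kSpaceBeg, kSpaceBeg + kSpaceSize) window to a single quotient, so
// comparing quotients is the same as the explicit range check.
bool PointerIsMineDiv(uptr p)   { return p / kSpaceSize == kSpaceBeg / kSpaceSize; }
bool PointerIsMineRange(uptr p) { return p >= kSpaceBeg && p < kSpaceBeg + kSpaceSize; }

int main() {
  for (uptr p : {kSpaceBeg - 1, kSpaceBeg, kSpaceBeg + kSpaceSize / 2,
                 kSpaceBeg + kSpaceSize - 1, kSpaceBeg + kSpaceSize})
    assert(PointerIsMineDiv(p) == PointerIsMineRange(p));
}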
@@ -396,7 +383,7 @@ class SizeClassAllocator64 {
return nullptr;
}
- uptr GetActuallyAllocatedSize(void *p) {
+ static uptr GetActuallyAllocatedSize(void *p) {
CHECK(PointerIsMine(p));
return SizeClassMap::Size(GetSizeClass(p));
}
@@ -407,9 +394,8 @@ class SizeClassAllocator64 {
uptr class_id = GetSizeClass(p);
uptr size = SizeClassMap::Size(class_id);
uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size);
- return reinterpret_cast<void *>(SpaceBeg() +
- (kRegionSize * (class_id + 1)) -
- (1 + chunk_idx) * kMetadataSize);
+ return reinterpret_cast<void*>(kSpaceBeg + (kRegionSize * (class_id + 1)) -
+ (1 + chunk_idx) * kMetadataSize);
}
uptr TotalMemoryUsed() {
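[Note] A sketch of the placement the simplified GetMetaData computes. The kNumClassesRounded and kMetadataSize values here are hypothetical; the arithmetic mirrors the patched code: per-chunk metadata sits at the high end of each class's region and grows downward.

#include <cstdint>

using uptr = uintptr_t;

// Hypothetical stand-ins; the real values come from the template
// parameters and SizeClassMap.
constexpr uptr kSpaceBeg          = 0x600000000000ULL;
constexpr uptr kSpaceSize         = 0x10000000000ULL;
constexpr uptr kNumClassesRounded = 64;
constexpr uptr kRegionSize        = kSpaceSize / kNumClassesRounded;
constexpr uptr kMetadataSize      = 16;

// Metadata for chunk `chunk_idx` of class `class_id` lives just below the
// end of that class's region, indexed backwards from the region end.
uptr MetaDataAddr(uptr class_id, uptr chunk_idx) {
  return kSpaceBeg + (kRegionSize * (class_id + 1)) -
         (1 + chunk_idx) * kMetadataSize;
}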
@@ -421,7 +407,7 @@ class SizeClassAllocator64 {
// Test-only.
void TestOnlyUnmap() {
- UnmapWithCallback(SpaceBeg(), kSpaceSize + AdditionalSize());
+ UnmapWithCallback(kSpaceBeg, kSpaceSize + AdditionalSize());
}
void PrintStats() {
@@ -469,7 +455,7 @@ class SizeClassAllocator64 {
for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
RegionInfo *region = GetRegionInfo(class_id);
uptr chunk_size = SizeClassMap::Size(class_id);
- uptr region_beg = SpaceBeg() + class_id * kRegionSize;
+ uptr region_beg = kSpaceBeg + class_id * kRegionSize;
for (uptr chunk = region_beg;
chunk < region_beg + region->allocated_user;
chunk += chunk_size) {
@@ -490,13 +476,8 @@ class SizeClassAllocator64 {
private:
static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
-
- static const bool kUsingConstantSpaceBeg = kSpaceBeg != ~(uptr)0;
- uptr NonConstSpaceBeg;
- uptr SpaceBeg() const {
- return kUsingConstantSpaceBeg ? kSpaceBeg : NonConstSpaceBeg;
- }
- uptr SpaceEnd() const { return SpaceBeg() + kSpaceSize; }
+ static const uptr kSpaceEnd = kSpaceBeg + kSpaceSize;
+ COMPILER_CHECK(kSpaceBeg % kSpaceSize == 0);
// kRegionSize must be >= 2^32.
COMPILER_CHECK((kRegionSize) >= (1ULL << (SANITIZER_WORDSIZE / 2)));
// Populate the free list with at most this number of bytes at once
@@ -520,8 +501,7 @@ class SizeClassAllocator64 {
RegionInfo *GetRegionInfo(uptr class_id) {
CHECK_LT(class_id, kNumClasses);
- RegionInfo *regions =
- reinterpret_cast<RegionInfo *>(SpaceBeg() + kSpaceSize);
+ RegionInfo *regions = reinterpret_cast<RegionInfo*>(kSpaceBeg + kSpaceSize);
return &regions[class_id];
}
@@ -544,7 +524,7 @@ class SizeClassAllocator64 {
uptr count = size < kPopulateSize ? SizeClassMap::MaxCached(class_id) : 1;
uptr beg_idx = region->allocated_user;
uptr end_idx = beg_idx + count * size;
- uptr region_beg = SpaceBeg() + kRegionSize * class_id;
+ uptr region_beg = kSpaceBeg + kRegionSize * class_id;
if (end_idx + size > region->mapped_user) {
// Do the mmap for the user memory.
uptr map_size = kUserMapSize;
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_posix_libcdep.cc b/compiler-rt/lib/sanitizer_common/sanitizer_posix_libcdep.cc
index 20aed194ea0..6c1a8523943 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_posix_libcdep.cc
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_posix_libcdep.cc
@@ -272,8 +272,7 @@ void *MmapFixedNoReserve(uptr fixed_addr, uptr size, const char *name) {
void *MmapNoAccess(uptr fixed_addr, uptr size, const char *name) {
int fd = name ? GetNamedMappingFd(name, size) : -1;
- unsigned flags = MAP_PRIVATE | MAP_NORESERVE;
- if (fixed_addr != 0) flags |= MAP_FIXED;
+ unsigned flags = MAP_PRIVATE | MAP_FIXED | MAP_NORESERVE;
if (fd == -1) flags |= MAP_ANON;
return (void *)internal_mmap((void *)fixed_addr, size, PROT_NONE, flags, fd,
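[Note] For context, a sketch in plain POSIX (not the sanitizer-internal wrappers) of the kind of reservation the simplified MmapNoAccess now always performs: a fixed-address, inaccessible, non-reserving anonymous mapping.

#include <sys/mman.h>
#include <cstddef>
#include <cstdint>

// Reserve `size` bytes of address space at `fixed_addr` without committing
// memory: PROT_NONE makes it inaccessible, MAP_FIXED pins the address (and
// replaces any existing mapping there), MAP_NORESERVE skips swap
// reservation. Returns MAP_FAILED on error.
void *ReserveFixedNoAccess(uintptr_t fixed_addr, size_t size) {
  return mmap(reinterpret_cast<void *>(fixed_addr), size, PROT_NONE,
              MAP_PRIVATE | MAP_FIXED | MAP_ANON | MAP_NORESERVE, -1, 0);
}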
diff --git a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_testlib.cc b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_testlib.cc
index bdb3e3d119a..1a4c5501a92 100644
--- a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_testlib.cc
+++ b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_testlib.cc
@@ -37,8 +37,7 @@ namespace {
static const uptr kAllocatorSpace = 0x600000000000ULL;
static const uptr kAllocatorSize = 0x10000000000ULL; // 1T.
-typedef SizeClassAllocator64<~(uptr)0, kAllocatorSize, 0,
-// typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0,
+typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0,
CompactSizeClassMap> PrimaryAllocator;
typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache;
typedef LargeMmapAllocator<> SecondaryAllocator;
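[Note] A usage sketch built on the typedefs above; it assumes the testlib's includes and uses only member names that appear in this diff (Init, PointerIsMine).

// Illustrative only; assumes the testlib's includes and the typedefs above.
static PrimaryAllocator allocator;

static void SetupAllocator() {
  // Reserves [kAllocatorSpace, kAllocatorSpace + kAllocatorSize) PROT_NONE
  // and maps the bookkeeping area right after it, as in the patched Init().
  allocator.Init();
}

static bool Owns(const void *p) {
  // PointerIsMine is now static: a pure address computation against the
  // fixed, kAllocatorSize-aligned space, with no instance state needed.
  return PrimaryAllocator::PointerIsMine(p);
}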