author     Kostya Serebryany <kcc@google.com>  2012-12-10 14:19:15 +0000
committer  Kostya Serebryany <kcc@google.com>  2012-12-10 14:19:15 +0000
commit     13bdbe698e929711360bfc9433b34a86f4b999d6 (patch)
tree       bd5e50215b9c63a103e400975ec1295ccbaa4e14
parent     14282a91d55cdd9eb6b138c28df615681e49ce2a (diff)
download   bcm5719-llvm-13bdbe698e929711360bfc9433b34a86f4b999d6.tar.gz
           bcm5719-llvm-13bdbe698e929711360bfc9433b34a86f4b999d6.zip
[asan] move FakeStack into a separate file
llvm-svn: 169734
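
For orientation, here is a minimal sketch (not part of the patch) of how ASan-instrumented code is expected to use the two public entry points whose implementation this commit relocates, __asan_stack_malloc and __asan_stack_free; their declarations below match the definitions in the diff, while everything else in the sketch (the function and variable names, the frame size) is illustrative only.

  // Illustrative only: roughly what a compiler-instrumented function does
  // with the fake-stack entry points provided by the ASan runtime.
  #include <stdint.h>
  typedef uintptr_t uptr;  // simplified stand-in for the sanitizer's uptr

  extern "C" uptr __asan_stack_malloc(uptr size, uptr real_stack);
  extern "C" void __asan_stack_free(uptr ptr, uptr size, uptr real_stack);

  void instrumented_function() {
    char real_frame[128];              // the ordinary (real) stack frame
    uptr real_stack = (uptr)real_frame;

    // On entry: request a fake frame of the needed size.  If the fake
    // stack is disabled or unavailable, real_stack is returned unchanged.
    uptr frame = __asan_stack_malloc(sizeof(real_frame), real_stack);

    // ... the function's locals are placed inside `frame` ...

    // On exit: retire the frame.  FakeStack::OnFree poisons it, so a later
    // access through a dangling pointer is reported as a use-after-return.
    __asan_stack_free(frame, sizeof(real_frame), real_stack);
  }
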
-rw-r--r--  compiler-rt/lib/asan/CMakeLists.txt        1
-rw-r--r--  compiler-rt/lib/asan/asan_allocator.cc   190
-rw-r--r--  compiler-rt/lib/asan/asan_allocator.h     35
-rw-r--r--  compiler-rt/lib/asan/asan_fake_stack.cc  182
4 files changed, 218 insertions(+), 190 deletions(-)
diff --git a/compiler-rt/lib/asan/CMakeLists.txt b/compiler-rt/lib/asan/CMakeLists.txt
index 57a6c9a9c37..0f561e6cb6c 100644
--- a/compiler-rt/lib/asan/CMakeLists.txt
+++ b/compiler-rt/lib/asan/CMakeLists.txt
@@ -3,6 +3,7 @@
set(ASAN_SOURCES
asan_allocator.cc
asan_allocator2.cc
+ asan_fake_stack.cc
asan_globals.cc
asan_interceptors.cc
asan_linux.cc
diff --git a/compiler-rt/lib/asan/asan_allocator.cc b/compiler-rt/lib/asan/asan_allocator.cc
index 66c2aa2106c..4d422a7a9ac 100644
--- a/compiler-rt/lib/asan/asan_allocator.cc
+++ b/compiler-rt/lib/asan/asan_allocator.cc
@@ -65,37 +65,6 @@ static inline bool IsAligned(uptr a, uptr alignment) {
return (a & (alignment - 1)) == 0;
}
-static inline uptr Log2(uptr x) {
- CHECK(IsPowerOfTwo(x));
-#if !defined(_WIN32) || defined(__clang__)
- return __builtin_ctzl(x);
-#elif defined(_WIN64)
- unsigned long ret; // NOLINT
- _BitScanForward64(&ret, x);
- return ret;
-#else
- unsigned long ret; // NOLINT
- _BitScanForward(&ret, x);
- return ret;
-#endif
-}
-
-static inline uptr RoundUpToPowerOfTwo(uptr size) {
- CHECK(size);
- if (IsPowerOfTwo(size)) return size;
-
- unsigned long up; // NOLINT
-#if !defined(_WIN32) || defined(__clang__)
- up = SANITIZER_WORDSIZE - 1 - __builtin_clzl(size);
-#elif defined(_WIN64)
- _BitScanReverse64(&up, size);
-#else
- _BitScanReverse(&up, size);
-#endif
- CHECK(size < (1ULL << (up + 1)));
- CHECK(size > (1ULL << up));
- return 1UL << (up + 1);
-}
static inline uptr SizeClassToSize(u8 size_class) {
CHECK(size_class < kNumberOfSizeClasses);
@@ -865,170 +834,11 @@ void asan_mz_force_unlock() {
malloc_info.ForceUnlock();
}
-// ---------------------- Fake stack-------------------- {{{1
-FakeStack::FakeStack() {
- CHECK(REAL(memset) != 0);
- REAL(memset)(this, 0, sizeof(*this));
-}
-
-bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
- uptr mem = allocated_size_classes_[size_class];
- uptr size = ClassMmapSize(size_class);
- bool res = mem && addr >= mem && addr < mem + size;
- return res;
-}
-
-uptr FakeStack::AddrIsInFakeStack(uptr addr) {
- for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
- if (AddrIsInSizeClass(addr, i)) return allocated_size_classes_[i];
- }
- return 0;
-}
-
-// We may want to compute this during compilation.
-inline uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
- uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
- uptr log = Log2(rounded_size);
- CHECK(alloc_size <= (1UL << log));
- if (!(alloc_size > (1UL << (log-1)))) {
- Printf("alloc_size %zu log %zu\n", alloc_size, log);
- }
- CHECK(alloc_size > (1UL << (log-1)));
- uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
- CHECK(res < kNumberOfSizeClasses);
- CHECK(ClassSize(res) >= rounded_size);
- return res;
-}
-
-void FakeFrameFifo::FifoPush(FakeFrame *node) {
- CHECK(node);
- node->next = 0;
- if (first_ == 0 && last_ == 0) {
- first_ = last_ = node;
- } else {
- CHECK(first_);
- CHECK(last_);
- last_->next = node;
- last_ = node;
- }
-}
-
-FakeFrame *FakeFrameFifo::FifoPop() {
- CHECK(first_ && last_ && "Exhausted fake stack");
- FakeFrame *res = 0;
- if (first_ == last_) {
- res = first_;
- first_ = last_ = 0;
- } else {
- res = first_;
- first_ = first_->next;
- }
- return res;
-}
-
-void FakeStack::Init(uptr stack_size) {
- stack_size_ = stack_size;
- alive_ = true;
-}
-
-void FakeStack::Cleanup() {
- alive_ = false;
- for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
- uptr mem = allocated_size_classes_[i];
- if (mem) {
- PoisonShadow(mem, ClassMmapSize(i), 0);
- allocated_size_classes_[i] = 0;
- UnmapOrDie((void*)mem, ClassMmapSize(i));
- }
- }
-}
-
-uptr FakeStack::ClassMmapSize(uptr size_class) {
- return RoundUpToPowerOfTwo(stack_size_);
-}
-
-void FakeStack::AllocateOneSizeClass(uptr size_class) {
- CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
- uptr new_mem = (uptr)MmapOrDie(
- ClassMmapSize(size_class), __FUNCTION__);
- // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
- // asanThreadRegistry().GetCurrent()->tid(),
- // size_class, new_mem, new_mem + ClassMmapSize(size_class),
- // ClassMmapSize(size_class));
- uptr i;
- for (i = 0; i < ClassMmapSize(size_class);
- i += ClassSize(size_class)) {
- size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
- }
- CHECK(i == ClassMmapSize(size_class));
- allocated_size_classes_[size_class] = new_mem;
-}
-
-uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
- if (!alive_) return real_stack;
- CHECK(size <= kMaxStackMallocSize && size > 1);
- uptr size_class = ComputeSizeClass(size);
- if (!allocated_size_classes_[size_class]) {
- AllocateOneSizeClass(size_class);
- }
- FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
- CHECK(fake_frame);
- fake_frame->size_minus_one = size - 1;
- fake_frame->real_stack = real_stack;
- while (FakeFrame *top = call_stack_.top()) {
- if (top->real_stack > real_stack) break;
- call_stack_.LifoPop();
- DeallocateFrame(top);
- }
- call_stack_.LifoPush(fake_frame);
- uptr ptr = (uptr)fake_frame;
- PoisonShadow(ptr, size, 0);
- return ptr;
-}
-
-void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
- CHECK(alive_);
- uptr size = fake_frame->size_minus_one + 1;
- uptr size_class = ComputeSizeClass(size);
- CHECK(allocated_size_classes_[size_class]);
- uptr ptr = (uptr)fake_frame;
- CHECK(AddrIsInSizeClass(ptr, size_class));
- CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
- size_classes_[size_class].FifoPush(fake_frame);
-}
-
-void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
- FakeFrame *fake_frame = (FakeFrame*)ptr;
- CHECK(fake_frame->magic = kRetiredStackFrameMagic);
- CHECK(fake_frame->descr != 0);
- CHECK(fake_frame->size_minus_one == size - 1);
- PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
-}
-
} // namespace __asan
// ---------------------- Interface ---------------- {{{1
using namespace __asan; // NOLINT
-uptr __asan_stack_malloc(uptr size, uptr real_stack) {
- if (!flags()->use_fake_stack) return real_stack;
- AsanThread *t = asanThreadRegistry().GetCurrent();
- if (!t) {
- // TSD is gone, use the real stack.
- return real_stack;
- }
- uptr ptr = t->fake_stack().AllocateStack(size, real_stack);
- // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
- return ptr;
-}
-
-void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
- if (!flags()->use_fake_stack) return;
- if (ptr != real_stack) {
- FakeStack::OnFree(ptr, size, real_stack);
- }
-}
-
// ASan allocator doesn't reserve extra bytes, so normally we would
// just return "size".
uptr __asan_get_estimated_allocated_size(uptr size) {
diff --git a/compiler-rt/lib/asan/asan_allocator.h b/compiler-rt/lib/asan/asan_allocator.h
index 803aa239275..1e936c3054a 100644
--- a/compiler-rt/lib/asan/asan_allocator.h
+++ b/compiler-rt/lib/asan/asan_allocator.h
@@ -181,5 +181,40 @@ uptr asan_mz_size(const void *ptr);
void asan_mz_force_lock();
void asan_mz_force_unlock();
+// Log2 and RoundUpToPowerOfTwo should be inlined for performance.
+
+static inline uptr Log2(uptr x) {
+ CHECK(IsPowerOfTwo(x));
+#if !defined(_WIN32) || defined(__clang__)
+ return __builtin_ctzl(x);
+#elif defined(_WIN64)
+ unsigned long ret; // NOLINT
+ _BitScanForward64(&ret, x);
+ return ret;
+#else
+ unsigned long ret; // NOLINT
+ _BitScanForward(&ret, x);
+ return ret;
+#endif
+}
+
+static inline uptr RoundUpToPowerOfTwo(uptr size) {
+ CHECK(size);
+ if (IsPowerOfTwo(size)) return size;
+
+ unsigned long up; // NOLINT
+#if !defined(_WIN32) || defined(__clang__)
+ up = SANITIZER_WORDSIZE - 1 - __builtin_clzl(size);
+#elif defined(_WIN64)
+ _BitScanReverse64(&up, size);
+#else
+ _BitScanReverse(&up, size);
+#endif
+ CHECK(size < (1ULL << (up + 1)));
+ CHECK(size > (1ULL << up));
+ return 1UL << (up + 1);
+}
+
+
} // namespace __asan
#endif // ASAN_ALLOCATOR_H
diff --git a/compiler-rt/lib/asan/asan_fake_stack.cc b/compiler-rt/lib/asan/asan_fake_stack.cc
new file mode 100644
index 00000000000..7c5a16312d4
--- /dev/null
+++ b/compiler-rt/lib/asan/asan_fake_stack.cc
@@ -0,0 +1,182 @@
+//===-- asan_fake_stack.cc ------------------------------------------------===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file is a part of AddressSanitizer, an address sanity checker.
+//
+// FakeStack is used to detect use-after-return bugs.
+//===----------------------------------------------------------------------===//
+#include "asan_allocator.h"
+#include "asan_thread.h"
+#include "asan_thread_registry.h"
+#include "sanitizer/asan_interface.h"
+
+namespace __asan {
+
+FakeStack::FakeStack() {
+ CHECK(REAL(memset) != 0);
+ REAL(memset)(this, 0, sizeof(*this));
+}
+
+bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
+ uptr mem = allocated_size_classes_[size_class];
+ uptr size = ClassMmapSize(size_class);
+ bool res = mem && addr >= mem && addr < mem + size;
+ return res;
+}
+
+uptr FakeStack::AddrIsInFakeStack(uptr addr) {
+ for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
+ if (AddrIsInSizeClass(addr, i)) return allocated_size_classes_[i];
+ }
+ return 0;
+}
+
+// We may want to compute this during compilation.
+inline uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
+ uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
+ uptr log = Log2(rounded_size);
+ CHECK(alloc_size <= (1UL << log));
+ if (!(alloc_size > (1UL << (log-1)))) {
+ Printf("alloc_size %zu log %zu\n", alloc_size, log);
+ }
+ CHECK(alloc_size > (1UL << (log-1)));
+ uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
+ CHECK(res < kNumberOfSizeClasses);
+ CHECK(ClassSize(res) >= rounded_size);
+ return res;
+}
+
+void FakeFrameFifo::FifoPush(FakeFrame *node) {
+ CHECK(node);
+ node->next = 0;
+ if (first_ == 0 && last_ == 0) {
+ first_ = last_ = node;
+ } else {
+ CHECK(first_);
+ CHECK(last_);
+ last_->next = node;
+ last_ = node;
+ }
+}
+
+FakeFrame *FakeFrameFifo::FifoPop() {
+ CHECK(first_ && last_ && "Exhausted fake stack");
+ FakeFrame *res = 0;
+ if (first_ == last_) {
+ res = first_;
+ first_ = last_ = 0;
+ } else {
+ res = first_;
+ first_ = first_->next;
+ }
+ return res;
+}
+
+void FakeStack::Init(uptr stack_size) {
+ stack_size_ = stack_size;
+ alive_ = true;
+}
+
+void FakeStack::Cleanup() {
+ alive_ = false;
+ for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
+ uptr mem = allocated_size_classes_[i];
+ if (mem) {
+ PoisonShadow(mem, ClassMmapSize(i), 0);
+ allocated_size_classes_[i] = 0;
+ UnmapOrDie((void*)mem, ClassMmapSize(i));
+ }
+ }
+}
+
+uptr FakeStack::ClassMmapSize(uptr size_class) {
+ return RoundUpToPowerOfTwo(stack_size_);
+}
+
+void FakeStack::AllocateOneSizeClass(uptr size_class) {
+ CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
+ uptr new_mem = (uptr)MmapOrDie(
+ ClassMmapSize(size_class), __FUNCTION__);
+ // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
+ // asanThreadRegistry().GetCurrent()->tid(),
+ // size_class, new_mem, new_mem + ClassMmapSize(size_class),
+ // ClassMmapSize(size_class));
+ uptr i;
+ for (i = 0; i < ClassMmapSize(size_class);
+ i += ClassSize(size_class)) {
+ size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
+ }
+ CHECK(i == ClassMmapSize(size_class));
+ allocated_size_classes_[size_class] = new_mem;
+}
+
+uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
+ if (!alive_) return real_stack;
+ CHECK(size <= kMaxStackMallocSize && size > 1);
+ uptr size_class = ComputeSizeClass(size);
+ if (!allocated_size_classes_[size_class]) {
+ AllocateOneSizeClass(size_class);
+ }
+ FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
+ CHECK(fake_frame);
+ fake_frame->size_minus_one = size - 1;
+ fake_frame->real_stack = real_stack;
+ while (FakeFrame *top = call_stack_.top()) {
+ if (top->real_stack > real_stack) break;
+ call_stack_.LifoPop();
+ DeallocateFrame(top);
+ }
+ call_stack_.LifoPush(fake_frame);
+ uptr ptr = (uptr)fake_frame;
+ PoisonShadow(ptr, size, 0);
+ return ptr;
+}
+
+void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
+ CHECK(alive_);
+ uptr size = fake_frame->size_minus_one + 1;
+ uptr size_class = ComputeSizeClass(size);
+ CHECK(allocated_size_classes_[size_class]);
+ uptr ptr = (uptr)fake_frame;
+ CHECK(AddrIsInSizeClass(ptr, size_class));
+ CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
+ size_classes_[size_class].FifoPush(fake_frame);
+}
+
+void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
+ FakeFrame *fake_frame = (FakeFrame*)ptr;
+ CHECK(fake_frame->magic = kRetiredStackFrameMagic);
+ CHECK(fake_frame->descr != 0);
+ CHECK(fake_frame->size_minus_one == size - 1);
+ PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
+}
+
+} // namespace __asan
+
+// ---------------------- Interface ---------------- {{{1
+using namespace __asan; // NOLINT
+
+uptr __asan_stack_malloc(uptr size, uptr real_stack) {
+ if (!flags()->use_fake_stack) return real_stack;
+ AsanThread *t = asanThreadRegistry().GetCurrent();
+ if (!t) {
+ // TSD is gone, use the real stack.
+ return real_stack;
+ }
+ uptr ptr = t->fake_stack().AllocateStack(size, real_stack);
+ // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
+ return ptr;
+}
+
+void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
+ if (!flags()->use_fake_stack) return;
+ if (ptr != real_stack) {
+ FakeStack::OnFree(ptr, size, real_stack);
+ }
+}