Diffstat (limited to 'compiler-rt/lib/tsan/rtl/tsan_mman.cc')
-rw-r--r--  compiler-rt/lib/tsan/rtl/tsan_mman.cc | 28 ++++++++++++++--------------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/compiler-rt/lib/tsan/rtl/tsan_mman.cc b/compiler-rt/lib/tsan/rtl/tsan_mman.cc
index 7247c6e0003..9e31a9d832a 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_mman.cc
+++ b/compiler-rt/lib/tsan/rtl/tsan_mman.cc
@@ -67,14 +67,14 @@ void InitializeAllocator() {
allocator()->Init(common_flags()->allocator_may_return_null);
}
-void AllocatorThreadStart(ThreadState *thr) {
- allocator()->InitCache(&thr->alloc_cache);
- internal_allocator()->InitCache(&thr->internal_alloc_cache);
+void AllocatorProcStart(Processor *proc) {
+ allocator()->InitCache(&proc->alloc_cache);
+ internal_allocator()->InitCache(&proc->internal_alloc_cache);
}
-void AllocatorThreadFinish(ThreadState *thr) {
- allocator()->DestroyCache(&thr->alloc_cache);
- internal_allocator()->DestroyCache(&thr->internal_alloc_cache);
+void AllocatorProcFinish(Processor *proc) {
+ allocator()->DestroyCache(&proc->alloc_cache);
+ internal_allocator()->DestroyCache(&proc->internal_alloc_cache);
}
void AllocatorPrintStats() {
@@ -98,7 +98,7 @@ static void SignalUnsafeCall(ThreadState *thr, uptr pc) {
void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align, bool signal) {
if ((sz >= (1ull << 40)) || (align >= (1ull << 40)))
return allocator()->ReturnNullOrDie();
- void *p = allocator()->Allocate(&thr->alloc_cache, sz, align);
+ void *p = allocator()->Allocate(&thr->proc->alloc_cache, sz, align);
if (p == 0)
return 0;
if (ctx && ctx->initialized)
@@ -120,7 +120,7 @@ void *user_calloc(ThreadState *thr, uptr pc, uptr size, uptr n) {
void user_free(ThreadState *thr, uptr pc, void *p, bool signal) {
if (ctx && ctx->initialized)
OnUserFree(thr, pc, (uptr)p, true);
- allocator()->Deallocate(&thr->alloc_cache, p);
+ allocator()->Deallocate(&thr->proc->alloc_cache, p);
if (signal)
SignalUnsafeCall(thr, pc);
}
@@ -136,7 +136,7 @@ void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
CHECK_NE(p, (void*)0);
- uptr sz = ctx->metamap.FreeBlock(thr, pc, p);
+ uptr sz = ctx->metamap.FreeBlock(thr->proc, p);
DPrintf("#%d: free(%p, %zu)\n", thr->tid, p, sz);
if (write && thr->ignore_reads_and_writes == 0)
MemoryRangeFreed(thr, pc, (uptr)p, sz);
@@ -187,7 +187,7 @@ void *internal_alloc(MBlockType typ, uptr sz) {
thr->nomalloc = 0; // CHECK calls internal_malloc().
CHECK(0);
}
- return InternalAlloc(sz, &thr->internal_alloc_cache);
+ return InternalAlloc(sz, &thr->proc->internal_alloc_cache);
}
void internal_free(void *p) {
@@ -196,7 +196,7 @@ void internal_free(void *p) {
thr->nomalloc = 0; // CHECK calls internal_malloc().
CHECK(0);
}
- InternalFree(p, &thr->internal_alloc_cache);
+ InternalFree(p, &thr->proc->internal_alloc_cache);
}
} // namespace __tsan
@@ -238,8 +238,8 @@ uptr __sanitizer_get_allocated_size(const void *p) {
void __tsan_on_thread_idle() {
ThreadState *thr = cur_thread();
- allocator()->SwallowCache(&thr->alloc_cache);
- internal_allocator()->SwallowCache(&thr->internal_alloc_cache);
- ctx->metamap.OnThreadIdle(thr);
+ allocator()->SwallowCache(&thr->proc->alloc_cache);
+ internal_allocator()->SwallowCache(&thr->proc->internal_alloc_cache);
+ ctx->metamap.OnProcIdle(thr->proc);
}
} // extern "C"
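The hunks above consistently replace `thr->alloc_cache` with `thr->proc->alloc_cache` and rename the per-thread allocator hooks to per-processor ones (AllocatorThreadStart/Finish become AllocatorProcStart/Finish). Below is a minimal sketch of the resulting relationship between the two structures; the types and the tiny cache are simplified stand-ins, not the real TSan `ThreadState`/`Processor` definitions or allocator API.

```cpp
// Sketch only: illustrates caches owned by a Processor and reached via
// thr->proc, mirroring the shape of this diff. Not the real TSan code.
#include <cstdio>

struct AllocatorCache {
  // Stand-in for the real allocator cache; just tracks init state.
  bool initialized = false;
};

struct Processor {            // physical state: owns the caches
  AllocatorCache alloc_cache;
  AllocatorCache internal_alloc_cache;
};

struct ThreadState {          // logical state: points at a Processor
  Processor *proc = nullptr;
};

// Same shape as the renamed hooks in the diff.
void AllocatorProcStart(Processor *proc) {
  proc->alloc_cache.initialized = true;
  proc->internal_alloc_cache.initialized = true;
}

void AllocatorProcFinish(Processor *proc) {
  proc->alloc_cache.initialized = false;
  proc->internal_alloc_cache.initialized = false;
}

int main() {
  Processor proc;
  ThreadState thr;
  thr.proc = &proc;            // thread attached to a processor
  AllocatorProcStart(thr.proc);
  // Allocation paths now go through thr.proc->alloc_cache rather than a
  // cache embedded in ThreadState, as in user_alloc/user_free above.
  std::printf("cache ready: %d\n", thr.proc->alloc_cache.initialized);
  AllocatorProcFinish(thr.proc);
  return 0;
}
```

The extra indirection suggests the allocator caches' lifetime now follows the `Processor` object rather than each individual thread, consistent with the renamed hooks and the `ctx->metamap.OnProcIdle(thr->proc)` call in the last hunk.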