author | Dean Michael Berris <dberris@google.com> | 2018-12-06 00:25:56 +0000
committer | Dean Michael Berris <dberris@google.com> | 2018-12-06 00:25:56 +0000
commit | cb447a260446de91592227643e4775bd2fe54f2e | (patch)
tree | 2d51b3b94d4f8b95a4be447dbd0b9e7261bd2db5 | /compiler-rt/lib/xray/tests
parent | 2d36473873c6176f0658a95796046b383def2ac1 | (diff)
Re-land r348335 "[XRay] Move-only Allocator, FunctionCallTrie, and Array"
Continuation of D54989.
Additional changes:
- Use `.AppendEmplace(...)` instead of `.Append(Type{...})` to appease
  GCC 4.8, which gets confused about whether a braced argument is an
  initializer_list or a temporary aggregate-initialized object (a sketch
  of the distinction follows below).
llvm-svn: 348438
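
To illustrate the `.AppendEmplace(...)` vs. `.Append(Type{...})` distinction mentioned above, here is a minimal, self-contained sketch. It does not use the real XRay segmented `Array`; `Container` and `Node` are hypothetical stand-ins backed by `std::vector`.

    #include <utility>
    #include <vector>

    struct Node {
      int FId;
      unsigned long long Count;
      Node(int F, unsigned long long C) : FId(F), Count(C) {}
    };

    struct Container {
      std::vector<Node> Storage;

      // Append takes an already-constructed value and copies it in.
      Node &Append(const Node &N) {
        Storage.push_back(N);
        return Storage.back();
      }

      // AppendEmplace forwards the arguments straight to Node's constructor,
      // so no braced temporary is involved; this is the form that sidesteps
      // the GCC 4.8 initializer_list vs. aggregate confusion described above.
      template <class... Args> Node &AppendEmplace(Args &&... As) {
        Storage.emplace_back(std::forward<Args>(As)...);
        return Storage.back();
      }
    };

    int main() {
      Container C;
      C.Append(Node{1, 100});  // builds a temporary Node, then copies it
      C.AppendEmplace(2, 200); // constructs the Node in place from the arguments
      return C.Storage.size() == 2 ? 0 : 1;
    }

With `Append`, the compiler has to decide whether `Node{1, 100}` is a braced temporary or an initializer_list argument; forwarding the raw arguments through `AppendEmplace` removes that ambiguity entirely.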
Diffstat (limited to 'compiler-rt/lib/xray/tests')
-rw-r--r-- | compiler-rt/lib/xray/tests/unit/function_call_trie_test.cc | 30
-rw-r--r-- | compiler-rt/lib/xray/tests/unit/segmented_array_test.cc | 86
2 files changed, 116 insertions, 0 deletions
diff --git a/compiler-rt/lib/xray/tests/unit/function_call_trie_test.cc b/compiler-rt/lib/xray/tests/unit/function_call_trie_test.cc
index 9b0f21090fb..01be691228f 100644
--- a/compiler-rt/lib/xray/tests/unit/function_call_trie_test.cc
+++ b/compiler-rt/lib/xray/tests/unit/function_call_trie_test.cc
@@ -309,6 +309,36 @@ TEST(FunctionCallTrieTest, MergeInto) {
   EXPECT_EQ(F2.Callees.size(), 0u);
 }
 
+TEST(FunctionCallTrieTest, PlacementNewOnAlignedStorage) {
+  profilingFlags()->setDefaults();
+  typename std::aligned_storage<sizeof(FunctionCallTrie::Allocators),
+                                alignof(FunctionCallTrie::Allocators)>::type
+      AllocatorsStorage;
+  new (&AllocatorsStorage)
+      FunctionCallTrie::Allocators(FunctionCallTrie::InitAllocators());
+  auto *A =
+      reinterpret_cast<FunctionCallTrie::Allocators *>(&AllocatorsStorage);
+
+  typename std::aligned_storage<sizeof(FunctionCallTrie),
+                                alignof(FunctionCallTrie)>::type FCTStorage;
+  new (&FCTStorage) FunctionCallTrie(*A);
+  auto *T = reinterpret_cast<FunctionCallTrie *>(&FCTStorage);
+
+  // Put some data into it.
+  T->enterFunction(1, 0, 0);
+  T->exitFunction(1, 1, 0);
+
+  // Re-initialize the objects in storage.
+  T->~FunctionCallTrie();
+  A->~Allocators();
+  new (A) FunctionCallTrie::Allocators(FunctionCallTrie::InitAllocators());
+  new (T) FunctionCallTrie(*A);
+
+  // Then put some data into it again.
+  T->enterFunction(1, 0, 0);
+  T->exitFunction(1, 1, 0);
+}
+
 } // namespace
 } // namespace __xray
diff --git a/compiler-rt/lib/xray/tests/unit/segmented_array_test.cc b/compiler-rt/lib/xray/tests/unit/segmented_array_test.cc
index 80991b1b97a..73120aafc8e 100644
--- a/compiler-rt/lib/xray/tests/unit/segmented_array_test.cc
+++ b/compiler-rt/lib/xray/tests/unit/segmented_array_test.cc
@@ -221,5 +221,91 @@ TEST(SegmentedArrayTest, SimulateStackBehaviour) {
   }
 }
 
+TEST(SegmentedArrayTest, PlacementNewOnAlignedStorage) {
+  using AllocatorType = typename Array<ShadowStackEntry>::AllocatorType;
+  typename std::aligned_storage<sizeof(AllocatorType),
+                                alignof(AllocatorType)>::type AllocatorStorage;
+  new (&AllocatorStorage) AllocatorType(1 << 10);
+  auto *A = reinterpret_cast<AllocatorType *>(&AllocatorStorage);
+  typename std::aligned_storage<sizeof(Array<ShadowStackEntry>),
+                                alignof(Array<ShadowStackEntry>)>::type
+      ArrayStorage;
+  new (&ArrayStorage) Array<ShadowStackEntry>(*A);
+  auto *Data = reinterpret_cast<Array<ShadowStackEntry> *>(&ArrayStorage);
+
+  static uint64_t Dummy = 0;
+  constexpr uint64_t Max = 9;
+
+  for (uint64_t i = 0; i < Max; ++i) {
+    auto P = Data->Append({i, &Dummy});
+    ASSERT_NE(P, nullptr);
+    ASSERT_EQ(P->NodePtr, &Dummy);
+    auto &Back = Data->back();
+    ASSERT_EQ(Back.NodePtr, &Dummy);
+    ASSERT_EQ(Back.EntryTSC, i);
+  }
+
+  // Simulate a stack by checking the data from the end as we're trimming.
+  auto Counter = Max;
+  ASSERT_EQ(Data->size(), size_t(Max));
+  while (!Data->empty()) {
+    const auto &Top = Data->back();
+    uint64_t *TopNode = Top.NodePtr;
+    EXPECT_EQ(TopNode, &Dummy) << "Counter = " << Counter;
+    Data->trim(1);
+    --Counter;
+    ASSERT_EQ(Data->size(), size_t(Counter));
+  }
+
+  // Once the stack is exhausted, we re-use the storage.
+  for (uint64_t i = 0; i < Max; ++i) {
+    auto P = Data->Append({i, &Dummy});
+    ASSERT_NE(P, nullptr);
+    ASSERT_EQ(P->NodePtr, &Dummy);
+    auto &Back = Data->back();
+    ASSERT_EQ(Back.NodePtr, &Dummy);
+    ASSERT_EQ(Back.EntryTSC, i);
+  }
+
+  // We re-initialize the storage, by calling the destructor and
+  // placement-new'ing again.
+  Data->~Array();
+  A->~AllocatorType();
+  new (A) AllocatorType(1 << 10);
+  new (Data) Array<ShadowStackEntry>(*A);
+
+  // Then re-do the test.
+  for (uint64_t i = 0; i < Max; ++i) {
+    auto P = Data->Append({i, &Dummy});
+    ASSERT_NE(P, nullptr);
+    ASSERT_EQ(P->NodePtr, &Dummy);
+    auto &Back = Data->back();
+    ASSERT_EQ(Back.NodePtr, &Dummy);
+    ASSERT_EQ(Back.EntryTSC, i);
+  }
+
+  // Simulate a stack by checking the data from the end as we're trimming.
+  Counter = Max;
+  ASSERT_EQ(Data->size(), size_t(Max));
+  while (!Data->empty()) {
+    const auto &Top = Data->back();
+    uint64_t *TopNode = Top.NodePtr;
+    EXPECT_EQ(TopNode, &Dummy) << "Counter = " << Counter;
+    Data->trim(1);
+    --Counter;
+    ASSERT_EQ(Data->size(), size_t(Counter));
+  }
+
+  // Once the stack is exhausted, we re-use the storage.
+  for (uint64_t i = 0; i < Max; ++i) {
+    auto P = Data->Append({i, &Dummy});
+    ASSERT_NE(P, nullptr);
+    ASSERT_EQ(P->NodePtr, &Dummy);
+    auto &Back = Data->back();
+    ASSERT_EQ(Back.NodePtr, &Dummy);
+    ASSERT_EQ(Back.EntryTSC, i);
+  }
+}
+
 } // namespace
 } // namespace __xray
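
Both new tests follow the same pattern: carve out suitably aligned raw storage, placement-new an object into it, destroy it explicitly, and construct a replacement in the same storage. A minimal stand-alone sketch of that lifecycle, using a hypothetical move-only `Widget` type rather than the XRay types, looks like this:

    #include <new>
    #include <type_traits>

    struct Widget {
      int Value;
      explicit Widget(int V) : Value(V) {}
      Widget(const Widget &) = delete; // move-only, like the XRay types under test
      Widget(Widget &&) = default;
    };

    int main() {
      // Raw storage with the right size and alignment for a Widget.
      std::aligned_storage<sizeof(Widget), alignof(Widget)>::type Storage;

      new (&Storage) Widget(1); // begin the first object's lifetime in place
      auto *W = reinterpret_cast<Widget *>(&Storage);

      W->~Widget();      // end that lifetime explicitly...
      new (W) Widget(2); // ...and construct a fresh object in the same storage

      int Result = W->Value; // 2
      W->~Widget();          // destroy the last object before the storage goes away
      return Result == 2 ? 0 : 1;
    }

The explicit destructor calls matter because objects created with placement new are never destroyed automatically; the tests above do the same `T->~FunctionCallTrie()` / `Data->~Array()` dance before re-initializing the storage.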