summaryrefslogtreecommitdiffstats
path: root/llvm/lib/Transforms
diff options
context:
space:
mode:
Diffstat (limited to 'llvm/lib/Transforms')
-rw-r--r--llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp165
1 files changed, 106 insertions, 59 deletions
diff --git a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
index 0902f355b08..31cf273d051 100644
--- a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
+++ b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
@@ -55,6 +55,7 @@
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
#include <algorithm>
#include <iomanip>
+#include <limits>
#include <sstream>
#include <string>
#include <system_error>
@@ -811,11 +812,19 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
/// Finds alloca where the value comes from.
AllocaInst *findAllocaForValue(Value *V);
- void poisonStackFrameInline(ArrayRef<uint8_t> ShadowBytes, size_t Begin,
- size_t End, IRBuilder<> &IRB, Value *ShadowBase,
- bool DoPoison);
- void poisonStackFrame(ArrayRef<uint8_t> ShadowBytes, IRBuilder<> &IRB,
- Value *ShadowBase, bool DoPoison);
+
+ // Copies bytes from ShadowBytes into shadow memory for indexes where
+ // ShadowMask is not zero. If ShadowMask[i] is zero, we assume that
+ // ShadowBytes[i] is constantly zero and doesn't need to be overwritten.
+ void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
+ IRBuilder<> &IRB, Value *ShadowBase);
+ void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
+ size_t Begin, size_t End, IRBuilder<> &IRB,
+ Value *ShadowBase);
+ void copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
+ ArrayRef<uint8_t> ShadowBytes, size_t Begin,
+ size_t End, IRBuilder<> &IRB, Value *ShadowBase);
+
void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
Value *createAllocaForLayout(IRBuilder<> &IRB, const ASanStackFrameLayout &L,
@@ -1958,15 +1967,11 @@ void FunctionStackPoisoner::initializeCallbacks(Module &M) {
kAsanAllocasUnpoison, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr));
}
-// If DoPoison is true function copies ShadowBytes into shadow memory.
-// If DoPoison is false function sets 0s into shadow memory.
-// Function assumes that if ShadowBytes[i] is 0, then corresponding shadow
-// memory is constant for duration of the function and it contains 0s. So we
-// will try to minimize writes into corresponding addresses of the real shadow
-// memory.
-void FunctionStackPoisoner::poisonStackFrameInline(
- ArrayRef<uint8_t> ShadowBytes, size_t Begin, size_t End, IRBuilder<> &IRB,
- Value *ShadowBase, bool DoPoison) {
+void FunctionStackPoisoner::copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
+ ArrayRef<uint8_t> ShadowBytes,
+ size_t Begin, size_t End,
+ IRBuilder<> &IRB,
+ Value *ShadowBase) {
if (Begin >= End)
return;
@@ -1976,11 +1981,12 @@ void FunctionStackPoisoner::poisonStackFrameInline(
const bool IsLittleEndian = F.getParent()->getDataLayout().isLittleEndian();
  // Poison given range in shadow using largest store size without leading and
- // trailing zeros. Zeros never change, so they need neither poisoning nor
- // up-poisoning, but we don't mind if some of them get into a middle of a
- // store.
+ // trailing zeros in ShadowMask. Zeros never change, so they need neither
+ // poisoning nor up-poisoning. Still we don't mind if some of them get into a
+ // middle of a store.
for (size_t i = Begin; i < End;) {
- if (!ShadowBytes[i]) {
+ if (!ShadowMask[i]) {
+ assert(!ShadowBytes[i]);
++i;
continue;
}
@@ -1991,22 +1997,17 @@ void FunctionStackPoisoner::poisonStackFrameInline(
StoreSizeInBytes /= 2;
// Minimize store size by trimming trailing zeros.
- for (size_t j = StoreSizeInBytes - 1; j && !ShadowBytes[i + j]; --j) {
+ for (size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) {
while (j <= StoreSizeInBytes / 2)
StoreSizeInBytes /= 2;
}
uint64_t Val = 0;
- if (DoPoison) {
- for (size_t j = 0; j < StoreSizeInBytes; j++) {
- if (IsLittleEndian)
- Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
- else
- Val = (Val << 8) | ShadowBytes[i + j];
- }
- assert(Val); // Impossible because ShadowBytes[i] != 0
- } else {
- Val = 0;
+ for (size_t j = 0; j < StoreSizeInBytes; j++) {
+ if (IsLittleEndian)
+ Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
+ else
+ Val = (Val << 8) | ShadowBytes[i + j];
}
Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
@@ -2018,30 +2019,33 @@ void FunctionStackPoisoner::poisonStackFrameInline(
}
}
-void FunctionStackPoisoner::poisonStackFrame(ArrayRef<uint8_t> ShadowBytes,
- IRBuilder<> &IRB,
- Value *ShadowBase, bool DoPoison) {
- auto ValueToWrite = [&](size_t i) {
- if (DoPoison)
- return ShadowBytes[i];
- return static_cast<uint8_t>(0);
- };
+void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
+ ArrayRef<uint8_t> ShadowBytes,
+ IRBuilder<> &IRB, Value *ShadowBase) {
+ copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.size(), IRB, ShadowBase);
+}
- const size_t End = ShadowBytes.size();
- size_t Done = 0;
- for (size_t i = 0, j = 1; i < End; i = j++) {
- if (!ShadowBytes[i])
+void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
+ ArrayRef<uint8_t> ShadowBytes,
+ size_t Begin, size_t End,
+ IRBuilder<> &IRB, Value *ShadowBase) {
+ assert(ShadowMask.size() == ShadowBytes.size());
+ size_t Done = Begin;
+ for (size_t i = Begin, j = Begin + 1; i < End; i = j++) {
+ if (!ShadowMask[i]) {
+ assert(!ShadowBytes[i]);
continue;
- uint8_t Val = ValueToWrite(i);
+ }
+ uint8_t Val = ShadowBytes[i];
if (!AsanSetShadowFunc[Val])
continue;
// Skip same values.
- for (; j < End && ShadowBytes[j] && Val == ValueToWrite(j); ++j) {
+ for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) {
}
if (j - i >= ClMaxInlinePoisoningSize) {
- poisonStackFrameInline(ShadowBytes, Done, i, IRB, ShadowBase, DoPoison);
+ copyToShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, ShadowBase);
IRB.CreateCall(AsanSetShadowFunc[Val],
{IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
ConstantInt::get(IntptrTy, j - i)});
@@ -2049,7 +2053,7 @@ void FunctionStackPoisoner::poisonStackFrame(ArrayRef<uint8_t> ShadowBytes,
}
}
- poisonStackFrameInline(ShadowBytes, Done, End, IRB, ShadowBase, DoPoison);
+ copyToShadowInline(ShadowMask, ShadowBytes, Done, End, IRB, ShadowBase);
}
// Fake stack allocator (asan_fake_stack.h) has 11 size classes
@@ -2155,23 +2159,33 @@ void FunctionStackPoisoner::processStaticAllocas() {
// If we have a call to llvm.localescape, keep it in the entry block.
if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore);
- // Insert poison calls for lifetime intrinsics for static allocas.
+ // Find static allocas with lifetime analysis.
+ DenseMap<const AllocaInst *, const ASanStackVariableDescription *>
+ AllocaToSVDMap;
for (const auto &APC : StaticAllocaPoisonCallVec) {
assert(APC.InsBefore);
assert(APC.AI);
assert(ASan.isInterestingAlloca(*APC.AI));
assert(APC.AI->isStaticAlloca());
- IRBuilder<> IRB(APC.InsBefore);
- poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
+ if (ClExperimentalPoisoning) {
+ AllocaToSVDMap[APC.AI] = nullptr;
+ } else {
+ IRBuilder<> IRB(APC.InsBefore);
+ poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
+ }
}
SmallVector<ASanStackVariableDescription, 16> SVD;
SVD.reserve(AllocaVec.size());
for (AllocaInst *AI : AllocaVec) {
+ size_t UseAfterScopePoisonSize =
+ AllocaToSVDMap.find(AI) != AllocaToSVDMap.end()
+ ? ASan.getAllocaSizeInBytes(*AI)
+ : 0;
ASanStackVariableDescription D = {AI->getName().data(),
ASan.getAllocaSizeInBytes(*AI),
- 0,
+ UseAfterScopePoisonSize,
AI->getAlignment(),
AI,
0};
@@ -2182,8 +2196,7 @@ void FunctionStackPoisoner::processStaticAllocas() {
size_t MinHeaderSize = ASan.LongSize / 2;
const ASanStackFrameLayout &L =
ComputeASanStackFrameLayout(SVD, 1ULL << Mapping.Scale, MinHeaderSize);
- const SmallVector<uint8_t, 64> &ShadowBytes =
- GetShadowBytesAfterScope(SVD, L);
+
DEBUG(dbgs() << L.DescriptionString << " --- " << L.FrameSize << "\n");
uint64_t LocalStackSize = L.FrameSize;
bool DoStackMalloc = ClUseAfterReturn && !ASan.CompileKernel &&
@@ -2278,22 +2291,55 @@ void FunctionStackPoisoner::processStaticAllocas() {
IntptrPtrTy);
IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
- // Poison the stack redzones at the entry.
+ const auto &ShadowAfterScope = GetShadowBytesAfterScope(SVD, L);
+
+ // Poison the stack red zones at the entry.
Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
- poisonStackFrame(ShadowBytes, IRB, ShadowBase, true);
+  // As the mask we must use the most poisoned case: red zones and after scope.
+  // As the bytes we can use either the same or just the red zones only.
+ copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
+
+ if (ClExperimentalPoisoning && !StaticAllocaPoisonCallVec.empty()) {
+ // Complete AllocaToSVDMap
+ for (const auto &Desc : SVD) {
+ auto It = AllocaToSVDMap.find(Desc.AI);
+ if (It != AllocaToSVDMap.end()) {
+ It->second = &Desc;
+ }
+ }
+
+ const auto &ShadowInScope = GetShadowBytes(SVD, L);
+
+ // Poison static allocas near lifetime intrinsics.
+ for (const auto &APC : StaticAllocaPoisonCallVec) {
+ // Must be already set.
+ assert(AllocaToSVDMap[APC.AI]);
+ const auto &Desc = *AllocaToSVDMap[APC.AI];
+ assert(Desc.Offset % L.Granularity == 0);
+ size_t Begin = Desc.Offset / L.Granularity;
+ size_t End = Begin + (APC.Size + L.Granularity - 1) / L.Granularity;
+
+ IRBuilder<> IRB(APC.InsBefore);
+ copyToShadow(ShadowAfterScope,
+ APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
+ IRB, ShadowBase);
+ }
+ }
+
+ SmallVector<uint8_t, 64> ShadowClean(ShadowAfterScope.size(), 0);
auto UnpoisonStack = [&](IRBuilder<> &IRB) {
// Do this always as poisonAlloca can be disabled with
// detect_stack_use_after_scope=0.
- poisonStackFrame(ShadowBytes, IRB, ShadowBase, false);
- if (!StaticAllocaPoisonCallVec.empty()) {
+ copyToShadow(ShadowAfterScope, ShadowClean, IRB, ShadowBase);
+ if (!ClExperimentalPoisoning && !StaticAllocaPoisonCallVec.empty()) {
// If we poisoned some allocas in llvm.lifetime analysis,
// unpoison whole stack frame now.
poisonAlloca(LocalStackBase, LocalStackSize, IRB, false);
}
};
- SmallVector<uint8_t, 64> ShadowBytesAfterReturn;
+ SmallVector<uint8_t, 64> ShadowAfterReturn;
// (Un)poison the stack before all ret instructions.
for (auto Ret : RetVec) {
@@ -2321,9 +2367,10 @@ void FunctionStackPoisoner::processStaticAllocas() {
IRBuilder<> IRBPoison(ThenTerm);
if (StackMallocIdx <= 4) {
int ClassSize = kMinStackMallocSize << StackMallocIdx;
- ShadowBytesAfterReturn.resize(ClassSize >> Mapping.Scale,
- kAsanStackUseAfterReturnMagic);
- poisonStackFrame(ShadowBytesAfterReturn, IRBPoison, ShadowBase, true);
+ ShadowAfterReturn.resize(ClassSize / L.Granularity,
+ kAsanStackUseAfterReturnMagic);
+ copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
+ ShadowBase);
Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
FakeStack,
ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
OpenPOWER on IntegriCloud