Diffstat (limited to 'llvm/lib/Transforms')
-rw-r--r--  llvm/lib/Transforms/IPO/FunctionAttrs.cpp               | 11
-rw-r--r--  llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp         |  2
-rw-r--r--  llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp     | 12
-rw-r--r--  llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp       |  2
-rw-r--r--  llvm/lib/Transforms/Scalar/MemCpyOptimizer.cpp          | 22
-rw-r--r--  llvm/lib/Transforms/Scalar/Sink.cpp                     |  4
-rw-r--r--  llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp |  2
7 files changed, 26 insertions, 29 deletions
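
Orientation note (not part of the commit message): every hunk below applies the same mechanical substitution. Raw bitmask tests on ModRefInfo, such as MRB & MRI_Mod or MR != MRI_NoModRef, become calls to the wrapper predicates isNoModRef / isModSet / isRefSet / isModOrRefSet, with intersectModRef and createModRefInfo used where a mask has to be combined or derived. The sketch below is not the LLVM header; it is a minimal, self-contained approximation of what those wrappers would have to mean for the rewrites to be behaviour-preserving, assuming ModRefInfo is still the MRI_* bit-flag enum visible in the removed lines.

// Hypothetical stand-ins, not llvm/include/llvm/Analysis/AliasAnalysis.h.
#include <cassert>

enum ModRefInfo {
  MRI_NoModRef = 0,                 // neither reads nor writes the location
  MRI_Ref      = 1,                 // may read the location
  MRI_Mod      = 2,                 // may write the location
  MRI_ModRef   = MRI_Ref | MRI_Mod  // may both read and write it
};

inline bool isNoModRef(ModRefInfo MRI)    { return MRI == MRI_NoModRef; }
inline bool isRefSet(ModRefInfo MRI)      { return (MRI & MRI_Ref) != 0; }
inline bool isModSet(ModRefInfo MRI)      { return (MRI & MRI_Mod) != 0; }
inline bool isModOrRefSet(ModRefInfo MRI) { return (MRI & MRI_ModRef) != 0; }
inline ModRefInfo intersectModRef(ModRefInfo A, ModRefInfo B) {
  return ModRefInfo(A & B);         // keep only the bits present in both masks
}

int main() {
  // Under these assumed definitions, each rewritten test is equivalent to
  // the bitwise test it replaces, for every possible mod/ref value.
  for (int V = MRI_NoModRef; V <= MRI_ModRef; ++V) {
    ModRefInfo MRI = ModRefInfo(V);
    assert(isModSet(MRI)      == bool(MRI & MRI_Mod));   // Sink, DSE, TailRecursionElimination
    assert(isRefSet(MRI)      == bool(MRI & MRI_Ref));   // FunctionAttrs, DSE
    assert(isModOrRefSet(MRI) == (MRI != MRI_NoModRef)); // MemCpyOptimizer
    assert(isNoModRef(MRI)    == !(MRI & MRI_ModRef));   // FunctionAttrs
  }
  return 0;
}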
diff --git a/llvm/lib/Transforms/IPO/FunctionAttrs.cpp b/llvm/lib/Transforms/IPO/FunctionAttrs.cpp
index f9850619f96..5352e32479b 100644
--- a/llvm/lib/Transforms/IPO/FunctionAttrs.cpp
+++ b/llvm/lib/Transforms/IPO/FunctionAttrs.cpp
@@ -130,17 +130,18 @@ static MemoryAccessKind checkFunctionMemoryAccess(Function &F, bool ThisBody,
SCCNodes.count(CS.getCalledFunction()))
continue;
FunctionModRefBehavior MRB = AAR.getModRefBehavior(CS);
+ ModRefInfo MRI = createModRefInfo(MRB);
// If the call doesn't access memory, we're done.
- if (!(MRB & MRI_ModRef))
+ if (isNoModRef(MRI))
continue;
if (!AliasAnalysis::onlyAccessesArgPointees(MRB)) {
// The call could access any memory. If that includes writes, give up.
- if (MRB & MRI_Mod)
+ if (isModSet(MRI))
return MAK_MayWrite;
// If it reads, note it.
- if (MRB & MRI_Ref)
+ if (isRefSet(MRI))
ReadsMemory = true;
continue;
}
@@ -162,10 +163,10 @@ static MemoryAccessKind checkFunctionMemoryAccess(Function &F, bool ThisBody,
if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
continue;
- if (MRB & MRI_Mod)
+ if (isModSet(MRI))
// Writes non-local memory. Give up.
return MAK_MayWrite;
- if (MRB & MRI_Ref)
+ if (isRefSet(MRI))
// Ok, it reads non-local memory.
ReadsMemory = true;
}
diff --git a/llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp b/llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp
index e70e7591f6a..c4e61218f3f 100644
--- a/llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp
+++ b/llvm/lib/Transforms/ObjCARC/ObjCARCContract.cpp
@@ -248,7 +248,7 @@ static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
// Ok, now we know we have not seen a store yet. See if Inst can write to
// our load location, if it can not, just ignore the instruction.
- if (!(AA->getModRefInfo(Inst, Loc) & MRI_Mod))
+ if (!isModSet(AA->getModRefInfo(Inst, Loc)))
continue;
Store = dyn_cast<StoreInst>(Inst);
diff --git a/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp b/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp
index 877050ec177..e703014bb0e 100644
--- a/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp
+++ b/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp
@@ -594,11 +594,9 @@ static bool memoryIsNotModifiedBetween(Instruction *FirstI,
}
for (; BI != EI; ++BI) {
Instruction *I = &*BI;
- if (I->mayWriteToMemory() && I != SecondI) {
- auto Res = AA->getModRefInfo(I, MemLoc);
- if (Res & MRI_Mod)
+ if (I->mayWriteToMemory() && I != SecondI)
+ if (isModSet(AA->getModRefInfo(I, MemLoc)))
return false;
- }
}
if (B != FirstBB) {
assert(B != &FirstBB->getParent()->getEntryBlock() &&
@@ -822,9 +820,7 @@ static bool handleEndBlock(BasicBlock &BB, AliasAnalysis *AA,
// the call is live.
DeadStackObjects.remove_if([&](Value *I) {
// See if the call site touches the value.
- ModRefInfo A = AA->getModRefInfo(CS, I, getPointerSize(I, DL, *TLI));
-
- return A == MRI_ModRef || A == MRI_Ref;
+ return isRefSet(AA->getModRefInfo(CS, I, getPointerSize(I, DL, *TLI)));
});
// If all of the allocas were clobbered by the call then we're not going
@@ -1255,7 +1251,7 @@ static bool eliminateDeadStores(BasicBlock &BB, AliasAnalysis *AA,
if (DepWrite == &BB.front()) break;
// Can't look past this instruction if it might read 'Loc'.
- if (AA->getModRefInfo(DepWrite, Loc) & MRI_Ref)
+ if (isRefSet(AA->getModRefInfo(DepWrite, Loc)))
break;
InstDep = MD->getPointerDependencyFrom(Loc, /*isLoad=*/ false,
diff --git a/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp b/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp
index 3e331cddb4f..052ead8df31 100644
--- a/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp
+++ b/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp
@@ -788,7 +788,7 @@ mayLoopAccessLocation(Value *Ptr, ModRefInfo Access, Loop *L,
++BI)
for (Instruction &I : **BI)
if (IgnoredStores.count(&I) == 0 &&
- (AA.getModRefInfo(&I, StoreLoc) & Access))
+ intersectModRef(AA.getModRefInfo(&I, StoreLoc), Access))
return true;
return false;
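
A note on the hunk just above (my reading, not something the commit states): Access is itself a ModRefInfo mask, so intersectModRef keeps only the access kinds the caller asked about, and the non-empty result is used directly as the if-condition, which works while ModRefInfo is an unscoped enum that converts to int and then to bool. A tiny standalone illustration, using the same hypothetical stand-in enum as the earlier sketch:

#include <iostream>

enum ModRefInfo { MRI_NoModRef = 0, MRI_Ref = 1, MRI_Mod = 2, MRI_ModRef = 3 };

// Keep only the access kinds present in both masks.
ModRefInfo intersectModRef(ModRefInfo A, ModRefInfo B) { return ModRefInfo(A & B); }

int main() {
  ModRefInfo Access = MRI_Mod;  // illustrative query: does anything write StoreLoc?
  ModRefInfo Read   = MRI_Ref;  // an instruction that only reads StoreLoc
  ModRefInfo Write  = MRI_Mod;  // an instruction that may write StoreLoc

  // Mirrors the rewritten condition in mayLoopAccessLocation: a non-empty
  // intersection (nonzero enum value) counts as a conflicting access.
  std::cout << (intersectModRef(Read, Access)  ? "read conflicts\n"  : "read is fine\n");
  std::cout << (intersectModRef(Write, Access) ? "write conflicts\n" : "write is fine\n");
  return 0;
}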
diff --git a/llvm/lib/Transforms/Scalar/MemCpyOptimizer.cpp b/llvm/lib/Transforms/Scalar/MemCpyOptimizer.cpp
index 86d7b5e8ddd..cd3e4ba88bc 100644
--- a/llvm/lib/Transforms/Scalar/MemCpyOptimizer.cpp
+++ b/llvm/lib/Transforms/Scalar/MemCpyOptimizer.cpp
@@ -518,7 +518,7 @@ static bool moveUp(AliasAnalysis &AA, StoreInst *SI, Instruction *P,
const LoadInst *LI) {
// If the store alias this position, early bail out.
MemoryLocation StoreLoc = MemoryLocation::get(SI);
- if (AA.getModRefInfo(P, StoreLoc) != MRI_NoModRef)
+ if (isModOrRefSet(AA.getModRefInfo(P, StoreLoc)))
return false;
// Keep track of the arguments of all instruction we plan to lift
@@ -542,20 +542,20 @@ static bool moveUp(AliasAnalysis &AA, StoreInst *SI, Instruction *P,
for (auto I = --SI->getIterator(), E = P->getIterator(); I != E; --I) {
auto *C = &*I;
- bool MayAlias = AA.getModRefInfo(C, None) != MRI_NoModRef;
+ bool MayAlias = isModOrRefSet(AA.getModRefInfo(C, None));
bool NeedLift = false;
if (Args.erase(C))
NeedLift = true;
else if (MayAlias) {
NeedLift = llvm::any_of(MemLocs, [C, &AA](const MemoryLocation &ML) {
- return AA.getModRefInfo(C, ML);
+ return isModOrRefSet(AA.getModRefInfo(C, ML));
});
if (!NeedLift)
NeedLift =
llvm::any_of(CallSites, [C, &AA](const ImmutableCallSite &CS) {
- return AA.getModRefInfo(C, CS);
+ return isModOrRefSet(AA.getModRefInfo(C, CS));
});
}
@@ -565,18 +565,18 @@ static bool moveUp(AliasAnalysis &AA, StoreInst *SI, Instruction *P,
if (MayAlias) {
// Since LI is implicitly moved downwards past the lifted instructions,
// none of them may modify its source.
- if (AA.getModRefInfo(C, LoadLoc) & MRI_Mod)
+ if (isModSet(AA.getModRefInfo(C, LoadLoc)))
return false;
else if (auto CS = ImmutableCallSite(C)) {
// If we can't lift this before P, it's game over.
- if (AA.getModRefInfo(P, CS) != MRI_NoModRef)
+ if (isModOrRefSet(AA.getModRefInfo(P, CS)))
return false;
CallSites.push_back(CS);
} else if (isa<LoadInst>(C) || isa<StoreInst>(C) || isa<VAArgInst>(C)) {
// If we can't lift this before P, it's game over.
auto ML = MemoryLocation::get(C);
- if (AA.getModRefInfo(P, ML) != MRI_NoModRef)
+ if (isModOrRefSet(AA.getModRefInfo(P, ML)))
return false;
MemLocs.push_back(ML);
@@ -631,7 +631,7 @@ bool MemCpyOptPass::processStore(StoreInst *SI, BasicBlock::iterator &BBI) {
// of at the store position.
Instruction *P = SI;
for (auto &I : make_range(++LI->getIterator(), SI->getIterator())) {
- if (AA.getModRefInfo(&I, LoadLoc) & MRI_Mod) {
+ if (isModSet(AA.getModRefInfo(&I, LoadLoc))) {
P = &I;
break;
}
@@ -702,7 +702,7 @@ bool MemCpyOptPass::processStore(StoreInst *SI, BasicBlock::iterator &BBI) {
MemoryLocation StoreLoc = MemoryLocation::get(SI);
for (BasicBlock::iterator I = --SI->getIterator(), E = C->getIterator();
I != E; --I) {
- if (AA.getModRefInfo(&*I, StoreLoc) != MRI_NoModRef) {
+ if (isModOrRefSet(AA.getModRefInfo(&*I, StoreLoc))) {
C = nullptr;
break;
}
@@ -934,9 +934,9 @@ bool MemCpyOptPass::performCallSlotOptzn(Instruction *cpy, Value *cpyDest,
AliasAnalysis &AA = LookupAliasAnalysis();
ModRefInfo MR = AA.getModRefInfo(C, cpyDest, srcSize);
// If necessary, perform additional analysis.
- if (MR != MRI_NoModRef)
+ if (isModOrRefSet(MR))
MR = AA.callCapturesBefore(C, cpyDest, srcSize, &DT);
- if (MR != MRI_NoModRef)
+ if (isModOrRefSet(MR))
return false;
// We can't create address space casts here because we don't know if they're
diff --git a/llvm/lib/Transforms/Scalar/Sink.cpp b/llvm/lib/Transforms/Scalar/Sink.cpp
index 5210f165b87..cfb8a062299 100644
--- a/llvm/lib/Transforms/Scalar/Sink.cpp
+++ b/llvm/lib/Transforms/Scalar/Sink.cpp
@@ -68,7 +68,7 @@ static bool isSafeToMove(Instruction *Inst, AliasAnalysis &AA,
if (LoadInst *L = dyn_cast<LoadInst>(Inst)) {
MemoryLocation Loc = MemoryLocation::get(L);
for (Instruction *S : Stores)
- if (AA.getModRefInfo(S, Loc) & MRI_Mod)
+ if (isModSet(AA.getModRefInfo(S, Loc)))
return false;
}
@@ -83,7 +83,7 @@ static bool isSafeToMove(Instruction *Inst, AliasAnalysis &AA,
return false;
for (Instruction *S : Stores)
- if (AA.getModRefInfo(S, CS) & MRI_Mod)
+ if (isModSet(AA.getModRefInfo(S, CS)))
return false;
}
diff --git a/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp b/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp
index f5aa47f927e..a8782e05851 100644
--- a/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp
+++ b/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp
@@ -332,7 +332,7 @@ static bool canMoveAboveCall(Instruction *I, CallInst *CI, AliasAnalysis *AA) {
// Writes to memory only matter if they may alias the pointer
// being loaded from.
const DataLayout &DL = L->getModule()->getDataLayout();
- if ((AA->getModRefInfo(CI, MemoryLocation::get(L)) & MRI_Mod) ||
+ if (isModSet(AA->getModRefInfo(CI, MemoryLocation::get(L))) ||
!isSafeToLoadUnconditionally(L->getPointerOperand(),
L->getAlignment(), DL, L))
return false;