Diffstat (limited to 'llvm/lib/Transforms/Scalar')
-rw-r--r--   llvm/lib/Transforms/Scalar/ADCE.cpp                       2
-rw-r--r--   llvm/lib/Transforms/Scalar/IndVarSimplify.cpp             6
-rw-r--r--   llvm/lib/Transforms/Scalar/JumpThreading.cpp              4
-rw-r--r--   llvm/lib/Transforms/Scalar/LoopInstSimplify.cpp           4
-rw-r--r--   llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp        10
-rw-r--r--   llvm/lib/Transforms/Scalar/Reassociate.cpp                8
-rw-r--r--   llvm/lib/Transforms/Scalar/SCCP.cpp                       3
-rw-r--r--   llvm/lib/Transforms/Scalar/SROA.cpp                      14
-rw-r--r--   llvm/lib/Transforms/Scalar/SampleProfile.cpp              8
-rw-r--r--   llvm/lib/Transforms/Scalar/ScalarReplAggregates.cpp       2
-rw-r--r--   llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp   2
11 files changed, 32 insertions(+), 31 deletions(-)
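
Every hunk below applies the same mechanical update: the small-set insert() used at these call sites now returns std::pair<iterator, bool> (the same shape as std::set::insert) rather than a plain bool, so each site that used the return value as a "was this newly inserted?" test now reads .second. The snippet that follows is not part of the patch; it is a minimal, self-contained sketch of that visited-set/worklist idiom, using std::set as a stand-in for SmallPtrSet since its insert() has the same pair-returning signature the updated call sites rely on.

// Sketch of the visited-set idiom the hunks below migrate:
// insert(X).second is true only when X was actually inserted,
// i.e. the first time it is seen.
#include <cassert>
#include <set>
#include <vector>

int main() {
  std::set<int> Visited;                        // stand-in for SmallPtrSet
  const std::vector<int> Worklist = {1, 2, 2, 3, 1};
  std::vector<int> Order;

  for (int V : Worklist)
    if (Visited.insert(V).second)               // first time we see V?
      Order.push_back(V);                       // process each value once

  assert((Order == std::vector<int>{1, 2, 3}));
  return 0;
}
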
diff --git a/llvm/lib/Transforms/Scalar/ADCE.cpp b/llvm/lib/Transforms/Scalar/ADCE.cpp
index 1a3a4aadce6..3d9198469bc 100644
--- a/llvm/lib/Transforms/Scalar/ADCE.cpp
+++ b/llvm/lib/Transforms/Scalar/ADCE.cpp
@@ -73,7 +73,7 @@ bool ADCE::runOnFunction(Function& F) {
for (Instruction::op_iterator OI = curr->op_begin(), OE = curr->op_end();
OI != OE; ++OI)
if (Instruction* Inst = dyn_cast<Instruction>(OI))
- if (alive.insert(Inst))
+ if (alive.insert(Inst).second)
worklist.push_back(Inst);
}
diff --git a/llvm/lib/Transforms/Scalar/IndVarSimplify.cpp b/llvm/lib/Transforms/Scalar/IndVarSimplify.cpp
index fc6d1837060..c01f57f26ea 100644
--- a/llvm/lib/Transforms/Scalar/IndVarSimplify.cpp
+++ b/llvm/lib/Transforms/Scalar/IndVarSimplify.cpp
@@ -1102,7 +1102,7 @@ void WidenIV::pushNarrowIVUsers(Instruction *NarrowDef, Instruction *WideDef) {
Instruction *NarrowUser = cast<Instruction>(U);
// Handle data flow merges and bizarre phi cycles.
- if (!Widened.insert(NarrowUser))
+ if (!Widened.insert(NarrowUser).second)
continue;
NarrowIVUsers.push_back(NarrowIVDefUse(NarrowDef, NarrowUser, WideDef));
@@ -1284,7 +1284,7 @@ void IndVarSimplify::SimplifyAndExtend(Loop *L,
static bool isHighCostExpansion(const SCEV *S, BranchInst *BI,
SmallPtrSetImpl<const SCEV*> &Processed,
ScalarEvolution *SE) {
- if (!Processed.insert(S))
+ if (!Processed.insert(S).second)
return false;
// If the backedge-taken count is a UDiv, it's very likely a UDiv that
@@ -1475,7 +1475,7 @@ static bool hasConcreteDefImpl(Value *V, SmallPtrSetImpl<Value*> &Visited,
// Optimistically handle other instructions.
for (User::op_iterator OI = I->op_begin(), E = I->op_end(); OI != E; ++OI) {
- if (!Visited.insert(*OI))
+ if (!Visited.insert(*OI).second)
continue;
if (!hasConcreteDefImpl(*OI, Visited, Depth+1))
return false;
diff --git a/llvm/lib/Transforms/Scalar/JumpThreading.cpp b/llvm/lib/Transforms/Scalar/JumpThreading.cpp
index 25a8b0cdbd3..60a4925eeb1 100644
--- a/llvm/lib/Transforms/Scalar/JumpThreading.cpp
+++ b/llvm/lib/Transforms/Scalar/JumpThreading.cpp
@@ -932,7 +932,7 @@ bool JumpThreading::SimplifyPartiallyRedundantLoad(LoadInst *LI) {
BasicBlock *PredBB = *PI;
// If we already scanned this predecessor, skip it.
- if (!PredsScanned.insert(PredBB))
+ if (!PredsScanned.insert(PredBB).second)
continue;
// Scan the predecessor to see if the value is available in the pred.
@@ -1151,7 +1151,7 @@ bool JumpThreading::ProcessThreadableEdges(Value *Cond, BasicBlock *BB,
for (unsigned i = 0, e = PredValues.size(); i != e; ++i) {
BasicBlock *Pred = PredValues[i].second;
- if (!SeenPreds.insert(Pred))
+ if (!SeenPreds.insert(Pred).second)
continue; // Duplicate predecessor entry.
// If the predecessor ends with an indirect goto, we can't change its
diff --git a/llvm/lib/Transforms/Scalar/LoopInstSimplify.cpp b/llvm/lib/Transforms/Scalar/LoopInstSimplify.cpp
index 7c29b8cc07e..8fd7c8fbaaa 100644
--- a/llvm/lib/Transforms/Scalar/LoopInstSimplify.cpp
+++ b/llvm/lib/Transforms/Scalar/LoopInstSimplify.cpp
@@ -152,7 +152,7 @@ bool LoopInstSimplify::runOnLoop(Loop *L, LPPassManager &LPM) {
for (succ_iterator SI = succ_begin(BB), SE = succ_end(BB); SI != SE;
++SI) {
BasicBlock *SuccBB = *SI;
- if (!Visited.insert(SuccBB))
+ if (!Visited.insert(SuccBB).second)
continue;
const Loop *SuccLoop = LI->getLoopFor(SuccBB);
@@ -165,7 +165,7 @@ bool LoopInstSimplify::runOnLoop(Loop *L, LPPassManager &LPM) {
for (unsigned i = 0; i < SubLoopExitBlocks.size(); ++i) {
BasicBlock *ExitBB = SubLoopExitBlocks[i];
- if (LI->getLoopFor(ExitBB) == L && Visited.insert(ExitBB))
+ if (LI->getLoopFor(ExitBB) == L && Visited.insert(ExitBB).second)
VisitStack.push_back(WorklistItem(ExitBB, false));
}
diff --git a/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp b/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
index 9ef9b05639a..7b60373dc50 100644
--- a/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
+++ b/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
@@ -762,7 +762,7 @@ static bool isHighCostExpansion(const SCEV *S,
Processed, SE);
}
- if (!Processed.insert(S))
+ if (!Processed.insert(S).second)
return false;
if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(S)) {
@@ -975,7 +975,7 @@ void Cost::RatePrimaryRegister(const SCEV *Reg,
Lose();
return;
}
- if (Regs.insert(Reg)) {
+ if (Regs.insert(Reg).second) {
RateRegister(Reg, Regs, L, SE, DT);
if (LoserRegs && isLoser())
LoserRegs->insert(Reg);
@@ -2802,7 +2802,7 @@ void LSRInstance::CollectChains() {
User::op_iterator IVOpIter = findIVOperand(I->op_begin(), IVOpEnd, L, SE);
while (IVOpIter != IVOpEnd) {
Instruction *IVOpInst = cast<Instruction>(*IVOpIter);
- if (UniqueOperands.insert(IVOpInst))
+ if (UniqueOperands.insert(IVOpInst).second)
ChainInstruction(I, IVOpInst, ChainUsersVec);
IVOpIter = findIVOperand(std::next(IVOpIter), IVOpEnd, L, SE);
}
@@ -3122,7 +3122,7 @@ LSRInstance::CollectLoopInvariantFixupsAndFormulae() {
const SCEV *S = Worklist.pop_back_val();
// Don't process the same SCEV twice
- if (!Visited.insert(S))
+ if (!Visited.insert(S).second)
continue;
if (const SCEVNAryExpr *N = dyn_cast<SCEVNAryExpr>(S))
@@ -3774,7 +3774,7 @@ void LSRInstance::GenerateCrossUseConstantOffsets() {
for (int LUIdx = UsedByIndices.find_first(); LUIdx != -1;
LUIdx = UsedByIndices.find_next(LUIdx))
// Make a memo of this use, offset, and register tuple.
- if (UniqueItems.insert(std::make_pair(LUIdx, Imm)))
+ if (UniqueItems.insert(std::make_pair(LUIdx, Imm)).second)
WorkItems.push_back(WorkItem(LUIdx, Imm, OrigReg));
}
}
diff --git a/llvm/lib/Transforms/Scalar/Reassociate.cpp b/llvm/lib/Transforms/Scalar/Reassociate.cpp
index b4bc9d4a468..e75fa8017bd 100644
--- a/llvm/lib/Transforms/Scalar/Reassociate.cpp
+++ b/llvm/lib/Transforms/Scalar/Reassociate.cpp
@@ -623,7 +623,7 @@ static bool LinearizeExprTree(BinaryOperator *I,
// If this is a binary operation of the right kind with only one use then
// add its operands to the expression.
if (BinaryOperator *BO = isReassociableOp(Op, Opcode)) {
- assert(Visited.insert(Op) && "Not first visit!");
+ assert(Visited.insert(Op).second && "Not first visit!");
DEBUG(dbgs() << "DIRECT ADD: " << *Op << " (" << Weight << ")\n");
Worklist.push_back(std::make_pair(BO, Weight));
continue;
@@ -633,7 +633,7 @@ static bool LinearizeExprTree(BinaryOperator *I,
LeafMap::iterator It = Leaves.find(Op);
if (It == Leaves.end()) {
// Not in the leaf map. Must be the first time we saw this operand.
- assert(Visited.insert(Op) && "Not first visit!");
+ assert(Visited.insert(Op).second && "Not first visit!");
if (!Op->hasOneUse()) {
// This value has uses not accounted for by the expression, so it is
// not safe to modify. Mark it as being a leaf.
@@ -1609,7 +1609,7 @@ Value *Reassociate::OptimizeAdd(Instruction *I,
SmallPtrSet<Value*, 8> Duplicates;
for (unsigned i = 0, e = Factors.size(); i != e; ++i) {
Value *Factor = Factors[i];
- if (!Duplicates.insert(Factor))
+ if (!Duplicates.insert(Factor).second)
continue;
unsigned Occ = ++FactorOccurrences[Factor];
@@ -1960,7 +1960,7 @@ void Reassociate::EraseInst(Instruction *I) {
// and add that since that's where optimization actually happens.
unsigned Opcode = Op->getOpcode();
while (Op->hasOneUse() && Op->user_back()->getOpcode() == Opcode &&
- Visited.insert(Op))
+ Visited.insert(Op).second)
Op = Op->user_back();
RedoInsts.insert(Op);
}
diff --git a/llvm/lib/Transforms/Scalar/SCCP.cpp b/llvm/lib/Transforms/Scalar/SCCP.cpp
index e973cdbf44c..cfc9a8e89fa 100644
--- a/llvm/lib/Transforms/Scalar/SCCP.cpp
+++ b/llvm/lib/Transforms/Scalar/SCCP.cpp
@@ -214,7 +214,8 @@ public:
///
/// This returns true if the block was not considered live before.
bool MarkBlockExecutable(BasicBlock *BB) {
- if (!BBExecutable.insert(BB)) return false;
+ if (!BBExecutable.insert(BB).second)
+ return false;
DEBUG(dbgs() << "Marking Block Executable: " << BB->getName() << '\n');
BBWorkList.push_back(BB); // Add the block to the work list!
return true;
diff --git a/llvm/lib/Transforms/Scalar/SROA.cpp b/llvm/lib/Transforms/Scalar/SROA.cpp
index 9dc00f8fe71..6135114eb17 100644
--- a/llvm/lib/Transforms/Scalar/SROA.cpp
+++ b/llvm/lib/Transforms/Scalar/SROA.cpp
@@ -349,7 +349,7 @@ public:
private:
void markAsDead(Instruction &I) {
- if (VisitedDeadInsts.insert(&I))
+ if (VisitedDeadInsts.insert(&I).second)
AS.DeadUsers.push_back(&I);
}
@@ -639,7 +639,7 @@ private:
}
for (User *U : I->users())
- if (Visited.insert(cast<Instruction>(U)))
+ if (Visited.insert(cast<Instruction>(U)).second)
Uses.push_back(std::make_pair(I, cast<Instruction>(U)));
} while (!Uses.empty());
@@ -848,7 +848,7 @@ public:
else
return false;
- } while (Visited.insert(Ptr));
+ } while (Visited.insert(Ptr).second);
return false;
}
@@ -1461,7 +1461,7 @@ static Value *getAdjustedPtr(IRBuilderTy &IRB, const DataLayout &DL, Value *Ptr,
break;
Offset += GEPOffset;
Ptr = GEP->getPointerOperand();
- if (!Visited.insert(Ptr))
+ if (!Visited.insert(Ptr).second)
break;
}
@@ -1498,7 +1498,7 @@ static Value *getAdjustedPtr(IRBuilderTy &IRB, const DataLayout &DL, Value *Ptr,
break;
}
assert(Ptr->getType()->isPointerTy() && "Unexpected operand type!");
- } while (Visited.insert(Ptr));
+ } while (Visited.insert(Ptr).second);
if (!OffsetPtr) {
if (!Int8Ptr) {
@@ -2861,7 +2861,7 @@ private:
/// This uses a set to de-duplicate users.
void enqueueUsers(Instruction &I) {
for (Use &U : I.uses())
- if (Visited.insert(U.getUser()))
+ if (Visited.insert(U.getUser()).second)
Queue.push_back(&U);
}
@@ -3588,7 +3588,7 @@ static void enqueueUsersInWorklist(Instruction &I,
SmallVectorImpl<Instruction *> &Worklist,
SmallPtrSetImpl<Instruction *> &Visited) {
for (User *U : I.users())
- if (Visited.insert(cast<Instruction>(U)))
+ if (Visited.insert(cast<Instruction>(U)).second)
Worklist.push_back(cast<Instruction>(U));
}
diff --git a/llvm/lib/Transforms/Scalar/SampleProfile.cpp b/llvm/lib/Transforms/Scalar/SampleProfile.cpp
index aa01508d25b..179bbf78366 100644
--- a/llvm/lib/Transforms/Scalar/SampleProfile.cpp
+++ b/llvm/lib/Transforms/Scalar/SampleProfile.cpp
@@ -305,7 +305,7 @@ void SampleProfileLoader::findEquivalencesFor(
for (auto *BB2 : Descendants) {
bool IsDomParent = DomTree->dominates(BB2, BB1);
bool IsInSameLoop = LI->getLoopFor(BB1) == LI->getLoopFor(BB2);
- if (BB1 != BB2 && VisitedBlocks.insert(BB2) && IsDomParent &&
+ if (BB1 != BB2 && VisitedBlocks.insert(BB2).second && IsDomParent &&
IsInSameLoop) {
EquivalenceClass[BB2] = BB1;
@@ -494,7 +494,7 @@ bool SampleProfileLoader::propagateThroughEdges(Function &F) {
<< " known. Set weight for block: ";
printBlockWeight(dbgs(), BB););
}
- if (VisitedBlocks.insert(BB))
+ if (VisitedBlocks.insert(BB).second)
Changed = true;
} else if (NumUnknownEdges == 1 && VisitedBlocks.count(BB)) {
// If there is a single unknown edge and the block has been
@@ -540,7 +540,7 @@ void SampleProfileLoader::buildEdges(Function &F) {
llvm_unreachable("Found a stale predecessors list in a basic block.");
for (pred_iterator PI = pred_begin(B1), PE = pred_end(B1); PI != PE; ++PI) {
BasicBlock *B2 = *PI;
- if (Visited.insert(B2))
+ if (Visited.insert(B2).second)
Predecessors[B1].push_back(B2);
}
@@ -550,7 +550,7 @@ void SampleProfileLoader::buildEdges(Function &F) {
llvm_unreachable("Found a stale successors list in a basic block.");
for (succ_iterator SI = succ_begin(B1), SE = succ_end(B1); SI != SE; ++SI) {
BasicBlock *B2 = *SI;
- if (Visited.insert(B2))
+ if (Visited.insert(B2).second)
Successors[B1].push_back(B2);
}
}
diff --git a/llvm/lib/Transforms/Scalar/ScalarReplAggregates.cpp b/llvm/lib/Transforms/Scalar/ScalarReplAggregates.cpp
index c6d6ec73761..f7fa9171862 100644
--- a/llvm/lib/Transforms/Scalar/ScalarReplAggregates.cpp
+++ b/llvm/lib/Transforms/Scalar/ScalarReplAggregates.cpp
@@ -1669,7 +1669,7 @@ void SROA::isSafePHISelectUseForScalarRepl(Instruction *I, uint64_t Offset,
AllocaInfo &Info) {
// If we've already checked this PHI, don't do it again.
if (PHINode *PN = dyn_cast<PHINode>(I))
- if (!Info.CheckedPHIs.insert(PN))
+ if (!Info.CheckedPHIs.insert(PN).second)
return;
for (User *U : I->users()) {
diff --git a/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp b/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp
index 6fe5e188b1a..65b1f142821 100644
--- a/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp
+++ b/llvm/lib/Transforms/Scalar/TailRecursionElimination.cpp
@@ -179,7 +179,7 @@ struct AllocaDerivedValueTracker {
auto AddUsesToWorklist = [&](Value *V) {
for (auto &U : V->uses()) {
- if (!Visited.insert(&U))
+ if (!Visited.insert(&U).second)
continue;
Worklist.push_back(&U);
}