author    Benjamin Kramer <benny.kra@googlemail.com>  2015-02-28 10:11:12 +0000
committer Benjamin Kramer <benny.kra@googlemail.com>  2015-02-28 10:11:12 +0000
commit    4f6ac16292f5435cd00782757e6794f2bf6b64f5 (patch)
tree      bbba5b9d8e7f5e816bce01feb0dd5c3c62365821 /llvm/lib/CodeGen
parent    b759340fc82a209e36455af5e9dc4e6c0d9e7dfb (diff)
Replace std::copy with a back inserter with vector append where feasible
All of the cases were just appending from random-access iterators to a vector. Using insert/append can grow the vector to the exact final size directly and moves the growing out of the loop. No intended functionality change.

llvm-svn: 230845
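For context, here is a minimal standalone sketch (not part of this commit) of the two patterns. It uses std::vector rather than LLVM's SmallVector, but the trade-off is the same: a back inserter grows the destination one push_back at a time, while a range insert/append sees both random-access iterators at once and can size the destination in a single step.

    #include <algorithm>
    #include <iterator>
    #include <vector>

    int main() {
      std::vector<unsigned> Src = {1, 2, 3, 4};

      // Pattern being replaced: back_inserter calls push_back once per
      // element, so the destination may reallocate several times while
      // the copy is running.
      std::vector<unsigned> Dst1;
      std::copy(Src.begin(), Src.end(), std::back_inserter(Dst1));

      // Preferred pattern: a range insert computes the number of incoming
      // elements up front (the iterators are random-access), grows the
      // buffer once, then copies. SmallVector::append(begin, end) in LLVM
      // follows the same idea.
      std::vector<unsigned> Dst2;
      Dst2.insert(Dst2.end(), Src.begin(), Src.end());
      return 0;
    }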
Diffstat (limited to 'llvm/lib/CodeGen')
-rw-r--r--  llvm/lib/CodeGen/Analysis.cpp            9
-rw-r--r--  llvm/lib/CodeGen/IfConversion.cpp       10
-rw-r--r--  llvm/lib/CodeGen/PeepholeOptimizer.cpp   3
3 files changed, 8 insertions, 14 deletions
diff --git a/llvm/lib/CodeGen/Analysis.cpp b/llvm/lib/CodeGen/Analysis.cpp
index e50b846ddd8..8e11fe1c9cf 100644
--- a/llvm/lib/CodeGen/Analysis.cpp
+++ b/llvm/lib/CodeGen/Analysis.cpp
@@ -312,8 +312,7 @@ static const Value *getNoopInput(const Value *V,
// previous aggregate. Combine the two paths to obtain the true address of
// our element.
ArrayRef<unsigned> ExtractLoc = EVI->getIndices();
- std::copy(ExtractLoc.rbegin(), ExtractLoc.rend(),
- std::back_inserter(ValLoc));
+ ValLoc.append(ExtractLoc.rbegin(), ExtractLoc.rend());
NoopInput = Op;
}
// Terminate if we couldn't find anything to look through.
@@ -601,10 +600,8 @@ bool llvm::returnTypeIsEligibleForTailCall(const Function *F,
// The manipulations performed when we're looking through an insertvalue or
// an extractvalue would happen at the front of the RetPath list, so since
// we have to copy it anyway it's more efficient to create a reversed copy.
- using std::copy;
- SmallVector<unsigned, 4> TmpRetPath, TmpCallPath;
- copy(RetPath.rbegin(), RetPath.rend(), std::back_inserter(TmpRetPath));
- copy(CallPath.rbegin(), CallPath.rend(), std::back_inserter(TmpCallPath));
+ SmallVector<unsigned, 4> TmpRetPath(RetPath.rbegin(), RetPath.rend());
+ SmallVector<unsigned, 4> TmpCallPath(CallPath.rbegin(), CallPath.rend());
// Finally, we can check whether the value produced by the tail call at this
// index is compatible with the value we return.
diff --git a/llvm/lib/CodeGen/IfConversion.cpp b/llvm/lib/CodeGen/IfConversion.cpp
index 7a295699cae..1a1f2a9d03c 100644
--- a/llvm/lib/CodeGen/IfConversion.cpp
+++ b/llvm/lib/CodeGen/IfConversion.cpp
@@ -1555,7 +1555,7 @@ void IfConverter::PredicateBlock(BBInfo &BBI,
UpdatePredRedefs(I, Redefs);
}
- std::copy(Cond.begin(), Cond.end(), std::back_inserter(BBI.Predicate));
+ BBI.Predicate.append(Cond.begin(), Cond.end());
BBI.IsAnalyzed = false;
BBI.NonPredSize = 0;
@@ -1620,9 +1620,8 @@ void IfConverter::CopyAndPredicateBlock(BBInfo &ToBBI, BBInfo &FromBBI,
}
}
- std::copy(FromBBI.Predicate.begin(), FromBBI.Predicate.end(),
- std::back_inserter(ToBBI.Predicate));
- std::copy(Cond.begin(), Cond.end(), std::back_inserter(ToBBI.Predicate));
+ ToBBI.Predicate.append(FromBBI.Predicate.begin(), FromBBI.Predicate.end());
+ ToBBI.Predicate.append(Cond.begin(), Cond.end());
ToBBI.ClobbersPred |= FromBBI.ClobbersPred;
ToBBI.IsAnalyzed = false;
@@ -1661,8 +1660,7 @@ void IfConverter::MergeBlocks(BBInfo &ToBBI, BBInfo &FromBBI, bool AddEdges) {
if (NBB && !FromBBI.BB->isSuccessor(NBB))
FromBBI.BB->addSuccessor(NBB);
- std::copy(FromBBI.Predicate.begin(), FromBBI.Predicate.end(),
- std::back_inserter(ToBBI.Predicate));
+ ToBBI.Predicate.append(FromBBI.Predicate.begin(), FromBBI.Predicate.end());
FromBBI.Predicate.clear();
ToBBI.NonPredSize += FromBBI.NonPredSize;
diff --git a/llvm/lib/CodeGen/PeepholeOptimizer.cpp b/llvm/lib/CodeGen/PeepholeOptimizer.cpp
index 283d1f26198..2800d846ff3 100644
--- a/llvm/lib/CodeGen/PeepholeOptimizer.cpp
+++ b/llvm/lib/CodeGen/PeepholeOptimizer.cpp
@@ -411,8 +411,7 @@ optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
if (ExtendLife && !ExtendedUses.empty())
// Extend the liveness of the extension result.
- std::copy(ExtendedUses.begin(), ExtendedUses.end(),
- std::back_inserter(Uses));
+ Uses.append(ExtendedUses.begin(), ExtendedUses.end());
// Now replace all uses.
bool Changed = false;