Diffstat (limited to 'llvm')
 -rw-r--r--  llvm/include/llvm/Analysis/MemoryDependenceAnalysis.h | 821
 -rw-r--r--  llvm/lib/Analysis/MemoryDependenceAnalysis.cpp        | 298
2 files changed, 563 insertions, 556 deletions
diff --git a/llvm/include/llvm/Analysis/MemoryDependenceAnalysis.h b/llvm/include/llvm/Analysis/MemoryDependenceAnalysis.h
index daa1ba91c07..66cc041e5b0 100644
--- a/llvm/include/llvm/Analysis/MemoryDependenceAnalysis.h
+++ b/llvm/include/llvm/Analysis/MemoryDependenceAnalysis.h
@@ -24,443 +24,440 @@
#include "llvm/Pass.h"
namespace llvm {
- class Function;
- class FunctionPass;
- class Instruction;
- class CallSite;
- class AssumptionCache;
- class MemoryDependenceAnalysis;
- class PredIteratorCache;
- class DominatorTree;
- class PHITransAddr;
-
- /// MemDepResult - A memory dependence query can return one of three different
- /// answers, described below.
- class MemDepResult {
- enum DepType {
- /// Invalid - Clients of MemDep never see this.
- Invalid = 0,
-
- /// Clobber - This is a dependence on the specified instruction which
- /// clobbers the desired value. The pointer member of the MemDepResult
- /// pair holds the instruction that clobbers the memory. For example,
- /// this occurs when we see a may-aliased store to the memory location we
- /// care about.
- ///
- /// There are several cases that may be interesting here:
- /// 1. Loads are clobbered by may-alias stores.
- /// 2. Loads are considered clobbered by partially-aliased loads. The
- /// client may choose to analyze deeper into these cases.
- Clobber,
-
- /// Def - This is a dependence on the specified instruction which
- /// defines/produces the desired memory location. The pointer member of
- /// the MemDepResult pair holds the instruction that defines the memory.
- /// Cases of interest:
- /// 1. This could be a load or store for dependence queries on
- /// load/store. The value loaded or stored is the produced value.
- /// Note that the pointer operand may be different than that of the
- /// queried pointer due to must aliases and phi translation. Note
- /// that the def may not be the same type as the query, the pointers
- /// may just be must aliases.
- /// 2. For loads and stores, this could be an allocation instruction. In
- /// this case, the load is loading an undef value or a store is the
- /// first store to (that part of) the allocation.
- /// 3. Dependence queries on calls return Def only when they are
- /// readonly calls or memory use intrinsics with identical callees
- /// and no intervening clobbers. No validation is done that the
- /// operands to the calls are the same.
- Def,
-
- /// Other - This marker indicates that the query has no known dependency
- /// in the specified block. More detailed state info is encoded in the
- /// upper part of the pair (i.e. the Instruction*)
- Other
- };
- /// If DepType is "Other", the upper part of the pair
- /// (i.e. the Instruction* part) is instead used to encode more detailed
- /// type information as follows
- enum OtherType {
- /// NonLocal - This marker indicates that the query has no dependency in
- /// the specified block. To find out more, the client should query other
- /// predecessor blocks.
- NonLocal = 0x4,
- /// NonFuncLocal - This marker indicates that the query has no
- /// dependency in the specified function.
- NonFuncLocal = 0x8,
- /// Unknown - This marker indicates that the query dependency
- /// is unknown.
- Unknown = 0xc
- };
-
- typedef PointerIntPair<Instruction*, 2, DepType> PairTy;
- PairTy Value;
- explicit MemDepResult(PairTy V) : Value(V) {}
-
- public:
- MemDepResult() : Value(nullptr, Invalid) {}
-
- /// get methods: These are static ctor methods for creating various
- /// MemDepResult kinds.
- static MemDepResult getDef(Instruction *Inst) {
- assert(Inst && "Def requires inst");
- return MemDepResult(PairTy(Inst, Def));
- }
- static MemDepResult getClobber(Instruction *Inst) {
- assert(Inst && "Clobber requires inst");
- return MemDepResult(PairTy(Inst, Clobber));
- }
- static MemDepResult getNonLocal() {
- return MemDepResult(
- PairTy(reinterpret_cast<Instruction*>(NonLocal), Other));
- }
- static MemDepResult getNonFuncLocal() {
- return MemDepResult(
- PairTy(reinterpret_cast<Instruction*>(NonFuncLocal), Other));
- }
- static MemDepResult getUnknown() {
- return MemDepResult(
- PairTy(reinterpret_cast<Instruction*>(Unknown), Other));
- }
-
- /// isClobber - Return true if this MemDepResult represents a query that is
- /// an instruction clobber dependency.
- bool isClobber() const { return Value.getInt() == Clobber; }
-
- /// isDef - Return true if this MemDepResult represents a query that is
- /// an instruction definition dependency.
- bool isDef() const { return Value.getInt() == Def; }
-
- /// isNonLocal - Return true if this MemDepResult represents a query that
- /// is transparent to the start of the block, but where a non-local hasn't
- /// been done.
- bool isNonLocal() const {
- return Value.getInt() == Other
- && Value.getPointer() == reinterpret_cast<Instruction*>(NonLocal);
- }
-
- /// isNonFuncLocal - Return true if this MemDepResult represents a query
- /// that is transparent to the start of the function.
- bool isNonFuncLocal() const {
- return Value.getInt() == Other
- && Value.getPointer() == reinterpret_cast<Instruction*>(NonFuncLocal);
- }
-
- /// isUnknown - Return true if this MemDepResult represents a query which
- /// cannot and/or will not be computed.
- bool isUnknown() const {
- return Value.getInt() == Other
- && Value.getPointer() == reinterpret_cast<Instruction*>(Unknown);
- }
-
- /// getInst() - If this is a normal dependency, return the instruction that
- /// is depended on. Otherwise, return null.
- Instruction *getInst() const {
- if (Value.getInt() == Other) return nullptr;
- return Value.getPointer();
- }
-
- bool operator==(const MemDepResult &M) const { return Value == M.Value; }
- bool operator!=(const MemDepResult &M) const { return Value != M.Value; }
- bool operator<(const MemDepResult &M) const { return Value < M.Value; }
- bool operator>(const MemDepResult &M) const { return Value > M.Value; }
-
- private:
- friend class MemoryDependenceAnalysis;
- /// Dirty - Entries with this marker occur in a LocalDeps map or
- /// NonLocalDeps map when the instruction they previously referenced was
- /// removed from MemDep. In either case, the entry may include an
- /// instruction pointer. If so, the pointer is an instruction in the
- /// block where scanning can start from, saving some work.
- ///
- /// In a default-constructed MemDepResult object, the type will be Dirty
- /// and the instruction pointer will be null.
+class Function;
+class FunctionPass;
+class Instruction;
+class CallSite;
+class AssumptionCache;
+class MemoryDependenceAnalysis;
+class PredIteratorCache;
+class DominatorTree;
+class PHITransAddr;
+
+/// MemDepResult - A memory dependence query can return one of three different
+/// answers, described below.
+class MemDepResult {
+ enum DepType {
+ /// Invalid - Clients of MemDep never see this.
+ Invalid = 0,
+
+ /// Clobber - This is a dependence on the specified instruction which
+ /// clobbers the desired value. The pointer member of the MemDepResult
+ /// pair holds the instruction that clobbers the memory. For example,
+ /// this occurs when we see a may-aliased store to the memory location we
+ /// care about.
///
+ /// There are several cases that may be interesting here:
+ /// 1. Loads are clobbered by may-alias stores.
+ /// 2. Loads are considered clobbered by partially-aliased loads. The
+ /// client may choose to analyze deeper into these cases.
+ Clobber,
+
+ /// Def - This is a dependence on the specified instruction which
+ /// defines/produces the desired memory location. The pointer member of
+ /// the MemDepResult pair holds the instruction that defines the memory.
+ /// Cases of interest:
+ /// 1. This could be a load or store for dependence queries on
+ /// load/store. The value loaded or stored is the produced value.
+ /// Note that the pointer operand may be different than that of the
+ /// queried pointer due to must aliases and phi translation. Note
+ /// that the def may not be the same type as the query, the pointers
+ /// may just be must aliases.
+ /// 2. For loads and stores, this could be an allocation instruction. In
+ /// this case, the load is loading an undef value or a store is the
+ /// first store to (that part of) the allocation.
+ /// 3. Dependence queries on calls return Def only when they are
+ /// readonly calls or memory use intrinsics with identical callees
+ /// and no intervening clobbers. No validation is done that the
+ /// operands to the calls are the same.
+ Def,
+
+ /// Other - This marker indicates that the query has no known dependency
+ /// in the specified block. More detailed state info is encoded in the
+ /// upper part of the pair (i.e. the Instruction*)
+ Other
+ };
+ /// If DepType is "Other", the upper part of the pair
+ /// (i.e. the Instruction* part) is instead used to encode more detailed
+ /// type information as follows
+ enum OtherType {
+ /// NonLocal - This marker indicates that the query has no dependency in
+ /// the specified block. To find out more, the client should query other
+ /// predecessor blocks.
+ NonLocal = 0x4,
+ /// NonFuncLocal - This marker indicates that the query has no
+ /// dependency in the specified function.
+ NonFuncLocal = 0x8,
+ /// Unknown - This marker indicates that the query dependency
+ /// is unknown.
+ Unknown = 0xc
+ };
- /// isDirty - Return true if this is a MemDepResult in its dirty/invalid.
- /// state.
- bool isDirty() const { return Value.getInt() == Invalid; }
+ typedef PointerIntPair<Instruction *, 2, DepType> PairTy;
+ PairTy Value;
+ explicit MemDepResult(PairTy V) : Value(V) {}
+
+public:
+ MemDepResult() : Value(nullptr, Invalid) {}
+
+ /// get methods: These are static ctor methods for creating various
+ /// MemDepResult kinds.
+ static MemDepResult getDef(Instruction *Inst) {
+ assert(Inst && "Def requires inst");
+ return MemDepResult(PairTy(Inst, Def));
+ }
+ static MemDepResult getClobber(Instruction *Inst) {
+ assert(Inst && "Clobber requires inst");
+ return MemDepResult(PairTy(Inst, Clobber));
+ }
+ static MemDepResult getNonLocal() {
+ return MemDepResult(
+ PairTy(reinterpret_cast<Instruction *>(NonLocal), Other));
+ }
+ static MemDepResult getNonFuncLocal() {
+ return MemDepResult(
+ PairTy(reinterpret_cast<Instruction *>(NonFuncLocal), Other));
+ }
+ static MemDepResult getUnknown() {
+ return MemDepResult(
+ PairTy(reinterpret_cast<Instruction *>(Unknown), Other));
+ }
+
+ /// isClobber - Return true if this MemDepResult represents a query that is
+ /// an instruction clobber dependency.
+ bool isClobber() const { return Value.getInt() == Clobber; }
+
+ /// isDef - Return true if this MemDepResult represents a query that is
+ /// an instruction definition dependency.
+ bool isDef() const { return Value.getInt() == Def; }
+
+ /// isNonLocal - Return true if this MemDepResult represents a query that
+ /// is transparent to the start of the block, but where a non-local hasn't
+ /// been done.
+ bool isNonLocal() const {
+ return Value.getInt() == Other &&
+ Value.getPointer() == reinterpret_cast<Instruction *>(NonLocal);
+ }
+
+ /// isNonFuncLocal - Return true if this MemDepResult represents a query
+ /// that is transparent to the start of the function.
+ bool isNonFuncLocal() const {
+ return Value.getInt() == Other &&
+ Value.getPointer() == reinterpret_cast<Instruction *>(NonFuncLocal);
+ }
+
+ /// isUnknown - Return true if this MemDepResult represents a query which
+ /// cannot and/or will not be computed.
+ bool isUnknown() const {
+ return Value.getInt() == Other &&
+ Value.getPointer() == reinterpret_cast<Instruction *>(Unknown);
+ }
+
+ /// getInst() - If this is a normal dependency, return the instruction that
+ /// is depended on. Otherwise, return null.
+ Instruction *getInst() const {
+ if (Value.getInt() == Other)
+ return nullptr;
+ return Value.getPointer();
+ }
+
+ bool operator==(const MemDepResult &M) const { return Value == M.Value; }
+ bool operator!=(const MemDepResult &M) const { return Value != M.Value; }
+ bool operator<(const MemDepResult &M) const { return Value < M.Value; }
+ bool operator>(const MemDepResult &M) const { return Value > M.Value; }
+
+private:
+ friend class MemoryDependenceAnalysis;
+ /// Dirty - Entries with this marker occur in a LocalDeps map or
+ /// NonLocalDeps map when the instruction they previously referenced was
+ /// removed from MemDep. In either case, the entry may include an
+ /// instruction pointer. If so, the pointer is an instruction in the
+ /// block where scanning can start from, saving some work.
+ ///
+ /// In a default-constructed MemDepResult object, the type will be Dirty
+ /// and the instruction pointer will be null.
+ ///
- static MemDepResult getDirty(Instruction *Inst) {
- return MemDepResult(PairTy(Inst, Invalid));
- }
- };
+ /// isDirty - Return true if this is a MemDepResult in its dirty/invalid.
+ /// state.
+ bool isDirty() const { return Value.getInt() == Invalid; }
+
+ static MemDepResult getDirty(Instruction *Inst) {
+ return MemDepResult(PairTy(Inst, Invalid));
+ }
+};
- /// NonLocalDepEntry - This is an entry in the NonLocalDepInfo cache. For
- /// each BasicBlock (the BB entry) it keeps a MemDepResult.
- class NonLocalDepEntry {
- BasicBlock *BB;
- MemDepResult Result;
+/// NonLocalDepEntry - This is an entry in the NonLocalDepInfo cache. For
+/// each BasicBlock (the BB entry) it keeps a MemDepResult.
+class NonLocalDepEntry {
+ BasicBlock *BB;
+ MemDepResult Result;
- public:
- NonLocalDepEntry(BasicBlock *bb, MemDepResult result)
+public:
+ NonLocalDepEntry(BasicBlock *bb, MemDepResult result)
: BB(bb), Result(result) {}
- // This is used for searches.
- NonLocalDepEntry(BasicBlock *bb) : BB(bb) {}
+ // This is used for searches.
+ NonLocalDepEntry(BasicBlock *bb) : BB(bb) {}
- // BB is the sort key, it can't be changed.
- BasicBlock *getBB() const { return BB; }
+ // BB is the sort key, it can't be changed.
+ BasicBlock *getBB() const { return BB; }
- void setResult(const MemDepResult &R) { Result = R; }
+ void setResult(const MemDepResult &R) { Result = R; }
- const MemDepResult &getResult() const { return Result; }
+ const MemDepResult &getResult() const { return Result; }
- bool operator<(const NonLocalDepEntry &RHS) const {
- return BB < RHS.BB;
- }
- };
+ bool operator<(const NonLocalDepEntry &RHS) const { return BB < RHS.BB; }
+};
- /// NonLocalDepResult - This is a result from a NonLocal dependence query.
- /// For each BasicBlock (the BB entry) it keeps a MemDepResult and the
- /// (potentially phi translated) address that was live in the block.
- class NonLocalDepResult {
- NonLocalDepEntry Entry;
- Value *Address;
+/// NonLocalDepResult - This is a result from a NonLocal dependence query.
+/// For each BasicBlock (the BB entry) it keeps a MemDepResult and the
+/// (potentially phi translated) address that was live in the block.
+class NonLocalDepResult {
+ NonLocalDepEntry Entry;
+ Value *Address;
- public:
- NonLocalDepResult(BasicBlock *bb, MemDepResult result, Value *address)
+public:
+ NonLocalDepResult(BasicBlock *bb, MemDepResult result, Value *address)
: Entry(bb, result), Address(address) {}
- // BB is the sort key, it can't be changed.
- BasicBlock *getBB() const { return Entry.getBB(); }
+ // BB is the sort key, it can't be changed.
+ BasicBlock *getBB() const { return Entry.getBB(); }
- void setResult(const MemDepResult &R, Value *Addr) {
- Entry.setResult(R);
- Address = Addr;
- }
+ void setResult(const MemDepResult &R, Value *Addr) {
+ Entry.setResult(R);
+ Address = Addr;
+ }
- const MemDepResult &getResult() const { return Entry.getResult(); }
+ const MemDepResult &getResult() const { return Entry.getResult(); }
- /// getAddress - Return the address of this pointer in this block. This can
- /// be different than the address queried for the non-local result because
- /// of phi translation. This returns null if the address was not available
- /// in a block (i.e. because phi translation failed) or if this is a cached
- /// result and that address was deleted.
- ///
- /// The address is always null for a non-local 'call' dependence.
- Value *getAddress() const { return Address; }
+ /// getAddress - Return the address of this pointer in this block. This can
+ /// be different than the address queried for the non-local result because
+ /// of phi translation. This returns null if the address was not available
+ /// in a block (i.e. because phi translation failed) or if this is a cached
+ /// result and that address was deleted.
+ ///
+ /// The address is always null for a non-local 'call' dependence.
+ Value *getAddress() const { return Address; }
+};
+
+/// MemoryDependenceAnalysis - This is an analysis that determines, for a
+/// given memory operation, what preceding memory operations it depends on.
+/// It builds on alias analysis information, and tries to provide a lazy,
+/// caching interface to a common kind of alias information query.
+///
+/// The dependency information returned is somewhat unusual, but is pragmatic.
+/// If queried about a store or call that might modify memory, the analysis
+/// will return the instruction[s] that may either load from that memory or
+/// store to it. If queried with a load or call that can never modify memory,
+/// the analysis will return calls and stores that might modify the pointer,
+/// but generally does not return loads unless a) they are volatile, or
+/// b) they load from *must-aliased* pointers. Returning a dependence on
+/// must-alias'd pointers instead of all pointers interacts well with the
+/// internal caching mechanism.
+///
+class MemoryDependenceAnalysis : public FunctionPass {
+ // A map from instructions to their dependency.
+ typedef DenseMap<Instruction *, MemDepResult> LocalDepMapType;
+ LocalDepMapType LocalDeps;
+
+public:
+ typedef std::vector<NonLocalDepEntry> NonLocalDepInfo;
+
+private:
+ /// ValueIsLoadPair - This is a pair<Value*, bool> where the bool is true if
+ /// the dependence is a read only dependence, false if read/write.
+ typedef PointerIntPair<const Value *, 1, bool> ValueIsLoadPair;
+
+ /// BBSkipFirstBlockPair - This pair is used when caching information for a
+ /// block. If the pointer is null, the cache value is not a full query that
+ /// starts at the specified block. If non-null, the bool indicates whether
+ /// or not the contents of the block was skipped.
+ typedef PointerIntPair<BasicBlock *, 1, bool> BBSkipFirstBlockPair;
+
+ /// NonLocalPointerInfo - This record is the information kept for each
+ /// (value, is load) pair.
+ struct NonLocalPointerInfo {
+ /// Pair - The pair of the block and the skip-first-block flag.
+ BBSkipFirstBlockPair Pair;
+ /// NonLocalDeps - The results of the query for each relevant block.
+ NonLocalDepInfo NonLocalDeps;
+ /// Size - The maximum size of the dereferences of the
+ /// pointer. May be UnknownSize if the sizes are unknown.
+ uint64_t Size;
+ /// AATags - The AA tags associated with dereferences of the
+ /// pointer. The members may be null if there are no tags or
+ /// conflicting tags.
+ AAMDNodes AATags;
+
+ NonLocalPointerInfo() : Size(MemoryLocation::UnknownSize) {}
};
- /// MemoryDependenceAnalysis - This is an analysis that determines, for a
- /// given memory operation, what preceding memory operations it depends on.
- /// It builds on alias analysis information, and tries to provide a lazy,
- /// caching interface to a common kind of alias information query.
+ /// CachedNonLocalPointerInfo - This map stores the cached results of doing
+ /// a pointer lookup at the bottom of a block. The key of this map is the
+ /// pointer+isload bit, the value is a list of <bb->result> mappings.
+ typedef DenseMap<ValueIsLoadPair, NonLocalPointerInfo>
+ CachedNonLocalPointerInfo;
+ CachedNonLocalPointerInfo NonLocalPointerDeps;
+
+ // A map from instructions to their non-local pointer dependencies.
+ typedef DenseMap<Instruction *, SmallPtrSet<ValueIsLoadPair, 4>>
+ ReverseNonLocalPtrDepTy;
+ ReverseNonLocalPtrDepTy ReverseNonLocalPtrDeps;
+
+ /// PerInstNLInfo - This is the instruction we keep for each cached access
+ /// that we have for an instruction. The pointer is an owning pointer and
+ /// the bool indicates whether we have any dirty bits in the set.
+ typedef std::pair<NonLocalDepInfo, bool> PerInstNLInfo;
+
+ // A map from instructions to their non-local dependencies.
+ typedef DenseMap<Instruction *, PerInstNLInfo> NonLocalDepMapType;
+
+ NonLocalDepMapType NonLocalDeps;
+
+ // A reverse mapping from dependencies to the dependees. This is
+ // used when removing instructions to keep the cache coherent.
+ typedef DenseMap<Instruction *, SmallPtrSet<Instruction *, 4>>
+ ReverseDepMapType;
+ ReverseDepMapType ReverseLocalDeps;
+
+ // A reverse mapping from dependencies to the non-local dependees.
+ ReverseDepMapType ReverseNonLocalDeps;
+
+ /// Current AA implementation, just a cache.
+ AliasAnalysis *AA;
+ DominatorTree *DT;
+ AssumptionCache *AC;
+ const TargetLibraryInfo *TLI;
+ PredIteratorCache PredCache;
+
+public:
+ MemoryDependenceAnalysis();
+ ~MemoryDependenceAnalysis() override;
+ static char ID;
+
+ /// Pass Implementation stuff. This doesn't do any analysis eagerly.
+ bool runOnFunction(Function &) override;
+
+ /// Clean up memory in between runs
+ void releaseMemory() override;
+
+ /// getAnalysisUsage - Does not modify anything. It uses Value Numbering
+ /// and Alias Analysis.
///
- /// The dependency information returned is somewhat unusual, but is pragmatic.
- /// If queried about a store or call that might modify memory, the analysis
- /// will return the instruction[s] that may either load from that memory or
- /// store to it. If queried with a load or call that can never modify memory,
- /// the analysis will return calls and stores that might modify the pointer,
- /// but generally does not return loads unless a) they are volatile, or
- /// b) they load from *must-aliased* pointers. Returning a dependence on
- /// must-alias'd pointers instead of all pointers interacts well with the
- /// internal caching mechanism.
- ///
- class MemoryDependenceAnalysis : public FunctionPass {
- // A map from instructions to their dependency.
- typedef DenseMap<Instruction*, MemDepResult> LocalDepMapType;
- LocalDepMapType LocalDeps;
-
- public:
- typedef std::vector<NonLocalDepEntry> NonLocalDepInfo;
-
- private:
- /// ValueIsLoadPair - This is a pair<Value*, bool> where the bool is true if
- /// the dependence is a read only dependence, false if read/write.
- typedef PointerIntPair<const Value*, 1, bool> ValueIsLoadPair;
-
- /// BBSkipFirstBlockPair - This pair is used when caching information for a
- /// block. If the pointer is null, the cache value is not a full query that
- /// starts at the specified block. If non-null, the bool indicates whether
- /// or not the contents of the block was skipped.
- typedef PointerIntPair<BasicBlock*, 1, bool> BBSkipFirstBlockPair;
-
- /// NonLocalPointerInfo - This record is the information kept for each
- /// (value, is load) pair.
- struct NonLocalPointerInfo {
- /// Pair - The pair of the block and the skip-first-block flag.
- BBSkipFirstBlockPair Pair;
- /// NonLocalDeps - The results of the query for each relevant block.
- NonLocalDepInfo NonLocalDeps;
- /// Size - The maximum size of the dereferences of the
- /// pointer. May be UnknownSize if the sizes are unknown.
- uint64_t Size;
- /// AATags - The AA tags associated with dereferences of the
- /// pointer. The members may be null if there are no tags or
- /// conflicting tags.
- AAMDNodes AATags;
-
- NonLocalPointerInfo() : Size(MemoryLocation::UnknownSize) {}
- };
-
- /// CachedNonLocalPointerInfo - This map stores the cached results of doing
- /// a pointer lookup at the bottom of a block. The key of this map is the
- /// pointer+isload bit, the value is a list of <bb->result> mappings.
- typedef DenseMap<ValueIsLoadPair,
- NonLocalPointerInfo> CachedNonLocalPointerInfo;
- CachedNonLocalPointerInfo NonLocalPointerDeps;
-
- // A map from instructions to their non-local pointer dependencies.
- typedef DenseMap<Instruction*,
- SmallPtrSet<ValueIsLoadPair, 4> > ReverseNonLocalPtrDepTy;
- ReverseNonLocalPtrDepTy ReverseNonLocalPtrDeps;
-
- /// PerInstNLInfo - This is the instruction we keep for each cached access
- /// that we have for an instruction. The pointer is an owning pointer and
- /// the bool indicates whether we have any dirty bits in the set.
- typedef std::pair<NonLocalDepInfo, bool> PerInstNLInfo;
-
- // A map from instructions to their non-local dependencies.
- typedef DenseMap<Instruction*, PerInstNLInfo> NonLocalDepMapType;
-
- NonLocalDepMapType NonLocalDeps;
-
- // A reverse mapping from dependencies to the dependees. This is
- // used when removing instructions to keep the cache coherent.
- typedef DenseMap<Instruction*,
- SmallPtrSet<Instruction*, 4> > ReverseDepMapType;
- ReverseDepMapType ReverseLocalDeps;
-
- // A reverse mapping from dependencies to the non-local dependees.
- ReverseDepMapType ReverseNonLocalDeps;
-
- /// Current AA implementation, just a cache.
- AliasAnalysis *AA;
- DominatorTree *DT;
- AssumptionCache *AC;
- const TargetLibraryInfo *TLI;
- PredIteratorCache PredCache;
-
- public:
- MemoryDependenceAnalysis();
- ~MemoryDependenceAnalysis() override;
- static char ID;
-
- /// Pass Implementation stuff. This doesn't do any analysis eagerly.
- bool runOnFunction(Function &) override;
-
- /// Clean up memory in between runs
- void releaseMemory() override;
-
- /// getAnalysisUsage - Does not modify anything. It uses Value Numbering
- /// and Alias Analysis.
- ///
- void getAnalysisUsage(AnalysisUsage &AU) const override;
+ void getAnalysisUsage(AnalysisUsage &AU) const override;
- /// getDependency - Return the instruction on which a memory operation
- /// depends. See the class comment for more details. It is illegal to call
- /// this on non-memory instructions.
- MemDepResult getDependency(Instruction *QueryInst);
+ /// getDependency - Return the instruction on which a memory operation
+ /// depends. See the class comment for more details. It is illegal to call
+ /// this on non-memory instructions.
+ MemDepResult getDependency(Instruction *QueryInst);
- /// getNonLocalCallDependency - Perform a full dependency query for the
- /// specified call, returning the set of blocks that the value is
- /// potentially live across. The returned set of results will include a
- /// "NonLocal" result for all blocks where the value is live across.
- ///
- /// This method assumes the instruction returns a "NonLocal" dependency
- /// within its own block.
- ///
- /// This returns a reference to an internal data structure that may be
- /// invalidated on the next non-local query or when an instruction is
- /// removed. Clients must copy this data if they want it around longer than
- /// that.
- const NonLocalDepInfo &getNonLocalCallDependency(CallSite QueryCS);
-
- /// getNonLocalPointerDependency - Perform a full dependency query for an
- /// access to the QueryInst's specified memory location, returning the set
- /// of instructions that either define or clobber the value.
- ///
- /// Warning: For a volatile query instruction, the dependencies will be
- /// accurate, and thus usable for reordering, but it is never legal to
- /// remove the query instruction.
- ///
- /// This method assumes the pointer has a "NonLocal" dependency within
- /// QueryInst's parent basic block.
- void getNonLocalPointerDependency(Instruction *QueryInst,
+ /// getNonLocalCallDependency - Perform a full dependency query for the
+ /// specified call, returning the set of blocks that the value is
+ /// potentially live across. The returned set of results will include a
+ /// "NonLocal" result for all blocks where the value is live across.
+ ///
+ /// This method assumes the instruction returns a "NonLocal" dependency
+ /// within its own block.
+ ///
+ /// This returns a reference to an internal data structure that may be
+ /// invalidated on the next non-local query or when an instruction is
+ /// removed. Clients must copy this data if they want it around longer than
+ /// that.
+ const NonLocalDepInfo &getNonLocalCallDependency(CallSite QueryCS);
+
+ /// getNonLocalPointerDependency - Perform a full dependency query for an
+ /// access to the QueryInst's specified memory location, returning the set
+ /// of instructions that either define or clobber the value.
+ ///
+ /// Warning: For a volatile query instruction, the dependencies will be
+ /// accurate, and thus usable for reordering, but it is never legal to
+ /// remove the query instruction.
+ ///
+ /// This method assumes the pointer has a "NonLocal" dependency within
+ /// QueryInst's parent basic block.
+ void getNonLocalPointerDependency(Instruction *QueryInst,
SmallVectorImpl<NonLocalDepResult> &Result);
- /// removeInstruction - Remove an instruction from the dependence analysis,
- /// updating the dependence of instructions that previously depended on it.
- void removeInstruction(Instruction *InstToRemove);
-
- /// invalidateCachedPointerInfo - This method is used to invalidate cached
- /// information about the specified pointer, because it may be too
- /// conservative in memdep. This is an optional call that can be used when
- /// the client detects an equivalence between the pointer and some other
- /// value and replaces the other value with ptr. This can make Ptr available
- /// in more places that cached info does not necessarily keep.
- void invalidateCachedPointerInfo(Value *Ptr);
-
- /// invalidateCachedPredecessors - Clear the PredIteratorCache info.
- /// This needs to be done when the CFG changes, e.g., due to splitting
- /// critical edges.
- void invalidateCachedPredecessors();
-
- /// \brief Return the instruction on which a memory location depends.
- /// If isLoad is true, this routine ignores may-aliases with read-only
- /// operations. If isLoad is false, this routine ignores may-aliases
- /// with reads from read-only locations. If possible, pass the query
- /// instruction as well; this function may take advantage of the metadata
- /// annotated to the query instruction to refine the result.
- ///
- /// Note that this is an uncached query, and thus may be inefficient.
- ///
- MemDepResult getPointerDependencyFrom(const MemoryLocation &Loc,
- bool isLoad,
- BasicBlock::iterator ScanIt,
- BasicBlock *BB,
- Instruction *QueryInst = nullptr);
-
- MemDepResult getSimplePointerDependencyFrom(const MemoryLocation &MemLoc,
- bool isLoad,
- BasicBlock::iterator ScanIt,
- BasicBlock *BB,
- Instruction *QueryInst);
-
- /// This analysis looks for other loads and stores with invariant.group
- /// metadata and the same pointer operand. Returns Unknown if it does not
- /// find anything, and Def if it can be assumed that 2 instructions load or
- /// store the same value.
- /// FIXME: This analysis works only on single block because of restrictions
- /// at the call site.
- MemDepResult getInvariantGroupPointerDependency(LoadInst *LI,
- BasicBlock *BB);
-
- /// getLoadLoadClobberFullWidthSize - This is a little bit of analysis that
- /// looks at a memory location for a load (specified by MemLocBase, Offs,
- /// and Size) and compares it against a load. If the specified load could
- /// be safely widened to a larger integer load that is 1) still efficient,
- /// 2) safe for the target, and 3) would provide the specified memory
- /// location value, then this function returns the size in bytes of the
- /// load width to use. If not, this returns zero.
- static unsigned getLoadLoadClobberFullWidthSize(const Value *MemLocBase,
- int64_t MemLocOffs,
- unsigned MemLocSize,
- const LoadInst *LI);
-
- private:
- MemDepResult getCallSiteDependencyFrom(CallSite C, bool isReadOnlyCall,
- BasicBlock::iterator ScanIt,
- BasicBlock *BB);
- bool getNonLocalPointerDepFromBB(Instruction *QueryInst,
- const PHITransAddr &Pointer,
- const MemoryLocation &Loc, bool isLoad,
- BasicBlock *BB,
- SmallVectorImpl<NonLocalDepResult> &Result,
- DenseMap<BasicBlock *, Value *> &Visited,
- bool SkipFirstBlock = false);
- MemDepResult GetNonLocalInfoForBlock(Instruction *QueryInst,
- const MemoryLocation &Loc, bool isLoad,
- BasicBlock *BB, NonLocalDepInfo *Cache,
- unsigned NumSortedEntries);
-
- void RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P);
-
- /// verifyRemoved - Verify that the specified instruction does not occur
- /// in our internal data structures.
- void verifyRemoved(Instruction *Inst) const;
- };
+ /// removeInstruction - Remove an instruction from the dependence analysis,
+ /// updating the dependence of instructions that previously depended on it.
+ void removeInstruction(Instruction *InstToRemove);
+
+ /// invalidateCachedPointerInfo - This method is used to invalidate cached
+ /// information about the specified pointer, because it may be too
+ /// conservative in memdep. This is an optional call that can be used when
+ /// the client detects an equivalence between the pointer and some other
+ /// value and replaces the other value with ptr. This can make Ptr available
+ /// in more places that cached info does not necessarily keep.
+ void invalidateCachedPointerInfo(Value *Ptr);
+
+ /// invalidateCachedPredecessors - Clear the PredIteratorCache info.
+ /// This needs to be done when the CFG changes, e.g., due to splitting
+ /// critical edges.
+ void invalidateCachedPredecessors();
+
+ /// \brief Return the instruction on which a memory location depends.
+ /// If isLoad is true, this routine ignores may-aliases with read-only
+ /// operations. If isLoad is false, this routine ignores may-aliases
+ /// with reads from read-only locations. If possible, pass the query
+ /// instruction as well; this function may take advantage of the metadata
+ /// annotated to the query instruction to refine the result.
+ ///
+ /// Note that this is an uncached query, and thus may be inefficient.
+ ///
+ MemDepResult getPointerDependencyFrom(const MemoryLocation &Loc, bool isLoad,
+ BasicBlock::iterator ScanIt,
+ BasicBlock *BB,
+ Instruction *QueryInst = nullptr);
+
+ MemDepResult getSimplePointerDependencyFrom(const MemoryLocation &MemLoc,
+ bool isLoad,
+ BasicBlock::iterator ScanIt,
+ BasicBlock *BB,
+ Instruction *QueryInst);
+
+ /// This analysis looks for other loads and stores with invariant.group
+ /// metadata and the same pointer operand. Returns Unknown if it does not
+ /// find anything, and Def if it can be assumed that 2 instructions load or
+ /// store the same value.
+ /// FIXME: This analysis works only on single block because of restrictions
+ /// at the call site.
+ MemDepResult getInvariantGroupPointerDependency(LoadInst *LI, BasicBlock *BB);
+
+ /// getLoadLoadClobberFullWidthSize - This is a little bit of analysis that
+ /// looks at a memory location for a load (specified by MemLocBase, Offs,
+ /// and Size) and compares it against a load. If the specified load could
+ /// be safely widened to a larger integer load that is 1) still efficient,
+ /// 2) safe for the target, and 3) would provide the specified memory
+ /// location value, then this function returns the size in bytes of the
+ /// load width to use. If not, this returns zero.
+ static unsigned getLoadLoadClobberFullWidthSize(const Value *MemLocBase,
+ int64_t MemLocOffs,
+ unsigned MemLocSize,
+ const LoadInst *LI);
+
+private:
+ MemDepResult getCallSiteDependencyFrom(CallSite C, bool isReadOnlyCall,
+ BasicBlock::iterator ScanIt,
+ BasicBlock *BB);
+ bool getNonLocalPointerDepFromBB(Instruction *QueryInst,
+ const PHITransAddr &Pointer,
+ const MemoryLocation &Loc, bool isLoad,
+ BasicBlock *BB,
+ SmallVectorImpl<NonLocalDepResult> &Result,
+ DenseMap<BasicBlock *, Value *> &Visited,
+ bool SkipFirstBlock = false);
+ MemDepResult GetNonLocalInfoForBlock(Instruction *QueryInst,
+ const MemoryLocation &Loc, bool isLoad,
+ BasicBlock *BB, NonLocalDepInfo *Cache,
+ unsigned NumSortedEntries);
+
+ void RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P);
+
+ /// verifyRemoved - Verify that the specified instruction does not occur
+ /// in our internal data structures.
+ void verifyRemoved(Instruction *Inst) const;
+};
} // End llvm namespace
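
For context, a minimal client sketch of the interface declared in the header above. Only the MemoryDependenceAnalysis, MemDepResult, and NonLocalDepResult members shown in this diff are taken from the source; the helper function, loop structure, and diagnostic output are illustrative assumptions, not part of this commit:

    // Illustrative sketch only: query MemoryDependenceAnalysis for each load
    // in a function, using the MemDepResult accessors declared above.
    #include "llvm/Analysis/MemoryDependenceAnalysis.h"
    #include "llvm/IR/Function.h"
    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/raw_ostream.h"
    using namespace llvm;

    static void inspectLoads(Function &F, MemoryDependenceAnalysis &MD) {
      for (BasicBlock &BB : F)
        for (Instruction &I : BB) {
          auto *LI = dyn_cast<LoadInst>(&I);
          if (!LI)
            continue;
          MemDepResult Dep = MD.getDependency(LI);
          if (Dep.isDef() || Dep.isClobber()) {
            // Local dependence: the defining/clobbering instruction is known.
            errs() << "dep of " << *LI << " is " << *Dep.getInst() << "\n";
          } else if (Dep.isNonLocal()) {
            // No dependence in this block; ask the non-local query for the
            // defining/clobbering instructions across predecessor blocks.
            SmallVector<NonLocalDepResult, 16> Result;
            MD.getNonLocalPointerDependency(LI, Result);
            for (const NonLocalDepResult &NLDR : Result)
              if (Instruction *DepInst = NLDR.getResult().getInst())
                errs() << "  non-local dep in " << NLDR.getBB()->getName()
                       << ": " << *DepInst << "\n";
          }
          // isUnknown() / isNonFuncLocal() mean no usable dependence exists.
        }
    }

A load whose local answer is NonLocal is followed up with getNonLocalPointerDependency, mirroring the contract documented in the header; Unknown and NonFuncLocal results indicate that no usable dependence was found.
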
diff --git a/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp b/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
index 214f96898f0..fe32a34fd62 100644
--- a/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -45,8 +45,7 @@ STATISTIC(NumCacheNonLocalPtr,
"Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
"Number of cached, but dirty, non-local ptr responses");
-STATISTIC(NumUncacheNonLocalPtr,
- "Number of uncached non-local ptr responses");
+STATISTIC(NumUncacheNonLocalPtr, "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
"Number of block queries that were completely cached");
@@ -57,10 +56,10 @@ static cl::opt<unsigned> BlockScanLimit(
cl::desc("The number of instructions to scan in a block in memory "
"dependency analysis (default = 100)"));
-static cl::opt<unsigned> BlockNumberLimit(
- "memdep-block-number-limit", cl::Hidden, cl::init(1000),
- cl::desc("The number of blocks to scan during memory "
- "dependency analysis (default = 1000)"));
+static cl::opt<unsigned>
+ BlockNumberLimit("memdep-block-number-limit", cl::Hidden, cl::init(1000),
+ cl::desc("The number of blocks to scan during memory "
+ "dependency analysis (default = 1000)"));
// Limit on the number of memdep results to process.
static const unsigned int NumResultsLimit = 100;
@@ -69,19 +68,17 @@ char MemoryDependenceAnalysis::ID = 0;
// Register this pass...
INITIALIZE_PASS_BEGIN(MemoryDependenceAnalysis, "memdep",
- "Memory Dependence Analysis", false, true)
+ "Memory Dependence Analysis", false, true)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(MemoryDependenceAnalysis, "memdep",
- "Memory Dependence Analysis", false, true)
+ "Memory Dependence Analysis", false, true)
-MemoryDependenceAnalysis::MemoryDependenceAnalysis()
- : FunctionPass(ID) {
+MemoryDependenceAnalysis::MemoryDependenceAnalysis() : FunctionPass(ID) {
initializeMemoryDependenceAnalysisPass(*PassRegistry::getPassRegistry());
}
-MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
-}
+MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {}
/// Clean up memory in between runs
void MemoryDependenceAnalysis::releaseMemory() {
@@ -116,14 +113,15 @@ bool MemoryDependenceAnalysis::runOnFunction(Function &F) {
/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap. If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
-static void RemoveFromReverseMap(DenseMap<Instruction*,
- SmallPtrSet<KeyTy, 4> > &ReverseMap,
- Instruction *Inst, KeyTy Val) {
- typename DenseMap<Instruction*, SmallPtrSet<KeyTy, 4> >::iterator
- InstIt = ReverseMap.find(Inst);
+static void
+RemoveFromReverseMap(DenseMap<Instruction *, SmallPtrSet<KeyTy, 4>> &ReverseMap,
+ Instruction *Inst, KeyTy Val) {
+ typename DenseMap<Instruction *, SmallPtrSet<KeyTy, 4>>::iterator InstIt =
+ ReverseMap.find(Inst);
assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
bool Found = InstIt->second.erase(Val);
- assert(Found && "Invalid reverse map!"); (void)Found;
+ assert(Found && "Invalid reverse map!");
+ (void)Found;
if (InstIt->second.empty())
ReverseMap.erase(InstIt);
}
@@ -208,9 +206,9 @@ static ModRefInfo GetLocation(const Instruction *Inst, MemoryLocation &Loc,
/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
-MemDepResult MemoryDependenceAnalysis::
-getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
- BasicBlock::iterator ScanIt, BasicBlock *BB) {
+MemDepResult MemoryDependenceAnalysis::getCallSiteDependencyFrom(
+ CallSite CS, bool isReadOnlyCall, BasicBlock::iterator ScanIt,
+ BasicBlock *BB) {
unsigned Limit = BlockScanLimit;
// Walk backwards through the block, looking for dependencies
@@ -235,7 +233,8 @@ getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
if (auto InstCS = CallSite(Inst)) {
// Debug intrinsics don't cause dependences.
- if (isa<DbgInfoIntrinsic>(Inst)) continue;
+ if (isa<DbgInfoIntrinsic>(Inst))
+ continue;
// If these two calls do not interfere, look past it.
switch (AA->getModRefInfo(CS, InstCS)) {
case MRI_NoModRef:
@@ -297,7 +296,8 @@ unsigned MemoryDependenceAnalysis::getLoadLoadClobberFullWidthSize(
const Value *MemLocBase, int64_t MemLocOffs, unsigned MemLocSize,
const LoadInst *LI) {
// We can only extend simple integer loads.
- if (!isa<IntegerType>(LI->getType()) || !LI->isSimple()) return 0;
+ if (!isa<IntegerType>(LI->getType()) || !LI->isSimple())
+ return 0;
// Load widening is hostile to ThreadSanitizer: it may cause false positives
// or make the reports more cryptic (access sizes are wrong).
@@ -313,7 +313,8 @@ unsigned MemoryDependenceAnalysis::getLoadLoadClobberFullWidthSize(
// If the two pointers are not based on the same pointer, we can't tell that
// they are related.
- if (LIBase != MemLocBase) return 0;
+ if (LIBase != MemLocBase)
+ return 0;
// Okay, the two values are based on the same pointer, but returned as
// no-alias. This happens when we have things like two byte loads at "P+1"
@@ -322,7 +323,8 @@ unsigned MemoryDependenceAnalysis::getLoadLoadClobberFullWidthSize(
// the bits required by MemLoc.
// If MemLoc is before LI, then no widening of LI will help us out.
- if (MemLocOffs < LIOffs) return 0;
+ if (MemLocOffs < LIOffs)
+ return 0;
// Get the alignment of the load in bytes. We assume that it is safe to load
// any legal integer up to this size without a problem. For example, if we're
@@ -331,21 +333,22 @@ unsigned MemoryDependenceAnalysis::getLoadLoadClobberFullWidthSize(
// to i16.
unsigned LoadAlign = LI->getAlignment();
- int64_t MemLocEnd = MemLocOffs+MemLocSize;
+ int64_t MemLocEnd = MemLocOffs + MemLocSize;
// If no amount of rounding up will let MemLoc fit into LI, then bail out.
- if (LIOffs+LoadAlign < MemLocEnd) return 0;
+ if (LIOffs + LoadAlign < MemLocEnd)
+ return 0;
// This is the size of the load to try. Start with the next larger power of
// two.
- unsigned NewLoadByteSize = LI->getType()->getPrimitiveSizeInBits()/8U;
+ unsigned NewLoadByteSize = LI->getType()->getPrimitiveSizeInBits() / 8U;
NewLoadByteSize = NextPowerOf2(NewLoadByteSize);
while (1) {
// If this load size is bigger than our known alignment or would not fit
// into a native integer register, then we fail.
if (NewLoadByteSize > LoadAlign ||
- !DL.fitsInLegalInteger(NewLoadByteSize*8))
+ !DL.fitsInLegalInteger(NewLoadByteSize * 8))
return 0;
if (LIOffs + NewLoadByteSize > MemLocEnd &&
@@ -357,7 +360,7 @@ unsigned MemoryDependenceAnalysis::getLoadLoadClobberFullWidthSize(
return 0;
// If a load of this width would include all of MemLoc, then we succeed.
- if (LIOffs+NewLoadByteSize >= MemLocEnd)
+ if (LIOffs + NewLoadByteSize >= MemLocEnd)
return NewLoadByteSize;
NewLoadByteSize <<= 1;
@@ -374,7 +377,6 @@ static bool isVolatile(Instruction *Inst) {
return false;
}
-
/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends. If isLoad is true, this routine ignores may-aliases with
/// read-only operations. If isLoad is false, this routine ignores may-aliases
@@ -507,7 +509,7 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
// Return "true" if and only if the instruction I is either a non-simple
// load or a non-simple store.
- auto isNonSimpleLoadOrStore = [] (Instruction *I) -> bool {
+ auto isNonSimpleLoadOrStore = [](Instruction *I) -> bool {
if (auto *LI = dyn_cast<LoadInst>(I))
return !LI->isSimple();
if (auto *SI = dyn_cast<StoreInst>(I))
@@ -517,7 +519,7 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
// Return "true" if I is not a load and not a store, but it does access
// memory.
- auto isOtherMemAccess = [] (Instruction *I) -> bool {
+ auto isOtherMemAccess = [](Instruction *I) -> bool {
return !isa<LoadInst>(I) && !isa<StoreInst>(I) && I->mayReadOrWriteMemory();
};
@@ -527,7 +529,8 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))
// Debug intrinsics don't (and can't) cause dependencies.
- if (isa<DbgInfoIntrinsic>(II)) continue;
+ if (isa<DbgInfoIntrinsic>(II))
+ continue;
// Limit the amount of scanning we do so we don't end up with quadratic
// running time on extreme testcases.
@@ -549,11 +552,11 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
}
}
- // Values depend on loads if the pointers are must aliased. This means that
- // a load depends on another must aliased load from the same value.
- // One exception is atomic loads: a value can depend on an atomic load that it
- // does not alias with when this atomic load indicates that another thread may
- // be accessing the location.
+ // Values depend on loads if the pointers are must aliased. This means
+ // that a load depends on another must aliased load from the same value.
+ // One exception is atomic loads: a value can depend on an atomic load that
+ // it does not alias with when this atomic load indicates that another
+ // thread may be accessing the location.
if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
// While volatile access cannot be eliminated, they do not have to clobber
@@ -570,7 +573,8 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
}
// Atomic loads have complications involved.
- // A Monotonic (or higher) load is OK if the query inst is itself not atomic.
+ // A Monotonic (or higher) load is OK if the query inst is itself not
+ // atomic.
// FIXME: This is overly conservative.
if (LI->isAtomic() && LI->getOrdering() > Unordered) {
if (!QueryInst || isNonSimpleLoadOrStore(QueryInst) ||
@@ -673,7 +677,7 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
if (R == MustAlias)
return MemDepResult::getDef(Inst);
if (isInvariantLoad)
- continue;
+ continue;
return MemDepResult::getClobber(Inst);
}
@@ -703,7 +707,7 @@ MemDepResult MemoryDependenceAnalysis::getSimplePointerDependencyFrom(
}
if (isInvariantLoad)
- continue;
+ continue;
// See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
ModRefInfo MR = AA->getModRefInfo(Inst, MemLoc);
@@ -798,7 +802,8 @@ MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
/// cache arrays are properly kept sorted.
static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
int Count = -1) {
- if (Count == -1) Count = Cache.size();
+ if (Count == -1)
+ Count = Cache.size();
assert(std::is_sorted(Cache.begin(), Cache.begin() + Count) &&
"Cache isn't sorted!");
}
@@ -819,7 +824,8 @@ static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
- "getNonLocalCallDependency should only be used on calls with non-local deps!");
+ "getNonLocalCallDependency should only be used on calls with "
+ "non-local deps!");
PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
NonLocalDepInfo &Cache = CacheP.first;
@@ -827,7 +833,7 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
/// the cached case, this can happen due to instructions being deleted etc. In
/// the uncached case, this starts out as the set of predecessors we care
/// about.
- SmallVector<BasicBlock*, 32> DirtyBlocks;
+ SmallVector<BasicBlock *, 32> DirtyBlocks;
if (!Cache.empty()) {
// Okay, we have a cache entry. If we know it is not dirty, just return it
@@ -839,8 +845,8 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
// If we already have a partially computed set of results, scan them to
// determine what is dirty, seeding our initial DirtyBlocks worklist.
- for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
- I != E; ++I)
+ for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end(); I != E;
+ ++I)
if (I->getResult().isDirty())
DirtyBlocks.push_back(I->getBB());
@@ -848,7 +854,7 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
std::sort(Cache.begin(), Cache.end());
++NumCacheDirtyNonLocal;
- //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
+ // cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
// << Cache.size() << " cached: " << *QueryInst;
} else {
// Seed DirtyBlocks with each of the preds of QueryInst's block.
@@ -861,7 +867,7 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
// isReadonlyCall - If this is a read-only call, we can be more aggressive.
bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);
- SmallPtrSet<BasicBlock*, 32> Visited;
+ SmallPtrSet<BasicBlock *, 32> Visited;
unsigned NumSortedEntries = Cache.size();
DEBUG(AssertSorted(Cache));
@@ -879,13 +885,13 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
// the cache set. If so, find it.
DEBUG(AssertSorted(Cache, NumSortedEntries));
NonLocalDepInfo::iterator Entry =
- std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
- NonLocalDepEntry(DirtyBB));
+ std::upper_bound(Cache.begin(), Cache.begin() + NumSortedEntries,
+ NonLocalDepEntry(DirtyBB));
if (Entry != Cache.begin() && std::prev(Entry)->getBB() == DirtyBB)
--Entry;
NonLocalDepEntry *ExistingResult = nullptr;
- if (Entry != Cache.begin()+NumSortedEntries &&
+ if (Entry != Cache.begin() + NumSortedEntries &&
Entry->getBB() == DirtyBB) {
// If we already have an entry, and if it isn't already dirty, the block
// is done.
@@ -912,7 +918,8 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
MemDepResult Dep;
if (ScanPos != DirtyBB->begin()) {
- Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall,ScanPos, DirtyBB);
+ Dep =
+ getCallSiteDependencyFrom(QueryCS, isReadonlyCall, ScanPos, DirtyBB);
} else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
// No dependence found. If this is the entry block of the function, it is
// a clobber, otherwise it is unknown.
@@ -954,9 +961,8 @@ MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
-void MemoryDependenceAnalysis::
-getNonLocalPointerDependency(Instruction *QueryInst,
- SmallVectorImpl<NonLocalDepResult> &Result) {
+void MemoryDependenceAnalysis::getNonLocalPointerDependency(
+ Instruction *QueryInst, SmallVectorImpl<NonLocalDepResult> &Result) {
const MemoryLocation Loc = MemoryLocation::get(QueryInst);
bool isLoad = isa<LoadInst>(QueryInst);
BasicBlock *FromBB = QueryInst->getParent();
@@ -983,8 +989,7 @@ getNonLocalPointerDependency(Instruction *QueryInst,
return false;
};
if (isVolatile(QueryInst) || isOrdered(QueryInst)) {
- Result.push_back(NonLocalDepResult(FromBB,
- MemDepResult::getUnknown(),
+ Result.push_back(NonLocalDepResult(FromBB, MemDepResult::getUnknown(),
const_cast<Value *>(Loc.Ptr)));
return;
}
@@ -995,13 +1000,12 @@ getNonLocalPointerDependency(Instruction *QueryInst,
// each block. Because of critical edges, we currently bail out if querying
// a block with multiple different pointers. This can happen during PHI
// translation.
- DenseMap<BasicBlock*, Value*> Visited;
+ DenseMap<BasicBlock *, Value *> Visited;
if (!getNonLocalPointerDepFromBB(QueryInst, Address, Loc, isLoad, FromBB,
Result, Visited, true))
return;
Result.clear();
- Result.push_back(NonLocalDepResult(FromBB,
- MemDepResult::getUnknown(),
+ Result.push_back(NonLocalDepResult(FromBB, MemDepResult::getUnknown(),
const_cast<Value *>(Loc.Ptr)));
}
@@ -1015,14 +1019,13 @@ MemDepResult MemoryDependenceAnalysis::GetNonLocalInfoForBlock(
// Do a binary search to see if we already have an entry for this block in
// the cache set. If so, find it.
- NonLocalDepInfo::iterator Entry =
- std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
- NonLocalDepEntry(BB));
- if (Entry != Cache->begin() && (Entry-1)->getBB() == BB)
+ NonLocalDepInfo::iterator Entry = std::upper_bound(
+ Cache->begin(), Cache->begin() + NumSortedEntries, NonLocalDepEntry(BB));
+ if (Entry != Cache->begin() && (Entry - 1)->getBB() == BB)
--Entry;
NonLocalDepEntry *ExistingResult = nullptr;
- if (Entry != Cache->begin()+NumSortedEntries && Entry->getBB() == BB)
+ if (Entry != Cache->begin() + NumSortedEntries && Entry->getBB() == BB)
ExistingResult = &*Entry;
// If we have a cached entry, and it is non-dirty, use it as the value for
@@ -1050,8 +1053,8 @@ MemDepResult MemoryDependenceAnalysis::GetNonLocalInfoForBlock(
}
// Scan the block for the dependency.
- MemDepResult Dep = getPointerDependencyFrom(Loc, isLoad, ScanPos, BB,
- QueryInst);
+ MemDepResult Dep =
+ getPointerDependencyFrom(Loc, isLoad, ScanPos, BB, QueryInst);
// If we had a dirty entry for the block, update it. Otherwise, just add
// a new entry.
@@ -1090,7 +1093,7 @@ SortNonLocalDepInfoCache(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
NonLocalDepEntry Val = Cache.back();
Cache.pop_back();
MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
- std::upper_bound(Cache.begin(), Cache.end()-1, Val);
+ std::upper_bound(Cache.begin(), Cache.end() - 1, Val);
Cache.insert(Entry, Val);
// FALL THROUGH.
}
@@ -1100,7 +1103,7 @@ SortNonLocalDepInfoCache(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
NonLocalDepEntry Val = Cache.back();
Cache.pop_back();
MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
- std::upper_bound(Cache.begin(), Cache.end(), Val);
+ std::upper_bound(Cache.begin(), Cache.end(), Val);
Cache.insert(Entry, Val);
}
break;
@@ -1142,7 +1145,7 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
// Get the NLPI for CacheKey, inserting one into the map if it doesn't
// already have one.
std::pair<CachedNonLocalPointerInfo::iterator, bool> Pair =
- NonLocalPointerDeps.insert(std::make_pair(CacheKey, InitialNLPI));
+ NonLocalPointerDeps.insert(std::make_pair(CacheKey, InitialNLPI));
NonLocalPointerInfo *CacheInfo = &Pair.first->second;
// If we already have a cache entry for this CacheKey, we may need to do some
@@ -1154,17 +1157,17 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
CacheInfo->Pair = BBSkipFirstBlockPair();
CacheInfo->Size = Loc.Size;
for (NonLocalDepInfo::iterator DI = CacheInfo->NonLocalDeps.begin(),
- DE = CacheInfo->NonLocalDeps.end(); DI != DE; ++DI)
+ DE = CacheInfo->NonLocalDeps.end();
+ DI != DE; ++DI)
if (Instruction *Inst = DI->getResult().getInst())
RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
CacheInfo->NonLocalDeps.clear();
} else if (CacheInfo->Size > Loc.Size) {
// This query's Size is less than the cached one. Conservatively restart
// the query using the greater size.
- return getNonLocalPointerDepFromBB(QueryInst, Pointer,
- Loc.getWithNewSize(CacheInfo->Size),
- isLoad, StartBB, Result, Visited,
- SkipFirstBlock);
+ return getNonLocalPointerDepFromBB(
+ QueryInst, Pointer, Loc.getWithNewSize(CacheInfo->Size), isLoad,
+ StartBB, Result, Visited, SkipFirstBlock);
}
// If the query's AATags are inconsistent with the cached one,
@@ -1175,16 +1178,16 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
CacheInfo->Pair = BBSkipFirstBlockPair();
CacheInfo->AATags = AAMDNodes();
for (NonLocalDepInfo::iterator DI = CacheInfo->NonLocalDeps.begin(),
- DE = CacheInfo->NonLocalDeps.end(); DI != DE; ++DI)
+ DE = CacheInfo->NonLocalDeps.end();
+ DI != DE; ++DI)
if (Instruction *Inst = DI->getResult().getInst())
RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
CacheInfo->NonLocalDeps.clear();
}
if (Loc.AATags)
- return getNonLocalPointerDepFromBB(QueryInst,
- Pointer, Loc.getWithoutAATags(),
- isLoad, StartBB, Result, Visited,
- SkipFirstBlock);
+ return getNonLocalPointerDepFromBB(
+ QueryInst, Pointer, Loc.getWithoutAATags(), isLoad, StartBB, Result,
+ Visited, SkipFirstBlock);
}
}
@@ -1201,7 +1204,7 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
if (!Visited.empty()) {
for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
I != E; ++I) {
- DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->getBB());
+ DenseMap<BasicBlock *, Value *>::iterator VI = Visited.find(I->getBB());
if (VI == Visited.end() || VI->second == Pointer.getAddr())
continue;
@@ -1213,17 +1216,16 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
}
Value *Addr = Pointer.getAddr();
- for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
- I != E; ++I) {
+ for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end(); I != E;
+ ++I) {
Visited.insert(std::make_pair(I->getBB(), Addr));
if (I->getResult().isNonLocal()) {
continue;
}
if (!DT) {
- Result.push_back(NonLocalDepResult(I->getBB(),
- MemDepResult::getUnknown(),
- Addr));
+ Result.push_back(
+ NonLocalDepResult(I->getBB(), MemDepResult::getUnknown(), Addr));
} else if (DT->isReachableFromEntry(I->getBB())) {
Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(), Addr));
}
@@ -1241,11 +1243,11 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
else
CacheInfo->Pair = BBSkipFirstBlockPair();
- SmallVector<BasicBlock*, 32> Worklist;
+ SmallVector<BasicBlock *, 32> Worklist;
Worklist.push_back(StartBB);
// PredList used inside loop.
- SmallVector<std::pair<BasicBlock*, PHITransAddr>, 16> PredList;
+ SmallVector<std::pair<BasicBlock *, PHITransAddr>, 16> PredList;
// Keep track of the entries that we know are sorted. Previously cached
// entries will all be sorted. The entries we add we only sort on demand (we
@@ -1287,15 +1289,13 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
// Get the dependency info for Pointer in BB. If we have cached
// information, we will use it, otherwise we compute it.
DEBUG(AssertSorted(*Cache, NumSortedEntries));
- MemDepResult Dep = GetNonLocalInfoForBlock(QueryInst,
- Loc, isLoad, BB, Cache,
- NumSortedEntries);
+ MemDepResult Dep = GetNonLocalInfoForBlock(QueryInst, Loc, isLoad, BB,
+ Cache, NumSortedEntries);
// If we got a Def or Clobber, add this to the list of results.
if (!Dep.isNonLocal()) {
if (!DT) {
- Result.push_back(NonLocalDepResult(BB,
- MemDepResult::getUnknown(),
+ Result.push_back(NonLocalDepResult(BB, MemDepResult::getUnknown(),
Pointer.getAddr()));
continue;
} else if (DT->isReachableFromEntry(BB)) {
@@ -1311,11 +1311,11 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
// the same Pointer.
if (!Pointer.NeedsPHITranslationFromBlock(BB)) {
SkipFirstBlock = false;
- SmallVector<BasicBlock*, 16> NewBlocks;
+ SmallVector<BasicBlock *, 16> NewBlocks;
for (BasicBlock *Pred : PredCache.get(BB)) {
// Verify that we haven't looked at this block yet.
- std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
- InsertRes = Visited.insert(std::make_pair(Pred, Pointer.getAddr()));
+ std::pair<DenseMap<BasicBlock *, Value *>::iterator, bool> InsertRes =
+ Visited.insert(std::make_pair(Pred, Pointer.getAddr()));
if (InsertRes.second) {
// First time we've looked at *PI.
NewBlocks.push_back(Pred);
@@ -1377,8 +1377,8 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
// with PHI translation when a critical edge exists and the PHI node in
// the successor translates to a pointer value different than the
// pointer the block was first analyzed with.
- std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
- InsertRes = Visited.insert(std::make_pair(Pred, PredPtrVal));
+ std::pair<DenseMap<BasicBlock *, Value *>::iterator, bool> InsertRes =
+ Visited.insert(std::make_pair(Pred, PredPtrVal));
if (!InsertRes.second) {
// We found the pred; take it off the list of preds to visit.
@@ -1430,9 +1430,8 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
// assume it is unknown, but this also does not block PRE of the load.
if (!CanTranslate ||
getNonLocalPointerDepFromBB(QueryInst, PredPointer,
- Loc.getWithNewPtr(PredPtrVal),
- isLoad, Pred,
- Result, Visited)) {
+ Loc.getWithNewPtr(PredPtrVal), isLoad,
+ Pred, Result, Visited)) {
// Add the entry to the Result list.
NonLocalDepResult Entry(Pred, MemDepResult::getUnknown(), PredPtrVal);
Result.push_back(Entry);
@@ -1488,17 +1487,17 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
return true;
bool foundBlock = false;
- for (NonLocalDepEntry &I: llvm::reverse(*Cache)) {
+ for (NonLocalDepEntry &I : llvm::reverse(*Cache)) {
if (I.getBB() != BB)
continue;
- assert((GotWorklistLimit || I.getResult().isNonLocal() || \
+ assert((GotWorklistLimit || I.getResult().isNonLocal() ||
!DT->isReachableFromEntry(BB)) &&
"Should only be here with transparent block");
foundBlock = true;
I.setResult(MemDepResult::getUnknown());
- Result.push_back(NonLocalDepResult(I.getBB(), I.getResult(),
- Pointer.getAddr()));
+ Result.push_back(
+ NonLocalDepResult(I.getBB(), I.getResult(), Pointer.getAddr()));
break;
}
(void)foundBlock;
@@ -1513,11 +1512,11 @@ bool MemoryDependenceAnalysis::getNonLocalPointerDepFromBB(
/// RemoveCachedNonLocalPointerDependencies - If P exists in
/// CachedNonLocalPointerInfo, remove it.
-void MemoryDependenceAnalysis::
-RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
- CachedNonLocalPointerInfo::iterator It =
- NonLocalPointerDeps.find(P);
- if (It == NonLocalPointerDeps.end()) return;
+void MemoryDependenceAnalysis::RemoveCachedNonLocalPointerDependencies(
+ ValueIsLoadPair P) {
+ CachedNonLocalPointerInfo::iterator It = NonLocalPointerDeps.find(P);
+ if (It == NonLocalPointerDeps.end())
+ return;
// Remove all of the entries in the BB->val map. This involves removing
// instructions from the reverse map.
@@ -1525,7 +1524,8 @@ RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
Instruction *Target = PInfo[i].getResult().getInst();
- if (!Target) continue; // Ignore non-local dep results.
+ if (!Target)
+ continue; // Ignore non-local dep results.
assert(Target->getParent() == PInfo[i].getBB());
// Eliminating the dirty entry from 'Cache', so update the reverse info.
@@ -1536,7 +1536,6 @@ RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
NonLocalPointerDeps.erase(It);
}
-
/// invalidateCachedPointerInfo - This method is used to invalidate cached
/// information about the specified pointer, because it may be too
/// conservative in memdep. This is an optional call that can be used when
@@ -1545,7 +1544,8 @@ RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
/// in more places that cached info does not necessarily keep.
void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
// If Ptr isn't really a pointer, just ignore it.
- if (!Ptr->getType()->isPointerTy()) return;
+ if (!Ptr->getType()->isPointerTy())
+ return;
// Flush store info for the pointer.
RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
// Flush load info for the pointer.
@@ -1600,7 +1600,7 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// Loop over all of the things that depend on the instruction we're removing.
//
- SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;
+ SmallVector<std::pair<Instruction *, Instruction *>, 8> ReverseDepsToAdd;
// If we find RemInst as a clobber or Def in any of the maps for other values,
// we need to replace its entry with a dirty version of the instruction after
@@ -1625,10 +1625,11 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
LocalDeps[InstDependingOnRemInst] = NewDirtyVal;
// Make sure to remember that new things depend on NewDepInst.
- assert(NewDirtyVal.getInst() && "There is no way something else can have "
+ assert(NewDirtyVal.getInst() &&
+ "There is no way something else can have "
"a local dep on this if it is a terminator!");
- ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
- InstDependingOnRemInst));
+ ReverseDepsToAdd.push_back(
+ std::make_pair(NewDirtyVal.getInst(), InstDependingOnRemInst));
}
ReverseLocalDeps.erase(ReverseDepIt);
@@ -1636,8 +1637,8 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// Add new reverse deps after scanning the set, to avoid invalidating the
// 'ReverseDeps' reference.
while (!ReverseDepsToAdd.empty()) {
- ReverseLocalDeps[ReverseDepsToAdd.back().first]
- .insert(ReverseDepsToAdd.back().second);
+ ReverseLocalDeps[ReverseDepsToAdd.back().first].insert(
+ ReverseDepsToAdd.back().second);
ReverseDepsToAdd.pop_back();
}
}
@@ -1652,8 +1653,10 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
INLD.second = true;
for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
- DE = INLD.first.end(); DI != DE; ++DI) {
- if (DI->getResult().getInst() != RemInst) continue;
+ DE = INLD.first.end();
+ DI != DE; ++DI) {
+ if (DI->getResult().getInst() != RemInst)
+ continue;
// Convert to a dirty entry for the subsequent instruction.
DI->setResult(NewDirtyVal);
@@ -1667,8 +1670,8 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// Add new reverse deps after scanning the set, to avoid invalidating 'Set'
while (!ReverseDepsToAdd.empty()) {
- ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
- .insert(ReverseDepsToAdd.back().second);
+ ReverseNonLocalDeps[ReverseDepsToAdd.back().first].insert(
+ ReverseDepsToAdd.back().second);
ReverseDepsToAdd.pop_back();
}
}
@@ -1676,9 +1679,10 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// If the instruction is in ReverseNonLocalPtrDeps then it appears as a
// value in the NonLocalPointerDeps info.
ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
- ReverseNonLocalPtrDeps.find(RemInst);
+ ReverseNonLocalPtrDeps.find(RemInst);
if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
- SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;
+ SmallVector<std::pair<Instruction *, ValueIsLoadPair>, 8>
+ ReversePtrDepsToAdd;
for (ValueIsLoadPair P : ReversePtrDepIt->second) {
assert(P.getPointer() != RemInst &&
@@ -1692,7 +1696,8 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// Update any entries for RemInst to use the instruction after it.
for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
DI != DE; ++DI) {
- if (DI->getResult().getInst() != RemInst) continue;
+ if (DI->getResult().getInst() != RemInst)
+ continue;
// Convert to a dirty entry for the subsequent instruction.
DI->setResult(NewDirtyVal);
@@ -1709,13 +1714,12 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);
while (!ReversePtrDepsToAdd.empty()) {
- ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
- .insert(ReversePtrDepsToAdd.back().second);
+ ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first].insert(
+ ReversePtrDepsToAdd.back().second);
ReversePtrDepsToAdd.pop_back();
}
}
-
assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
DEBUG(verifyRemoved(RemInst));
}
@@ -1725,14 +1729,16 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
#ifndef NDEBUG
for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
- E = LocalDeps.end(); I != E; ++I) {
+ E = LocalDeps.end();
+ I != E; ++I) {
assert(I->first != D && "Inst occurs in data structures");
- assert(I->second.getInst() != D &&
- "Inst occurs in data structures");
+ assert(I->second.getInst() != D && "Inst occurs in data structures");
}
- for (CachedNonLocalPointerInfo::const_iterator I =NonLocalPointerDeps.begin(),
- E = NonLocalPointerDeps.end(); I != E; ++I) {
+ for (CachedNonLocalPointerInfo::const_iterator
+ I = NonLocalPointerDeps.begin(),
+ E = NonLocalPointerDeps.end();
+ I != E; ++I) {
assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
const NonLocalDepInfo &Val = I->second.NonLocalDeps;
for (NonLocalDepInfo::const_iterator II = Val.begin(), E = Val.end();
@@ -1741,23 +1747,27 @@ void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
}
for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
- E = NonLocalDeps.end(); I != E; ++I) {
+ E = NonLocalDeps.end();
+ I != E; ++I) {
assert(I->first != D && "Inst occurs in data structures");
const PerInstNLInfo &INLD = I->second;
for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
- EE = INLD.first.end(); II != EE; ++II)
- assert(II->getResult().getInst() != D && "Inst occurs in data structures");
+ EE = INLD.first.end();
+ II != EE; ++II)
+ assert(II->getResult().getInst() != D &&
+ "Inst occurs in data structures");
}
for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
- E = ReverseLocalDeps.end(); I != E; ++I) {
+ E = ReverseLocalDeps.end();
+ I != E; ++I) {
assert(I->first != D && "Inst occurs in data structures");
for (Instruction *Inst : I->second)
assert(Inst != D && "Inst occurs in data structures");
}
for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
- E = ReverseNonLocalDeps.end();
+ E = ReverseNonLocalDeps.end();
I != E; ++I) {
assert(I->first != D && "Inst occurs in data structures");
for (Instruction *Inst : I->second)
@@ -1765,13 +1775,13 @@ void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
}
for (ReverseNonLocalPtrDepTy::const_iterator
- I = ReverseNonLocalPtrDeps.begin(),
- E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
+ I = ReverseNonLocalPtrDeps.begin(),
+ E = ReverseNonLocalPtrDeps.end();
+ I != E; ++I) {
assert(I->first != D && "Inst occurs in rev NLPD map");
for (ValueIsLoadPair P : I->second)
- assert(P != ValueIsLoadPair(D, false) &&
- P != ValueIsLoadPair(D, true) &&
+ assert(P != ValueIsLoadPair(D, false) && P != ValueIsLoadPair(D, true) &&
"Inst occurs in ReverseNonLocalPtrDeps map");
}
#endif