diff options
-rw-r--r-- polly/include/polly/CodeGen/BlockGenerators.h | 52
-rw-r--r-- polly/lib/CodeGen/BlockGenerators.cpp | 43
2 files changed, 45 insertions, 50 deletions
diff --git a/polly/include/polly/CodeGen/BlockGenerators.h b/polly/include/polly/CodeGen/BlockGenerators.h index 85f864d1349..da926df875f 100644 --- a/polly/include/polly/CodeGen/BlockGenerators.h +++ b/polly/include/polly/CodeGen/BlockGenerators.h @@ -67,7 +67,7 @@ public: /// @see The ScalarMap and PHIOpMap member. using ScalarAllocaMapTy = DenseMap<AssertingVH<Value>, AssertingVH<Value>>; - typedef llvm::DenseMap<const llvm::Value *, llvm::Value *> ValueMapT; + typedef llvm::DenseMap<llvm::Value *, llvm::Value *> ValueMapT; typedef llvm::SmallVector<ValueMapT, 8> VectorValueMapT; /// @brief Simple vector of instructions to store escape users. @@ -429,9 +429,8 @@ protected: /// /// @returns o A newly synthesized value. /// o NULL, if synthesizing the value failed. - Value *trySynthesizeNewValue(ScopStmt &Stmt, const Value *Old, - ValueMapT &BBMap, LoopToScevMapT &LTS, - Loop *L) const; + Value *trySynthesizeNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap, + LoopToScevMapT &LTS, Loop *L) const; /// @brief Get the new version of a value. /// @@ -457,10 +456,10 @@ protected: /// @returns o The old value, if it is still valid. /// o The new value, if available. /// o NULL, if no value is found. - Value *getNewValue(ScopStmt &Stmt, const Value *Old, ValueMapT &BBMap, + Value *getNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap, LoopToScevMapT &LTS, Loop *L) const; - void copyInstScalar(ScopStmt &Stmt, const Instruction *Inst, ValueMapT &BBMap, + void copyInstScalar(ScopStmt &Stmt, Instruction *Inst, ValueMapT &BBMap, LoopToScevMapT &LTS); /// @brief Get the innermost loop that surrounds an instruction. @@ -474,22 +473,22 @@ protected: /// which may contain new access expressions for certain /// memory accesses. 
Value *generateLocationAccessed(ScopStmt &Stmt, const Instruction *Inst, - const Value *Pointer, ValueMapT &BBMap, + Value *Pointer, ValueMapT &BBMap, LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses); /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - Value *generateScalarLoad(ScopStmt &Stmt, const LoadInst *load, - ValueMapT &BBMap, LoopToScevMapT &LTS, + Value *generateScalarLoad(ScopStmt &Stmt, LoadInst *load, ValueMapT &BBMap, + LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses); /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - void generateScalarStore(ScopStmt &Stmt, const StoreInst *store, - ValueMapT &BBMap, LoopToScevMapT &LTS, + void generateScalarStore(ScopStmt &Stmt, StoreInst *store, ValueMapT &BBMap, + LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses); /// @brief Copy a single PHI instruction. @@ -498,7 +497,7 @@ protected: /// subclasses to handle PHIs different. /// /// @returns The nullptr as the BlockGenerator does not copy PHIs. - virtual Value *copyPHIInstruction(ScopStmt &, const PHINode *, ValueMapT &, + virtual Value *copyPHIInstruction(ScopStmt &, PHINode *, ValueMapT &, LoopToScevMapT &) { return nullptr; } @@ -522,9 +521,8 @@ protected: /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - void copyInstruction(ScopStmt &Stmt, const Instruction *Inst, - ValueMapT &BBMap, LoopToScevMapT &LTS, - isl_id_to_ast_expr *NewAccesses); + void copyInstruction(ScopStmt &Stmt, Instruction *Inst, ValueMapT &BBMap, + LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses); /// @brief Helper to get the newest version of @p ScalarValue. 
/// @@ -605,7 +603,7 @@ private: int getVectorWidth(); - Value *getVectorValue(ScopStmt &Stmt, const Value *Old, ValueMapT &VectorMap, + Value *getVectorValue(ScopStmt &Stmt, Value *Old, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, Loop *L); Type *getVectorPtrTy(const Value *V, int Width); @@ -628,7 +626,7 @@ private: /// @param NewAccesses A map from memory access ids to new ast /// expressions, which may contain new access /// expressions for certain memory accesses. - Value *generateStrideOneLoad(ScopStmt &Stmt, const LoadInst *Load, + Value *generateStrideOneLoad(ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses, bool NegativeStride); @@ -646,7 +644,7 @@ private: /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - Value *generateStrideZeroLoad(ScopStmt &Stmt, const LoadInst *Load, + Value *generateStrideZeroLoad(ScopStmt &Stmt, LoadInst *Load, ValueMapT &BBMap, __isl_keep isl_id_to_ast_expr *NewAccesses); @@ -664,34 +662,34 @@ private: /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - Value *generateUnknownStrideLoad(ScopStmt &Stmt, const LoadInst *Load, + Value *generateUnknownStrideLoad(ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses); /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. 
- void generateLoad(ScopStmt &Stmt, const LoadInst *Load, ValueMapT &VectorMap, + void generateLoad(ScopStmt &Stmt, LoadInst *Load, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses); - void copyUnaryInst(ScopStmt &Stmt, const UnaryInstruction *Inst, + void copyUnaryInst(ScopStmt &Stmt, UnaryInstruction *Inst, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps); - void copyBinaryInst(ScopStmt &Stmt, const BinaryOperator *Inst, + void copyBinaryInst(ScopStmt &Stmt, BinaryOperator *Inst, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps); /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - void copyStore(ScopStmt &Stmt, const StoreInst *Store, ValueMapT &VectorMap, + void copyStore(ScopStmt &Stmt, StoreInst *Store, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses); /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - void copyInstScalarized(ScopStmt &Stmt, const Instruction *Inst, + void copyInstScalarized(ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses); @@ -703,8 +701,8 @@ private: /// @param NewAccesses A map from memory access ids to new ast expressions, /// which may contain new access expressions for certain /// memory accesses. - void copyInstruction(ScopStmt &Stmt, const Instruction *Inst, - ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, + void copyInstruction(ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap, + VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses); /// @param NewAccesses A map from memory access ids to new ast expressions, @@ -797,7 +795,7 @@ private: /// @param LTS A map from old loops to new induction variables as SCEVs. 
/// /// @returns The copied instruction or nullptr if no copy was made. - virtual Value *copyPHIInstruction(ScopStmt &Stmt, const PHINode *Inst, + virtual Value *copyPHIInstruction(ScopStmt &Stmt, PHINode *Inst, ValueMapT &BBMap, LoopToScevMapT &LTS) override; }; diff --git a/polly/lib/CodeGen/BlockGenerators.cpp b/polly/lib/CodeGen/BlockGenerators.cpp index 62eb868b943..b170fb3a64f 100644 --- a/polly/lib/CodeGen/BlockGenerators.cpp +++ b/polly/lib/CodeGen/BlockGenerators.cpp @@ -99,7 +99,7 @@ BlockGenerator::BlockGenerator(PollyIRBuilder &B, LoopInfo &LI, EntryBB(nullptr), PHIOpMap(PHIOpMap), ScalarMap(ScalarMap), EscapeMap(EscapeMap), GlobalMap(GlobalMap) {} -Value *BlockGenerator::trySynthesizeNewValue(ScopStmt &Stmt, const Value *Old, +Value *BlockGenerator::trySynthesizeNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap, LoopToScevMapT &LTS, Loop *L) const { @@ -129,9 +129,8 @@ Value *BlockGenerator::trySynthesizeNewValue(ScopStmt &Stmt, const Value *Old, return nullptr; } -Value *BlockGenerator::getNewValue(ScopStmt &Stmt, const Value *Old, - ValueMapT &BBMap, LoopToScevMapT &LTS, - Loop *L) const { +Value *BlockGenerator::getNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap, + LoopToScevMapT &LTS, Loop *L) const { // We assume constants never change. // This avoids map lookups for many calls to this function. if (isa<Constant>(Old)) @@ -167,7 +166,7 @@ Value *BlockGenerator::getNewValue(ScopStmt &Stmt, const Value *Old, return nullptr; } -void BlockGenerator::copyInstScalar(ScopStmt &Stmt, const Instruction *Inst, +void BlockGenerator::copyInstScalar(ScopStmt &Stmt, Instruction *Inst, ValueMapT &BBMap, LoopToScevMapT &LTS) { // We do not generate debug intrinsics as we did not investigate how to // copy them correctly. 
At the current state, they just crash the code @@ -200,8 +199,8 @@ void BlockGenerator::copyInstScalar(ScopStmt &Stmt, const Instruction *Inst, } Value *BlockGenerator::generateLocationAccessed( - ScopStmt &Stmt, const Instruction *Inst, const Value *Pointer, - ValueMapT &BBMap, LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) { + ScopStmt &Stmt, const Instruction *Inst, Value *Pointer, ValueMapT &BBMap, + LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) { const MemoryAccess &MA = Stmt.getAccessFor(Inst); isl_ast_expr *AccessExpr = isl_id_to_ast_expr_get(NewAccesses, MA.getId()); @@ -234,7 +233,7 @@ Loop *BlockGenerator::getLoopForInst(const llvm::Instruction *Inst) { return LI.getLoopFor(Inst->getParent()); } -Value *BlockGenerator::generateScalarLoad(ScopStmt &Stmt, const LoadInst *Load, +Value *BlockGenerator::generateScalarLoad(ScopStmt &Stmt, LoadInst *Load, ValueMapT &BBMap, LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) { if (Value *PreloadLoad = GlobalMap.lookup(Load)) @@ -253,7 +252,7 @@ Value *BlockGenerator::generateScalarLoad(ScopStmt &Stmt, const LoadInst *Load, return ScalarLoad; } -void BlockGenerator::generateScalarStore(ScopStmt &Stmt, const StoreInst *Store, +void BlockGenerator::generateScalarStore(ScopStmt &Stmt, StoreInst *Store, ValueMapT &BBMap, LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) { auto *Pointer = Store->getPointerOperand(); @@ -269,7 +268,7 @@ void BlockGenerator::generateScalarStore(ScopStmt &Stmt, const StoreInst *Store, Builder.CreateAlignedStore(ValueOperand, NewPointer, Store->getAlignment()); } -void BlockGenerator::copyInstruction(ScopStmt &Stmt, const Instruction *Inst, +void BlockGenerator::copyInstruction(ScopStmt &Stmt, Instruction *Inst, ValueMapT &BBMap, LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) { @@ -642,7 +641,7 @@ VectorBlockGenerator::VectorBlockGenerator(BlockGen &BlockGen, assert(Schedule && "No statement domain provided"); } -Value *VectorBlockGenerator::getVectorValue(ScopStmt 
&Stmt, const Value *Old, +Value *VectorBlockGenerator::getVectorValue(ScopStmt &Stmt, Value *Old, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, Loop *L) { @@ -674,7 +673,7 @@ Type *VectorBlockGenerator::getVectorPtrTy(const Value *Val, int Width) { } Value *VectorBlockGenerator::generateStrideOneLoad( - ScopStmt &Stmt, const LoadInst *Load, VectorValueMapT &ScalarMaps, + ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses, bool NegativeStride = false) { unsigned VectorWidth = getVectorWidth(); auto *Pointer = Load->getPointerOperand(); @@ -705,7 +704,7 @@ Value *VectorBlockGenerator::generateStrideOneLoad( } Value *VectorBlockGenerator::generateStrideZeroLoad( - ScopStmt &Stmt, const LoadInst *Load, ValueMapT &BBMap, + ScopStmt &Stmt, LoadInst *Load, ValueMapT &BBMap, __isl_keep isl_id_to_ast_expr *NewAccesses) { auto *Pointer = Load->getPointerOperand(); Type *VectorPtrType = getVectorPtrTy(Pointer, 1); @@ -728,7 +727,7 @@ Value *VectorBlockGenerator::generateStrideZeroLoad( } Value *VectorBlockGenerator::generateUnknownStrideLoad( - ScopStmt &Stmt, const LoadInst *Load, VectorValueMapT &ScalarMaps, + ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses ) { @@ -752,7 +751,7 @@ Value *VectorBlockGenerator::generateUnknownStrideLoad( } void VectorBlockGenerator::generateLoad( - ScopStmt &Stmt, const LoadInst *Load, ValueMapT &VectorMap, + ScopStmt &Stmt, LoadInst *Load, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) { if (Value *PreloadLoad = GlobalMap.lookup(Load)) { VectorMap[Load] = Builder.CreateVectorSplat(getVectorWidth(), PreloadLoad, @@ -786,8 +785,7 @@ void VectorBlockGenerator::generateLoad( VectorMap[Load] = NewLoad; } -void VectorBlockGenerator::copyUnaryInst(ScopStmt &Stmt, - const UnaryInstruction *Inst, +void VectorBlockGenerator::copyUnaryInst(ScopStmt &Stmt, UnaryInstruction *Inst, ValueMapT 
&VectorMap, VectorValueMapT &ScalarMaps) { int VectorWidth = getVectorWidth(); @@ -801,8 +799,7 @@ void VectorBlockGenerator::copyUnaryInst(ScopStmt &Stmt, VectorMap[Inst] = Builder.CreateCast(Cast->getOpcode(), NewOperand, DestType); } -void VectorBlockGenerator::copyBinaryInst(ScopStmt &Stmt, - const BinaryOperator *Inst, +void VectorBlockGenerator::copyBinaryInst(ScopStmt &Stmt, BinaryOperator *Inst, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps) { Loop *L = getLoopForInst(Inst); @@ -819,7 +816,7 @@ void VectorBlockGenerator::copyBinaryInst(ScopStmt &Stmt, } void VectorBlockGenerator::copyStore( - ScopStmt &Stmt, const StoreInst *Store, ValueMapT &VectorMap, + ScopStmt &Stmt, StoreInst *Store, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) { const MemoryAccess &Access = Stmt.getAccessFor(Store); @@ -893,7 +890,7 @@ bool VectorBlockGenerator::extractScalarValues(const Instruction *Inst, } void VectorBlockGenerator::copyInstScalarized( - ScopStmt &Stmt, const Instruction *Inst, ValueMapT &VectorMap, + ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) { bool HasVectorOperand; int VectorWidth = getVectorWidth(); @@ -921,7 +918,7 @@ void VectorBlockGenerator::copyInstScalarized( int VectorBlockGenerator::getVectorWidth() { return VLTS.size(); } void VectorBlockGenerator::copyInstruction( - ScopStmt &Stmt, const Instruction *Inst, ValueMapT &VectorMap, + ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap, VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) { // Terminator instructions control the control flow. They are explicitly // expressed in the clast and do not need to be copied. 
@@ -1215,7 +1212,7 @@ void RegionGenerator::addOperandToPHI(ScopStmt &Stmt, const PHINode *PHI, PHICopy->addIncoming(OpCopy, BBCopy); } -Value *RegionGenerator::copyPHIInstruction(ScopStmt &Stmt, const PHINode *PHI, +Value *RegionGenerator::copyPHIInstruction(ScopStmt &Stmt, PHINode *PHI, ValueMapT &BBMap, LoopToScevMapT &LTS) { unsigned NumIncoming = PHI->getNumIncomingValues();

