diff options
| -rw-r--r-- | llvm/lib/Target/ARM/ARMFastISel.cpp | 21 | 
1 file changed, 13 insertions(+), 8 deletions(-)
diff --git a/llvm/lib/Target/ARM/ARMFastISel.cpp b/llvm/lib/Target/ARM/ARMFastISel.cpp
index cbd96a72e07..b824d118bc2 100644
--- a/llvm/lib/Target/ARM/ARMFastISel.cpp
+++ b/llvm/lib/Target/ARM/ARMFastISel.cpp
@@ -163,6 +163,7 @@ class ARMFastISel final : public FastISel {
 
     // Utility routines.
   private:
+    bool isPositionIndependent() const;
     bool isTypeLegal(Type *Ty, MVT &VT);
     bool isLoadTypeLegal(Type *Ty, MVT &VT);
     bool ARMEmitCmp(const Value *Src1Value, const Value *Src2Value,
@@ -575,6 +576,10 @@ unsigned ARMFastISel::ARMMaterializeInt(const Constant *C, MVT VT) {
   return ResultReg;
 }
 
+bool ARMFastISel::isPositionIndependent() const {
+  return TM.getRelocationModel() == Reloc::PIC_;
+}
+
 unsigned ARMFastISel::ARMMaterializeGV(const GlobalValue *GV, MVT VT) {
   // For now 32-bit only.
   if (VT != MVT::i32 || GV->isThreadLocal()) return 0;
@@ -590,16 +595,17 @@ unsigned ARMFastISel::ARMMaterializeGV(const GlobalValue *GV, MVT VT) {
   bool IsThreadLocal = GVar && GVar->isThreadLocal();
   if (!Subtarget->isTargetMachO() && IsThreadLocal) return 0;
 
+  bool IsPositionIndependent = isPositionIndependent();
   // Use movw+movt when possible, it avoids constant pool entries.
   // Non-darwin targets only support static movt relocations in FastISel.
   if (Subtarget->useMovt(*FuncInfo.MF) &&
-      (Subtarget->isTargetMachO() || RelocM == Reloc::Static)) {
+      (Subtarget->isTargetMachO() || !IsPositionIndependent)) {
     unsigned Opc;
     unsigned char TF = 0;
     if (Subtarget->isTargetMachO())
       TF = ARMII::MO_NONLAZY;
 
-    if (RelocM == Reloc::PIC_)
+    if (IsPositionIndependent)
       Opc = isThumb2 ? ARM::t2MOV_ga_pcrel : ARM::MOV_ga_pcrel;
     else
       Opc = isThumb2 ? ARM::t2MOVi32imm : ARM::MOVi32imm;
@@ -613,12 +619,11 @@ unsigned ARMFastISel::ARMMaterializeGV(const GlobalValue *GV, MVT VT) {
       Align = DL.getTypeAllocSize(GV->getType());
     }
 
-    if (Subtarget->isTargetELF() && RelocM == Reloc::PIC_)
+    if (Subtarget->isTargetELF() && IsPositionIndependent)
       return ARMLowerPICELF(GV, Align, VT);
 
     // Grab index.
-    unsigned PCAdj = (RelocM != Reloc::PIC_) ? 0 :
-      (Subtarget->isThumb() ? 4 : 8);
+    unsigned PCAdj = IsPositionIndependent ? (Subtarget->isThumb() ? 4 : 8) : 0;
     unsigned Id = AFI->createPICLabelUId();
     ARMConstantPoolValue *CPV = ARMConstantPoolConstant::Create(GV, Id,
                                                                 ARMCP::CPValue,
@@ -628,10 +633,10 @@ unsigned ARMFastISel::ARMMaterializeGV(const GlobalValue *GV, MVT VT) {
     // Load value.
     MachineInstrBuilder MIB;
     if (isThumb2) {
-      unsigned Opc = (RelocM!=Reloc::PIC_) ? ARM::t2LDRpci : ARM::t2LDRpci_pic;
+      unsigned Opc = IsPositionIndependent ? ARM::t2LDRpci_pic : ARM::t2LDRpci;
       MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(Opc),
                     DestReg).addConstantPoolIndex(Idx);
-      if (RelocM == Reloc::PIC_)
+      if (IsPositionIndependent)
         MIB.addImm(Id);
       AddOptionalDefs(MIB);
     } else {
@@ -643,7 +648,7 @@ unsigned ARMFastISel::ARMMaterializeGV(const GlobalValue *GV, MVT VT) {
                 .addImm(0);
       AddOptionalDefs(MIB);
 
-      if (RelocM == Reloc::PIC_) {
+      if (IsPositionIndependent) {
         unsigned Opc = IsIndirect ? ARM::PICLDR : ARM::PICADD;
         unsigned NewDestReg = createResultReg(TLI.getRegClassFor(VT));
 

