Diffstat (limited to 'clang/lib/CodeGen/CGClass.cpp')
-rw-r--r-- | clang/lib/CodeGen/CGClass.cpp | 98
1 file changed, 49 insertions, 49 deletions
diff --git a/clang/lib/CodeGen/CGClass.cpp b/clang/lib/CodeGen/CGClass.cpp
index 071f8b32377..97bea977349 100644
--- a/clang/lib/CodeGen/CGClass.cpp
+++ b/clang/lib/CodeGen/CGClass.cpp
@@ -66,8 +66,8 @@ CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
     ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                      PathBegin, PathEnd);
   if (Offset.isZero())
-    return 0;
-
+    return nullptr;
+
   llvm::Type *PtrDiffTy =
   Types.ConvertType(getContext().getPointerDiffType());
@@ -114,7 +114,7 @@ ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr,
                                 CharUnits nonVirtualOffset,
                                 llvm::Value *virtualOffset) {
   // Assert that we have something to do.
-  assert(!nonVirtualOffset.isZero() || virtualOffset != 0);
+  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);
 
   // Compute the offset from the static and dynamic components.
   llvm::Value *baseOffset;
@@ -143,8 +143,8 @@ CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
   assert(PathBegin != PathEnd && "Base path should not be empty!");
 
   CastExpr::path_const_iterator Start = PathBegin;
-  const CXXRecordDecl *VBase = 0;
-
+  const CXXRecordDecl *VBase = nullptr;
+
   // Sema has done some convenient canonicalization here: if the
   // access path involved any virtual steps, the conversion path will
   // *start* with a step down to the correct virtual base subobject,
@@ -169,7 +169,7 @@ CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
     const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
     CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
     NonVirtualOffset += vBaseOffset;
-    VBase = 0; // we no longer have a virtual step
+    VBase = nullptr; // we no longer have a virtual step
   }
 
   // Get the base pointer type.
@@ -180,11 +180,11 @@ CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
   // just do a bitcast; null checks are unnecessary.
   if (NonVirtualOffset.isZero() && !VBase) {
     return Builder.CreateBitCast(Value, BasePtrTy);
-  }
-
-  llvm::BasicBlock *origBB = 0;
-  llvm::BasicBlock *endBB = 0;
-
+  }
+
+  llvm::BasicBlock *origBB = nullptr;
+  llvm::BasicBlock *endBB = nullptr;
+
   // Skip over the offset (and the vtable load) if we're supposed to
   // null-check the pointer.
   if (NullCheckValue) {
@@ -198,7 +198,7 @@ CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
   }
 
   // Compute the virtual offset.
-  llvm::Value *VirtualOffset = 0;
+  llvm::Value *VirtualOffset = nullptr;
   if (VBase) {
     VirtualOffset =
       CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
@@ -246,11 +246,11 @@ CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
     // No offset, we can just cast back.
     return Builder.CreateBitCast(Value, DerivedPtrTy);
   }
-
-  llvm::BasicBlock *CastNull = 0;
-  llvm::BasicBlock *CastNotNull = 0;
-  llvm::BasicBlock *CastEnd = 0;
-
+
+  llvm::BasicBlock *CastNull = nullptr;
+  llvm::BasicBlock *CastNotNull = nullptr;
+  llvm::BasicBlock *CastEnd = nullptr;
+
   if (NullCheckValue) {
     CastNull = createBasicBlock("cast.null");
     CastNotNull = createBasicBlock("cast.notnull");
@@ -290,7 +290,7 @@ llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD, bool Delegating) {
   if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
     // This constructor/destructor does not need a VTT parameter.
-    return 0;
+    return nullptr;
   }
 
   const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
@@ -454,7 +454,7 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF,
     switch (CGF.getEvaluationKind(T)) {
     case TEK_Scalar:
-      CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
+      CGF.EmitScalarInit(Init, /*decl*/ nullptr, LV, false);
       break;
     case TEK_Complex:
       CGF.EmitComplexExprIntoLValue(Init, LV, /*isInit*/ true);
@@ -607,7 +607,7 @@ void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
     EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
     break;
   case TEK_Aggregate: {
-    llvm::Value *ArrayIndexVar = 0;
+    llvm::Value *ArrayIndexVar = nullptr;
     if (ArrayIndexes.size()) {
       llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
@@ -777,8 +777,8 @@ namespace {
                     const VarDecl *SrcRec)
       : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
         RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
-        FirstField(0), LastField(0), FirstFieldOffset(0), LastFieldOffset(0),
-        LastAddedFieldIndex(0) { }
+        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
+        LastFieldOffset(0), LastAddedFieldIndex(0) {}
 
     static bool isMemcpyableField(FieldDecl *F) {
       Qualifiers Qual = F->getType().getQualifiers();
@@ -788,7 +788,7 @@ namespace {
     }
 
     void addMemcpyableField(FieldDecl *F) {
-      if (FirstField == 0)
+      if (!FirstField)
        addInitialField(F);
       else
        addNextField(F);
@@ -810,7 +810,7 @@ namespace {
     void emitMemcpy() {
       // Give the subclass a chance to bail out if it feels the memcpy isn't
       // worth it (e.g. Hasn't aggregated enough data).
-      if (FirstField == 0) {
+      if (!FirstField) {
         return;
       }
@@ -844,7 +844,7 @@ namespace {
     }
 
     void reset() {
-      FirstField = 0;
+      FirstField = nullptr;
     }
 
   protected:
@@ -917,7 +917,7 @@ namespace {
                                            FunctionArgList &Args) {
       if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
         return Args[Args.size() - 1];
-      return 0;
+      return nullptr;
     }
 
     // Returns true if a CXXCtorInitializer represents a member initialization
@@ -926,7 +926,7 @@ namespace {
       if (!MemcpyableCtor)
         return false;
       FieldDecl *Field = MemberInit->getMember();
-      assert(Field != 0 && "No field for member init.");
+      assert(Field && "No field for member init.");
       QualType FieldType = Field->getType();
       CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
@@ -1014,71 +1014,71 @@ namespace {
    // exists. Otherwise returns null.
    FieldDecl *getMemcpyableField(Stmt *S) {
      if (!AssignmentsMemcpyable)
-       return 0;
+       return nullptr;
      if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
        // Recognise trivial assignments.
        if (BO->getOpcode() != BO_Assign)
-         return 0;
+         return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
        if (!ME)
-         return 0;
+         return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
-         return 0;
+         return nullptr;
        Stmt *RHS = BO->getRHS();
        if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
          RHS = EC->getSubExpr();
        if (!RHS)
-         return 0;
+         return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS);
        if (dyn_cast<FieldDecl>(ME2->getMemberDecl()) != Field)
-         return 0;
+         return nullptr;
        return Field;
      } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
        CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
        if (!(MD && (MD->isCopyAssignmentOperator() ||
                     MD->isMoveAssignmentOperator()) &&
              MD->isTrivial()))
-         return 0;
+         return nullptr;
        MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
        if (!IOA)
-         return 0;
+         return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
-         return 0;
+         return nullptr;
        MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
        if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
-         return 0;
+         return nullptr;
        return Field;
      } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
        FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
        if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
-         return 0;
+         return nullptr;
        Expr *DstPtr = CE->getArg(0);
        if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
          DstPtr = DC->getSubExpr();
        UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
        if (!DUO || DUO->getOpcode() != UO_AddrOf)
-         return 0;
+         return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
        if (!ME)
-         return 0;
+         return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
-         return 0;
+         return nullptr;
        Expr *SrcPtr = CE->getArg(1);
        if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
          SrcPtr = SC->getSubExpr();
        UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
        if (!SUO || SUO->getOpcode() != UO_AddrOf)
-         return 0;
+         return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
        if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
-         return 0;
+         return nullptr;
        return Field;
      }
-     return 0;
+     return nullptr;
    }
 
    bool AssignmentsMemcpyable;
@@ -1137,7 +1137,7 @@ void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
   CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                           E = CD->init_end();
 
-  llvm::BasicBlock *BaseCtorContinueBB = 0;
+  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
   if (ClassDecl->getNumVBases() &&
       !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
     // The ABIs that don't have constructor variants need to put a branch
@@ -1374,7 +1374,7 @@ namespace {
  public:
    CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
      : ShouldDeleteCondition(ShouldDeleteCondition) {
-      assert(ShouldDeleteCondition != NULL);
+      assert(ShouldDeleteCondition != nullptr);
    }
 
    void Emit(CodeGenFunction &CGF, Flags flags) override {
@@ -1552,7 +1552,7 @@ CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
   // because of GCC extensions that permit zero-length arrays. There
   // are probably legitimate places where we could assume that this
   // doesn't happen, but it's not clear that it's worth it.
-  llvm::BranchInst *zeroCheckBranch = 0;
+  llvm::BranchInst *zeroCheckBranch = nullptr;
 
   // Optimize for a constant count.
   llvm::ConstantInt *constantCount
@@ -1870,7 +1870,7 @@ CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
     return;
 
   // Compute where to store the address point.
-  llvm::Value *VirtualOffset = 0;
+  llvm::Value *VirtualOffset = nullptr;
   CharUnits NonVirtualOffset = CharUnits::Zero();
 
   if (NeedsVirtualOffset) {
@@ -1968,7 +1968,7 @@ void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
   // Initialize the vtable pointers for this class and all of its bases.
   VisitedVirtualBasesSetTy VBases;
   InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
-                           /*NearestVBase=*/0,
+                           /*NearestVBase=*/nullptr,
                            /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                            /*BaseIsNonVirtualPrimaryBase=*/false, RD, VBases);
@@ -2177,7 +2177,7 @@ void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
   assert(MD->isFunctionTemplateSpecialization());
   const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
   FunctionTemplateDecl *CallOpTemplate = CallOp->getDescribedFunctionTemplate();
-  void *InsertPos = 0;
+  void *InsertPos = nullptr;
   FunctionDecl *CorrespondingCallOpSpecialization =
     CallOpTemplate->findSpecialization(TAL->data(), TAL->size(), InsertPos);
   assert(CorrespondingCallOpSpecialization);
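
The change is mechanical: every pointer-typed 0 or NULL in CGClass.cpp becomes the C++11 nullptr keyword, and pointer checks such as assert(Field != 0 && ...) are tightened to the equivalent boolean form, with no intended change in behavior. The diff itself does not state a rationale, but the usual motivation for this kind of cleanup is that nullptr has its own type, std::nullptr_t, which converts to any pointer type but not to int, so it cannot be silently treated as an integer during overload resolution the way a literal 0 can. The standalone sketch below illustrates that distinction; the report() overloads are purely illustrative and are not part of Clang.

#include <iostream>

// Two overloads that differ only in taking an integer vs. a pointer.
void report(int)    { std::cout << "int overload\n"; }
void report(char *) { std::cout << "char* overload\n"; }

int main() {
  // A literal 0 is an int first, so overload resolution prefers the int
  // overload even when a null pointer was intended.
  report(0);        // prints "int overload"

  // nullptr has type std::nullptr_t and converts only to pointer types,
  // so the pointer overload is selected unambiguously.
  report(nullptr);  // prints "char* overload"

  // The same spelling the diff above uses for initializations and null tests.
  char *p = nullptr;
  return p == nullptr ? 0 : 1;
}

Because a pointer-typed 0 and nullptr yield the same null pointer value, a replacement like the one in this diff is expected to be purely cosmetic.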