// clang API Documentation (scraped doxygen page header — not part of the original source)
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/Analyses/Consumed.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/ThreadSafety.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <deque>
#include <iterator>
#include <vector>

using namespace clang;

//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//

namespace {
  /// Callback for the reachable-code analysis: turns each unreachable-code
  /// finding into a Sema diagnostic, choosing a more specific diagnostic ID
  /// for the kinds the analysis distinguishes (break, return, loop increment).
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(reachable_code::UnreachableKind UK,
                           SourceLocation L,
                           SourceRange SilenceableCondVal,
                           SourceRange R1,
                           SourceRange R2) override {
      // Pick the most specific warning available for this kind of
      // unreachable code; UK_Other keeps the generic warn_unreachable.
      unsigned diag = diag::warn_unreachable;
      switch (UK) {
        case reachable_code::UK_Break:
          diag = diag::warn_unreachable_break;
          break;
        case reachable_code::UK_Return:
          diag = diag::warn_unreachable_return;
          break;
        case reachable_code::UK_Loop_Increment:
          diag = diag::warn_unreachable_loop_increment;
          break;
        case reachable_code::UK_Other:
          break;
      }

      S.Diag(L, diag) << R1 << R2;

      // If the analysis identified a condition value that makes the code
      // unreachable, emit a note with a fixit that parenthesizes it behind a
      // "/* DISABLES CODE */" marker to silence the warning.
      SourceLocation Open = SilenceableCondVal.getBegin();
      if (Open.isValid()) {
        SourceLocation Close = SilenceableCondVal.getEnd();
        Close = S.getLocForEndOfToken(Close);
        if (Close.isValid()) {
          S.Diag(Open, diag::note_unreachable_silence)
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
            << FixItHint::CreateInsertion(Close, ")");
        }
      }
    }
  };
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  // As a heuristic prune all diagnostics not in the main file.  Currently
  // the majority of warnings in headers are false positives.  These
  // are largely caused by configuration state, e.g. preprocessor
  // defined code, etc.
  //
  // Note that this is also a performance optimization.  Analyzing
  // headers many times can be expensive.
  if (!S.getSourceManager().isInMainFile(AC.getDecl()->getLocStart()))
    return;

  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
}

/// \brief Warn on logical operator errors in CFGBuilder
class LogicalErrorHandler : public CFGCallback {
  Sema &S;

public:
  LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}

  /// Returns true if \p E or any of its sub-expressions has a location
  /// inside a macro expansion.  The diagnostics below are suppressed in
  /// that case, since macro-expanded comparisons are often intentionally
  /// tautological for some expansions.
  static bool HasMacroID(const Expr *E) {
    if (E->getExprLoc().isMacroID())
      return true;

    // Recurse to children.
    for (ConstStmtRange SubStmts = E->children(); SubStmts; ++SubStmts)
      if (*SubStmts)
        if (const Expr *SubExpr = dyn_cast<Expr>(*SubStmts))
          if (HasMacroID(SubExpr))
            return true;

    return false;
  }

  void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
      << DiagRange << isAlwaysTrue;
  }

  void compareBitwiseEquality(const BinaryOperator *B, bool isAlwaysTrue) {
    if (HasMacroID(B))
      return;

    SourceRange DiagRange = B->getSourceRange();
    S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
      << DiagRange << isAlwaysTrue;
  }
};


//===----------------------------------------------------------------------===//
// Check for infinite self-recursion in functions
//===----------------------------------------------------------------------===//

// All blocks are in one of three states.  States are ordered so that blocks
// can only move to higher states.
enum RecursiveState {
  FoundNoPath,
  FoundPath,
  FoundPathWithNoRecursiveCall
};

/// Recursively propagates \p State from \p Block to its successors.  The
/// walk starts at the entry block with FoundPathWithNoRecursiveCall; when a
/// block is found to contain a (guaranteed) recursive call to \p FD, the
/// successors are visited with FoundPath instead.
static void checkForFunctionCall(Sema &S, const FunctionDecl *FD,
                                 CFGBlock &Block, unsigned ExitID,
                                 llvm::SmallVectorImpl<RecursiveState> &States,
                                 RecursiveState State) {
  unsigned ID = Block.getBlockID();

  // A block's state can only move to a higher state.
  if (States[ID] >= State)
    return;

  States[ID] = State;

  // Found a path to the exit node without a recursive call.
  if (ID == ExitID && State == FoundPathWithNoRecursiveCall)
    return;

  if (State == FoundPathWithNoRecursiveCall) {
    // If the current state is FoundPathWithNoRecursiveCall, the successors
    // will be either FoundPathWithNoRecursiveCall or FoundPath.  To determine
    // which, process all the Stmt's in this block to find any recursive calls.
    for (const auto &B : Block) {
      if (B.getKind() != CFGElement::Statement)
        continue;

      const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
      if (CE && CE->getCalleeDecl() &&
          CE->getCalleeDecl()->getCanonicalDecl() == FD) {

        // Skip function calls which are qualified with a templated class.
        if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(
                CE->getCallee()->IgnoreParenImpCasts())) {
          if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
            if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
                isa<TemplateSpecializationType>(NNS->getAsType())) {
              continue;
            }
          }
        }

        // A member call only counts as guaranteed recursion when it is made
        // on 'this' or the method is non-virtual; a virtual call through
        // another object may dispatch to an override.
        if (const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE)) {
          if (isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
              !MCE->getMethodDecl()->isVirtual()) {
            State = FoundPath;
            break;
          }
        } else {
          State = FoundPath;
          break;
        }
      }
    }
  }

  for (CFGBlock::succ_iterator I = Block.succ_begin(), E = Block.succ_end();
       I != E; ++I)
    if (*I)
      checkForFunctionCall(S, FD, **I, ExitID, States, State);
}

/// Emits warn_infinite_recursive_function on \p Body when every path from
/// the CFG entry to the exit contains a recursive call to \p FD.
static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
                                   const Stmt *Body,
                                   AnalysisDeclContext &AC) {
  FD = FD->getCanonicalDecl();

  // Only run on non-templated functions and non-templated members of
  // templated classes.
  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
      FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
    return;

  CFG *cfg = AC.getCFG();
  if (!cfg) return;

  // If the exit block is unreachable, skip processing the function.
  if (cfg->getExit().pred_empty())
    return;

  // Mark all nodes as FoundNoPath, then begin processing the entry block.
  llvm::SmallVector<RecursiveState, 16> states(cfg->getNumBlockIDs(),
                                               FoundNoPath);
  checkForFunctionCall(S, FD, cfg->getEntry(), cfg->getExit().getBlockID(),
                       states, FoundPathWithNoRecursiveCall);

  // Check that the exit block is reachable.  This prevents triggering the
  // warning on functions that do not terminate.
  if (states[cfg->getExit().getBlockID()] == FoundPath)
    S.Diag(Body->getLocStart(), diag::warn_infinite_recursive_function);
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

enum ControlFlowKind {
  UnknownFallThrough,
  NeverFallThrough,
  MaybeFallThrough,
  AlwaysFallThrough,
  NeverFallThroughOrReturn
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          if (B->getTerminator() && isa<CXXTryStmt>(B->getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
       I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return.  They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last statement-bearing element of the block.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be
  // more accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}

namespace {

/// Bundle of diagnostic IDs used when reporting a body that falls off its
/// end, specialized via the factory functions below for functions, blocks,
/// and lambdas (blocks/lambdas use hard errors for the noreturn cases).
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  enum { Function, Block, Lambda } funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  /// Returns true when there is nothing to diagnose for this body: for
  /// functions, when every relevant warning is ignored at FuncLoc; for
  /// blocks/lambdas, when the body is void and not noreturn.  Used to skip
  /// the CFG-based analysis entirely.
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
                          FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
                          FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
    }

    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};

}

/// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  // Determine return-type/noreturn properties from the kind of declaration.
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks the signature lives on the BlockExpr's (pointer) type.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;

  SourceLocation LBrace = Body->getLocStart(), RBrace = Body->getLocEnd();
  // Either in a function body compound statement, or a function-try-block.
  // Dispatch on the computed fall-through kind; diagnostics are placed at
  // the closing brace (fall-off) or opening brace (noreturn suggestion).
  switch (CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
      if (HasNoReturn)
        S.Diag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        S.Diag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
      break;
    case AlwaysFallThrough:
      if (HasNoReturn)
        S.Diag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        S.Diag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
      break;
    case NeverFallThroughOrReturn:
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;

public:
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    : EvaluatedExprVisitor<ContainsReference>(Context),
      FoundReference(false), Needle(Needle) {}

  void VisitExpr(Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;

    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  }

  void VisitDeclRefExpr(DeclRefExpr *E) {
    // Identity comparison: we are looking for this exact use, not any
    // reference to the same declaration.
    if (E == Needle)
      FoundReference = true;
    else
      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
}

/// Emits a note suggesting how to initialize \p VD and returns true if a
/// fixit was produced: "__block " for block-pointer variables without the
/// attribute, otherwise a zero-initializer when one can be formed for the
/// type.  Returns false (no note) if the variable already has an
/// initializer, ends inside a macro, or no initializer string is available.
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  QualType VariableTy = VD->getType().getCanonicalType();
  if (VariableTy->isBlockPointerType() &&
      !VD->hasAttr<BlocksAttr>()) {
    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
      << VD->getDeclName()
      << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
    return true;
  }

  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return false;

  // Don't suggest a fixit inside macros.
  if (VD->getLocEnd().isMacroID())
    return false;

  SourceLocation Loc = S.getLocForEndOfToken(VD->getLocEnd());

  // Suggest possible initialization (if any).
  std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
  if (Init.empty())
    return false;

  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
    << FixItHint::CreateInsertion(Loc, Init);
  return true;
}

/// Create a fixit to remove an if-like statement, on the assumption that its
/// condition is CondVal.
static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
                          const Stmt *Else, bool CondVal,
                          FixItHint &Fixit1, FixItHint &Fixit2) {
  if (CondVal) {
    // If condition is always true, remove all but the 'then'.
    Fixit1 = FixItHint::CreateRemoval(
        CharSourceRange::getCharRange(If->getLocStart(),
                                      Then->getLocStart()));
    if (Else) {
      // Remove everything from just after the 'then' (i.e. the 'else'
      // keyword) through the end of the 'else' branch.
      SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
          Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
      Fixit2 = FixItHint::CreateRemoval(
          SourceRange(ElseKwLoc, Else->getLocEnd()));
    }
  } else {
    // If condition is always false, remove all but the 'else'.
    if (Else)
      Fixit1 = FixItHint::CreateRemoval(
          CharSourceRange::getCharRange(If->getLocStart(),
                                        Else->getLocStart()));
    else
      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
  }
}

/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    // Definitely-uninitialized use: warn at the use site and stop.
    S.Diag(Use.getUser()->getLocStart(), diag::warn_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Uninitialized whenever the use is reached straight after the
    // declaration (4) or after a call (5); point at the declaration and
    // note the use.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
      << VD->getSourceRange();
    S.Diag(Use.getUser()->getLocStart(), diag::note_uninit_var_use)
      << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this.  Just fall back to 'may be used
      // uninitialized'.  FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
                                                      BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Warn at the branch condition, note the use, and (when we know how)
    // offer a fixit that removes the dead condition.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getLocStart(), diag::note_uninit_var_use)
      << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch could be reported specifically: fall back to the generic
  // 'may be used uninitialized' warning at the use site.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getLocStart(), diag::warn_maybe_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << Use.getUser()->getSourceRange();
}

/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable.  This manages the different forms of diagnostic
/// emitted for particular types of uses.  Returns true if the use was
/// diagnosed as a warning.  If a particular use is one we omit warnings for,
/// returns false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization.  We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized.
    // Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      // Exact self-init ('int x = x;') is deliberately not reported unless
      // the caller asked for it.
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // A reference to the variable buried inside its own initializer gets
      // the specialized self-reference diagnostic.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(const_cast<Expr*>(Initializer));
      if (CR.doesContainReference()) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // The only other user form handled here is a block capturing the
    // variable.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getLocStart(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
        << VD->getDeclName();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}

namespace {
/// Collects the switch statements and fall-through-annotated statements
/// (AttributedStmt) of a body, and tracks which CFG blocks are reachable, in
/// support of the fall-through diagnostics (see
/// warn_fallthrough_attr_unreachable below).
class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
public:
  FallthroughMapper(Sema &S)
    : FoundSwitchStatements(false),
      S(S) {
  }

  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Removes \p Stmt from the pending fall-through annotations; asserts it
  /// was actually recorded.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

  const AttrStmts &getFallthroughStmts() const {
    return FallthroughStmts;
  }

  /// Breadth-first walk from the entry block (and from every case-labelled
  /// block) filling ReachableBlocks.
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(&Cfg->getEntry());
    BlockQueue.push_back(&Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    // Seed the worklist with every switch-case-labeled block, then BFS the
    // successor graph to compute the reachable set.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B))
        BlockQueue.push_back(B);
    }

    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (CFGBlock::const_succ_iterator I = P->succ_begin(),
                                         E = P->succ_end();
           I != E; ++I) {
        // insert() doubles as the "newly added" test here.
        if (*I && ReachableBlocks.insert(*I))
          BlockQueue.push_back(*I);
      }
    }
  }

  // Walk the predecessors of a case-labeled block B and classify each entry
  // into it: benign (switch dispatch, empty preceding label), annotated
  // fall-through, or unannotated fall-through. Returns true if at least one
  // unannotated fall-through into B was found; AnnotatedCnt receives the
  // number of annotated ones.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P) continue;

      const Stmt *Term = P->getTerminator();
      if (Term && isa<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(P)) {
        // The predecessor is unreachable; still scan it for a fall-through
        // annotation so we can report that the annotation itself can never
        // take effect.
        for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
                                              ElemEnd = P->rend();
             ElemIt != ElemEnd; ++ElemIt) {
          if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              S.Diag(AS->getLocStart(),
                     diag::warn_fallthrough_attr_unreachable);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //   A a;  // A has a destructor.
        //   break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(*P);
      if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
        markFallthroughVisited(AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(P->pred_begin(), P->pred_end(),
                  std::back_inserter(BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // RecursiveASTVisitor setup.
  bool shouldWalkTypesOfTypeLocs() const { return false; }

  bool VisitAttributedStmt(AttributedStmt *S) {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaBody(LambdaExpr *LE) { return true; }

private:

  // Returns S as an AttributedStmt if it carries a FallThroughAttr,
  // otherwise null. Accepts null input.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
      if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  // Last executable statement of a block: the terminator if present,
  // otherwise the last CFGStmt element.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminator())
      return Term;
    for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                          ElemEnd = B.rend();
         ElemIt != ElemEnd; ++ElemIt) {
      if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
        return CS->getStmt();
    }
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {}  case Y:
    //   case X: ;   case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
      if (!isa<SwitchCase>(SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;
  AttrStmts FallthroughStmts;
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
}

static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  // Only perform this analysis when using C++11.  There is no good workflow
  // for this warning when not using C++11.  There is no good way to silence
  // the warning (no attribute is available) unless we are using C++11's support
  // for generalized attributes.  One could use pragmas to silence the warning,
  // but as a general solution that is gross and not in the spirit of this
  // warning.
  //
  // NOTE: This is an intermediate solution.  There are on-going discussions on
  // how to properly support this warning outside of C++11 with an annotation.
  if (!AC.getASTContext().getLangOpts().CPlusPlus11)
    return;

  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode only functions that already use the attribute
  // somewhere are checked.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
    const CFGBlock *B = *I;
    const Stmt *Label = B->getLabel();

    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt))
      continue;

    S.Diag(Label->getLocStart(),
        PerFunction ? diag::warn_unannotated_fallthrough_per_function
                    : diag::warn_unannotated_fallthrough);

    // When no annotation exists anywhere on the edge, suggest fixits --
    // unless the label comes from a macro expansion, where an insertion
    // would land in the wrong place.
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getLocStart();
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus11) {
        const Stmt *Term = B->getTerminator();
        // Skip empty cases.
        while (B->empty() && !Term && B->succ_size() == 1) {
          B = *B->succ_begin();
          Term = B->getTerminator();
        }
        if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
          Preprocessor &PP = S.getPreprocessor();
          TokenValue Tokens[] = {
            tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
            tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
            tok::r_square, tok::r_square
          };
          // Prefer a user macro that expands to the attribute, if one exists.
          StringRef AnnotationSpelling = "[[clang::fallthrough]]";
          StringRef MacroName = PP.getLastMacroWithSpelling(L, Tokens);
          if (!MacroName.empty())
            AnnotationSpelling = MacroName;
          SmallString<64> TextToInsert(AnnotationSpelling);
          TextToInsert += "; ";
          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
              AnnotationSpelling <<
              FixItHint::CreateInsertion(L, TextToInsert);
        }
      }
      S.Diag(L, diag::note_insert_break_fixit) <<
          FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation never consumed by checkFallThroughIntoBlock is misplaced.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
}

/// Returns true if S or one of its ancestors (per the ParentMap) is a loop
/// statement. A do-while whose condition constant-folds to false is not
/// treated as a loop; a condition that cannot be evaluated conservatively is.
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
                     const Stmt *S) {
  assert(S);

  do {
    switch (S->getStmtClass()) {
    case Stmt::ForStmtClass:
    case Stmt::WhileStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::ObjCForCollectionStmtClass:
      return true;
    case Stmt::DoStmtClass: {
      const Expr *Cond = cast<DoStmt>(S)->getCond();
      llvm::APSInt Val;
      if (!Cond->EvaluateAsInt(Val, Ctx))
        return true;
      return Val.getBoolValue();
    }
    default:
      break;
    }
  } while ((S = PM.getParent(S)));

  return false;
}


/// Diagnose repeated reads of the same weak object within a function body
/// (warn_arc_repeated_use_of_weak / warn_arc_possible_repeated_use_of_weak).
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef
      sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Parameters are not considered "changed in the loop" locals.
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  std::sort(UsesByStmt.begin(), UsesByStmt.end(),
            [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
    return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(),
                                        RHS.first->getLocStart());
  });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *D = Key.getProperty();
    if (isa<VarDecl>(D))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(D))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(D))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(D))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Show the first time the object was read.
    S.Diag(FirstRead->getLocStart(), DiagKind)
      << int(ObjectKind) << D << int(FunctionKind)
      << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
01312 for (const auto &Use : Uses) { 01313 if (Use.getUseExpr() == FirstRead) 01314 continue; 01315 S.Diag(Use.getUseExpr()->getLocStart(), 01316 diag::note_arc_weak_also_accessed_here) 01317 << Use.getUseExpr()->getSourceRange(); 01318 } 01319 } 01320 } 01321 01322 namespace { 01323 class UninitValsDiagReporter : public UninitVariablesHandler { 01324 Sema &S; 01325 typedef SmallVector<UninitUse, 2> UsesVec; 01326 typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType; 01327 // Prefer using MapVector to DenseMap, so that iteration order will be 01328 // the same as insertion order. This is needed to obtain a deterministic 01329 // order of diagnostics when calling flushDiagnostics(). 01330 typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap; 01331 UsesMap *uses; 01332 01333 public: 01334 UninitValsDiagReporter(Sema &S) : S(S), uses(nullptr) {} 01335 ~UninitValsDiagReporter() { 01336 flushDiagnostics(); 01337 } 01338 01339 MappedType &getUses(const VarDecl *vd) { 01340 if (!uses) 01341 uses = new UsesMap(); 01342 01343 MappedType &V = (*uses)[vd]; 01344 if (!V.getPointer()) 01345 V.setPointer(new UsesVec()); 01346 01347 return V; 01348 } 01349 01350 void handleUseOfUninitVariable(const VarDecl *vd, 01351 const UninitUse &use) override { 01352 getUses(vd).getPointer()->push_back(use); 01353 } 01354 01355 void handleSelfInit(const VarDecl *vd) override { 01356 getUses(vd).setInt(true); 01357 } 01358 01359 void flushDiagnostics() { 01360 if (!uses) 01361 return; 01362 01363 for (const auto &P : *uses) { 01364 const VarDecl *vd = P.first; 01365 const MappedType &V = P.second; 01366 01367 UsesVec *vec = V.getPointer(); 01368 bool hasSelfInit = V.getInt(); 01369 01370 // Specially handle the case where we have uses of an uninitialized 01371 // variable, but the root cause is an idiomatic self-init. We want 01372 // to report the diagnostic at the self-init since that is the root cause. 
01373 if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec)) 01374 DiagnoseUninitializedUse(S, vd, 01375 UninitUse(vd->getInit()->IgnoreParenCasts(), 01376 /* isAlwaysUninit */ true), 01377 /* alwaysReportSelfInit */ true); 01378 else { 01379 // Sort the uses by their SourceLocations. While not strictly 01380 // guaranteed to produce them in line/column order, this will provide 01381 // a stable ordering. 01382 std::sort(vec->begin(), vec->end(), 01383 [](const UninitUse &a, const UninitUse &b) { 01384 // Prefer a more confident report over a less confident one. 01385 if (a.getKind() != b.getKind()) 01386 return a.getKind() > b.getKind(); 01387 return a.getUser()->getLocStart() < b.getUser()->getLocStart(); 01388 }); 01389 01390 for (const auto &U : *vec) { 01391 // If we have self-init, downgrade all uses to 'may be uninitialized'. 01392 UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U; 01393 01394 if (DiagnoseUninitializedUse(S, vd, Use)) 01395 // Skip further diagnostics for this variable. We try to warn only 01396 // on the first point at which a variable is used uninitialized. 01397 break; 01398 } 01399 } 01400 01401 // Release the uses vector. 
01402 delete vec; 01403 } 01404 delete uses; 01405 } 01406 01407 private: 01408 static bool hasAlwaysUninitializedUse(const UsesVec* vec) { 01409 return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) { 01410 return U.getKind() == UninitUse::Always || 01411 U.getKind() == UninitUse::AfterCall || 01412 U.getKind() == UninitUse::AfterDecl; 01413 }); 01414 } 01415 }; 01416 } 01417 01418 namespace clang { 01419 namespace { 01420 typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes; 01421 typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag; 01422 typedef std::list<DelayedDiag> DiagList; 01423 01424 struct SortDiagBySourceLocation { 01425 SourceManager &SM; 01426 SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {} 01427 01428 bool operator()(const DelayedDiag &left, const DelayedDiag &right) { 01429 // Although this call will be slow, this is only called when outputting 01430 // multiple warnings. 01431 return SM.isBeforeInTranslationUnit(left.first.first, right.first.first); 01432 } 01433 }; 01434 }} 01435 01436 //===----------------------------------------------------------------------===// 01437 // -Wthread-safety 01438 //===----------------------------------------------------------------------===// 01439 namespace clang { 01440 namespace threadSafety { 01441 01442 class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler { 01443 Sema &S; 01444 DiagList Warnings; 01445 SourceLocation FunLocation, FunEndLocation; 01446 01447 const FunctionDecl *CurrentFunction; 01448 bool Verbose; 01449 01450 OptionalNotes getNotes() const { 01451 if (Verbose && CurrentFunction) { 01452 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getLocStart(), 01453 S.PDiag(diag::note_thread_warning_in_fun) 01454 << CurrentFunction->getNameAsString()); 01455 return OptionalNotes(1, FNote); 01456 } 01457 return OptionalNotes(); 01458 } 01459 01460 OptionalNotes getNotes(const PartialDiagnosticAt &Note) const { 01461 OptionalNotes ONS(1, 
                      Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getLocStart(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction->getNameAsString());
      ONS.push_back(FNote);
    }
    return ONS;
  }

  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getLocStart(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction->getNameAsString());
      ONS.push_back(FNote);
    }
    return ONS;
  }

  // Helper functions
  void warnLockMismatch(unsigned DiagID, StringRef Kind, Name LockName,
                        SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind << LockName);
    Warnings.push_back(DelayedDiag(Warning, getNotes()));
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL),
      CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// \brief Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.push_back(DelayedDiag(Warning, getNotes()));
  }

  void handleUnmatchedUnlock(StringRef Kind, Name LockName,
                             SourceLocation Loc) override {
    warnLockMismatch(diag::warn_unlock_but_no_lock, Kind, LockName, Loc);
  }

  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation Loc) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_kind_mismatch)
                                         << Kind << LockName << Received
                                         << Expected);
    Warnings.push_back(DelayedDiag(Warning, getNotes()));
  }

  void handleDoubleLock(StringRef Kind, Name LockName,
                        SourceLocation Loc) override {
    warnLockMismatch(diag::warn_double_lock, Kind, LockName, Loc);
  }

  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    // Point back at the acquisition site when the analysis knows it.
    if (LocLocked.isValid()) {
      PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here)
                                              << Kind);
      Warnings.push_back(DelayedDiag(Warning, getNotes(Note)));
      return;
    }
    Warnings.push_back(DelayedDiag(Warning, getNotes()));
  }

  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.push_back(DelayedDiag(Warning, getNotes(Note)));
  }

  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
                         ProtectedOperationKind POK, AccessKind AK,
                         SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
01592 diag::warn_variable_requires_any_lock: 01593 diag::warn_var_deref_requires_any_lock; 01594 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) 01595 << D->getNameAsString() << getLockKindFromAccessKind(AK)); 01596 Warnings.push_back(DelayedDiag(Warning, getNotes())); 01597 } 01598 01599 void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, 01600 ProtectedOperationKind POK, Name LockName, 01601 LockKind LK, SourceLocation Loc, 01602 Name *PossibleMatch) override { 01603 unsigned DiagID = 0; 01604 if (PossibleMatch) { 01605 switch (POK) { 01606 case POK_VarAccess: 01607 DiagID = diag::warn_variable_requires_lock_precise; 01608 break; 01609 case POK_VarDereference: 01610 DiagID = diag::warn_var_deref_requires_lock_precise; 01611 break; 01612 case POK_FunctionCall: 01613 DiagID = diag::warn_fun_requires_lock_precise; 01614 break; 01615 case POK_PassByRef: 01616 DiagID = diag::warn_guarded_pass_by_reference; 01617 break; 01618 case POK_PtPassByRef: 01619 DiagID = diag::warn_pt_guarded_pass_by_reference; 01620 break; 01621 } 01622 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind 01623 << D->getNameAsString() 01624 << LockName << LK); 01625 PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match) 01626 << *PossibleMatch); 01627 if (Verbose && POK == POK_VarAccess) { 01628 PartialDiagnosticAt VNote(D->getLocation(), 01629 S.PDiag(diag::note_guarded_by_declared_here) 01630 << D->getNameAsString()); 01631 Warnings.push_back(DelayedDiag(Warning, getNotes(Note, VNote))); 01632 } else 01633 Warnings.push_back(DelayedDiag(Warning, getNotes(Note))); 01634 } else { 01635 switch (POK) { 01636 case POK_VarAccess: 01637 DiagID = diag::warn_variable_requires_lock; 01638 break; 01639 case POK_VarDereference: 01640 DiagID = diag::warn_var_deref_requires_lock; 01641 break; 01642 case POK_FunctionCall: 01643 DiagID = diag::warn_fun_requires_lock; 01644 break; 01645 case POK_PassByRef: 01646 DiagID = diag::warn_guarded_pass_by_reference; 01647 break; 01648 
case POK_PtPassByRef: 01649 DiagID = diag::warn_pt_guarded_pass_by_reference; 01650 break; 01651 } 01652 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind 01653 << D->getNameAsString() 01654 << LockName << LK); 01655 if (Verbose && POK == POK_VarAccess) { 01656 PartialDiagnosticAt Note(D->getLocation(), 01657 S.PDiag(diag::note_guarded_by_declared_here) 01658 << D->getNameAsString()); 01659 Warnings.push_back(DelayedDiag(Warning, getNotes(Note))); 01660 } else 01661 Warnings.push_back(DelayedDiag(Warning, getNotes())); 01662 } 01663 } 01664 01665 01666 virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, 01667 SourceLocation Loc) override { 01668 PartialDiagnosticAt Warning(Loc, 01669 S.PDiag(diag::warn_acquire_requires_negative_cap) 01670 << Kind << LockName << Neg); 01671 Warnings.push_back(DelayedDiag(Warning, getNotes())); 01672 } 01673 01674 01675 void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, 01676 SourceLocation Loc) override { 01677 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex) 01678 << Kind << FunName << LockName); 01679 Warnings.push_back(DelayedDiag(Warning, getNotes())); 01680 } 01681 01682 void enterFunction(const FunctionDecl* FD) override { 01683 CurrentFunction = FD; 01684 } 01685 01686 void leaveFunction(const FunctionDecl* FD) override { 01687 CurrentFunction = 0; 01688 } 01689 }; 01690 01691 } 01692 } 01693 01694 //===----------------------------------------------------------------------===// 01695 // -Wconsumed 01696 //===----------------------------------------------------------------------===// 01697 01698 namespace clang { 01699 namespace consumed { 01700 namespace { 01701 class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase { 01702 01703 Sema &S; 01704 DiagList Warnings; 01705 01706 public: 01707 01708 ConsumedWarningsHandler(Sema &S) : S(S) {} 01709 01710 void emitDiagnostics() override { 01711 
Warnings.sort(SortDiagBySourceLocation(S.getSourceManager())); 01712 for (const auto &Diag : Warnings) { 01713 S.Diag(Diag.first.first, Diag.first.second); 01714 for (const auto &Note : Diag.second) 01715 S.Diag(Note.first, Note.second); 01716 } 01717 } 01718 01719 void warnLoopStateMismatch(SourceLocation Loc, 01720 StringRef VariableName) override { 01721 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) << 01722 VariableName); 01723 01724 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 01725 } 01726 01727 void warnParamReturnTypestateMismatch(SourceLocation Loc, 01728 StringRef VariableName, 01729 StringRef ExpectedState, 01730 StringRef ObservedState) override { 01731 01732 PartialDiagnosticAt Warning(Loc, S.PDiag( 01733 diag::warn_param_return_typestate_mismatch) << VariableName << 01734 ExpectedState << ObservedState); 01735 01736 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 01737 } 01738 01739 void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState, 01740 StringRef ObservedState) override { 01741 01742 PartialDiagnosticAt Warning(Loc, S.PDiag( 01743 diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState); 01744 01745 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 01746 } 01747 01748 void warnReturnTypestateForUnconsumableType(SourceLocation Loc, 01749 StringRef TypeName) override { 01750 PartialDiagnosticAt Warning(Loc, S.PDiag( 01751 diag::warn_return_typestate_for_unconsumable_type) << TypeName); 01752 01753 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 01754 } 01755 01756 void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState, 01757 StringRef ObservedState) override { 01758 01759 PartialDiagnosticAt Warning(Loc, S.PDiag( 01760 diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState); 01761 01762 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 01763 } 01764 01765 void 
  warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
                              SourceLocation Loc) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);

    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
                             StringRef State, SourceLocation Loc) override {

    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
                                MethodName << VariableName << State);

    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
};
}}}

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
// warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

// Default policy: only the fall-through check is on unconditionally; the
// other analyses are enabled per-TU based on which warnings are requested
// (see the AnalysisBasedWarnings constructor below).
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
  enableConsumedAnalysis = 0;
}

// Returns 1 when the given diagnostic is not ignored at the default location.
static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
  return (unsigned)!D.isIgnored(diag, SourceLocation());
}

clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Enable each optional analysis if any of its driving warnings is active.
  DefaultPolicy.enableCheckUnreachable =
    isEnabled(D, warn_unreachable) ||
    isEnabled(D, warn_unreachable_break) ||
    isEnabled(D, warn_unreachable_return) ||
    isEnabled(D, warn_unreachable_loop_increment);

  DefaultPolicy.enableThreadSafetyAnalysis =
    isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
    isEnabled(D, warn_use_in_invalid_state);
}

// Emit diagnostics that were deferred because they sat on possibly
// unreachable code paths.
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  for (const auto &D : fscope->PossiblyUnreachableDiags)
    S.Diag(D.Loc, D.PD);
}

void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasUncompilableErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
01887 AC.getCFGBuildOptions().setAllAlwaysAdd(); 01888 } 01889 else { 01890 AC.getCFGBuildOptions() 01891 .setAlwaysAdd(Stmt::BinaryOperatorClass) 01892 .setAlwaysAdd(Stmt::CompoundAssignOperatorClass) 01893 .setAlwaysAdd(Stmt::BlockExprClass) 01894 .setAlwaysAdd(Stmt::CStyleCastExprClass) 01895 .setAlwaysAdd(Stmt::DeclRefExprClass) 01896 .setAlwaysAdd(Stmt::ImplicitCastExprClass) 01897 .setAlwaysAdd(Stmt::UnaryOperatorClass) 01898 .setAlwaysAdd(Stmt::AttributedStmtClass); 01899 } 01900 01901 // Install the logical handler for -Wtautological-overlap-compare 01902 std::unique_ptr<LogicalErrorHandler> LEH; 01903 if (!Diags.isIgnored(diag::warn_tautological_overlap_comparison, 01904 D->getLocStart())) { 01905 LEH.reset(new LogicalErrorHandler(S)); 01906 AC.getCFGBuildOptions().Observer = LEH.get(); 01907 } 01908 01909 // Emit delayed diagnostics. 01910 if (!fscope->PossiblyUnreachableDiags.empty()) { 01911 bool analyzed = false; 01912 01913 // Register the expressions with the CFGBuilder. 01914 for (const auto &D : fscope->PossiblyUnreachableDiags) { 01915 if (D.stmt) 01916 AC.registerForcedBlockExpression(D.stmt); 01917 } 01918 01919 if (AC.getCFG()) { 01920 analyzed = true; 01921 for (const auto &D : fscope->PossiblyUnreachableDiags) { 01922 bool processed = false; 01923 if (D.stmt) { 01924 const CFGBlock *block = AC.getBlockForRegisteredExpression(D.stmt); 01925 CFGReverseBlockReachabilityAnalysis *cra = 01926 AC.getCFGReachablityAnalysis(); 01927 // FIXME: We should be able to assert that block is non-null, but 01928 // the CFG analysis can skip potentially-evaluated expressions in 01929 // edge cases; see test/Sema/vla-2.c. 01930 if (block && cra) { 01931 // Can this block be reached from the entrance? 01932 if (cra->isReachable(&AC.getCFG()->getEntry(), block)) 01933 S.Diag(D.Loc, D.PD); 01934 processed = true; 01935 } 01936 } 01937 if (!processed) { 01938 // Emit the warning anyway if we cannot map to a basic block. 
01939 S.Diag(D.Loc, D.PD); 01940 } 01941 } 01942 } 01943 01944 if (!analyzed) 01945 flushDiagnostics(S, fscope); 01946 } 01947 01948 01949 // Warning: check missing 'return' 01950 if (P.enableCheckFallThrough) { 01951 const CheckFallThroughDiagnostics &CD = 01952 (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock() 01953 : (isa<CXXMethodDecl>(D) && 01954 cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call && 01955 cast<CXXMethodDecl>(D)->getParent()->isLambda()) 01956 ? CheckFallThroughDiagnostics::MakeForLambda() 01957 : CheckFallThroughDiagnostics::MakeForFunction(D)); 01958 CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC); 01959 } 01960 01961 // Warning: check for unreachable code 01962 if (P.enableCheckUnreachable) { 01963 // Only check for unreachable code on non-template instantiations. 01964 // Different template instantiations can effectively change the control-flow 01965 // and it is very difficult to prove that a snippet of code in a template 01966 // is unreachable for all instantiations. 01967 bool isTemplateInstantiation = false; 01968 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D)) 01969 isTemplateInstantiation = Function->isTemplateInstantiation(); 01970 if (!isTemplateInstantiation) 01971 CheckUnreachable(S, AC); 01972 } 01973 01974 // Check for thread safety violations 01975 if (P.enableThreadSafetyAnalysis) { 01976 SourceLocation FL = AC.getDecl()->getLocation(); 01977 SourceLocation FEL = AC.getDecl()->getLocEnd(); 01978 threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL); 01979 if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getLocStart())) 01980 Reporter.setIssueBetaWarnings(true); 01981 if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getLocStart())) 01982 Reporter.setVerbose(true); 01983 01984 threadSafety::runThreadSafetyAnalysis(AC, Reporter); 01985 Reporter.emitDiagnostics(); 01986 } 01987 01988 // Check for violations of consumed properties. 
01989 if (P.enableConsumedAnalysis) { 01990 consumed::ConsumedWarningsHandler WarningHandler(S); 01991 consumed::ConsumedAnalyzer Analyzer(WarningHandler); 01992 Analyzer.run(AC); 01993 } 01994 01995 if (!Diags.isIgnored(diag::warn_uninit_var, D->getLocStart()) || 01996 !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getLocStart()) || 01997 !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getLocStart())) { 01998 if (CFG *cfg = AC.getCFG()) { 01999 UninitValsDiagReporter reporter(S); 02000 UninitVariablesAnalysisStats stats; 02001 std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats)); 02002 runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC, 02003 reporter, stats); 02004 02005 if (S.CollectStats && stats.NumVariablesAnalyzed > 0) { 02006 ++NumUninitAnalysisFunctions; 02007 NumUninitAnalysisVariables += stats.NumVariablesAnalyzed; 02008 NumUninitAnalysisBlockVisits += stats.NumBlockVisits; 02009 MaxUninitAnalysisVariablesPerFunction = 02010 std::max(MaxUninitAnalysisVariablesPerFunction, 02011 stats.NumVariablesAnalyzed); 02012 MaxUninitAnalysisBlockVisitsPerFunction = 02013 std::max(MaxUninitAnalysisBlockVisitsPerFunction, 02014 stats.NumBlockVisits); 02015 } 02016 } 02017 } 02018 02019 bool FallThroughDiagFull = 02020 !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getLocStart()); 02021 bool FallThroughDiagPerFunction = !Diags.isIgnored( 02022 diag::warn_unannotated_fallthrough_per_function, D->getLocStart()); 02023 if (FallThroughDiagFull || FallThroughDiagPerFunction) { 02024 DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull); 02025 } 02026 02027 if (S.getLangOpts().ObjCARCWeak && 02028 !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getLocStart())) 02029 diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap()); 02030 02031 02032 // Check for infinite self-recursion in functions 02033 if (!Diags.isIgnored(diag::warn_infinite_recursive_function, 02034 D->getLocStart())) { 02035 if (const FunctionDecl *FD = 
dyn_cast<FunctionDecl>(D)) { 02036 checkRecursiveFunction(S, FD, Body, AC); 02037 } 02038 } 02039 02040 // If none of the previous checks caused a CFG build, trigger one here 02041 // for -Wtautological-overlap-compare 02042 if (!Diags.isIgnored(diag::warn_tautological_overlap_comparison, 02043 D->getLocStart())) { 02044 AC.getCFG(); 02045 } 02046 02047 // Collect statistics about the CFG if it was built. 02048 if (S.CollectStats && AC.isCFGBuilt()) { 02049 ++NumFunctionsAnalyzed; 02050 if (CFG *cfg = AC.getCFG()) { 02051 // If we successfully built a CFG for this context, record some more 02052 // detail information about it. 02053 NumCFGBlocks += cfg->getNumBlockIDs(); 02054 MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction, 02055 cfg->getNumBlockIDs()); 02056 } else { 02057 ++NumFunctionsWithBadCFGs; 02058 } 02059 } 02060 } 02061 02062 void clang::sema::AnalysisBasedWarnings::PrintStats() const { 02063 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n"; 02064 02065 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs; 02066 unsigned AvgCFGBlocksPerFunction = 02067 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt; 02068 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed (" 02069 << NumFunctionsWithBadCFGs << " w/o CFGs).\n" 02070 << " " << NumCFGBlocks << " CFG blocks built.\n" 02071 << " " << AvgCFGBlocksPerFunction 02072 << " average CFG blocks per function.\n" 02073 << " " << MaxCFGBlocksPerFunction 02074 << " max CFG blocks per function.\n"; 02075 02076 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0 02077 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions; 02078 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 
0 02079 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions; 02080 llvm::errs() << NumUninitAnalysisFunctions 02081 << " functions analyzed for uninitialiazed variables\n" 02082 << " " << NumUninitAnalysisVariables << " variables analyzed.\n" 02083 << " " << AvgUninitVariablesPerFunction 02084 << " average variables per function.\n" 02085 << " " << MaxUninitAnalysisVariablesPerFunction 02086 << " max variables per function.\n" 02087 << " " << NumUninitAnalysisBlockVisits << " block visits.\n" 02088 << " " << AvgUninitBlockVisitsPerFunction 02089 << " average block visits per function.\n" 02090 << " " << MaxUninitAnalysisBlockVisitsPerFunction 02091 << " max block visits per function.\n"; 02092 }