void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}
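
All of the examples in this listing share the same iteration idiom over a block's captures. Distilled into a standalone helper for reference (a minimal sketch, not taken from any of the snippets; the name ForEachCapturedVar is invented here, and it assumes the same era of the Clang C++ API the snippets use):

#include "clang/AST/Decl.h"

// Sketch only: visit every variable captured by a block, distinguishing
// by-reference (__block) captures from by-copy captures.
static void ForEachCapturedVar(const clang::BlockDecl *BD) {
  for (clang::BlockDecl::capture_const_iterator I = BD->capture_begin(),
       E = BD->capture_end(); I != E; ++I) {
    const clang::VarDecl *VD = I->getVariable();
    if (I->isByRef()) {
      // Captured with __block semantics: the block refers to the original
      // variable rather than to a copy.
    } else {
      // Captured by copy when the block literal is formed.
    }
    (void)VD; // inspect or record VD here
  }
}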
Example #2
const ImplicitParamDecl *AnalysisDeclContext::getSelfDecl() const {
  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D))
    return MD->getSelfDecl();
  if (const BlockDecl *BD = dyn_cast<BlockDecl>(D)) {
    // See if 'self' was captured by the block.
    for (BlockDecl::capture_const_iterator it = BD->capture_begin(),
         et = BD->capture_end(); it != et; ++it) {
      const VarDecl *VD = it->getVariable();
      if (VD->getName() == "self")
        return dyn_cast<ImplicitParamDecl>(VD);
    }    
  }

  return NULL;
}
Example #3
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    Value v = vals[vd];
    if (handler && isUninitialized(v))
      handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v));
  }
}
Example #4
static DeclVec* LazyInitializeReferencedDecls(const BlockDecl *BD,
                                              void *&Vec,
                                              llvm::BumpPtrAllocator &A) {
  if (Vec)
    return (DeclVec*) Vec;

  BumpVectorContext BC(A);
  DeclVec *BV = (DeclVec*) A.Allocate<DeclVec>();
  new (BV) DeclVec(BC, 10);

  // Go through the capture list.
  for (BlockDecl::capture_const_iterator CI = BD->capture_begin(),
       CE = BD->capture_end(); CI != CE; ++CI) {
    BV->push_back(CI->getVariable(), BC);
  }

  // Find the referenced global/static variables.
  FindBlockDeclRefExprsVals F(*BV, BC);
  F.Visit(BD->getBody());

  Vec = BV;
  return BV;
}
Example #5
/// BuildScopeInformation - Walk the statement S, which lives in the scope
/// identified by origParentScope, adding any labels or gotos to
/// LabelAndGotoScopes and recursively walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S, unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S)) 
                            ? origParentScope : independentParentScope);

  bool SkipFirstSubStmt = false;
  
  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(cast<IndirectGotoStmt>(S));
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the condition variable before entering the scope of the switch
    // statement.
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      SkipFirstSubStmt = true;
    }
    // Fall through
      
  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    unsigned newParentScope;
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_cxx_try,
                               diag::note_exits_cxx_try,
                               TS->getSourceRange().getBegin()));
    if (Stmt *TryBlock = TS->getTryBlock())
      BuildScopeInformation(TryBlock, (newParentScope = Scopes.size()-1));

    // Jump from the catch into the try is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), 
                            (newParentScope = Scopes.size()-1));
    }
    return;
  }

  default:
    break;
  }

  for (Stmt::child_range CI = S->children(); CI; ++CI) {
    if (SkipFirstSubStmt) {
      SkipFirstSubStmt = false;
      continue;
    }
    
    Stmt *SubStmt = *CI;
    if (SubStmt == 0) continue;

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SubStmt))
        Next = CS->getSubStmt();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SubStmt))
        Next = DS->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    if (DeclStmt *DS = dyn_cast<DeclStmt>(SubStmt)) {
      // The decl statement creates a scope if any of the decls in it are VLAs
      // or have the cleanup attribute.
      for (DeclStmt::decl_iterator I = DS->decl_begin(), E = DS->decl_end();
           I != E; ++I)
        BuildScopeInformation(*I, ParentScope);
      continue;
    }
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    if (ObjCAtTryStmt *AT = dyn_cast<ObjCAtTryStmt>(SubStmt)) {
      unsigned newParentScope;
      // Recursively walk the AST for the @try part.
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, (newParentScope = Scopes.size()-1));

      // Jump from the catch to the finally or try is not valid.
      for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
        ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
        Scopes.push_back(GotoScope(ParentScope,
                                   diag::note_protected_by_objc_catch,
                                   diag::note_exits_objc_catch,
                                   AC->getAtCatchLoc()));
        // Each @catch body is walked in its own protected scope.
        BuildScopeInformation(AC->getCatchBody(), 
                              (newParentScope = Scopes.size()-1));
      }

      // Jump from the finally to the try or catch is not valid.
      if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
        Scopes.push_back(GotoScope(ParentScope,
                                   diag::note_protected_by_objc_finally,
                                   diag::note_exits_objc_finally,
                                   AF->getAtFinallyLoc()));
        BuildScopeInformation(AF, (newParentScope = Scopes.size()-1));
      }

      continue;
    }
    
    unsigned newParentScope;
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    if (ObjCAtSynchronizedStmt *AS = dyn_cast<ObjCAtSynchronizedStmt>(SubStmt)){
      // Recursively walk the AST for the @synchronized object expr, it is
      // evaluated in the normal scope.
      BuildScopeInformation(AS->getSynchExpr(), ParentScope);

      // Recursively walk the AST for the @synchronized part, protected by a new
      // scope.
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_synchronized,
                                 diag::note_exits_objc_synchronized,
                                 AS->getAtSynchronizedLoc()));
      BuildScopeInformation(AS->getSynchBody(), 
                            (newParentScope = Scopes.size()-1));
      continue;
    }

    // Disallow jumps into the protected statement of an @autoreleasepool.
    if (ObjCAutoreleasePoolStmt *AS = dyn_cast<ObjCAutoreleasePoolStmt>(SubStmt)){
      // Recursively walk the AST for the @autoreleasepool part, protected by a new
      // scope.
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_autoreleasepool,
                                 diag::note_exits_objc_autoreleasepool,
                                 AS->getAtLoc()));
      BuildScopeInformation(AS->getSubStmt(), (newParentScope = Scopes.size()-1));
      continue;
    }

    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    if (ExprWithCleanups *EWC = dyn_cast<ExprWithCleanups>(SubStmt)) {
      for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
        const BlockDecl *BDecl = EWC->getObject(i);
        for (BlockDecl::capture_const_iterator ci = BDecl->capture_begin(),
             ce = BDecl->capture_end(); ci != ce; ++ci) {
          VarDecl *variable = ci->getVariable();
          BuildScopeInformation(variable, BDecl, ParentScope);
        }
      }
    }
    
    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}
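
For context on what this scope bookkeeping is for, here is a minimal, intentionally ill-formed C++ sketch (not part of the listing; the function name is made up for illustration) of the kind of jump these scopes let the checker reject. It corresponds to the Stmt::CXXTryStmtClass case above, where every try block gets its own protected scope:

// Ill-formed on purpose: a goto may not transfer control into a try block,
// and the GotoScope pushed for the try is what lets Clang diagnose this.
void JumpIntoTry(bool Skip) {
  if (Skip)
    goto inside;   // error: cannot jump into the protected scope of the try
  try {
  inside:          // label inside the try block's scope
    ;
  } catch (...) {
  }
}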