Example #1
void CGObjCRuntime::EmitTryCatchStmt(CodeGenFunction &CGF,
                                     const ObjCAtTryStmt &S,
                                     llvm::Constant *beginCatchFn,
                                     llvm::Constant *endCatchFn,
                                     llvm::Constant *exceptionRethrowFn) {
  // Jump destination for falling out of catch bodies.
  CodeGenFunction::JumpDest Cont;
  if (S.getNumCatchStmts())
    Cont = CGF.getJumpDestInCurrentScope("eh.cont");

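  // If there is a @finally block, enter its cleanup scope now so that every
  // exit path out of the @try/@catch runs the finally body.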
  CodeGenFunction::FinallyInfo FinallyInfo;
  if (const ObjCAtFinallyStmt *Finally = S.getFinallyStmt())
    FinallyInfo.enter(CGF, Finally->getFinallyBody(),
                      beginCatchFn, endCatchFn, exceptionRethrowFn);

  SmallVector<CatchHandler, 8> Handlers;

  // Enter the catch, if there is one.
  if (S.getNumCatchStmts()) {
    for (unsigned I = 0, N = S.getNumCatchStmts(); I != N; ++I) {
      const ObjCAtCatchStmt *CatchStmt = S.getCatchStmt(I);
      const VarDecl *CatchDecl = CatchStmt->getCatchParamDecl();

      Handlers.push_back(CatchHandler());
      CatchHandler &Handler = Handlers.back();
      Handler.Variable = CatchDecl;
      Handler.Body = CatchStmt->getCatchBody();
      Handler.Block = CGF.createBasicBlock("catch");

      // @catch(...) always matches.
      if (!CatchDecl) {
        Handler.TypeInfo = nullptr; // catch-all
        // Don't consider any other catches.
        break;
      }

      Handler.TypeInfo = GetEHType(CatchDecl->getType());
    }

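    // Push an EH catch scope and register each handler's type info and block.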
    EHCatchScope *Catch = CGF.EHStack.pushCatch(Handlers.size());
    for (unsigned I = 0, E = Handlers.size(); I != E; ++I)
      Catch->setHandler(I, Handlers[I].TypeInfo, Handlers[I].Block);
  }
  
  // Emit the try body.
  CGF.EmitStmt(S.getTryBody());

  // Leave the try.
  if (S.getNumCatchStmts())
    CGF.popCatchScope();

  // Remember where we were.
  CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();

  // Emit the handlers.
  for (unsigned I = 0, E = Handlers.size(); I != E; ++I) {
    CatchHandler &Handler = Handlers[I];

    CGF.EmitBlock(Handler.Block);
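    // Load the exception pointer saved in the function's exception slot.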
    llvm::Value *RawExn = CGF.getExceptionFromSlot();

    // Enter the catch.
    llvm::Value *Exn = RawExn;
    if (beginCatchFn) {
      Exn = CGF.Builder.CreateCall(beginCatchFn, RawExn, "exn.adjusted");
      cast<llvm::CallInst>(Exn)->setDoesNotThrow();
    }

    CodeGenFunction::RunCleanupsScope cleanups(CGF);

    if (endCatchFn) {
      // Add a cleanup to leave the catch.
      bool EndCatchMightThrow = (Handler.Variable == nullptr);

      CGF.EHStack.pushCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
                                                EndCatchMightThrow,
                                                endCatchFn);
    }

    // Bind the catch parameter if it exists.
    if (const VarDecl *CatchParam = Handler.Variable) {
      llvm::Type *CatchType = CGF.ConvertType(CatchParam->getType());
      llvm::Value *CastExn = CGF.Builder.CreateBitCast(Exn, CatchType);

      CGF.EmitAutoVarDecl(*CatchParam);
      CGF.Builder.CreateStore(CastExn, CGF.GetAddrOfLocalVar(CatchParam));
    }

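    // Make the exception visible to a parameterless @throw (rethrow) while
    // the handler body is emitted.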
    CGF.ObjCEHValueStack.push_back(Exn);
    CGF.EmitStmt(Handler.Body);
    CGF.ObjCEHValueStack.pop_back();

    // Leave any cleanups associated with the catch.
    cleanups.ForceCleanup();

    CGF.EmitBranchThroughCleanup(Cont);
  }  

  // Go back to the try-statement fallthrough.
  CGF.Builder.restoreIP(SavedIP);

  // Pop out of the finally.
  if (S.getFinallyStmt())
    FinallyInfo.exit(CGF);

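  // Emit the fall-through continuation block, if we created one.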
  if (Cont.isValid())
    CGF.EmitBlock(Cont.getBlock());
}
/// BuildScopeInformation - Walk the given statement, adding any labels or
/// gotos to LabelAndGotoScopes, pushing new scopes for constructs that may
/// not be jumped into, and recursively walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

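  // Number of leading children already walked by a case below (e.g. a
  // switch's init statement and condition variable); the generic child loop
  // at the end of this function skips that many.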
  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getLocStart()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(cast<IndirectGotoStmt>(S));
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    LLVM_FALLTHROUGH;

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = IS->isConstexpr() ? diag::note_protected_by_constexpr_if
                                      : diag::note_protected_by_if_available;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getLocStart()));
    BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getLocStart()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getLocStart()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jump from the catch into the try is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jump from the catch to the finally or try is not valid.
    for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
      ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jump from the finally to the try or catch is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr, it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      const BlockDecl *BDecl = EWC->getObject(i);
      for (const auto &CI : BDecl->captures()) {
        VarDecl *variable = CI.getVariable();
        BuildScopeInformation(variable, BDecl, origParentScope);
      }
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->GetTemporaryExpr()->skipRValueSubobjectAdjustments(
              CommaLHS, Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  default:
    break;
  }

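  // Walk any remaining children of this statement within ParentScope.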
  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}