/// InitializePreprocessor - Initialize the preprocessor, getting it and the
/// environment ready to process a single file.
///
void clang::InitializePreprocessor(Preprocessor &PP,
                                   const PreprocessorOptions &InitOpts,
                                   const HeaderSearchOptions &HSOpts) {
  std::vector<char> PredefineBuffer;

  InitializeFileRemapping(PP.getDiagnostics(), PP.getSourceManager(),
                          PP.getFileManager(), InitOpts);

  const char *LineDirective = "# 1 \"<built-in>\" 3\n";
  PredefineBuffer.insert(PredefineBuffer.end(),
                         LineDirective, LineDirective + strlen(LineDirective));

  // Install things like __POWERPC__, __GNUC__, etc into the macro table.
  if (InitOpts.UsePredefines)
    InitializePredefinedMacros(PP.getTargetInfo(), PP.getLangOptions(),
                               PredefineBuffer);

  // Add on the predefines from the driver.  Wrap in a #line directive to
  // report that they come from the command line.
  LineDirective = "# 1 \"<command line>\" 1\n";
  PredefineBuffer.insert(PredefineBuffer.end(),
                         LineDirective, LineDirective + strlen(LineDirective));

  // Process #define's and #undef's in the order they are given.
  for (unsigned i = 0, e = InitOpts.Macros.size(); i != e; ++i) {
    if (InitOpts.Macros[i].second)  // isUndef
      UndefineBuiltinMacro(PredefineBuffer, InitOpts.Macros[i].first.c_str());
    else
      DefineBuiltinMacro(PredefineBuffer, InitOpts.Macros[i].first.c_str(),
                         &PP.getDiagnostics());
  }

  // If -imacros are specified, include them now.  These are processed before
  // any -include directives.
  for (unsigned i = 0, e = InitOpts.MacroIncludes.size(); i != e; ++i)
    AddImplicitIncludeMacros(PredefineBuffer, InitOpts.MacroIncludes[i]);

  // Process -include directives.
  for (unsigned i = 0, e = InitOpts.Includes.size(); i != e; ++i) {
    const std::string &Path = InitOpts.Includes[i];
    if (Path == InitOpts.ImplicitPTHInclude)
      AddImplicitIncludePTH(PredefineBuffer, PP, Path);
    else
      AddImplicitInclude(PredefineBuffer, Path);
  }

  // Exit the command line and go back to <built-in> (2 is LC_LEAVE).
  LineDirective = "# 1 \"<built-in>\" 2\n";
  PredefineBuffer.insert(PredefineBuffer.end(),
                         LineDirective, LineDirective + strlen(LineDirective));

  // Null-terminate PredefineBuffer and hand it to the preprocessor.
  PredefineBuffer.push_back(0);
  PP.setPredefines(&PredefineBuffer[0]);

  // Initialize the header search object.
  ApplyHeaderSearchOptions(PP.getHeaderSearchInfo(), HSOpts,
                           PP.getLangOptions(),
                           PP.getTargetInfo().getTriple());
}
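// What the routine above assembles is just a flat text buffer of directives.
// The following is a small, self-contained sketch (not Clang API; every name
// is local to the sketch and the -D/-U values are made up) that builds and
// prints the same shape of buffer: GNU line markers bracket the driver macros
// so diagnostics can attribute them to "<built-in>" and "<command line>".
#include <cstring>
#include <iostream>
#include <vector>

int main() {
  std::vector<char> PredefineBuffer;
  const char *Chunks[] = {
    "# 1 \"<built-in>\" 3\n",      // enter the <built-in> pseudo-file
    "# 1 \"<command line>\" 1\n",  // 1 == LC_ENTER: enter <command line>
    "#define DEBUG_LEVEL 2\n",      // hypothetical -DDEBUG_LEVEL=2
    "#undef NDEBUG\n",              // hypothetical -UNDEBUG
    "# 1 \"<built-in>\" 2\n"       // 2 == LC_LEAVE: back to <built-in>
  };
  for (unsigned i = 0; i != sizeof(Chunks) / sizeof(Chunks[0]); ++i)
    PredefineBuffer.insert(PredefineBuffer.end(), Chunks[i],
                           Chunks[i] + std::strlen(Chunks[i]));

  PredefineBuffer.push_back(0);    // null-terminate, as setPredefines expects
  std::cout << &PredefineBuffer[0];
  return 0;
}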
Sema::Sema(Preprocessor &pp, ASTContext &ctxt, ASTConsumer &consumer, bool CompleteTranslationUnit, CodeCompleteConsumer *CodeCompleter) : TheTargetAttributesSema(0), LangOpts(pp.getLangOptions()), PP(pp), Context(ctxt), Consumer(consumer), Diags(PP.getDiagnostics()), SourceMgr(PP.getSourceManager()), ExternalSource(0), CodeCompleter(CodeCompleter), CurContext(0), PackContext(0), TopFunctionScope(0), ParsingDeclDepth(0), IdResolver(pp.getLangOptions()), StdNamespace(0), StdBadAlloc(0), GlobalNewDeleteDeclared(false), CompleteTranslationUnit(CompleteTranslationUnit), NumSFINAEErrors(0), NonInstantiationEntries(0), CurrentInstantiationScope(0), TyposCorrected(0) { TUScope = 0; if (getLangOptions().CPlusPlus) FieldCollector.reset(new CXXFieldCollector()); // Tell diagnostics how to render things from the AST library. PP.getDiagnostics().SetArgToStringFn(&FormatASTNodeDiagnosticArgument, &Context); ExprEvalContexts.push_back( ExpressionEvaluationContextRecord(PotentiallyEvaluated, 0)); }
/// HasExtension - Return true if we recognize and implement the feature /// specified by the identifier, either as an extension or a standard language /// feature. static bool HasExtension(const Preprocessor &PP, const IdentifierInfo *II) { if (HasFeature(PP, II)) return true; // If the use of an extension results in an error diagnostic, extensions are // effectively unavailable, so just return false here. if (PP.getDiagnostics().getExtensionHandlingBehavior() == DiagnosticsEngine::Ext_Error) return false; const LangOptions &LangOpts = PP.getLangOptions(); // Because we inherit the feature list from HasFeature, this string switch // must be less restrictive than HasFeature's. return llvm::StringSwitch<bool>(II->getName()) // C11 features supported by other languages as extensions. .Case("c_alignas", true) .Case("c_generic_selections", true) .Case("c_static_assert", true) // C++0x features supported by other languages as extensions. .Case("cxx_deleted_functions", LangOpts.CPlusPlus) .Case("cxx_explicit_conversions", LangOpts.CPlusPlus) .Case("cxx_inline_namespaces", LangOpts.CPlusPlus) .Case("cxx_nonstatic_member_init", LangOpts.CPlusPlus) .Case("cxx_override_control", LangOpts.CPlusPlus) .Case("cxx_range_for", LangOpts.CPlusPlus) .Case("cxx_reference_qualified_functions", LangOpts.CPlusPlus) .Case("cxx_rvalue_references", LangOpts.CPlusPlus) .Default(false); }
/// HasFeature - Return true if we recognize and implement the specified feature /// specified by the identifier. static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) { const LangOptions &LangOpts = PP.getLangOptions(); return llvm::StringSwitch<bool>(II->getName()) .Case("attribute_analyzer_noreturn", true) .Case("attribute_cf_returns_not_retained", true) .Case("attribute_cf_returns_retained", true) .Case("attribute_ext_vector_type", true) .Case("attribute_ns_returns_not_retained", true) .Case("attribute_ns_returns_retained", true) .Case("attribute_objc_ivar_unused", true) .Case("attribute_overloadable", true) .Case("blocks", LangOpts.Blocks) .Case("cxx_attributes", LangOpts.CPlusPlus0x) .Case("cxx_auto_type", LangOpts.CPlusPlus0x) .Case("cxx_decltype", LangOpts.CPlusPlus0x) .Case("cxx_deleted_functions", LangOpts.CPlusPlus0x) .Case("cxx_exceptions", LangOpts.Exceptions) .Case("cxx_rtti", LangOpts.RTTI) .Case("cxx_static_assert", LangOpts.CPlusPlus0x) .Case("objc_nonfragile_abi", LangOpts.ObjCNonFragileABI) .Case("objc_weak_class", LangOpts.ObjCNonFragileABI) .Case("ownership_holds", true) .Case("ownership_returns", true) .Case("ownership_takes", true) .Case("cxx_inline_namespaces", true) //.Case("cxx_concepts", false) //.Case("cxx_lambdas", false) //.Case("cxx_nullptr", false) //.Case("cxx_rvalue_references", false) //.Case("cxx_variadic_templates", false) .Case("tls", PP.getTargetInfo().isTLSSupported()) .Default(false); }
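// HasFeature and HasExtension back the __has_feature and __has_extension
// builtin macros.  A typical guarded use in user code looks like the sketch
// below (illustrative, not from this file); the #ifndef shims keep the source
// compilable with compilers and older Clangs that lack the builtins.
#ifndef __has_feature
#  define __has_feature(x) 0            // compatibility with non-Clang compilers
#endif
#ifndef __has_extension
#  define __has_extension __has_feature // pre-__has_extension Clang versions
#endif

// Use "= delete" when the front end accepts it (as a feature or an extension),
// otherwise fall back to the classic "declare but never define" idiom.
#if __has_extension(cxx_deleted_functions)
#  define DISALLOW_COPY(T) T(const T &) = delete
#else
#  define DISALLOW_COPY(T) T(const T &)  /* intentionally not defined */
#endif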
/// LexRawTokensFromMainFile - Lex all the raw tokens from the main file into
/// the specified vector.
static void LexRawTokensFromMainFile(Preprocessor &PP,
                                     std::vector<Token> &RawTokens) {
  SourceManager &SM = PP.getSourceManager();

  // Create a lexer to lex all the tokens of the main file in raw mode.  Even
  // though it is in raw mode, it will not return comments.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(SM.getMainFileID());
  Lexer RawLex(SM.getMainFileID(), FromFile, SM, PP.getLangOptions());

  // Switch on comment lexing because we really do want them.
  RawLex.SetCommentRetentionState(true);

  Token RawTok;
  do {
    RawLex.LexFromRawLexer(RawTok);

    // If we have an identifier with no identifier info for our raw token,
    // look up the identifier info.  This is important for equality comparison
    // of identifier tokens.
    if (RawTok.is(tok::raw_identifier))
      PP.LookUpIdentifierInfo(RawTok);

    RawTokens.push_back(RawTok);
  } while (RawTok.isNot(tok::eof));
}
/// FindExpectedDiags - Lex the main source file to find all of the
/// expected errors and warnings.
static void FindExpectedDiags(Preprocessor &PP, ExpectedData &ED) {
  // Create a raw lexer to pull all the comments out of the main file.  We
  // don't want to look in #include'd headers for expected-error strings.
  SourceManager &SM = PP.getSourceManager();
  FileID FID = SM.getMainFileID();
  if (FID.isInvalid())
    return;

  // Create a lexer to lex all the tokens of the main file in raw mode.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
  Lexer RawLex(FID, FromFile, SM, PP.getLangOptions());

  // Return comments as tokens; this is how we find expected diagnostics.
  RawLex.SetCommentRetentionState(true);

  Token Tok;
  Tok.setKind(tok::comment);
  while (Tok.isNot(tok::eof)) {
    RawLex.Lex(Tok);
    if (!Tok.is(tok::comment)) continue;

    std::string Comment = PP.getSpelling(Tok);
    if (Comment.empty()) continue;

    // Find all expected errors/warnings/notes.
    ParseDirective(&Comment[0], Comment.size(), ED, PP, Tok.getLocation());
  }
}
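// FindExpectedDiags is what feeds the -verify machinery.  The kind of test
// file it scans looks like the sketch below; the text inside {{...}} is
// illustrative and must match whatever diagnostic the compiler actually
// emits for the marked line.
int f(void) {
  int x = y;    // expected-error {{use of undeclared identifier 'y'}}
  return x;
}

void g(void) {
  int unused;   // expected-warning {{unused variable 'unused'}}
}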
static void emitPremigrationErrors(const CapturedDiagList &arcDiags, const DiagnosticOptions &diagOpts, Preprocessor &PP) { TextDiagnosticPrinter printer(llvm::errs(), diagOpts); llvm::IntrusiveRefCntPtr<DiagnosticIDs> DiagID(new DiagnosticIDs()); llvm::IntrusiveRefCntPtr<DiagnosticsEngine> Diags( new DiagnosticsEngine(DiagID, &printer, /*ShouldOwnClient=*/false)); Diags->setSourceManager(&PP.getSourceManager()); printer.BeginSourceFile(PP.getLangOptions(), &PP); arcDiags.reportDiagnostics(*Diags); printer.EndSourceFile(); }
/// InitializePreprocessor - Initialize the preprocessor getting it and the /// environment ready to process a single file. This returns true on error. /// bool InitializePreprocessor(Preprocessor &PP, const PreprocessorInitOptions& InitOpts) { std::vector<char> PredefineBuffer; const char *LineDirective = "# 1 \"<built-in>\" 3\n"; PredefineBuffer.insert(PredefineBuffer.end(), LineDirective, LineDirective+strlen(LineDirective)); // Install things like __POWERPC__, __GNUC__, etc into the macro table. InitializePredefinedMacros(PP.getTargetInfo(), PP.getLangOptions(), PredefineBuffer); // Add on the predefines from the driver. Wrap in a #line directive to report // that they come from the command line. LineDirective = "# 1 \"<command line>\" 1\n"; PredefineBuffer.insert(PredefineBuffer.end(), LineDirective, LineDirective+strlen(LineDirective)); // Process #define's and #undef's in the order they are given. for (PreprocessorInitOptions::macro_iterator I = InitOpts.macro_begin(), E = InitOpts.macro_end(); I != E; ++I) { if (I->second) // isUndef UndefineBuiltinMacro(PredefineBuffer, I->first.c_str()); else DefineBuiltinMacro(PredefineBuffer, I->first.c_str()); } // If -imacros are specified, include them now. These are processed before // any -include directives. for (PreprocessorInitOptions::imacro_iterator I = InitOpts.imacro_begin(), E = InitOpts.imacro_end(); I != E; ++I) AddImplicitIncludeMacros(PredefineBuffer, *I); // Process -include directives. for (PreprocessorInitOptions::include_iterator I = InitOpts.include_begin(), E = InitOpts.include_end(); I != E; ++I) { if (I->second) // isPTH AddImplicitIncludePTH(PredefineBuffer, PP, I->first); else AddImplicitInclude(PredefineBuffer, I->first); } // Null terminate PredefinedBuffer and add it. PredefineBuffer.push_back(0); PP.setPredefines(&PredefineBuffer[0]); // Once we've read this, we're done. return false; }
Sema::Sema(Preprocessor &pp, ASTContext &ctxt, ASTConsumer &consumer, TranslationUnitKind TUKind, CodeCompleteConsumer *CodeCompleter) : TheTargetAttributesSema(0), FPFeatures(pp.getLangOptions()), LangOpts(pp.getLangOptions()), PP(pp), Context(ctxt), Consumer(consumer), Diags(PP.getDiagnostics()), SourceMgr(PP.getSourceManager()), CollectStats(false), ExternalSource(0), CodeCompleter(CodeCompleter), CurContext(0), OriginalLexicalContext(0), PackContext(0), MSStructPragmaOn(false), VisContext(0), ExprNeedsCleanups(0), LateTemplateParser(0), OpaqueParser(0), IdResolver(pp.getLangOptions()), CXXTypeInfoDecl(0), MSVCGuidDecl(0), GlobalNewDeleteDeclared(false), ObjCShouldCallSuperDealloc(false), ObjCShouldCallSuperFinalize(false), TUKind(TUKind), NumSFINAEErrors(0), SuppressAccessChecking(false), AccessCheckingSFINAE(false), InNonInstantiationSFINAEContext(false), NonInstantiationEntries(0), ArgumentPackSubstitutionIndex(-1), CurrentInstantiationScope(0), TyposCorrected(0), AnalysisWarnings(*this) { TUScope = 0; LoadedExternalKnownNamespaces = false; if (getLangOptions().CPlusPlus) FieldCollector.reset(new CXXFieldCollector()); // Tell diagnostics how to render things from the AST library. PP.getDiagnostics().SetArgToStringFn(&FormatASTNodeDiagnosticArgument, &Context); ExprEvalContexts.push_back( ExpressionEvaluationContextRecord(PotentiallyEvaluated, 0, false)); FunctionScopes.push_back(new FunctionScopeInfo(Diags)); }
void clang::DoRewriteTest(Preprocessor &PP, raw_ostream* OS) { SourceManager &SM = PP.getSourceManager(); const LangOptions &LangOpts = PP.getLangOptions(); TokenRewriter Rewriter(SM.getMainFileID(), SM, LangOpts); // Throw <i> </i> tags around comments. for (TokenRewriter::token_iterator I = Rewriter.token_begin(), E = Rewriter.token_end(); I != E; ++I) { if (I->isNot(tok::comment)) continue; Rewriter.AddTokenBefore(I, "<i>"); Rewriter.AddTokenAfter(I, "</i>"); } // Print out the output. for (TokenRewriter::token_iterator I = Rewriter.token_begin(), E = Rewriter.token_end(); I != E; ++I) *OS << PP.getSpelling(*I); }
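// A token-free illustration of what DoRewriteTest emits: every comment token
// is wrapped in <i>...</i> and the token stream is printed back out.  This
// sketch uses std::regex instead of TokenRewriter, only handles // comments,
// and is not how the pass is implemented; it just shows the transformation.
#include <iostream>
#include <regex>
#include <string>

int main() {
  std::string Src = "int x = 1; // the interesting part\n";
  std::regex LineComment("//[^\n]*");
  std::cout << std::regex_replace(Src, LineComment, "<i>$&</i>");
  return 0;
}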
/// FindExpectedDiags - Lex the main source file to find all of the
/// expected errors and warnings.
static void FindExpectedDiags(Preprocessor &PP,
                              DiagList &ExpectedErrors,
                              DiagList &ExpectedWarnings,
                              DiagList &ExpectedNotes) {
  // Create a raw lexer to pull all the comments out of the main file.  We
  // don't want to look in #include'd headers for expected-error strings.
  FileID FID = PP.getSourceManager().getMainFileID();

  // Create a lexer to lex all the tokens of the main file in raw mode.
  Lexer RawLex(FID, PP.getSourceManager(), PP.getLangOptions());

  // Return comments as tokens; this is how we find expected diagnostics.
  RawLex.SetCommentRetentionState(true);

  Token Tok;
  Tok.setKind(tok::comment);
  while (Tok.isNot(tok::eof)) {
    RawLex.Lex(Tok);
    if (!Tok.is(tok::comment)) continue;

    std::string Comment = PP.getSpelling(Tok);
    if (Comment.empty()) continue;

    // Find all expected errors.
    FindDiagnostics(&Comment[0], Comment.size(), ExpectedErrors, PP,
                    Tok.getLocation(), "expected-error");

    // Find all expected warnings.
    FindDiagnostics(&Comment[0], Comment.size(), ExpectedWarnings, PP,
                    Tok.getLocation(), "expected-warning");

    // Find all expected notes.
    FindDiagnostics(&Comment[0], Comment.size(), ExpectedNotes, PP,
                    Tok.getLocation(), "expected-note");
  }
}
/// EvaluateValue - Evaluate the token PeekTok (and any others needed) and /// return the computed value in Result. Return true if there was an error /// parsing. This function also returns information about the form of the /// expression in DT. See above for information on what DT means. /// /// If ValueLive is false, then this value is being evaluated in a context where /// the result is not used. As such, avoid diagnostics that relate to /// evaluation. static bool EvaluateValue(PPValue &Result, Token &PeekTok, DefinedTracker &DT, bool ValueLive, Preprocessor &PP) { DT.State = DefinedTracker::Unknown; // If this token's spelling is a pp-identifier, check to see if it is // 'defined' or if it is a macro. Note that we check here because many // keywords are pp-identifiers, so we can't check the kind. if (IdentifierInfo *II = PeekTok.getIdentifierInfo()) { if (II->isStr("defined")) { // Handle "defined X" and "defined(X)". return(EvaluateDefined(Result, PeekTok, DT, ValueLive, PP)); } else { // If this identifier isn't 'defined' or one of the special // preprocessor keywords and it wasn't macro expanded, it turns // into a simple 0, unless it is the C++ keyword "true", in which case it // turns into "1". if (ValueLive) PP.Diag(PeekTok, diag::warn_pp_undef_identifier) << II; Result.Val = II->getTokenID() == tok::kw_true; Result.Val.setIsUnsigned(false); // "0" is signed intmax_t 0. Result.setRange(PeekTok.getLocation()); PP.LexNonComment(PeekTok); return false; } } switch (PeekTok.getKind()) { default: // Non-value token. PP.Diag(PeekTok, diag::err_pp_expr_bad_token_start_expr); return true; case tok::eom: case tok::r_paren: // If there is no expression, report and exit. PP.Diag(PeekTok, diag::err_pp_expected_value_in_expr); return true; case tok::numeric_constant: { llvm::SmallString<64> IntegerBuffer; IntegerBuffer.resize(PeekTok.getLength()); const char *ThisTokBegin = &IntegerBuffer[0]; unsigned ActualLength = PP.getSpelling(PeekTok, ThisTokBegin); NumericLiteralParser Literal(ThisTokBegin, ThisTokBegin+ActualLength, PeekTok.getLocation(), PP); if (Literal.hadError) return true; // a diagnostic was already reported. if (Literal.isFloatingLiteral() || Literal.isImaginary) { PP.Diag(PeekTok, diag::err_pp_illegal_floating_literal); return true; } assert(Literal.isIntegerLiteral() && "Unknown ppnumber"); // long long is a C99 feature. if (!PP.getLangOptions().C99 && !PP.getLangOptions().CPlusPlus0x && Literal.isLongLong) PP.Diag(PeekTok, diag::ext_longlong); // Parse the integer literal into Result. if (Literal.GetIntegerValue(Result.Val)) { // Overflow parsing integer literal. if (ValueLive) PP.Diag(PeekTok, diag::warn_integer_too_large); Result.Val.setIsUnsigned(true); } else { // Set the signedness of the result to match whether there was a U suffix // or not. Result.Val.setIsUnsigned(Literal.isUnsigned); // Detect overflow based on whether the value is signed. If signed // and if the value is too large, emit a warning "integer constant is so // large that it is unsigned" e.g. on 12345678901234567890 where intmax_t // is 64-bits. if (!Literal.isUnsigned && Result.Val.isNegative()) { // Don't warn for a hex literal: 0x8000..0 shouldn't warn. if (ValueLive && Literal.getRadix() != 16) PP.Diag(PeekTok, diag::warn_integer_too_large_for_signed); Result.Val.setIsUnsigned(true); } } // Consume the token. 
Result.setRange(PeekTok.getLocation()); PP.LexNonComment(PeekTok); return false; } case tok::char_constant: { // 'x' llvm::SmallString<32> CharBuffer; CharBuffer.resize(PeekTok.getLength()); const char *ThisTokBegin = &CharBuffer[0]; unsigned ActualLength = PP.getSpelling(PeekTok, ThisTokBegin); CharLiteralParser Literal(ThisTokBegin, ThisTokBegin+ActualLength, PeekTok.getLocation(), PP); if (Literal.hadError()) return true; // A diagnostic was already emitted. // Character literals are always int or wchar_t, expand to intmax_t. const TargetInfo &TI = PP.getTargetInfo(); unsigned NumBits; if (Literal.isMultiChar()) NumBits = TI.getIntWidth(); else if (Literal.isWide()) NumBits = TI.getWCharWidth(); else NumBits = TI.getCharWidth(); // Set the width. llvm::APSInt Val(NumBits); // Set the value. Val = Literal.getValue(); // Set the signedness. Val.setIsUnsigned(!PP.getLangOptions().CharIsSigned); if (Result.Val.getBitWidth() > Val.getBitWidth()) { Result.Val = Val.extend(Result.Val.getBitWidth()); } else { assert(Result.Val.getBitWidth() == Val.getBitWidth() && "intmax_t smaller than char/wchar_t?"); Result.Val = Val; } // Consume the token. Result.setRange(PeekTok.getLocation()); PP.LexNonComment(PeekTok); return false; } case tok::l_paren: { SourceLocation Start = PeekTok.getLocation(); PP.LexNonComment(PeekTok); // Eat the (. // Parse the value and if there are any binary operators involved, parse // them. if (EvaluateValue(Result, PeekTok, DT, ValueLive, PP)) return true; // If this is a silly value like (X), which doesn't need parens, check for // !(defined X). if (PeekTok.is(tok::r_paren)) { // Just use DT unmodified as our result. } else { // Otherwise, we have something like (x+y), and we consumed '(x'. if (EvaluateDirectiveSubExpr(Result, 1, PeekTok, ValueLive, PP)) return true; if (PeekTok.isNot(tok::r_paren)) { PP.Diag(PeekTok.getLocation(), diag::err_pp_expected_rparen) << Result.getRange(); PP.Diag(Start, diag::note_matching) << "("; return true; } DT.State = DefinedTracker::Unknown; } Result.setRange(Start, PeekTok.getLocation()); PP.LexNonComment(PeekTok); // Eat the ). return false; } case tok::plus: { SourceLocation Start = PeekTok.getLocation(); // Unary plus doesn't modify the value. PP.LexNonComment(PeekTok); if (EvaluateValue(Result, PeekTok, DT, ValueLive, PP)) return true; Result.setBegin(Start); return false; } case tok::minus: { SourceLocation Loc = PeekTok.getLocation(); PP.LexNonComment(PeekTok); if (EvaluateValue(Result, PeekTok, DT, ValueLive, PP)) return true; Result.setBegin(Loc); // C99 6.5.3.3p3: The sign of the result matches the sign of the operand. Result.Val = -Result.Val; // -MININT is the only thing that overflows. Unsigned never overflows. bool Overflow = !Result.isUnsigned() && Result.Val.isMinSignedValue(); // If this operator is live and overflowed, report the issue. if (Overflow && ValueLive) PP.Diag(Loc, diag::warn_pp_expr_overflow) << Result.getRange(); DT.State = DefinedTracker::Unknown; return false; } case tok::tilde: { SourceLocation Start = PeekTok.getLocation(); PP.LexNonComment(PeekTok); if (EvaluateValue(Result, PeekTok, DT, ValueLive, PP)) return true; Result.setBegin(Start); // C99 6.5.3.3p4: The sign of the result matches the sign of the operand. 
Result.Val = ~Result.Val; DT.State = DefinedTracker::Unknown; return false; } case tok::exclaim: { SourceLocation Start = PeekTok.getLocation(); PP.LexNonComment(PeekTok); if (EvaluateValue(Result, PeekTok, DT, ValueLive, PP)) return true; Result.setBegin(Start); Result.Val = !Result.Val; // C99 6.5.3.3p5: The sign of the result is 'int', aka it is signed. Result.Val.setIsUnsigned(false); if (DT.State == DefinedTracker::DefinedMacro) DT.State = DefinedTracker::NotDefinedMacro; else if (DT.State == DefinedTracker::NotDefinedMacro) DT.State = DefinedTracker::DefinedMacro; return false; } // FIXME: Handle #assert } }
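// The directive conditions below exercise the primary-expression cases that
// EvaluateValue handles: numeric constants, a character constant, parentheses,
// unary operators, and the defined operator.  The binary operators are folded
// by EvaluateDirectiveSubExpr, shown further down.  BUFFER_BITS is a made-up
// macro and the character comparison assumes an ASCII execution character set.
#if !defined(BUFFER_BITS)
#  define BUFFER_BITS 12
#endif

/* numeric constants, parentheses, unary ! and -, a character constant, and
   the defined operator all flow through EvaluateValue */
#if ((1 << BUFFER_BITS) - 1) == 0xFFF && 'A' == 65 && !(-1 < 0) == 0
#  define BUFFER_MASK 0xFFFu
#endif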
/// HasFeature - Return true if we recognize and implement the feature /// specified by the identifier as a standard language feature. static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) { const LangOptions &LangOpts = PP.getLangOptions(); return llvm::StringSwitch<bool>(II->getName()) .Case("address_sanitizer", LangOpts.AddressSanitizer) .Case("attribute_analyzer_noreturn", true) .Case("attribute_availability", true) .Case("attribute_cf_returns_not_retained", true) .Case("attribute_cf_returns_retained", true) .Case("attribute_deprecated_with_message", true) .Case("attribute_ext_vector_type", true) .Case("attribute_ns_returns_not_retained", true) .Case("attribute_ns_returns_retained", true) .Case("attribute_ns_consumes_self", true) .Case("attribute_ns_consumed", true) .Case("attribute_cf_consumed", true) .Case("attribute_objc_ivar_unused", true) .Case("attribute_objc_method_family", true) .Case("attribute_overloadable", true) .Case("attribute_unavailable_with_message", true) .Case("blocks", LangOpts.Blocks) .Case("cxx_exceptions", LangOpts.Exceptions) .Case("cxx_rtti", LangOpts.RTTI) .Case("enumerator_attributes", true) // Objective-C features .Case("objc_arr", LangOpts.ObjCAutoRefCount) // FIXME: REMOVE? .Case("objc_arc", LangOpts.ObjCAutoRefCount) .Case("objc_arc_weak", LangOpts.ObjCAutoRefCount && LangOpts.ObjCRuntimeHasWeak) .Case("objc_fixed_enum", LangOpts.ObjC2) .Case("objc_instancetype", LangOpts.ObjC2) .Case("objc_nonfragile_abi", LangOpts.ObjCNonFragileABI) .Case("objc_weak_class", LangOpts.ObjCNonFragileABI) .Case("ownership_holds", true) .Case("ownership_returns", true) .Case("ownership_takes", true) .Case("arc_cf_code_audited", true) // C11 features .Case("c_alignas", LangOpts.C11) .Case("c_generic_selections", LangOpts.C11) .Case("c_static_assert", LangOpts.C11) // C++0x features .Case("cxx_access_control_sfinae", LangOpts.CPlusPlus0x) .Case("cxx_alias_templates", LangOpts.CPlusPlus0x) .Case("cxx_alignas", LangOpts.CPlusPlus0x) .Case("cxx_attributes", LangOpts.CPlusPlus0x) .Case("cxx_auto_type", LangOpts.CPlusPlus0x) //.Case("cxx_constexpr", false); .Case("cxx_decltype", LangOpts.CPlusPlus0x) .Case("cxx_default_function_template_args", LangOpts.CPlusPlus0x) .Case("cxx_defaulted_functions", LangOpts.CPlusPlus0x) .Case("cxx_delegating_constructors", LangOpts.CPlusPlus0x) .Case("cxx_deleted_functions", LangOpts.CPlusPlus0x) .Case("cxx_explicit_conversions", LangOpts.CPlusPlus0x) //.Case("cxx_generalized_initializers", LangOpts.CPlusPlus0x) .Case("cxx_implicit_moves", LangOpts.CPlusPlus0x) //.Case("cxx_inheriting_constructors", false) .Case("cxx_inline_namespaces", LangOpts.CPlusPlus0x) //.Case("cxx_lambdas", false) .Case("cxx_nonstatic_member_init", LangOpts.CPlusPlus0x) .Case("cxx_noexcept", LangOpts.CPlusPlus0x) .Case("cxx_nullptr", LangOpts.CPlusPlus0x) .Case("cxx_override_control", LangOpts.CPlusPlus0x) .Case("cxx_range_for", LangOpts.CPlusPlus0x) .Case("cxx_raw_string_literals", LangOpts.CPlusPlus0x) .Case("cxx_reference_qualified_functions", LangOpts.CPlusPlus0x) .Case("cxx_rvalue_references", LangOpts.CPlusPlus0x) .Case("cxx_strong_enums", LangOpts.CPlusPlus0x) .Case("cxx_static_assert", LangOpts.CPlusPlus0x) .Case("cxx_trailing_return", LangOpts.CPlusPlus0x) .Case("cxx_unicode_literals", LangOpts.CPlusPlus0x) //.Case("cxx_unrestricted_unions", false) //.Case("cxx_user_literals", false) .Case("cxx_variadic_templates", LangOpts.CPlusPlus0x) // Type traits .Case("has_nothrow_assign", LangOpts.CPlusPlus) .Case("has_nothrow_copy", LangOpts.CPlusPlus) 
.Case("has_nothrow_constructor", LangOpts.CPlusPlus) .Case("has_trivial_assign", LangOpts.CPlusPlus) .Case("has_trivial_copy", LangOpts.CPlusPlus) .Case("has_trivial_constructor", LangOpts.CPlusPlus) .Case("has_trivial_destructor", LangOpts.CPlusPlus) .Case("has_virtual_destructor", LangOpts.CPlusPlus) .Case("is_abstract", LangOpts.CPlusPlus) .Case("is_base_of", LangOpts.CPlusPlus) .Case("is_class", LangOpts.CPlusPlus) .Case("is_convertible_to", LangOpts.CPlusPlus) // __is_empty is available only if the horrible // "struct __is_empty" parsing hack hasn't been needed in this // translation unit. If it has, __is_empty reverts to a normal // identifier and __has_feature(is_empty) evaluates false. .Case("is_empty", LangOpts.CPlusPlus && PP.getIdentifierInfo("__is_empty")->getTokenID() != tok::identifier) .Case("is_enum", LangOpts.CPlusPlus) .Case("is_final", LangOpts.CPlusPlus) .Case("is_literal", LangOpts.CPlusPlus) .Case("is_standard_layout", LangOpts.CPlusPlus) // __is_pod is available only if the horrible // "struct __is_pod" parsing hack hasn't been needed in this // translation unit. If it has, __is_pod reverts to a normal // identifier and __has_feature(is_pod) evaluates false. .Case("is_pod", LangOpts.CPlusPlus && PP.getIdentifierInfo("__is_pod")->getTokenID() != tok::identifier) .Case("is_polymorphic", LangOpts.CPlusPlus) .Case("is_trivial", LangOpts.CPlusPlus) .Case("is_trivially_copyable", LangOpts.CPlusPlus) .Case("is_union", LangOpts.CPlusPlus) .Case("tls", PP.getTargetInfo().isTLSSupported()) .Case("underlying_type", LangOpts.CPlusPlus) .Default(false); }
/// RewriteMacrosInInput - Implement -rewrite-macros mode. void clang::RewriteMacrosInInput(Preprocessor &PP,const std::string &InFileName, const std::string &OutFileName) { SourceManager &SM = PP.getSourceManager(); Rewriter Rewrite; Rewrite.setSourceMgr(SM, PP.getLangOptions()); RewriteBuffer &RB = Rewrite.getEditBuffer(SM.getMainFileID()); std::vector<Token> RawTokens; LexRawTokensFromMainFile(PP, RawTokens); unsigned CurRawTok = 0; Token RawTok = GetNextRawTok(RawTokens, CurRawTok, false); // Get the first preprocessing token. PP.EnterMainSourceFile(); Token PPTok; PP.Lex(PPTok); // Preprocess the input file in parallel with raw lexing the main file. Ignore // all tokens that are preprocessed from a file other than the main file (e.g. // a header). If we see tokens that are in the preprocessed file but not the // lexed file, we have a macro expansion. If we see tokens in the lexed file // that aren't in the preprocessed view, we have macros that expand to no // tokens, or macro arguments etc. while (RawTok.isNot(tok::eof) || PPTok.isNot(tok::eof)) { SourceLocation PPLoc = SM.getInstantiationLoc(PPTok.getLocation()); // If PPTok is from a different source file, ignore it. if (!SM.isFromMainFile(PPLoc)) { PP.Lex(PPTok); continue; } // If the raw file hits a preprocessor directive, they will be extra tokens // in the raw file that don't exist in the preprocsesed file. However, we // choose to preserve them in the output file and otherwise handle them // specially. if (RawTok.is(tok::hash) && RawTok.isAtStartOfLine()) { // If this is a #warning directive or #pragma mark (GNU extensions), // comment the line out. if (RawTokens[CurRawTok].is(tok::identifier)) { const IdentifierInfo *II = RawTokens[CurRawTok].getIdentifierInfo(); if (!strcmp(II->getName(), "warning")) { // Comment out #warning. RB.InsertTextAfter(SM.getFileOffset(RawTok.getLocation()), "//", 2); } else if (!strcmp(II->getName(), "pragma") && RawTokens[CurRawTok+1].is(tok::identifier) && !strcmp(RawTokens[CurRawTok+1].getIdentifierInfo()->getName(), "mark")){ // Comment out #pragma mark. RB.InsertTextAfter(SM.getFileOffset(RawTok.getLocation()), "//", 2); } } // Otherwise, if this is a #include or some other directive, just leave it // in the file by skipping over the line. RawTok = GetNextRawTok(RawTokens, CurRawTok, false); while (!RawTok.isAtStartOfLine() && RawTok.isNot(tok::eof)) RawTok = GetNextRawTok(RawTokens, CurRawTok, false); continue; } // Okay, both tokens are from the same file. Get their offsets from the // start of the file. unsigned PPOffs = SM.getFileOffset(PPLoc); unsigned RawOffs = SM.getFileOffset(RawTok.getLocation()); // If the offsets are the same and the token kind is the same, ignore them. if (PPOffs == RawOffs && isSameToken(RawTok, PPTok)) { RawTok = GetNextRawTok(RawTokens, CurRawTok, false); PP.Lex(PPTok); continue; } // If the PP token is farther along than the raw token, something was // deleted. Comment out the raw token. if (RawOffs <= PPOffs) { // Comment out a whole run of tokens instead of bracketing each one with // comments. Add a leading space if RawTok didn't have one. bool HasSpace = RawTok.hasLeadingSpace(); RB.InsertTextAfter(RawOffs, " /*"+HasSpace, 2+!HasSpace); unsigned EndPos; do { EndPos = RawOffs+RawTok.getLength(); RawTok = GetNextRawTok(RawTokens, CurRawTok, true); RawOffs = SM.getFileOffset(RawTok.getLocation()); if (RawTok.is(tok::comment)) { // Skip past the comment. 
RawTok = GetNextRawTok(RawTokens, CurRawTok, false); break; } } while (RawOffs <= PPOffs && !RawTok.isAtStartOfLine() && (PPOffs != RawOffs || !isSameToken(RawTok, PPTok))); RB.InsertTextBefore(EndPos, "*/", 2); continue; } // Otherwise, there was a replacement an expansion. Insert the new token // in the output buffer. Insert the whole run of new tokens at once to get // them in the right order. unsigned InsertPos = PPOffs; std::string Expansion; while (PPOffs < RawOffs) { Expansion += ' ' + PP.getSpelling(PPTok); PP.Lex(PPTok); PPLoc = SM.getInstantiationLoc(PPTok.getLocation()); PPOffs = SM.getFileOffset(PPLoc); } Expansion += ' '; RB.InsertTextBefore(InsertPos, &Expansion[0], Expansion.size()); } // Create the output file. llvm::OwningPtr<llvm::raw_ostream> OwnedStream; llvm::raw_ostream *OutFile; if (OutFileName == "-") { OutFile = &llvm::outs(); } else if (!OutFileName.empty()) { std::string Err; OutFile = new llvm::raw_fd_ostream(OutFileName.c_str(), false, Err); OwnedStream.reset(OutFile); } else if (InFileName == "-") { OutFile = &llvm::outs(); } else { llvm::sys::Path Path(InFileName); Path.eraseSuffix(); Path.appendSuffix("cpp"); std::string Err; OutFile = new llvm::raw_fd_ostream(Path.toString().c_str(), false, Err); OwnedStream.reset(OutFile); } // Get the buffer corresponding to MainFileID. If we haven't changed it, then // we are done. if (const RewriteBuffer *RewriteBuf = Rewrite.getRewriteBufferFor(SM.getMainFileID())) { //printf("Changed:\n"); *OutFile << std::string(RewriteBuf->begin(), RewriteBuf->end()); } else { fprintf(stderr, "No changes\n"); } OutFile->flush(); }
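// A rough before/after of what RewriteMacrosInInput produces for a simple
// input.  Raw tokens that disappear after preprocessing are commented out and
// the expanded tokens are spliced in at the same offset; the exact spacing
// and the ordering of the comment versus the expansion are approximate.

/* before */
#define PI 3.14159
double circumference = 2 * PI * radius;

/* after (approximate) */
#define PI 3.14159
double circumference = 2 * 3.14159 /*PI*/ * radius;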
/// HighlightMacros - This uses the macro table state from the end of the /// file, to re-expand macros and insert (into the HTML) information about the /// macro expansions. This won't be perfectly perfect, but it will be /// reasonably close. void html::HighlightMacros(Rewriter &R, FileID FID, const Preprocessor& PP) { // Re-lex the raw token stream into a token buffer. const SourceManager &SM = PP.getSourceManager(); std::vector<Token> TokenStream; const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID); Lexer L(FID, FromFile, SM, PP.getLangOptions()); // Lex all the tokens in raw mode, to avoid entering #includes or expanding // macros. while (1) { Token Tok; L.LexFromRawLexer(Tok); // If this is a # at the start of a line, discard it from the token stream. // We don't want the re-preprocess step to see #defines, #includes or other // preprocessor directives. if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) continue; // If this is a ## token, change its kind to unknown so that repreprocessing // it will not produce an error. if (Tok.is(tok::hashhash)) Tok.setKind(tok::unknown); // If this raw token is an identifier, the raw lexer won't have looked up // the corresponding identifier info for it. Do this now so that it will be // macro expanded when we re-preprocess it. if (Tok.is(tok::raw_identifier)) PP.LookUpIdentifierInfo(Tok); TokenStream.push_back(Tok); if (Tok.is(tok::eof)) break; } // Temporarily change the diagnostics object so that we ignore any generated // diagnostics from this pass. DiagnosticsEngine TmpDiags(PP.getDiagnostics().getDiagnosticIDs(), new IgnoringDiagConsumer); // FIXME: This is a huge hack; we reuse the input preprocessor because we want // its state, but we aren't actually changing it (we hope). This should really // construct a copy of the preprocessor. Preprocessor &TmpPP = const_cast<Preprocessor&>(PP); DiagnosticsEngine *OldDiags = &TmpPP.getDiagnostics(); TmpPP.setDiagnostics(TmpDiags); // Inform the preprocessor that we don't want comments. TmpPP.SetCommentRetentionState(false, false); // Enter the tokens we just lexed. This will cause them to be macro expanded // but won't enter sub-files (because we removed #'s). TmpPP.EnterTokenStream(&TokenStream[0], TokenStream.size(), false, false); TokenConcatenation ConcatInfo(TmpPP); // Lex all the tokens. Token Tok; TmpPP.Lex(Tok); while (Tok.isNot(tok::eof)) { // Ignore non-macro tokens. if (!Tok.getLocation().isMacroID()) { TmpPP.Lex(Tok); continue; } // Okay, we have the first token of a macro expansion: highlight the // expansion by inserting a start tag before the macro expansion and // end tag after it. std::pair<SourceLocation, SourceLocation> LLoc = SM.getExpansionRange(Tok.getLocation()); // Ignore tokens whose instantiation location was not the main file. if (SM.getFileID(LLoc.first) != FID) { TmpPP.Lex(Tok); continue; } assert(SM.getFileID(LLoc.second) == FID && "Start and end of expansion must be in the same ultimate file!"); std::string Expansion = EscapeText(TmpPP.getSpelling(Tok)); unsigned LineLen = Expansion.size(); Token PrevPrevTok; Token PrevTok = Tok; // Okay, eat this token, getting the next one. TmpPP.Lex(Tok); // Skip all the rest of the tokens that are part of this macro // instantiation. It would be really nice to pop up a window with all the // spelling of the tokens or something. while (!Tok.is(tok::eof) && SM.getExpansionLoc(Tok.getLocation()) == LLoc.first) { // Insert a newline if the macro expansion is getting large. 
if (LineLen > 60) { Expansion += "<br>"; LineLen = 0; } LineLen -= Expansion.size(); // If the tokens were already space separated, or if they must be to avoid // them being implicitly pasted, add a space between them. if (Tok.hasLeadingSpace() || ConcatInfo.AvoidConcat(PrevPrevTok, PrevTok, Tok)) Expansion += ' '; // Escape any special characters in the token text. Expansion += EscapeText(TmpPP.getSpelling(Tok)); LineLen += Expansion.size(); PrevPrevTok = PrevTok; PrevTok = Tok; TmpPP.Lex(Tok); } // Insert the expansion as the end tag, so that multi-line macros all get // highlighted. Expansion = "<span class='expansion'>" + Expansion + "</span></span>"; HighlightRange(R, LLoc.first, LLoc.second, "<span class='macro'>", Expansion.c_str()); } // Restore diagnostics object back to its own thing. TmpPP.setDiagnostics(*OldDiags); }
/// SyntaxHighlight - Relex the specified FileID and annotate the HTML with /// information about keywords, macro expansions etc. This uses the macro /// table state from the end of the file, so it won't be perfectly perfect, /// but it will be reasonably close. void html::SyntaxHighlight(Rewriter &R, FileID FID, const Preprocessor &PP) { RewriteBuffer &RB = R.getEditBuffer(FID); const SourceManager &SM = PP.getSourceManager(); const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID); Lexer L(FID, FromFile, SM, PP.getLangOptions()); const char *BufferStart = L.getBufferStart(); // Inform the preprocessor that we want to retain comments as tokens, so we // can highlight them. L.SetCommentRetentionState(true); // Lex all the tokens in raw mode, to avoid entering #includes or expanding // macros. Token Tok; L.LexFromRawLexer(Tok); while (Tok.isNot(tok::eof)) { // Since we are lexing unexpanded tokens, all tokens are from the main // FileID. unsigned TokOffs = SM.getFileOffset(Tok.getLocation()); unsigned TokLen = Tok.getLength(); switch (Tok.getKind()) { default: break; case tok::identifier: llvm_unreachable("tok::identifier in raw lexing mode!"); break; case tok::raw_identifier: { // Fill in Result.IdentifierInfo and update the token kind, // looking up the identifier in the identifier table. PP.LookUpIdentifierInfo(Tok); // If this is a pp-identifier, for a keyword, highlight it as such. if (Tok.isNot(tok::identifier)) HighlightRange(RB, TokOffs, TokOffs+TokLen, BufferStart, "<span class='keyword'>", "</span>"); break; } case tok::comment: HighlightRange(RB, TokOffs, TokOffs+TokLen, BufferStart, "<span class='comment'>", "</span>"); break; case tok::utf8_string_literal: // Chop off the u part of u8 prefix ++TokOffs; --TokLen; // FALL THROUGH to chop the 8 case tok::wide_string_literal: case tok::utf16_string_literal: case tok::utf32_string_literal: // Chop off the L, u, U or 8 prefix ++TokOffs; --TokLen; // FALL THROUGH. case tok::string_literal: HighlightRange(RB, TokOffs, TokOffs+TokLen, BufferStart, "<span class='string_literal'>", "</span>"); break; case tok::hash: { // If this is a preprocessor directive, all tokens to end of line are too. if (!Tok.isAtStartOfLine()) break; // Eat all of the tokens until we get to the next one at the start of // line. unsigned TokEnd = TokOffs+TokLen; L.LexFromRawLexer(Tok); while (!Tok.isAtStartOfLine() && Tok.isNot(tok::eof)) { TokEnd = SM.getFileOffset(Tok.getLocation())+Tok.getLength(); L.LexFromRawLexer(Tok); } // Find end of line. This is a hack. HighlightRange(RB, TokOffs, TokEnd, BufferStart, "<span class='directive'>", "</span>"); // Don't skip the next token. continue; } } L.LexFromRawLexer(Tok); } }
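// Roughly the markup the two highlighters emit (illustrative; escaping of the
// source text is handled elsewhere in the HTML rewriter).  SyntaxHighlight
// wraps keywords, comments, string literals and directives, while
// HighlightMacros adds the 'macro'/'expansion' span pair around macro uses:
//
//   <span class='directive'>#define GREETING "hi"</span>
//   <span class='keyword'>const</span> <span class='keyword'>char</span> *s =
//     <span class='macro'>GREETING<span class='expansion'>"hi"</span></span>;
//   <span class='comment'>/* highlighted as a comment */</span>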
/// InitializePreprocessor - Initialize the preprocessor getting it and the /// environment ready to process a single file. This returns true on error. /// void clang::InitializePreprocessor(Preprocessor &PP, const PreprocessorOptions &InitOpts, const HeaderSearchOptions &HSOpts, const FrontendOptions &FEOpts) { const LangOptions &LangOpts = PP.getLangOptions(); std::string PredefineBuffer; PredefineBuffer.reserve(4080); llvm::raw_string_ostream Predefines(PredefineBuffer); MacroBuilder Builder(Predefines); InitializeFileRemapping(PP.getDiagnostics(), PP.getSourceManager(), PP.getFileManager(), InitOpts); // Specify whether the preprocessor should replace #include/#import with // module imports when plausible. PP.setAutoModuleImport(InitOpts.AutoModuleImport); // Emit line markers for various builtin sections of the file. We don't do // this in asm preprocessor mode, because "# 4" is not a line marker directive // in this mode. if (!PP.getLangOptions().AsmPreprocessor) Builder.append("# 1 \"<built-in>\" 3"); // Install things like __POWERPC__, __GNUC__, etc into the macro table. if (InitOpts.UsePredefines) { InitializePredefinedMacros(PP.getTargetInfo(), LangOpts, FEOpts, Builder); // Install definitions to make Objective-C++ ARC work well with various // C++ Standard Library implementations. if (LangOpts.ObjC1 && LangOpts.CPlusPlus && LangOpts.ObjCAutoRefCount) { switch (InitOpts.ObjCXXARCStandardLibrary) { case ARCXX_nolib: break; case ARCXX_libcxx: AddObjCXXARCLibcxxDefines(LangOpts, Builder); break; case ARCXX_libstdcxx: AddObjCXXARCLibstdcxxDefines(LangOpts, Builder); break; } } } // Even with predefines off, some macros are still predefined. // These should all be defined in the preprocessor according to the // current language configuration. InitializeStandardPredefinedMacros(PP.getTargetInfo(), PP.getLangOptions(), FEOpts, Builder); // Add on the predefines from the driver. Wrap in a #line directive to report // that they come from the command line. if (!PP.getLangOptions().AsmPreprocessor) Builder.append("# 1 \"<command line>\" 1"); // Process #define's and #undef's in the order they are given. for (unsigned i = 0, e = InitOpts.Macros.size(); i != e; ++i) { if (InitOpts.Macros[i].second) // isUndef Builder.undefineMacro(InitOpts.Macros[i].first); else DefineBuiltinMacro(Builder, InitOpts.Macros[i].first, PP.getDiagnostics()); } // If -imacros are specified, include them now. These are processed before // any -include directives. for (unsigned i = 0, e = InitOpts.MacroIncludes.size(); i != e; ++i) AddImplicitIncludeMacros(Builder, InitOpts.MacroIncludes[i], PP.getFileManager()); // Process -include directives. for (unsigned i = 0, e = InitOpts.Includes.size(); i != e; ++i) { const std::string &Path = InitOpts.Includes[i]; if (Path == InitOpts.ImplicitPTHInclude) AddImplicitIncludePTH(Builder, PP, Path); else AddImplicitInclude(Builder, Path, PP.getFileManager()); } // Exit the command line and go back to <built-in> (2 is LC_LEAVE). if (!PP.getLangOptions().AsmPreprocessor) Builder.append("# 1 \"<built-in>\" 2"); // Instruct the preprocessor to skip the preamble. PP.setSkipMainFilePreamble(InitOpts.PrecompiledPreambleBytes.first, InitOpts.PrecompiledPreambleBytes.second); // Copy PredefinedBuffer into the Preprocessor. PP.setPredefines(Predefines.str()); // Initialize the header search object. ApplyHeaderSearchOptions(PP.getHeaderSearchInfo(), HSOpts, PP.getLangOptions(), PP.getTargetInfo().getTriple()); }
/// Finish - This does final analysis of the declspec, rejecting things like /// "_Imaginary" (lacking an FP type). This returns a diagnostic to issue or /// diag::NUM_DIAGNOSTICS if there is no error. After calling this method, /// DeclSpec is guaranteed self-consistent, even if an error occurred. void DeclSpec::Finish(DiagnosticsEngine &D, Preprocessor &PP) { // Before possibly changing their values, save specs as written. SaveWrittenBuiltinSpecs(); SaveStorageSpecifierAsWritten(); // Check the type specifier components first. // Validate and finalize AltiVec vector declspec. if (TypeAltiVecVector) { if (TypeAltiVecBool) { // Sign specifiers are not allowed with vector bool. (PIM 2.1) if (TypeSpecSign != TSS_unspecified) { Diag(D, TSSLoc, diag::err_invalid_vector_bool_decl_spec) << getSpecifierName((TSS)TypeSpecSign); } // Only char/int are valid with vector bool. (PIM 2.1) if (((TypeSpecType != TST_unspecified) && (TypeSpecType != TST_char) && (TypeSpecType != TST_int)) || TypeAltiVecPixel) { Diag(D, TSTLoc, diag::err_invalid_vector_bool_decl_spec) << (TypeAltiVecPixel ? "__pixel" : getSpecifierName((TST)TypeSpecType)); } // Only 'short' is valid with vector bool. (PIM 2.1) if ((TypeSpecWidth != TSW_unspecified) && (TypeSpecWidth != TSW_short)) Diag(D, TSWLoc, diag::err_invalid_vector_bool_decl_spec) << getSpecifierName((TSW)TypeSpecWidth); // Elements of vector bool are interpreted as unsigned. (PIM 2.1) if ((TypeSpecType == TST_char) || (TypeSpecType == TST_int) || (TypeSpecWidth != TSW_unspecified)) TypeSpecSign = TSS_unsigned; } if (TypeAltiVecPixel) { //TODO: perform validation TypeSpecType = TST_int; TypeSpecSign = TSS_unsigned; TypeSpecWidth = TSW_short; TypeSpecOwned = false; } } // signed/unsigned are only valid with int/char/wchar_t. if (TypeSpecSign != TSS_unspecified) { if (TypeSpecType == TST_unspecified) TypeSpecType = TST_int; // unsigned -> unsigned int, signed -> signed int. else if (TypeSpecType != TST_int && TypeSpecType != TST_char && TypeSpecType != TST_wchar) { Diag(D, TSSLoc, diag::err_invalid_sign_spec) << getSpecifierName((TST)TypeSpecType); // signed double -> double. TypeSpecSign = TSS_unspecified; } } // Validate the width of the type. switch (TypeSpecWidth) { case TSW_unspecified: break; case TSW_short: // short int case TSW_longlong: // long long int if (TypeSpecType == TST_unspecified) TypeSpecType = TST_int; // short -> short int, long long -> long long int. else if (TypeSpecType != TST_int) { Diag(D, TSWLoc, TypeSpecWidth == TSW_short ? diag::err_invalid_short_spec : diag::err_invalid_longlong_spec) << getSpecifierName((TST)TypeSpecType); TypeSpecType = TST_int; TypeSpecOwned = false; } break; case TSW_long: // long double, long int if (TypeSpecType == TST_unspecified) TypeSpecType = TST_int; // long -> long int. else if (TypeSpecType != TST_int && TypeSpecType != TST_double) { Diag(D, TSWLoc, diag::err_invalid_long_spec) << getSpecifierName((TST)TypeSpecType); TypeSpecType = TST_int; TypeSpecOwned = false; } break; } // TODO: if the implementation does not implement _Complex or _Imaginary, // disallow their use. Need information about the backend. if (TypeSpecComplex != TSC_unspecified) { if (TypeSpecType == TST_unspecified) { Diag(D, TSCLoc, diag::ext_plain_complex) << FixItHint::CreateInsertion( PP.getLocForEndOfToken(getTypeSpecComplexLoc()), " double"); TypeSpecType = TST_double; // _Complex -> _Complex double. } else if (TypeSpecType == TST_int || TypeSpecType == TST_char) { // Note that this intentionally doesn't include _Complex _Bool. 
Diag(D, TSTLoc, diag::ext_integer_complex); } else if (TypeSpecType != TST_float && TypeSpecType != TST_double) { Diag(D, TSCLoc, diag::err_invalid_complex_spec) << getSpecifierName((TST)TypeSpecType); TypeSpecComplex = TSC_unspecified; } } // If no type specifier was provided and we're parsing a language where // the type specifier is not optional, but we got 'auto' as a storage // class specifier, then assume this is an attempt to use C++0x's 'auto' // type specifier. // FIXME: Does Microsoft really support implicit int in C++? if (PP.getLangOptions().CPlusPlus && !PP.getLangOptions().MicrosoftExt && TypeSpecType == TST_unspecified && StorageClassSpec == SCS_auto) { TypeSpecType = TST_auto; StorageClassSpec = StorageClassSpecAsWritten = SCS_unspecified; TSTLoc = TSTNameLoc = StorageClassSpecLoc; StorageClassSpecLoc = SourceLocation(); } // Diagnose if we've recovered from an ill-formed 'auto' storage class // specifier in a pre-C++0x dialect of C++. if (!PP.getLangOptions().CPlusPlus0x && TypeSpecType == TST_auto) Diag(D, TSTLoc, diag::ext_auto_type_specifier); if (PP.getLangOptions().CPlusPlus && !PP.getLangOptions().CPlusPlus0x && StorageClassSpec == SCS_auto) Diag(D, StorageClassSpecLoc, diag::warn_auto_storage_class) << FixItHint::CreateRemoval(StorageClassSpecLoc); // C++ [class.friend]p6: // No storage-class-specifier shall appear in the decl-specifier-seq // of a friend declaration. if (isFriendSpecified() && getStorageClassSpec()) { DeclSpec::SCS SC = getStorageClassSpec(); const char *SpecName = getSpecifierName(SC); SourceLocation SCLoc = getStorageClassSpecLoc(); SourceLocation SCEndLoc = SCLoc.getLocWithOffset(strlen(SpecName)); Diag(D, SCLoc, diag::err_friend_storage_spec) << SpecName << FixItHint::CreateRemoval(SourceRange(SCLoc, SCEndLoc)); ClearStorageClassSpecs(); } assert(!TypeSpecOwned || isDeclRep((TST) TypeSpecType)); // Okay, now we can infer the real type. // TODO: return "auto function" and other bad things based on the real type. // 'data definition has no type or storage class'? }
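// Declarations illustrating the adjustments and diagnostics Finish() performs.
// The list is illustrative only: it mixes C and C++11 cases, and the
// commented-out line shows a combination that is rejected and recovered from.
_Complex c;          // ext_plain_complex: treated as "_Complex double"
unsigned u;          // sign with no base type: becomes "unsigned int"
short s;             // width with no base type: becomes "short int"
long double ld;      // "long" is valid with double, so no adjustment
// signed double d;  // err_invalid_sign_spec; recovery drops the sign
auto x = 42;         // C++11: storage-class 'auto' reinterpreted as the auto type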
// #pragma pack(...) comes in the following delicious flavors: // pack '(' [integer] ')' // pack '(' 'show' ')' // pack '(' ('push' | 'pop') [',' identifier] [, integer] ')' void PragmaPackHandler::HandlePragma(Preprocessor &PP, PragmaIntroducerKind Introducer, Token &PackTok) { SourceLocation PackLoc = PackTok.getLocation(); Token Tok; PP.Lex(Tok); if (Tok.isNot(tok::l_paren)) { PP.Diag(Tok.getLocation(), diag::warn_pragma_expected_lparen) << "pack"; return; } Sema::PragmaPackKind Kind = Sema::PPK_Default; IdentifierInfo *Name = 0; ExprResult Alignment; SourceLocation LParenLoc = Tok.getLocation(); PP.Lex(Tok); if (Tok.is(tok::numeric_constant)) { Alignment = Actions.ActOnNumericConstant(Tok); if (Alignment.isInvalid()) return; PP.Lex(Tok); // In MSVC/gcc, #pragma pack(4) sets the alignment without affecting // the push/pop stack. // In Apple gcc, #pragma pack(4) is equivalent to #pragma pack(push, 4) if (PP.getLangOptions().ApplePragmaPack) Kind = Sema::PPK_Push; } else if (Tok.is(tok::identifier)) { const IdentifierInfo *II = Tok.getIdentifierInfo(); if (II->isStr("show")) { Kind = Sema::PPK_Show; PP.Lex(Tok); } else { if (II->isStr("push")) { Kind = Sema::PPK_Push; } else if (II->isStr("pop")) { Kind = Sema::PPK_Pop; } else { PP.Diag(Tok.getLocation(), diag::warn_pragma_pack_invalid_action); return; } PP.Lex(Tok); if (Tok.is(tok::comma)) { PP.Lex(Tok); if (Tok.is(tok::numeric_constant)) { Alignment = Actions.ActOnNumericConstant(Tok); if (Alignment.isInvalid()) return; PP.Lex(Tok); } else if (Tok.is(tok::identifier)) { Name = Tok.getIdentifierInfo(); PP.Lex(Tok); if (Tok.is(tok::comma)) { PP.Lex(Tok); if (Tok.isNot(tok::numeric_constant)) { PP.Diag(Tok.getLocation(), diag::warn_pragma_pack_malformed); return; } Alignment = Actions.ActOnNumericConstant(Tok); if (Alignment.isInvalid()) return; PP.Lex(Tok); } } else { PP.Diag(Tok.getLocation(), diag::warn_pragma_pack_malformed); return; } } } } else if (PP.getLangOptions().ApplePragmaPack) { // In MSVC/gcc, #pragma pack() resets the alignment without affecting // the push/pop stack. // In Apple gcc #pragma pack() is equivalent to #pragma pack(pop). Kind = Sema::PPK_Pop; } if (Tok.isNot(tok::r_paren)) { PP.Diag(Tok.getLocation(), diag::warn_pragma_expected_rparen) << "pack"; return; } SourceLocation RParenLoc = Tok.getLocation(); PP.Lex(Tok); if (Tok.isNot(tok::eod)) { PP.Diag(Tok.getLocation(), diag::warn_pragma_extra_tokens_at_eol) << "pack"; return; } Actions.ActOnPragmaPack(Kind, Name, Alignment.release(), PackLoc, LParenLoc, RParenLoc); }
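// The pragma forms accepted by the handler above, annotated with their usual
// MS/gcc semantics (under -fapple-pragma-pack the bare forms push and pop
// instead, as the comments in the handler note):
#pragma pack(2)               /* set the current alignment to 2; stack untouched */
#pragma pack(show)            /* report the current alignment as a diagnostic    */
#pragma pack(push, r1, 1)     /* push the current value, label it r1, then set 1 */
struct Packed { char c; int i; };  /* i lands at offset 1 -> sizeof(Packed) == 5 */
#pragma pack(pop, r1)         /* restore the value saved with label r1           */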
IdentifierResolver::IdentifierResolver(Preprocessor &PP) : LangOpt(PP.getLangOptions()), PP(PP), IdDeclInfos(new IdDeclInfoMap) { }
/// EvaluateDirectiveSubExpr - Evaluate the subexpression whose first token is /// PeekTok, and whose precedence is PeekPrec. This returns the result in LHS. /// /// If ValueLive is false, then this value is being evaluated in a context where /// the result is not used. As such, avoid diagnostics that relate to /// evaluation, such as division by zero warnings. static bool EvaluateDirectiveSubExpr(PPValue &LHS, unsigned MinPrec, Token &PeekTok, bool ValueLive, Preprocessor &PP) { unsigned PeekPrec = getPrecedence(PeekTok.getKind()); // If this token isn't valid, report the error. if (PeekPrec == ~0U) { PP.Diag(PeekTok.getLocation(), diag::err_pp_expr_bad_token_binop) << LHS.getRange(); return true; } while (1) { // If this token has a lower precedence than we are allowed to parse, return // it so that higher levels of the recursion can parse it. if (PeekPrec < MinPrec) return false; tok::TokenKind Operator = PeekTok.getKind(); // If this is a short-circuiting operator, see if the RHS of the operator is // dead. Note that this cannot just clobber ValueLive. Consider // "0 && 1 ? 4 : 1 / 0", which is parsed as "(0 && 1) ? 4 : (1 / 0)". In // this example, the RHS of the && being dead does not make the rest of the // expr dead. bool RHSIsLive; if (Operator == tok::ampamp && LHS.Val == 0) RHSIsLive = false; // RHS of "0 && x" is dead. else if (Operator == tok::pipepipe && LHS.Val != 0) RHSIsLive = false; // RHS of "1 || x" is dead. else if (Operator == tok::question && LHS.Val == 0) RHSIsLive = false; // RHS (x) of "0 ? x : y" is dead. else RHSIsLive = ValueLive; // Consume the operator, remembering the operator's location for reporting. SourceLocation OpLoc = PeekTok.getLocation(); PP.LexNonComment(PeekTok); PPValue RHS(LHS.getBitWidth()); // Parse the RHS of the operator. DefinedTracker DT; if (EvaluateValue(RHS, PeekTok, DT, RHSIsLive, PP)) return true; // Remember the precedence of this operator and get the precedence of the // operator immediately to the right of the RHS. unsigned ThisPrec = PeekPrec; PeekPrec = getPrecedence(PeekTok.getKind()); // If this token isn't valid, report the error. if (PeekPrec == ~0U) { PP.Diag(PeekTok.getLocation(), diag::err_pp_expr_bad_token_binop) << RHS.getRange(); return true; } // Decide whether to include the next binop in this subexpression. For // example, when parsing x+y*z and looking at '*', we want to recursively // handle y*z as a single subexpression. We do this because the precedence // of * is higher than that of +. The only strange case we have to handle // here is for the ?: operator, where the precedence is actually lower than // the LHS of the '?'. The grammar rule is: // // conditional-expression ::= // logical-OR-expression ? expression : conditional-expression // where 'expression' is actually comma-expression. unsigned RHSPrec; if (Operator == tok::question) // The RHS of "?" should be maximally consumed as an expression. RHSPrec = getPrecedence(tok::comma); else // All others should munch while higher precedence. RHSPrec = ThisPrec+1; if (PeekPrec >= RHSPrec) { if (EvaluateDirectiveSubExpr(RHS, RHSPrec, PeekTok, RHSIsLive, PP)) return true; PeekPrec = getPrecedence(PeekTok.getKind()); } assert(PeekPrec <= ThisPrec && "Recursion didn't work!"); // Usual arithmetic conversions (C99 6.3.1.8p1): result is unsigned if // either operand is unsigned. llvm::APSInt Res(LHS.getBitWidth()); switch (Operator) { case tok::question: // No UAC for x and y in "x ? y : z". case tok::lessless: // Shift amount doesn't UAC with shift value. 
case tok::greatergreater: // Shift amount doesn't UAC with shift value. case tok::comma: // Comma operands are not subject to UACs. case tok::pipepipe: // Logical || does not do UACs. case tok::ampamp: // Logical && does not do UACs. break; // No UAC default: Res.setIsUnsigned(LHS.isUnsigned()|RHS.isUnsigned()); // If this just promoted something from signed to unsigned, and if the // value was negative, warn about it. if (ValueLive && Res.isUnsigned()) { if (!LHS.isUnsigned() && LHS.Val.isNegative()) PP.Diag(OpLoc, diag::warn_pp_convert_lhs_to_positive) << LHS.Val.toString(10, true) + " to " + LHS.Val.toString(10, false) << LHS.getRange() << RHS.getRange(); if (!RHS.isUnsigned() && RHS.Val.isNegative()) PP.Diag(OpLoc, diag::warn_pp_convert_rhs_to_positive) << RHS.Val.toString(10, true) + " to " + RHS.Val.toString(10, false) << LHS.getRange() << RHS.getRange(); } LHS.Val.setIsUnsigned(Res.isUnsigned()); RHS.Val.setIsUnsigned(Res.isUnsigned()); } // FIXME: All of these should detect and report overflow?? bool Overflow = false; switch (Operator) { default: assert(0 && "Unknown operator token!"); case tok::percent: if (RHS.Val != 0) Res = LHS.Val % RHS.Val; else if (ValueLive) { PP.Diag(OpLoc, diag::err_pp_remainder_by_zero) << LHS.getRange() << RHS.getRange(); return true; } break; case tok::slash: if (RHS.Val != 0) { Res = LHS.Val / RHS.Val; if (LHS.Val.isSigned()) // MININT/-1 --> overflow. Overflow = LHS.Val.isMinSignedValue() && RHS.Val.isAllOnesValue(); } else if (ValueLive) { PP.Diag(OpLoc, diag::err_pp_division_by_zero) << LHS.getRange() << RHS.getRange(); return true; } break; case tok::star: Res = LHS.Val * RHS.Val; if (Res.isSigned() && LHS.Val != 0 && RHS.Val != 0) Overflow = Res/RHS.Val != LHS.Val || Res/LHS.Val != RHS.Val; break; case tok::lessless: { // Determine whether overflow is about to happen. unsigned ShAmt = static_cast<unsigned>(RHS.Val.getLimitedValue()); if (ShAmt >= LHS.Val.getBitWidth()) Overflow = true, ShAmt = LHS.Val.getBitWidth()-1; else if (LHS.isUnsigned()) Overflow = false; else if (LHS.Val.isNonNegative()) // Don't allow sign change. Overflow = ShAmt >= LHS.Val.countLeadingZeros(); else Overflow = ShAmt >= LHS.Val.countLeadingOnes(); Res = LHS.Val << ShAmt; break; } case tok::greatergreater: { // Determine whether overflow is about to happen. unsigned ShAmt = static_cast<unsigned>(RHS.Val.getLimitedValue()); if (ShAmt >= LHS.getBitWidth()) Overflow = true, ShAmt = LHS.getBitWidth()-1; Res = LHS.Val >> ShAmt; break; } case tok::plus: Res = LHS.Val + RHS.Val; if (LHS.isUnsigned()) Overflow = false; else if (LHS.Val.isNonNegative() == RHS.Val.isNonNegative() && Res.isNonNegative() != LHS.Val.isNonNegative()) Overflow = true; // Overflow for signed addition. break; case tok::minus: Res = LHS.Val - RHS.Val; if (LHS.isUnsigned()) Overflow = false; else if (LHS.Val.isNonNegative() != RHS.Val.isNonNegative() && Res.isNonNegative() != LHS.Val.isNonNegative()) Overflow = true; // Overflow for signed subtraction. 
break; case tok::lessequal: Res = LHS.Val <= RHS.Val; Res.setIsUnsigned(false); // C99 6.5.8p6, result is always int (signed) break; case tok::less: Res = LHS.Val < RHS.Val; Res.setIsUnsigned(false); // C99 6.5.8p6, result is always int (signed) break; case tok::greaterequal: Res = LHS.Val >= RHS.Val; Res.setIsUnsigned(false); // C99 6.5.8p6, result is always int (signed) break; case tok::greater: Res = LHS.Val > RHS.Val; Res.setIsUnsigned(false); // C99 6.5.8p6, result is always int (signed) break; case tok::exclaimequal: Res = LHS.Val != RHS.Val; Res.setIsUnsigned(false); // C99 6.5.9p3, result is always int (signed) break; case tok::equalequal: Res = LHS.Val == RHS.Val; Res.setIsUnsigned(false); // C99 6.5.9p3, result is always int (signed) break; case tok::amp: Res = LHS.Val & RHS.Val; break; case tok::caret: Res = LHS.Val ^ RHS.Val; break; case tok::pipe: Res = LHS.Val | RHS.Val; break; case tok::ampamp: Res = (LHS.Val != 0 && RHS.Val != 0); Res.setIsUnsigned(false); // C99 6.5.13p3, result is always int (signed) break; case tok::pipepipe: Res = (LHS.Val != 0 || RHS.Val != 0); Res.setIsUnsigned(false); // C99 6.5.14p3, result is always int (signed) break; case tok::comma: // Comma is invalid in pp expressions in c89/c++ mode, but is valid in C99 // if not being evaluated. if (!PP.getLangOptions().C99 || ValueLive) PP.Diag(OpLoc, diag::ext_pp_comma_expr) << LHS.getRange() << RHS.getRange(); Res = RHS.Val; // LHS = LHS,RHS -> RHS. break; case tok::question: { // Parse the : part of the expression. if (PeekTok.isNot(tok::colon)) { PP.Diag(PeekTok.getLocation(), diag::err_expected_colon) << LHS.getRange(), RHS.getRange(); PP.Diag(OpLoc, diag::note_matching) << "?"; return true; } // Consume the :. PP.LexNonComment(PeekTok); // Evaluate the value after the :. bool AfterColonLive = ValueLive && LHS.Val == 0; PPValue AfterColonVal(LHS.getBitWidth()); DefinedTracker DT; if (EvaluateValue(AfterColonVal, PeekTok, DT, AfterColonLive, PP)) return true; // Parse anything after the : with the same precedence as ?. We allow // things of equal precedence because ?: is right associative. if (EvaluateDirectiveSubExpr(AfterColonVal, ThisPrec, PeekTok, AfterColonLive, PP)) return true; // Now that we have the condition, the LHS and the RHS of the :, evaluate. Res = LHS.Val != 0 ? RHS.Val : AfterColonVal.Val; RHS.setEnd(AfterColonVal.getRange().getEnd()); // Usual arithmetic conversions (C99 6.3.1.8p1): result is unsigned if // either operand is unsigned. Res.setIsUnsigned(RHS.isUnsigned() | AfterColonVal.isUnsigned()); // Figure out the precedence of the token after the : part. PeekPrec = getPrecedence(PeekTok.getKind()); break; } case tok::colon: // Don't allow :'s to float around without being part of ?: exprs. PP.Diag(OpLoc, diag::err_pp_colon_without_question) << LHS.getRange() << RHS.getRange(); return true; } // If this operator is live and overflowed, report the issue. if (Overflow && ValueLive) PP.Diag(OpLoc, diag::warn_pp_expr_overflow) << LHS.getRange() << RHS.getRange(); // Put the result back into 'LHS' for our next iteration. LHS.Val = Res; LHS.setEnd(RHS.getRange().getEnd()); } return false; }
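// Directive-level behavior implemented above, as seen from user code.  The
// first condition triggers the signed-to-unsigned conversion warning and is
// not taken; the dead right-hand side of && suppresses the division-by-zero
// error; and ?: is right-associative with its RHS parsed at comma precedence.
#if -1 < 0u          /* UAC makes -1 unsigned, so this is false (and warned) */
#  error "not reached"
#endif

#if 0 && (1 / 0)     /* RHS is dead, so no division-by-zero diagnostic */
#endif

#if (1 ? 2 : 0 ? 3 : 4) == 2   /* parsed as 1 ? 2 : (0 ? 3 : 4) */
#endif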
/// ParseAST - Parse the entire file specified, notifying the ASTConsumer as
/// the file is parsed.  This inserts the parsed decls into the translation
/// unit held by Ctx.
///
void clang::ParseAST(Preprocessor &PP, ASTConsumer *Consumer,
                     ASTContext &Ctx, bool PrintStats,
                     bool CompleteTranslationUnit,
                     CodeCompleteConsumer *CompletionConsumer) {
  // Collect global stats on Decls/Stmts (until we have a module streamer).
  if (PrintStats) {
    Decl::CollectingStats(true);
    Stmt::CollectingStats(true);
  }

  Sema S(PP, Ctx, *Consumer, CompleteTranslationUnit, CompletionConsumer);
  Parser P(PP, S);
  PP.EnterMainSourceFile();

  // Initialize the parser.
  P.Initialize();

  Consumer->Initialize(Ctx);

  if (SemaConsumer *SC = dyn_cast<SemaConsumer>(Consumer))
    SC->InitializeSema(S);

  if (ExternalASTSource *External = Ctx.getExternalSource()) {
    if (ExternalSemaSource *ExternalSema =
          dyn_cast<ExternalSemaSource>(External))
      ExternalSema->InitializeSema(S);

    External->StartTranslationUnit(Consumer);
  }

  Parser::DeclGroupPtrTy ADecl;

  while (!P.ParseTopLevelDecl(ADecl)) {  // Not end of file.
    // If we got a null return and something *was* parsed, ignore it.  This
    // is due to a top-level semicolon, an action override, or a parse error
    // skipping something.
    if (ADecl)
      Consumer->HandleTopLevelDecl(ADecl.getAsVal<DeclGroupRef>());
  }

  // Check for any pending Objective-C implementation decls.
  while ((ADecl = P.FinishPendingObjCActions()))
    Consumer->HandleTopLevelDecl(ADecl.getAsVal<DeclGroupRef>());

  // Process any TopLevelDecls generated by #pragma weak.
  for (llvm::SmallVector<Decl*,2>::iterator
         I = S.WeakTopLevelDecls().begin(),
         E = S.WeakTopLevelDecls().end(); I != E; ++I)
    Consumer->HandleTopLevelDecl(DeclGroupRef(*I));

  // Dump record layouts, if requested.
  if (PP.getLangOptions().DumpRecordLayouts)
    DumpRecordLayouts(Ctx);

  Consumer->HandleTranslationUnit(Ctx);

  if (ExternalSemaSource *ESS =
        dyn_cast_or_null<ExternalSemaSource>(Ctx.getExternalSource()))
    ESS->ForgetSema();

  if (SemaConsumer *SC = dyn_cast<SemaConsumer>(Consumer))
    SC->ForgetSema();

  if (PrintStats) {
    fprintf(stderr, "\nSTATISTICS:\n");
    P.getActions().PrintStats();
    Ctx.PrintStats();
    Decl::PrintStats();
    Stmt::PrintStats();
    Consumer->PrintStats();
  }
}
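// Editor's sketch (not part of the original source): a minimal ASTConsumer
// that ParseAST could drive. It overrides only the virtuals that ParseAST
// calls above (Initialize, HandleTopLevelDecl, HandleTranslationUnit); the
// class name is a placeholder and the exact base-class signatures of this
// Clang revision are assumed.
#include "clang/AST/ASTConsumer.h"
#include "clang/AST/DeclGroup.h"
#include <cstdio>

class CountingConsumer : public clang::ASTConsumer {
  unsigned NumTopLevelDecls;
public:
  CountingConsumer() : NumTopLevelDecls(0) {}

  // Called once by ParseAST before any declarations are handed over.
  virtual void Initialize(clang::ASTContext &Ctx) {}

  // Called for each top-level declaration group as it is parsed.
  virtual void HandleTopLevelDecl(clang::DeclGroupRef DG) {
    for (clang::DeclGroupRef::iterator I = DG.begin(), E = DG.end();
         I != E; ++I)
      ++NumTopLevelDecls;
  }

  // Called once after the whole translation unit has been parsed.
  virtual void HandleTranslationUnit(clang::ASTContext &Ctx) {
    fprintf(stderr, "parsed %u top-level decls\n", NumTopLevelDecls);
  }
};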
/// HasFeature - Return true if we recognize and implement the feature
/// specified by the identifier.
static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) {
  const LangOptions &LangOpts = PP.getLangOptions();

  return llvm::StringSwitch<bool>(II->getName())
           .Case("attribute_analyzer_noreturn", true)
           .Case("attribute_cf_returns_not_retained", true)
           .Case("attribute_cf_returns_retained", true)
           .Case("attribute_deprecated_with_message", true)
           .Case("attribute_ext_vector_type", true)
           .Case("attribute_ns_returns_not_retained", true)
           .Case("attribute_ns_returns_retained", true)
           .Case("attribute_ns_consumes_self", true)
           .Case("attribute_ns_consumed", true)
           .Case("attribute_cf_consumed", true)
           .Case("attribute_objc_ivar_unused", true)
           .Case("attribute_objc_method_family", true)
           .Case("attribute_overloadable", true)
           .Case("attribute_unavailable_with_message", true)
           .Case("blocks", LangOpts.Blocks)
           .Case("cxx_exceptions", LangOpts.Exceptions)
           .Case("cxx_rtti", LangOpts.RTTI)
           .Case("enumerator_attributes", true)
           .Case("objc_nonfragile_abi", LangOpts.ObjCNonFragileABI)
           .Case("objc_weak_class", LangOpts.ObjCNonFragileABI)
           .Case("ownership_holds", true)
           .Case("ownership_returns", true)
           .Case("ownership_takes", true)
           // C++0x features
           .Case("cxx_attributes", LangOpts.CPlusPlus0x)
           .Case("cxx_auto_type", LangOpts.CPlusPlus0x)
           .Case("cxx_decltype", LangOpts.CPlusPlus0x)
           .Case("cxx_default_function_template_args", LangOpts.CPlusPlus0x)
           .Case("cxx_deleted_functions", LangOpts.CPlusPlus0x)
           .Case("cxx_inline_namespaces", LangOpts.CPlusPlus0x)
         //.Case("cxx_lambdas", false)
         //.Case("cxx_nullptr", false)
           .Case("cxx_reference_qualified_functions", LangOpts.CPlusPlus0x)
           .Case("cxx_rvalue_references", LangOpts.CPlusPlus0x)
           .Case("cxx_strong_enums", LangOpts.CPlusPlus0x)
           .Case("cxx_static_assert", LangOpts.CPlusPlus0x)
           .Case("cxx_trailing_return", LangOpts.CPlusPlus0x)
           .Case("cxx_variadic_templates", LangOpts.CPlusPlus0x)
           // Type traits
           .Case("has_nothrow_assign", LangOpts.CPlusPlus)
           .Case("has_nothrow_copy", LangOpts.CPlusPlus)
           .Case("has_nothrow_constructor", LangOpts.CPlusPlus)
           .Case("has_trivial_assign", LangOpts.CPlusPlus)
           .Case("has_trivial_copy", LangOpts.CPlusPlus)
           .Case("has_trivial_constructor", LangOpts.CPlusPlus)
           .Case("has_trivial_destructor", LangOpts.CPlusPlus)
           .Case("has_virtual_destructor", LangOpts.CPlusPlus)
           .Case("is_abstract", LangOpts.CPlusPlus)
           .Case("is_base_of", LangOpts.CPlusPlus)
           .Case("is_class", LangOpts.CPlusPlus)
           .Case("is_convertible_to", LangOpts.CPlusPlus)
           .Case("is_empty", LangOpts.CPlusPlus)
           .Case("is_enum", LangOpts.CPlusPlus)
           .Case("is_pod", LangOpts.CPlusPlus)
           .Case("is_polymorphic", LangOpts.CPlusPlus)
           .Case("is_union", LangOpts.CPlusPlus)
           .Case("is_literal", LangOpts.CPlusPlus)
           .Case("tls", PP.getTargetInfo().isTLSSupported())
           .Default(false);
}
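// Editor's sketch (not part of the original source): HasFeature backs Clang's
// __has_feature macro, so the names in the StringSwitch above are what user
// code tests. A typical guarded use looks like this; OldAPI/NewAPI are
// placeholder names, and the fallback #define keeps non-Clang compilers
// working.
#ifndef __has_feature
#define __has_feature(x) 0
#endif

#if __has_feature(cxx_rvalue_references)
// Rvalue references may be used in this branch.
#endif

#if __has_feature(attribute_deprecated_with_message)
void OldAPI() __attribute__((deprecated("use NewAPI instead")));
#endif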