Example no. 1
0
void SourceLines::loadFile(const SourceFiles::FileName & name)
{
    // Read the named file line by line, remember each physical line in
    // sources_[name], rebuild the full text, and hand it to the tokenizer.
    // Throws SourceFileError when the file cannot be opened.
    ifstream file(name.c_str());
    if (!file.is_open())
    {
        ostringstream ss;
        ss << "cannot open source file " << name;
        throw SourceFileError(ss.str());
    }

    LineCollection & lines = sources_[name];

    Tokens::FileContent fullSource;
    for (string line; getline(file, line); )
    {
        lines.push_back(line);
        fullSource += line;

        // built-in rule: getline hitting EOF here means the final line was
        // not terminated by a newline character
        const bool missingFinalNewline = file.eof();
        if (missingFinalNewline)
        {
            Reports::add(name, static_cast<int>(lines.size()), "no newline at end of file");
        }
        else
        {
            fullSource += '\n';
        }
    }

    Tokens::parse(name, fullSource);
}
Example no. 2
0
void SourceLines::loadFile(const SourceFiles::FileName & name)
{
    // The pseudo-name "-" selects standard input; any other name is opened
    // as a regular file.  Open failures and read failures both report the
    // errno message, but with different exception types (SourceFileError vs
    // std::runtime_error), matching the original behaviour.
    if (name != "-")
    {
        std::ifstream file(name.c_str());
        if (!file.is_open())
        {
            std::ostringstream ss;
            ss << "Cannot open source file " << name << ": "
               << strerror(errno);
            throw SourceFileError(ss.str());
        }
        SourceLines::loadFile(file, name);
        // bad() reports a hard stream failure during the read above
        if (file.bad())
        {
            throw std::runtime_error(
                "Cannot read from " + name + ": " + strerror(errno));
        }
    }
    else
    {
        SourceLines::loadFile(std::cin, name);
    }
}
Example no. 3
0
void Tokens::parse(const SourceFiles::FileName & name, const FileContent & src)
{
    // Tokenize src with the Boost.Wave C++ lexer and append one TokenRef per
    // token to fileTokens_[name].  When a token's text can be recovered from
    // the stored physical line, only its coordinates are kept; otherwise the
    // token value itself is stored alongside them.
    // Throws TokensError when the lexer rejects the input.
    TokenCollection & tokensInFile = fileTokens_[name];

    // wave throws exceptions when given an empty file
    if (src.empty() == false)
    {
        try
        {
            typedef wave::cpplexer::lex_token<> token_type;
            typedef wave::cpplexer::lex_iterator<token_type> lexer_type;
            typedef token_type::position_type position_type;

            const position_type startPos(name.c_str());
            lexer_type it = lexer_type(src.begin(), src.end(), startPos,
                wave::language_support(wave::support_cpp | wave::support_option_long_long));
            const lexer_type end = lexer_type();

            const int lineCount = SourceLines::getLineCount(name);

            for ( ; it != end; ++it)
            {
                const wave::token_id id(*it);

                // fix: renamed from `pos`, which shadowed the lexer start
                // position declared above
                const position_type tokenPos = it->get_position();
                const string value = it->get_value().c_str();
                const int line = tokenPos.get_line();
                // wave columns are 1-based; convert to the 0-based convention
                const int column = tokenPos.get_column() - 1;
                const int length = static_cast<int>(value.size());

                // A token is stored "by reference" only when its text can be
                // re-read verbatim from the physical source line.
                bool useReference = true;
                if (id == wave::T_NEWLINE || id == wave::T_EOF || line > lineCount)
                {
                    useReference = false;
                }
                else
                {
                    const string & sourceLine = SourceLines::getLine(name, line);
                    if (column > static_cast<int>(sourceLine.size()) ||
                        value != sourceLine.substr(column, length))
                    {
                        useReference = false;
                    }
                }

                if (useReference)
                {
                    // the reference representation of the token is stored

                    tokensInFile.push_back(TokenRef(id, line, column, length));
                }
                else
                {
                    // value of the token has no representation in the physical line
                    // so the real token value is stored in physicalTokens

                    tokensInFile.push_back(TokenRef(id, line, column, length, value));
                }
            }
        }
        catch (const wave::cpplexer::cpplexer_exception & e)
        {
            ostringstream ss;
            ss << name << ':' << e.line_no() << ": illegal token in column " << e.column_no()
                << ", giving up (hint: fix the file or remove it from the working set)";
            throw TokensError(ss.str());
        }
    }
}