/*
 * Runs the complete preprocessing pipeline over the contents of \a file:
 * backslash-newline splicing, tokenization, then conditional evaluation
 * and macro expansion.  Returns the fully preprocessed token stream.
 * An empty input yields the (empty) member token list unchanged.
 */
Symbols Preprocessor::preprocessed(const QByteArray &filename, QIODevice *file)
{
    QByteArray input = file->readAll();
    if (input.isEmpty())
        return symbols;

    // Phase 1: splice backslash-newline continuations.
    input = cleaned(input);

    // Phase 2: lexical scan into preprocessor tokens (kept on the member
    // so phase 3 can consume them).
    symbols = tokenize(input);

#if 0
    for (int j = 0; j < symbols.size(); ++j)
        fprintf(stderr, "line %d: %s(%s)\n",
                symbols[j].lineNum,
                symbols[j].lexem().constData(),
                tokenTypeName(symbols[j].token));
#endif

    // Phase 3: evaluate #if/#ifdef conditionals and expand macros.
    Symbols out;
    preprocess(filename, out);

#if 0
    for (int j = 0; j < out.size(); ++j)
        fprintf(stderr, "line %d: %s(%s)\n",
                out[j].lineNum,
                out[j].lexem().constData(),
                tokenTypeName(out[j].token));
#endif

    return out;
}
static void printToken (const tokenInfo *const token) { fprintf (stderr, "%p:\n\ttype:\t%s\n\tline:\t%lu\n\tscope:\t%s\n", (void *) token, tokenTypeName (token->type), token->lineNumber, vStringValue (token->scope)); switch (token->type) { case TOKEN_IDENTIFIER: case TOKEN_STRING: case TOKEN_VARIABLE: fprintf (stderr, "\tcontent:\t%s\n", vStringValue (token->string)); break; case TOKEN_KEYWORD: { size_t n = ARRAY_SIZE (PhpKeywordTable); size_t i; fprintf (stderr, "\tkeyword:\t"); for (i = 0; i < n; i++) { if (PhpKeywordTable[i].id == token->keyword) { fprintf (stderr, "%s\n", PhpKeywordTable[i].name); break; } } if (i >= n) fprintf (stderr, "(unknown)\n"); } default: break; } }
// Golden-file test: tokenizes <DATA_PATH><name>.in and compares the token
// dump line-by-line against the expected output in <DATA_PATH><name>.out.
TEST_P(TokenizerTest, Tokenize) {
    const std::string testName = GetParam();

    // Build the fixture paths with std::string.  The previous
    // malloc/strcpy/strcat version leaked both buffers (no free()).
    const std::string inputFile = std::string(DATA_PATH) + testName + ".in";
    std::fstream inputStream(inputFile.c_str(), std::fstream::in);
    ASSERT_TRUE(inputStream.good());

    const std::string resultFile = std::string(DATA_PATH) + testName + ".out";
    std::fstream resultStream(resultFile.c_str(), std::fstream::in);
    ASSERT_TRUE(resultStream.good());

    Tokenizer tokenizer(&inputStream);
    Tokenizer::Token token;
    std::string output;
    do {
        tokenizer.nextToken(token);
        // The tokenizer must always classify what it reads.
        ASSERT_NE(token.type, Tokenizer::Token::None);

        std::string line;
        line.append(tokenTypeName(token.type));
        if (shouldPrintContents(token.type)) {
            line.push_back(' ');
            line.push_back('"');
            line.append(removeNewLines(token.contents));
            line.push_back('"');
        }
        output.append(line);
        output.push_back('\n');
    } while (token.type != Tokenizer::Token::EndOfInput);

    // Compare produced vs. expected dump line by line so a mismatch
    // reports the first differing line number.
    std::istringstream outputStream(output);
    int line = 1;
    while (resultStream.good() || outputStream.good()) {
        std::string outputLine, resultLine;
        getline(outputStream, outputLine);
        getline(resultStream, resultLine);
        ASSERT_EQ(resultLine, outputLine) << "Line " << line;
        ++line;
    }
}