// Wire up the generated ANTLR3 pipeline (lexer -> token stream -> parser)
// over an already-created input stream. Each stage is written to the caller's
// out-parameter; if any allocation fails the process reports on stderr and
// exits with ANTLR3_ERR_NOMEM.
void initParser(pANTLR3_INPUT_STREAM* input, pJpVocabularyLexer* lxr, pANTLR3_COMMON_TOKEN_STREAM* tstream, pJpVocabularyParser* psr)
{
    // Stage 1: lexer (JpVocabularyLexerNew is generated by ANTLR).
    *lxr = JpVocabularyLexerNew(*input);
    if (*lxr == NULL)
    {
        ANTLR3_FPRINTF(stderr, "Unable to create the lexer due to malloc() failure1\n");
        exit(ANTLR3_ERR_NOMEM);
    }

    // Stage 2: token stream fed by the lexer's token source.
    *tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE((*lxr)));
    if (*tstream == NULL)
    {
        ANTLR3_FPRINTF(stderr, "Out of memory trying to allocate token stream\n");
        exit(ANTLR3_ERR_NOMEM);
    }

    // Stage 3: parser (JpVocabularyParserNew is generated by ANTLR3).
    *psr = JpVocabularyParserNew(*tstream);
    if (*psr == NULL)
    {
        ANTLR3_FPRINTF(stderr, "Out of memory trying to allocate parser\n");
        exit(ANTLR3_ERR_NOMEM);
    }
}
// Construct a RadialGradientColor by parsing a UTF-16 descriptor string with
// the generated ANTLR3 grammar.
//
// @param desc  NUL-terminated wide-character gradient descriptor.
// @throws std::runtime_error     if an ANTLR runtime object cannot be allocated.
// @throws std::invalid_argument  if the descriptor fails to parse.
//
// Fixed: the original leaked the input stream, lexer, token stream and parser
// on every path, and leaked the parsed result object when throwing.
nx::color::RadialGradientColor::RadialGradientColor( const wchar_t *desc )
{
    pANTLR3_UINT8 input_string = (pANTLR3_UINT8)desc;
    // Size is in BYTES: UTF-16 code units are 2 bytes each.
    ANTLR3_UINT32 size = static_cast<ANTLR3_UINT32>(wcslen(desc) * 2);

    pANTLR3_INPUT_STREAM stream = antlr3StringStreamNew(
        input_string, ANTLR3_ENC_UTF16, size, (pANTLR3_UINT8)"RadialGradientColor");
    pRadialGradientColorLexer lxr = stream ? RadialGradientColorLexerNew(stream) : NULL;
    pANTLR3_COMMON_TOKEN_STREAM tstream =
        lxr ? antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)) : NULL;
    pRadialGradientColorParser psr = tstream ? RadialGradientColorParserNew(tstream) : NULL;

    // Single cleanup path for success and failure alike.
    auto cleanup = [&]() {
        if (psr) psr->free(psr);
        if (tstream) tstream->free(tstream);
        if (lxr) lxr->free(lxr);
        if (stream) stream->close(stream);
    };

    if (psr == NULL)
    {
        cleanup();
        throw std::runtime_error("Failed to allocate radial color parser");
    }

    nx::color::RadialGradientColor *color = psr->radialColor(psr);

    if (psr->pParser->rec->state->errorCount > 0 || color == NULL)
    {
        LOG(SEVERITY_LEVEL_ERROR) << L"Failed to parse radial color descriptor '" << desc << L"'";
        delete color; // may be NULL or partially built; delete is NULL-safe
        cleanup();
        throw std::invalid_argument("Failed to parse radial color descriptor");
    }

    m_offsetRatio = color->m_offsetRatio;
    m_gradientStops = std::move(color->m_gradientStops);
    delete color;
    cleanup();
}
std::shared_ptr<nx::color::colorsetdef::ColorSetDO> nx::color::colorsetdef::Decode(const std::wstring &s) { pANTLR3_UINT8 input_string = (pANTLR3_UINT8)s.c_str(); ANTLR3_UINT32 size = static_cast<ANTLR3_UINT32>(s.size() * 2); pANTLR3_INPUT_STREAM stream = antlr3StringStreamNew( input_string, ANTLR3_ENC_UTF16, size, (pANTLR3_UINT8)"colorsetdef"); pANTLR3_UINT8 fName; pANTLR3_COMMON_TOKEN_STREAM tstream; pColorSetDefLexer lxr = ColorSetDefLexerNew(stream); tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)); pColorSetDefParser psr = ColorSetDefParserNew(tstream); std::shared_ptr<nx::color::colorsetdef::ColorSetDO> pColorSetDO(psr->colorSetDef(psr)); if (pColorSetDO != nullptr) { size_t n = pColorSetDO->colors.size(); std::wcout << n << std::endl; for (size_t i = 0; i < n; ++i) { std::shared_ptr<nx::color::colorsetdef::IColorDO> color = pColorSetDO->colors[i]; switch (color->GetColorType()) { case COLOR_TYPE_SOLID: { std::shared_ptr<nx::color::colorsetdef::SolidColorDO> solid = std::dynamic_pointer_cast<nx::color::colorsetdef::SolidColorDO>(color); std::wcout << *solid << std::endl; } break; case COLOR_TYPE_LINEAR: { std::shared_ptr<nx::color::colorsetdef::LinearColorDO> linear = std::dynamic_pointer_cast<nx::color::colorsetdef::LinearColorDO>(color); std::wcout << *linear << std::endl; } break; case COLOR_TYPE_RADIAL: { std::shared_ptr<nx::color::colorsetdef::RadialColorDO> radial = std::dynamic_pointer_cast<nx::color::colorsetdef::RadialColorDO>(color); std::wcout << *radial << std::endl; } break; } } } psr->free(psr); psr = NULL; tstream->free(tstream); tstream = NULL; lxr->free(lxr); lxr = NULL; stream->close(stream); stream = NULL; std::shared_ptr<nx::color::colorsetdef::ColorSetDO> p(pColorSetDO); return p; }
/*! Read expressions from file or memory and Evaluate.
 *
 *  buffer      - file name (when is_filename) or in-memory expression text
 *  buf_length  - length of buffer; only used for the in-memory case
 *  is_filename - non-zero to treat buffer as a path
 *
 *  Returns the value of 'error' (0 unless an ABORT_IF branch sets it).
 *  All ANTLR objects are released through the FREE() macros at clean_up,
 *  which every ABORT_IF failure jumps to, so no leak on early exit.
 */
int EvaluateExpressions(const char *buffer, int buf_length, int is_filename)
{
    pANTLR3_INPUT_STREAM input_stream = NULL;
    pEvaLexer lxr = NULL;
    pANTLR3_COMMON_TOKEN_STREAM token_stream = NULL;
    pEvaParser psr = NULL;
    pEvaTree treeParser = NULL;
    pANTLR3_COMMON_TREE_NODE_STREAM nodes = NULL;
    EvaParser_program_return eva_ast;
    int error = 0;

    ResetErrorString();

    /*Is it a file or memory*/
    if (is_filename)
    {
        input_stream = antlr3AsciiFileStreamNew((pANTLR3_UINT8)buffer);
    }
    else
    {
        /* NULL stream name: the copy stream duplicates the buffer */
        input_stream = antlr3NewAsciiStringCopyStream((pANTLR3_UINT8)buffer, (ANTLR3_UINT32)buf_length, NULL);
    }
    ABORT_IF(input_stream == NULL);

    /*Invoke lexer and tokenzie*/
    lxr = EvaLexerNew(input_stream);
    ABORT_IF(lxr == NULL);

    token_stream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    ABORT_IF(token_stream == NULL);

    /*Parse the expression*/
    psr = EvaParserNew(token_stream);
    ABORT_IF(psr == NULL);

    /* Route syntax errors through our own reporter instead of the default. */
    //psr->pParser->rec->recoverFromMismatchedElement = recoverFromMismatchedElement;
    psr->pParser->rec->displayRecognitionError = DisplayRecognitionError;

    /*create ast from the parser*/
    eva_ast = psr->program(psr);

    /*check if there is parsing error*/
    ABORT_IF(psr->pParser->rec->state->errorCount > 0);

    nodes = antlr3CommonTreeNodeStreamNewTree(eva_ast.tree, ANTLR3_SIZE_HINT);
    ABORT_IF(nodes == NULL);

    /*Walk the tree and evaluate the expression*/
    treeParser = EvaTreeNew(nodes);
    ABORT_IF(treeParser == NULL);

    /*Take action*/
    treeParser->program(treeParser);

    /*All done lets cleanup*/
clean_up:
    FREE(treeParser);
    FREE(nodes);
    FREE(psr);
    FREE(token_stream);
    FREE(lxr);
    FREE(input_stream);
    return error;
}
// Tokenize 'text' with the generated SCs lexer and wrap each ANTLR token in a
// SCsParserToken (type, text, line, column). Returns an empty vector if any
// pipeline stage cannot be created.
//
// Fixed: the original dereferenced the very pointer it had just found to be
// NULL (e.g. `if (input == NULL) input->free(input)`) and leaked the already
// created predecessors when a later stage failed.
QVector<SCsParserToken> SCsParser::getTokens(const QString &text)
{
    QVector<SCsParserToken> token;

    std::string strData = text.toStdString();

    pANTLR3_INPUT_STREAM input = createInputStream(strData);
    if (input == NULL)
        return token;

    pSCsCLexer lxr = SCsCLexerNew(input);
    if (lxr == NULL)
    {
        input->free(input);
        return token;
    }

    pANTLR3_COMMON_TOKEN_STREAM tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    if (tstream == NULL)
    {
        lxr->free(lxr);
        input->free(input);
        return token;
    }

    // getTokens() forces the lexer to run over the whole input.
    pANTLR3_VECTOR tokens = tstream->getTokens(tstream);
    if (tokens != NULL)
    {
        for (ANTLR3_UINT32 i = 0; i < tokens->count; ++i)
        {
            pANTLR3_COMMON_TOKEN tok = (pANTLR3_COMMON_TOKEN)tokens->elements[i].element;
            pANTLR3_STRING tokText = tok->getText(tok);
            token.append(SCsParserToken(tok->getType(tok), QString((char*)tokText->chars),
                                        tok->getLine(tok), tok->getCharPositionInLine(tok)));
        }
    }

    // Lexing may have queued exceptions in the module-level lists; clear them.
    freeLexerExceptionList();
    freeParserExceptionList();

    tstream->free(tstream);
    lxr->free(lxr);
    input->free(input);

    return token;
}
/* Populate the lexer, token stream and parser members of 'complect' from its
 * already-initialized 'stream' member.
 *
 * Returns 1 on success, 0 if any stage could not be allocated.
 *
 * Fixed: the original only checked the lexer allocation; a NULL token stream
 * or parser was silently stored and reported success. */
int fill_parser_complect( pParserComplect complect )
{
    complect->lexer = TurtleLexerNew( complect->stream );
    if ( complect->lexer == NULL )
    {
        fprintf( stderr, "failed to alloc lexer\n" );
        return 0;
    }

    complect->tokens = antlr3CommonTokenStreamSourceNew (ANTLR3_SIZE_HINT, TOKENSOURCE(complect->lexer));
    if ( complect->tokens == NULL )
    {
        fprintf( stderr, "failed to alloc token stream\n" );
        return 0;
    }

    complect->parser = TurtleParserNew( complect->tokens );
    if ( complect->parser == NULL )
    {
        fprintf( stderr, "failed to alloc parser\n" );
        return 0;
    }
    return 1;
}
// Parse an SCs source string and translate the resulting AST into sc-text.
// Returns true on success; grammar failures are re-thrown as ERR_PARSE.
bool SCsTranslator::processString(const String &data)
{
    pANTLR3_INPUT_STREAM input;

    // Platform-dependent stream construction: newer runtimes take an explicit
    // encoding argument, the fallback path copies the buffer as ASCII.
#if defined( __WIN32__ ) || defined( _WIN32 )
    input = antlr3StringStreamNew((pANTLR3_UINT8)data.c_str(), ANTLR3_ENC_UTF8, (ANTLR3_UINT32)data.length(), (pANTLR3_UINT8)"scs");
#elif defined( __APPLE_CC__)
    input = antlr3StringStreamNew((pANTLR3_UINT8)data.c_str(), ANTLR3_ENC_UTF8, data.length(), (pANTLR3_UINT8)"scs");
#else
    input = antlr3NewAsciiStringCopyStream((pANTLR3_UINT8)data.c_str(), data.length(), (pANTLR3_UINT8)"scs");
#endif

    pscsLexer lex;
    pANTLR3_COMMON_TOKEN_STREAM tokens;
    pscsParser parser;

    lex = scsLexerNew(input);
    tokens = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lex));
    parser = scsParserNew(tokens);

    scsParser_syntax_return r;
    pANTLR3_BASE_TREE tree;

    try
    {
        r = parser->syntax(parser);
    } catch (const Exception &e)
    {
        // NOTE(review): on this path parser/tokens/lex/input are never freed —
        // the runtime objects leak when the grammar throws. Confirm and fix.
        THROW_EXCEPT(Exception::ERR_PARSE, e.getDescription(), mParams.fileName, e.getLineNumber());
    }
    tree = r.tree;

    //dumpDot(tree);

    // translate the AST into sc-text
    buildScText(tree);

    //dumpScs("test.scsd");

    // Release in reverse creation order.
    parser->free(parser);
    tokens->free(tokens);
    lex->free(lex);
    input->close(input);

    return true;
}
// (Re)create the ANTLR3 input stream and lexer over the raw text held in the
// private data block 'd', and cache the lexer's token source for scanning.
void MySQLScanner::setup()
{
  log_debug2("Lexer setup\n");

  d->_input = antlr3StringStreamNew((pANTLR3_UINT8)d->_text, d->_input_encoding, (ANTLR3_UINT32)d->_text_length, (pANTLR3_UINT8)"mysql-script");
  d->_input->setUcaseLA(d->_input, ANTLR3_TRUE); // Make input case-insensitive. String literals must all be upper case in the grammar!

  d->_lexer = MySQLLexerNew(d->_input);
  // The generated lexer reads this user pointer to access the shared context.
  d->_lexer->pLexer->rec->state->userp = &d->_context;
  d->_token_source = TOKENSOURCE(d->_lexer);

  log_debug2("Lexer setup ended\n");
}
// Build the lexer -> token stream -> parser chain over this->stream.
// Each stage is checked; failures raise a Donut Exception that names the
// source file being processed.
void setup()
{
    this->lexer = DonutLexerNew(this->stream);
    if(!this->lexer){
        DONUT_EXCEPTION(Exception, "Failed to read stream for %s", filename_.c_str());
    }
    this->tokenStream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lexer));
    if(!this->tokenStream){
        DONUT_EXCEPTION(Exception, "Failed to create token stream for %s", filename_.c_str());
    }
    this->parser = DonutParserNew(tokenStream);
    if(!this->parser){
        DONUT_EXCEPTION(Exception, "Failed to create parser for %s", filename_.c_str());
    }
}
// Parse a CIF file with the generated ANTLR3 cif grammar, routing lexer and
// parser diagnostics into arrays owned by 'builder'.
//
// @param builder  receives error arrays and parse callbacks.
// @param filename path of the CIF file to read.
// @param strict   passed through to the grammar's parse rule.
//
// Fixed: the original read the file through a hand-rolled 1 MiB chunk loop
// that paired `new char[...]` with plain `delete` (undefined behavior) and
// leaked the buffer on early exits; the loop is replaced by rdbuf streaming.
CifFileParser( builder_base* builder, std::string const filename, bool const strict=true)
{
    // antlr3FileStreamNew doesn't know how to eat pipes properly, so the file
    // is slurped manually and fed to the parser as an in-memory string.
    std::ifstream myfile(filename, std::ifstream::in);
    if (!myfile.is_open()) {
        std::cerr << "Error: could not open file " << filename << std::endl;
        exit(1);
    }
    std::stringstream data;
    data << myfile.rdbuf(); // read the entire file in one go
    myfile.close();

    // see http://stackoverflow.com/questions/1374468/stringstream-string-and-char-conversion-confusion
    std::string const data_string = data.str();

    input = antlr3StringStreamNew(
        pANTLR3_UINT8(data_string.c_str()),
        ANTLR3_ENC_8BIT,
        data_string.size(),
        pANTLR3_UINT8(filename.c_str())
    );
    lxr = cifLexerNew(input);
    tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    psr = cifParserNew(tstream);

    // Route recognition errors into builder-owned arrays instead of stderr.
    psr->pParser->rec->displayRecognitionError = parser_displayRecognitionError;
    psr->errors = builder->new_array();
    lxr->pLexer->rec->displayRecognitionError = lexer_displayRecognitionError;
    lxr->errors = builder->new_array();

    psr->parse(psr, builder, strict);
    fflush(stderr);
}
// Run the embedded Simple grammar's 'statement' rule over the given input
// stream, then tear down the lexer/token-stream/parser it created.
void callSimple(pANTLR3_INPUT_STREAM input)
{
    printf("enter embedded Simple escape\n");

    // Build the pipeline: input -> lexer -> token stream -> parser.
    pSimpleLexer lex = SimpleLexerNew(input);
    pANTLR3_COMMON_TOKEN_STREAM tstream =
        antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lex));
    pSimpleParser parser = SimpleParserNew(tstream);

    parser->statement(parser);

    // Release in reverse creation order; the input stream belongs to the caller.
    parser->free(parser);
    tstream->free(tstream);
    lex->free(lex);
}
// static FilterParser* FilterParser::parse(const std::string& expression) throw(ParseException) { Logger* log = getLogger(NULL); BaseExpression* rootExpression = NULL; std::list<Token*> lTokens; std::set<std::string> xpathTokens; int errorCode = -1; const char* errorMessage; if (expression.length() != 0) { //throw (ParseException) { pANTLR3_INPUT_STREAM input; pfilter_expressionLexer lex; pANTLR3_COMMON_TOKEN_STREAM tokens; pfilter_expressionParser parser; const char* filter = expression.c_str(); if (log->isDebug()) log->debug("filter expression: %s, len: %d, Size Hint: %d", filter, strlen(filter), ANTLR3_SIZE_HINT); input = antlr3StringStreamNew((pANTLR3_UINT8)filter, 8, (ANTLR3_UINT32)strlen(filter), (pANTLR3_UINT8)"name"); lex = filter_expressionLexerNew (input); tokens = antlr3CommonTokenStreamSourceNew (ANTLR3_SIZE_HINT, TOKENSOURCE(lex)); parser = filter_expressionParserNew (tokens); rootExpression = parser ->start_point(parser); xpathTokens = __parser_tokens(); if (parser->pParser->rec->state->exception != NULL) { errorCode = D_ERROR_PARSEERROR; errorMessage = (char*)parser->pParser->rec->state->exception->message; } // Must manually clean up // parser ->free(parser); tokens ->free(tokens); lex ->free(lex); input ->close(input); } if (errorCode > -1) { throw ParseException(errorCode, errorMessage); } FilterParser* filterparser = new FilterParser(expression, rootExpression, lTokens); filterparser->setTokens(xpathTokens); return filterparser; }
/* Parse the file named by argv[1] with the GNUCa grammar.
 * Returns 0 on success; otherwise a distinct non-zero code identifying the
 * stage that failed ('ret' is bumped before each stage so the value encodes
 * how far setup got: 1=usage, 2=open, 3=lexer, 4=token stream, 5=parser). */
int main(int argc, char *argv[])
{
    pANTLR3_INPUT_STREAM input;
    pANTLR3_COMMON_TOKEN_STREAM tstream;
    pGNUCaLexer lxr;
    pGNUCaParser psr;
    int ret = 1;

    if (argc != 2)
        goto err;
    ret++;
    input = antlr3AsciiFileStreamNew((unsigned char *)argv[1]);
    if (!input)
        goto err;
    ret++;
    lxr = GNUCaLexerNew(input);
    if (!lxr)
        goto err_input;
    ret++;
    tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    if (!tstream)
        goto err_lxr;
    ret++;
    psr = GNUCaParserNew(tstream);
    if (!psr)
        goto err_tstream;

    psr->translationUnit(psr);
    ret = 0;

    /* Unwind in reverse creation order; the labels double as error exits so
     * each failure path frees exactly what was created before it. */
    psr->free(psr);
err_tstream:
    tstream->free(tstream);
err_lxr:
    lxr->free(lxr);
err_input:
    input->close(input);
err:
    return ret;
}
/**
 * A lightweight function to determine the type of the given query by scanning only the absolute
 * minimum text needed to make an informed decision.
 *
 * @param text    raw query text (not copied; must stay valid during the call).
 * @param length  length of text in bytes.
 * @param is_utf8 selects UTF-8 vs 8-bit stream decoding.
 * @return the detected MySQLQueryType.
 */
MySQLQueryType MySQLQueryIdentifier::getQueryType(const char *text, size_t length, bool is_utf8)
{
  log_debug2("Starting query type determination\n");

  pANTLR3_INPUT_STREAM input = antlr3StringStreamNew((pANTLR3_UINT8)text, is_utf8 ? ANTLR3_ENC_UTF8 : ANTLR3_ENC_8BIT, (ANTLR3_UINT32)length, (pANTLR3_UINT8)"type-check");
  input->setUcaseLA(input, ANTLR3_TRUE); // case-insensitive lookahead
  pMySQLLexer lexer = MySQLLexerNew(input);

  // Reset temp vars used during lexing. We may not have scanned the tokens that reset those
  // as we do only a minimum number of token retrievals.
  d->_context.inVersionComment = false;
  d->_context.versionMatched = false;
  lexer->pLexer->rec->state->userp = &d->_context;

  MySQLQueryType result = determineQueryType(TOKENSOURCE(lexer));

  // Only the lexer and input are ever created here, so this is the full cleanup.
  lexer->free(lexer);
  input->close(input);

  log_debug2("Query type determination done\n");
  return result;
}
// Parse a textual BSON expression into a BSONObj tree.
// NOTE(review): the ParseException throw at the bottom is commented out, so
// parse errors are currently silent and the (possibly partial/NULL) result is
// returned anyway — confirm this is intentional.
BSONObj* BSONParser::parse(const std::string& sbson)
{
    Logger* log = getLogger(NULL);
    BSONObj* root = NULL;
    int errorCode = -1;
    const char* errorMessage; // only assigned together with errorCode

    if (sbson.length() != 0) { //throw (ParseException) {
        pANTLR3_INPUT_STREAM input;
        pbson_grammarLexer lex;
        pANTLR3_COMMON_TOKEN_STREAM tokens;
        pbson_grammarParser parser;

        const char* bsonExpression = sbson.c_str();
        // NOTE(review): the encoding argument is the literal 8 rather than an
        // ANTLR3_ENC_* constant — confirm it matches the runtime's 8-bit id.
        input = antlr3StringStreamNew((pANTLR3_UINT8)bsonExpression, 8, (ANTLR3_UINT32)strlen(bsonExpression), (pANTLR3_UINT8)"name");
        lex = bson_grammarLexerNew (input);
        tokens = antlr3CommonTokenStreamSourceNew (ANTLR3_SIZE_HINT, TOKENSOURCE(lex));
        parser = bson_grammarParserNew (tokens);

        root = parser ->start_point(parser);

        if (parser->pParser->rec->state->exception != NULL) {
            errorCode = D_ERROR_PARSEERROR;
            errorMessage = (char*)parser->pParser->rec->state->exception->message;
        }

        // Must manually clean up
        //
        parser ->free(parser);
        tokens ->free(tokens);
        lex ->free(lex);
        input ->close(input);
    }

    if (errorCode > -1) {
        //throw ParseException(errorCode, errorMessage);
    }

    return root;
}
bool uSQL::SQLParser::parse(const std::string &queryString) { clear(); pANTLR3_INPUT_STREAM input = antlr3StringStreamNew( (pANTLR3_UINT8)queryString.c_str(), ANTLR3_ENC_UTF8, (ANTLR3_UINT32)queryString.length(), (pANTLR3_UINT8)""); pSQLLexer lexer = SQLLexerNew(input); pANTLR3_COMMON_TOKEN_STREAM tokens = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lexer)); pSQLParser parser = SQLParserNew(tokens); parser->uSqlParser = this; parser->statement_list(parser, this); bool parserResult = true; if (0 < parser->pParser->rec->state->errorCount) { parserResult = false; } parser->free(parser); tokens->free(tokens); lexer->free(lexer); input->close(input); return parserResult; }
// Main entry point for this example // int ANTLR3_CDECL main (int argc, char *argv[]) { // Now we declare the ANTLR related local variables we need. // Note that unless you are convinced you will never need thread safe // versions for your project, then you should always create such things // as instance variables for each invocation. // ------------------- // Name of the input file. Note that we always use the abstract type pANTLR3_UINT8 // for ASCII/8 bit strings - the runtime library guarantees that this will be // good on all platforms. This is a general rule - always use the ANTLR3 supplied // typedefs for pointers/types/etc. // pANTLR3_UINT8 fName; // The ANTLR3 character input stream, which abstracts the input source such that // it is easy to provide input from different sources such as files, or // memory strings. // // For an ASCII/latin-1 memory string use: // input = antlr3NewAsciiStringInPlaceStream (stringtouse, (ANTLR3_UINT64) length, NULL); // // For a UCS2 (16 bit) memory string use: // input = antlr3NewUCS2StringInPlaceStream (stringtouse, (ANTLR3_UINT64) length, NULL); // // For input from a file, see code below // // Note that this is essentially a pointer to a structure containing pointers to functions. // You can create your own input stream type (copy one of the existing ones) and override any // individual function by installing your own pointer after you have created the standard // version. // pANTLR3_INPUT_STREAM input; // The lexer is of course generated by ANTLR, and so the lexer type is not upper case. // The lexer is supplied with a pANTLR3_INPUT_STREAM from whence it consumes its // input and generates a token stream as output. // pCLexer lxr; // The token stream is produced by the ANTLR3 generated lexer. Again it is a structure based // API/Object, which you can customise and override methods of as you wish. 
a Token stream is // supplied to the generated parser, and you can write your own token stream and pass this in // if you wish. // pANTLR3_COMMON_TOKEN_STREAM tstream; // The C parser is also generated by ANTLR and accepts a token stream as explained // above. The token stream can be any source in fact, so long as it implements the // ANTLR3_TOKEN_SOURCE interface. In this case the parser does not return anything // but it can of course specify any kind of return type from the rule you invoke // when calling it. // pCParser psr; // Create the input stream based upon the argument supplied to us on the command line // for this example, the input will always default to ./input if there is no explicit // argument. // if (argc < 2 || argv[1] == NULL) { fName =(pANTLR3_UINT8)"./input"; // Note in VS2005 debug, working directory must be configured } else { fName = (pANTLR3_UINT8)argv[1]; } // Create the input stream using the supplied file name // (Use antlr3AsciiFileStreamNew for UCS2/16bit input). // input = antlr3AsciiFileStreamNew(fName); // The input will be created successfully, providing that there is enough // memory and the file exists etc // if ( input == NULL) { fprintf(stderr, "Failed to open file %s\n", (char *)fName); exit(1); } // Our input stream is now open and all set to go, so we can create a new instance of our // lexer and set the lexer input to our input stream: // (file | memory | ?) --> inputstream -> lexer --> tokenstream --> parser ( --> treeparser )? // lxr = CLexerNew(input); // CLexerNew is generated by ANTLR // Need to check for errors // if ( lxr == NULL ) { fprintf(stderr, "Unable to create the lexer due to malloc() failure1\n"); exit(1); } // Our lexer is in place, so we can create the token stream from it // NB: Nothing happens yet other than the file has been read. We are just // connecting all these things together and they will be invoked when we // call the parser rule. 
ANTLR3_SIZE_HINT can be left at the default usually // unless you have a very large token stream/input. Each generated lexer // provides a token source interface, which is the second argument to the // token stream creator. // Note that even if you implement your own token structure, it will always // contain a standard common token within it and this is the pointer that // you pass around to everything else. A common token as a pointer within // it that should point to your own outer token structure. // tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)); if (tstream == NULL) { fprintf(stderr, "Out of memory trying to allocate token stream\n"); exit(1); } // Finally, now that we have our lexer constructed, we can create the parser // psr = CParserNew(tstream); // CParserNew is generated by ANTLR3 if (psr == NULL) { fprintf(stderr, "Out of memory trying to allocate parser\n"); exit(ANTLR3_ERR_NOMEM); } // We are all ready to go. Though that looked complicated at first glance, // I am sure, you will see that in fact most of the code above is dealing // with errors and there isn't really that much to do (isn't this always the // case in C? ;-). // // So, we now invoke the parser. All elements of ANTLR3 generated C components // as well as the ANTLR C runtime library itself are pseudo objects. This means // that they are represented as pointers to structures, which contain any // instance data they need, and a set of pointers to other interfaces or // 'methods'. Note that in general, these few pointers we have created here are // the only things you will ever explicitly free() as everything else is created // via factories, that allocated memory efficiently and free() everything they use // automatically when you close the parser/lexer/etc. // // Note that this means only that the methods are always called via the object // pointer and the first argument to any method, is a pointer to the structure itself. 
// It also has the side advantage, if you are using an IDE such as VS2005 that can do it // that when you type ->, you will see a list of tall the methods the object supports. // psr->translation_unit(psr); // We did not return anything from this parser rule, so we can finish. It only remains // to close down our open objects, in the reverse order we created them // psr ->free (psr); psr = NULL; tstream ->free (tstream); tstream = NULL; lxr ->free (lxr); lxr = NULL; input ->close (input); input = NULL; return 0; }
// Parse the JAL source file 'fName' and return the AST produced by the
// grammar's root rule 'program'. The process is aborted (exit(1)) when the
// file cannot be opened or when the parser reports any syntax errors;
// allocation failures of the runtime objects trip asserts.
jalParser_program_return ParseSource(pANTLR3_UINT8 fName)
{
    // inputstream -> lexer -> tokenstream -> parser
    pANTLR3_INPUT_STREAM input = antlr3AsciiFileStreamNew(fName);
    if (input == NULL)
    {
        CodeOutput(VERBOSE_M, "Failed to open file %s\n", (char *)fName);
        fprintf(stderr, "Failed to open file %s\n", (char *)fName);
        exit(1);
    }

    pjalLexer lxr = jalLexerNew(input); // generated by ANTLR
    assert(lxr != NULL);                // malloc() failure creating lexer

    // Connecting the stages is lazy: tokens are produced only when the
    // parser rule below pulls them.
    pANTLR3_COMMON_TOKEN_STREAM tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    assert(tstream != NULL);            // out of memory allocating token stream

    pjalParser psr = jalParserNew(tstream); // generated by ANTLR3
    assert(psr != NULL);                // out of memory allocating parser

    // 'program' is the root element of the language grammar.
    jalParser_program_return r = psr->program(psr);

    // Abort if the generic error handling counted any syntax errors.
    if (psr->pParser->rec->state->errorCount > 0)
    {
        fprintf(stderr, "The parser returned %d errors, tree walking aborted.\n", psr->pParser->rec->state->errorCount);
        exit(1);
    }
    return r;
}
static int smartpl_parse_file(const char *file, struct playlist_info *pli) { pANTLR3_INPUT_STREAM input; pSMARTPLLexer lxr; pANTLR3_COMMON_TOKEN_STREAM tstream; pSMARTPLParser psr; SMARTPLParser_playlist_return qtree; pANTLR3_COMMON_TREE_NODE_STREAM nodes; pSMARTPL2SQL sqlconv; SMARTPL2SQL_playlist_return plreturn; int ret; #if ANTLR3C_NEW_INPUT input = antlr3FileStreamNew((pANTLR3_UINT8) file, ANTLR3_ENC_8BIT); #else input = antlr3AsciiFileStreamNew((pANTLR3_UINT8) file); #endif // The input will be created successfully, providing that there is enough memory and the file exists etc if (input == NULL) { DPRINTF(E_LOG, L_SCAN, "Unable to open smart playlist file %s\n", file); return -1; } lxr = SMARTPLLexerNew(input); // Need to check for errors if (lxr == NULL) { DPRINTF(E_LOG, L_SCAN, "Could not create SMARTPL lexer\n"); ret = -1; goto lxr_fail; } tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)); if (tstream == NULL) { DPRINTF(E_LOG, L_SCAN, "Could not create SMARTPL token stream\n"); ret = -1; goto tkstream_fail; } // Finally, now that we have our lexer constructed, we can create the parser psr = SMARTPLParserNew(tstream); // CParserNew is generated by ANTLR3 if (tstream == NULL) { DPRINTF(E_LOG, L_SCAN, "Could not create SMARTPL parser\n"); ret = -1; goto psr_fail; } qtree = psr->playlist(psr); /* Check for parser errors */ if (psr->pParser->rec->state->errorCount > 0) { DPRINTF(E_LOG, L_SCAN, "SMARTPL query parser terminated with %d errors\n", psr->pParser->rec->state->errorCount); ret = -1; goto psr_error; } DPRINTF(E_DBG, L_SCAN, "SMARTPL query AST:\n\t%s\n", qtree.tree->toStringTree(qtree.tree)->chars); nodes = antlr3CommonTreeNodeStreamNewTree(qtree.tree, ANTLR3_SIZE_HINT); if (!nodes) { DPRINTF(E_LOG, L_SCAN, "Could not create node stream\n"); ret = -1; goto psr_error; } sqlconv = SMARTPL2SQLNew(nodes); if (!sqlconv) { DPRINTF(E_LOG, L_SCAN, "Could not create SQL converter\n"); ret = -1; goto sql_fail; } plreturn = 
sqlconv->playlist(sqlconv); /* Check for tree parser errors */ if (sqlconv->pTreeParser->rec->state->errorCount > 0) { DPRINTF(E_LOG, L_SCAN, "SMARTPL query tree parser terminated with %d errors\n", sqlconv->pTreeParser->rec->state->errorCount); ret = -1; goto sql_error; } if (plreturn.title && plreturn.query) { DPRINTF(E_DBG, L_SCAN, "SMARTPL SQL title '%s' query: -%s-\n", plreturn.title->chars, plreturn.query->chars); if (pli->title) free(pli->title); pli->title = strdup((char *)plreturn.title->chars); if (pli->query) free(pli->query); pli->query = strdup((char *)plreturn.query->chars); ret = 0; } else { DPRINTF(E_LOG, L_SCAN, "Invalid SMARTPL query\n"); ret = -1; } sql_error: sqlconv->free(sqlconv); sql_fail: nodes->free(nodes); psr_error: psr->free(psr); psr_fail: tstream->free(tstream); tkstream_fail: lxr->free(lxr); lxr_fail: input->close(input); return ret; }
// Parse the proto file 'filename' with the ProtoJS grammar, pushing a fresh
// NameSpace scope configured from the output* arguments. On return, *package
// holds the package name discovered in the file (strdup'd if it was not
// preset by the caller). When cleanUp is false, ownership of the AST and all
// ANTLR runtime objects transfers to the caller through the ret_* pointers.
// Returns true when the parser reported no syntax errors.
bool parseProto (const char*filename, const char *outputFilename, const char * outputInternalNamespace, const char*outputExternalNamespace, char**package, pANTLR3_HASH_TABLE typeTable, bool cleanUp, ProtoJSParser_protocol_return*retval, pProtoJSLexer*ret_lxr, pProtoJSParser*ret_psr, pANTLR3_COMMON_TOKEN_STREAM*ret_tstream, pANTLR3_INPUT_STREAM* ret_stream)
{
    pANTLR3_INPUT_STREAM input = antlr3AsciiFileStreamNew((pANTLR3_UINT8)filename);
    if ( input == NULL )
    {
        fprintf(stderr, "Failed to open file %s\n", (char *)filename);
        exit(1);
    }
    pProtoJSLexer lxr = ProtoJSLexerNew(input);
    if ( lxr == NULL )
    {
        fprintf(stderr, "Unable to create the lexer due to malloc() failure1\n");
        exit(1);
    }
    pANTLR3_COMMON_TOKEN_STREAM tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    if (tstream == NULL)
    {
        fprintf(stderr, "Out of memory trying to allocate token stream\n");
        exit(1);
    }
    pProtoJSParser ctx;
    pProtoJSParser psr = ctx = ProtoJSParserNew(tstream);
    if (psr == NULL)
    {
        fprintf(stderr, "Out of memory trying to allocate parser\n");
        exit(ANTLR3_ERR_NOMEM);
    }

    // Push a new namespace scope and fill in its file/namespace strings using
    // the token source's string factory, so their lifetime matches the stream.
    SCOPE_TYPE(NameSpace) ns=NameSpacePush(ctx);
    ctx->pProtoJSParser_NameSpaceTop=ns;
    ns->filename=tstream->tstream->tokenSource->strFactory->newRaw(tstream->tstream->tokenSource->strFactory);
    // NOTE(review): append8's target here is the enclosing scope's filename,
    // not ns->filename as in the surrounding lines — confirm this is intended.
    ns->filename->append8(SCOPE_TOP(NameSpace)->filename,(const char*)outputFilename);
    ns->internalNamespace=tstream->tstream->tokenSource->strFactory->newRaw(tstream->tstream->tokenSource->strFactory);
    ns->internalNamespace->append8(ns->internalNamespace,(const char*)outputInternalNamespace);
    ns->externalNamespace=tstream->tstream->tokenSource->strFactory->newRaw(tstream->tstream->tokenSource->strFactory);
    ns->externalNamespace->append8(SCOPE_TOP(NameSpace)->externalNamespace,(const char*)outputExternalNamespace);
    if (strlen(outputExternalNamespace)) {
        // Non-empty external namespace gets a trailing dot separator.
        ns->externalNamespace->append8(ns->externalNamespace,".");
    }
    initNameSpace(ctx,ns);
    pANTLR3_HASH_TABLE tempTable=ns->qualifiedTypes;
    if (*package){
        // A preset package name seeds the scope's package/packageDot fields.
        ns->package->set8(ns->package,*package);
        ns->packageDot->set8(ns->packageDot,*package);
        ns->packageDot->append8(ns->packageDot,".");
    }
    if (typeTable) {
        // Share the caller's qualified-type table for the duration of the parse.
        ns->qualifiedTypes=typeTable;
    }
    ProtoJSParser_protocol_return pbjAST=psr->protocol(psr);
    if (!*package) {
        // Report the package the grammar discovered; caller owns the copy.
        *package=strdup((const char*)ns->package->chars);
    }
    // Restore the scope's own table before it is popped/returned.
    ns->qualifiedTypes=tempTable;
    bool success=true;
    if (psr->pParser->rec->getNumberOfSyntaxErrors(psr->pParser->rec) > 0)
    {
        success=false;
        ANTLR3_FPRINTF(stderr, "The parser returned %d errors, tree walking aborted.\n", psr->pParser->rec->getNumberOfSyntaxErrors(psr->pParser->rec));
    }else {
    }
    if (cleanUp) {
        NameSpacePop(ctx);
        psr->free(psr);
        psr = NULL;
        tstream->free(tstream);
        tstream = NULL;
        lxr->free(lxr);
        lxr = NULL;
        input->close(input);
        input = NULL;
    }else {
        // Hand everything back: the caller becomes responsible for freeing
        // the runtime objects and popping the namespace scope.
        *retval=pbjAST;
        *ret_lxr=lxr;
        *ret_psr=psr;
        *ret_tstream=tstream;
        *ret_stream=input;
    }
    return success;
}
// Run the SCs parser over 'text' and collect every lexer and parser exception
// recorded in the module-level exception lists into SCsParserException values.
// Returns an empty vector if any pipeline stage cannot be created.
//
// Fixed: the original dereferenced the very pointer it had just found to be
// NULL (e.g. `if (input == NULL) input->free(input)`) and leaked the already
// created predecessors when a later stage failed.
QVector<SCsParserException> SCsParser::getExceptions(const QString &text)
{
    QVector<SCsParserException> exceptions;

    std::string strData = text.toStdString();

    pANTLR3_INPUT_STREAM input = createInputStream(strData);
    if (input == NULL)
        return exceptions;

    pSCsCLexer lxr = SCsCLexerNew(input);
    if (lxr == NULL)
    {
        input->free(input);
        return exceptions;
    }

    pANTLR3_COMMON_TOKEN_STREAM tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr));
    if (tstream == NULL)
    {
        lxr->free(lxr);
        input->free(input);
        return exceptions;
    }

    pSCsCParser psr = SCsCParserNew(tstream);
    if (psr == NULL)
    {
        tstream->free(tstream);
        lxr->free(lxr);
        input->free(input);
        return exceptions;
    }

    // Running the start rule populates the module-level exception lists.
    psr->syntax(psr);

    _ParserException *psrEx = ParserHeadException();
    _LexerException *lxrEx = LexerHeadException();

    while (psrEx)
    {
        exceptions.push_back(SCsParserException(SCsParserException::PARSER, psrEx->mLine, psrEx->mCharPositionInLine, psrEx->mType));
        psrEx = psrEx->pNextException;
    }

    while (lxrEx)
    {
        exceptions.push_back(SCsParserException(SCsParserException::LEXER, lxrEx->mLine, lxrEx->mCharPositionInLine, lxrEx->mType));
        lxrEx = lxrEx->pNextException;
    }

    freeLexerExceptionList();
    freeParserExceptionList();

    psr->free(psr);
    tstream->free(tstream);
    lxr->free(lxr);
    input->free(input);

    return exceptions;
}
int main(int argc, char **argv) { pANTLR3_INPUT_STREAM input; pANTLR3_COMMON_TOKEN_STREAM tstream; pANTLR3_COMMON_TREE_NODE_STREAM tnstream; ParserParser_translationUnit_return parseTree; pParserLexer lxr; pParserParser psr; pASTParser ASTp; int ret = 1; if (argc < 2) goto err; ret++; input = antlr3FileStreamNew((pANTLR3_UINT8)argv[1], ANTLR3_ENC_8BIT); if (!input) goto err; ret++; lxr = ParserLexerNew(input); if (!lxr) goto err_input; ret++; tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)); if (!tstream) goto err_lxr; ret++; psr = ParserParserNew(tstream); if (!psr) goto err_tstream; parseTree = psr->translationUnit(psr); puts((char *)parseTree.tree->toStringTree(parseTree.tree)->chars); ret++; tnstream = antlr3CommonTreeNodeStreamNewTree(parseTree.tree, ANTLR3_SIZE_HINT); if (!tnstream) goto err_psr; ret++; ASTp = ASTParserNew(tnstream); if (!ASTp) goto err_tnstream; ASTp->translationUnit(ASTp); ret = 0; ASTp->free(ASTp); err_tnstream: tnstream->free(tnstream); err_psr: psr->free(psr); err_tstream: tstream->free(tstream); err_lxr: lxr->free(lxr); err_input: input->close(input); err: return ret; }
int main(int argc, char *argv[]) { try { if(argc < 2) { std::cerr << "Input file not specified!!!" << std::endl; return EXIT_FAILURE; } // check if input file exist std::ifstream inFile(argv[1]); if(!inFile.is_open()) { std::cerr << "Input file '" << argv[1] << "' not found!!!" << std::endl; return EXIT_FAILURE; } // create file name std::string filePath(argv[1]); size_t nameStart = filePath.find_last_of("/\\"); size_t extStart = filePath.find_last_of("."); std::string fileName(filePath, nameStart + 1, extStart - nameStart); fileName += "cpp"; // prepare output stream std::ofstream outFile(fileName.c_str()); if(!outFile.is_open()) { std::cerr << "Cannot create output file '" << fileName << "'!!!" << std::endl; return EXIT_FAILURE; } // create logger & translator freettcn::translator::CLogger logger; freettcn::translator::CDumper dumper(outFile); freettcn::translator::CTranslator translator(argv[1], logger); // create input stream pANTLR3_UINT8 fName((pANTLR3_UINT8)argv[1]); pANTLR3_INPUT_STREAM input = antlr3AsciiFileStreamNew(fName); if(!input) std::cerr << "Unable to open file " << (char *)fName << " due to malloc() failure1" << std::endl; // create lexer pttcn3Lexer lexer = ttcn3LexerNew(input); if(!lexer) { std::cerr << "Unable to create the lexer due to malloc() failure1" << std::endl; return EXIT_FAILURE; } // override warning messages for lexer lexer->pLexer->rec->displayRecognitionError = freettcn::translator::DisplayRecognitionError; // create tokens stream from input file pANTLR3_COMMON_TOKEN_STREAM tokens = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lexer)); if(!tokens) { std::cerr << "Out of memory trying to allocate token stream" << std::endl; return EXIT_FAILURE; } // create parser pttcn3Parser parser = ttcn3ParserNew(tokens); if(!parser) { std::cerr << "Out of memory trying to allocate parser" << std::endl; return EXIT_FAILURE; } // override warning messages for parser parser->pParser->rec->displayRecognitionError = 
freettcn::translator::DisplayRecognitionError; if(argc >= 3 && std::string(argv[2]) == "-t") { // print tokens stream pANTLR3_UINT8 *tokenNames = parser->pParser->rec->state->tokenNames; if(!tokenNames) { std::cerr << "Token names not initiated!!!" << std::endl; return EXIT_FAILURE; } // estimate the longest token name size_t maxTokenLength = 0; pANTLR3_VECTOR vector = tokens->getTokens(tokens); for(unsigned i=0; i<vector->size(vector); i++) { pANTLR3_COMMON_TOKEN token = (pANTLR3_COMMON_TOKEN)vector->get(vector, i); maxTokenLength = std::max(maxTokenLength, std::string((char *)tokenNames[token->getType(token)]).size()); } // print tokens for(unsigned i=0; i<vector->size(vector); i++) { pANTLR3_COMMON_TOKEN token = (pANTLR3_COMMON_TOKEN)vector->get(vector, i); std::cout << "Token[" << std::setw(3) << std::right << token->getType(token) << "] - " << std::setw(maxTokenLength) << std::left << tokenNames[token->getType(token)] << " = " << token->getText(token)->chars << std::endl; } return EXIT_SUCCESS; } // parse tokes stream parser->ttcn3Module(parser); if(translator.WarningNum() || translator.ErrorNum()) { std::cerr << filePath << ": Errors: " << translator.ErrorNum() << "; Warnings: " << translator.WarningNum() << std::endl; return EXIT_FAILURE; } // dump to output file translator.Dump(dumper); // must manually clean up // parser->free(parser); tokens->free(tokens); lexer->free(lexer); input->close(input); } catch(freettcn::Exception &ex) { std::cerr << "Error: Unhandled freettcn exception caught:" << std::endl; std::cerr << ex.what() << std::endl; return EXIT_FAILURE; } catch(std::exception &ex) { std::cerr << "Error: Unhandled system exception caught:" << std::endl;; std::cerr << ex.what() << std::endl;; return EXIT_FAILURE; } catch(...) { std::cerr << "Unknown exception caught!!!" << std::endl; return EXIT_FAILURE; } return EXIT_SUCCESS; }
int main(int /*argc*/, char** /*argv*/) { pANTLR3_UINT8 fileName = (pANTLR3_UINT8)"test.mat"; pANTLR3_COMMON_TREE_NODE_STREAM treeNodes; pANTLR3_INPUT_STREAM input = antlr3FileStreamNew(fileName, ANTLR3_ENC_UTF8); if (input == nullptr) { ANTLR3_FPRINTF(stderr, "unable to open %s", (char *)fileName); return ANTLR3_ERR_NOFILE; } pTalonMaterialLexer lexer = TalonMaterialLexerNew(input); if (lexer == nullptr) { ANTLR3_FPRINTF(stderr, "unable to create lexer."); return ANTLR3_ERR_NOMEM; } pANTLR3_COMMON_TOKEN_STREAM tokenStream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lexer)); if (tokenStream == nullptr) { ANTLR3_FPRINTF(stderr, "unable to create tokenStream."); return ANTLR3_ERR_NOMEM; } pTalonMaterialParser parser = TalonMaterialParserNew(tokenStream); if (parser == nullptr) { ANTLR3_FPRINTF(stderr, "unable to create parser."); return ANTLR3_ERR_NOMEM; } auto module = parser->module(parser); auto errorCount = parser->pParser->rec->getNumberOfSyntaxErrors(parser->pParser->rec); if (errorCount > 0) { ANTLR3_FPRINTF(stderr, "The parser returned %d errors, tree walking aborted.\n", errorCount); } else { ANTLR3_FPRINTF(stdout, "Parser found no errors."); } return 0; }
char * rsp_query_parse_sql(const char *rsp_query) { /* Input RSP query, fed to the lexer */ pANTLR3_INPUT_STREAM query; /* Lexer and the resulting token stream, fed to the parser */ pRSPLexer lxr; pANTLR3_COMMON_TOKEN_STREAM tkstream; /* Parser and the resulting AST, fed to the tree parser */ pRSPParser psr; RSPParser_query_return qtree; pANTLR3_COMMON_TREE_NODE_STREAM nodes; /* Tree parser and the resulting SQL query string */ pRSP2SQL sqlconv; pANTLR3_STRING sql; char *ret = NULL; DPRINTF(E_DBG, L_RSP, "Trying RSP query -%s-\n", rsp_query); #if ANTLR3C_NEW_INPUT query = antlr3StringStreamNew ((pANTLR3_UINT8)rsp_query, ANTLR3_ENC_8BIT, (ANTLR3_UINT64)strlen(rsp_query), (pANTLR3_UINT8)"RSP query"); #else query = antlr3NewAsciiStringInPlaceStream ((pANTLR3_UINT8)rsp_query, (ANTLR3_UINT64)strlen(rsp_query), (pANTLR3_UINT8)"RSP query"); #endif if (!query) { DPRINTF(E_DBG, L_RSP, "Could not create input stream\n"); return NULL; } lxr = RSPLexerNew(query); if (!lxr) { DPRINTF(E_DBG, L_RSP, "Could not create RSP lexer\n"); goto lxr_fail; } tkstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)); if (!tkstream) { DPRINTF(E_DBG, L_RSP, "Could not create RSP token stream\n"); goto tkstream_fail; } psr = RSPParserNew(tkstream); if (!psr) { DPRINTF(E_DBG, L_RSP, "Could not create RSP parser\n"); goto psr_fail; } qtree = psr->query(psr); /* Check for parser errors */ if (psr->pParser->rec->state->errorCount > 0) { DPRINTF(E_LOG, L_RSP, "RSP query parser terminated with %d errors\n", psr->pParser->rec->state->errorCount); goto psr_error; } DPRINTF(E_SPAM, L_RSP, "RSP query AST:\n\t%s\n", qtree.tree->toStringTree(qtree.tree)->chars); nodes = antlr3CommonTreeNodeStreamNewTree(qtree.tree, ANTLR3_SIZE_HINT); if (!nodes) { DPRINTF(E_DBG, L_RSP, "Could not create node stream\n"); goto psr_error; } sqlconv = RSP2SQLNew(nodes); if (!sqlconv) { DPRINTF(E_DBG, L_RSP, "Could not create SQL converter\n"); goto sql_fail; } sql = sqlconv->query(sqlconv); /* Check for 
tree parser errors */ if (sqlconv->pTreeParser->rec->state->errorCount > 0) { DPRINTF(E_LOG, L_RSP, "RSP query tree parser terminated with %d errors\n", sqlconv->pTreeParser->rec->state->errorCount); goto sql_error; } if (sql) { DPRINTF(E_DBG, L_RSP, "RSP SQL query: -%s-\n", sql->chars); ret = strdup((char *)sql->chars); } else { DPRINTF(E_LOG, L_RSP, "Invalid RSP query\n"); ret = NULL; } sql_error: sqlconv->free(sqlconv); sql_fail: nodes->free(nodes); psr_error: psr->free(psr); psr_fail: tkstream->free(tkstream); tkstream_fail: lxr->free(lxr); lxr_fail: query->close(query); return ret; }
bool CAdjustStock::ParseAdjustLogic(const char* logicExp, HASH_MAP< string, double >&signalMap) { pANTLR3_INPUT_STREAM input; pExprLexer lexer; pANTLR3_COMMON_TOKEN_STREAM tstream ; pExprParser parser; pANTLR3_BASE_TREE root ; if( strlen(logicExp) == 0 ) return false ; try { input = antlr3StringStreamNew( (pANTLR3_UINT8) logicExp, ANTLR3_ENC_UTF8, strlen(logicExp), (pANTLR3_UINT8)"Expr" ); SHOULD( input, "fail to create stream from string: " << logicExp ); lexer = ExprLexerNew( input ); SHOULD( input, "fail to create lexer" ); tstream = antlr3CommonTokenStreamSourceNew( ANTLR3_SIZE_HINT, TOKENSOURCE(lexer) ); SHOULD( tstream, "fail to create token stream" ); parser = ExprParserNew( tstream ); SHOULD( parser, "fail to create parser" ); ExprParser_expr_return statments = (ExprParser_expr_return)( parser->expr(parser) ); int errs = parser->pParser->rec->state->errorCount; if( errs>0 ){ LOG_FILE(LOG_LEVEL::LOG_INFO,"[AdjustStock] The parser returned %d errors, tree walking aborted.\n", errs); return false ; } root = statments.tree; SHOULD( root, "fail to get tree" ); pANTLR3_TOKEN_STREAM stream = tstream->tstream; m_adjustTree.tree_ = root; m_adjustTree.stream_ = stream; //根据语法树的节点得到叶子节点类型为VAR的变量,返给调仓查询 //修改叶子节点是变量的值,key用Value替换 getVARList( m_adjustTree, m_varVec ); HASH_MAP< string, double >::iterator mit; //test: srcSingleMap[000407_alpha_7_indus_sort, 1.23] for ( size_t i =0; i<m_varVec.size(); i++ ) { mit = signalMap.find(m_varVec[i]); if ( mit != signalMap.end() ) { m_parseMap[m_varVec[i]] = mit->second; } else { ISM_LOG_ERR("[AdjustStock]","the signal value not founded in singal map!"); return false; } } //calculate the result from the replaced tree int result = calcExpr(m_adjustTree); if( parser ) { parser->free( parser );} if( tstream ) { tstream->free( tstream );} if( lexer ) { lexer->free( lexer );} if( input ) { input->close( input );} return result==1 ; } catch( Exp& e ) { RETHROW( e, "fail to parse. line: " << logicExp ); } }
QSet<int> SCsParser::getErrorLines(const QString &text) { QSet<int> errorLines; pANTLR3_INPUT_STREAM input; pSCsCLexer lxr; pANTLR3_COMMON_TOKEN_STREAM tstream; pSCsCParser psr; std::string strData = text.toStdString(); input = createInputStream(strData); if(input == NULL) { return errorLines; } lxr = SCsCLexerNew(input); if( lxr == NULL ) { input->free(input); return errorLines; } tstream = antlr3CommonTokenStreamSourceNew(ANTLR3_SIZE_HINT, TOKENSOURCE(lxr)); if( tstream == NULL ) { lxr->free(lxr); input->free(input); return errorLines; } psr = SCsCParserNew(tstream); if( psr == NULL ) { tstream->free(tstream); lxr->free(lxr); input->free(input); return errorLines; } psr->syntax(psr); _ParserException *psrEx = ParserHeadException(); _LexerException *lxrEx = LexerHeadException(); while(psrEx) { errorLines.insert(psrEx->mLine); psrEx = psrEx->pNextException; } while(lxrEx) { errorLines.insert(lxrEx->mLine); lxrEx = lxrEx->pNextException; } freeLexerExceptionList(); freeParserExceptionList(); psr->free(psr); tstream->free(tstream); lxr->free(lxr); input->free(input); return errorLines; }