Example #1
// Return installed API files.
QStringList QsciAPIs::installedAPIFiles() const
{
    QString qtdir = QLibraryInfo::location(QLibraryInfo::DataPath);

    QDir apidir = QDir(QString("%1/qsci/api/%2").arg(qtdir).arg(lexer()->lexer()));
    QStringList filenames;

    QStringList filters;
    filters << "*.api";

    QFileInfoList flist = apidir.entryInfoList(filters, QDir::Files, QDir::IgnoreCase);

    foreach (QFileInfo fi, flist)
        filenames << fi.absoluteFilePath();

    return filenames;
}
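The function above only reports which .api files are installed for the current lexer; a caller still has to load and prepare them. A minimal caller sketch, assuming the stock QScintilla QsciAPIs/QsciLexerCPP classes; the helper name loadInstalledApis is illustrative:

#include <Qsci/qsciapis.h>
#include <Qsci/qscilexercpp.h>

// Attach an API database to a lexer and load every installed .api file
// reported by installedAPIFiles().
void loadInstalledApis(QsciLexerCPP *lex)
{
    QsciAPIs *apis = new QsciAPIs(lex);                // registers itself with the lexer
    foreach (const QString &path, apis->installedAPIFiles())
        apis->load(path);                              // read the raw API entries
    apis->prepare();                                   // build the lookup tables (runs asynchronously)
}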
Example #2
void DocumentEditor::setLanguage(const QString &language_) {
	QsciLexer* l = lexer();
	//detach lexer from document before deleting it
	setLexer(0);
	if(l != 0) {
		delete l;
		l = 0;
	}

	//set the new lexer
	l = LexerManager::getInstance().lexerFactory(language_, this);
	setLexer(l);

	//reload settings for lexer
	Settings settings;
	settings.applyToDocument(this);
}
Example #3
int main(int argc, char *argv[])
{
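  /* Note: 'in' and 'out' are FILE* globals defined elsewhere in the original source (assumed). */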
  if((in = fopen(argv[1], "r")) == NULL)
    {
      printf("lexer crashed: bad read file \n");
      return -1;
    }
  if((out = fopen(argv[2], "w")) == NULL)
    {
      printf("lexer crashed: bad write file \n");
      return -1;
    }
  lexer();
  fclose(in);
  fclose(out);
  return 0;
}
Example #4
const Result *parse(Source source, Result *(Parser::RequirementParser::*rule)(), std::string &errors)
{
	Input input(source);
	Parser::RequirementLexer lexer(input);
	Parser::RequirementParser parser(lexer);
	try {
		const Result *result = (parser.*rule)();
		errors = parser.errors;
		if (errors.empty())
			return result;
		else
			::free((void *)result);
	} catch (const antlr::TokenStreamException &ex) {
		errors = ex.toString() + "\n";
	}
	return NULL;			// signal failure
}
Example #5
void repl(std::ostream& out)
{
    Lexer lexer(0, &out);
    ResultType result;
    Parser parser(lexer, result);
    while (true)
    {
        auto request = input();
        if (request == "exit") break;
        std::stringstream stream_in(request);
        lexer.restart(stream_in);
        if (0 == parser.parse())
        {
            std::cout << result << std::endl;
        }
    }    
}
Example #6
    /// handle specialized clipboard text, with leading "(fp_lib_table", OR
    /// spreadsheet formatted text.
    void paste_text( const wxString& cb_text ) override
    {
        FP_LIB_TABLE_GRID* tbl = (FP_LIB_TABLE_GRID*) m_grid->GetTable();
        size_t             ndx = cb_text.find( "(fp_lib_table" );

        if( ndx != std::string::npos )
        {
            // paste the FP_LIB_TABLE_ROWs of s-expression (fp_lib_table), starting
            // at column 0 regardless of current cursor column.

            STRING_LINE_READER  slr( TO_UTF8( cb_text ), "Clipboard" );
            LIB_TABLE_LEXER     lexer( &slr );
            FP_LIB_TABLE        tmp_tbl;
            bool                parsed = true;

            try
            {
                tmp_tbl.Parse( &lexer );
            }
            catch( PARSE_ERROR& pe )
            {
                DisplayError( m_dialog, pe.What() );
                parsed = false;
            }

            if( parsed )
            {
                // make sure the table is big enough...
                if( tmp_tbl.GetCount() > (unsigned) tbl->GetNumberRows() )
                    tbl->AppendRows( tmp_tbl.GetCount() - tbl->GetNumberRows() );

                for( unsigned i = 0;  i < tmp_tbl.GetCount();  ++i )
                    tbl->rows.replace( i, tmp_tbl.At( i ).clone() );
            }

            m_grid->AutoSizeColumns( false );
        }
        else
        {
            // paste spreadsheet formatted text.
            GRID_TRICKS::paste_text( cb_text );

            m_grid->AutoSizeColumns( false );
        }
    }
Example #7
		MaterialPtr Compiler::Compile(Sys_GraphicsPtr pGraphics, const std::string& src, const boost::filesystem::path& filename)
		{
			m_file = filename;

			logger() << "-- compiling material(" << m_file << "): \n";
			
			Lexer lexer(src);
			
			MaterialParser parser(nullptr, logger());

			if(false == parser.Parse(&lexer, m_file == "" ? "./" : m_file.parent_path()))
			{
				logger() << "== failed.\n";
				return MaterialPtr();
			}
			logger() << "== ok.\n";
			return parser.CreateObject(pGraphics);
		}
Example #8
ASTNode *
do_native_parse(unsigned char *buffer, size_t length)
{
	ASTNode *result;

	antlr::CharInputBuffer input(buffer, length, false);

	CPPLexer lexer(input);
	lexer.setFilename("unknown");

	CPPParser parser(lexer);
	parser.setFilename("unknown");
	parser.init();
	result = parser.translation_unit();
	parser.uninit();

	return result;
}
Example #9
parser::parser(std::istream &file) :
  m_queue()
{
  lexer		lexer(file);
  struct lexer::token	*token;

  for (token = lexer.next_token(); token != NULL; token = lexer.next_token())
    {
      if (token->type != lexer::Separator)
	{
	  if (token->type != lexer::Identifier)
	    throw std::exception();
	  else
	    m_queue.push(automate(lexer, token));
	}
      delete token;
    }
}
Example #10
/**
 * @brief compile handler which starts lexing and parsing
 *
 * @param raw_code pl0 source code
 * @retval int TRUE or FALSE
 */
int compile(FILE *raw_code) {
	SOURCECODE pl0_code = sc_init();
	int status;

	puts("Start lexical scanning...");

	lexer(pl0_code, raw_code);

	puts("Finished lexical scanning!\n");

	puts("Start parsing...\n");

	status = init_parsing(pl0_code);

	sc_destroy(pl0_code);

	return status;
}
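compile() expects an already-open FILE* and reports TRUE or FALSE, so the caller owns opening and closing the source file. A minimal driver sketch, assuming only the prototype above; the usage string and exit codes are illustrative:

#include <cstdio>

int compile(std::FILE *raw_code);   // assumed prototype of the handler above

int main(int argc, char *argv[])
{
	if (argc < 2) {
		std::fprintf(stderr, "usage: %s <source.pl0>\n", argv[0]);
		return 1;
	}
	std::FILE *raw_code = std::fopen(argv[1], "r");
	if (raw_code == NULL) {
		std::perror("fopen");
		return 1;
	}
	int status = compile(raw_code);   // runs lexer() and init_parsing() internally
	std::fclose(raw_code);
	return status ? 0 : 1;            // assumes TRUE (non-zero) means success
}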
Example #11
TEST(sexpr, lexer)
{
    io::LineSpan span;
    sexpr::Lexer lexer(io::from_string, "<lexer-test1>"_s, " foo( ) 123\"\" \n"_s);
    EXPECT_EQ(lexer.peek(), sexpr::TOK_TOKEN);
    EXPECT_EQ(lexer.val_string(), "foo"_s);
    EXPECT_EQ(lexer.span().error_str("test"_s),
            "<lexer-test1>:1:2: error: test\n"
            " foo( ) 123\"\" \n"
            " ^~~\n"_s
    );
    lexer.adv();
    EXPECT_EQ(lexer.peek(), sexpr::TOK_OPEN);
    EXPECT_EQ(lexer.span().error_str("test"_s),
            "<lexer-test1>:1:5: error: test\n"
            " foo( ) 123\"\" \n"
            "    ^\n"_s
    );
    lexer.adv();
    EXPECT_EQ(lexer.peek(), sexpr::TOK_CLOSE);
    EXPECT_EQ(lexer.span().error_str("test"_s),
            "<lexer-test1>:1:7: error: test\n"
            " foo( ) 123\"\" \n"
            "      ^\n"_s
    );
    lexer.adv();
    EXPECT_EQ(lexer.peek(), sexpr::TOK_TOKEN);
    EXPECT_EQ(lexer.val_string(), "123"_s);
    EXPECT_EQ(lexer.span().error_str("test"_s),
            "<lexer-test1>:1:9: error: test\n"
            " foo( ) 123\"\" \n"
            "        ^~~\n"_s
    );
    lexer.adv();
    EXPECT_EQ(lexer.peek(), sexpr::TOK_STRING);
    EXPECT_EQ(lexer.val_string(), ""_s);
    EXPECT_EQ(lexer.span().error_str("test"_s),
            "<lexer-test1>:1:12: error: test\n"
            " foo( ) 123\"\" \n"
            "           ^~\n"_s
    );
    lexer.adv();
    EXPECT_EQ(lexer.peek(), sexpr::TOK_EOF);
}
Example #12
int main( int argc, char *argv[] )
{
    ANTLR_USING_NAMESPACE(std);
    ANTLR_USING_NAMESPACE(antlr);
    ANTLR_USING_NAMESPACE(VmiCalls);

    if( argc < 2 )
	exit( 0 );
    try {
        ifstream input( argv[1] );
	VmiCallsLexer lexer(input);
	TokenBuffer buffer(lexer);
	VmiCallsParser parser(buffer);

	ASTFactory ast_factory;
	parser.initializeASTFactory( ast_factory );
	parser.setASTFactory( &ast_factory );

	parser.vmiCallsFile();
	RefAST a = parser.getAST();

	VmiCallsHtmlEmitter tree_walker;
	tree_walker.initializeASTFactory( ast_factory );
	tree_walker.setASTFactory( &ast_factory );

	tree_walker.vmiCalls( a );

	/*
	cout << "Tree:" << endl;
	cout << a->toStringTree() << endl;
	*/
    }
    catch( ANTLRException& e )
    {
	cerr << "exception: " << e.getMessage() << endl;
	return -1;
    }
    catch( exception& e )
    {
	cerr << "exception: " << e.what() << endl;
	return -1;
    }
    return 0;
}
Example #13
void BaseCommandsQsciApi::updateAutoCompletionList(const QStringList& context, QStringList& list) {
  BaseCommandsQsciLexer* lex = static_cast<BaseCommandsQsciLexer*>(lexer());
  auto commands = lex->commands();
  for (auto it = context.begin(); it != context.end(); ++it) {
    QString val = *it;
    for (size_t i = 0; i < commands.size(); ++i) {
      core::CommandInfo cmd = commands[i];
      if (canSkipCommand(cmd)) {
        continue;
      }

      QString jval;
      common::ConvertFromString(cmd.name, &jval);
      if (jval.startsWith(val, Qt::CaseInsensitive)) {
        list.append(jval + "?1");
      }
    }
  }
}
Example #14
//#################### LOADING METHODS ####################
InputBinding_CPtr BindingFile::load(const std::string& filename)
{
	std::map<InputAction,Inputter_CPtr> inputters;

	// Construct the string -> key map.
	std::map<std::string,SDLKey> stringToKey;
	for(int i=0; i<SDLK_LAST; ++i)
	{
		SDLKey key = SDLKey(i);
		std::string name = SDL_GetKeyName(key);
		stringToKey.insert(std::make_pair(name,key));
	}

	XMLLexer_Ptr lexer(new XMLLexer(filename));
	XMLParser parser(lexer);
	XMLElement_CPtr root = parser.parse();
	XMLElement_CPtr bindingElt = root->find_unique_child("binding");

	std::vector<XMLElement_CPtr> actionElts = bindingElt->find_children("action");
	for(size_t i=0, size=actionElts.size(); i<size; ++i)
	{
		const XMLElement_CPtr& actionElt = actionElts[i];
		const std::string& actionName = actionElt->attribute("name");
		const std::string& actionInput = actionElt->attribute("input");

		InputAction action = lexical_cast<InputAction>(actionName);
		Inputter_CPtr inputter;

		std::map<std::string,SDLKey>::const_iterator jt = stringToKey.find(actionInput);
		if(jt != stringToKey.end())
		{
			inputter.reset(new KeyInputter(jt->second));
		}
		else if(actionInput == "left mouse") inputter.reset(new MouseButtonInputter(MOUSE_BUTTON_LEFT));
		else if(actionInput == "middle mouse") inputter.reset(new MouseButtonInputter(MOUSE_BUTTON_MIDDLE));
		else if(actionInput == "right mouse") inputter.reset(new MouseButtonInputter(MOUSE_BUTTON_RIGHT));
		else throw Exception("Unknown input action: " + actionInput);

		inputters.insert(std::make_pair(action, inputter));
	}

	return InputBinding_CPtr(new InputBinding(inputters));
}
Example #15
// Add auto-completion words based on the last partial word entered.
void QsciAPIs::lastPartialWord(const QString &word, QStringList &with_context, bool &unambig)
{
    if (lexer()->caseSensitive())
    {
        QMap<QString, WordIndexList>::const_iterator it;
        QStringList::const_iterator wit = QsciAPIsPrepared::lowerBound(prep->words, word);

        if (wit == prep->words.end())
            it = prep->wdict.end();
        else
            it = prep->wdict.find(*wit);

        while (it != prep->wdict.end())
        {
            if (!it.key().startsWith(word))
                break;

            addAPIEntries(it.data(), false, with_context, unambig);

            ++it;
        }
    }
    else
    {
        QMap<QString, QString>::const_iterator it;
        QStringList::const_iterator wit = QsciAPIsPrepared::lowerBound(prep->words, word);

        if (wit == prep->words.end())
            it = prep->cdict.end();
        else
            it = prep->cdict.find(*wit);

        while (it != prep->cdict.end())
        {
            if (!it.key().startsWith(word))
                break;

            addAPIEntries(prep->wdict[it.data()], false, with_context, unambig);

            ++it;
        }
    }
}
Example #16
// Here's where we do the real work...
static void parseFile(const string& f)
{
	try
	{
		ifstream s(f.c_str());

		// Create a scanner that reads from the input stream
		JavaLexer lexer(s);
		lexer.setFilename(f);

/*
		while (true) {
			RefToken t = lexer.nextToken();
			if (t->getType() == Token::EOF_TYPE)
				break;
			cout << t->getText() << ":" << t->getType() << endl;
		}
*/

		// Create a parser that reads from the scanner
		JavaRecognizer parser(lexer);
		parser.setFilename(f);

		// make an ast factory
		ASTFactory ast_factory;

		// initialize and put it in the parser...
		parser.initializeASTFactory(ast_factory);
		parser.setASTFactory(&ast_factory);

		// start parsing at the compilationUnit rule
		parser.compilationUnit();

		// do something with the tree
		doTreeAction( ast_factory, parser.getAST() );
	}
	catch (ANTLRException& e) {
		cerr << "parser exception: " << e.toString() << endl;
	}
	catch (exception& e) {
		cerr << "exception: " << e.what() << endl;
	}
}
Example #17
bool SharedMemory::createHandle(Handle& handle, Protection protection)
{
    ASSERT_ARG(handle, handle.isNull());

    RChunk chunk;
    if (chunk.SetReturnedHandle(m_handle))
        return false;

    // Convert the name (string form) to a uint32_t.
    TName globalChunkName = chunk.Name();
    TLex lexer(globalChunkName);
    TUint32 nameAsInt = 0;
    if (lexer.Val(nameAsInt, EDecimal))
        return false;

    handle.m_chunkID = nameAsInt;
    handle.m_size = m_size;
    return true;
}
Example #18
void		sv_receive_command(t_sv_prop *sv, int cl)
{
	int 	rd;
	int		ret_cmd;

	if ((rd = E(-1, recv(CL_SOCK(cl), sv->fds[cl].rd, BUF_SIZE, 0), ERR_RECV, NO_EXIT)) > 0)
	{
		printf("[sv_receive_command] [client : %d] [commande : %s] [lu : %d]\n", cl, sv->fds[cl].rd, rd);
		sv->cmd->cmda = lexer(sv->fds[cl].rd);
		ret_cmd = execute(sv, cl);
		if (!ret_cmd)
			pterr(ERR_CMD_NOT_FOUND);
		ft_strarray_del(&(sv->cmd->cmda));
		//ft_bzero(sv->fds[cl].rd, rd);
		ft_bzero(sv->fds[cl].rd, BUF_SIZE + 1);		
	}
	else
		clean_fd(&(sv->fds[cl]));
}
Example #19
void		lexer(t_vars *v)
{
  static int	loop;

  if (v->alias && which_it_is(v) != BUILT_ALI)
    {
      if (loop < 10 && is_there_an_alias(v))
	{
	  loop += 1;
	  build_argv(v);
	  lexer(v);
	}
      else if (loop == 10)
	loop = 0;
    }
  else if (there_is_home(v))
    modify_prompt_for_home(v);
  loop = 0;
}
Example #20
/**
  * Parses a comma separated string and constructs a list out of the values
  */
void CTestCalInterimApiSuiteStepBase::TokenizeStringL(const TDesC& aString, RArray<TPtrC>& aList, TChar aSeparator)
	{
	TLex lexer(aString);

	while(!lexer.Eos())
		{
		lexer.SkipSpaceAndMark();

		while(!lexer.Eos() && lexer.Get() != aSeparator)
			{
			}

		if(!lexer.Eos())
			{
			lexer.UnGet(); // Do not include trailing ','
			}
		aList.AppendL(lexer.MarkedToken());
		lexer.Inc();
		}
	}
Example #21
/*
   Parses a comma separated string and constructs a list out of the values
   @param	aString a reference to a string to be tokenized
   @param	aList is an out parameter to store a list of tokens in an arry
   @param	aSeparator is a character used to delimit the tokens
  */
void CTestRControlChannel::TokenizeStringL(const TDesC& aString, RArray<TPtrC>& aList, TChar aSeparator)
	{
	TLex lexer(aString);

	while(!lexer.Eos())
		{
		lexer.SkipSpaceAndMark();

		while(!lexer.Eos() && lexer.Get() != aSeparator)
			{
			}

		if(!lexer.Eos())
			{
			lexer.UnGet();
			}
		aList.AppendL(lexer.MarkedToken());
		lexer.Inc();
		}
	}
Example #22
TEST(sexpr, parselist)
{
    sexpr::SExpr s;
    sexpr::Lexer lexer(io::from_string, "<parser-test1>"_s, "(foo)(bar)\n"_s);

    EXPECT_TRUE(sexpr::parse(lexer, s));
    EXPECT_EQ(s._type, sexpr::LIST);
    EXPECT_EQ(s._list.size(), 1);
    EXPECT_EQ(s._list[0]._type, sexpr::TOKEN);
    EXPECT_EQ(s._list[0]._str, "foo"_s);

    EXPECT_TRUE(sexpr::parse(lexer, s));
    EXPECT_EQ(s._type, sexpr::LIST);
    EXPECT_EQ(s._list.size(), 1);
    EXPECT_EQ(s._list[0]._type, sexpr::TOKEN);
    EXPECT_EQ(s._list[0]._str, "bar"_s);

    EXPECT_FALSE(sexpr::parse(lexer, s));
    EXPECT_EQ(lexer.peek(), sexpr::TOK_EOF);
}
Example #23
File: start_shell.c Project: girards/42sh
int		start_shell(t_42sh *shell)
{
  t_token	*token;
  int		fd;

  fd = creat(".hist42sh", 0644);
  if ((my_clear() == -1))
    return (-42);
  while (1)
    {
      token = NULL;
      signal(SIGINT, get_sigint);
      prompt(shell);
      shell->cmd = read_line(fd);
      if (lexer(shell->cmd, &token, shell) == -42)
	return (-42);
      free_my_tok(token);
    }
  return (0);
}
Example #24
/** Parses a comma separated string and constructs a list out of the values
@param	aString The string to be tokenized
@param	aList Output list containing the tokens
@param	aSeparator Separator to recognize the tokens
*/
void CContactsPBAPExportUtilityClass::TokenizeStringL(const TDesC& aString, RArray<TPtrC>& aList, TChar aSeparator)
{
    TLex lexer(aString);

    while(!lexer.Eos())
    {
        lexer.SkipSpaceAndMark();

        while(!lexer.Eos() && lexer.Get() != aSeparator)
        {
        }

        if(!lexer.Eos())
        {
            lexer.UnGet(); // Do not include trailing ','
        }
        aList.AppendL(lexer.MarkedToken());
        lexer.Inc();
    }
}
Example #25
/*
================
CmdSystemEx::ExecuteConfig
================
*/
void CmdSystemEx::ExecuteConfig( const char *filename ) {
	// was just full lines dunno why BOM error should be printed for plain text configs
	Lexer lexer(LEXER_FULL_LINES|LEXER_NO_BOM_WARNING);
	if ( !lexer.LoadFile(filename) )
		return;

	try {
		const Token *token;
		const char *p;
		while ( (token = lexer.ReadToken()) != OG_NULL ) {
			p = token->GetString();
			if ( p && *p )
				ExecuteCmd( p, inEngineStartup );
		}
	}
	catch( LexerError &err ) {
		String errStr;
		err.ToString( errStr );
		User::Error( ERR_LEXER_FAILURE, errStr.c_str(), filename );
	}
}
Example #26
void SyntaxHighlighter::highlightBlock(const QString& text)
{
    TextBlockData *blockData = static_cast<TextBlockData*>(currentBlockUserData());
    if(!blockData) {
        blockData = new TextBlockData;
        blockData->tokens.reserve(8);
        setCurrentBlockUserData(blockData);
    }
    else {
        blockData->tokens.clear();
    }

    int previousState = previousBlockState();
    if (previousState == -1)
        previousState = ScLexer::InCode;

    ScLexer lexer( text, 0, previousState );

    while (lexer.offset() < text.size()) {
        switch (lexer.state()) {
        case ScLexer::InCode:
            highlightBlockInCode(lexer);
            break;

        case ScLexer::InString:
            highlightBlockInString(lexer);
            break;

        case ScLexer::InSymbol:
            highlightBlockInSymbol(lexer);
            break;

        default:
            if(lexer.state() >= ScLexer::InComment)
                highlightBlockInComment(lexer);
        }
    }

    setCurrentBlockState( lexer.state() );
}
Example #27
bool CGptBind::run( )
{
   ifstream in((_options->sourcefile+".sld").c_str());
   GptBindLexer lexer(in);
   GptBindParser parser(lexer);

//   string asmProgram = parser.program(_options->sourcefile);
   parser.sld_grammar(_options->sourcefile);

   string cppOutput = parser.getCpp();
   string hppOutput = parser.getHpp();
   string makefileOutput = parser.getMakefile();

   ofstream cppFile((_options->destfile + ".cpp").c_str(), ios_base::out);
   if (!cppFile) {
      cout << "ERRO: não foi possível abrir o arquivo: \"" << _options->destfile << "\"" << endl;
      return false;
   }

   cppFile << cppOutput;

   ofstream hppFile((_options->destfile + ".hpp").c_str(), ios_base::out);
   if (!hppFile) {
      cout << "ERRO: não foi possível abrir o arquivo: \"" << _options->destfile << "\"" << endl;
      return false;
   }

   hppFile << hppOutput;

   ofstream makefileFile(("Makefile." + _options->destfile).c_str(), ios_base::out);
   if (!makefileFile) {
      cout << "ERRO: não foi possível abrir o arquivo: \"" << _options->destfile << "\"" << endl;
      return false;
   }

   makefileFile << makefileOutput;

   return true;
}
Example #28
void WriteTokens(std::istream& is, std::ostream& os)
{
    Lexer lexer(is);
    int currentLine = 1;

    do
    {
        lexer.GetNext();

        for (int i = currentLine; i < lexer.CurrentLine; i++)
        {
            os << '\n';
        }

        currentLine = lexer.CurrentLine;

        os << lexer.CurrentToken << ' ';

    } while (lexer.CurrentToken.Type != TokenType::EndOfFile);

    os << '\n';
}
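A short caller sketch for WriteTokens(), assuming the Lexer type it uses is linked in; the file name is purely illustrative:

#include <fstream>
#include <iostream>

void WriteTokens(std::istream& is, std::ostream& os);   // defined above

int main()
{
    std::ifstream in("input.src");       // hypothetical source file
    if (!in) {
        std::cerr << "cannot open input.src\n";
        return 1;
    }
    // Stream every token to stdout, reproducing the original line breaks.
    WriteTokens(in, std::cout);
    return 0;
}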
Example #29
std::unique_ptr<ParsedFile> parseFileSyntax(const FileForParsing& ffp) {
  auto file = std::unique_ptr<ParsedFile>(new ParsedFile());
  Lexer lexer(ffp);
  while(lexer.hasCurrentToken()) {
    if(lexer.currType()==STATEMENT_END) {
      lexer.advance(); //Eat ';'
      continue; //Extra semicolons are ok
    }
    bool pub = lexer.currType()==PUB;
    if(pub)
      lexer.advance();
    unique_ptr<Definition> definition = parseDefinition(lexer, pub);
    if(definition) { //A nice definition was returned, and has eaten its own semicolon
      definition->printSignature();
      file->m_definitions.push_back(std::move(definition));
    }
    else if(lexer.hasCurrentToken()) //Error occurred, but already printed
      skipUntilNewDefinition(lexer);
  }
  terminateIfErrors();
  return file;
}
Example #30
NodeType * deserialize_lex_forwarder( const std::string & lexerClassName,
                                      std::istream & src
                                    )
{
    // CERR << "deserialize_lex_forwarder("<<lexerClassName<<")\n";
    std::auto_ptr<FlexLexer> lexer( ::s11n::cl::classload<FlexLexer>( lexerClassName ) );
    if( ! lexer.get() )
    {
        throw ::s11n::s11n_exception(
            "%s:%d: s11n::io::deserialize_lex_forwarder(): Lexer '%s' was not found by classload<FlexLexer>(). It is probably not registered with the classloader.",
            __FILE__,
            __LINE__,
            lexerClassName.c_str() );
    }

    typedef s11n::io::data_node_tree_builder<NodeType> BuilderType;
    typedef tree_builder_context<BuilderContext> BC;
    std::auto_ptr<BuilderType> treebuilder( new BuilderType );
    treebuilder->auto_delete( true ); // if we throw, let it clean up
    try
    {
        BC::bind( lexer.get(), treebuilder.get() );
        // ^^^ sets up the comm channel between the builder and lexer
        Private::lex_api_hider_yylex(lexer.get(),src); // executes the lexer
    }
    catch ( const std::exception & ex )
    {
        BC::unbind( lexer.get() ); // free up lexer-to-builder binding
        throw ex;
    }
    catch (...)
    {
        BC::unbind( lexer.get() ); // free up lexer-to-builder binding
        throw;
    }
    BC::unbind( lexer.get() ); // free up lexer-to-builder binding
    treebuilder->auto_delete( false ); // we're taking ownership of the children
    return treebuilder->root_node();
}