Пример #1
0
// Parse the "Scene" chunk: only the ambient colour is read into the
// document; directional lights and any other attributes are skipped.
// Emits a debug trace of every token via OutputDebugStringA.
void read_scene(tokenizer& t,document_type& doc)
{
    OutputDebugStringA("Scene>>>>");
    t.expect_literal( "{" );
    bool done = false;
    while( !done ) {
        substr tok = t();
        OutputDebugStringA((tok.str() + "\n").c_str());
        if( tok == "}" ) {
            done = true;
        } else if( tok == "amb" ) {
            // Ambient light colour: three floats in [0,1]; alpha is forced opaque.
            doc.scene.ambient.red   = t.expect_float(0,1);
            doc.scene.ambient.green = t.expect_float(0,1);
            doc.scene.ambient.blue  = t.expect_float(0,1);
            doc.scene.ambient.alpha = 1;
            t.expect_linefeed();
        } else if( tok == "dirlights" ) {
            // Directional lights are not supported: skip the nested chunk.
            t();
            skip_chunk(t);
            skip_to_linefeed(t);
        } else {
            skip_to_linefeed(t);
        }
    }
    OutputDebugStringA("Scene<<<<");
}
Пример #2
0
// Reads paragraphs from `is`, tokenizes them, runs the entity recognizer on
// each sentence and writes one entity per line: comma-separated 1-based
// global token ids, TAB, entity type, TAB, the entity text.
void recognize_vertical(istream& is, ostream& os, const ner& recognizer, tokenizer& tokenizer) {
  string para;
  vector<string_piece> forms;
  vector<named_entity> entities;
  unsigned total_tokens = 0;
  string entity_ids, entity_text;

  while (getpara(is, para)) {
    // Tokenize the paragraph and recognize entities sentence by sentence.
    tokenizer.set_text(para);
    while (tokenizer.next_sentence(&forms, nullptr)) {
      recognizer.recognize(forms, entities);
      sort_entities(entities);

      for (auto&& entity : entities) {
        entity_ids.clear();
        entity_text.clear();
        for (auto idx = entity.start; idx < entity.start + entity.length; idx++) {
          if (idx != entity.start) {
            entity_ids += ',';
            entity_text += ' ';
          }
          entity_ids += to_string(total_tokens + idx + 1);
          entity_text.append(forms[idx].str, forms[idx].len);
        }
        os << entity_ids << '\t' << entity.type << '\t' << entity_text << '\n';
      }
      os << flush;
      // The +1 accounts for the sentence boundary in the global numbering.
      total_tokens += forms.size() + 1;
    }
  }
}
Пример #3
0
template <typename R> bool extract_identifier(R& result,tokenizer& tokenizer)
{
 result.clear();
 
 //buffer
 
 array<ascii> buffer;
 array<ascii> current;
 
 //identifier

 if(!tokenizer.identifier(current))
  return false;
  
 buffer.append(current);
  
 //word
 
 if(tokenizer.word(current))
  buffer.append(current);
  
 //delimited
 
 if(!tokenizer.is_delimited())
  return false;
  
 //commit

 result=buffer;
 
 update(symbols()._identifier,buffer);
 
 return true;
}
Пример #4
0
template <typename R> bool extract_word(R& result,tokenizer& tokenizer)
{
 result.clear();
 
 //buffer
 
 array<ascii> buffer;

 //word
 
 if(!tokenizer.word(buffer))
  return false;
  
 //delimited
 
 if(!tokenizer.is_delimited())
  return false;
  
 //commit
 
 result=buffer;
 
 update(symbols()._word,buffer);
 
 return true;
}
Пример #5
0
// Tokenizes and tags paragraphs from `is`, writing XML to `os`: each token
// becomes <token lemma="..." tag="...">form</token>, sentences are wrapped
// in <sentence>, and the text between tokens is emitted xml-encoded so the
// paragraph's original spacing is preserved.
void tag_xml(istream& is, ostream& os, const tagger& tagger, tokenizer& tokenizer, const tagset_converter& tagset_converter, const derivation_formatter& derivation, morpho::guesser_mode guesser) {
  string para;
  vector<string_piece> forms;
  vector<tagged_lemma> tags;

  while (getpara(is, para)) {
    // Tokenize and tag
    tokenizer.set_text(para);
    const char* unprinted = para.c_str();  // start of text not yet written out
    while (tokenizer.next_sentence(&forms, nullptr)) {
      tagger.tag(forms, tags, guesser);

      for (unsigned i = 0; i < forms.size(); i++) {
        // Convert to the requested tagset and format the lemma before printing.
        tagset_converter.convert(tags[i]);
        derivation.format_derivation(tags[i].lemma);

        os << xml_encoded(string_piece(unprinted, forms[i].str - unprinted));
        if (!i) os << "<sentence>";
        os << "<token lemma=\"" << xml_encoded(tags[i].lemma, true) << "\" tag=\"" << xml_encoded(tags[i].tag, true) << "\">"
           << xml_encoded(forms[i]) << "</token>";
        if (i + 1 == forms.size()) os << "</sentence>";
        unprinted = forms[i].str + forms[i].len;
      }
    }
    // Emit the paragraph tail (text after the last token) and flush.
    os << xml_encoded(string_piece(unprinted, para.c_str() + para.size() - unprinted)) << flush;
  }
}
Пример #6
0
// Builds a spell-related combat-log event: after the shared wowEvent prefix,
// reads the spell id, quoted spell name and school from tokens 9-11.
spellEvent::spellEvent(tokenizer& t) : wowEvent(t)
{
	spellID = asInt(t.token(9));
	string spellName = t.token(10); trimQuotes(spellName);
	// Record the name in the global spell table, keyed by spell id.
	spells[spellID] = spellName;
	spellSchool = asuIntFromHexa(t.token(11)); // school is given in hex
}
Пример #7
0
// A number may be preceded by any run of sign or decimal-point characters;
// it is recognized when the first character past that run is a digit.
bool number::is_next(tokenizer &tokens, int i, void *data)
{
	while (true) {
		char lead = tokens.peek_char(i);
		if (lead != '-' && lead != '+' && lead != '.')
			break;
		i++;
	}

	char first = tokens.peek_char(i);
	return '0' <= first && first <= '9';
}
Пример #8
0
// A statement starts either with a node id or with one of the graph-level
// keywords: subgraph, graph, node, edge.
bool statement::is_next(tokenizer &tokens, int i, void *data)
{
	if (node_id::is_next(tokens, i, data))
		return true;

	return tokens.is_next("subgraph")
		|| tokens.is_next("graph")
		|| tokens.is_next("node")
		|| tokens.is_next("edge");
}
Пример #9
0
// Tokenizes paragraphs from `is` into vertical format: one token per line,
// with a blank line (and a flush) after each sentence.
void tokenize_vertical(istream& is, ostream& os, tokenizer& tokenizer) {
  string para;
  vector<string_piece> forms;
  while (getpara(is, para)) {
    tokenizer.set_text(para);
    while (tokenizer.next_sentence(&forms, nullptr)) {
      for (size_t i = 0; i < forms.size(); i++)
        os << forms[i] << '\n';
      os << '\n' << flush;
    }
  }
}
Пример #10
0
	// Parses a field definition: an identifier naming the field, then a
	// '{'-enclosed run of raw tokens that form its initial value.
	inline stmt_def_field(const statement&parent,const token&tk,tokenizer&t):
		statement{parent,tk},
		ident_{t.next_token()}
	{
		if(ident_.is_name(""))
			throw compiler_error(ident_,"expected field name");

		if(!t.is_next_char('{'))
			throw compiler_error(ident_,"expected '{' initial value   then '}' ",ident_.name());

		// Everything up to the closing '}' is kept verbatim.
		while(true){
			if(t.is_next_char('}'))break;
			tokens_.push_back(t.next_token());
		}
	}
Пример #11
0
// Reads a single control token (CR or LF) into `result` and updates the
// matching control symbol. Returns false when the input does not start with
// one of the known control sequences.
template <typename R> bool extract_control(R& result,tokenizer& tokenizer)
{
 result.clear();
 
 //controls: map each recognized control sequence to its symbol id
 
 dictionary<string,id<string>> controls=
 {
  "\r",symbols()._cr,
  "\n",symbols()._lf
 };

 //buffer
 
 array<ascii> buffer;

 //any: accept exactly one of the control sequences
 
 if(!tokenizer.any(buffer,controls.keys()))
  return false;
  
 //commit
 
 result=buffer;

 // Look the consumed sequence back up to update the corresponding symbol.
 update(controls[buffer.join("")],buffer);
 
 return true;
}
Пример #12
0
	// Parses one function parameter: a name token, optionally followed by
	// one or more ':'-separated keyword tokens.
	inline stmt_def_func_param(const statement&parent,tokenizer&t):
		statement{parent,t.next_token()}
	{
		assert(!tok().is_name(""));

		// No ':' means a plain parameter without keywords.
		if(!t.is_next_char(':'))
			return;

		// Collect keywords until no further ':' follows.
		while(true){
			if(t.is_eos())throw compiler_error(*this,"unexpected end of stream",tok().name_copy());
			keywords_.push_back(t.next_token());
			if(t.is_next_char(':'))
					continue;
			break;
		}
	}
Пример #13
0
// Parses a sequence of assignment lists into `attributes`.
// Follows the tokenizer's increment/expect/decrement protocol: each round
// first announces the expected production, then consumes it for as long as
// the tokenizer confirms more matching input is available.
void attribute_list::parse(tokenizer &tokens, void *data)
{
	tokens.syntax_start(this);

	tokens.increment(false);
	tokens.expect<assignment_list>();

	while (tokens.decrement(__FILE__, __LINE__, data))
	{
		attributes.push_back(assignment_list(tokens, data));

		// Announce the next (optional) assignment_list.
		tokens.increment(false);
		tokens.expect<assignment_list>();
	}

	tokens.syntax_end(this);
}
Пример #14
0
// Parse the "Scene" chunk: only the ambient colour is read into the
// document; every other attribute line is skipped.
void read_scene(tokenizer& t,document_type& doc)
{
        t.expect_literal( "{" );
        while( true ) {
                substr tok = t();
                if( tok == "}" )
                        break;
                if( tok == "amb" ) {
                        // Three colour components in [0,1]; alpha is forced opaque.
                        doc.scene.ambient.red   = t.expect_float(0,1);
                        doc.scene.ambient.green = t.expect_float(0,1);
                        doc.scene.ambient.blue  = t.expect_float(0,1);
                        doc.scene.ambient.alpha = 1;
                        t.expect_linefeed();
                } else {
                        skip_to_linefeed(t);
                }
        }
}
Пример #15
0
// Tokenizes paragraphs into XML: tokens wrapped in <token>, sentences in
// <sentence>, with the text between tokens passed through xml-encoded so
// the original spacing survives.
static void tokenize_xml(istream& is, ostream& os, tokenizer& tokenizer) {
  string para;
  vector<string_piece> forms;
  while (getpara(is, para)) {
    tokenizer.set_text(para);
    const char* rest = para.c_str();   // start of the not-yet-printed text
    while (tokenizer.next_sentence(&forms, nullptr)) {
      for (unsigned i = 0; i < forms.size(); i++) {
        if (rest < forms[i].str)
          os << xml_encoded(string_piece(rest, forms[i].str - rest));
        if (i == 0) os << "<sentence>";
        os << "<token>" << xml_encoded(forms[i]) << "</token>";
        if (i + 1 == forms.size()) os << "</sentence>";
        rest = forms[i].str + forms[i].len;
      }
    }

    // Trailing text after the last token (normally just whitespace).
    if (rest < para.c_str() + para.size())
      os << xml_encoded(string_piece(rest, para.c_str() + para.size() - rest));
    os << flush;
  }
}
Пример #16
0
// Tokenizes and tags paragraphs, writing vertical output: one line per
// token as "form TAB lemma TAB tag", with an empty line after each sentence.
void tag_vertical(istream& is, ostream& os, const tagger& tagger, tokenizer& tokenizer, const tagset_converter& tagset_converter, const derivation_formatter& derivation, morpho::guesser_mode guesser) {
  string para;
  vector<string_piece> forms;
  vector<tagged_lemma> tags;

  while (getpara(is, para)) {
    tokenizer.set_text(para);
    while (tokenizer.next_sentence(&forms, nullptr)) {
      tagger.tag(forms, tags, guesser);

      for (unsigned idx = 0; idx < tags.size(); idx++) {
        // Convert to the requested tagset and format the lemma before printing.
        tagset_converter.convert(tags[idx]);
        derivation.format_derivation(tags[idx].lemma);
        os << forms[idx] << '\t' << tags[idx].lemma << '\t' << tags[idx].tag << '\n';
      }
      os << endl;
    }
  }
}
Пример #17
0
// Builds a damage record from the current combat-log line. The damage
// fields are decimal integers at fixed positions starting at token 22;
// `offset` shifts the base for event types with extra leading tokens
// (presumably spell-prefixed events — confirm at the call sites).
damage::damage(tokenizer& t, int offset)
{
	dmgDone = asInt(t.token(22+offset));
	overkill = asInt(t.token(23+offset));
	magicSchool = asInt(t.token(24+offset));
	resisted = asInt(t.token(25 + offset));
	blocked = asInt(t.token(26 + offset));
	absorbed = asInt(t.token(27 + offset));
	critical = asInt(t.token(28 + offset));
	glancing = asInt(t.token(29 + offset));
	crushing = asInt(t.token(30 + offset));
	multistrike = asInt(t.token(31 + offset));
}
Пример #18
0
// Tokenizes paragraphs, recognizes named entities and writes XML output:
// tokens in <token>, sentences in <sentence>, entities as possibly nested
// <ne type="..."> elements. Inter-token text is emitted xml-encoded.
void recognize_untokenized(istream& is, ostream& os, const ner& recognizer, tokenizer& tokenizer) {
  string para;
  vector<string_piece> forms;
  vector<named_entity> entities;
  vector<size_t> entity_ends;  // stack of token indices where open <ne>s end

  while (getpara(is, para)) {
    // Tokenize the text and find named entities
    tokenizer.set_text(para);
    const char* unprinted = para.c_str();  // start of text not yet emitted
    while (tokenizer.next_sentence(&forms, nullptr)) {
      recognizer.recognize(forms, entities);
      sort_entities(entities);

      for (unsigned i = 0, e = 0; i < forms.size(); i++) {
        if (unprinted < forms[i].str) os << xml_encoded(string_piece(unprinted, forms[i].str - unprinted));
        if (i == 0) os << "<sentence>";

        // Open entities starting at current token
        for (; e < entities.size() && entities[e].start == i; e++) {
          os << "<ne type=\"" << xml_encoded(entities[e].type, true) << "\">";
          entity_ends.push_back(entities[e].start + entities[e].length - 1);
        }

        // The token itself
        os << "<token>" << xml_encoded(forms[i]) << "</token>";

        // Close entities ending after current token
        while (!entity_ends.empty() && entity_ends.back() == i) {
          os << "</ne>";
          entity_ends.pop_back();
        }
        if (i + 1 == forms.size()) os << "</sentence>";
        unprinted = forms[i].str + forms[i].len;
      }
    }
    // Write rest of the text (should be just spaces)
    if (unprinted < para.c_str() + para.size()) os << xml_encoded(string_piece(unprinted, para.c_str() + para.size() - unprinted));
    os << flush;
  }
}
Пример #19
0
// Parses the whole token stream into a list of statements by calling
// parseStatement until it yields an empty statement. A List result at this
// level indicates an unmatched ')' and is reported (but still kept). Any
// leftover unparsed text is appended as a Raw statement.
std::list<toSQLParse::statement> toSQLParse::parse(tokenizer &tokens)
{
	std::list<toSQLParse::statement> ret;
	statement cur(statement::Statement);
	for (cur = parseStatement(tokens, false, false);
			cur.subTokens().begin() != cur.subTokens().end();
			cur = parseStatement(tokens, false, false))
	{
		if (cur.Type == statement::List)
		{
			QMessageBox::warning(QApplication::activeWindow(), "Sqliteman",
								 "toSQLparse: Unbalanced parenthesis (Too many ')')");
		}
		ret.insert(ret.end(), cur);
	}
	// Whatever the parser did not consume is preserved verbatim.
	QString str = tokens.remaining(false);
	if (!str.isEmpty())
		ret.insert(ret.end(), statement(statement::Raw,
										str, tokens.line()));
	return ret;
}
Пример #20
0
// Tokenizes paragraphs, recognizes named entities and writes CoNLL-style
// output: one token per line followed by TAB and either B-TYPE / I-TYPE
// (first / subsequent token of an entity) or '_' outside any entity;
// sentences are separated by a blank line. Overlapping or nested entities
// are flattened: while one entity is open, further starts are ignored.
void recognize_conll(istream& is, ostream& os, const ner& recognizer, tokenizer& tokenizer) {
  string para;
  vector<string_piece> forms;
  vector<named_entity> entities;

  while (getpara(is, para)) {
    // Tokenize and tag
    tokenizer.set_text(para);
    while (tokenizer.next_sentence(&forms, nullptr)) {
      recognizer.recognize(forms, entities);
      sort_entities(entities);

      string entity_type;
      unsigned in_entity = 0;     // tokens remaining in the currently open entity
      // Initialize explicitly: the original left this uninitialized, which
      // is UB-prone even though it was only read after being set.
      bool entity_start = false;
      for (unsigned i = 0, e = 0; i < forms.size(); i++) {
        if (!in_entity && e < entities.size() && entities[e].start == i) {
          in_entity = entities[e].length;
          entity_start = true;
          entity_type = entities[e].type;
          e++;
        }

        os << forms[i] << '\t';
        if (in_entity) {
          os << (entity_start ? "B-" : "I-") << entity_type;
          entity_start = false;
          in_entity--;
        } else {
          os << '_';
        }
        os << '\n';
      }

      os << '\n' << flush;
    }
  }
}
Пример #21
0
// Extracts the next token from the tokenizer by trying each token class in
// priority order; the first extractor that succeeds wins. extract_unit is
// the mandatory fallback and is asserted (via check) to succeed.
void parse(tokenizer& tokenizer)
{
 //clear
 
 clear();

 //empty: nothing left to parse
 
 if(tokenizer.is_empty())
  return;

 //escape

 if(try_(tokenizer,&self::extract_escape<fake>))
  return;

 //number

 if(try_(tokenizer,&self::extract_number<fake>))
  return;

 //identifier

 if(try_(tokenizer,&self::extract_identifier<fake>))
  return;  

 //delimiter

 if(try_(tokenizer,&self::extract_delimiter<fake>))
  return;  

 //word

 if(try_(tokenizer,&self::extract_word<fake>))
  return;  
  
 //control

 if(try_(tokenizer,&self::extract_control<fake>))
  return;  

 //blank

 if(try_(tokenizer,&self::extract_blank<fake>))
  return;  
  
 //unit: last-resort single-character token, must succeed
 
 check(try_(tokenizer,&self::extract_unit<fake>));
}
Пример #22
0
// Parse a vertex chunk: a '{', a linefeed, then `count` lines of three
// floats (x y z) each appended to `vertices` as one vertex_type, and a
// closing '}'. Throws (via the tokenizer's expect_* helpers) on malformed
// input.
void read_vertices(tokenizer& t,int count,std::vector<vertex_type>& vertices)
{
    t.expect_literal( "{" );
    t.expect_linefeed();
    if( count > 0 ) {
        // Reserve up front so the loop performs a single allocation.
        vertices.reserve( vertices.size() + count );
    }
    for( int i = 0 ; i< count ; i++ ) {
        vertex_type v; 
        v.x = t.expect_float();
        v.y = t.expect_float();
        v.z = t.expect_float();
        t.expect_linefeed();
        vertices.push_back( v );
    }
    t.expect_literal( "}" );
    t.expect_linefeed();
}
Пример #23
0
// Reads a signed real-number token ([+|-]digits[.digits]) into `result`.
// The assembled text must validate as a real number, must not be followed
// by another '.', and must end at a delimiter; otherwise extraction fails.
template <typename R> bool extract_number(R& result,tokenizer& tokenizer)
{
 result.clear();
 
 //buffer
 
 array<ascii> buffer;
 array<ascii> current;
 
 //optional sign

 if(tokenizer.any(current,pack("+","-")))
  buffer.append(current);
 
 //integer part
 
 if(tokenizer.digit(current))
  buffer.append(current);
 
 //real part: '.' followed by optional digits
 
 if(tokenizer.delimit(current,"."))
 {
  buffer.append(current);
  
  if(tokenizer.digit(current))
   buffer.append(current);
 }
 
 //parse with c api: validate the assembled text as a real number
 
 if(!buffer.join("").is_real())
  return false;

 //no dot after: reject things like "1.2." 
 
 if(tokenizer.starts_with("."))
  return false;

 //delimited
 
 if(!tokenizer.is_delimited())
  return false;
  
 //commit

 result=buffer;

 update(symbols()._number,buffer);
 
 return true;
}
Пример #24
0
// Parse the MQO document header: "Metasequoia Document", a linefeed, then
// "Format Text Ver X.Y"; stores the major/minor version into `doc`.
void read_header(tokenizer& t,document_type& doc)
{
    t.expect_literal( "Metasequoia" );
    t.expect_literal( "Document" );
    t.expect_linefeed();
    t.expect_literal( "Format" );
    t.expect_literal( "Text" );
    t.expect_literal( "Ver" );

    // The version token has the shape "major.minor".
    substr version = t();
    std::stringstream ss( version.str() );
    char dot;
    ss >> doc.major_version >> dot >> doc.minor_version;

    t.expect_linefeed();
}
Пример #25
0
// Common constructor for all combat-log events: parses the shared prefix
// (source GUID/name/flags, dest GUID/name/flags) from tokens 1-8 and
// registers both actors in the global GUID table.
wowEvent::wowEvent(tokenizer&t)
{
	time_ms = t.time_ms;
	string tmp = t.token(1); // sourceGUID
	sourceFlags = asuIntFromHexa(t.token(3));
#ifndef _DEBUG
	// Release builds: the names are locals, used only to feed the GUID table.
	string sourceName;
	string destName;
#else
	// Debug builds: keep a per-event serial number, handy for breakpoints.
	nr = ++count;
	if (nr == 2532)
	{
		nr = 2532;
	}
#endif // DEBUG
	// An all-zero GUID means there is no source actor.
	if (tmp != "0000000000000000")
	{
		sourceName = t.token(2); trimQuotes(sourceName);
		guidImpl source = guids.insert(tmp, sourceName);
		sourceGUID = source.guid;
		atype = source.type;
	} else
	{
		atype = Nil;
	}
	sourceRaidFlags = asuIntFromHexa(t.token(4));

	tmp = t.token(5); // destGUID
	destName = t.token(6); trimQuotes(destName);
	destFlags = asuIntFromHexa(t.token(7));
	destRaidFlags = asuIntFromHexa(t.token(8));

	if (destName != "nil")
	{
		guidImpl dest = guids.insert(tmp, destName);
		destGUID = dest.guid;
	}
} // wowEvent::wowEvent(tokenizer&t)
Пример #26
0
template <typename R> bool extract_blank(R& result,tokenizer& tokenizer)
{
 result.clear();
 
 //buffer
 
 array<ascii> buffer;
 
 //blank
 
 if(!tokenizer.blank(buffer))
  return false;
  
 //commit
 
 result=buffer;
 
 update(symbols()._blank,buffer);
 
 return true;
}
Пример #27
0
// Parse a face chunk: `count` faces, each starting with its vertex count
// (2-4) followed by attribute groups — V(...) vertex indices, M(...)
// material index, UV(...) texture coordinates, COL(...) packed 0xAABBGGRR
// vertex colours; a bare newline terminates one face.
void read_faces(tokenizer& t,int count,std::vector<face_type>& faces)
{ 
    t.expect_literal( "{" );
    t.expect_linefeed();
    for( int i = 0 ; i< count ; i++ ) { 
        face_type f;
        f.vertex_count = t.expect_integer( 2, 4 );
        f.material_index = -1;
        // Default every vertex colour to opaque white.
        for( int j = 0 ; j < f.vertex_count ; j++ ) { 
            f.colors[ j ].red = f.colors[ j ].green = 
                f.colors[ j ].blue = f.colors[ j ].alpha = 1;
        }
        for( ; ; ) { 
            substr token = t();
            if( token == "V" ) { 
                t.expect_literal( "(" );
                for( int j = 0 ; j< f.vertex_count ; j++ ) { 
                    f.vertex_indices[ j ] =
                        t.expect_integer( 0 );
                }
                t.expect_literal( ")" );
            } else if( token == "M" ) { 
                t.expect_literal( "(" );
                f.material_index = t.expect_integer( -1 );
                t.expect_literal( ")" );
            } else if( token == "UV" ) { 
                t.expect_literal( "(" );
                for( int j = 0 ; j< f.vertex_count ; j++ ) { 
                    f.uv[ j ].u = t.expect_float();
                    f.uv[ j ].v = t.expect_float();
                }
                t.expect_literal( ")" );
            } else if( token == "COL" ) { 
                t.expect_literal( "(" );
                for( int j = 0 ; j < f.vertex_count ; j++ ) { 
                    // Unpack the 8-bit channels; the low byte is red.
                    DWORD c = t.expect_dword();
                    f.colors[ j ].red =
                        ( c & 0xff ) / 255.0f;
                    f.colors[ j ].green =
                        ( ( c & 0xff00 ) >> 8 ) /
                        255.0f;
                    f.colors[ j ].blue =
                        ( ( c & 0xff0000 ) >> 16 ) /
                        255.0f;
                    f.colors[ j ].alpha =
                        ( ( c & 0xff000000 ) >> 24 ) /
                        255.0f;
                }
                t.expect_literal( ")" );
            } else if( token == "\n" ) { 
                break;
            } else { 
                throw mqo_reader_error(
                    "unexpected token: "+token.str() );
            }
        }
Пример #28
0
// Parse the Material chunk: a count, then that many material lines. Each
// line is a quoted name followed by attribute groups (shader(..), col(..),
// tex(..), proj_*(..), ...); unknown attributes raise mqo_reader_error and
// a newline terminates one material. Defaults are set before parsing.
void read_material(tokenizer& t,document_type& doc)
{
    int count=t.expect_integer(1);
    t.expect_literal( "{" );
    t.expect_linefeed();
    for(int i=0;i<count;i++){
        material_type m;
        m.name=t.expect_string(31).str();
        // Sensible defaults for every attribute the file may omit.
        m.shader = shader_phong;
        m.vertex_color = false;
        m.color.red = m.color.green = m.color.blue = m.color.alpha =
            1.0f;
        m.diffuse = m.ambient = m.emissive = m.specular = m.power =
            1.0f;
        m.projection = projection_uv;
        m.proj_pos.x = m.proj_pos.y = m.proj_pos.z = 0;
        m.proj_scale.x = m.proj_scale.y = m.proj_scale.z = 0;
        m.proj_angle.heading =
            m.proj_angle.pitching =
            m.proj_angle.banking = 0;
        // Attribute groups until the end-of-line token.
        for(;;){
            substr token = t();
            if( token == "shader" ) { 
                t.expect_literal( "(" );
                m.shader = shader_type(
                    t.expect_integer( 0, 4 ) );
                t.expect_literal( ")" );
            } else if( token == "vcol" ) { 
                t.expect_literal( "(" );
                m.vertex_color = t.expect_bool();
                t.expect_literal( ")" );
            } else if( token == "col" ) { 
                t.expect_literal( "(" );
                m.color.red = t.expect_float( 0, 1.0f );
                m.color.green = t.expect_float( 0, 1.0f );
                m.color.blue = t.expect_float( 0, 1.0f );
                m.color.alpha = t.expect_float( 0, 1.0f );
                t.expect_literal( ")" );
            } else if( token == "dif" ) { 
                t.expect_literal( "(" );
                m.diffuse = t.expect_float( 0, 1.0f );
                t.expect_literal( ")" );
            } else if( token == "amb" ) { 
                t.expect_literal( "(" );
                m.ambient = t.expect_float( 0, 1.0f );
                t.expect_literal( ")" );
            } else if( token == "emi" ) { 
                t.expect_literal( "(" );
                m.emissive = t.expect_float( 0, 1.0f );
                t.expect_literal( ")" );
            } else if( token == "spc" ) { 
                t.expect_literal( "(" );
                m.specular = t.expect_float( 0, 1.0f );
                t.expect_literal( ")" );
            } else if( token == "power" ) { 
                t.expect_literal( "(" );
                m.power = t.expect_float( 0, 100.0f );
                t.expect_literal( ")" );
            } else if( token == "tex" ) { 
                t.expect_literal( "(" );
                m.texture = t.expect_string( 63 ).str();
                t.expect_literal( ")" );
            } else if( token == "aplane" ) { 
                t.expect_literal( "(" );
                m.aplane = t.expect_string( 63 ).str();
                t.expect_literal( ")" );
            } else if( token == "bump" ) { 
                t.expect_literal( "(" );
                m.bump = t.expect_string( 63 ).str();
                t.expect_literal( ")" );
            } else if( token == "proj_type" ) { 
                t.expect_literal( "(" );
                m.projection = projection_type( 
                    t.expect_integer( 0, 3 ) );
                t.expect_literal( ")" );
            } else if( token == "proj_pos" ) { 
                t.expect_literal( "(" );
                m.proj_pos.x = t.expect_float();
                m.proj_pos.y = t.expect_float();
                m.proj_pos.z = t.expect_float();
                t.expect_literal( ")" );
            } else if( token == "proj_scale" ) { 
                t.expect_literal( "(" );
                m.proj_scale.x = t.expect_float();
                m.proj_scale.y = t.expect_float();
                m.proj_scale.z = t.expect_float();
                t.expect_literal( ")" );
            } else if( token == "proj_angle" ) { 
                t.expect_literal( "(" );
                m.proj_angle.heading = t.expect_float();
                m.proj_angle.pitching = t.expect_float();
                m.proj_angle.banking = t.expect_float();
                t.expect_literal( ")" );
            } else if( token == "\n" ) { 
                break;
            } else { 
                throw mqo_reader_error(
                    "unexpected token: "+token.str() );
            }
        }
        doc.materials.push_back( m );
    }
    t.expect_literal( "}" );
    t.expect_linefeed();
}
Пример #29
0
// Dispatches on the next token: '#' starts a comment statement, a name
// followed by '(' is a call statement, anything else is a plain statement.
inline unique_ptr<statement>create_statement_from_tokenizer(const statement&parent,tokenizer&t){
	auto tk=t.next_token();
	if(tk.is_name("#"))return make_unique<stmt_comment>(parent,move(tk),t);// ie    print("hello") // comment
	if(t.is_peek_char('('))return create_call_statement_from_tokenizer(parent,move(tk),t); // ie  f(...)
	return make_unique<statement>(parent,move(tk));// ie  0x80
}
Пример #30
0
/*#define TOPARSE_DEBUG
#include <QtDebug>*/
// Parses one SQL statement (or block) from the token stream.
//   declare: we are inside a DECLARE/IS/AS section, so a bare BEGIN ends it.
//   lst:     we are inside a parenthesized list, so IF/THEN etc. do not
//            terminate the statement.
// Returns a Statement, a Block (for BEGIN..END / LOOP / DECLARE bodies,
// built recursively), or a List when a closing ')' is met. `first` tracks
// the first significant keyword of the statement to drive the grammar;
// `realfirst` remembers it across the IS NULL special case.
toSQLParse::statement toSQLParse::parseStatement(tokenizer &tokens, bool declare, bool lst)
{
	statement ret(statement::Statement);

//	 toSyntaxAnalyzer &syntax = tokens.analyzer();

	QString first;
	QString realfirst;
	bool nokey = false;
	bool block = false;
	for (QString token = tokens.getToken(true, true);
			!token.isNull();
			token = tokens.getToken(true, true))
	{
		QString upp = token.toUpper();

		// Remember the first non-comment token of the statement.
		if (first.isNull() && !token.startsWith(("/*")) && !token.startsWith("--") && !token.startsWith("//"))
			realfirst = first = upp;

#ifdef TOPARSE_DEBUG
        printf("%s (%d)\n", (const char*)token.toUtf8(), tokens.line());
        printf("    %s - %s\n", (const char*)first.toUtf8(), (const char*)realfirst.toUtf8());
#endif

// SQLITEMAN
		 if (upp == ("PROCEDURE") ||
				 upp == ("FUNCTION") ||
				 upp == ("PACKAGE"))
         {
//              qDebug() << "PROCEDURE";
			 block = true;
         }

		 if (upp == ("SELF"))
         {
//              qDebug() << "SELF";
			 block = false;
         }

        if (upp == "BEGIN" && (first.isNull() || first == "BEGIN"))
        {
//             qDebug() << "plain BEGIN";
            ret.subTokens().insert(ret.subTokens().end(), statement(statement::Keyword, token, tokens.line()));
            nokey = false;            
        }
		// Block openers: recurse, collecting sub-statements until END.
		else if (first != ("END") && ((first == ("IF") && upp == ("THEN")) ||
								  upp == ("LOOP") ||
								  upp == ("DO") ||
								  (/*syntax.declareBlock()*/true && upp == ("DECLARE")) ||
								  (block && upp == ("AS")) ||
								  (block && upp == ("IS")) ||
								  ((!declare || block) && upp == ("BEGIN"))))
		 {
//              qDebug() << "first != (\"END\") ";
			 block = false;
			 statement blk(statement::Block);
			 ret.subTokens().insert(ret.subTokens().end(), statement(statement::Keyword, token, tokens.line()));
			 blk.subTokens().insert(blk.subTokens().end(), ret);
			 statement cur(statement::Statement);
			 bool dcl = (upp == ("DECLARE") || upp == ("IS") || upp == ("AS"));
			 do
			 {
				 cur = parseStatement(tokens, dcl, false);
				 if (cur.Type == statement::List)
				 {
					 QMessageBox::warning(QApplication::activeWindow(), "Sqliteman",
										  "toSQLparse: Unbalanced parenthesis (Too many ')')");
				 }
				 blk.subTokens().insert(blk.subTokens().end(), cur);
				 if (cur.subTokens().begin() != cur.subTokens().end() &&
						 (*(cur.subTokens().begin())).String.toUpper() == ("BEGIN"))
					 dcl = false;
			 }
			 while (cur.subTokens().begin() != cur.subTokens().end() &&
					 (*cur.subTokens().begin()).String.toUpper() != ("END"));
			 return blk;
		 }
		// Keywords that terminate the current statement (unless in a list).
		 else if (((first == "IF" && upp == "THEN") ||
				   (first == "WHEN" && upp == "THEN") ||
				   (first == "ELSIF" && upp == "THEN") ||
				   upp == ("BEGIN") ||
				   upp == ("EXCEPTION") ||
				   first == ("ELSE")) && !lst)
		 {
//              qDebug() << "else if first==IF";
			 ret.subTokens().insert(ret.subTokens().end(), statement(statement::Keyword, token, tokens.line()));
			 return ret;
		 }
		// Line-oriented commands: consume tokens to the end of the line.
		 else if (first == ("ASSIGN") ||
				  first == ("SET") ||
				  first == ("PROMPT") ||
				  first == ("COLUMN") ||
				  first == ("SPOOL") ||
				  first == ("STORE") ||
				  first == ("REMARK") ||
				  first == ("REM"))
		 {
//              qDebug() << "ASSIGN";
			 ret.subTokens().insert(ret.subTokens().end(), statement(statement::Keyword, token, tokens.line()));
			 int line = tokens.line();
			 int offset = tokens.offset();
			 for (QString tmp = tokens.getToken(true, true);line == tokens.line();tmp = tokens.getToken(true, true))
				 ret.subTokens().insert(ret.subTokens().end(), statement(statement::Token, tmp, line));
			 tokens.setLine(line);
			 tokens.setOffset(offset);
			 tokens.remaining(true);
			 return ret;
		 }
		// Reserved words (except the listed operators) are skipped here;
		// they restart keyword detection unless preceded by '.'.
		 else if (upp == (",") ||
// 		if (upp == (",") ||
//				  (syntax.reservedWord(upp) &&
				  (isKeyword(upp) &&
				  upp != ("NOT") &&
				  upp != ("IS") &&
				  upp != ("LIKE") &&
				  upp != ("IN") &&
				  upp != ("ELSE") &&
				  upp != ("ELSIF") &&
				  upp != ("END") &&
				  upp != ("BETWEEN") &&
				  upp != ("ASC") &&
				  upp != ("DESC") &&
				  upp != ("NULL")) && !nokey)
		{

		}
		// '(' opens a nested list, parsed recursively.
		else if (upp == ("("))
		{
//             qDebug() << "start (";
			ret.subTokens().insert(ret.subTokens().end(), statement(statement::Token, token, tokens.line()));
			statement lst = parseStatement(tokens, false, true);
			statement t = toPop(lst.subTokens());
			if (lst.Type != statement::List)
			{
				QMessageBox::warning(QApplication::activeWindow(), "Sqliteman",
									 "toSQLparse: Unbalanced parenthesis (Too many '(')");
			}
			nokey = false;
			if (first == ("CREATE") && !block)
			{
				statement end = parseStatement(tokens, false, true);
				statement blk(statement::Block);
				blk.subTokens().insert(blk.subTokens().end(), ret);
				blk.subTokens().insert(blk.subTokens().end(), lst);
				end.subTokens().insert(end.subTokens().begin(), t);
				blk.subTokens().insert(blk.subTokens().end(), end);
				return blk;
			}
			else
			{
				ret.subTokens().insert(ret.subTokens().end(), lst);
				ret.subTokens().insert(ret.subTokens().end(), t);
			}
		}
		else if (upp == (")"))
		{
//             qDebug() << "end )";
			ret.Type = statement::List;
			ret.subTokens().insert(ret.subTokens().end(), statement(statement::Token, token, tokens.line()));
			return ret;
		}
		else if (upp == (";"))
		{
//             qDebug() << "bodkociarka";
			ret.subTokens().insert(ret.subTokens().end(), statement(statement::Token, token, tokens.line()));
			return ret;
		}
		// Optimizer hints are normalized to /*+ ... */ form and kept.
		else if (upp.startsWith(("/*+")) || upp.startsWith(("--+")))
		{
//             qDebug() << "hint --+";
			QString com = token;
			if (com.startsWith(("--+")))
				com = ("/*+ ") + com.mid(3) + (" */");
			ret.subTokens().insert(ret.subTokens().end(), statement(statement::Token,
								   com.simplified(), tokens.line()));
		}
		// Plain comments attach to the statement or its last sub-token.
		else if (upp.startsWith(("/*")) || upp.startsWith(("--")) || upp.startsWith("//"))
		{
//             qDebug() << "comment";
			if ( ret.subTokens().empty() )
			{
				if (ret.Comment.isNull())
					ret.Comment = token;
				else
					ret.Comment += ("\n") + token;
			}
			else
			{
				QString &com = (*ret.subTokens().rbegin()).Comment;
				if (com.isEmpty())
					com = token;
				else
					com += ("\n") + token;
			}
		}
		else
		{
//             qDebug() << "plain else" <<token<< tokens.line();
			ret.subTokens().insert(ret.subTokens().end(), statement(statement::Token, token, tokens.line()));
			nokey = (token == ("."));
		}
		if (upp == ("AS") || upp == ("IS"))
        {
//             qDebug() << "setting first: " << upp;
			first = upp;
        }
		else if (first == ("IS") && upp == ("NULL"))
        {
//             qDebug() << "setting first (real): " << realfirst;
			first = realfirst;
        }
	}
	return ret;
}