void Token::ThrowIfInproperInit() throw(TokenException) {
    switch (m_type) {
    case PrimativeToken::Number:
    case PrimativeToken::Identifier:
        if (!HasValue()) {
            throw TokenException();
        }
        break;
    case PrimativeToken::StringLit:
        // Strings can be empty
        return;
    default:
        if (HasValue()) {
            throw TokenException();
        }
        break;
    }
}
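For illustration, a minimal sketch of how this validation might be exercised. The Token constructors used here (one taking a PrimativeToken kind and a value string, one taking only a kind) are assumptions for the example and do not appear in the original code.

// Hypothetical usage; the constructor signatures are assumed, not taken from the original code.
void validateExamples() {
    Token number(PrimativeToken::Number, "42");        // assumed (kind, value) constructor
    number.ThrowIfInproperInit();                      // OK: a Number carries a value

    Token emptyString(PrimativeToken::StringLit, "");  // string literals may be empty
    emptyString.ThrowIfInproperInit();                 // OK

    Token badNumber(PrimativeToken::Number);           // assumed value-less constructor
    badNumber.ThrowIfInproperInit();                   // throws TokenException: Number without a value
}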
/**
 * Skip any whitespace then look for a token, throwing an exception if no valid token
 * is found.
 *
 * Advance the string iterator past the parsed token on success. On failure the string
 * iterator is in an undefined location.
 */
const Token& Tokeniser::nextToken() {
    if (tokens.size() > tokp) return tokens[tokp++];

    // Don't extend the stream of tokens further than the end of stream.
    if (tokp > 0 && tokens[tokp-1].type == T_EOS) return tokens[tokp-1];

    tokens.push_back(Token());
    Token& tok = tokens[tokp++];

    if (tokenise(inp, inEnd, tok)) return tok;

    throw TokenException("Found illegal character");
}
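The single tokenise() call above is not shown in this excerpt. A plausible sketch of it, assuming it simply skips leading whitespace and then tries each specialised recogniser in turn (the same sequence the expanded version below inlines); the string-iterator parameter type is also an assumption:

// Sketch only: assumes tokenise() composes the specialised recognisers in the
// same order as the expanded nextToken() below; the iterator type is assumed.
bool tokenise(std::string::const_iterator& s, std::string::const_iterator& e, Token& tok) {
    skipWS(s, e);
    return tokeniseEos(s, e, tok)
        || tokeniseIdentifierOrReservedWord(s, e, tok)
        || tokeniseNumeric(s, e, tok)
        || tokeniseString(s, e, tok)
        || tokeniseParens(s, e, tok)
        || tokeniseOperator(s, e, tok);
}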
/**
 * Skip any whitespace then look for a token, throwing an exception if no valid token
 * is found.
 *
 * Advance the string iterator past the parsed token on success. On failure the string
 * iterator is in an undefined location.
 */
const Token& Tokeniser::nextToken() {
    if (tokens.size() > tokp) return tokens[tokp++];

    // Don't extend the stream of tokens further than the end of stream.
    if (tokp > 0 && tokens[tokp-1].type == T_EOS) return tokens[tokp-1];

    skipWS(inp, inEnd);

    tokens.push_back(Token());
    Token& tok = tokens[tokp++];

    if (tokeniseEos(inp, inEnd, tok)) return tok;
    if (tokeniseIdentifierOrReservedWord(inp, inEnd, tok)) return tok;
    if (tokeniseNumeric(inp, inEnd, tok)) return tok;
    if (tokeniseString(inp, inEnd, tok)) return tok;
    if (tokeniseParens(inp, inEnd, tok)) return tok;
    if (tokeniseOperator(inp, inEnd, tok)) return tok;

    throw TokenException("Found illegal character");
}
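A hedged sketch of a loop driving the tokeniser. The Tokeniser constructor taking a pair of string iterators and the tok.val member are assumptions made for the example; only nextToken(), the type member, T_EOS, and TokenException appear in the code above.

// Hypothetical driver; the Tokeniser constructor and the val member are assumed.
#include <iostream>
#include <string>

void dumpTokens(const std::string& input) {
    std::string::const_iterator begin = input.begin();
    std::string::const_iterator end = input.end();
    Tokeniser tokeniser(begin, end);               // assumed iterator-pair constructor
    try {
        for (;;) {
            const Token& tok = tokeniser.nextToken();
            if (tok.type == T_EOS) break;          // stop at the end-of-stream token
            std::cout << tok.val << "\n";          // assumed member holding the token text
        }
    } catch (const TokenException&) {
        std::cerr << "Found illegal character in input\n";
    }
}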