// Returns a value between 0 and 255, or -1 on end of input. static int StreamGet(Stream* s) { if(StreamEnd(s)) { return -1; } if(s->type == ST_STRING) { return s->string[s->stringPos++]; } else if(s->type == ST_FILE) { return fgetc(s->file); } abort(); }
bool irc::tokenstream::GetToken(std::string &token)
{
	// Remember whether any tokens have been consumed yet: the very first
	// token of a line is never treated as the trailing parameter, even if
	// it begins with ':'.
	const bool at_start = (pos == 0);

	if (!spacesepstream::GetToken(token))
		return false;

	/* A leading ':' on any token after the first introduces the final
	 * parameter, which runs to the end of the line, spaces included. */
	if (!at_start && token[0] == ':')
	{
		token.erase(token.begin());

		// Glue the remainder of the line back on, restoring the space the
		// tokenizer consumed between this token and the rest.
		if (!StreamEnd())
		{
			token += ' ';
			token += GetRemaining();
		}

		// Park the cursor past the end so later calls report no more tokens.
		pos = tokens.length() + 1;
	}

	return true;
}
// Returns the next token or NULL on end of input. static const char* TokenizerNext(Tokenizer* tokenizer) { const char ws[] = " \n\r\t\v\b\f"; // 7 const char delims[] = "()[]{}"; // 6 if(StreamEnd(tokenizer->stream)) { return NULL; } unsigned int pos = 0; while(1) { if(tokenizer->c == -1) { if(pos == 0) { return NULL; } goto end; } for(unsigned int i = 0; i < 7; ++i) { if(tokenizer->c == ws[i]) { if(pos == 0) { goto nextChar; } else { tokenizer->c = StreamGet(tokenizer->stream); goto end; } } } for(unsigned int i = 0; i < 6; ++i) { if(tokenizer->c == delims[i]) { if(pos != 0) { goto end; } else { tokenizer->token[pos++] = tokenizer->c; tokenizer->c = StreamGet(tokenizer->stream); goto end; } } } if(pos == tokenizer->tokenSize) { unsigned int newSize = tokenizer->tokenSize * 2; char* newToken = (char*)realloc(tokenizer->token, newSize); if(newToken == NULL) { // TODO: return error here instead. fputs("realloc failed", stderr); abort(); } tokenizer->tokenSize = newSize; tokenizer->token = newToken; } tokenizer->token[pos++] = tokenizer->c; nextChar: tokenizer->c = StreamGet(tokenizer->stream); } end: tokenizer->token[pos] = 0; return tokenizer->token; }