/// Returns true when position p in s holds the last character of the
/// current token.
///
/// @param in_str  true if the scanner is currently inside a string literal
/// @param s       the input being tokenized
/// @param p       current position; callers must ensure p < s.length()
/// @return        true if the token being scanned ends at p
///
/// Reading s[p+1] is safe even when p is the last index: for std::string,
/// operator[](size()) returns a reference to the null character (C++11).
///
/// NOTE(review): the original expression contained a second clause,
/// `(p == s.length()-1 && in_str && isStringBoundary(s[p]))`, which is
/// logically dead — it is `(cond && A)` OR'd after `A` itself and can never
/// change the result. It has been removed; the expression below is exactly
/// equivalent.
bool isEndOfToken(const bool in_str, const string& s, size_t p) {
    return (in_str && isStringBoundary(s[p]))                 // closing quote ends a string token
        || (!in_str && (isParen(s[p]) || isParen(s[p + 1])))  // parens delimit tokens outside strings
        || blank(in_str, s[p + 1]);                           // a following blank ends the token
}
/// Returns true if c separates or terminates tokens: the NUL terminator,
/// whitespace, a string delimiter, a compound boundary, or punctuation.
bool Lexer::isBoundary(char c) {
    return c == 0
        || isWhitespace(c)
        || isStringBoundary(c)
        || isCompoundBoundary(c)
        || isPunctuation(c);
}
/// Returns true when position p in s holds the first character of a new
/// token. Tokens never start while already inside a string literal.
///
/// @param in_str  true if the scanner is currently inside a string literal
/// @param s       the input being tokenized
/// @param p       current position; callers must ensure p < s.length()
bool isStartOfToken(const bool in_str, const string& s, size_t p) {
    if (in_str) {
        return false;
    }
    const char cur = s[p];
    // An opening quote or a paren always begins a fresh token.
    if (isStringBoundary(cur) || isParen(cur)) {
        return true;
    }
    // A blank cannot start a token.
    if (blank(in_str, cur)) {
        return false;
    }
    // Otherwise a token starts at the beginning of input, or right after a
    // blank or a paren. (The p == 0 guard keeps s[p-1] in bounds.)
    return p == 0 || blank(in_str, s[p - 1]) || isParen(s[p - 1]);
}
/// Records the starting index of a new token and, when the opening character
/// is a string delimiter, flips the tokenizer into string mode.
///
/// @param start  out-param receiving the token's start index
/// @param i      index in the input where the token begins
/// @param c      the character at index i
void Tokenizer::onTokenStart(size_t& start, size_t i, char c) {
    start = i;
    // An opening quote puts us inside a string; the flag is only ever set
    // here, never cleared.
    if (isStringBoundary(c)) {
        in_string = true;
    }
}