/* Returns 1 if the given parse-tree node is a valid pattern, 0 otherwise.
 * Applications and tuples are patterns exactly when every subtree is a
 * pattern (checked recursively); text, variable, and literal tokens
 * (string/bool/int/double) are patterns; every other node kind is not. */
int isPattern(Node *pattern) {
    switch (getNodeType(pattern)) {
        case N_APPLICATION:
        case N_TUPLE: {
            /* Composite node: all children must themselves be patterns. */
            int k;
            for (k = 0; k < pattern->degree; k++) {
                if (!isPattern(pattern->subtrees[k])) {
                    return 0;
                }
            }
            return 1;
        }
        case TK_TEXT:
        case TK_VAR:
        case TK_STRING:
        case TK_BOOL:
        case TK_INT:
        case TK_DOUBLE:
            /* Leaf tokens are always valid patterns. */
            return 1;
        default:
            return 0;
    }
}
//////////////////////////////////////////////////////////////////////////////// // When a Lexer object is constructed with a string, this method walks through // the stream of low-level tokens. bool Lexer::token (std::string& token, Lexer::Type& type) { // Eat white space. while (isWhitespace (_text[_cursor])) utf8_next_char (_text, _cursor); // Terminate at EOS. if (isEOS ()) return false; // The sequence is specific, and must follow these rules: // - date < duration < uuid < identifier // - dom < uuid // - uuid < hex < number // - url < pair < identifier // - hex < number // - separator < tag < operator // - path < substitution < pattern // - set < number // - word last if (isString (token, type, "'\"") || isDate (token, type) || isDuration (token, type) || isURL (token, type) || isPair (token, type) || isUUID (token, type, true) || isSet (token, type) || isDOM (token, type) || isHexNumber (token, type) || isNumber (token, type) || isSeparator (token, type) || isTag (token, type) || isPath (token, type) || isSubstitution (token, type) || isPattern (token, type) || isOperator (token, type) || isIdentifier (token, type) || isWord (token, type)) return true; return false; }
bool FileInfo::isPattern(const QString &str) { return isPattern(QStringRef(&str)); }
// Accessor for the type-variable list stored in the pattern payload.
// Precondition (asserted): this TypeVar currently holds a pattern.
const Node<TypeVarList>& TypeVar::typeVarList() const
{
    assert(isPattern());
    return patternVar_.typeVarList;
}
// Accessor for the type stored in the pattern payload.
// Precondition (asserted): this TypeVar currently holds a pattern.
const Node<Type>& TypeVar::patternType() const
{
    assert(isPattern());
    return patternVar_.type;
}