/**
 * Determines whether the token stream, in its present state, can be
 * converted into a case expression.
 *
 * @param parser   -- Parser that is used to parse out whole elements.
 * @param stream   -- The token stream.
 * @param elements -- The element output; the first element is the condition,
 *                    every other element should be a case element container,
 *                    and the last element may be a default element.
 *
 * @return true if the stream of tokens can be accepted, false otherwise.
 */
bool Case_::CanAccept_( Parser const& parser
                      , TokenStream& stream
                      , std::vector<strine::Element>& elements ) const {
    bool can_accept = MatchToken_(stream, "(") && MatchToken_(stream, "case");

    if (can_accept && stream.HasTokens()) {
        Element current_element;
        can_accept = parser.ParseAny(stream, current_element);
        elements.push_back(current_element);
    } else {
        can_accept = false;
    }

    while (can_accept && stream.HasTokens()) {
        stream.Push();

        bool is_end = MatchToken_(stream, ")");
        if (is_end) {
            stream.Consume();
            break;
        }

        // Reset the stream, attempt to parse a list.
        stream.Rollback();

        // Right now I'm not sure what symbol to use for an "ANY" -- it needs
        // to be something self-explanatory.
        bool is_almost_end = MatchToken_(stream, "->");
        if (is_almost_end) {
            stream.Consume();
            assert(0);
            break;
        }
        stream.Rollback();

        // Typically we'll have a condition.
        can_accept = MatchToken_(stream, "[")
                  && CanAcceptListRemainder_(parser, stream, elements);
    }

    return can_accept;
}
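/*
 * For reference, the token shape this routine appears to accept, inferred
 * from the checks above (CanAcceptListRemainder_ is defined elsewhere, so
 * the contents of each "[ ... ]" clause are only sketched here):
 *
 *   ( case <condition> [ ... ] [ ... ] ... )
 *
 * The "->" branch is reserved for the default/"ANY" clause and isn't
 * finished yet -- hence the assert(0) above.
 */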
/**
 * Check whether the token stream has reached the end of the expression.
 * This only peeks: the stream is rolled back to where it started.
 *
 * @param stream -- Stream that has been prepped.
 *
 * @return true if the next token is the closing ")", false otherwise.
 */
bool Builtin_::ParseEnd_(TokenStream& stream) const {
    bool is_end = false;

    if (stream.HasTokens()) {
        stream.Push();

        Element current_element = stream.NextToken();
        is_end = (current_element.Type() == Types::TOKEN
               && current_element.ToString() == ")");

        stream.Rollback();
    }

    return is_end;
}
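/*
 * The Push()/Rollback()/Consume() calls used throughout this file follow a
 * save-point protocol: Push() records the current read position, Rollback()
 * restores it (used when a speculative match fails), and Consume() commits
 * the tokens read since the matching Push().  The real TokenStream is not
 * shown in this file; the sketch below is a hypothetical, minimal model of
 * that protocol (all names and layout here are assumptions, not the
 * project's implementation), just to illustrate how a peek such as
 * ParseEnd_ leaves the stream untouched.
 */
#include <cassert>
#include <cstddef>
#include <string>
#include <utility>
#include <vector>

class SketchTokenStream {
public:
    explicit SketchTokenStream(std::vector<std::string> tokens)
        : tokens_(std::move(tokens)), position_(0) {}

    bool HasTokens() const { return position_ < tokens_.size(); }

    // Read and advance past the next token.
    std::string NextToken() { return tokens_[position_++]; }

    // Save the current position so a failed speculative parse can back out.
    void Push() { markers_.push_back(position_); }

    // Failure path: restore the position saved by the matching Push().
    void Rollback() { position_ = markers_.back(); markers_.pop_back(); }

    // Success path: keep the current position and drop the saved marker.
    void Consume() { markers_.pop_back(); }

private:
    std::vector<std::string> tokens_;
    std::vector<std::size_t> markers_;
    std::size_t position_;
};

// Peeking at the next token, in the style of Builtin_::ParseEnd_.
inline bool PeeksAtClose(SketchTokenStream& stream) {
    bool is_end = false;
    if (stream.HasTokens()) {
        stream.Push();
        is_end = (stream.NextToken() == ")");
        stream.Rollback();               // The peek never moves the stream.
    }
    return is_end;
}

int main() {
    SketchTokenStream stream({")", "x"});
    assert(PeeksAtClose(stream));
    assert(stream.NextToken() == ")");   // Still the first token: untouched.
    return 0;
}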
/**
 * Parse out any element possible EXCEPT FOR TOKENS!
 *
 * @param in      -- The token stream to parse from.
 * @param element -- The element to populate if possible.
 *
 * @return true if something was parsed, false otherwise.
 */
bool Parser::ParseAny( TokenStream& in, Element& element ) const {
    bool success = false;

    if (in.HasTokens()) {
        in.Push();

        // First rule of thumb: if the first element in the stream is a
        // non-token, then return that element since it should be the first.
        strine::Element current = in.NextToken();
        if (current.Type() != Types::TOKEN) {
            element = current;
            success = true;
            // Commit the marker for the element we just accepted.
            in.Consume();
        } else {
            in.Rollback();

            strine::Element element_out;

            // Go through all the parsables in the container, and try to
            // parse them.  The first one that can parse the stream wins.
            size_t const PARSABLES_SIZE = this->parsables.size();
            for (size_t i = 0; i < PARSABLES_SIZE; ++i) {
                std::shared_ptr<ParsableElement_> parsable = this->parsables[i];

                success = parsable->Process( *this
                                           , in
                                           , element_out );
                if (success) {
                    element = element_out;
                    break;
                }
            }
        }
    }

    return success;
}
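/*
 * Note on dispatch order: ParseAny first accepts any non-token element that
 * is already sitting on the stream, and otherwise tries the registered
 * parsables in the order they appear in this->parsables, taking the first
 * Process() that succeeds.  Each parsable (presumably including Builtin_,
 * Function_ and Case_ in this file) is expected to leave the stream where
 * it found it when it fails, which is what the Push()/Rollback() pairs in
 * the CanAccept_ routines are for.
 */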
/**
 * Determines whether the token stream, in its present state, can be
 * converted into a function expression.
 *
 * @param parser   -- Parser that is used to parse out whole elements.
 * @param stream   -- The token stream.
 * @param elements -- The element output; the argument variables followed by
 *                    the function body.
 *
 * @return true if the stream of tokens can be accepted, false otherwise.
 */
bool Function_::CanAccept_( Parser const& parser
                          , TokenStream& stream
                          , std::vector<strine::Element>& elements ) const {
    bool can_accept = false;

    // (
    if (stream.HasTokens()) {
        Element current_element = stream.NextToken();
        can_accept = (current_element.Type() == Types::TOKEN)
                  && (current_element.ToString() == "(");
    }

    // function
    if (can_accept && stream.HasTokens()) {
        Element current_element = stream.NextToken();
        can_accept = (current_element.Type() == Types::TOKEN)
                  && (current_element.ToString() == "function");
    }

    // (
    if (can_accept && stream.HasTokens()) {
        Element current_element = stream.NextToken();
        can_accept = (current_element.Type() == Types::TOKEN)
                  && (current_element.ToString() == "(");
    }

    // args
    while (can_accept && stream.HasTokens()) {
        strine::Element current_element = stream.NextToken();

        if (current_element.Type() == Types::TOKEN) {
            if (current_element.ToString() == ")") {
                break;
            } else {
                can_accept = false;
            }
        } else if (current_element.Type() == Types::VARIABLE) {
            elements.push_back(current_element);
        } else {
            can_accept = false;
        }
    }

    // body, then the closing )
    if (can_accept) {
        stream.Push();

        strine::Element new_body;
        can_accept = parser.ParseAny(stream, new_body);

        if (can_accept && stream.HasTokens()) {
            stream.Consume();

            Element current_element = stream.NextToken();
            can_accept = (current_element.Type() == Types::TOKEN)
                      && (current_element.ToString() == ")");

            elements.push_back(new_body);
        } else {
            stream.Rollback();
            can_accept = false;
        }
    }

    return can_accept;
}
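/*
 * For reference, the token shape this routine appears to accept, inferred
 * from the checks above: a parenthesised list of zero or more variables,
 * followed by a single body element parsed with ParseAny:
 *
 *   ( function ( <variable> ... ) <body> )
 */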
/**
 * One of the key methods of the parser: it takes a general description (a
 * list of rules) of what should be on the token stream and attempts to
 * match the stream against it.
 *
 * @param in       -- The input token stream.
 * @param rules    -- The rules the stream is matched against.
 * @param elements -- The elements parsed out while matching.
 *
 * @return true if the set of rules can be parsed, false otherwise.
 */
bool Parser::CanParse( TokenStream& in
                     , std::vector<ParseRule> const& rules
                     , std::vector<Element>& elements ) const {
    bool success = true;

    elements.clear();

    size_t rule_index = 0;
    while (in.HasTokens() && rule_index < rules.size()) {
        // Grab the current rule.
        ParseRule const& rule = rules[rule_index];

        // Check to see if the current rule states that we expect a token;
        // if this is the case, then try to match the next element as a
        // token.
        if (rule.Type() == strine::Types::TOKEN) {
            strine::Element current_element = in.NextToken();

            std::string const rule_token(rule.Token());
            std::string const element_string(current_element.ToString());

            success = (rule_token == element_string);
        }
        // If the rule states that the type should be ANY, try to parse
        // out any element by using ParseAny.
        else if (rule.Type() == ParseRule::ANY) {
            strine::Element current_element;

            // Push a marker onto the token stream.
            in.Push();

            success = ParseAny( in, current_element );
            if (success) {
                elements.push_back(current_element);
                // Commit the marker for the element we just accepted.
                in.Consume();
            } else {
                in.Rollback();
            }
        }
        // If the rule.Type() hasn't been specified, then go ahead and try
        // to match up the types.
        else {
            strine::Element current_element = in.NextToken();

            if (rule.Type() == current_element.Type()) {
                success = true;
                elements.push_back(current_element);
            } else {
                success = false;
            }
        }

        if (false == success) {
            break;
        }
        rule_index += 1;
    }

    // If the stream ran out before every rule was satisfied, the match fails.
    if (rule_index < rules.size()) {
        success = false;
    }

    return success;
}
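/*
 * A hypothetical, self-contained sketch of the matching strategy CanParse
 * uses: walk the rules and the stream in lock-step, fail fast on the first
 * rule that cannot be satisfied, and reject the input if the stream runs
 * dry before every rule has been matched.  The types below (SketchRule and
 * plain string "tokens") are stand-ins invented for illustration; they are
 * not the project's ParseRule/TokenStream classes.
 */
#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

struct SketchRule {
    bool any;            // Matches any single token (like ParseRule::ANY).
    std::string token;   // Exact token to match when `any` is false.
};

inline bool CanMatch(std::vector<std::string> const& tokens,
                     std::vector<SketchRule> const& rules,
                     std::vector<std::string>& captured) {
    bool success = true;
    captured.clear();

    std::size_t rule_index = 0;
    std::size_t position = 0;
    while (position < tokens.size() && rule_index < rules.size()) {
        SketchRule const& rule = rules[rule_index];
        std::string const& current = tokens[position++];

        if (rule.any) {
            captured.push_back(current);       // ANY rules capture the element.
        } else {
            success = (rule.token == current); // Literal rules just match.
        }

        if (!success) {
            break;
        }
        rule_index += 1;
    }

    // Ran out of tokens before satisfying every rule.
    if (rule_index < rules.size()) {
        success = false;
    }
    return success;
}

int main() {
    std::vector<SketchRule> const rules = {
        {false, "("}, {false, "builtin"}, {true, ""}, {true, ""}, {false, ")"}
    };
    std::vector<std::string> captured;

    assert(CanMatch({"(", "builtin", "\"print\"", "x", ")"}, rules, captured));
    assert(captured.size() == 2 && captured[0] == "\"print\"" && captured[1] == "x");

    assert(!CanMatch({"(", "builtin", "\"print\""}, rules, captured));  // Too short.
    return 0;
}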
/**
 * Determines whether the token stream, in its present state, can be
 * converted into a builtin expression.
 *
 * @param parser   -- Parser that is used to parse out whole elements.
 * @param stream   -- The token stream.
 * @param elements -- The element output; the builtin's name followed by its
 *                    optional argument.
 *
 * @return true if the stream of tokens can be accepted, false otherwise.
 */
bool Builtin_::CanAccept_( Parser const& parser
                         , TokenStream& stream
                         , std::vector<Element>& elements ) const {
    bool can_accept = false;

    // (
    if (stream.HasTokens()) {
        Element current_element = stream.NextToken();
        can_accept = (current_element.Type() == Types::TOKEN)
                  && (current_element.ToString() == "(");
    }
    can_accept = can_accept && stream.HasTokens();

    // builtin
    if (can_accept) {
        Element current_element = stream.NextToken();
        can_accept = (current_element.Type() == Types::TOKEN)
                  && (current_element.ToString() == "builtin");
    }
    can_accept = can_accept && stream.HasTokens();

    // Symbol
    if (can_accept) {
        Element current_element = stream.NextToken();
        can_accept = (current_element.Type() == Types::STRING);
        elements.push_back(current_element);
    }
    can_accept = can_accept && stream.HasTokens();

    // Optional argument, then the closing )
    if (can_accept) {
        if (ParseEnd_(stream)) {
            stream.NextToken();
        } else {
            Element current_element;

            stream.Push();
            can_accept = parser.ParseAny( stream, current_element );
            if (false == can_accept) {
                stream.Rollback();
            } else {
                elements.push_back(current_element);
                stream.Consume();
            }

            can_accept = ParseEnd_(stream);
            if (can_accept) {
                stream.NextToken();
            }
        }
    }

    return can_accept;
}
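/*
 * For reference, the token shape this routine appears to accept, inferred
 * from the checks above: a string naming the builtin, optionally followed
 * by a single argument parsed with ParseAny:
 *
 *   ( builtin <string> )
 *   ( builtin <string> <argument> )
 */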