// Convenience overload of regex_split() (old Boost.Regex API taking
// reg_expression and a plain unsigned flags word).
// NOTE(review): the template header (template <class OutputIterator, class
// charT, ...>) is not visible in this chunk; charT/Traits1/Alloc1 etc. are
// its parameters. Presumably splits 's' on matches of 'e' and writes the
// resulting fields through 'out' — confirm against the five-argument
// overload this delegates to.
inline std::size_t regex_split(OutputIterator out,
                   std::basic_string<charT, Traits1, Alloc1>& s, 
                   const reg_expression<charT, Traits2, Alloc2>& e,
                   unsigned flags = match_default)
{
   // Forward to the full overload with no cap on the number of fields
   // produced (UINT_MAX == "split everything").
   return regex_split(out, s, e, flags, UINT_MAX);
}
Example #2
0
// Convenience overload of regex_split() (newer Boost.Regex API taking
// basic_regex and match_flag_type).
// NOTE(review): the template header (template <class OutputIterator, class
// charT, ...>) is not visible in this chunk; charT/Traits1/Alloc1 etc. are
// its parameters. Presumably splits 's' on matches of 'e' and writes the
// resulting fields through 'out' — confirm against the five-argument
// overload this delegates to.
inline std::size_t regex_split(OutputIterator out,
                   std::basic_string<charT, Traits1, Alloc1>& s, 
                   const basic_regex<charT, Traits2>& e,
                   match_flag_type flags = match_default)
{
   // Forward to the full overload with no cap on the number of fields
   // produced (UINT_MAX == "split everything").
   return regex_split(out, s, e, flags, UINT_MAX);
}
Example #3
0
/**
 * Test the RegEx tokenizer.
 * line - REGEX/,token
 *
 * Expected input format: "REGEX/,token", with an optional single flag
 * letter between the '/' and the ',' (currently only 'M', which sets
 * MARK_TOKENS): "REGEX/M,token".
 *
 * Returns the result of regex_split() on success, or a nonzero error
 * code on a malformed line: 101 (no '/'), 102 (no ','), 103 (no token).
 */
int regex_tokenizer_test(Dictionary dict, const char *line)
{
	int linelen = strlen(line);
	char *regex = alloca(linelen+1);
	char *token = alloca(linelen+1); /* FIX: +1 so a worst-case copy always has room for the NUL */
	char *regex_end;
	int tokenizer_flags = 0; /* FIX: was read uninitialized below when no flag letter is given */

	strcpy(regex, line);
	/* FIX: strchr() replaces legacy index(), which was removed in POSIX.1-2008. */
	regex_end = strchr(regex, '/');
	if (NULL == regex_end)
	{
		printf("Missing terminating '/' in regex.\nUsage: /REGEX/,token\n");
		return 101;
	}
	*regex_end = '\0';  /* terminate the regex portion in place */
	regex_end++;
	/* FIXME: Add iterations for more flags if needed. */
	switch (*regex_end)
	{
		case 'M':
			tokenizer_flags = MARK_TOKENS;
			regex_end++;
			break;
		default:
			/* No flag letter: tokenizer_flags stays 0. */
			break;
	}
	if (',' != *regex_end)
	{
		printf("Missing terminating ',' after regex end.\nUsage: /REGEX/,token\n");
		return 102;
	}
	strcpy(token, regex_end + 1);
	if ('\0' == token[0])
	{
		printf("Missing token\nUsage: /REGEX/,token\n");
		return 103;
	}

	return regex_split(regex, tokenizer_flags, token, dict);
}
Example #4
0
// Simplest convenience overload of regex_split(): no expression and no
// flags supplied by the caller.
// NOTE(review): the template header (template <class OutputIterator, class
// charT, ...>) is not visible in this chunk; charT/Traits1/Alloc1 are its
// parameters. Presumably splits 's' on the library's default expression
// (whitespace) — confirm against re_detail::get_default_expression().
inline std::size_t regex_split(OutputIterator out,
                   std::basic_string<charT, Traits1, Alloc1>& s)
{
   // Forward to the full overload: default expression for this charT,
   // default match flags, and no cap on the number of fields (UINT_MAX).
   return regex_split(out, s, re_detail::get_default_expression(charT(0)), match_default, UINT_MAX);
}