Example #1 (C++): ParserFileReader::read() uses token_type_name() to report which token type was expected when a grammar rule is malformed.
void ParserFileReader::read() {
	// Skip any blank lines before the first rule.
	do read_token(); while(curr_token_type == NEWLINE);
	while(curr_token_type != END) {
		if(curr_token_type != NONTERMINAL) {
			raise_error(
				std::string("expected ")
				+ token_type_name(NONTERMINAL)
				+ " on left side of rule but read "
				+ curr_token_name());
		}
		// Left-hand side of the rule: index the nonterminal symbol.
		curr_rule.left = symbol_indexer.index_symbol(
			curr_token_value, SymbolIndexer::symbol::NONTERMINAL);
		expect_token(ARROW);
		// Right-hand side: start with one empty alternative; each PIPE token adds another.
		curr_rule.right.clear();
		curr_rule.right.push_back(rule::right_type::value_type());
		while(true) {
			read_token();
			if(curr_token_type == NEWLINE) break;
			rule::right_type::value_type &curr_string = curr_rule.right.back();
			switch(curr_token_type) {
			case NONTERMINAL:
			case TERMINAL:
				curr_string.push_back(
					symbol_indexer.index_symbol(
						curr_token_value,
						curr_token_type == NONTERMINAL ?
							SymbolIndexer::symbol::NONTERMINAL
							: SymbolIndexer::symbol::TERMINAL));
				break;
			case PIPE:
				curr_rule.right.push_back(rule::right_type::value_type());
				break;
			default:
				raise_error(
					std::string("expected ")
					+ token_type_name(NONTERMINAL)
					+ " or " + token_type_name(TERMINAL)
					+ " but read " + curr_token_name());
			}
		}
		output.push_back(curr_rule);
		// Skip blank lines between rules.
		do read_token(); while(curr_token_type == NEWLINE);
	}
	if(output.empty()) {
		raise_error("no rules specified");
	}
	symbol_indexer.create_mapping(symbol_info);
}
Example #2 (C): dump_token() prints a token's text, type name, and line number for debugging.
static void
dump_token (scanner_t *scanner, token_t *token)
{
	char *str = token_text_dup (scanner, token);
	
	printf ("token '%s' of type '%s' at line %d\n", str, token_type_name (token->type), token->line);
	free (str);
}
Example #3 (C): test_samples() compares tokenizer output against expected tokens and uses token_type_name() in the failure message when the types differ.
void test_samples() {
	for(size_t i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
		char* code = samples[i].code;
		//printf("test: %s\n", code);
		
		token_list_t tokens = { 0 };
		tokenize(str_from_c(code), &tokens, stderr);
		
		st_check_int(tokens.len, samples[i].tokens_len);
		for(size_t j = 0; j < samples[i].tokens_len; j++) {
			token_p actual_token = &tokens.ptr[j];
			token_p expected_token = &samples[i].tokens_ptr[j];
			
			st_check_msg(actual_token->type == expected_token->type, "got %s, expected %s",
				token_type_name(actual_token->type), token_type_name(expected_token->type));
			st_check_int(actual_token->source.len, expected_token->source.len);
			st_check_strn(actual_token->source.ptr, expected_token->source.ptr, expected_token->source.len);
			if (actual_token->type == T_INT) {
				st_check(actual_token->int_val == expected_token->int_val);
			} else if (actual_token->type == T_ERROR) {
				// Check that an error message is present; the exact content doesn't matter and will change anyway.
				st_check(actual_token->str_val.len > 0);
				st_check_not_null(actual_token->str_val.ptr);
			} else if (expected_token->str_val.ptr != NULL) {
				st_check_int(actual_token->str_val.len, expected_token->str_val.len);
				st_check_strn(actual_token->str_val.ptr, expected_token->str_val.ptr, expected_token->str_val.len);
			} else {
				st_check_null(actual_token->str_val.ptr);
				st_check_int(actual_token->str_val.len, 0);
			}
		}
		
		for(size_t j = 0; j < tokens.len; j++)
			token_cleanup(&tokens.ptr[j]);
		list_destroy(&tokens);
	}
}
示例#4
0
void test_token_type_name() {
	st_check_str( token_type_name(T_COMMENT), "T_COMMENT" );
	st_check_str( token_type_name(T_SL_ASSIGN), "T_SL_ASSIGN" );
	st_check_str( token_type_name(T_EOF), "T_EOF" );
}
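
All four excerpts rely on the same kind of helper: a function that maps a token-type enum value to a human-readable string for diagnostics. Below is a minimal, self-contained sketch of how such a token_type_name() is commonly written, assuming a plain switch over the enum. The enum values are the ones checked in Example #4; everything else (the enum definition, the fallback string, main) is a hypothetical stand-in, not any of these projects' actual code.

#include <stdio.h>

/* Hypothetical token-type enum; only the names tested in Example #4 are real. */
typedef enum {
	T_COMMENT,
	T_SL_ASSIGN,
	T_EOF
} token_type_t;

/* Map each enum value to its identifier so callers can print readable
 * diagnostics, as the examples above do. */
static const char *token_type_name(token_type_t type) {
	switch (type) {
	case T_COMMENT:   return "T_COMMENT";
	case T_SL_ASSIGN: return "T_SL_ASSIGN";
	case T_EOF:       return "T_EOF";
	}
	return "UNKNOWN_TOKEN_TYPE";
}

int main(void) {
	printf("%s\n", token_type_name(T_EOF)); /* prints "T_EOF" */
	return 0;
}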