/* Report a syntax error at the current token.
 *
 * If nothing has been matched yet, reports "no code found". Otherwise,
 * either reports a simple "expected X after Y" message, or — when
 * terminating is non-NULL — builds a two-location error frame pointing at
 * both the unterminated construct (ast) and the current position.
 */
static void syntax_error(parser_t* parser, const char* expected, ast_t* ast,
  const char* terminating)
{
  assert(parser != NULL);
  assert(expected != NULL);
  assert(parser->token != NULL);

  // Nothing matched at all: the source contained no parseable code.
  if(parser->last_matched == NULL)
  {
    error(parser->source, token_line_number(parser->token),
      token_line_position(parser->token), "syntax error: no code found");
    return;
  }

  // Simple case: report what we expected after the last matched item.
  if(terminating == NULL)
  {
    error(parser->source, token_line_number(parser->token),
      token_line_position(parser->token),
      "syntax error: expected %s after %s", expected, parser->last_matched);
    return;
  }

  // Unterminated construct: report where it started and where we are now.
  assert(ast != NULL);
  errorframe_t frame = NULL;
  ast_error_frame(&frame, ast, "syntax error: unterminated %s", terminating);
  errorframe(&frame, parser->source, token_line_number(parser->token),
    token_line_position(parser->token),
    "expected terminating %s before here", expected);
  errorframe_report(&frame);
}
/* Continue a previously started error report with a message located at the
 * given AST node's token.
 */
void ast_error_continue(errors_t* errors, ast_t* ast, const char* fmt, ...)
{
  // Match the precondition asserts used by ast_error_frame() so a NULL
  // argument fails fast here rather than deep inside the error machinery
  // (this function dereferences ast->t unconditionally).
  assert(errors != NULL);
  assert(ast != NULL);
  assert(fmt != NULL);

  va_list ap;
  va_start(ap, fmt);
  errorv_continue(errors, token_source(ast->t), token_line_number(ast->t),
    token_line_position(ast->t), fmt, ap);
  va_end(ap);
}
/* Add a message located at the given AST node's token to an error frame. */
void ast_error_frame(errorframe_t* frame, ast_t* ast, const char* fmt, ...)
{
  assert(frame != NULL);
  assert(ast != NULL);
  assert(fmt != NULL);

  token_t* tok = ast->t;

  va_list args;
  va_start(args, fmt);
  errorframev(frame, token_source(tok), token_line_number(tok),
    token_line_position(tok), fmt, args);
  va_end(args);
}
/* Tidy up a successfully parsed rule.
 *
 * Processes any deferred AST node, applies scoping if requested, and — when
 * the rule has a restart point — verifies that the next token is in the
 * restart set. On a restart-set mismatch the built AST is freed, an error is
 * reported and the parser is marked as failed.
 *
 * NOTE(review): the previous header comment described `rule_set` and
 * `out_found` parameters that do not exist here; it appears to have been
 * copied from another function and has been corrected.
 *
 * Args:
 *   parser: parser state, must not be NULL.
 *   state: state of the rule just completed, must not be NULL.
 *
 * Returns:
 *   AST created, NULL for none (including restart-check failure).
 */
ast_t* parse_rule_complete(parser_t* parser, rule_state_t* state)
{
  assert(parser != NULL);
  assert(state != NULL);

  process_deferred_ast(parser, state);

  if(state->scope && state->ast != NULL)
    ast_scope(state->ast);

  if(trace_enable)
    printf("Rule %s: Complete\n", state->fn_name);

  if(state->restart == NULL)
    return state->ast;

  // We have a restart point, check next token is legal
  token_id id = current_token_id(parser);

  if(trace_enable)
    printf("Rule %s: Check restart set for next token %s\n", state->fn_name,
      token_print(parser->token));

  for(const token_id* p = state->restart; *p != TK_NONE; p++)
  {
    if(*p == id)
    {
      // Legal token found
      if(trace_enable)
        printf("Rule %s: Restart check successful\n", state->fn_name);

      return state->ast;
    }
  }

  // Next token is not in restart set, error
  if(trace_enable)
    printf("Rule %s: Restart check error\n", state->fn_name);

  assert(parser->token != NULL);
  error(parser->source, token_line_number(parser->token),
    token_line_position(parser->token),
    "syntax error: unexpected token %s after %s", token_print(parser->token),
    state->desc);

  ast_free(state->ast);
  parser->failed = true;
  ditch_restart(parser, state);
  return NULL;
}
// Add an AST node for the specified token, which may be deferred void add_deferrable_ast(parser_t* parser, rule_state_t* state, token_id id) { assert(parser->token != NULL); if(!state->matched && state->ast == NULL && !state->deferred) { // This is the first AST node, defer creation state->deferred = true; state->deferred_id = id; state->line = token_line_number(parser->token); state->pos = token_line_position(parser->token); return; } add_ast(parser, state, ast_new(parser->token, id), default_builder); }
/* Pull the next token from the lexer into the parser, optionally freeing
 * the token it replaces.
 */
static void fetch_next_lexer_token(parser_t* parser, bool free_prev_token)
{
  token_t* prev = parser->token;
  token_t* next = lexer_next(parser->lexer);

  if(prev != NULL)
  {
    parser->last_token_line = token_line_number(prev);

    if(token_get_id(next) == TK_EOF)
    {
      // Use location of last token for EOF to get better error reporting
      token_set_pos(next, token_source(prev), token_line_number(prev),
        token_line_position(prev));
    }
  }

  if(free_prev_token)
    token_free(prev);

  parser->token = next;
}
// Get the next token ready for when we need it static void get_next_token(build_parser_t* builder) { assert(builder != NULL); if(builder->have_token) return; if(builder->token != NULL) token_free(builder->token); builder->token = lexer_next(builder->lexer); assert(builder->token != NULL); ast_token_id id; switch(token_get_id(builder->token)) { case TK_LPAREN_NEW: case TK_LPAREN: id = AT_LPAREN; break; case TK_RPAREN: id = AT_RPAREN; break; case TK_LSQUARE_NEW: case TK_LSQUARE: id = AT_LSQUARE; break; case TK_RSQUARE: id = AT_RSQUARE; break; case TK_LBRACE: id = AT_LBRACE; break; case TK_RBRACE: id = AT_RBRACE; break; case TK_EOF: id = AT_EOF; break; case TK_LEX_ERROR: id = AT_ERROR; break; case TK_ID: id = AT_ID; break; case TK_STRING: id = AT_STRING; break; default: id = AT_TOKEN; break; } //printf("Got token %s %d -> %d\n", token_print(builder->token), // token_get_id(builder->token), id); builder->id = id; builder->have_token = true; builder->line = token_line_number(builder->token); builder->pos = token_line_position(builder->token); }
/* Report the line position of the given AST node's token. */
size_t ast_pos(ast_t* ast)
{
  assert(ast != NULL);

  return token_line_position(ast->t);
}