/* * We already know that a type_specifier is following */ static struct type_specifier *parse_type_specifier(struct parser *parser) { union token tok = lexer_next_token(parser->lexer); if (tok.tok_tag == TOK_STRUCT || tok.tok_tag == TOK_UNION) { lexer_put_back(parser->lexer, tok); return parse_struct_or_union_specifier(parser); } else if (tok.tok_tag == TOK_ENUM) { return parse_enum_specifier(parser); } else { struct type_specifier *sp = type_specifier_init(tok.tok_tag); if (tok.tok_tag == TOK_TYPE_NAME) { sp->type_name = tok.id.s; } return sp; } }
static struct dynarr *store_token_until_newline(struct lexer *lexer) { struct dynarr *darr = dynarr_init(); union token tok; while (1) { tok = lexer_next_token(lexer); if (tok.tok_tag == TOK_NEWLINE) { break; } union token *ptok = mallocz(sizeof(*ptok)); *ptok = tok; dynarr_add(darr, ptok); } return darr; }
static struct parameter_type_list *parse_parameter_type_list(struct parser *parser) { struct parameter_type_list *param_type_list = parameter_type_list_init(); struct parameter_declaration *decl = parse_parameter_declaration(parser); dynarr_add(param_type_list->param_decl_list, decl); union token tok; while (1) { tok = lexer_next_token(parser->lexer); if (tok.tok_tag != TOK_COMMA) { lexer_put_back(parser->lexer, tok); break; } tok = lexer_next_token(parser->lexer); if (tok.tok_tag == TOK_ELLIPSIS) { param_type_list->has_ellipsis = 1; break; } lexer_put_back(parser->lexer, tok); decl = parse_parameter_declaration(parser); dynarr_add(param_type_list->param_decl_list, decl); } return param_type_list; }
/*
 * Handle the '#define' preprocessor directive.  Skips whitespace, reads the
 * macro name, then dispatches to the function-like macro definer when a '('
 * immediately follows the name (no intervening space), or to the object-like
 * macro definer otherwise.
 */
void pp_define(struct lexer *lexer) {
	// want_pp_keyword is set to 1 by caller (pp_entry). We should set it to 0 so the
	// following case can be correctly handled:
	// #define f(i) { if (i) { ... }}
	// Otherwise, 'if' may be interpreted as PP_TOK_IF
	int old_want_sharp = lexer_push_config(lexer, want_sharp, 1);
	int old_want_pp_keyword = lexer_push_config(lexer, want_pp_keyword, 0);
	// must enable want_space before get id token since parsing id will peek
	int old_want_space = lexer_push_config(lexer, want_space, 1);
	union token idtok;
	// skip the whitespace between '#define' and the macro name
	while (true) {
		idtok = lexer_next_token(lexer);
		if (idtok.tok_tag != TOK_SPACE && idtok.tok_tag != TOK_TAB) {
			break;
		}
	}
	assume(idtok, TOK_IDENTIFIER);
	// peek one token to decide function-like vs object-like
	union token peektok = lexer_next_token(lexer);
	lexer_pop_config(lexer, want_space, old_want_space);
	if (peektok.tok_tag == TOK_LPAREN) {
		// '(' directly after the name: function-like macro
		lexer_put_back(lexer, peektok);
		pp_define_func_macro(lexer, idtok.id.s);
	} else {
		// object-like macro; space/tab separators are simply dropped
		if (peektok.tok_tag != TOK_SPACE && peektok.tok_tag != TOK_TAB) {
			lexer_put_back(lexer, peektok);
		}
		pp_define_object_macro(lexer, idtok.id.s);
	}
	token_destroy(idtok);
	lexer_pop_config(lexer, want_pp_keyword, old_want_pp_keyword);
	lexer_pop_config(lexer, want_sharp, old_want_sharp);
}
struct syntree *parse(struct parser *parser) { struct translation_unit *tu = translation_unit_init(); struct external_declaration *external_decl; union token tok; // define the builtin macros open_header_file(parser->lexer, "scc-builtin.h", TOK_LT); while ((tok = lexer_next_token(parser->lexer)).tok_tag != TOK_EOF) { lexer_put_back(parser->lexer, tok); external_decl = parse_external_decl(parser); dynarr_add(tu->external_decl_list, external_decl); } return syntree_init(tu); }
/*
 * Parse a whole makeflow file from `stream` into the dag `d`.
 * Always returns 1; syntax problems are reported through
 * lexer_report_error, which presumably does not return -- TODO confirm,
 * otherwise the default case below would peek the same token forever.
 */
static int dag_parse(struct dag *d, FILE *stream)
{
	struct lexer *bk = lexer_create(STREAM, stream, 1, 1);
	bk->d = d;
	bk->stream = stream;
	bk->category = dag_task_category_lookup_or_create(d, "default");
	/* lookup set lives on this stack frame; only valid during this parse */
	struct dag_variable_lookup_set s = { d, NULL, NULL, NULL };
	bk->environment = &s;
	struct token *t;
	while((t = lexer_peek_next_token(bk)))
	{
		/* reset the per-statement lookup scope */
		s.category = bk->category;
		s.node = NULL;
		s.table = NULL;
		switch (t->type) {
		case TOKEN_NEWLINE:
		case TOKEN_SPACE:
			/* Skip newlines, spaces at top level. */
			lexer_free_token(lexer_next_token(bk));
			break;
		case TOKEN_SYNTAX:
			dag_parse_syntax(bk);
			break;
		case TOKEN_FILES:
			dag_parse_node(bk);
			break;
		case TOKEN_VARIABLE:
			dag_parse_variable(bk, NULL);
			break;
		default:
			lexer_report_error(bk, "Unexpected token. Expected one of NEWLINE, SPACE, SYNTAX, FILES, or VARIABLE, but got: %s\n:", lexer_print_token(t));
			break;
		}
	}
	dag_close_over_environment(d);
	dag_compile_ancestors(d);
	/* NOTE(review): plain free() on a lexer_create()d object may leak
	 * lexer-internal buffers -- check whether a lexer destructor exists. */
	free(bk);
	return 1;
}
/*
 * Ad-hoc lexer smoke test: tokenize a sample makeflow file and dump every
 * token to stderr.  Returns 0 on success, 1 if the sample file cannot be
 * opened (previously an unchecked fopen could hand NULL to the lexer).
 */
int main_test(int argc, char **argv)
{
	FILE *ff = fopen("../example/example.makeflow", "r");
	if (!ff) {
		fprintf(stderr, "cannot open ../example/example.makeflow\n");
		return 1;
	}

	struct lexer_book *bk = lexer_init_book(STREAM, ff, 1, 1);

	debug_config(argv[0]);
	debug_flags_set("all");

	struct token *t;
	while((t = lexer_next_token(bk, NULL)) != NULL)
		print_token(stderr, t);

	fclose(ff);
	return 0;
}
int main(int argc, char **argv) { FILE *ff = fopen("../example/example.makeflow", "r"); struct lexer *lx = lexer_init_book(STREAM, ff, 1, 1); //debug_config(argv[0]); debug_config("lexer-test"); debug_flags_set("all"); verbose_parsing = 1; struct token *t; while((t = lexer_next_token(lx)) != NULL) print_token(stderr, t); return 0; }
static struct labeled_statement *parse_labeled_statement(struct parser *parser) { union token initok = lexer_next_token(parser->lexer); struct labeled_statement *labeled_stmt = labeled_statement_init(initok.tok_tag); switch (initok.tok_tag) { case TOK_IDENTIFIER: labeled_stmt->label_str = initok.id.s; break; case TOK_CASE: labeled_stmt->case_expr = parse_constant_expression(parser); break; case TOK_DEFAULT: break; default: panic("invalid labeled statement"); } expect(parser->lexer, TOK_COLON); labeled_stmt->stmt = parse_statement(parser); return labeled_stmt; }
static int dag_parse_node_regular_command(struct lexer *bk, struct dag_node *n) { struct buffer b; buffer_init(&b); struct token *t; while((t = lexer_next_token(bk)) && t->type != TOKEN_NEWLINE) { switch(t->type) { case TOKEN_SPACE: buffer_printf(&b, " "); break; case TOKEN_LITERAL: buffer_printf(&b, "%s", t->lexeme); break; case TOKEN_IO_REDIRECT: buffer_printf(&b, "%s", t->lexeme); break; default: lexer_report_error(bk, "Unexpected command token: %s.\n", lexer_print_token(t)); break; } lexer_free_token(t); } if(!t) { lexer_report_error(bk, "Command does not end with newline.\n"); } n->command = xxstrdup(buffer_tostring(&b)); buffer_free(&b); debug(D_MAKEFLOW_PARSER, "node command=%s", n->command); return 1; }
static struct init_declarator_list *parse_init_declarator_list_with_la(struct parser *parser, struct declarator *declarator) { assert(declarator != NULL); struct dynarr *darr = dynarr_init(); struct init_declarator *init_declarator = parse_init_declarator_with_la(parser, declarator); union token tok; while (1) { dynarr_add(darr, init_declarator); tok = lexer_next_token(parser->lexer); if (tok.tok_tag == TOK_COMMA) { // go on for next init_declarator } else if (tok.tok_tag == TOK_SEMICOLON) { break; } else { file_reader_dump_remaining(parser->lexer->cstream); token_dump(tok); panic("expect ',' or ';'"); } init_declarator = parse_init_declarator(parser); } return init_declarator_list_init(darr); }
static struct selection_statement *parse_if_statement(struct parser *parser) { struct expression *expr; struct statement *truestmt, *falsestmt = NULL; union token tok; struct selection_statement *selstmt = selection_statement_init(SEL_TYPE_IF); expect(parser->lexer, TOK_LPAREN); expr = parse_expression(parser); expect(parser->lexer, TOK_RPAREN); truestmt = parse_statement(parser); tok = lexer_next_token(parser->lexer); if (tok.tok_tag == TOK_ELSE) { falsestmt = parse_statement(parser); } else { lexer_put_back(parser->lexer, tok); } selstmt->if_stmt.expr = expr; selstmt->if_stmt.truestmt = truestmt; selstmt->if_stmt.falsestmt = falsestmt; return selstmt; }
/*
 * Lex, parse, and evaluate the program in `filename` within context `ctx`.
 * Returns 0 on success, 1 on failure (unopenable file or out of memory).
 *
 * Fixes: previously ls/ps were allocated before the fopen check and leaked
 * on the early return, and the calloc results were never checked.
 */
int run_file(const char* filename, context* ctx) {
  reader r;

  FILE* fp = fopen(filename, "r");
  if(!fp) {
    printf("Error: can't open file '%s'\n", filename);
    return 1;
  }

  // allocate lexer/parser state only after the file is known to open,
  // so the early return above cannot leak them
  lexer_state* ls = calloc(1, sizeof(lexer_state));
  parser_state* ps = calloc(1, sizeof(parser_state));
  if(!ls || !ps) {
    printf("Error: out of memory\n");
    free(ls);
    free(ps);
    fclose(fp);
    return 1;
  }

  file_reader_create(&r, fp);
  lexer_create(ls, &r);

  ps->ls = ls;
  ps->die_on_error = 0;
  ps->error.max = 20;
  ps->t = lexer_next_token(ps->ls);

  expression_node* program = parse_program(ps);
  expression_node* eval = expression_node_evaluate(program, ctx);

  expression_node_destroy(eval);
  expression_node_destroy(program);

  lexer_destroy(ls);
  free(ls);
  free(ps);
  free(r.fn_data);
  fclose(fp);

  return 0;
}
// TODO support abstract_declarator static struct parameter_declaration *parse_parameter_declaration(struct parser *parser) { int old_disable_typedef = lexer_push_config(parser->lexer, disable_typedef, 0); struct declaration_specifiers *decl_specifiers = parse_declaration_specifiers(parser); // to support item *item, which item is a type name // we need disable typedef after we get decl specifiers // // To support the case that the declarator recursively contains type (func ptr as // parameter), we enable typedef at the beginning (void) lexer_push_config(parser->lexer, disable_typedef, 1); union token tok = lexer_next_token(parser->lexer); struct declarator *declarator = NULL; if (initiate_declarator(tok)) { lexer_put_back(parser->lexer, tok); declarator = parse_declarator(parser); } else { lexer_put_back(parser->lexer, tok); } lexer_pop_config(parser->lexer, disable_typedef, old_disable_typedef); return parameter_declaration_init(decl_specifiers, declarator); }
/**
 * Parse expression.
 *
 * Precedence-climbing expression parser: unary expressions come from
 * parser_parse_unary_expression(), binary operator tokens are pushed on the
 * parser stack and flushed by precedence via parser_process_binary_opcodes(),
 * while ')' (grouping), '?:' and ',' are handled inline below.  The `options`
 * flags select the comma / statement / block result handling at the end.
 */
void parser_parse_expression (parser_context_t *context_p, /**< context */ int options) /**< option flags */ { size_t grouping_level = 0; parser_stack_push_uint8 (context_p, LEXER_EXPRESSION_START); while (true) { if (options & PARSE_EXPR_HAS_LITERAL) { JERRY_ASSERT (context_p->last_cbc_opcode == CBC_PUSH_LITERAL); /* True only for the first expression. */ options &= ~PARSE_EXPR_HAS_LITERAL; } else { parser_parse_unary_expression (context_p, &grouping_level); } while (true) { parser_process_unary_expression (context_p); /* The engine flush binary opcodes above this precedence. */ uint8_t min_prec_treshold = CBC_MAXIMUM_BYTE_VALUE; if (LEXER_IS_BINARY_OP_TOKEN (context_p->token.type)) { min_prec_treshold = parser_binary_precedence_table[context_p->token.type - LEXER_FIRST_BINARY_OP]; if (LEXER_IS_BINARY_LVALUE_TOKEN (context_p->token.type) || context_p->token.type == LEXER_LOGICAL_OR || context_p->token.type == LEXER_LOGICAL_AND) { /* Right-to-left evaluation order. 
*/ min_prec_treshold++; } } else { min_prec_treshold = 0; } parser_process_binary_opcodes (context_p, min_prec_treshold); if (context_p->token.type == LEXER_RIGHT_PAREN) { if (context_p->stack_top_uint8 == LEXER_LEFT_PAREN || context_p->stack_top_uint8 == LEXER_COMMA_SEP_LIST) { JERRY_ASSERT (grouping_level > 0); grouping_level--; if (context_p->stack_top_uint8 == LEXER_COMMA_SEP_LIST) { parser_push_result (context_p); parser_flush_cbc (context_p); } parser_stack_pop_uint8 (context_p); lexer_next_token (context_p); continue; } } else if (context_p->token.type == LEXER_QUESTION_MARK) { cbc_opcode_t opcode = CBC_BRANCH_IF_FALSE_FORWARD; parser_branch_t cond_branch; parser_branch_t uncond_branch; parser_push_result (context_p); if (context_p->last_cbc_opcode == CBC_LOGICAL_NOT) { context_p->last_cbc_opcode = PARSER_CBC_UNAVAILABLE; opcode = CBC_BRANCH_IF_TRUE_FORWARD; } parser_emit_cbc_forward_branch (context_p, opcode, &cond_branch); lexer_next_token (context_p); parser_parse_expression (context_p, PARSE_EXPR_NO_COMMA); parser_emit_cbc_forward_branch (context_p, CBC_JUMP_FORWARD, &uncond_branch); parser_set_branch_to_current_position (context_p, &cond_branch); /* Although byte code is constructed for two branches, * only one of them will be executed. To reflect this * the stack is manually adjusted. */ JERRY_ASSERT (context_p->stack_depth > 0); context_p->stack_depth--; if (context_p->token.type != LEXER_COLON) { parser_raise_error (context_p, PARSER_ERR_COLON_FOR_CONDITIONAL_EXPECTED); } lexer_next_token (context_p); parser_parse_expression (context_p, PARSE_EXPR_NO_COMMA); parser_set_branch_to_current_position (context_p, &uncond_branch); /* Last opcode rewrite is not allowed because * the result may come from the first branch. 
*/ parser_flush_cbc (context_p); continue; } break; } if (context_p->token.type == LEXER_COMMA) { if (!(options & PARSE_EXPR_NO_COMMA) || grouping_level > 0) { if (!CBC_NO_RESULT_OPERATION (context_p->last_cbc_opcode)) { parser_emit_cbc (context_p, CBC_POP); } if (context_p->stack_top_uint8 == LEXER_LEFT_PAREN) { parser_mem_page_t *page_p = context_p->stack.first_p; JERRY_ASSERT (page_p != NULL); page_p->bytes[context_p->stack.last_position - 1] = LEXER_COMMA_SEP_LIST; context_p->stack_top_uint8 = LEXER_COMMA_SEP_LIST; } lexer_next_token (context_p); continue; } } else if (LEXER_IS_BINARY_OP_TOKEN (context_p->token.type)) { parser_append_binary_token (context_p); lexer_next_token (context_p); continue; } break; } if (grouping_level != 0) { parser_raise_error (context_p, PARSER_ERR_RIGHT_PAREN_EXPECTED); } JERRY_ASSERT (context_p->stack_top_uint8 == LEXER_EXPRESSION_START); parser_stack_pop_uint8 (context_p); if (options & PARSE_EXPR_STATEMENT) { if (!CBC_NO_RESULT_OPERATION (context_p->last_cbc_opcode)) { parser_emit_cbc (context_p, CBC_POP); } } else if (options & PARSE_EXPR_BLOCK) { if (CBC_NO_RESULT_COMPOUND_ASSIGMENT (context_p->last_cbc_opcode)) { context_p->last_cbc_opcode = PARSER_TO_BINARY_OPERATION_WITH_BLOCK (context_p->last_cbc_opcode); parser_flush_cbc (context_p); } else if (CBC_NO_RESULT_BLOCK (context_p->last_cbc_opcode)) { JERRY_ASSERT (CBC_SAME_ARGS (context_p->last_cbc_opcode, context_p->last_cbc_opcode + 2)); PARSER_PLUS_EQUAL_U16 (context_p->last_cbc_opcode, 2); parser_flush_cbc (context_p); } else { if (CBC_NO_RESULT_OPERATION (context_p->last_cbc_opcode)) { JERRY_ASSERT (CBC_SAME_ARGS (context_p->last_cbc_opcode, context_p->last_cbc_opcode + 1)); context_p->last_cbc_opcode++; } parser_emit_cbc (context_p, CBC_POP_BLOCK); } } else { parser_push_result (context_p); } } /* parser_parse_expression */
/**
 * Parse the postfix part of unary operators, and
 * generate byte code for the whole expression.
 *
 * First loop: consume postfix constructs ('.', '[]', call '()', and
 * newline-sensitive postfix ++/--), merging with the previous push opcode
 * where a combined byte code exists.  Second loop: pop the prefix unary
 * operator tokens recorded on the parser stack and emit their opcodes.
 */
static void parser_process_unary_expression (parser_context_t *context_p) /**< context */ { /* Parse postfix part of a primary expression. */ while (true) { /* Since break would only break the switch, we use * continue to continue this loop. Without continue, * the code abandons the loop. */ switch (context_p->token.type) { case LEXER_DOT: { parser_push_result (context_p); lexer_expect_identifier (context_p, LEXER_STRING_LITERAL); JERRY_ASSERT (context_p->token.type == LEXER_LITERAL && context_p->token.lit_location.type == LEXER_STRING_LITERAL); if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { JERRY_ASSERT (CBC_ARGS_EQ (CBC_PUSH_PROP_LITERAL_LITERAL, CBC_HAS_LITERAL_ARG | CBC_HAS_LITERAL_ARG2)); context_p->last_cbc_opcode = CBC_PUSH_PROP_LITERAL_LITERAL; context_p->last_cbc.value = context_p->lit_object.index; } else if (context_p->last_cbc_opcode == CBC_PUSH_THIS) { context_p->last_cbc_opcode = PARSER_CBC_UNAVAILABLE; parser_emit_cbc_literal_from_token (context_p, CBC_PUSH_PROP_THIS_LITERAL); } else { parser_emit_cbc_literal_from_token (context_p, CBC_PUSH_PROP_LITERAL); } lexer_next_token (context_p); continue; } case LEXER_LEFT_SQUARE: { parser_push_result (context_p); lexer_next_token (context_p); parser_parse_expression (context_p, PARSE_EXPR); if (context_p->token.type != LEXER_RIGHT_SQUARE) { parser_raise_error (context_p, PARSER_ERR_RIGHT_SQUARE_EXPECTED); } lexer_next_token (context_p); if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_PROP_LITERAL; } else if (context_p->last_cbc_opcode == CBC_PUSH_TWO_LITERALS) { context_p->last_cbc_opcode = CBC_PUSH_PROP_LITERAL_LITERAL; } else if (context_p->last_cbc_opcode == CBC_PUSH_THIS_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_PROP_THIS_LITERAL; } else { parser_emit_cbc (context_p, CBC_PUSH_PROP); } continue; } case LEXER_LEFT_PAREN: { size_t call_arguments = 
0; uint16_t opcode = CBC_CALL; bool is_eval = false; parser_push_result (context_p); if (context_p->stack_top_uint8 == LEXER_KEYW_NEW) { parser_stack_pop_uint8 (context_p); opcode = CBC_NEW; } else { if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL && context_p->last_cbc.literal_object_type == LEXER_LITERAL_OBJECT_EVAL) { JERRY_ASSERT (context_p->last_cbc.literal_type == LEXER_IDENT_LITERAL); context_p->status_flags |= PARSER_ARGUMENTS_NEEDED | PARSER_LEXICAL_ENV_NEEDED | PARSER_NO_REG_STORE; is_eval = true; } if (context_p->last_cbc_opcode == CBC_PUSH_PROP) { context_p->last_cbc_opcode = CBC_PUSH_PROP_REFERENCE; opcode = CBC_CALL_PROP; } else if (context_p->last_cbc_opcode == CBC_PUSH_PROP_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_PROP_LITERAL_REFERENCE; opcode = CBC_CALL_PROP; } else if (context_p->last_cbc_opcode == CBC_PUSH_PROP_LITERAL_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_PROP_LITERAL_LITERAL_REFERENCE; opcode = CBC_CALL_PROP; } else if (context_p->last_cbc_opcode == CBC_PUSH_PROP_THIS_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_PROP_THIS_LITERAL_REFERENCE; opcode = CBC_CALL_PROP; } else if ((context_p->status_flags & (PARSER_INSIDE_WITH | PARSER_RESOLVE_THIS_FOR_CALLS)) && PARSER_IS_PUSH_LITERAL (context_p->last_cbc_opcode) && context_p->last_cbc.literal_type == LEXER_IDENT_LITERAL) { opcode = CBC_CALL_PROP; if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_IDENT_REFERENCE; } else if (context_p->last_cbc_opcode == CBC_PUSH_TWO_LITERALS) { context_p->last_cbc_opcode = CBC_PUSH_LITERAL; parser_emit_cbc_literal (context_p, CBC_PUSH_IDENT_REFERENCE, context_p->last_cbc.value); } else { JERRY_ASSERT (context_p->last_cbc_opcode == CBC_PUSH_THREE_LITERALS); context_p->last_cbc_opcode = CBC_PUSH_TWO_LITERALS; parser_emit_cbc_literal (context_p, CBC_PUSH_IDENT_REFERENCE, context_p->last_cbc.third_literal_index); } } } lexer_next_token (context_p); if (context_p->token.type != LEXER_RIGHT_PAREN) { 
/* parse the comma-separated argument list */
while (true) { if (++call_arguments > CBC_MAXIMUM_BYTE_VALUE) { parser_raise_error (context_p, PARSER_ERR_ARGUMENT_LIMIT_REACHED); } parser_parse_expression (context_p, PARSE_EXPR_NO_COMMA); if (context_p->token.type != LEXER_COMMA) { break; } lexer_next_token (context_p); } if (context_p->token.type != LEXER_RIGHT_PAREN) { parser_raise_error (context_p, PARSER_ERR_RIGHT_PAREN_EXPECTED); } } lexer_next_token (context_p); if (is_eval) { parser_emit_cbc (context_p, CBC_EVAL); } if (call_arguments == 0) { if (opcode == CBC_CALL) { parser_emit_cbc (context_p, CBC_CALL0); continue; } if (opcode == CBC_CALL_PROP) { parser_emit_cbc (context_p, CBC_CALL0_PROP); continue; } if (opcode == CBC_NEW) { parser_emit_cbc (context_p, CBC_NEW0); continue; } } if (call_arguments == 1) { if (opcode == CBC_CALL) { parser_emit_cbc (context_p, CBC_CALL1); continue; } if (opcode == CBC_CALL_PROP) { parser_emit_cbc (context_p, CBC_CALL1_PROP); continue; } if (opcode == CBC_NEW) { parser_emit_cbc (context_p, CBC_NEW1); continue; } } if (call_arguments == 2) { if (opcode == CBC_CALL) { parser_emit_cbc (context_p, CBC_CALL2); continue; } if (opcode == CBC_CALL_PROP) { parser_flush_cbc (context_p); /* Manually adjusting stack usage. */ JERRY_ASSERT (context_p->stack_depth > 0); context_p->stack_depth--; parser_emit_cbc (context_p, CBC_CALL2_PROP); continue; } } parser_emit_cbc_call (context_p, opcode, call_arguments); continue; } default: { if (context_p->stack_top_uint8 == LEXER_KEYW_NEW) { parser_push_result (context_p); parser_emit_cbc (context_p, CBC_NEW0); parser_stack_pop_uint8 (context_p); continue; } if (!context_p->token.was_newline && (context_p->token.type == LEXER_INCREASE || context_p->token.type == LEXER_DECREASE)) { cbc_opcode_t opcode = (context_p->token.type == LEXER_INCREASE) ? 
CBC_POST_INCR : CBC_POST_DECR; parser_push_result (context_p); parser_emit_unary_lvalue_opcode (context_p, opcode); lexer_next_token (context_p); } break; } } break; } /* Generate byte code for the unary operators. */ while (true) { uint8_t token = context_p->stack_top_uint8; if (!LEXER_IS_UNARY_OP_TOKEN (token)) { break; } parser_push_result (context_p); parser_stack_pop_uint8 (context_p); if (LEXER_IS_UNARY_LVALUE_OP_TOKEN (token)) { if (token == LEXER_KEYW_DELETE) { token = CBC_DELETE_PUSH_RESULT; } else { token = (uint8_t) (LEXER_UNARY_LVALUE_OP_TOKEN_TO_OPCODE (token)); } parser_emit_unary_lvalue_opcode (context_p, (cbc_opcode_t) token); } else { token = (uint8_t) (LEXER_UNARY_OP_TOKEN_TO_OPCODE (token)); if (token == CBC_TYPEOF) { if (PARSER_IS_PUSH_LITERAL (context_p->last_cbc_opcode) && context_p->last_cbc.literal_type == LEXER_IDENT_LITERAL) { if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { context_p->last_cbc_opcode = CBC_TYPEOF_IDENT; } else if (context_p->last_cbc_opcode == CBC_PUSH_TWO_LITERALS) { context_p->last_cbc_opcode = CBC_PUSH_LITERAL; parser_emit_cbc_literal (context_p, CBC_TYPEOF_IDENT, context_p->last_cbc.value); } else { JERRY_ASSERT (context_p->last_cbc_opcode == CBC_PUSH_THREE_LITERALS); context_p->last_cbc_opcode = CBC_PUSH_TWO_LITERALS; parser_emit_cbc_literal (context_p, CBC_TYPEOF_IDENT, context_p->last_cbc.third_literal_index); } } else { parser_emit_cbc (context_p, token); } } else { if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { /* It is not worth to combine with push multiple literals * since the byte code size will not decrease. */ JERRY_ASSERT (CBC_SAME_ARGS (context_p->last_cbc_opcode, token + 1)); context_p->last_cbc_opcode = (uint16_t) (token + 1); } else { parser_emit_cbc (context_p, token); } } } } } /* parser_process_unary_expression */
/**
 * Parse and record unary operators, and parse the primary literal.
 *
 * First loop: push prefix unary operator tokens onto the parser stack
 * (converting binary +/- to unary forms, tracking '(' grouping depth and
 * the 'new' keyword).  Then a primary expression is parsed: literals,
 * function expressions, object/array literals, regexps, 'this', true,
 * false, null -- anything else raises PARSER_ERR_PRIMARY_EXP_EXPECTED.
 */
static void parser_parse_unary_expression (parser_context_t *context_p, /**< context */ size_t *grouping_level_p) /**< grouping level */ { int new_was_seen = 0; /* Collect unary operators. */ while (true) { /* Convert plus and minus binary operators to unary operators. */ if (context_p->token.type == LEXER_ADD) { context_p->token.type = LEXER_PLUS; } else if (context_p->token.type == LEXER_SUBTRACT) { context_p->token.type = LEXER_NEGATE; } /* Bracketed expressions are primary expressions. At this * point their left paren is pushed onto the stack and * they are processed when their closing paren is reached. */ if (context_p->token.type == LEXER_LEFT_PAREN) { (*grouping_level_p)++; new_was_seen = 0; } else if (context_p->token.type == LEXER_KEYW_NEW) { /* After 'new' unary operators are not allowed. */ new_was_seen = 1; } else if (new_was_seen || !LEXER_IS_UNARY_OP_TOKEN (context_p->token.type)) { break; } parser_stack_push_uint8 (context_p, context_p->token.type); lexer_next_token (context_p); } /* Parse primary expression. 
*/ switch (context_p->token.type) { case LEXER_LITERAL: { cbc_opcode_t opcode = CBC_PUSH_LITERAL; if (context_p->token.lit_location.type == LEXER_IDENT_LITERAL || context_p->token.lit_location.type == LEXER_STRING_LITERAL) { lexer_construct_literal_object (context_p, &context_p->token.lit_location, context_p->token.lit_location.type); } else if (context_p->token.lit_location.type == LEXER_NUMBER_LITERAL) { bool is_negative_number = false; while (context_p->stack_top_uint8 == LEXER_PLUS || context_p->stack_top_uint8 == LEXER_NEGATE) { if (context_p->stack_top_uint8 == LEXER_NEGATE) { is_negative_number = !is_negative_number; } parser_stack_pop_uint8 (context_p); } if (lexer_construct_number_object (context_p, true, is_negative_number)) { JERRY_ASSERT (context_p->lit_object.index <= CBC_PUSH_NUMBER_BYTE_RANGE_END); if (context_p->lit_object.index == 0) { parser_emit_cbc (context_p, CBC_PUSH_NUMBER_0); break; } parser_emit_cbc_push_number (context_p, is_negative_number); break; } } if (context_p->lit_object.type != LEXER_LITERAL_OBJECT_EVAL) { if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_TWO_LITERALS; context_p->last_cbc.value = context_p->lit_object.index; context_p->last_cbc.literal_type = context_p->token.lit_location.type; context_p->last_cbc.literal_object_type = context_p->lit_object.type; break; } if (context_p->last_cbc_opcode == CBC_PUSH_TWO_LITERALS) { context_p->last_cbc_opcode = CBC_PUSH_THREE_LITERALS; context_p->last_cbc.third_literal_index = context_p->lit_object.index; context_p->last_cbc.literal_type = context_p->token.lit_location.type; context_p->last_cbc.literal_object_type = context_p->lit_object.type; break; } if (context_p->last_cbc_opcode == CBC_PUSH_THIS) { context_p->last_cbc_opcode = PARSER_CBC_UNAVAILABLE; opcode = CBC_PUSH_THIS_LITERAL; } } parser_emit_cbc_literal_from_token (context_p, opcode); break; } case LEXER_KEYW_FUNCTION: { int literals = 0; uint16_t literal1 = 0; uint16_t literal2 = 
0; if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { literals = 1; literal1 = context_p->last_cbc.literal_index; context_p->last_cbc_opcode = PARSER_CBC_UNAVAILABLE; } else if (context_p->last_cbc_opcode == CBC_PUSH_TWO_LITERALS) { literals = 2; literal1 = context_p->last_cbc.literal_index; literal2 = context_p->last_cbc.value; context_p->last_cbc_opcode = PARSER_CBC_UNAVAILABLE; } else { parser_flush_cbc (context_p); } uint32_t status_flags = PARSER_IS_FUNCTION | PARSER_IS_FUNC_EXPRESSION | PARSER_IS_CLOSURE; if (context_p->status_flags & PARSER_INSIDE_WITH) { status_flags |= PARSER_RESOLVE_THIS_FOR_CALLS; } lexer_construct_function_object (context_p, status_flags); JERRY_ASSERT (context_p->last_cbc_opcode == PARSER_CBC_UNAVAILABLE); if (literals == 1) { context_p->last_cbc_opcode = CBC_PUSH_TWO_LITERALS; context_p->last_cbc.literal_index = literal1; context_p->last_cbc.value = (uint16_t) (context_p->literal_count - 1); } else if (literals == 2) { context_p->last_cbc_opcode = CBC_PUSH_THREE_LITERALS; context_p->last_cbc.literal_index = literal1; context_p->last_cbc.value = literal2; context_p->last_cbc.third_literal_index = (uint16_t) (context_p->literal_count - 1); } else { parser_emit_cbc_literal (context_p, CBC_PUSH_LITERAL, (uint16_t) (context_p->literal_count - 1)); } context_p->last_cbc.literal_type = LEXER_FUNCTION_LITERAL; context_p->last_cbc.literal_object_type = LEXER_LITERAL_OBJECT_ANY; break; } case LEXER_LEFT_BRACE: { parser_parse_object_literal (context_p); break; } case LEXER_LEFT_SQUARE: { parser_parse_array_literal (context_p); break; } case LEXER_DIVIDE: case LEXER_ASSIGN_DIVIDE: { lexer_construct_regexp_object (context_p, false); if (context_p->last_cbc_opcode == CBC_PUSH_LITERAL) { context_p->last_cbc_opcode = CBC_PUSH_TWO_LITERALS; context_p->last_cbc.value = (uint16_t) (context_p->literal_count - 1); } else if (context_p->last_cbc_opcode == CBC_PUSH_TWO_LITERALS) { context_p->last_cbc_opcode = CBC_PUSH_THREE_LITERALS; 
context_p->last_cbc.third_literal_index = (uint16_t) (context_p->literal_count - 1); } else { parser_emit_cbc_literal (context_p, CBC_PUSH_LITERAL, (uint16_t) (context_p->literal_count - 1)); } context_p->last_cbc.literal_type = LEXER_REGEXP_LITERAL; context_p->last_cbc.literal_object_type = LEXER_LITERAL_OBJECT_ANY; break; } case LEXER_KEYW_THIS: { parser_emit_cbc (context_p, CBC_PUSH_THIS); break; } case LEXER_LIT_TRUE: { parser_emit_cbc (context_p, CBC_PUSH_TRUE); break; } case LEXER_LIT_FALSE: { parser_emit_cbc (context_p, CBC_PUSH_FALSE); break; } case LEXER_LIT_NULL: { parser_emit_cbc (context_p, CBC_PUSH_NULL); break; } default: { parser_raise_error (context_p, PARSER_ERR_PRIMARY_EXP_EXPECTED); break; } } lexer_next_token (context_p); } /* parser_parse_unary_expression */
struct ast_node *parser_acc_typeclass(struct parser *p) { struct ast_node *n = NULL; struct token *tok; struct ast_node *name; struct ast_node *genr = NULL; const struct inplocation_mark *start; enum parser_fndecl_list_err err; tok = lexer_lookahead(p->lexer, 1); if (!tok || tok->type != TOKEN_KW_TYPECLASS) { return NULL; } start = token_get_start(tok); lexer_push(p->lexer); //consume typeclass keyword lexer_next_token(p->lexer); name = parser_acc_identifier(p); if (!name) { parser_synerr(p, lexer_last_token_start(p->lexer), NULL, "Expected an identifier for the typeclass " "name after 'class'"); goto err; } genr = parser_acc_genrdecl(p); if (!genr && parser_has_syntax_error_reset(p)) { parser_synerr(p, ast_node_endmark(name), NULL, "Expected a generic declaration for typeclass \"" RF_STR_PF_FMT"\" after identifier", RF_STR_PF_ARG(ast_identifier_str(name))); goto err_free_genr; } tok = lexer_next_token(p->lexer); if (!tok || tok->type != TOKEN_SM_OCBRACE) { parser_synerr(p, ast_node_endmark(name), NULL, "Expected '{' at \""RF_STR_PF_FMT"\" typeclass " "declaration after identifier", RF_STR_PF_ARG(ast_identifier_str(name))); goto err_free_genr; } n = ast_typeclass_create(start, NULL, name, genr); if (!n) { RF_ERRNOMEM(); goto err_free_genr; } err = parser_acc_fndecl_list(p, n, FNDECL_PARTOF_TYPECLASS); switch (err) { case PARSER_FNDECL_LIST_EMPTY: parser_synerr(p, token_get_end(tok), NULL, "Expected at least one function declaration inside " "the body of typeclass \""RF_STR_PF_FMT"\" after '{'", RF_STR_PF_ARG(ast_identifier_str(name))); goto err_free_typeclass; break; case PARSER_FNDECL_LIST_FAILURE: parser_synerr(p, lexer_last_token_end(p->lexer), NULL, "Expected a proper function declaration inside " "typeclass \""RF_STR_PF_FMT"\"", RF_STR_PF_ARG(ast_identifier_str(name))); goto err_free_typeclass; break; default: // SUCCESS break; } tok = lexer_next_token(p->lexer); if (!tok || tok->type != TOKEN_SM_CCBRACE) { parser_synerr(p, lexer_last_token_end(p->lexer), 
NULL, "Expected '}' at the end of \""RF_STR_PF_FMT"\" " "typeclass declaration", RF_STR_PF_ARG(ast_identifier_str(name))); goto err_free_typeclass; } ast_node_set_end(n, token_get_end(tok)); lexer_pop(p->lexer); return n; err_free_genr: if (genr) { ast_node_destroy(genr); } err_free_typeclass: if (n) { ast_node_destroy(n); } err: lexer_rollback(p->lexer); return NULL; }
/*
 * Accept an if-expression starting with `if_type` ('if' or 'elif'):
 *   ('if'|'elif') condbranch ('elif' ...)* ('else' block)?
 * Elif branches recurse into this function; every fallthrough branch is
 * attached to the initial node.  Returns NULL (lexer rolled back) on error
 * or when the lookahead is not `if_type`.
 *
 * NOTE(review): the keyword is consumed by lexer_next_token() BEFORE
 * lexer_push(); if lexer_rollback() only restores to the push point, the
 * keyword is lost on the error path -- confirm the push/rollback semantics
 * (compare parser_acc_typeclass, which pushes before consuming).
 */
struct ast_node *parser_acc_ifexpr(struct parser *p, enum token_type if_type) { struct ast_node *n; struct ast_node *branch; struct token *tok; const struct inplocation_mark *start; RF_ASSERT(if_type == TOKEN_KW_IF || if_type == TOKEN_KW_ELIF, "parse_ifexp called with invalid token type"); tok = lexer_lookahead(p->lexer, 1); if (!tok || tok->type != if_type) { return NULL; } start = token_get_start(tok); // consume 'if' or 'elif' lexer_next_token(p->lexer); lexer_push(p->lexer); // parse the taken branch branch = parser_acc_condbranch(p, tok); if (!branch) { goto err; } // create the if expression n = ast_ifexpr_create(start, ast_node_endmark(branch), branch, NULL); if (!n) { ast_node_destroy(branch); RF_ERRNOMEM(); goto err; } tok = lexer_lookahead(p->lexer, 1); while (tok && (tok->type == TOKEN_KW_ELIF || tok->type == TOKEN_KW_ELSE)) { if (tok->type == TOKEN_KW_ELIF) { branch = parser_acc_ifexpr(p, TOKEN_KW_ELIF); if (!branch) { // error reporting should already happen in parser_acc_ifexpr() goto err_free; } } else { //can only be an else lexer_next_token(p->lexer); // consume it branch = parser_acc_block(p, true); if (!branch) { parser_synerr(p, token_get_end(tok), NULL, "Expected a block after 'else'"); goto err_free; } } ast_ifexpr_add_fallthrough_branch(n, branch); ast_node_set_end(n, ast_node_endmark(branch)); tok = lexer_lookahead(p->lexer, 1); } lexer_pop(p->lexer); return n; err_free: ast_node_destroy(n); err: lexer_rollback(p->lexer); return NULL; }
struct ast_node *parser_acc_typeinstance(struct parser *p) { struct ast_node *n = NULL; struct token *tok; struct ast_node *class_name; struct ast_node *type_name; struct ast_node *genr = NULL; const struct inplocation_mark *start; enum parser_fnimpl_list_err err; tok = lexer_lookahead(p->lexer, 1); if (!tok || tok->type != TOKEN_KW_TYPEINSTANCE) { return NULL; } start = token_get_start(tok); //consumer typeclass isntance keyword lexer_next_token(p->lexer); class_name = parser_acc_identifier(p); if (!class_name) { parser_synerr(p, lexer_last_token_start(p->lexer), NULL, "Expected an identifier for the typeclass instance " "name after 'instance'"); goto err; } type_name = parser_acc_identifier(p); if (!type_name) { parser_synerr(p, lexer_last_token_start(p->lexer), NULL, "Expected an identifier for the name of \""RF_STR_PF_FMT"\" " "typeclass instance", RF_STR_PF_ARG(ast_identifier_str(class_name))); goto err; } genr = parser_acc_genrdecl(p); if (!genr && parser_has_syntax_error_reset(p)) { parser_synerr(p, ast_node_endmark(type_name), NULL, "Expected a generic declaration for type instance \"" RF_STR_PF_FMT"\" after type name \""RF_STR_PF_FMT"\"", RF_STR_PF_ARG(ast_identifier_str(class_name)), RF_STR_PF_ARG(ast_identifier_str(type_name))); goto err; } tok = lexer_next_token(p->lexer); if (!tok || tok->type != TOKEN_SM_OCBRACE) { parser_synerr(p, ast_node_endmark(type_name), NULL, "Expected '{' at type instance \""RF_STR_PF_FMT"\" " "after \""RF_STR_PF_FMT"\"", RF_STR_PF_ARG(ast_identifier_str(class_name)), RF_STR_PF_ARG(ast_identifier_str(type_name))); goto err_free_genr; } n = ast_typeinstance_create(start, NULL, class_name, type_name, genr); if (!n) { RF_ERRNOMEM(); goto err_free_genr; } err = parser_acc_fnimpl_list(p, n); switch (err) { case PARSER_FNIMPL_LIST_EMPTY: parser_synerr(p, token_get_end(tok), NULL, "Expected at least one function implementation inside " "the body of typeinstace \""RF_STR_PF_FMT"\" for " "\""RF_STR_PF_FMT"\" after'{'", 
RF_STR_PF_ARG(ast_identifier_str(class_name)), RF_STR_PF_ARG(ast_identifier_str(type_name))); goto err_free_typeinstance; break; case PARSER_FNIMPL_LIST_FAILURE: parser_synerr(p, lexer_last_token_end(p->lexer), NULL, "Expected a proper function implementation inside " "the body of typeinstace \""RF_STR_PF_FMT"\" for " "\""RF_STR_PF_FMT"\" after'{'", RF_STR_PF_ARG(ast_identifier_str(class_name)), RF_STR_PF_ARG(ast_identifier_str(type_name))); goto err_free_typeinstance; break; default: // SUCCESS break; } tok = lexer_next_token(p->lexer); if (!tok || tok->type != TOKEN_SM_CCBRACE) { parser_synerr(p, lexer_last_token_end(p->lexer), NULL, "Expected '}' at the end of \""RF_STR_PF_FMT"\" " "typeinstance for \""RF_STR_PF_FMT"\"", RF_STR_PF_ARG(ast_identifier_str(class_name)), RF_STR_PF_ARG(ast_identifier_str(type_name))); goto err_free_typeinstance; } ast_node_set_end(n, token_get_end(tok)); return n; err_free_genr: if (genr) { ast_node_destroy(genr); } err_free_typeinstance: if (n) { ast_node_destroy(n); } err: return NULL; }
static int dag_parse_node_filelist(struct lexer *bk, struct dag_node *n) { int before_colon = 1; char *filename; char *newname; struct token *t, *arrow, *rename; while((t = lexer_next_token(bk))) { filename = NULL; newname = NULL; switch (t->type) { case TOKEN_COLON: before_colon = 0; lexer_free_token(t); break; case TOKEN_NEWLINE: /* Finished reading file list */ lexer_free_token(t); return 1; break; case TOKEN_LITERAL: rename = NULL; arrow = lexer_peek_next_token(bk); if(!arrow) { lexer_report_error(bk, "Rule specification is incomplete."); } else if(arrow->type == TOKEN_REMOTE_RENAME) //Is the arrow really an arrow? { lexer_free_token(lexer_next_token(bk)); //Jump arrow. rename = lexer_next_token(bk); if(!rename) { lexer_report_error(bk, "Remote name specification is incomplete."); } } filename = t->lexeme; newname = rename ? rename->lexeme : NULL; if(before_colon) dag_node_add_target_file(n, filename, newname); else dag_node_add_source_file(n, filename, newname); lexer_free_token(t); if(rename) { lexer_free_token(rename); } break; default: lexer_report_error(bk, "Error reading file list. %s", lexer_print_token(t)); break; } } return 0; }
/*
 * Parse a function declaration:
 *
 *     [fn] <identifier> [generic_declaration] '(' [typedesc] ')' ['->' typedesc]
 *
 * The 'fn' keyword is required unless fndecl_position is
 * FNDECL_PARTOF_FOREIGN_IMPORT. Returns the new fndecl AST node, or NULL on
 * failure with the lexer rolled back and (where applicable) a syntax error
 * reported.
 */
struct ast_node *parser_acc_fndecl(struct parser *p, int fndecl_position)
{
    struct ast_node *n;
    struct token *tok;
    struct token *oparen_tok;
    struct ast_node *name;
    struct ast_node *genr = NULL;
    struct ast_node *args = NULL;
    struct ast_node *ret_type = NULL;
    const struct inplocation_mark *start;
    const struct inplocation_mark *end;

    lexer_push(p->lexer);
    tok = lexer_lookahead(p->lexer, 1);
    if (fndecl_position != FNDECL_PARTOF_FOREIGN_IMPORT) {
        if (!tok || tok->type != TOKEN_KW_FUNCTION) {
            goto err;
        }
        //consume function keyword
        lexer_next_token(p->lexer);
    }
    // start should be either start of fn, or start of next token (an identifier)
    /* NOTE(review): in the foreign-import case tok was not NULL-checked, so
     * token_get_start(NULL) is possible at end of input — confirm the caller
     * guarantees a token here */
    start = token_get_start(tok);

    name = parser_acc_identifier(p);
    if (!name) {
        parser_synerr(p, lexer_last_token_start(p->lexer), NULL,
                      "Expected an identifier for the function name after 'fn'");
        goto err;
    }

    /* optional generic declaration, attempted only if the lookahead can start one */
    tok = lexer_lookahead(p->lexer, 1);
    if (GENRDECL_START_COND(tok)) {
        genr = parser_acc_genrdecl(p);
        if (!genr) {
            goto err;
        }
    }

    tok = lexer_next_token(p->lexer);
    if (!tok || tok->type != TOKEN_SM_OPAREN) {
        parser_synerr(p, lexer_last_token_start(p->lexer), NULL,
                      "Expected '(' at function declaration");
        goto err_free_genr;
    }
    /* keep the '(' token so the error message below can point at it */
    oparen_tok = tok;

    /* argument type description is optional ("()" is a valid arg list) */
    args = parser_acc_typedesc(p);
    if (!args && parser_has_syntax_error_reset(p)) {
        parser_synerr(p, token_get_end(tok), NULL,
                      "Expected either a type description for the function's "
                      "arguments or ')' after '('");
        goto err_free_genr;
    }

    tok = lexer_next_token(p->lexer);
    if (!tok || tok->type != TOKEN_SM_CPAREN) {
        if (args) {
            parser_synerr(p, lexer_last_token_end(p->lexer), NULL,
                          "Expected ')' at function declaration after "
                          "type description");
        } else {
            parser_synerr(p, token_get_end(oparen_tok), NULL,
                          "Expected ')' at function declaration after '('");
        }
        goto err_free_args;
    }
    end = token_get_end(tok);

    /* optional return type after '->' */
    tok = lexer_lookahead(p->lexer, 1);
    if (tok && tok->type == TOKEN_OP_IMPL) {
        //consume '->'
        lexer_next_token(p->lexer);
        ret_type = parser_acc_typedesc(p);
        if (!ret_type) {
            parser_synerr(p, token_get_end(tok), NULL,
                          "Expected type description for the function's "
                          "return type after '->'");
            goto err_free_args;
        }
        end = ast_node_endmark(ret_type);
    }

    n = ast_fndecl_create(start, end, fndecl_position, name, genr, args, ret_type);
    if (!n) {
        RF_ERRNOMEM();
        goto err_free_rettype;
    }
    lexer_pop(p->lexer);
    return n;

/* cleanup labels deliberately fall through: ret_type → args → genr → rollback */
err_free_rettype:
    if (ret_type) {
        ast_node_destroy(ret_type);
    }
err_free_args:
    if (args) {
        ast_node_destroy(args);
    }
err_free_genr:
    if (genr) {
        ast_node_destroy(genr);
    }
err:
    lexer_rollback(p->lexer);
    return NULL;
}
struct ast_node *parser_acc_fncall(struct parser *p, bool expect_it) { struct ast_node *n; struct token *tok; struct ast_node *name; struct ast_node *genr = NULL; struct ast_node *args = NULL; lexer_push(p->lexer); name = parser_acc_identifier(p); if (!name) { goto err; } genr = parser_acc_genrattr(p, false); if (!genr && parser_has_syntax_error(p)) { // name is an identifier and even in failure does not need to get destroyed here goto err; } tok = lexer_lookahead(p->lexer, 1); if (!tok || tok->type != TOKEN_SM_OPAREN) { if (expect_it) { parser_synerr(p, lexer_last_token_start(p->lexer), NULL, "Expected '('"); } goto err_free_genr; } //consume '(' lexer_next_token(p->lexer); args = parser_acc_expression(p); if (!args) { if (parser_has_syntax_error(p)) { parser_synerr(p, lexer_last_token_start(p->lexer), NULL, "Expected argument expression for function call"); goto err_free_genr; } } tok = lexer_lookahead(p->lexer, 1); if (!tok || tok->type != TOKEN_SM_CPAREN) { if (expect_it) { parser_synerr(p, lexer_last_token_end(p->lexer), NULL, "Expected ')' at end of "RF_STR_PF_FMT" function call", RF_STR_PF_ARG(ast_identifier_str(name))); } goto err_free_args; } //consume ')' lexer_next_token(p->lexer); n = ast_fncall_create(ast_node_startmark(name), token_get_end(tok), name, args, genr); if (!n) { RF_ERRNOMEM(); goto err_free_args; } lexer_pop(p->lexer); return n; err_free_args: if (args) { ast_node_destroy(args); } err_free_genr: if (genr) { ast_node_destroy(genr); } err: lexer_rollback(p->lexer); return NULL; }
/**
 * Parse object literal.
 *
 * Emits CBC_CREATE_OBJECT and then, for each property, either a
 * CBC_SET_PROPERTY (plain value) or an extended getter/setter opcode.
 * A PARSER_OBJECT_PROPERTY_START marker is pushed on the parser stack so
 * the accumulated property items can be popped off at the end.
 */
static void
parser_parse_object_literal (parser_context_t *context_p) /**< context */
{
  JERRY_ASSERT (context_p->token.type == LEXER_LEFT_BRACE);

  parser_emit_cbc (context_p, CBC_CREATE_OBJECT);

  parser_stack_push_uint8 (context_p, PARSER_OBJECT_PROPERTY_START);

  while (true)
  {
    lexer_expect_object_literal_id (context_p, false);

    /* empty literal, or a trailing comma before '}' */
    if (context_p->token.type == LEXER_RIGHT_BRACE)
    {
      break;
    }

    if (context_p->token.type == LEXER_PROPERTY_GETTER || context_p->token.type == LEXER_PROPERTY_SETTER)
    {
      uint32_t status_flags;
      cbc_ext_opcode_t opcode;
      uint16_t literal_index;
      parser_object_literal_item_types_t item_type;

      /* choose the flags/opcode/item type matching getter vs setter */
      if (context_p->token.type == LEXER_PROPERTY_GETTER)
      {
        status_flags = PARSER_IS_FUNCTION | PARSER_IS_CLOSURE | PARSER_IS_PROPERTY_GETTER;
        opcode = CBC_EXT_SET_GETTER;
        item_type = PARSER_OBJECT_PROPERTY_GETTER;
      }
      else
      {
        status_flags = PARSER_IS_FUNCTION | PARSER_IS_CLOSURE | PARSER_IS_PROPERTY_SETTER;
        opcode = CBC_EXT_SET_SETTER;
        item_type = PARSER_OBJECT_PROPERTY_SETTER;
      }

      if (context_p->status_flags & PARSER_INSIDE_WITH)
      {
        status_flags |= PARSER_RESOLVE_THIS_FOR_CALLS;
      }

      /* the accessor's property name follows the get/set keyword */
      lexer_expect_object_literal_id (context_p, true);
      literal_index = context_p->lit_object.index;

      parser_append_object_literal_item (context_p, literal_index, item_type);

      parser_flush_cbc (context_p);
      lexer_construct_function_object (context_p, status_flags);

      /* emit the name literal, then rewrite the just-emitted PUSH_LITERAL
       * in place into the getter/setter ext opcode whose value operand is
       * the function literal constructed above */
      parser_emit_cbc_literal (context_p, CBC_PUSH_LITERAL, literal_index);

      JERRY_ASSERT (context_p->last_cbc_opcode == CBC_PUSH_LITERAL);
      context_p->last_cbc_opcode = PARSER_TO_EXT_OPCODE (opcode);
      context_p->last_cbc.value = (uint16_t) (context_p->literal_count - 1);

      lexer_next_token (context_p);
    }
    else
    {
      /* plain "name : expression" property */
      uint16_t literal_index = context_p->lit_object.index;

      parser_append_object_literal_item (context_p, literal_index, PARSER_OBJECT_PROPERTY_VALUE);

      lexer_next_token (context_p);
      if (context_p->token.type != LEXER_COLON)
      {
        parser_raise_error (context_p, PARSER_ERR_COLON_EXPECTED);
      }

      lexer_next_token (context_p);
      parser_parse_expression (context_p, PARSE_EXPR_NO_COMMA);

      parser_emit_cbc_literal (context_p, CBC_SET_PROPERTY, literal_index);
    }

    /* properties are separated by ',' and the literal ends at '}' */
    if (context_p->token.type == LEXER_RIGHT_BRACE)
    {
      break;
    }
    else if (context_p->token.type != LEXER_COMMA)
    {
      parser_raise_error (context_p, PARSER_ERR_OBJECT_ITEM_SEPARATOR_EXPECTED);
    }
  }

  /* pop the accumulated property items (3 bytes each — see
   * parser_append_object_literal_item) back down to the start marker */
  while (context_p->stack_top_uint8 != PARSER_OBJECT_PROPERTY_START)
  {
    parser_stack_pop (context_p, NULL, 3);
  }
  parser_stack_pop_uint8 (context_p);
} /* parser_parse_object_literal */