mp_lexer_t *mp_lexer_new(qstr src_name, void *stream_data, mp_lexer_stream_next_char_t stream_next_char, mp_lexer_stream_close_t stream_close) {
    mp_lexer_t *lex = m_new_maybe(mp_lexer_t, 1);

    // check for memory allocation error
    if (lex == NULL) {
        if (stream_close) {
            stream_close(stream_data);
        }
        return NULL;
    }

    lex->source_name = src_name;
    lex->stream_data = stream_data;
    lex->stream_next_char = stream_next_char;
    lex->stream_close = stream_close;
    lex->line = 1;
    lex->column = 1;
    lex->emit_dent = 0;
    lex->nested_bracket_level = 0;
    lex->alloc_indent_level = MICROPY_ALLOC_LEXER_INDENT_INIT;
    lex->num_indent_level = 1;
    lex->indent_level = m_new_maybe(uint16_t, lex->alloc_indent_level);
    vstr_init(&lex->vstr, 32);

    // check for memory allocation error
    if (lex->indent_level == NULL || vstr_had_error(&lex->vstr)) {
        mp_lexer_free(lex);
        return NULL;
    }

    // store sentinel for first indentation level
    lex->indent_level[0] = 0;

    // preload characters
    lex->chr0 = stream_next_char(stream_data);
    lex->chr1 = stream_next_char(stream_data);
    lex->chr2 = stream_next_char(stream_data);

    // if input stream is 0, 1 or 2 characters long and doesn't end in a newline, then insert a newline at the end
    if (lex->chr0 == MP_LEXER_CHAR_EOF) {
        lex->chr0 = '\n';
    } else if (lex->chr1 == MP_LEXER_CHAR_EOF) {
        if (lex->chr0 != '\n' && lex->chr0 != '\r') {
            lex->chr1 = '\n';
        }
    } else if (lex->chr2 == MP_LEXER_CHAR_EOF) {
        if (lex->chr1 != '\n' && lex->chr1 != '\r') {
            lex->chr2 = '\n';
        }
    }

    // preload first token
    mp_lexer_next_token_into(lex, &lex->tok_cur, true);

    return lex;
}
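// A minimal, self-contained sketch (plain C, not MicroPython API) of the same
// construction pattern used above: every allocation goes through an allocator
// that may return NULL, and on any failure the constructor releases whatever it
// already owns (including the caller-supplied stream) before returning NULL.
// All names here (buf_reader_t, buf_reader_new) are hypothetical.
#include <stdio.h>
#include <stdlib.h>

typedef struct {
    FILE *stream;
    char *buf;
    size_t buf_len;
} buf_reader_t;

buf_reader_t *buf_reader_new(FILE *stream, size_t buf_len) {
    buf_reader_t *r = malloc(sizeof(buf_reader_t));
    if (r == NULL) {
        // can't even hold the object: close the stream on behalf of the caller
        fclose(stream);
        return NULL;
    }
    r->stream = stream;
    r->buf_len = buf_len;
    r->buf = malloc(buf_len);
    if (r->buf == NULL) {
        // secondary allocation failed: undo everything acquired so far
        fclose(stream);
        free(r);
        return NULL;
    }
    return r;
}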
void mp_obj_exception_add_traceback(mp_obj_t self_in, qstr file, size_t line, qstr block) {
    GET_NATIVE_EXCEPTION(self, self_in);

    // append this traceback info to traceback data
    // if memory allocation fails (eg because gc is locked), just return
    if (self->traceback_data == NULL) {
        self->traceback_data = m_new_maybe(size_t, 3);
        if (self->traceback_data == NULL) {
            return;
        }
        self->traceback_alloc = 3;
        self->traceback_len = 0;
    } else if (self->traceback_len + 3 > self->traceback_alloc) {
        // be conservative with growing traceback data
        size_t *tb_data = m_renew_maybe(size_t, self->traceback_data, self->traceback_alloc, self->traceback_alloc + 3, true);
        if (tb_data == NULL) {
            return;
        }
        self->traceback_data = tb_data;
        self->traceback_alloc += 3;
    }

    size_t *tb_data = &self->traceback_data[self->traceback_len];
    self->traceback_len += 3;
    tb_data[0] = file;
    tb_data[1] = line;
    tb_data[2] = block;
}
mp_uint_t sdcard_write_blocks(const uint8_t *src, uint32_t block_num, uint32_t num_blocks) {
    // check that SD card is initialised
    if (sd_handle.Instance == NULL) {
        return HAL_ERROR;
    }

    HAL_StatusTypeDef err = HAL_OK;

    // check that src pointer is aligned on a 4-byte boundary
    if (((uint32_t)src & 3) != 0) {
        // pointer is not aligned, so allocate a temporary block to do the write
        uint8_t *src_aligned = m_new_maybe(uint8_t, SDCARD_BLOCK_SIZE);
        if (src_aligned == NULL) {
            return HAL_ERROR;
        }
        for (size_t i = 0; i < num_blocks; ++i) {
            memcpy(src_aligned, src + i * SDCARD_BLOCK_SIZE, SDCARD_BLOCK_SIZE);
            err = sdcard_write_blocks(src_aligned, block_num + i, 1);
            if (err != HAL_OK) {
                break;
            }
        }
        m_del(uint8_t, src_aligned, SDCARD_BLOCK_SIZE);
        return err;
    }

    if (query_irq() == IRQ_STATE_ENABLED) {
        // we must disable USB irqs to prevent MSC contention with SD card
        uint32_t basepri = raise_irq_pri(IRQ_PRI_OTG_FS);

        #if SDIO_USE_GPDMA
        dma_init(&sd_tx_dma, &SDMMC_TX_DMA, &sd_handle);
        sd_handle.hdmatx = &sd_tx_dma;
        #endif

        // make sure cache is flushed to RAM so the DMA can read the correct data
        MP_HAL_CLEAN_DCACHE(src, num_blocks * SDCARD_BLOCK_SIZE);

        err = HAL_SD_WriteBlocks_DMA(&sd_handle, (uint8_t*)src, block_num, num_blocks);
        if (err == HAL_OK) {
            err = sdcard_wait_finished(&sd_handle, 60000);
        }

        #if SDIO_USE_GPDMA
        dma_deinit(&SDMMC_TX_DMA);
        sd_handle.hdmatx = NULL;
        #endif

        restore_irq_pri(basepri);
    } else {
        err = HAL_SD_WriteBlocks(&sd_handle, (uint8_t*)src, block_num, num_blocks, 60000);
        if (err == HAL_OK) {
            err = sdcard_wait_finished(&sd_handle, 60000);
        }
    }

    return err;
}
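// A generic sketch (not the pyboard driver above) of the unaligned-source fallback
// it uses: when the caller's buffer doesn't meet the alignment the backend needs,
// copy each block through a properly allocated temporary and submit it one block
// at a time. write_one_block() and BLOCK_SIZE are hypothetical stand-ins.
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define BLOCK_SIZE 512

extern int write_one_block(const uint8_t *aligned_src, uint32_t block_num); // hypothetical backend

int write_blocks_bounce(const uint8_t *src, uint32_t block_num, uint32_t num_blocks) {
    if (((uintptr_t)src & 3) != 0) {
        uint8_t *tmp = malloc(BLOCK_SIZE); // malloc returns suitably aligned memory
        if (tmp == NULL) {
            return -1; // report failure rather than writing through an unaligned pointer
        }
        int err = 0;
        for (uint32_t i = 0; i < num_blocks; ++i) {
            memcpy(tmp, src + (size_t)i * BLOCK_SIZE, BLOCK_SIZE);
            err = write_one_block(tmp, block_num + i);
            if (err != 0) {
                break;
            }
        }
        free(tmp);
        return err;
    }
    // aligned case: hand the buffer to the backend directly (omitted in this sketch)
    return 0;
}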
mp_obj_t mp_obj_new_exception_msg_varg(const mp_obj_type_t *exc_type, const char *fmt, ...) {
    assert(fmt != NULL);

    // Check that the given type is an exception type
    assert(exc_type->make_new == mp_obj_exception_make_new);

    // Try to allocate memory for the message
    mp_obj_str_t *o_str = m_new_obj_maybe(mp_obj_str_t);
    size_t o_str_alloc = strlen(fmt) + 1;
    byte *o_str_buf = m_new_maybe(byte, o_str_alloc);

    bool used_emg_buf = false;
    #if MICROPY_ENABLE_EMERGENCY_EXCEPTION_BUF
    // If memory allocation failed and there is an emergency buffer then try to use
    // that buffer to store the string object and its data (at least 16 bytes for
    // the string data), reserving room at the start for the traceback and 1-tuple.
    if ((o_str == NULL || o_str_buf == NULL)
        && mp_emergency_exception_buf_size >= EMG_TRACEBACK_ALLOC * sizeof(size_t)
            + sizeof(mp_obj_tuple_t) + sizeof(mp_obj_t) + sizeof(mp_obj_str_t) + 16) {
        used_emg_buf = true;
        o_str = (mp_obj_str_t*)((uint8_t*)MP_STATE_VM(mp_emergency_exception_buf)
            + EMG_TRACEBACK_ALLOC * sizeof(size_t) + sizeof(mp_obj_tuple_t) + sizeof(mp_obj_t));
        o_str_buf = (byte*)&o_str[1];
        o_str_alloc = (uint8_t*)MP_STATE_VM(mp_emergency_exception_buf)
            + mp_emergency_exception_buf_size - o_str_buf;
    }
    #endif

    if (o_str == NULL) {
        // No memory for the string object so create the exception with no args
        return mp_obj_exception_make_new(exc_type, 0, 0, NULL);
    }

    if (o_str_buf == NULL) {
        // No memory for the string buffer: assume that the fmt string is in ROM
        // and use that data as the data of the string
        o_str->len = o_str_alloc - 1; // will be equal to strlen(fmt)
        o_str->data = (const byte*)fmt;
    } else {
        // We have some memory to format the string
        struct _exc_printer_t exc_pr = {!used_emg_buf, o_str_alloc, 0, o_str_buf};
        mp_print_t print = {&exc_pr, exc_add_strn};
        va_list ap;
        va_start(ap, fmt);
        mp_vprintf(&print, fmt, ap);
        va_end(ap);
        exc_pr.buf[exc_pr.len] = '\0';
        o_str->len = exc_pr.len;
        o_str->data = exc_pr.buf;
    }

    // Create the string object and call mp_obj_exception_make_new to create the exception
    o_str->base.type = &mp_type_str;
    o_str->hash = qstr_compute_hash(o_str->data, o_str->len);
    mp_obj_t arg = MP_OBJ_FROM_PTR(o_str);
    return mp_obj_exception_make_new(exc_type, 1, 0, &arg);
}
void mp_obj_exception_add_traceback(mp_obj_t self_in, qstr file, size_t line, qstr block) {
    GET_NATIVE_EXCEPTION(self, self_in);

    // append this traceback info to traceback data
    // if memory allocation fails (eg because gc is locked), just return
    if (self->traceback_data == NULL) {
        self->traceback_data = m_new_maybe(size_t, TRACEBACK_ENTRY_LEN);
        if (self->traceback_data == NULL) {
            #if MICROPY_ENABLE_EMERGENCY_EXCEPTION_BUF
            if (mp_emergency_exception_buf_size >= EMG_TRACEBACK_ALLOC * sizeof(size_t)) {
                // There is room in the emergency buffer for traceback data
                size_t *tb = (size_t*)MP_STATE_VM(mp_emergency_exception_buf);
                self->traceback_data = tb;
                self->traceback_alloc = EMG_TRACEBACK_ALLOC;
            } else {
                // Can't allocate and no room in emergency buffer
                return;
            }
            #else
            // Can't allocate
            return;
            #endif
        } else {
            // Allocated the traceback data on the heap
            self->traceback_alloc = TRACEBACK_ENTRY_LEN;
        }
        self->traceback_len = 0;
    } else if (self->traceback_len + TRACEBACK_ENTRY_LEN > self->traceback_alloc) {
        #if MICROPY_ENABLE_EMERGENCY_EXCEPTION_BUF
        if (self->traceback_data == (size_t*)MP_STATE_VM(mp_emergency_exception_buf)) {
            // Can't resize the emergency buffer
            return;
        }
        #endif
        // be conservative with growing traceback data
        size_t *tb_data = m_renew_maybe(size_t, self->traceback_data, self->traceback_alloc,
            self->traceback_alloc + TRACEBACK_ENTRY_LEN, true);
        if (tb_data == NULL) {
            return;
        }
        self->traceback_data = tb_data;
        self->traceback_alloc += TRACEBACK_ENTRY_LEN;
    }

    size_t *tb_data = &self->traceback_data[self->traceback_len];
    self->traceback_len += TRACEBACK_ENTRY_LEN;
    tb_data[0] = file;
    tb_data[1] = line;
    tb_data[2] = block;
}
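// A standalone sketch (plain C with malloc/realloc, not the MicroPython helpers
// above) of the same "grow by a fixed number of entries, and silently give up if
// memory is tight" strategy used for the traceback buffer. tb_t and tb_append are
// hypothetical names used only for illustration.
#include <stdlib.h>

#define TB_ENTRY_LEN 3

typedef struct {
    size_t *data;
    size_t len;   // number of size_t slots used
    size_t alloc; // number of size_t slots allocated
} tb_t;

void tb_append(tb_t *tb, size_t file, size_t line, size_t block) {
    if (tb->data == NULL) {
        tb->data = malloc(TB_ENTRY_LEN * sizeof(size_t));
        if (tb->data == NULL) {
            return; // allocation failed: drop this entry rather than aborting
        }
        tb->alloc = TB_ENTRY_LEN;
        tb->len = 0;
    } else if (tb->len + TB_ENTRY_LEN > tb->alloc) {
        // grow conservatively, one entry at a time, since tracebacks are usually short
        size_t *p = realloc(tb->data, (tb->alloc + TB_ENTRY_LEN) * sizeof(size_t));
        if (p == NULL) {
            return; // keep the existing entries, just don't add any more
        }
        tb->data = p;
        tb->alloc += TB_ENTRY_LEN;
    }
    tb->data[tb->len++] = file;
    tb->data[tb->len++] = line;
    tb->data[tb->len++] = block;
}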
mp_uint_t sdcard_write_blocks(const uint8_t *src, uint32_t block_num, uint32_t num_blocks) {
    // check that SD card is initialised
    if (sd_handle.Instance == NULL) {
        return SD_ERROR;
    }

    HAL_SD_ErrorTypedef err = SD_OK;

    // check that src pointer is aligned on a 4-byte boundary
    if (((uint32_t)src & 3) != 0) {
        // pointer is not aligned, so allocate a temporary block to do the write
        uint8_t *src_aligned = m_new_maybe(uint8_t, SDCARD_BLOCK_SIZE);
        if (src_aligned == NULL) {
            return SD_ERROR;
        }
        for (size_t i = 0; i < num_blocks; ++i) {
            memcpy(src_aligned, src + i * SDCARD_BLOCK_SIZE, SDCARD_BLOCK_SIZE);
            err = sdcard_write_blocks(src_aligned, block_num + i, 1);
            if (err != SD_OK) {
                break;
            }
        }
        m_del(uint8_t, src_aligned, SDCARD_BLOCK_SIZE);
        return err;
    }

    if (query_irq() == IRQ_STATE_ENABLED) {
        // we must disable USB irqs to prevent MSC contention with SD card
        uint32_t basepri = raise_irq_pri(IRQ_PRI_OTG_FS);

        dma_init(&sd_tx_dma, &dma_SDIO_0_TX, &sd_handle);
        sd_handle.hdmatx = &sd_tx_dma;

        err = HAL_SD_WriteBlocks_BlockNumber_DMA(&sd_handle, (uint32_t*)src, block_num, SDCARD_BLOCK_SIZE, num_blocks);
        if (err == SD_OK) {
            // wait for DMA transfer to finish, with a large timeout
            err = HAL_SD_CheckWriteOperation(&sd_handle, 100000000);
        }

        dma_deinit(&dma_SDIO_0_TX);
        sd_handle.hdmatx = NULL;

        restore_irq_pri(basepri);
    } else {
        err = HAL_SD_WriteBlocks_BlockNumber(&sd_handle, (uint32_t*)src, block_num, SDCARD_BLOCK_SIZE, num_blocks);
    }

    return err;
}
mp_parse_tree_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind) {
    // initialise parser and allocate memory for its stacks
    parser_t parser;

    parser.parse_error = PARSE_ERROR_NONE;

    parser.rule_stack_alloc = MICROPY_ALLOC_PARSE_RULE_INIT;
    parser.rule_stack_top = 0;
    parser.rule_stack = m_new_maybe(rule_stack_t, parser.rule_stack_alloc);

    parser.result_stack_alloc = MICROPY_ALLOC_PARSE_RESULT_INIT;
    parser.result_stack_top = 0;
    parser.result_stack = m_new_maybe(mp_parse_node_t, parser.result_stack_alloc);

    parser.lexer = lex;

    parser.tree.chunk = NULL;
    parser.cur_chunk = NULL;

    #if MICROPY_COMP_CONST
    mp_map_init(&parser.consts, 0);
    #endif

    // check if we could allocate the stacks
    if (parser.rule_stack == NULL || parser.result_stack == NULL) {
        goto memory_error;
    }

    // work out the top-level rule to use, and push it on the stack
    size_t top_level_rule;
    switch (input_kind) {
        case MP_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        case MP_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(&parser, lex->tok_line, rules[top_level_rule], 0);

    // parse!

    size_t n, i; // state for the current rule
    size_t rule_src_line; // source line for the first token matched by the current rule
    bool backtrack = false;
    const rule_t *rule = NULL;

    for (;;) {
    next_rule:
        if (parser.rule_stack_top == 0 || parser.parse_error) {
            break;
        }

        pop_rule(&parser, &rule, &i, &rule_src_line);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser.rule_stack_top);
        for (int j = 0; j < parser.rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n; ++i) {
                    uint16_t kind = rule->arg[i] & RULE_ARG_KIND_MASK;
                    if (kind == RULE_ARG_TOK) {
                        if (lex->tok_kind == (rule->arg[i] & RULE_ARG_ARG_MASK)) {
                            push_result_token(&parser);
                            mp_lexer_to_next(lex);
                            goto next_rule;
                        }
                    } else {
                        assert(kind == RULE_ARG_RULE);
                        if (i + 1 < n) {
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this or-rule
                        }
                        push_rule_from_arg(&parser, rule->arg[i]); // push child of or-rule
                        goto next_rule;
                    }
                }
                backtrack = true;
                break;

            case RULE_ACT_AND: {

                // failed, backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK: {
                            // need to match a token
                            mp_token_kind_t tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                            if (lex->tok_kind == tok_kind) {
                                // matched token
                                if (tok_kind == MP_TOKEN_NAME) {
                                    push_result_token(&parser);
                                }
                                mp_lexer_to_next(lex);
                            } else {
                                // failed to match token
                                if (i > 0) {
                                    // already eaten tokens so can't backtrack
                                    goto syntax_error;
                                } else {
                                    // this rule failed, so backtrack
                                    backtrack = true;
                                    goto next_rule;
                                }
                            }
                            break;
                        }
                        case RULE_ARG_RULE:
                        case RULE_ARG_OPT_RULE:
                        rule_and_no_other_choice:
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this and-rule
                            push_rule_from_arg(&parser, rule->arg[i]); // push child of and-rule
                            goto next_rule;
                        default:
                            assert(0);
                            goto rule_and_no_other_choice; // to help flow control analysis
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                #if !MICROPY_ENABLE_DOC_STRING
                // this code discards lonely statements, such as doc strings
                if (input_kind != MP_PARSE_SINGLE_INPUT && rule->rule_id == RULE_expr_stmt && peek_result(&parser, 0) == MP_PARSE_NODE_NULL) {
                    mp_parse_node_t p = peek_result(&parser, 1);
                    if ((MP_PARSE_NODE_IS_LEAF(p) && !MP_PARSE_NODE_IS_ID(p))
                        || MP_PARSE_NODE_IS_STRUCT_KIND(p, RULE_string)) {
                        pop_result(&parser); // MP_PARSE_NODE_NULL
                        mp_parse_node_t pn = pop_result(&parser); // possibly RULE_string
                        if (MP_PARSE_NODE_IS_STRUCT(pn)) {
                            mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn;
                            if (MP_PARSE_NODE_STRUCT_KIND(pns) == RULE_string) {
                                m_del(char, (char*)pns->nodes[0], (size_t)pns->nodes[1]);
                            }
                        }
                        push_result_rule(&parser, rule_src_line, rules[RULE_pass_stmt], 0);
                        break;
                    }
                }
                #endif

                // count number of arguments for the parse node
                i = 0;
                size_t num_not_nil = 0;
                for (size_t x = n; x > 0;) {
                    --x;
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        mp_token_kind_t tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind == MP_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                            num_not_nil += 1;
                        }
                    } else {
                        // rules are always pushed
                        if (peek_result(&parser, i) != MP_PARSE_NODE_NULL) {
                            num_not_nil += 1;
                        }
                        i += 1;
                    }
                }

                if (num_not_nil == 1 && (rule->act & RULE_ACT_ALLOW_IDENT)) {
                    // this rule has only 1 argument and should not be emitted
                    mp_parse_node_t pn = MP_PARSE_NODE_NULL;
                    for (size_t x = 0; x < i; ++x) {
                        mp_parse_node_t pn2 = pop_result(&parser);
                        if (pn2 != MP_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(&parser, pn);
                } else {
                    // this rule must be emitted
                    if (rule->act & RULE_ACT_ADD_BLANK) {
                        // and add an extra blank node at the end (used by the compiler to store data)
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        i += 1;
                    }
                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;
            }
mp_parse_node_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind, mp_parse_error_kind_t *parse_error_kind_out) {
    // initialise parser and allocate memory for its stacks
    parser_t parser;

    parser.had_memory_error = false;

    parser.rule_stack_alloc = MICROPY_ALLOC_PARSE_RULE_INIT;
    parser.rule_stack_top = 0;
    parser.rule_stack = m_new_maybe(rule_stack_t, parser.rule_stack_alloc);

    parser.result_stack_alloc = MICROPY_ALLOC_PARSE_RESULT_INIT;
    parser.result_stack_top = 0;
    parser.result_stack = m_new_maybe(mp_parse_node_t, parser.result_stack_alloc);

    parser.lexer = lex;

    // check if we could allocate the stacks
    if (parser.rule_stack == NULL || parser.result_stack == NULL) {
        goto memory_error;
    }

    // work out the top-level rule to use, and push it on the stack
    int top_level_rule;
    switch (input_kind) {
        case MP_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        case MP_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(&parser, mp_lexer_cur(lex)->src_line, rules[top_level_rule], 0);

    // parse!

    uint n, i; // state for the current rule
    uint rule_src_line; // source line for the first token matched by the current rule
    bool backtrack = false;
    const rule_t *rule = NULL;
    mp_token_kind_t tok_kind;
    bool emit_rule;
    bool had_trailing_sep;

    for (;;) {
    next_rule:
        if (parser.rule_stack_top == 0 || parser.had_memory_error) {
            break;
        }

        pop_rule(&parser, &rule, &i, &rule_src_line);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser.rule_stack_top);
        for (int j = 0; j < parser.rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n - 1; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            if (mp_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                                push_result_token(&parser, lex);
                                mp_lexer_to_next(lex);
                                goto next_rule;
                            }
                            break;
                        case RULE_ARG_RULE:
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this or-rule
                            push_rule_from_arg(&parser, rule->arg[i]); // push child of or-rule
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }
                if ((rule->arg[i] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    if (mp_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                        push_result_token(&parser, lex);
                        mp_lexer_to_next(lex);
                    } else {
                        backtrack = true;
                        goto next_rule;
                    }
                } else {
                    push_rule_from_arg(&parser, rule->arg[i]);
                }
                break;

            case RULE_ACT_AND:

                // failed, backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            // need to match a token
                            tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                            if (mp_lexer_is_kind(lex, tok_kind)) {
                                // matched token
                                if (tok_kind == MP_TOKEN_NAME) {
                                    push_result_token(&parser, lex);
                                }
                                mp_lexer_to_next(lex);
                            } else {
                                // failed to match token
                                if (i > 0) {
                                    // already eaten tokens so can't backtrack
                                    goto syntax_error;
                                } else {
                                    // this rule failed, so backtrack
                                    backtrack = true;
                                    goto next_rule;
                                }
                            }
                            break;
                        case RULE_ARG_RULE:
                        case RULE_ARG_OPT_RULE:
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this and-rule
                            push_rule_from_arg(&parser, rule->arg[i]); // push child of and-rule
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                // count number of arguments for the parse_node
                i = 0;
                emit_rule = false;
                for (int x = 0; x < n; ++x) {
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind >= MP_TOKEN_NAME) {
                            emit_rule = true;
                        }
                        if (tok_kind == MP_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                        }
                    } else {
                        // rules are always pushed
                        i += 1;
                    }
                }

                #if !MICROPY_EMIT_CPYTHON && !MICROPY_ENABLE_DOC_STRING
                // this code discards lonely statements, such as doc strings
                if (input_kind != MP_PARSE_SINGLE_INPUT && rule->rule_id == RULE_expr_stmt && peek_result(&parser, 0) == MP_PARSE_NODE_NULL) {
                    mp_parse_node_t p = peek_result(&parser, 1);
                    if ((MP_PARSE_NODE_IS_LEAF(p) && !MP_PARSE_NODE_IS_ID(p)) || MP_PARSE_NODE_IS_STRUCT_KIND(p, RULE_string)) {
                        pop_result(&parser);
                        pop_result(&parser);
                        push_result_rule(&parser, rule_src_line, rules[RULE_pass_stmt], 0);
                        break;
                    }
                }
                #endif

                // always emit these rules, even if they have only 1 argument
                if (rule->rule_id == RULE_expr_stmt || rule->rule_id == RULE_yield_stmt) {
                    emit_rule = true;
                }

                // never emit these rules if they have only 1 argument
                // NOTE: can't put atom_paren here because we need it to distinguish, for example, [a,b] from [(a,b)]
                // TODO possibly put varargslist_name, varargslist_equal here as well
                if (rule->rule_id == RULE_else_stmt || rule->rule_id == RULE_testlist_comp_3b
                    || rule->rule_id == RULE_import_as_names_paren || rule->rule_id == RULE_typedargslist_name
                    || rule->rule_id == RULE_typedargslist_colon || rule->rule_id == RULE_typedargslist_equal
                    || rule->rule_id == RULE_dictorsetmaker_colon || rule->rule_id == RULE_classdef_2
                    || rule->rule_id == RULE_with_item_as || rule->rule_id == RULE_assert_stmt_extra
                    || rule->rule_id == RULE_as_name || rule->rule_id == RULE_raise_stmt_from
                    || rule->rule_id == RULE_vfpdef) {
                    emit_rule = false;
                }

                // always emit these rules, and add an extra blank node at the end (to be used by the compiler to store data)
                if (ADD_BLANK_NODE(rule->rule_id)) {
                    emit_rule = true;
                    push_result_node(&parser, MP_PARSE_NODE_NULL);
                    i += 1;
                }

                int num_not_nil = 0;
                for (int x = 0; x < i; ++x) {
                    if (peek_result(&parser, x) != MP_PARSE_NODE_NULL) {
                        num_not_nil += 1;
                    }
                }
                //printf("done and %s n=%d i=%d notnil=%d\n", rule->rule_name, n, i, num_not_nil);
                if (emit_rule) {
                    push_result_rule(&parser, rule_src_line, rule, i);
                } else if (num_not_nil == 0) {
                    push_result_rule(&parser, rule_src_line, rule, i); // needed for, eg, atom_paren, testlist_comp_3b
                    //result_stack_show(parser);
                    //assert(0);
                } else if (num_not_nil == 1) {
                    // single result, leave it on stack
                    mp_parse_node_t pn = MP_PARSE_NODE_NULL;
                    for (int x = 0; x < i; ++x) {
                        mp_parse_node_t pn2 = pop_result(&parser);
                        if (pn2 != MP_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(&parser, pn);
                } else {
                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;

            case RULE_ACT_LIST:
                // n=2 is: item item*
                // n=1 is: item (sep item)*
                // n=3 is: item (sep item)* [sep]
                if (backtrack) {
                list_backtrack:
                    had_trailing_sep = false;
                    if (n == 2) {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else {
                            // fail on item, in later rounds; finish with this rule
                            backtrack = false;
                        }
                    } else {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else if ((i & 1) == 1) {
                            // fail on item, in later rounds; have eaten tokens so can't backtrack
                            if (n == 3) {
                                // list allows trailing separator; finish parsing list
                                had_trailing_sep = true;
                                backtrack = false;
                            } else {
                                // list doesn't allow a trailing separator; fail
                                goto syntax_error;
                            }
                        } else {
                            // fail on separator; finish parsing list
                            backtrack = false;
                        }
                    }
                } else {
                    for (;;) {
                        uint arg = rule->arg[i & 1 & n];
                        switch (arg & RULE_ARG_KIND_MASK) {
                            case RULE_ARG_TOK:
                                if (mp_lexer_is_kind(lex, arg & RULE_ARG_ARG_MASK)) {
                                    if (i & 1 & n) {
                                        // separators which are tokens are not pushed to result stack
                                    } else {
                                        push_result_token(&parser, lex);
                                    }
                                    mp_lexer_to_next(lex);
                                    // got element of list, so continue parsing list
                                    i += 1;
                                } else {
                                    // couldn't get element of list
                                    i += 1;
                                    backtrack = true;
                                    goto list_backtrack;
                                }
                                break;
                            case RULE_ARG_RULE:
                                push_rule(&parser, rule_src_line, rule, i + 1); // save this list-rule
                                push_rule_from_arg(&parser, arg); // push child of list-rule
                                goto next_rule;
                            default:
                                assert(0);
                        }
                    }
                }
                assert(i >= 1);

                // compute number of elements in list, result in i
                i -= 1;
                if ((n & 1) && (rule->arg[1] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    // don't count separators when they are tokens
                    i = (i + 1) / 2;
                }

                if (i == 1) {
                    // list matched single item
                    if (had_trailing_sep) {
                        // if there was a trailing separator, make a list of a single item
                        push_result_rule(&parser, rule_src_line, rule, i);
                    } else {
                        // just leave single item on stack (ie don't wrap in a list)
                    }
                } else {
                    //printf("done list %s %d %d\n", rule->rule_name, n, i);
                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;

            default:
                assert(0);
        }
    }

    mp_parse_node_t result;

    // check if we had a memory error
    if (parser.had_memory_error) {
memory_error:
        *parse_error_kind_out = MP_PARSE_ERROR_MEMORY;
        result = MP_PARSE_NODE_NULL;
        goto finished;
    }

    // check we are at the end of the token stream
    if (!mp_lexer_is_kind(lex, MP_TOKEN_END)) {
        goto syntax_error;
    }

    //printf("--------------\n");
    //result_stack_show(parser);
    //printf("rule stack alloc: %d\n", parser.rule_stack_alloc);
    //printf("result stack alloc: %d\n", parser.result_stack_alloc);
    //printf("number of parse nodes allocated: %d\n", num_parse_nodes_allocated);

    // get the root parse node that we created
    assert(parser.result_stack_top == 1);
    result = parser.result_stack[0];

finished:
    // free the memory that we don't need anymore
    m_del(rule_stack_t, parser.rule_stack, parser.rule_stack_alloc);
    m_del(mp_parse_node_t, parser.result_stack, parser.result_stack_alloc);
    // return the result
    return result;

syntax_error:
    if (mp_lexer_is_kind(lex, MP_TOKEN_INDENT)) {
        *parse_error_kind_out = MP_PARSE_ERROR_UNEXPECTED_INDENT;
    } else if (mp_lexer_is_kind(lex, MP_TOKEN_DEDENT_MISMATCH)) {
        *parse_error_kind_out = MP_PARSE_ERROR_UNMATCHED_UNINDENT;
    } else {
        *parse_error_kind_out = MP_PARSE_ERROR_INVALID_SYNTAX;
        #ifdef USE_RULE_NAME
        // debugging: print the rule name that failed and the token
        printf("rule: %s\n", rule->rule_name);
        #if MICROPY_DEBUG_PRINTERS
        mp_token_show(mp_lexer_cur(lex));
        #endif
        #endif
    }
    result = MP_PARSE_NODE_NULL;
    goto finished;
}