Example 1
0
static void push_result_rule(parser_t *parser, const rule_t *rule, int num_args) {
    mp_parse_node_struct_t *pn = parse_node_new_struct(rule->rule_id, num_args);
    for (int i = num_args; i > 0; i--) {
        pn->nodes[i - 1] = pop_result(parser);
    }
    push_result_node(parser, (mp_parse_node_t)pn);
}
Example 2
0
STATIC void push_result_rule(parser_t *parser, size_t src_line, const rule_t *rule, size_t num_args) {
    // optimise away parenthesis around an expression if possible
    if (rule->rule_id == RULE_atom_paren) {
        // there should be just 1 arg for this rule
        mp_parse_node_t pn = peek_result(parser, 0);
        if (MP_PARSE_NODE_IS_NULL(pn)) {
            // need to keep parenthesis for ()
        } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, RULE_testlist_comp)) {
            // need to keep parenthesis for (a, b, ...)
        } else {
            // parenthesis around a single expression, so it's just the expression
            return;
        }
    }

    #if MICROPY_COMP_CONST_FOLDING
    if (fold_logical_constants(parser, rule, &num_args)) {
        // we folded this rule so return straight away
        return;
    }
    if (fold_constants(parser, rule, num_args)) {
        // we folded this rule so return straight away
        return;
    }
    #endif

    mp_parse_node_struct_t *pn = parser_alloc(parser, sizeof(mp_parse_node_struct_t) + sizeof(mp_parse_node_t) * num_args);
    pn->source_line = src_line;
    pn->kind_num_nodes = (rule->rule_id & 0xff) | (num_args << 8);
    for (size_t i = num_args; i > 0; i--) {
        pn->nodes[i - 1] = pop_result(parser);
    }
    push_result_node(parser, (mp_parse_node_t)pn);
}
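
The kind_num_nodes packing used in Example 2 stores the rule id in the low 8 bits and the argument count above it. As a point of reference, here is a minimal self-contained sketch of the same packing and unpacking on its own; pack_kind_num_nodes is a hypothetical helper for illustration, not MicroPython API:

#include <stdint.h>
#include <stdio.h>

// Hypothetical stand-in for the kind_num_nodes field: low 8 bits hold the
// rule id, the remaining bits hold the number of child nodes.
static uint32_t pack_kind_num_nodes(uint8_t rule_id, uint32_t num_args) {
    return (uint32_t)rule_id | (num_args << 8);
}

int main(void) {
    uint32_t kind_num_nodes = pack_kind_num_nodes(42, 3);
    // unpack exactly as the parser does: kind = & 0xff, count = >> 8
    printf("rule id = %u, num args = %u\n",
        (unsigned)(kind_num_nodes & 0xff), (unsigned)(kind_num_nodes >> 8));
    return 0;
}
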
Example 3
0
STATIC void push_result_token(parser_t *parser, const rule_t *rule) {
    mp_parse_node_t pn;
    mp_lexer_t *lex = parser->lexer;
    if (lex->tok_kind == MP_TOKEN_NAME) {
        qstr id = qstr_from_strn(lex->vstr.buf, lex->vstr.len);
        #if MICROPY_COMP_CONST
        // if name is a standalone identifier, look it up in the table of dynamic constants
        mp_map_elem_t *elem;
        if (rule->rule_id == RULE_atom
            && (elem = mp_map_lookup(&parser->consts, MP_OBJ_NEW_QSTR(id), MP_MAP_LOOKUP)) != NULL) {
            if (MP_OBJ_IS_SMALL_INT(elem->value)) {
                pn = mp_parse_node_new_small_int(MP_OBJ_SMALL_INT_VALUE(elem->value));
            } else {
                pn = make_node_const_object(parser, lex->tok_line, elem->value);
            }
        } else {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_ID, id);
        }
        #else
        (void)rule;
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_ID, id);
        #endif
    } else if (lex->tok_kind == MP_TOKEN_INTEGER) {
        mp_obj_t o = mp_parse_num_integer(lex->vstr.buf, lex->vstr.len, 0, lex);
        if (MP_OBJ_IS_SMALL_INT(o)) {
            pn = mp_parse_node_new_small_int(MP_OBJ_SMALL_INT_VALUE(o));
        } else {
            pn = make_node_const_object(parser, lex->tok_line, o);
        }
    } else if (lex->tok_kind == MP_TOKEN_FLOAT_OR_IMAG) {
        mp_obj_t o = mp_parse_num_decimal(lex->vstr.buf, lex->vstr.len, true, false, lex);
        pn = make_node_const_object(parser, lex->tok_line, o);
    } else if (lex->tok_kind == MP_TOKEN_STRING || lex->tok_kind == MP_TOKEN_BYTES) {
        // Don't automatically intern all strings/bytes. Doc strings (which are usually large)
        // will be discarded by the compiler, so we shouldn't intern them.
        qstr qst = MP_QSTR_NULL;
        if (lex->vstr.len <= MICROPY_ALLOC_PARSE_INTERN_STRING_LEN) {
            // intern short strings
            qst = qstr_from_strn(lex->vstr.buf, lex->vstr.len);
        } else {
            // check if this string is already interned
            qst = qstr_find_strn(lex->vstr.buf, lex->vstr.len);
        }
        if (qst != MP_QSTR_NULL) {
            // qstr exists, make a leaf node
            pn = mp_parse_node_new_leaf(lex->tok_kind == MP_TOKEN_STRING ? MP_PARSE_NODE_STRING : MP_PARSE_NODE_BYTES, qst);
        } else {
            // not interned, make a node holding a pointer to the string/bytes object
            mp_obj_t o = mp_obj_new_str_of_type(
                lex->tok_kind == MP_TOKEN_STRING ? &mp_type_str : &mp_type_bytes,
                (const byte*)lex->vstr.buf, lex->vstr.len);
            pn = make_node_const_object(parser, lex->tok_line, o);
        }
    } else {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_TOKEN, lex->tok_kind);
    }
    push_result_node(parser, pn);
}
Example 4
0
STATIC bool fold_logical_constants(parser_t *parser, const rule_t *rule, size_t *num_args) {
    if (rule->rule_id == RULE_or_test
        || rule->rule_id == RULE_and_test) {
        // folding for binary logical ops: or and
        size_t copy_to = *num_args;
        for (size_t i = copy_to; i > 0;) {
            mp_parse_node_t pn = peek_result(parser, --i);
            parser->result_stack[parser->result_stack_top - copy_to] = pn;
            if (i == 0) {
                // always need to keep the last value
                break;
            }
            if (rule->rule_id == RULE_or_test) {
                if (mp_parse_node_is_const_true(pn)) {
                    // a constant true short-circuits the chain; the remaining operands are discarded
                    break;
                } else if (!mp_parse_node_is_const_false(pn)) {
                    copy_to -= 1;
                }
            } else {
                // RULE_and_test
                if (mp_parse_node_is_const_false(pn)) {
                    break;
                } else if (!mp_parse_node_is_const_true(pn)) {
                    copy_to -= 1;
                }
            }
        }
        copy_to -= 1; // copy_to now contains number of args to pop

        // pop and discard all the short-circuited expressions
        for (size_t i = 0; i < copy_to; ++i) {
            pop_result(parser);
        }
        *num_args -= copy_to;

        // we did a complete folding if there's only 1 arg left
        return *num_args == 1;

    } else if (rule->rule_id == RULE_not_test_2) {
        // folding for unary logical op: not
        mp_parse_node_t pn = peek_result(parser, 0);
        if (mp_parse_node_is_const_false(pn)) {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_TOKEN, MP_TOKEN_KW_TRUE);
        } else if (mp_parse_node_is_const_true(pn)) {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_TOKEN, MP_TOKEN_KW_FALSE);
        } else {
            return false;
        }
        pop_result(parser);
        push_result_node(parser, pn);
        return true;
    }

    return false;
}
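
To make the short-circuit compaction in Example 4 easier to follow in isolation, here is a minimal sketch of the same idea applied to a plain array instead of the parser's result stack; operand_t and fold_or_chain are hypothetical illustrations, not MicroPython code:

#include <stddef.h>
#include <stdio.h>

// Hypothetical tri-state operand: a stand-in for "constant false",
// "constant true" and "not a constant" parse nodes.
typedef enum { OP_FALSE, OP_TRUE, OP_UNKNOWN } operand_t;

// Compact the operands of an "or" chain in place, mirroring the idea in
// fold_logical_constants above: constant-false operands are dropped (they
// can never decide the result, unless they are the last operand), and
// everything after a constant-true operand is dropped (it would never be
// evaluated).  Returns the number of operands kept; 1 means a complete fold.
static size_t fold_or_chain(operand_t *ops, size_t n) {
    size_t kept = 0;
    for (size_t i = 0; i < n; ++i) {
        if (ops[i] == OP_FALSE && i + 1 < n) {
            continue; // drop a constant false that isn't the last operand
        }
        ops[kept++] = ops[i];
        if (ops[i] == OP_TRUE) {
            break; // short-circuit: the remaining operands are unreachable
        }
    }
    return kept;
}

int main(void) {
    operand_t ops[] = { OP_FALSE, OP_UNKNOWN, OP_TRUE, OP_UNKNOWN };
    printf("%u operands kept\n", (unsigned)fold_or_chain(ops, 4)); // 2: UNKNOWN, TRUE
    return 0;
}
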
Example 5
0
STATIC void push_result_rule(parser_t *parser, int src_line, const rule_t *rule, int num_args) {
    mp_parse_node_struct_t *pn = m_new_obj_var_maybe(mp_parse_node_struct_t, mp_parse_node_t, num_args);
    if (pn == NULL) {
        memory_error(parser);
        return;
    }
    pn->source_line = src_line;
    pn->kind_num_nodes = (rule->rule_id & 0xff) | (num_args << 8);
    for (int i = num_args; i > 0; i--) {
        pn->nodes[i - 1] = pop_result(parser);
    }
    push_result_node(parser, (mp_parse_node_t)pn);
}
Example 6
0
STATIC void push_result_string(parser_t *parser, int src_line, const char *str, uint len) {
    mp_parse_node_struct_t *pn = m_new_obj_var_maybe(mp_parse_node_struct_t, mp_parse_node_t, 2);
    if (pn == NULL) {
        memory_error(parser);
        return;
    }
    pn->source_line = src_line;
    pn->kind_num_nodes = RULE_string | (2 << 8);
    char *p = m_new(char, len);
    memcpy(p, str, len);
    pn->nodes[0] = (machine_int_t)p;
    pn->nodes[1] = len;
    push_result_node(parser, (mp_parse_node_t)pn);
}
Example 7
0
mp_parse_node_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind) {

    // allocate memory for the parser and its stacks

    parser_t *parser = m_new_obj(parser_t);

    parser->rule_stack_alloc = 64;
    parser->rule_stack_top = 0;
    parser->rule_stack = m_new(rule_stack_t, parser->rule_stack_alloc);

    parser->result_stack_alloc = 64;
    parser->result_stack_top = 0;
    parser->result_stack = m_new(mp_parse_node_t, parser->result_stack_alloc);

    // work out the top-level rule to use, and push it on the stack
    int top_level_rule;
    switch (input_kind) {
        case MP_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        //case MP_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(parser, rules[top_level_rule], 0);

    // parse!

    uint n, i;
    bool backtrack = false;
    const rule_t *rule;
    mp_token_kind_t tok_kind;
    bool emit_rule;
    bool had_trailing_sep;

    for (;;) {
        next_rule:
        if (parser->rule_stack_top == 0) {
            break;
        }

        pop_rule(parser, &rule, &i);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser->rule_stack_top);
        for (int j = 0; j < parser->rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n - 1; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            if (mp_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                                push_result_token(parser, lex);
                                mp_lexer_to_next(lex);
                                goto next_rule;
                            }
                            break;
                        case RULE_ARG_RULE:
                            push_rule(parser, rule, i + 1);
                            push_rule_from_arg(parser, rule->arg[i]);
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }
                if ((rule->arg[i] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    if (mp_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                        push_result_token(parser, lex);
                        mp_lexer_to_next(lex);
                    } else {
                        backtrack = true;
                        goto next_rule;
                    }
                } else {
                    push_rule_from_arg(parser, rule->arg[i]);
                }
                break;

            case RULE_ACT_AND:

                // failed, backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(parser, MP_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            // need to match a token
                            tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                            if (mp_lexer_is_kind(lex, tok_kind)) {
                                // matched token
                                if (tok_kind == MP_TOKEN_NAME) {
                                    push_result_token(parser, lex);
                                }
                                mp_lexer_to_next(lex);
                            } else {
                                // failed to match token
                                if (i > 0) {
                                    // already eaten tokens so can't backtrack
                                    goto syntax_error;
                                } else {
                                    // this rule failed, so backtrack
                                    backtrack = true;
                                    goto next_rule;
                                }
                            }
                            break;
                        case RULE_ARG_RULE:
                            //if (i + 1 < n) {
                                push_rule(parser, rule, i + 1);
                            //}
                            push_rule_from_arg(parser, rule->arg[i]);
                            goto next_rule;
                        case RULE_ARG_OPT_RULE:
                            push_rule(parser, rule, i + 1);
                            push_rule_from_arg(parser, rule->arg[i]);
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                // count number of arguments for the parse_node
                i = 0;
                emit_rule = false;
                for (int x = 0; x < n; ++x) {
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind >= MP_TOKEN_NAME) {
                            emit_rule = true;
                        }
                        if (tok_kind == MP_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                        }
                    } else {
                        // rules are always pushed
                        i += 1;
                    }
                }

                // always emit these rules, even if they have only 1 argument
                if (rule->rule_id == RULE_expr_stmt || rule->rule_id == RULE_yield_stmt) {
                    emit_rule = true;
                }

                // never emit these rules if they have only 1 argument
                // NOTE: can't put atom_paren here because we need it to distinguish, for example, [a,b] from [(a,b)]
                // TODO possibly put varargslist_name, varargslist_equal here as well
                if (rule->rule_id == RULE_else_stmt || rule->rule_id == RULE_testlist_comp_3b
                    || rule->rule_id == RULE_import_as_names_paren || rule->rule_id == RULE_typedargslist_name
                    || rule->rule_id == RULE_typedargslist_colon || rule->rule_id == RULE_typedargslist_equal
                    || rule->rule_id == RULE_dictorsetmaker_colon || rule->rule_id == RULE_classdef_2
                    || rule->rule_id == RULE_with_item_as || rule->rule_id == RULE_assert_stmt_extra
                    || rule->rule_id == RULE_as_name || rule->rule_id == RULE_raise_stmt_from
                    || rule->rule_id == RULE_vfpdef) {
                    emit_rule = false;
                }

                // always emit these rules, and add an extra blank node at the end (to be used by the compiler to store data)
                if (rule->rule_id == RULE_funcdef || rule->rule_id == RULE_classdef
                    || rule->rule_id == RULE_comp_for || rule->rule_id == RULE_lambdef
                    || rule->rule_id == RULE_lambdef_nocond) {
                    emit_rule = true;
                    push_result_node(parser, MP_PARSE_NODE_NULL);
                    i += 1;
                }

                int num_not_nil = 0;
                for (int x = 0; x < i; ++x) {
                    if (peek_result(parser, x) != MP_PARSE_NODE_NULL) {
                        num_not_nil += 1;
                    }
                }
                //printf("done and %s n=%d i=%d notnil=%d\n", rule->rule_name, n, i, num_not_nil);
                if (emit_rule) {
                    push_result_rule(parser, rule, i);
                } else if (num_not_nil == 0) {
                    push_result_rule(parser, rule, i); // needed for, eg, atom_paren, testlist_comp_3b
                    //result_stack_show(parser);
                    //assert(0);
                } else if (num_not_nil == 1) {
                    // single result, leave it on stack
                    mp_parse_node_t pn = MP_PARSE_NODE_NULL;
                    for (int x = 0; x < i; ++x) {
                        mp_parse_node_t pn2 = pop_result(parser);
                        if (pn2 != MP_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(parser, pn);
                } else {
                    push_result_rule(parser, rule, i);
                }
                break;

            case RULE_ACT_LIST:
                // n=2 is: item item*
                // n=1 is: item (sep item)*
                // n=3 is: item (sep item)* [sep]
                if (backtrack) {
                    list_backtrack:
                    had_trailing_sep = false;
                    if (n == 2) {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else {
                            // fail on item, in later rounds; finish with this rule
                            backtrack = false;
                        }
                    } else {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else if ((i & 1) == 1) {
                            // fail on item, in later rounds; have eaten tokens so can't backtrack
                            if (n == 3) {
                                // list allows trailing separator; finish parsing list
                                had_trailing_sep = true;
                                backtrack = false;
                            } else {
                                // list doesn't allow a trailing separator; fail
                                goto syntax_error;
                            }
                        } else {
                            // fail on separator; finish parsing list
                            backtrack = false;
                        }
                    }
                } else {
                    for (;;) {
                        uint arg = rule->arg[i & 1 & n];
                        switch (arg & RULE_ARG_KIND_MASK) {
                            case RULE_ARG_TOK:
                                if (mp_lexer_is_kind(lex, arg & RULE_ARG_ARG_MASK)) {
                                    if (i & 1 & n) {
                                        // separators which are tokens are not pushed to result stack
                                    } else {
                                        push_result_token(parser, lex);
                                    }
                                    mp_lexer_to_next(lex);
                                    // got element of list, so continue parsing list
                                    i += 1;
                                } else {
                                    // couldn't get element of list
                                    i += 1;
                                    backtrack = true;
                                    goto list_backtrack;
                                }
                                break;
                            case RULE_ARG_RULE:
                                push_rule(parser, rule, i + 1);
                                push_rule_from_arg(parser, arg);
                                goto next_rule;
                            default:
                                assert(0);
                        }
                    }
                }
                assert(i >= 1);

                // compute number of elements in list, result in i
                i -= 1;
                if ((n & 1) && (rule->arg[1] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    // don't count separators when they are tokens
                    i = (i + 1) / 2;
                }

                if (i == 1) {
                    // list matched single item
                    if (had_trailing_sep) {
                        // if there was a trailing separator, make a list of a single item
                        push_result_rule(parser, rule, i);
                    } else {
                        // just leave single item on stack (ie don't wrap in a list)
                    }
                } else {
                    //printf("done list %s %d %d\n", rule->rule_name, n, i);
                    push_result_rule(parser, rule, i);
                }
                break;

            default:
                assert(0);
        }
    }

    // check we are at the end of the token stream
    if (!mp_lexer_is_kind(lex, MP_TOKEN_END)) {
        goto syntax_error;
    }

    //printf("--------------\n");
    //result_stack_show(parser);
    //printf("rule stack alloc: %d\n", parser->rule_stack_alloc);
    //printf("result stack alloc: %d\n", parser->result_stack_alloc);
    //printf("number of parse nodes allocated: %d\n", num_parse_nodes_allocated);

    // get the root parse node that we created
    assert(parser->result_stack_top == 1);
    mp_parse_node_t result = parser->result_stack[0];

finished:
    // free the memory that we don't need anymore
    m_del(rule_stack_t, parser->rule_stack, parser->rule_stack_alloc);
    m_del(mp_parse_node_t, parser->result_stack, parser->result_stack_alloc);
    m_del_obj(parser_t, parser);

    // return the result
    return result;

syntax_error:
    // TODO these should raise a proper exception
    if (mp_lexer_is_kind(lex, MP_TOKEN_INDENT)) {
        mp_lexer_show_error_pythonic(lex, "IndentationError: unexpected indent");
    } else if (mp_lexer_is_kind(lex, MP_TOKEN_DEDENT_MISMATCH)) {
        mp_lexer_show_error_pythonic(lex, "IndentationError: unindent does not match any outer indentation level");
    } else {
        mp_lexer_show_error_pythonic(lex, "syntax error:");
#ifdef USE_RULE_NAME
        mp_lexer_show_error(lex, rule->rule_name);
#endif
        mp_token_show(mp_lexer_cur(lex));
    }
    result = MP_PARSE_NODE_NULL;
    goto finished;
}
Example 8
0
static void push_result_token(parser_t *parser, const mp_lexer_t *lex) {
    const mp_token_t *tok = mp_lexer_cur(lex);
    mp_parse_node_t pn;
    if (tok->kind == MP_TOKEN_NAME) {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_ID, qstr_from_strn_copy(tok->str, tok->len));
    } else if (tok->kind == MP_TOKEN_NUMBER) {
        bool dec = false;
        bool small_int = true;
        machine_int_t int_val = 0;
        int len = tok->len;
        const char *str = tok->str;
        int base = 10;
        int i = 0;
        if (len >= 3 && str[0] == '0') {
            if (str[1] == 'o' || str[1] == 'O') {
                // octal
                base = 8;
                i = 2;
            } else if (str[1] == 'x' || str[1] == 'X') {
                // hexadecimal
                base = 16;
                i = 2;
            } else if (str[1] == 'b' || str[1] == 'B') {
                // binary
                base = 2;
                i = 2;
            }
        }
        bool overflow = false;
        for (; i < len; i++) {
            machine_int_t old_val = int_val;
            if (unichar_isdigit(str[i]) && str[i] - '0' < base) {
                int_val = base * int_val + str[i] - '0';
            } else if (base == 16 && 'a' <= str[i] && str[i] <= 'f') {
                int_val = base * int_val + str[i] - 'a' + 10;
            } else if (base == 16 && 'A' <= str[i] && str[i] <= 'F') {
                int_val = base * int_val + str[i] - 'A' + 10;
            } else if (str[i] == '.' || str[i] == 'e' || str[i] == 'E' || str[i] == 'j' || str[i] == 'J') {
                dec = true;
                break;
            } else {
                small_int = false;
                break;
            }
            if (int_val < old_val) {
                // If new value became less than previous, it's overflow
                overflow = true;
            } else if ((old_val ^ int_val) & WORD_MSBIT_HIGH) {
                // If signed number changed sign - it's overflow
                overflow = true;
            }
        }
        if (dec) {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_DECIMAL, qstr_from_strn_copy(str, len));
        } else if (small_int && !overflow && MP_FIT_SMALL_INT(int_val)) {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_SMALL_INT, int_val);
        } else {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_INTEGER, qstr_from_strn_copy(str, len));
        }
    } else if (tok->kind == MP_TOKEN_STRING) {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_STRING, qstr_from_strn_copy(tok->str, tok->len));
    } else if (tok->kind == MP_TOKEN_BYTES) {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_BYTES, qstr_from_strn_copy(tok->str, tok->len));
    } else {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_TOKEN, tok->kind);
    }
    push_result_node(parser, pn);
}
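
The manual integer scan in Example 8 detects overflow after the fact, by watching for wrap-around and a sign flip in the accumulator. As a point of comparison, here is a small self-contained sketch (a hypothetical helper, not MicroPython code) that catches the same condition by testing before each multiply-add, avoiding any reliance on signed wrap-around behaviour:

#include <limits.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

// Accumulate decimal digits into a long, reporting overflow instead of
// wrapping: reject the digit if val * 10 + digit would exceed LONG_MAX.
static bool accumulate_decimal(const char *str, size_t len, long *out) {
    long val = 0;
    for (size_t i = 0; i < len; ++i) {
        int digit = str[i] - '0';
        if (digit < 0 || digit > 9) {
            return false; // not a plain decimal literal
        }
        if (val > (LONG_MAX - digit) / 10) {
            return false; // would overflow: caller falls back to a boxed integer
        }
        val = val * 10 + digit;
    }
    *out = val;
    return true;
}

int main(void) {
    long v;
    if (accumulate_decimal("1234567", 7, &v)) {
        printf("parsed %ld\n", v); // prints: parsed 1234567
    }
    return 0;
}
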
Example 9
0
mp_parse_tree_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind) {

    // initialise parser and allocate memory for its stacks

    parser_t parser;

    parser.rule_stack_alloc = MICROPY_ALLOC_PARSE_RULE_INIT;
    parser.rule_stack_top = 0;
    parser.rule_stack = m_new(rule_stack_t, parser.rule_stack_alloc);

    parser.result_stack_alloc = MICROPY_ALLOC_PARSE_RESULT_INIT;
    parser.result_stack_top = 0;
    parser.result_stack = m_new(mp_parse_node_t, parser.result_stack_alloc);

    parser.lexer = lex;

    parser.tree.chunk = NULL;
    parser.cur_chunk = NULL;

    #if MICROPY_COMP_CONST
    mp_map_init(&parser.consts, 0);
    #endif

    // work out the top-level rule to use, and push it on the stack
    size_t top_level_rule;
    switch (input_kind) {
        case MP_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        case MP_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(&parser, lex->tok_line, rules[top_level_rule], 0);

    // parse!

    size_t n, i; // state for the current rule
    size_t rule_src_line; // source line for the first token matched by the current rule
    bool backtrack = false;
    const rule_t *rule = NULL;

    for (;;) {
        next_rule:
        if (parser.rule_stack_top == 0) {
            break;
        }

        pop_rule(&parser, &rule, &i, &rule_src_line);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser.rule_stack_top);
        for (int j = 0; j < parser.rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n; ++i) {
                    uint16_t kind = rule->arg[i] & RULE_ARG_KIND_MASK;
                    if (kind == RULE_ARG_TOK) {
                        if (lex->tok_kind == (rule->arg[i] & RULE_ARG_ARG_MASK)) {
                            push_result_token(&parser, rule);
                            mp_lexer_to_next(lex);
                            goto next_rule;
                        }
                    } else {
                        assert(kind == RULE_ARG_RULE);
                        if (i + 1 < n) {
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this or-rule
                        }
                        push_rule_from_arg(&parser, rule->arg[i]); // push child of or-rule
                        goto next_rule;
                    }
                }
                backtrack = true;
                break;

            case RULE_ACT_AND: {

                // failed, backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    if ((rule->arg[i] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        // need to match a token
                        mp_token_kind_t tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                        if (lex->tok_kind == tok_kind) {
                            // matched token
                            if (tok_kind == MP_TOKEN_NAME) {
                                push_result_token(&parser, rule);
                            }
                            mp_lexer_to_next(lex);
                        } else {
                            // failed to match token
                            if (i > 0) {
                                // already eaten tokens so can't backtrack
                                goto syntax_error;
                            } else {
                                // this rule failed, so backtrack
                                backtrack = true;
                                goto next_rule;
                            }
                        }
                    } else {
                        push_rule(&parser, rule_src_line, rule, i + 1); // save this and-rule
                        push_rule_from_arg(&parser, rule->arg[i]); // push child of and-rule
                        goto next_rule;
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                #if !MICROPY_ENABLE_DOC_STRING
                // this code discards lonely statements, such as doc strings
                if (input_kind != MP_PARSE_SINGLE_INPUT && rule->rule_id == RULE_expr_stmt && peek_result(&parser, 0) == MP_PARSE_NODE_NULL) {
                    mp_parse_node_t p = peek_result(&parser, 1);
                    if ((MP_PARSE_NODE_IS_LEAF(p) && !MP_PARSE_NODE_IS_ID(p))
                        || MP_PARSE_NODE_IS_STRUCT_KIND(p, RULE_const_object)) {
                        pop_result(&parser); // MP_PARSE_NODE_NULL
                        pop_result(&parser); // const expression (leaf or RULE_const_object)
                        // Pushing the "pass" rule here will overwrite any RULE_const_object
                        // entry that was on the result stack, allowing the GC to reclaim
                        // the memory from the const object when needed.
                        push_result_rule(&parser, rule_src_line, rules[RULE_pass_stmt], 0);
                        break;
                    }
                }
                #endif

                // count number of arguments for the parse node
                i = 0;
                size_t num_not_nil = 0;
                for (size_t x = n; x > 0;) {
                    --x;
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        mp_token_kind_t tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind == MP_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                            num_not_nil += 1;
                        }
                    } else {
                        // rules are always pushed
                        if (peek_result(&parser, i) != MP_PARSE_NODE_NULL) {
                            num_not_nil += 1;
                        }
                        i += 1;
                    }
                }

                if (num_not_nil == 1 && (rule->act & RULE_ACT_ALLOW_IDENT)) {
                    // this rule has only 1 argument and should not be emitted
                    mp_parse_node_t pn = MP_PARSE_NODE_NULL;
                    for (size_t x = 0; x < i; ++x) {
                        mp_parse_node_t pn2 = pop_result(&parser);
                        if (pn2 != MP_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(&parser, pn);
                } else {
                    // this rule must be emitted

                    if (rule->act & RULE_ACT_ADD_BLANK) {
                        // and add an extra blank node at the end (used by the compiler to store data)
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        i += 1;
                    }

                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;
            }

            default: {
                assert((rule->act & RULE_ACT_KIND_MASK) == RULE_ACT_LIST);

                // n=2 is: item item*
                // n=1 is: item (sep item)*
                // n=3 is: item (sep item)* [sep]
                bool had_trailing_sep;
                if (backtrack) {
                    list_backtrack:
                    had_trailing_sep = false;
                    if (n == 2) {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else {
                            // fail on item, in later rounds; finish with this rule
                            backtrack = false;
                        }
                    } else {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else if ((i & 1) == 1) {
                            // fail on item, in later rounds; have eaten tokens so can't backtrack
                            if (n == 3) {
                                // list allows trailing separator; finish parsing list
                                had_trailing_sep = true;
                                backtrack = false;
                            } else {
                                // list doesn't allow a trailing separator; fail
                                goto syntax_error;
                            }
                        } else {
                            // fail on separator; finish parsing list
                            backtrack = false;
                        }
                    }
                } else {
                    for (;;) {
                        size_t arg = rule->arg[i & 1 & n];
                        if ((arg & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                            if (lex->tok_kind == (arg & RULE_ARG_ARG_MASK)) {
                                if (i & 1 & n) {
                                    // separators which are tokens are not pushed to result stack
                                } else {
                                    push_result_token(&parser, rule);
                                }
                                mp_lexer_to_next(lex);
                                // got element of list, so continue parsing list
                                i += 1;
                            } else {
                                // couldn't get element of list
                                i += 1;
                                backtrack = true;
                                goto list_backtrack;
                            }
                        } else {
                            assert((arg & RULE_ARG_KIND_MASK) == RULE_ARG_RULE);
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this list-rule
                            push_rule_from_arg(&parser, arg); // push child of list-rule
                            goto next_rule;
                        }
                    }
                }
                assert(i >= 1);

                // compute number of elements in list, result in i
                i -= 1;
                if ((n & 1) && (rule->arg[1] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    // don't count separators when they are tokens
                    i = (i + 1) / 2;
                }

                if (i == 1) {
                    // list matched single item
                    if (had_trailing_sep) {
                        // if there was a trailing separator, make a list of a single item
                        push_result_rule(&parser, rule_src_line, rule, i);
                    } else {
                        // just leave single item on stack (ie don't wrap in a list)
                    }
                } else {
                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;
            }
        }
    }

    #if MICROPY_COMP_CONST
    mp_map_deinit(&parser.consts);
    #endif

    // truncate final chunk and link into chain of chunks
    if (parser.cur_chunk != NULL) {
        (void)m_renew_maybe(byte, parser.cur_chunk,
            sizeof(mp_parse_chunk_t) + parser.cur_chunk->alloc,
            sizeof(mp_parse_chunk_t) + parser.cur_chunk->union_.used,
            false);
        parser.cur_chunk->alloc = parser.cur_chunk->union_.used;
        parser.cur_chunk->union_.next = parser.tree.chunk;
        parser.tree.chunk = parser.cur_chunk;
    }

    if (
        lex->tok_kind != MP_TOKEN_END // check we are at the end of the token stream
        || parser.result_stack_top == 0 // check that we got a node (can fail on empty input)
        ) {
    syntax_error:;
        mp_obj_t exc;
        if (lex->tok_kind == MP_TOKEN_INDENT) {
            exc = mp_obj_new_exception_msg(&mp_type_IndentationError,
                "unexpected indent");
        } else if (lex->tok_kind == MP_TOKEN_DEDENT_MISMATCH) {
            exc = mp_obj_new_exception_msg(&mp_type_IndentationError,
                "unindent does not match any outer indentation level");
        } else {
            exc = mp_obj_new_exception_msg(&mp_type_SyntaxError,
                "invalid syntax");
        }
        // add traceback to give info about file name and location
        // we don't have a 'block' name, so just pass the NULL qstr to indicate this
        mp_obj_exception_add_traceback(exc, lex->source_name, lex->tok_line, MP_QSTR_NULL);
        nlr_raise(exc);
    }

    // get the root parse node that we created
    assert(parser.result_stack_top == 1);
    parser.tree.root = parser.result_stack[0];

    // free the memory that we don't need anymore
    m_del(rule_stack_t, parser.rule_stack, parser.rule_stack_alloc);
    m_del(mp_parse_node_t, parser.result_stack, parser.result_stack_alloc);

    // we also free the lexer on behalf of the caller
    mp_lexer_free(lex);

    return parser.tree;
}
Example 10
0
STATIC bool fold_constants(parser_t *parser, const rule_t *rule, size_t num_args) {
    // this code does folding of arbitrary integer expressions, eg 1 + 2 * 3 + 4
    // it does not do partial folding, eg 1 + 2 + x -> 3 + x

    mp_obj_t arg0;
    if (rule->rule_id == RULE_expr
        || rule->rule_id == RULE_xor_expr
        || rule->rule_id == RULE_and_expr) {
        // folding for binary ops: | ^ &
        mp_parse_node_t pn = peek_result(parser, num_args - 1);
        if (!mp_parse_node_get_int_maybe(pn, &arg0)) {
            return false;
        }
        mp_binary_op_t op;
        if (rule->rule_id == RULE_expr) {
            op = MP_BINARY_OP_OR;
        } else if (rule->rule_id == RULE_xor_expr) {
            op = MP_BINARY_OP_XOR;
        } else {
            op = MP_BINARY_OP_AND;
        }
        for (ssize_t i = num_args - 2; i >= 0; --i) {
            pn = peek_result(parser, i);
            mp_obj_t arg1;
            if (!mp_parse_node_get_int_maybe(pn, &arg1)) {
                return false;
            }
            arg0 = mp_binary_op(op, arg0, arg1);
        }
    } else if (rule->rule_id == RULE_shift_expr
        || rule->rule_id == RULE_arith_expr
        || rule->rule_id == RULE_term) {
        // folding for binary ops: << >> + - * / % //
        mp_parse_node_t pn = peek_result(parser, num_args - 1);
        if (!mp_parse_node_get_int_maybe(pn, &arg0)) {
            return false;
        }
        for (ssize_t i = num_args - 2; i >= 1; i -= 2) {
            pn = peek_result(parser, i - 1);
            mp_obj_t arg1;
            if (!mp_parse_node_get_int_maybe(pn, &arg1)) {
                return false;
            }
            mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(peek_result(parser, i));
            static const uint8_t token_to_op[] = {
                MP_BINARY_OP_ADD,
                MP_BINARY_OP_SUBTRACT,
                MP_BINARY_OP_MULTIPLY,
                255,//MP_BINARY_OP_POWER,
                255,//MP_BINARY_OP_TRUE_DIVIDE,
                MP_BINARY_OP_FLOOR_DIVIDE,
                MP_BINARY_OP_MODULO,
                255,//MP_BINARY_OP_LESS
                MP_BINARY_OP_LSHIFT,
                255,//MP_BINARY_OP_MORE
                MP_BINARY_OP_RSHIFT,
            };
            mp_binary_op_t op = token_to_op[tok - MP_TOKEN_OP_PLUS];
            if (op == (mp_binary_op_t)255) {
                return false;
            }
            int rhs_sign = mp_obj_int_sign(arg1);
            if (op <= MP_BINARY_OP_RSHIFT) {
                // << and >> can't have negative rhs
                if (rhs_sign < 0) {
                    return false;
                }
            } else if (op >= MP_BINARY_OP_FLOOR_DIVIDE) {
                // % and // can't have zero rhs
                if (rhs_sign == 0) {
                    return false;
                }
            }
            arg0 = mp_binary_op(op, arg0, arg1);
        }
    } else if (rule->rule_id == RULE_factor_2) {
        // folding for unary ops: + - ~
        mp_parse_node_t pn = peek_result(parser, 0);
        if (!mp_parse_node_get_int_maybe(pn, &arg0)) {
            return false;
        }
        mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(peek_result(parser, 1));
        mp_unary_op_t op;
        if (tok == MP_TOKEN_OP_PLUS) {
            op = MP_UNARY_OP_POSITIVE;
        } else if (tok == MP_TOKEN_OP_MINUS) {
            op = MP_UNARY_OP_NEGATIVE;
        } else {
            assert(tok == MP_TOKEN_OP_TILDE); // should be
            op = MP_UNARY_OP_INVERT;
        }
        arg0 = mp_unary_op(op, arg0);

    #if MICROPY_COMP_CONST
    } else if (rule->rule_id == RULE_expr_stmt) {
        mp_parse_node_t pn1 = peek_result(parser, 0);
        if (!MP_PARSE_NODE_IS_NULL(pn1)
            && !(MP_PARSE_NODE_IS_STRUCT_KIND(pn1, RULE_expr_stmt_augassign)
            || MP_PARSE_NODE_IS_STRUCT_KIND(pn1, RULE_expr_stmt_assign_list))) {
            // this node is of the form <x> = <y>
            mp_parse_node_t pn0 = peek_result(parser, 1);
            if (MP_PARSE_NODE_IS_ID(pn0)
                && MP_PARSE_NODE_IS_STRUCT_KIND(pn1, RULE_atom_expr_normal)
                && MP_PARSE_NODE_IS_ID(((mp_parse_node_struct_t*)pn1)->nodes[0])
                && MP_PARSE_NODE_LEAF_ARG(((mp_parse_node_struct_t*)pn1)->nodes[0]) == MP_QSTR_const
                && MP_PARSE_NODE_IS_STRUCT_KIND(((mp_parse_node_struct_t*)pn1)->nodes[1], RULE_trailer_paren)
                ) {
                // code to assign dynamic constants: id = const(value)

                // get the id
                qstr id = MP_PARSE_NODE_LEAF_ARG(pn0);

                // get the value
                mp_parse_node_t pn_value = ((mp_parse_node_struct_t*)((mp_parse_node_struct_t*)pn1)->nodes[1])->nodes[0];
                mp_obj_t value;
                if (!mp_parse_node_get_int_maybe(pn_value, &value)) {
                    mp_obj_t exc = mp_obj_new_exception_msg(&mp_type_SyntaxError,
                        "constant must be an integer");
                    mp_obj_exception_add_traceback(exc, parser->lexer->source_name,
                        ((mp_parse_node_struct_t*)pn1)->source_line, MP_QSTR_NULL);
                    nlr_raise(exc);
                }

                // store the value in the table of dynamic constants
                mp_map_elem_t *elem = mp_map_lookup(&parser->consts, MP_OBJ_NEW_QSTR(id), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND);
                assert(elem->value == MP_OBJ_NULL);
                elem->value = value;

                // If the constant starts with an underscore then treat it as a private
                // variable and don't emit any code to store the value to the id.
                if (qstr_str(id)[0] == '_') {
                    pop_result(parser); // pop const(value)
                    pop_result(parser); // pop id
                    push_result_rule(parser, 0, rules[RULE_pass_stmt], 0); // replace with "pass"
                    return true;
                }

                // replace const(value) with value
                pop_result(parser);
                push_result_node(parser, pn_value);

                // finished folding this assignment, but we still want it to be part of the tree
                return false;
            }
        }
        return false;
    #endif

    #if MICROPY_COMP_MODULE_CONST
    } else if (rule->rule_id == RULE_atom_expr_normal) {
        mp_parse_node_t pn0 = peek_result(parser, 1);
        mp_parse_node_t pn1 = peek_result(parser, 0);
        if (!(MP_PARSE_NODE_IS_ID(pn0)
            && MP_PARSE_NODE_IS_STRUCT_KIND(pn1, RULE_trailer_period))) {
            return false;
        }
        // id1.id2
        // look it up in constant table, see if it can be replaced with an integer
        mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t*)pn1;
        assert(MP_PARSE_NODE_IS_ID(pns1->nodes[0]));
        qstr q_base = MP_PARSE_NODE_LEAF_ARG(pn0);
        qstr q_attr = MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0]);
        mp_map_elem_t *elem = mp_map_lookup((mp_map_t*)&mp_constants_map, MP_OBJ_NEW_QSTR(q_base), MP_MAP_LOOKUP);
        if (elem == NULL) {
            return false;
        }
        mp_obj_t dest[2];
        mp_load_method_maybe(elem->value, q_attr, dest);
        if (!(dest[0] != MP_OBJ_NULL && MP_OBJ_IS_INT(dest[0]) && dest[1] == MP_OBJ_NULL)) {
            return false;
        }
        arg0 = dest[0];
    #endif

    } else {
        return false;
    }

    // success folding this rule

    for (size_t i = num_args; i > 0; i--) {
        pop_result(parser);
    }
    if (MP_OBJ_IS_SMALL_INT(arg0)) {
        push_result_node(parser, mp_parse_node_new_small_int(MP_OBJ_SMALL_INT_VALUE(arg0)));
    } else {
        // TODO reuse memory for parse node struct?
        push_result_node(parser, make_node_const_object(parser, 0, arg0));
    }

    return true;
}
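
fold_constants in Example 10 works on one rule at a time, so each fold only ever sees a flat chain of operands at the same precedence level (precedence has already been resolved by the grammar). Here is a minimal sketch of that core loop with hypothetical names, only + and -, and operands assumed to be known constants; an operator that can't be handled aborts the whole fold, just as the table lookup above returns false for unsupported tokens:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

// Fold a flat chain of same-precedence binary operations, such as a single
// grammar rule produces (e.g. arith_expr: term (('+'|'-') term)*).
// One left-to-right pass over n_ops operators and n_ops + 1 operands
// suffices; returns false if any operator can't be folded.
static bool fold_chain(const long *operands, const char *ops, size_t n_ops,
    long *result) {
    long acc = operands[0];
    for (size_t i = 0; i < n_ops; ++i) {
        switch (ops[i]) {
            case '+': acc += operands[i + 1]; break;
            case '-': acc -= operands[i + 1]; break;
            default: return false; // unsupported operator: leave the rule unfolded
        }
    }
    *result = acc;
    return true;
}

int main(void) {
    long operands[] = { 1, 2, 3 };
    char ops[] = { '+', '-' };
    long r;
    if (fold_chain(operands, ops, 2, &r)) {
        printf("folded to %ld\n", r); // 1 + 2 - 3 = 0
    }
    return 0;
}
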
Example 11
0
mp_parse_tree_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind) {

    // initialise parser and allocate memory for its stacks

    parser_t parser;

    parser.parse_error = PARSE_ERROR_NONE;

    parser.rule_stack_alloc = MICROPY_ALLOC_PARSE_RULE_INIT;
    parser.rule_stack_top = 0;
    parser.rule_stack = m_new_maybe(rule_stack_t, parser.rule_stack_alloc);

    parser.result_stack_alloc = MICROPY_ALLOC_PARSE_RESULT_INIT;
    parser.result_stack_top = 0;
    parser.result_stack = m_new_maybe(mp_parse_node_t, parser.result_stack_alloc);

    parser.lexer = lex;

    parser.tree.chunk = NULL;
    parser.cur_chunk = NULL;

    #if MICROPY_COMP_CONST
    mp_map_init(&parser.consts, 0);
    #endif

    // check if we could allocate the stacks
    if (parser.rule_stack == NULL || parser.result_stack == NULL) {
        goto memory_error;
    }

    // work out the top-level rule to use, and push it on the stack
    size_t top_level_rule;
    switch (input_kind) {
        case MP_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        case MP_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(&parser, lex->tok_line, rules[top_level_rule], 0);

    // parse!

    size_t n, i; // state for the current rule
    size_t rule_src_line; // source line for the first token matched by the current rule
    bool backtrack = false;
    const rule_t *rule = NULL;

    for (;;) {
        next_rule:
        if (parser.rule_stack_top == 0 || parser.parse_error) {
            break;
        }

        pop_rule(&parser, &rule, &i, &rule_src_line);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser.rule_stack_top);
        for (int j = 0; j < parser.rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n; ++i) {
                    uint16_t kind = rule->arg[i] & RULE_ARG_KIND_MASK;
                    if (kind == RULE_ARG_TOK) {
                        if (lex->tok_kind == (rule->arg[i] & RULE_ARG_ARG_MASK)) {
                            push_result_token(&parser);
                            mp_lexer_to_next(lex);
                            goto next_rule;
                        }
                    } else {
                        assert(kind == RULE_ARG_RULE);
                        if (i + 1 < n) {
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this or-rule
                        }
                        push_rule_from_arg(&parser, rule->arg[i]); // push child of or-rule
                        goto next_rule;
                    }
                }
                backtrack = true;
                break;

            case RULE_ACT_AND: {

                // failed, backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK: {
                            // need to match a token
                            mp_token_kind_t tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                            if (lex->tok_kind == tok_kind) {
                                // matched token
                                if (tok_kind == MP_TOKEN_NAME) {
                                    push_result_token(&parser);
                                }
                                mp_lexer_to_next(lex);
                            } else {
                                // failed to match token
                                if (i > 0) {
                                    // already eaten tokens so can't backtrack
                                    goto syntax_error;
                                } else {
                                    // this rule failed, so backtrack
                                    backtrack = true;
                                    goto next_rule;
                                }
                            }
                            break;
                        }
                        case RULE_ARG_RULE:
                        case RULE_ARG_OPT_RULE:
                        rule_and_no_other_choice:
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this and-rule
                            push_rule_from_arg(&parser, rule->arg[i]); // push child of and-rule
                            goto next_rule;
                        default:
                            assert(0);
                            goto rule_and_no_other_choice; // to help flow control analysis
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                #if !MICROPY_ENABLE_DOC_STRING
                // this code discards lonely statements, such as doc strings
                if (input_kind != MP_PARSE_SINGLE_INPUT && rule->rule_id == RULE_expr_stmt && peek_result(&parser, 0) == MP_PARSE_NODE_NULL) {
                    mp_parse_node_t p = peek_result(&parser, 1);
                    if ((MP_PARSE_NODE_IS_LEAF(p) && !MP_PARSE_NODE_IS_ID(p)) || MP_PARSE_NODE_IS_STRUCT_KIND(p, RULE_string)) {
                        pop_result(&parser); // MP_PARSE_NODE_NULL
                        mp_parse_node_t pn = pop_result(&parser); // possibly RULE_string
                        if (MP_PARSE_NODE_IS_STRUCT(pn)) {
                            mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn;
                            if (MP_PARSE_NODE_STRUCT_KIND(pns) == RULE_string) {
                                m_del(char, (char*)pns->nodes[0], (size_t)pns->nodes[1]);
                            }
                        }
                        push_result_rule(&parser, rule_src_line, rules[RULE_pass_stmt], 0);
                        break;
                    }
                }
                #endif

                // count number of arguments for the parse node
                i = 0;
                size_t num_not_nil = 0;
                for (size_t x = n; x > 0;) {
                    --x;
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        mp_token_kind_t tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind == MP_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                            num_not_nil += 1;
                        }
                    } else {
                        // rules are always pushed
                        if (peek_result(&parser, i) != MP_PARSE_NODE_NULL) {
                            num_not_nil += 1;
                        }
                        i += 1;
                    }
                }

                if (num_not_nil == 1 && (rule->act & RULE_ACT_ALLOW_IDENT)) {
                    // this rule has only 1 argument and should not be emitted
                    mp_parse_node_t pn = MP_PARSE_NODE_NULL;
                    for (size_t x = 0; x < i; ++x) {
                        mp_parse_node_t pn2 = pop_result(&parser);
                        if (pn2 != MP_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(&parser, pn);
                } else {
                    // this rule must be emitted

                    if (rule->act & RULE_ACT_ADD_BLANK) {
                        // and add an extra blank node at the end (used by the compiler to store data)
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        i += 1;
                    }

                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;
            }
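
The reduction step above only allocates a rule node when it has to: a rule flagged RULE_ACT_ALLOW_IDENT whose arguments collapsed to a single non-NULL result simply passes that result through. Below is a minimal, self-contained sketch of that idea using toy types and a fixed-size stack; the names (node_t, reduce_rule, STACK_MAX) are illustrative and not part of MicroPython's API.

/* Toy model (not MicroPython code) of the collapse step above: a rule that
 * ended up with exactly one non-NULL child, and is allowed to be transparent,
 * passes that child through instead of allocating a wrapper node. */
#include <stdio.h>
#include <stdlib.h>

typedef struct node {
    int rule_id;              /* which rule produced this node (0 for leaves here) */
    size_t num_children;
    struct node *children[];  /* flexible array of child nodes */
} node_t;

#define STACK_MAX 64
static node_t *result_stack[STACK_MAX];
static size_t result_top;

static void push_node(node_t *n) { result_stack[result_top++] = n; }
static node_t *pop_node(void) { return result_stack[--result_top]; }

/* Pop num_args results; if allowed and only one is non-NULL, re-push it alone. */
static void reduce_rule(int rule_id, size_t num_args, int allow_collapse) {
    size_t num_not_nil = 0;
    for (size_t x = 0; x < num_args; ++x) {
        if (result_stack[result_top - 1 - x] != NULL) {  /* peek, like peek_result */
            num_not_nil += 1;
        }
    }
    if (allow_collapse && num_not_nil == 1) {
        node_t *keep = NULL;
        for (size_t x = 0; x < num_args; ++x) {
            node_t *n = pop_node();
            if (n != NULL) {
                keep = n;
            }
        }
        push_node(keep);  /* transparent rule: the single child passes through */
        return;
    }
    /* otherwise allocate a wrapper node owning all popped children */
    node_t *pn = malloc(sizeof(node_t) + num_args * sizeof(node_t *));
    pn->rule_id = rule_id;
    pn->num_children = num_args;
    for (size_t x = num_args; x > 0; --x) {
        pn->children[x - 1] = pop_node();  /* reverse of push order */
    }
    push_node(pn);
}

int main(void) {
    node_t *leaf = calloc(1, sizeof(node_t));
    push_node(leaf);       /* one real child ... */
    push_node(NULL);       /* ... and one optional slot that stayed empty */
    reduce_rule(42, 2, 1); /* rule id 42 is arbitrary; collapse is allowed */
    printf("top is leaf: %d\n", result_stack[0] == leaf);  /* prints 1 */
    return 0;
}

Running the sketch prints "top is leaf: 1", i.e. the wrapper allocation was skipped.
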
Example no. 12
mp_parse_node_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind, mp_parse_error_kind_t *parse_error_kind_out) {

    // initialise parser and allocate memory for its stacks

    parser_t parser;

    parser.had_memory_error = false;

    parser.rule_stack_alloc = MICROPY_ALLOC_PARSE_RULE_INIT;
    parser.rule_stack_top = 0;
    parser.rule_stack = m_new_maybe(rule_stack_t, parser.rule_stack_alloc);

    parser.result_stack_alloc = MICROPY_ALLOC_PARSE_RESULT_INIT;
    parser.result_stack_top = 0;
    parser.result_stack = m_new_maybe(mp_parse_node_t, parser.result_stack_alloc);

    parser.lexer = lex;

    // check if we could allocate the stacks
    if (parser.rule_stack == NULL || parser.result_stack == NULL) {
        goto memory_error;
    }

    // work out the top-level rule to use, and push it on the stack
    int top_level_rule;
    switch (input_kind) {
        case MP_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        case MP_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(&parser, mp_lexer_cur(lex)->src_line, rules[top_level_rule], 0);

    // parse!

    uint n, i; // state for the current rule
    uint rule_src_line; // source line for the first token matched by the current rule
    bool backtrack = false;
    const rule_t *rule = NULL;
    mp_token_kind_t tok_kind;
    bool emit_rule;
    bool had_trailing_sep;

    for (;;) {
        next_rule:
        if (parser.rule_stack_top == 0 || parser.had_memory_error) {
            break;
        }

        pop_rule(&parser, &rule, &i, &rule_src_line);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser.rule_stack_top);
        for (int j = 0; j < parser.rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
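                // resuming with i > 0 and no backtrack means the alternative we
                // descended into succeeded, so this or-rule is already done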
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n - 1; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            if (mp_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                                push_result_token(&parser, lex);
                                mp_lexer_to_next(lex);
                                goto next_rule;
                            }
                            break;
                        case RULE_ARG_RULE:
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this or-rule
                            push_rule_from_arg(&parser, rule->arg[i]); // push child of or-rule
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }
                if ((rule->arg[i] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    if (mp_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                        push_result_token(&parser, lex);
                        mp_lexer_to_next(lex);
                    } else {
                        backtrack = true;
                        goto next_rule;
                    }
                } else {
                    push_rule_from_arg(&parser, rule->arg[i]);
                }
                break;

            case RULE_ACT_AND:

                // the child rule failed: backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(&parser, MP_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            // need to match a token
                            tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                            if (mp_lexer_is_kind(lex, tok_kind)) {
                                // matched token
                                if (tok_kind == MP_TOKEN_NAME) {
                                    push_result_token(&parser, lex);
                                }
                                mp_lexer_to_next(lex);
                            } else {
                                // failed to match token
                                if (i > 0) {
                                    // already eaten tokens so can't backtrack
                                    goto syntax_error;
                                } else {
                                    // this rule failed, so backtrack
                                    backtrack = true;
                                    goto next_rule;
                                }
                            }
                            break;
                        case RULE_ARG_RULE:
                        case RULE_ARG_OPT_RULE:
                            push_rule(&parser, rule_src_line, rule, i + 1); // save this and-rule
                            push_rule_from_arg(&parser, rule->arg[i]); // push child of and-rule
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                // count number of arguments for the parse_node
                i = 0;
                emit_rule = false;
                for (int x = 0; x < n; ++x) {
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind >= MP_TOKEN_NAME) {
                            emit_rule = true;
                        }
                        if (tok_kind == MP_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                        }
                    } else {
                        // rules are always pushed
                        i += 1;
                    }
                }

#if !MICROPY_EMIT_CPYTHON && !MICROPY_ENABLE_DOC_STRING
                // this code discards lonely statements, such as doc strings
                if (input_kind != MP_PARSE_SINGLE_INPUT && rule->rule_id == RULE_expr_stmt && peek_result(&parser, 0) == MP_PARSE_NODE_NULL) {
                    mp_parse_node_t p = peek_result(&parser, 1);
                    if ((MP_PARSE_NODE_IS_LEAF(p) && !MP_PARSE_NODE_IS_ID(p)) || MP_PARSE_NODE_IS_STRUCT_KIND(p, RULE_string)) {
                        pop_result(&parser);
                        pop_result(&parser);
                        push_result_rule(&parser, rule_src_line, rules[RULE_pass_stmt], 0);
                        break;
                    }
                }
#endif

                // always emit these rules, even if they have only 1 argument
                if (rule->rule_id == RULE_expr_stmt || rule->rule_id == RULE_yield_stmt) {
                    emit_rule = true;
                }

                // never emit these rules if they have only 1 argument
                // NOTE: can't put atom_paren here because we need it to distinguish, for example, [a,b] from [(a,b)]
                // TODO possibly put varargslist_name, varargslist_equal here as well
                if (rule->rule_id == RULE_else_stmt || rule->rule_id == RULE_testlist_comp_3b || rule->rule_id == RULE_import_as_names_paren || rule->rule_id == RULE_typedargslist_name || rule->rule_id == RULE_typedargslist_colon || rule->rule_id == RULE_typedargslist_equal || rule->rule_id == RULE_dictorsetmaker_colon || rule->rule_id == RULE_classdef_2 || rule->rule_id == RULE_with_item_as || rule->rule_id == RULE_assert_stmt_extra || rule->rule_id == RULE_as_name || rule->rule_id == RULE_raise_stmt_from || rule->rule_id == RULE_vfpdef) {
                    emit_rule = false;
                }

                // always emit these rules, and add an extra blank node at the end (to be used by the compiler to store data)
                if (ADD_BLANK_NODE(rule->rule_id)) {
                    emit_rule = true;
                    push_result_node(&parser, MP_PARSE_NODE_NULL);
                    i += 1;
                }

                int num_not_nil = 0;
                for (int x = 0; x < i; ++x) {
                    if (peek_result(&parser, x) != MP_PARSE_NODE_NULL) {
                        num_not_nil += 1;
                    }
                }
                //printf("done and %s n=%d i=%d notnil=%d\n", rule->rule_name, n, i, num_not_nil);
                if (emit_rule) {
                    push_result_rule(&parser, rule_src_line, rule, i);
                } else if (num_not_nil == 0) {
                    push_result_rule(&parser, rule_src_line, rule, i); // needed for, eg, atom_paren, testlist_comp_3b
                    //result_stack_show(parser);
                    //assert(0);
                } else if (num_not_nil == 1) {
                    // single result, leave it on stack
                    mp_parse_node_t pn = MP_PARSE_NODE_NULL;
                    for (int x = 0; x < i; ++x) {
                        mp_parse_node_t pn2 = pop_result(&parser);
                        if (pn2 != MP_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(&parser, pn);
                } else {
                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;

            case RULE_ACT_LIST:
                // n=2 is: item item*
                // n=1 is: item (sep item)*
                // n=3 is: item (sep item)* [sep]
                if (backtrack) {
                    list_backtrack:
                    had_trailing_sep = false;
                    if (n == 2) {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else {
                            // fail on item, in later rounds; finish with this rule
                            backtrack = false;
                        }
                    } else {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else if ((i & 1) == 1) {
                            // fail on item, in later rounds; have eaten tokens so can't backtrack
                            if (n == 3) {
                                // list allows trailing separator; finish parsing list
                                had_trailing_sep = true;
                                backtrack = false;
                            } else {
                                // list doesn't allow a trailing separator; fail
                                goto syntax_error;
                            }
                        } else {
                            // fail on separator; finish parsing list
                            backtrack = false;
                        }
                    }
                } else {
                    for (;;) {
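                        // select the next expected argument: when n is odd the item (arg[0])
                        // and separator (arg[1]) alternate with the parity of i, and when n
                        // is even only arg[0] is used, so `i & 1 & n` picks the right index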
                        uint arg = rule->arg[i & 1 & n];
                        switch (arg & RULE_ARG_KIND_MASK) {
                            case RULE_ARG_TOK:
                                if (mp_lexer_is_kind(lex, arg & RULE_ARG_ARG_MASK)) {
                                    if (i & 1 & n) {
                                        // separators which are tokens are not pushed to result stack
                                    } else {
                                        push_result_token(&parser, lex);
                                    }
                                    mp_lexer_to_next(lex);
                                    // got element of list, so continue parsing list
                                    i += 1;
                                } else {
                                    // couldn't get element of list
                                    i += 1;
                                    backtrack = true;
                                    goto list_backtrack;
                                }
                                break;
                            case RULE_ARG_RULE:
                                push_rule(&parser, rule_src_line, rule, i + 1); // save this list-rule
                                push_rule_from_arg(&parser, arg); // push child of list-rule
                                goto next_rule;
                            default:
                                assert(0);
                        }
                    }
                }
                assert(i >= 1);

                // compute number of elements in list, result in i
                i -= 1;
                if ((n & 1) && (rule->arg[1] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    // don't count separators when they are tokens
                    i = (i + 1) / 2;
                }

                if (i == 1) {
                    // list matched single item
                    if (had_trailing_sep) {
                        // if there was a trailing separator, make a list of a single item
                        push_result_rule(&parser, rule_src_line, rule, i);
                    } else {
                        // just leave single item on stack (ie don't wrap in a list)
                    }
                } else {
                    //printf("done list %s %d %d\n", rule->rule_name, n, i);
                    push_result_rule(&parser, rule_src_line, rule, i);
                }
                break;

            default:
                assert(0);
        }
    }

    mp_parse_node_t result;

    // check if we had a memory error
    if (parser.had_memory_error) {
memory_error:
        *parse_error_kind_out = MP_PARSE_ERROR_MEMORY;
        result = MP_PARSE_NODE_NULL;
        goto finished;

    }

    // check we are at the end of the token stream
    if (!mp_lexer_is_kind(lex, MP_TOKEN_END)) {
        goto syntax_error;
    }

    //printf("--------------\n");
    //result_stack_show(parser);
    //printf("rule stack alloc: %d\n", parser.rule_stack_alloc);
    //printf("result stack alloc: %d\n", parser.result_stack_alloc);
    //printf("number of parse nodes allocated: %d\n", num_parse_nodes_allocated);

    // get the root parse node that we created
    assert(parser.result_stack_top == 1);
    result = parser.result_stack[0];

finished:
    // free the memory that we don't need anymore
    m_del(rule_stack_t, parser.rule_stack, parser.rule_stack_alloc);
    m_del(mp_parse_node_t, parser.result_stack, parser.result_stack_alloc);

    // return the result
    return result;

syntax_error:
    if (mp_lexer_is_kind(lex, MP_TOKEN_INDENT)) {
        *parse_error_kind_out = MP_PARSE_ERROR_UNEXPECTED_INDENT;
    } else if (mp_lexer_is_kind(lex, MP_TOKEN_DEDENT_MISMATCH)) {
        *parse_error_kind_out = MP_PARSE_ERROR_UNMATCHED_UNINDENT;
    } else {
        *parse_error_kind_out = MP_PARSE_ERROR_INVALID_SYNTAX;
#ifdef USE_RULE_NAME
        // debugging: print the rule name that failed and the token
        printf("rule: %s\n", rule->rule_name);
#if MICROPY_DEBUG_PRINTERS
        mp_token_show(mp_lexer_cur(lex));
#endif
#endif
    }
    result = MP_PARSE_NODE_NULL;
    goto finished;
}
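
mp_parse above avoids recursive descent by keeping an explicit stack of in-progress rules: push_rule records the rule, the source line of its first token and the argument index to resume at, and pop_rule restores that state once a child rule has been dealt with. The following standalone sketch models just that bookkeeping with hypothetical names (rule_frame_t, push_rule_frame, pop_rule_frame); it is not MicroPython code.

/* Sketch of the explicit rule stack that replaces recursion in mp_parse:
 * each frame records which rule is in progress, the source line of its first
 * token, and the argument index to resume at once the child rule pushed on
 * top of it has finished. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef struct {
    uint16_t rule_id;   /* which grammar rule is in progress */
    uint16_t arg_i;     /* argument index to resume at */
    uint32_t src_line;  /* source line of the rule's first token */
} rule_frame_t;

#define RULE_STACK_MAX 128
static rule_frame_t rule_stack[RULE_STACK_MAX];
static size_t rule_stack_top;

static void push_rule_frame(uint16_t rule_id, uint16_t arg_i, uint32_t src_line) {
    rule_frame_t f = { rule_id, arg_i, src_line };
    rule_stack[rule_stack_top++] = f;
}

static rule_frame_t pop_rule_frame(void) {
    return rule_stack[--rule_stack_top];
}

int main(void) {
    /* an and-rule (id 7) pauses before its second argument and descends into a
     * child rule (id 12); when the child completes, the parent is popped and
     * resumed at arg index 1, just as push_rule/pop_rule do in mp_parse */
    push_rule_frame(7, 1, 10);   /* save the parent: resume at argument 1 */
    push_rule_frame(12, 0, 10);  /* descend into the child rule */

    rule_frame_t child = pop_rule_frame();   /* the child runs to completion ... */
    rule_frame_t parent = pop_rule_frame();  /* ... then the parent is resumed */
    printf("child=%u parent=%u resume_at=%u\n",
           (unsigned)child.rule_id, (unsigned)parent.rule_id, (unsigned)parent.arg_i);
    return 0;
}
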
Example no. 13
STATIC void push_result_token(parser_t *parser, const mp_lexer_t *lex) {
    const mp_token_t *tok = mp_lexer_cur(lex);
    mp_parse_node_t pn;
    if (tok->kind == MP_TOKEN_NAME) {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_ID, qstr_from_strn(tok->str, tok->len));
    } else if (tok->kind == MP_TOKEN_NUMBER) {
        bool dec = false;
        bool small_int = true;
        machine_int_t int_val = 0;
        int len = tok->len;
        const char *str = tok->str;
        int base = 0;
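        // mp_parse_num_base consumes any radix prefix (e.g. 0x/0o/0b), sets base
        // accordingly (10 when there is no prefix) and returns the offset of the
        // first digit to accumulate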
        int i = mp_parse_num_base(str, len, &base);
        bool overflow = false;
        for (; i < len; i++) {
            int dig;
            if (unichar_isdigit(str[i]) && str[i] - '0' < base) {
                dig = str[i] - '0';
            } else if (base == 16 && 'a' <= str[i] && str[i] <= 'f') {
                dig = str[i] - 'a' + 10;
            } else if (base == 16 && 'A' <= str[i] && str[i] <= 'F') {
                dig = str[i] - 'A' + 10;
            } else if (str[i] == '.' || str[i] == 'e' || str[i] == 'E' || str[i] == 'j' || str[i] == 'J') {
                dec = true;
                break;
            } else {
                small_int = false;
                break;
            }
            // add the next digit and check for overflow
            if (mp_small_int_mul_overflow(int_val, base)) {
                overflow = true;
            }
            int_val = int_val * base + dig;
            if (!MP_SMALL_INT_FITS(int_val)) {
                overflow = true;
            }
        }
        if (dec) {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_DECIMAL, qstr_from_strn(str, len));
        } else if (small_int && !overflow && MP_SMALL_INT_FITS(int_val)) {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_SMALL_INT, int_val);
        } else {
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_INTEGER, qstr_from_strn(str, len));
        }
    } else if (tok->kind == MP_TOKEN_STRING) {
        // Don't automatically intern all strings.  doc strings (which are usually large)
        // will be discarded by the compiler, and so we shouldn't intern them.
        qstr qst = MP_QSTR_NULL;
        if (tok->len <= MICROPY_ALLOC_PARSE_INTERN_STRING_LEN) {
            // intern short strings
            qst = qstr_from_strn(tok->str, tok->len);
        } else {
            // check if this string is already interned
            qst = qstr_find_strn(tok->str, tok->len);
        }
        if (qst != MP_QSTR_NULL) {
            // qstr exists, make a leaf node
            pn = mp_parse_node_new_leaf(MP_PARSE_NODE_STRING, qst);
        } else {
            // not interned, make a node holding a pointer to the string data
            push_result_string(parser, mp_lexer_cur(lex)->src_line, tok->str, tok->len);
            return;
        }
    } else if (tok->kind == MP_TOKEN_BYTES) {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_BYTES, qstr_from_strn(tok->str, tok->len));
    } else {
        pn = mp_parse_node_new_leaf(MP_PARSE_NODE_TOKEN, tok->kind);
    }
    push_result_node(parser, pn);
}
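
Example no. 13 accumulates integer digits while guarding both the multiplication (mp_small_int_mul_overflow) and the running value (MP_SMALL_INT_FITS), so anything too large falls back to an MP_PARSE_NODE_INTEGER leaf. Here is a minimal standalone sketch of that overflow-checked accumulation, assuming for illustration that a small int must leave one bit of a long free for a tag; the helper names are hypothetical, not MicroPython's API.

/* Sketch of overflow-checked digit accumulation into a "small int". */
#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

/* fits in a long with one bit reserved for a tag (stand-in for MP_SMALL_INT_FITS) */
static bool small_int_fits(long x) {
    return x <= LONG_MAX / 2 && x >= LONG_MIN / 2;
}

/* accumulate the digits of str in the given base; returns false on overflow
 * or on a character that is not a valid digit */
static bool accumulate_small_int(const char *str, int base, long *out) {
    long val = 0;
    for (; *str != '\0'; ++str) {
        int dig = *str - '0';  /* decimal digits only, for brevity */
        if (dig < 0 || dig >= base) {
            return false;
        }
        /* check the multiply before doing it, as mp_small_int_mul_overflow does */
        if (val > (LONG_MAX - dig) / base) {
            return false;
        }
        val = val * base + dig;
        if (!small_int_fits(val)) {
            return false;
        }
    }
    *out = val;
    return true;
}

int main(void) {
    long v = 0;
    printf("ok = %d\n", accumulate_small_int("123456", 10, &v));  /* ok = 1 */
    printf("val = %ld\n", v);                                     /* val = 123456 */
    return 0;
}
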