Example #1
int highlight_node(const char *filename, struct buffer *buf)
{
    int ret;
    int length = 0;
    int lasttype = -1;
    struct ibuf *ibuf = ibuf_init();
    struct tokenizer *t = tokenizer_init();

    if (tokenizer_set_file(t, filename, buf->language) == -1) {
        if_print_message("%s:%d tokenizer_set_file error", __FILE__, __LINE__);
        /* Don't leak the scratch buffer or tokenizer on the error path */
        ibuf_free(ibuf);
        tokenizer_destroy(t);
        return -1;
    }

    /* Pull tokens until tokenizer_get_token() stops returning them (ret <= 0) */
    while ((ret = tokenizer_get_token(t)) > 0) {
        enum tokenizer_type e = tokenizer_get_packet_type(t);

        /* if_print_message("TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum(e)); */

        if (e == TOKENIZER_NEWLINE) {
            /* End of line: commit the completed text and reset per-line state */
            sbpush(buf->tlines, strdup(ibuf_get(ibuf)));

            if (length > buf->max_width)
                buf->max_width = length;

            length = 0;
            lasttype = -1;
            ibuf_clear(ibuf);
        } else {
            const char *tok_data = tokenizer_get_data(t);
            enum hl_group_kind hlg = hlg_from_tokenizer_type(e, tok_data);

            if (hlg == HLG_LAST) {
                logger_write_pos(logger, __FILE__, __LINE__, "Bad hlg_type for '%s', e==%d\n", tok_data, e);
                hlg = HLG_TEXT;
            }

            /* Set the highlight group type */
            add_type(ibuf, &lasttype, hlg);
            /* Add the text and bump our length */
            length += ibuf_add(ibuf, tok_data);
        }
    }

    ibuf_free(ibuf);
    tokenizer_destroy(t);
    return 0;
}
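
Both examples lean on the sbpush/sbcount/sbfree macros for growable arrays. These look like the stb-style "stretchy buffer" idiom, where a plain pointer doubles as a dynamic array whose capacity and count are stored just before the element data. The sketch below assumes that idiom; it is illustrative, not the project's actual macro definitions.

#include <stdlib.h>

#define sb__raw(a)  ((size_t *)(a) - 2)  /* [0] = capacity, [1] = count */
#define sb__cap(a)  (sb__raw(a)[0])
#define sb__cnt(a)  (sb__raw(a)[1])

/* Grow (or create) the buffer; OOM handling is omitted in this sketch. */
static void *sb__grow(void *arr, size_t itemsize)
{
    size_t cap = arr ? sb__cap(arr) * 2 : 8;   /* double, starting at 8 slots */
    size_t *p = realloc(arr ? sb__raw(arr) : NULL,
                        2 * sizeof(size_t) + cap * itemsize);

    p[0] = cap;
    if (!arr)
        p[1] = 0;                              /* fresh buffer: count is 0 */
    return p + 2;                              /* user data starts after header */
}

#define sbcount(a)  ((a) ? (int)sb__cnt(a) : 0)
#define sbfree(a)   ((a) ? (free(sb__raw(a)), (void)0) : (void)0)
#define sbpush(a, v) \
    (((!(a) || sb__cnt(a) == sb__cap(a)) ? (a) = sb__grow((a), sizeof(*(a))) : (a)), \
     (a)[sb__cnt(a)++] = (v))

Under this reading, sbpush(buf->tlines, strdup(...)) appends an owned string to a growable array of lines, and sbfree does not reset the pointer itself, which is why Example #2 clears attrs by hand after freeing it.
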
Example #2
static int highlight_node(struct list_node *node)
{
    int i;
    int ret;
    int line = 0;
    int length = 0;
    int lasttype = -1;
    struct token_data tok_data;
    struct tokenizer *t = tokenizer_init();
    struct buffer *buf = &node->file_buf;

    /* Drop attributes left over from any previous highlighting pass */
    for (i = 0; i < sbcount(buf->lines); i++) {
        sbfree(buf->lines[i].attrs);
        buf->lines[i].attrs = NULL;
    }

    if (!buf->file_data) {
        /* No whole-file buffer: tokenize each stored line individually */
        for (line = 0; line < sbcount(buf->lines); line++) {
            struct source_line *sline = &buf->lines[line];

            tokenizer_set_buffer(t, sline->line, buf->language);

            length = 0;
            lasttype = -1;
            while ((ret = tokenizer_get_token(t, &tok_data)) > 0) {
                if (tok_data.e == TOKENIZER_NEWLINE)
                    break;

                enum hl_group_kind hlg = hlg_from_tokenizer_type(tok_data.e, tok_data.data);

                /* Guard against unmapped types, mirroring the whole-file path */
                if (hlg == HLG_LAST) {
                    clog_error(CLOG_CGDB, "Bad hlg_type for '%s', e==%d\n", tok_data.data, tok_data.e);
                    hlg = HLG_TEXT;
                }

                /* Add attribute if highlight group has changed */
                if (lasttype != hlg) {
                    sbpush(buf->lines[line].attrs, hl_line_attr(length, hlg));

                    lasttype = hlg;
                }

                /* Add the text and bump our length */
                length += strlen(tok_data.data);
            }
        }

    } else {
        /* Whole-file buffer available: tokenize it in one pass */
        if (tokenizer_set_buffer(t, buf->file_data, buf->language) == -1) {
            if_print_message("%s:%d tokenizer_set_buffer error", __FILE__, __LINE__);
            /* Don't leak the tokenizer on the error path */
            tokenizer_destroy(t);
            return -1;
        }

        while ((ret = tokenizer_get_token(t, &tok_data)) > 0) {
            if (tok_data.e == TOKENIZER_NEWLINE) {
                /* Line finished: track the widest line, reset per-line state */
                if (length > buf->max_width)
                    buf->max_width = length;

                length = 0;
                lasttype = -1;
                line++;
            } else {
                enum hl_group_kind hlg = hlg_from_tokenizer_type(tok_data.e, tok_data.data);

                if (hlg == HLG_LAST) {
                    clog_error(CLOG_CGDB, "Bad hlg_type for '%s', e==%d\n", tok_data.data, tok_data.e);
                    hlg = HLG_TEXT;
                }

                /* Add attribute if highlight group has changed */
                if (lasttype != hlg) {
                    sbpush(buf->lines[line].attrs, hl_line_attr(length, hlg));

                    lasttype = hlg;
                }

                /* Add the text and bump our length */
                length += strlen(tok_data.data);
            }
        }
    }

    tokenizer_destroy(t);
    return 0;
}
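
Example #2 stores highlighting as per-line attribute lists instead of rebuilding the text: hl_line_attr(length, hlg) records the column at which a highlight group starts, and an attribute is pushed only when the group changes, so the start columns are strictly increasing. A renderer can then walk the text and the attribute list in lockstep. The sketch below illustrates that consumption; the struct layout, the stand-in enum values, and the output calls are assumptions for illustration, not CGDB's real API.

#include <stdio.h>

/* Stand-in definitions; the real ones live in the highlighting headers. */
enum hl_group_kind { HLG_TEXT, HLG_KEYWORD, HLG_LAST };

struct hl_line_attr {
    int col;                  /* column where this group takes effect */
    enum hl_group_kind kind;  /* group to apply from col onward */
};

/* Stub: in CGDB this would select a curses color/attribute pair. */
static void set_group(enum hl_group_kind kind)
{
    (void)kind;
}

static void render_line(const char *text, const struct hl_line_attr *attrs,
                        int nattrs)
{
    int a = 0;
    int col;

    for (col = 0; text[col]; col++) {
        /* Switch groups as soon as the next recorded start column is reached */
        while (a < nattrs && attrs[a].col <= col) {
            set_group(attrs[a].kind);
            a++;
        }
        putchar(text[col]);
    }
    putchar('\n');
}

The column values come from the running length counter in highlight_node, which is why both token loops reset length and lasttype at every TOKENIZER_NEWLINE.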