Example #1
File: cgdbrc.c Project: denizt/cgdb
int command_parse_file(FILE * fp)
{
    char buffer[4096];
    char *p = buffer;
    int linenumber = 0;

    while (linenumber++, fgets(p, sizeof (buffer) - (p - buffer), fp)) {
        int bufferlen = strlen(buffer);

        if ((bufferlen - 2 >= 0) && buffer[bufferlen - 2] == '\\') {
            /* line continuation character, read another line into the buffer */
            linenumber--;
            p = buffer + bufferlen - 2;
            continue;
        }

        if (command_parse_string(buffer)) {
            /* buffer already has an \n */
            if_print_message("Error parsing line %d: %s", linenumber, buffer);
            /* return -1; don't return, let's keep parsing the file. */
        }

        p = buffer;
    }

    return 0;
}
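The continuation handling above is a common fgets pattern: when a physical line ends in a backslash, the next fgets call overwrites the trailing "\<newline>" so that several physical lines accumulate into one logical line before being parsed. Below is a minimal, standalone sketch of the same technique, independent of cgdb's command_parse_string; the sample input string is illustrative only, and fmemopen is POSIX.

#define _POSIX_C_SOURCE 200809L
#include <stdio.h>
#include <string.h>

int main(void)
{
    /* Two physical lines joined by a trailing backslash, then one plain line. */
    char input[] = "set option=\\\nvalue\nquit\n";
    FILE *fp = fmemopen(input, strlen(input), "r");
    char buffer[4096];
    char *p = buffer;

    while (fgets(p, sizeof (buffer) - (p - buffer), fp)) {
        int bufferlen = strlen(buffer);

        if (bufferlen >= 2 && buffer[bufferlen - 2] == '\\') {
            /* Continuation: next fgets overwrites the "\<newline>" pair. */
            p = buffer + bufferlen - 2;
            continue;
        }

        printf("logical line: %s", buffer);
        p = buffer;
    }

    fclose(fp);
    return 0;
}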
Example #2
int highlight_node(const char *filename, struct buffer *buf)
{
    int ret;
    int length = 0;
    int lasttype = -1;
    struct ibuf *ibuf = ibuf_init();
    struct tokenizer *t = tokenizer_init();

    if (tokenizer_set_file(t, filename, buf->language) == -1) {
        if_print_message("%s:%d tokenizer_set_file error", __FILE__, __LINE__);
        /* Release the buffers allocated above before bailing out */
        ibuf_free(ibuf);
        tokenizer_destroy(t);
        return -1;
    }

    while ((ret = tokenizer_get_token(t)) > 0) {
        enum tokenizer_type e = tokenizer_get_packet_type(t);

        /*if_print_message  ( "TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum ( e ) ); */

        if (e == TOKENIZER_NEWLINE) {
            sbpush(buf->tlines, strdup(ibuf_get(ibuf)));

            if (length > buf->max_width)
                buf->max_width = length;

            length = 0;
            lasttype = -1;
            ibuf_clear(ibuf);
        } else {
            const char *tok_data = tokenizer_get_data(t);
            enum hl_group_kind hlg = hlg_from_tokenizer_type(e, tok_data);

            if (hlg == HLG_LAST) {
                logger_write_pos(logger, __FILE__, __LINE__, "Bad hlg_type for '%s', e==%d\n", tok_data, e);
                hlg = HLG_TEXT;
            }

            /* Set the highlight group type */
            add_type(ibuf, &lasttype, hlg);
            /* Add the text and bump our length */
            length += ibuf_add(ibuf, tok_data);
        }
    }

    ibuf_free(ibuf);
    tokenizer_destroy(t);
    return 0;
}
Example #3
static int highlight_node(struct list_node *node)
{
    int i;
    int ret;
    int line = 0;
    int length = 0;
    int lasttype = -1;
    struct token_data tok_data;
    struct tokenizer *t = tokenizer_init();
    struct buffer *buf = &node->file_buf;

    for (i = 0; i < sbcount(buf->lines); i++) {
        sbfree(buf->lines[i].attrs);
        buf->lines[i].attrs = NULL;
    }

    if (!buf->file_data) {
        for (line = 0; line < sbcount(buf->lines); line++) {
            struct source_line *sline = &buf->lines[line];

            tokenizer_set_buffer(t, sline->line, buf->language);

            length = 0;
            lasttype = -1;
            while ((ret = tokenizer_get_token(t, &tok_data)) > 0) {
                if (tok_data.e == TOKENIZER_NEWLINE)
                    break;

                enum hl_group_kind hlg = hlg_from_tokenizer_type(tok_data.e, tok_data.data);

                /* Add attribute if highlight group has changed */
                if (lasttype != hlg) {
                    sbpush(buf->lines[line].attrs, hl_line_attr(length, hlg));

                    lasttype = hlg;
                }

                /* Add the text and bump our length */
                length += strlen(tok_data.data);
            }
        }

    } else {
        if (tokenizer_set_buffer(t, buf->file_data, buf->language) == -1) {
            if_print_message("%s:%d tokenizer_set_buffer error", __FILE__, __LINE__);
            /* Release the tokenizer allocated above before bailing out */
            tokenizer_destroy(t);
            return -1;
        }

        while ((ret = tokenizer_get_token(t, &tok_data)) > 0) {
            if (tok_data.e == TOKENIZER_NEWLINE) {
                if (length > buf->max_width)
                    buf->max_width = length;

                length = 0;
                lasttype = -1;
                line++;
            } else {
                enum hl_group_kind hlg = hlg_from_tokenizer_type(tok_data.e, tok_data.data);

                if (hlg == HLG_LAST) {
                    clog_error(CLOG_CGDB, "Bad hlg_type for '%s', e==%d\n", tok_data.data, tok_data.e);
                    hlg = HLG_TEXT;
                }

                /* Add attribute if highlight group has changed */
                if (lasttype != hlg) {
                    sbpush(buf->lines[line].attrs, hl_line_attr(length, hlg));

                    lasttype = hlg;
                }

                /* Add the text and bump our length */
                length += strlen(tok_data.data);
            }
        }
    }

    tokenizer_destroy(t);
    return 0;
}
Example #4
File: cgdb.c Project: i4fumi/cgdb
/**
 * The goal of this function is to display the tab completion information
 * to the user in an asynchronous and potentially interactive manner.
 *
 * It will output as much to the screen as it can until user input is needed.
 * If user input is needed, then this function must stop and wait until
 * that data has been received.
 *
 * If this function is called a second time with the same comptr
 * parameter, it will continue outputting the tab completion data from
 * where it left off.
 *
 * \param comptr
 * The tab completion data to output to the user
 *
 * \param key
 * The key the user wants to pass to the query command that was made.
 * If -1, then this is assumed to be the first time this function has been
 * called.
 *
 * \return
 * 0 on success or -1 on error
 */
static int handle_tab_completion_request(tab_completion_ptr comptr, int key)
{
    int query_items;
    int gdb_window_size;

    if (!comptr)
        return -1;

    query_items = rline_get_rl_completion_query_items(rline);
    gdb_window_size = get_gdb_height();

    if (comptr->state == TAB_COMPLETION_START) {
        if_print("\n");

        if (query_items > 0 && comptr->num_matches >= query_items) {
            if_print_message("Display all %d possibilities? (y or n)\n",
                    comptr->num_matches);
            comptr->state = TAB_COMPLETION_QUERY_POSSIBILITIES;
            return 0;
        }

        comptr->state = TAB_COMPLETION_COMPLETION_DISPLAY;
    }

    if (comptr->state == TAB_COMPLETION_QUERY_POSSIBILITIES) {
        int val = cgdb_get_y_or_n(key, 0);

        if (val == 1)
            comptr->state = TAB_COMPLETION_COMPLETION_DISPLAY;
        else if (val == 0)
            comptr->state = TAB_COMPLETION_DONE;
        else
            return 0;           /* stay at the same state */
    }

    if (comptr->state == TAB_COMPLETION_QUERY_PAGER) {
        int i = cgdb_get_y_or_n(key, 1);

        if_clear_line();        /* Clear the --More-- */
        if (i == 0)
            comptr->state = TAB_COMPLETION_DONE;
        else if (i == 2) {
            comptr->lines--;
            comptr->state = TAB_COMPLETION_COMPLETION_DISPLAY;
        } else {
            comptr->lines = 0;
            comptr->state = TAB_COMPLETION_COMPLETION_DISPLAY;
        }
    }

    if (comptr->state == TAB_COMPLETION_COMPLETION_DISPLAY) {
        for (; comptr->total <= comptr->num_matches; comptr->total++) {
            if_print(comptr->matches[comptr->total]);
            if_print("\n");

            comptr->lines++;
            if (comptr->lines >= (gdb_window_size - 1) &&
                    comptr->total < comptr->num_matches) {
                if_print("--More--");
                comptr->state = TAB_COMPLETION_QUERY_PAGER;
                comptr->total++;
                return 0;
            }
        }

        comptr->state = TAB_COMPLETION_DONE;
    }

    if (comptr->state == TAB_COMPLETION_DONE) {
        tab_completion_destroy(completion_ptr);
        completion_ptr = NULL;
        is_tab_completing = 0;
        rline_rl_forced_update_display(rline);
    }

    return 0;
}
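The function above is re-entrant: the first call passes key == -1, and while completion output is pending each later keystroke is fed back in so the state machine resumes where it stopped. A hedged sketch of that calling pattern follows; it assumes the file-scope completion_ptr and is_tab_completing variables referenced in the TAB_COMPLETION_DONE branch, and the sketch_* names are purely illustrative, not part of cgdb.

static void sketch_start_completion(tab_completion_ptr comptr)
{
    /* First call: key == -1 marks the initial invocation. */
    completion_ptr = comptr;
    is_tab_completing = 1;
    handle_tab_completion_request(completion_ptr, -1);
}

static void sketch_handle_key(int key)
{
    if (is_tab_completing) {
        /* Route keys back into the state machine; it destroys
         * completion_ptr and clears is_tab_completing once DONE. */
        handle_tab_completion_request(completion_ptr, key);
        return;
    }
    /* ... normal key dispatch ... */
}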
Example #5
void if_sdc_print(const char *buf)
{
    if_print_message("cgdb sdc:%s", buf);
}
Example #6
static int highlight_node ( struct list_node *node ) {
    struct tokenizer *t = tokenizer_init ();
    int ret;
    struct ibuf *ibuf = ibuf_init ();
    ibuf_addchar ( ibuf, HL_CHAR );
    ibuf_addchar ( ibuf, HLG_TEXT );

    /* Initialize */
    node->buf.length = 0;
    node->buf.tlines = NULL;
    node->buf.max_width = 0;

    if ( tokenizer_set_file ( t, node->path, node->language ) == -1 ) {
        if_print_message ("%s:%d tokenizer_set_file error", __FILE__, __LINE__);
        /* Release the buffers allocated above before bailing out */
        ibuf_free ( ibuf );
        tokenizer_destroy ( t );
        return -1;
    }

    while ( ( ret = tokenizer_get_token ( t ) ) > 0 ) {
        enum tokenizer_type e = tokenizer_get_packet_type ( t );
        /*if_print_message  ( "TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum ( e ) );*/

        switch ( e ) {
        case TOKENIZER_KEYWORD:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_KEYWORD );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_TYPE:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TYPE );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_LITERAL:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_LITERAL );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_NUMBER:
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            break;
        case TOKENIZER_COMMENT:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_COMMENT );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_DIRECTIVE:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_DIRECTIVE );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_TEXT:
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            break;
        case TOKENIZER_NEWLINE:
            node->buf.length++;
            node->buf.tlines = realloc ( node->buf.tlines, sizeof ( char *) * node->buf.length );
            node->buf.tlines[node->buf.length-1] = strdup ( ibuf_get ( ibuf ) );

            if ( ibuf_length ( ibuf ) > node->buf.max_width )
                node->buf.max_width = ibuf_length ( ibuf );

            ibuf_clear ( ibuf );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_ERROR:
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            break;
        default:
            /* Unknown token: clean up before bailing out */
            ibuf_free ( ibuf );
            tokenizer_destroy ( t );
            return -1;
        }
    }

    ibuf_free ( ibuf );
    tokenizer_destroy ( t );
    return 0;
}