int highlight_node(const char *filename, struct buffer *buf)
{
    int ret;
    int length = 0;
    int lasttype = -1;
    struct ibuf *ibuf = ibuf_init();
    struct tokenizer *t = tokenizer_init();

    if (tokenizer_set_file(t, filename, buf->language) == -1) {
        if_print_message("%s:%d tokenizer_set_file error", __FILE__, __LINE__);
        /* Release the scratch buffer and tokenizer before bailing out */
        ibuf_free(ibuf);
        tokenizer_destroy(t);
        return -1;
    }

    while ((ret = tokenizer_get_token(t)) > 0) {
        enum tokenizer_type e = tokenizer_get_packet_type(t);
        /* if_print_message("TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum(e)); */

        if (e == TOKENIZER_NEWLINE) {
            /* Store the finished line and track the widest line seen */
            sbpush(buf->tlines, strdup(ibuf_get(ibuf)));

            if (length > buf->max_width)
                buf->max_width = length;

            length = 0;
            lasttype = -1;
            ibuf_clear(ibuf);
        } else {
            const char *tok_data = tokenizer_get_data(t);
            enum hl_group_kind hlg = hlg_from_tokenizer_type(e, tok_data);

            if (hlg == HLG_LAST) {
                logger_write_pos(logger, __FILE__, __LINE__,
                        "Bad hlg_type for '%s', e==%d\n", tok_data, e);
                hlg = HLG_TEXT;
            }

            /* Set the highlight group type */
            add_type(ibuf, &lasttype, hlg);
            /* Add the text and bump our length */
            length += ibuf_add(ibuf, tok_data);
        }
    }

    ibuf_free(ibuf);
    tokenizer_destroy(t);
    return 0;
}
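/*
 * The version above leans on two helpers that are not shown here:
 * hlg_from_tokenizer_type(), which maps a tokenizer packet type to a
 * highlight group, and add_type(), which writes the group marker into the
 * line buffer.  What follows is a minimal sketch of what such helpers might
 * look like, assuming the same two-byte HL_CHAR escape encoding that the
 * switch-based variant further below writes out explicitly.  The bodies and
 * exact signatures are illustrative assumptions, not the project's actual
 * implementation.
 */

/* Map a tokenizer packet type to a highlight group (sketch). */
static enum hl_group_kind hlg_from_tokenizer_type(enum tokenizer_type e,
        const char *tok_data)
{
    (void) tok_data;            /* the real helper may also inspect the text */

    switch (e) {
        case TOKENIZER_KEYWORD:   return HLG_KEYWORD;
        case TOKENIZER_TYPE:      return HLG_TYPE;
        case TOKENIZER_LITERAL:   return HLG_LITERAL;
        case TOKENIZER_COMMENT:   return HLG_COMMENT;
        case TOKENIZER_DIRECTIVE: return HLG_DIRECTIVE;
        case TOKENIZER_TEXT:
        case TOKENIZER_NUMBER:
        case TOKENIZER_ERROR:     return HLG_TEXT;
        default:                  return HLG_LAST;   /* caller logs and falls back */
    }
}

/* Emit a group marker only when the group changes, so runs of tokens in the
 * same group do not repeat the escape sequence (sketch). */
static void add_type(struct ibuf *ibuf, int *lasttype, enum hl_group_kind hlg)
{
    if (*lasttype != (int) hlg) {
        ibuf_addchar(ibuf, HL_CHAR);
        ibuf_addchar(ibuf, (char) hlg);
        *lasttype = (int) hlg;
    }
}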
static int highlight_node ( struct list_node *node )
{
    struct tokenizer *t = tokenizer_init ();
    int ret;
    struct ibuf *ibuf = ibuf_init ();

    ibuf_addchar ( ibuf, HL_CHAR );
    ibuf_addchar ( ibuf, HLG_TEXT );

    /* Initialize */
    node->buf.length = 0;
    node->buf.tlines = NULL;
    node->buf.max_width = 0;

    if ( tokenizer_set_file ( t, node->path, node->language ) == -1 ) {
        if_print_message ( "%s:%d tokenizer_set_file error", __FILE__, __LINE__ );
        /* Release the scratch buffer and tokenizer before bailing out */
        ibuf_free ( ibuf );
        tokenizer_destroy ( t );
        return -1;
    }

    while ( ( ret = tokenizer_get_token ( t ) ) > 0 ) {
        enum tokenizer_type e = tokenizer_get_packet_type ( t );
        /* if_print_message ( "TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum ( e ) ); */

        switch ( e ) {
            case TOKENIZER_KEYWORD:
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_KEYWORD );
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TEXT );
                break;
            case TOKENIZER_TYPE:
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TYPE );
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TEXT );
                break;
            case TOKENIZER_LITERAL:
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_LITERAL );
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TEXT );
                break;
            case TOKENIZER_NUMBER:
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                break;
            case TOKENIZER_COMMENT:
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_COMMENT );
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TEXT );
                break;
            case TOKENIZER_DIRECTIVE:
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_DIRECTIVE );
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TEXT );
                break;
            case TOKENIZER_TEXT:
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                break;
            case TOKENIZER_NEWLINE:
                /* Store the finished line and track the widest line seen */
                node->buf.length++;
                node->buf.tlines = realloc ( node->buf.tlines,
                        sizeof ( char * ) * node->buf.length );
                node->buf.tlines[node->buf.length - 1] = strdup ( ibuf_get ( ibuf ) );

                if ( ibuf_length ( ibuf ) > node->buf.max_width )
                    node->buf.max_width = ibuf_length ( ibuf );

                /* Start the next line in the default text group */
                ibuf_clear ( ibuf );
                ibuf_addchar ( ibuf, HL_CHAR );
                ibuf_addchar ( ibuf, HLG_TEXT );
                break;
            case TOKENIZER_ERROR:
                ibuf_add ( ibuf, tokenizer_get_data ( t ) );
                break;
            default:
                ibuf_free ( ibuf );
                tokenizer_destroy ( t );
                return -1;
        }
    }

    ibuf_free ( ibuf );
    tokenizer_destroy ( t );
    return 0;
}
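/*
 * In the switch-based variant above, every color change is stored inline in
 * the line itself: an HL_CHAR escape byte followed by a byte holding the
 * hl_group_kind value, with the visible text in between.  Whatever later
 * draws node->buf.tlines therefore has to split each stored string on those
 * two-byte markers.  The decoder below is only an illustration of that
 * encoding, assuming a caller-supplied emit() callback; it is not the
 * project's actual drawing code, which applies curses attributes instead.
 */
static void hl_decode_line(const char *line,
        void (*emit)(const char *text, int len, enum hl_group_kind group))
{
    enum hl_group_kind group = HLG_TEXT;
    const char *start = line;
    const char *p = line;

    while (*p) {
        if (*p == HL_CHAR && p[1]) {
            if (p > start)
                emit(start, (int) (p - start), group);  /* flush text so far */
            group = (enum hl_group_kind) (unsigned char) p[1];
            p += 2;                                     /* skip the marker */
            start = p;
        } else {
            p++;
        }
    }

    if (p > start)
        emit(start, (int) (p - start), group);          /* trailing text */
}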