Example #1
int config_parse_file(server *srv, config_t *context, const char *fn) {
    tokenizer_t t;
    stream s;
    int ret;
    conf_buffer *filename;

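    /* use fn as-is if no basedir is set, if fn is absolute, or if it is
     * explicitly relative ("./" or ".\"); otherwise prefix it with basedir */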
    if (buffer_is_empty(context->basedir) ||
            (fn[0] == '/' || fn[0] == '\\') ||
            (fn[0] == '.' && (fn[1] == '/' || fn[1] == '\\'))) {
        filename = buffer_init_string(fn);
    } else {
        filename = buffer_init_buffer(context->basedir);
        buffer_append_string(filename, fn);
    }

    if (0 != stream_open(&s, filename->ptr)) {
        if (s.size == 0) {
            /* the file was empty, nothing to parse */
            ret = 0;
        } else {
            log_error_write(srv, __FILE__, __LINE__, "sbss",
                    "opening configfile ", filename, "failed:", strerror(errno));
            ret = -1;
        }
    } else {
        tokenizer_init(&t, filename, s.start, s.size);
        ret = config_parse(srv, context, &t);
    }

    stream_close(&s);
    buffer_free(filename);
    return ret;
}
Example #2
int config_parse_cmd(server *srv, config_t *context, const char *cmd) {
	tokenizer_t t;
	int ret;
	buffer *source;
	buffer *out;
	char *oldpwd;

	if (NULL == (oldpwd = getCWD())) {
		log_error_write(srv, __FILE__, __LINE__, "s",
				"cannot get cwd", strerror(errno));
		return -1;
	}

	source = buffer_init_string(cmd);
	out = buffer_init();

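	/* run the command from basedir, if one is configured */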
	if (!buffer_is_empty(context->basedir)) {
		chdir(context->basedir->ptr);
	}

	if (0 != proc_open_buffer(cmd, NULL, out, NULL)) {
		log_error_write(srv, __FILE__, __LINE__, "sbss",
				"opening", source, "failed:", strerror(errno));
		ret = -1;
	} else {
		tokenizer_init(&t, source, out->ptr, out->used);
		ret = config_parse(srv, context, &t);
	}

	buffer_free(source);
	buffer_free(out);
	chdir(oldpwd);
	free(oldpwd);
	return ret;
}
Example #3
local size_t sentencizer_next(struct mascara *imp, struct mr_token **tks)
{
   struct sentencizer *szr = (struct sentencizer *)imp;
   struct sentence *sent = &szr->sent;

   assert(szr->str && "text not set");
   sentence_clear(sent);

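   /* locate the span of the next sentence in the input text */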
   size_t len;
   const unsigned char *last_period;
   const unsigned char *str = next_sentence(szr, &len, &last_period);
   if (!str) {
      *tks = NULL;
      return 0;
   }
   size_t offset_incr = szr->offset_incr + str - szr->str;

   struct tokenizer tkr;
   tokenizer_init(&tkr, szr->vtab);
   tokenizer_set_text(&tkr.base, str, len, offset_incr);

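   /* add each token to the sentence; a period other than the sentence-final
    * one may instead be re-attached to the preceding token */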
   struct mr_token *tk;
   while (tokenizer_next(&tkr.base, &tk)) {
      if (tk->str == (const char *)last_period ||
         !sentencizer_reattach_period(sent, tk)) {
         sentence_add(sent, tk);
         if (sent->len == MR_MAX_SENTENCE_LEN) {
            szr->p = (const unsigned char *)tk->str + tk->len;
            break;
         }
      }
   }
   *tks = sent->tokens;
   return sent->len;
}
Example #4
//void ubasic_init(const char *program)
void ubasic_init(char *program)
{
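	/* reset the interpreter state and point the tokenizer at the program text */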
	program_ptr = program;
	for_stack_ptr = gosub_stack_ptr = 0;
	tokenizer_init(program_ptr);
	ended = 0;
	ErrLine = 0;
}
Example #5
//static void run_statement(void)
void run_statement(void)
{
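	/* consume the RUN statement, then restart execution from the start of the program buffer */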
	accept(TOKENIZER_RUN);
	accept(TOKENIZER_CR);
	program_ptr = BASICBUF;
	for_stack_ptr = gosub_stack_ptr = 0;
	tokenizer_init(program_ptr);
	ended = 0;
}
Example #6
float evaluate(char *expression_string)
{
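  /* tokenize and evaluate the whole string; expect(T_EOF) flags trailing input */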
  last_error = NULL;
  tokenizer_init( expression_string );
  get_sym();
  float result = expression();
  expect(T_EOF);
  return result;
}
Example #7
File: ubasic.c Project: CobooGuo/ubasic
/*---------------------------------------------------------------------------*/
void
ubasic_init(const char *program)
{
  program_ptr = program;
  for_stack_ptr = gosub_stack_ptr = 0;
  index_free();
  tokenizer_init(program);
  ended = 0;
}
Example #8
/**
 * Switches the program context
 * \param p_name program name
 */
void switch_proc(char * p_name) {
	botfs_file_descr_t new_prog;
	if (botfs_open(p_name, &new_prog, BOTFS_MODE_r, GET_MMC_BUFFER(ubasic_buffer)) != 0) {
		tokenizer_error_print(current_linenum, UNKNOWN_SUBPROC);
		ubasic_break();
	} else {
		bot_ubasic_load_file(p_name, &new_prog);
		program_ptr = 0;
		tokenizer_init(program_ptr);
	}
}
Example #9
File: ubasic.c Project: CobooGuo/ubasic
/*---------------------------------------------------------------------------*/
void
ubasic_init_peek_poke(const char *program, peek_func peek, poke_func poke)
{
  program_ptr = program;
  for_stack_ptr = gosub_stack_ptr = 0;
  index_free();
  peek_function = peek;
  poke_function = poke;
  tokenizer_init(program);
  ended = 0;
}
Example #10
void assembler_init(const char *in_file)
{
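	/* read the whole input file into memory; abort if it cannot be opened */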
	char *f_buffer = f_load_file(in_file);
	if (f_buffer == NULL) {
		printf("compiler error: error on opening input file ...\n");
		exit(EXIT_FAILURE);
	}
	ASM_DESCR.MAIN_FILE = f_process_file(f_buffer);
	fmt_init();
	tokenizer_init();
	parser_init();
	icl_init();
}
Example #11
/*---------------------------------------------------------------------------*/
void ubasic_init(const char *program)
{
  int i;
  program_ptr = program;
  for_stack_ptr = gosub_stack_ptr = 0;
  index_free();
  tokenizer_init(program);
  data_position = program_ptr;
  data_seek = 1;
  ended = 0;
  for (i = 0; i < MAX_STRING; i++)
    strings[i] = nullstr;
}
Example #12
File: highlight.cpp Project: scottlu/cgdb
int highlight_node(const char *filename, struct buffer *buf)
{
    int ret;
    int length = 0;
    int lasttype = -1;
    struct ibuf *ibuf = ibuf_init();
    struct tokenizer *t = tokenizer_init();

    if (tokenizer_set_file(t, filename, buf->language) == -1) {
        if_print_message("%s:%d tokenizer_set_file error", __FILE__, __LINE__);
        ibuf_free(ibuf);
        tokenizer_destroy(t);
        return -1;
    }

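    /* accumulate token text in ibuf, flushing one highlighted source line
     * whenever a newline token is seen */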
    while ((ret = tokenizer_get_token(t)) > 0) {
        enum tokenizer_type e = tokenizer_get_packet_type(t);

        /*if_print_message  ( "TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum ( e ) ); */

        if (e == TOKENIZER_NEWLINE) {
            sbpush(buf->tlines, strdup(ibuf_get(ibuf)));

            if (length > buf->max_width)
                buf->max_width = length;

            length = 0;
            lasttype = -1;
            ibuf_clear(ibuf);
        } else {
            const char *tok_data = tokenizer_get_data(t);
            enum hl_group_kind hlg = hlg_from_tokenizer_type(e, tok_data);

            if (hlg == HLG_LAST) {
                logger_write_pos(logger, __FILE__, __LINE__, "Bad hlg_type for '%s', e==%d\n", tok_data, e);
                hlg = HLG_TEXT;
            }

            /* Set the highlight group type */
            add_type(ibuf, &lasttype, hlg);
            /* Add the text and bump our length */
            length += ibuf_add(ibuf, tok_data);
        }
    }

    ibuf_free(ibuf);
    tokenizer_destroy(t);
    return 0;
}
Example #13
/*---------------------------------------------------------------------------*/
static void
jump_linenum_slow(int linenum)
{
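  /* restart the tokenizer at the top of the program and scan forward
     until a line with the requested number is found */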
  tokenizer_init(program_ptr);
  while(tokenizer_num() != linenum) {
    do {
      do {
        tokenizer_next();
      } while(current_token != TOKENIZER_CR &&
          current_token != TOKENIZER_ENDOFINPUT);
      if(current_token == TOKENIZER_CR) {
        tokenizer_next();
      }
    } while(current_token != TOKENIZER_NUMBER);
    DEBUG_PRINTF("jump_linenum_slow: Found line %d\n", tokenizer_num());
  }
}
Example #14
int main ( int argc, char * argv[] )
{
	static char* testinp = "First sentence. Second nice sentence. Is this all? No, this is the end!";
	char* output;
	int outlen;
	t_tokenizer mytokenizer;
	int i;

	printf("Input='%s'\n", testinp);

	printf("Initializing... ");	
	i = tokenizer_init(&mytokenizer);
	printf("Done(%d)\n", i);
	
	tokenizer_tokenize(mytokenizer, testinp, &output, &outlen);
	printf("Output(%d)='%s'\n", outlen, output);
	
	free(output);
	tokenizer_destroy(mytokenizer);

	return 0;
}
Example #15
//static void jump_linenum(int linenum)
void jump_linenum(int linenum)
{
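	/* rewind to the start of the program buffer and scan for the target line;
	   set `ended` if the end of input is reached first */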
	ptr = BASICBUF;
	nextptr = ptr;

	tokenizer_init(ptr);
	while(tokenizer_num() != linenum) {
		do {
			do {
				tokenizer_next();
			} while(tokenizer_token() != TOKENIZER_CR &&
					tokenizer_token() != TOKENIZER_ENDOFINPUT);
			if(tokenizer_token() == TOKENIZER_CR) {
				tokenizer_next();
			}
			if(tokenizer_token() == TOKENIZER_ENDOFINPUT){
				ended = 1;
				return;
			}
		} while(tokenizer_token() != TOKENIZER_NUMBER);
	}
}
Example #16
static void jump_linenum (int linenum)
{
  FILE *original;
  int   finished;

  /* Check whether the scanner has finished; if it has,
     this call also closes the stream. */
  finished = tokenizer_finished();
  /* We save this copy in case the scanner wasn't finished. */
  original = io_handle();

  /* Start a new scanner from the beginning of the file. */
  tokenizer_init(io_file());
  reset(T_ERROR);
  io_reset();

  /* Search for linenum. */
  find_linenum(linenum);

  /* If the search ended at EOF, linenum could not be found! */
  if (tokenizer_finished())
  {
    dprintf(
      "*warning: could not jump to `%d'\n",
      E_WARNING, linenum);
    /* Set back to original stream */
    io_set(io_file(), original);
    /* Prepare scanner to continue. */
    if (!finished)
    {
      reset(T_NUMBER);
      io_reset();
      io_next();
    }
  }
}
Example #17
File: parser.c Project: fachat/af65k
void parser_push(const context_t *ctx, const line_t *line) {

	position_t *pos = line->position;

	// is the first block already set?
	if (p->blk == NULL) {
		p->blk = block_init(NULL, pos);
	}

	statement_t *stmt = new_statement(ctx);

	const operation_t *op = NULL;
	const char *name = NULL;
	label_t *label = NULL;

	// allow the tokenizer to fold comma into ",x" etc addressing mode tokens
	int allow_index = 0;

	// tokenize the line
	pstate_t state = P_INIT;
	tokenizer_t *tok = tokenizer_init(line->line);
	while (tokenizer_next(tok, allow_index)) {
		switch(state) {
		case P_OP:
			if (tok->type == T_TOKEN && tok->vals.op == OP_COLON) {
				// accept a colon after a label,
				// then continue with the next statement
				stmt->type = S_LABEQPC;
				statement_push(stmt);
				stmt = new_statement(ctx);
				state = P_INIT;
				break;
			}
			if (tok->type == T_TOKEN && tok->vals.op == OP_ASSIGN) {
				// after label, that's a label value definition
				stmt->type = S_LABDEF;
				// next define the label from param
				state = P_PARAM;
				break;
			}
			// fall-through!
		case P_INIT:
			switch(tok->type) {
			case T_NAME:
				name = mem_alloc_strn(tok->line + tok->ptr, tok->len);
				op = operation_find(name);
				if (op != NULL) {
					// check if the operation is compatible with the current CPU
					if (0 == (ctx->cpu->isa & op->isa)) {
						// TODO: config for either no message or error
						warn_operation_not_for_cpu(pos, name, ctx->cpu->name);
						op = NULL;
					}
				}
				if (op == NULL) {
					// label
					// TODO: redefinition?
					label = label_init(ctx, name, pos);
					if (state == P_OP) {
						// we already had a label
						stmt->type = S_LABEQPC;
						statement_push(stmt);
						stmt = new_statement(ctx);
					}
					stmt->label = label;
					// expect operation next (but accept labels too)
					state = P_OP;
				} else {
					// operation
					stmt->op = op;
					state = P_PARAM;
				}
				break;
			default:
				// syntax error
				error_syntax(pos);
				goto end;
				break;
			}
			break;
		case P_PARAM:
			// parse parameters
			arith_parse(tok, allow_index, &stmt->param);
			break;
		default:
			error_syntax(pos);
			goto end;
			break;
		};
	}
	statement_push(stmt);
end:
	tokenizer_free(tok);
}
Example #18
File: highlight.c Project: rsenn/cgdb
static int highlight_node ( struct list_node *node ) {
    struct tokenizer *t = tokenizer_init ();
    int ret;
    struct ibuf *ibuf = ibuf_init ();
    ibuf_addchar ( ibuf, HL_CHAR );
    ibuf_addchar ( ibuf, HLG_TEXT );

    /* Initialize */
    node->buf.length = 0;
    node->buf.tlines = NULL;
    node->buf.max_width = 0;

    if ( tokenizer_set_file ( t, node->path, node->language ) == -1 ) {
        if_print_message ("%s:%d tokenizer_set_file error", __FILE__, __LINE__);
        ibuf_free ( ibuf );
        tokenizer_destroy ( t );
        return -1;
    }

    while ( ( ret = tokenizer_get_token ( t ) ) > 0 ) {
        enum tokenizer_type e = tokenizer_get_packet_type ( t );
        /*if_print_message  ( "TOKEN(%d:%s)\n", e, tokenizer_get_printable_enum ( e ) );*/

        switch ( e ) {
        case TOKENIZER_KEYWORD:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_KEYWORD );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_TYPE:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TYPE );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_LITERAL:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_LITERAL );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_NUMBER:
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            break;
        case TOKENIZER_COMMENT:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_COMMENT );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_DIRECTIVE:
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_DIRECTIVE );
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_TEXT:
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            break;
        case TOKENIZER_NEWLINE:
            node->buf.length++;
            node->buf.tlines = realloc ( node->buf.tlines, sizeof ( char *) * node->buf.length );
            node->buf.tlines[node->buf.length-1] = strdup ( ibuf_get ( ibuf ) );

            if ( ibuf_length ( ibuf ) > node->buf.max_width )
                node->buf.max_width = ibuf_length ( ibuf );

            ibuf_clear ( ibuf );
            ibuf_addchar ( ibuf, HL_CHAR );
            ibuf_addchar ( ibuf, HLG_TEXT );
            break;
        case TOKENIZER_ERROR:
            ibuf_add ( ibuf, tokenizer_get_data ( t ) );
            break;
        default:
            ibuf_free ( ibuf );
            tokenizer_destroy ( t );
            return -1;
        }
    }

    ibuf_free ( ibuf );
    tokenizer_destroy ( t );
    return 0;
}
Example #19
File: sources.cpp Project: lizh06/cgdb
static int highlight_node(struct list_node *node)
{
    int i;
    int ret;
    int line = 0;
    int length = 0;
    int lasttype = -1;
    struct token_data tok_data;
    struct tokenizer *t = tokenizer_init();
    struct buffer *buf = &node->file_buf;

    for (i = 0; i < sbcount(buf->lines); i++) {
        sbfree(buf->lines[i].attrs);
        buf->lines[i].attrs = NULL;
    }

    if (!buf->file_data) {
        for (line = 0; line < sbcount(buf->lines); line++) {
            struct source_line *sline = &buf->lines[line];

            tokenizer_set_buffer(t, sline->line, buf->language);

            length = 0;
            lasttype = -1;
            while ((ret = tokenizer_get_token(t, &tok_data)) > 0) {
                if (tok_data.e == TOKENIZER_NEWLINE)
                    break;

                enum hl_group_kind hlg = hlg_from_tokenizer_type(tok_data.e, tok_data.data);

                /* Add attribute if highlight group has changed */
                if (lasttype != hlg) {
                    sbpush(buf->lines[line].attrs, hl_line_attr(length, hlg));

                    lasttype = hlg;
                }

                /* Add the text and bump our length */
                length += strlen(tok_data.data);
            }
        }

    } else {
        if (tokenizer_set_buffer(t, buf->file_data, buf->language) == -1) {
            if_print_message("%s:%d tokenizer_set_buffer error", __FILE__, __LINE__);
            return -1;
        }

        while ((ret = tokenizer_get_token(t, &tok_data)) > 0) {
            if (tok_data.e == TOKENIZER_NEWLINE) {
                if (length > buf->max_width)
                    buf->max_width = length;

                length = 0;
                lasttype = -1;
                line++;
            } else {
                enum hl_group_kind hlg = hlg_from_tokenizer_type(tok_data.e, tok_data.data);

                if (hlg == HLG_LAST) {
                    clog_error(CLOG_CGDB, "Bad hlg_type for '%s', e==%d\n", tok_data.data, tok_data.e);
                    hlg = HLG_TEXT;
                }

                /* Add attribute if highlight group has changed */
                if (lasttype != hlg) {
                    sbpush(buf->lines[line].attrs, hl_line_attr(length, hlg));

                    lasttype = hlg;
                }

                /* Add the text and bump our length */
                length += strlen(tok_data.data);
            }
        }
    }

    tokenizer_destroy(t);
    return 0;
}
Example #20
void vvtbi_init (const char *source)
{
  tokenizer_init(source);
  /* initialize the variable container. */
  memset(variables, 0, sizeof(variables));
}