コード例 #1
0
ファイル: lex.c プロジェクト: zayac/pipo
/*
 * Tokenize the file named on the command line, printing each token
 * until EOF.
 *
 * Returns 0 on success, 1 on usage, allocation, or lexer-init error
 * (the original returned 0 on every path, hiding failures from the
 * shell).
 */
int
main (int argc, char *argv[])
{
  struct lexer *lex = malloc (sizeof *lex);
  struct token *tok = NULL;
  int ret = 1;

  if (lex == NULL)
    {
      fprintf (stderr, "Out of memory\n");
      return 1;
    }

  if (argc <= 1)
    {
      fprintf (stderr, "No input file\n");
      goto cleanup;
    }

  if (!lexer_init (lex, argv[1]))
    goto cleanup;

  /* Print and free every token up to (but not including) EOF.  */
  while ((tok = lexer_get_token (lex))->tok_class != tok_eof)
    {
      token_print (tok);
      token_free (tok);
    }

  token_free (tok);   /* release the EOF token itself */
  lexer_finalize (lex);
  ret = 0;

cleanup:
  free (lex);
  return ret;
}
コード例 #2
0
ファイル: pdb_types.c プロジェクト: MJL85/Doom3Metamod
/*
 *	Standard node loading function to be used for
 *	node load callbacks if desired.
 *
 *	Reads tokens from fptr, creating one child node under pptr (via
 *	the type's create_cb, then load_cb if present) per token, until
 *	the enclosing block is closed or the input runs out.
 *
 *	Returns 1 on success, 0 on error (unknown child type or a failed
 *	child load).
 *
 *	NOTE(review): the caller-supplied tok_arr is used purely as
 *	scratch storage -- its incoming value is overwritten, never read.
 */
int pdb_standard_load_node(FILE* fptr, struct pdb_node_t* pptr,
	char** tok_arr, int* line) {
	
	int type = pptr->type;
	struct pdb_node_types_t* ctiptr = NULL;
	void* cptr = NULL;
	char* tok = pdb_get_token(fptr, &type, line);

	while (tok) {
		/*
		 *	Is the block over?
		 */
		if ((type & BLOCK_CLOSE) == BLOCK_CLOSE) {
			free(tok);
			return 1;
		}
		
		/* Split the raw token into an argument array for the callbacks. */
		tok_arr = pdb_token_parse(tok);

		/*
		 *	Create the child node and add to parent.
		 */
		ctiptr = pdb_get_type_info(type);
		if (!ctiptr) {
			fprintf(stderr, "%s:%s():%i: Error: Unknown child type %i on line "
				"%i; halting database load.\n", __FILE__, __FUNCTION__,
				__LINE__, type, *line);
			/* BUG FIX: the original leaked tok_arr on this path. */
			token_free(tok_arr);
			free(tok);
			return 0;
		}
		/* NOTE(review): cptr is not NULL-checked before being handed to
		   load_cb -- confirm create_cb cannot fail. */
		cptr = ctiptr->create_cb(tok_arr[0], pptr, tok_arr);
		
		/*
		 *	Load the child node (if supported).
		 */
		if (ctiptr->load_cb) {
			if (!ctiptr->load_cb(fptr, cptr, tok_arr, line)) {
				fprintf(stderr, "%s:%s():%i: Error: An error occured while "
					"loading the database; halting database load on line %i.\n",
					__FILE__, __FUNCTION__, __LINE__, *line);
				token_free(tok_arr);
				free(tok);
				return 0;
			}
		}
		
		/*
		 *	Get next token.
		 */
		token_free(tok_arr);
		free(tok);
		type = pptr->type;
		tok = pdb_get_token(fptr, &type, line);
	}
	
	return 1;
}
コード例 #3
0
ファイル: pdb.c プロジェクト: weaponrex/xplugin-sdk
/*
 *	Return a given node from the database.
 *
 *	Walks `path` (split on PDB_PATH_DELIM) from the root node, asking
 *	each node type's query_cb for the next child.  Returns the node
 *	found, the root node for "" or "/", or NULL/0 on failure.  The
 *	database mutex is held for the duration of the walk.
 */
DLLEXP struct pdb_node_t* pdb_query_node(struct pdb* dbptr, char* path) {
	char** tok_arr;
	struct pdb_node_t* nptr;
	struct pdb_node_types_t* tiptr;
	int i = 0;
	
	if (!dbptr)
		return 0;

	PDB_MUTEX_LOCK(dbptr);

	/* An empty or root path means the database root itself. */
	if (!strcmp(path, "") || !strcmp(path, "/")) {
		PDB_MUTEX_UNLOCK(dbptr);
		return dbptr->data;
	}
	
	tok_arr = token_parse(path, PDB_PATH_DELIM, NULL);
	
	nptr = dbptr->data;
	tiptr = NULL;
	
	while (tok_arr[i]) {
		tiptr = pdb_get_type_info(nptr->type);
		/* BUG FIX: guard against an unknown node type; the original
		   dereferenced tiptr unconditionally (sibling loader code does
		   perform this check). */
		if (!tiptr) {
			nptr = NULL;
			break;
		}
		nptr = tiptr->query_cb(nptr, tok_arr[i]);
		if (!nptr)
			break;
		++i;
	}
	
	token_free(tok_arr);
	
	PDB_MUTEX_UNLOCK(dbptr);

	return nptr;
}
コード例 #4
0
/*
 * Scan `filename` line by line, printing every token found; aborts the
 * process on open failure, allocation failure, or an unrecognized
 * token.
 */
void scan_file(char *filename) {
  FILE *fp = fopen(filename, "r");
  if (!fp) {
    printf("Can't open input file %s!\n", filename);
    exit(1);
  }

  int current_line = 0;
  int more_lines = true;
  while (more_lines) {
    /* NOTE(review): assumes get_current_line_length() includes room for
       any terminator read_line() writes -- confirm its contract. */
    int line_len = get_current_line_length(fp);
    char *buf = malloc(line_len);
    if (!buf) {
      printf("Out of memory reading %s!\n", filename);
      exit(1);
    }
    /* scan_string() advances buf as it consumes input; keep the
       allocation base so it can be freed and so column offsets can be
       computed. */
    char *starting_buf = buf;
    more_lines = read_line(fp, buf);
    current_line++;

    while (1) {
      Token *token = scan_string(&buf);
      if (!token) {
        break;
      }
      if (token->type == INVALID) {
        printf("Error: Unrecognized token '%s' in %s (row %i, col %i)\n", token->text, filename, current_line, (int)(buf - starting_buf)); 
        exit(1);
      }
      token_print(*token);
      token_free(token);
    }
    /* BUG FIX: the original freed `buf - line_len`, which is only the
       allocation base if scan_string() advanced buf by exactly line_len
       bytes.  starting_buf is the pointer malloc() actually returned. */
    free(starting_buf);
  }
  fclose(fp);  /* BUG FIX: the original leaked the FILE handle */
}
コード例 #5
0
ファイル: parserapi.c プロジェクト: DevL/ponyc
// Parse `source` into an AST using the given start rule, attaching the
// resulting module to `package`.  Returns true on success; on failure
// the source is closed and false is returned.
bool parse(ast_t* package, source_t* source, rule_t start,
  const char* expected)
{
  assert(package != NULL);
  assert(source != NULL);
  assert(expected != NULL);

  // A lexer over the source is required before anything else
  lexer_t* lex = lexer_open(source);

  if(lex == NULL)
    return false;

  // Build a parser over the lexer, primed with the first token
  parser_t* p = POOL_ALLOC(parser_t);
  p->source = source;
  p->lexer = lex;
  p->token = lexer_next(lex);
  p->last_matched = NULL;
  p->last_token_line = 0;
  p->next_flags = 0;
  p->failed = false;

  // Run the requested start rule
  builder_fn_t build_fn;
  ast_t* tree = start(p, &build_fn, expected);

  if(tree == PARSE_ERROR)
    tree = NULL;

  if(tree == RULE_NOT_FOUND)
  {
    syntax_error(p, expected, NULL, NULL);
    tree = NULL;
  }

  if(p->failed)
  {
    ast_free(tree);
    tree = NULL;
  }

  // Tear the parser and lexer down regardless of outcome
  lexer_close(lex);
  token_free(p->token);
  POOL_FREE(parser_t, p);

  if(tree == NULL)
  {
    source_close(source);
    return false;
  }

  // On success the module node takes ownership of the source
  assert(ast_id(tree) == TK_MODULE);
  assert(ast_data(tree) == NULL);
  ast_setdata(tree, source);
  ast_add(package, tree);
  return true;
}
コード例 #6
0
ファイル: synexp.c プロジェクト: Pitel/IFJ2007
/*
 * Drain the expression stack, releasing each entry's token and then the
 * entry itself.  The stack_data argument is reused as a cursor.
 */
void syn_free_stack(Tstack *stack, Tstack_syntax *stack_data)
{
    assert(stack != NULL);
    assert(stack_data != NULL);

    while (!stack_empty(stack)) {
	stack_top(stack, (void *) &stack_data);
	token_free(&stack_data->stoken);
	free(stack_data);
	stack_pop(stack);
    }
}
コード例 #7
0
ファイル: parser-lib.c プロジェクト: childhood/libxr
/*
 * Drive parser_cb with tokens pulled from lexer_cb until EOF or error.
 *
 * Returns 0 on success, -1 on failure (bad context, lexical error --
 * which sets ctx->error -- or an error reported by the parser
 * callbacks).
 */
static int __parse(parser_context* ctx, 
            parser parser_cb, 
            parser_alloc parser_alloc_cb, 
            parser_free parser_free_cb,
            lexer lexer_cb)
{
  void* parser;
  token* t;
  stream* s;
  int retval = -1;

  /* BUG FIX: the original read ctx->stream into `s` before this NULL
     check, dereferencing a possibly-NULL ctx. */
  if (ctx == NULL || ctx->error != NULL || ctx->stream == NULL)
    return -1;
  s = ctx->stream;

  parser = parser_alloc_cb((void *(*)(size_t))g_malloc);
  while ((t = lexer_cb(s)) != NULL)
  {
    if (t->type == TK_UNKNOWN)
    {
      ctx->error = g_strdup_printf("Unknown token '%s' at line %d char %d\n", t->text, t->sline, t->scol);
      token_free(t);
      goto err;
    }
    else if (t->type == TK_EOF)
    {
      token_free(t);
      parser_cb(parser, 0, NULL, ctx);
      break;
    }

    /* NOTE(review): ownership of t presumably transfers to the parser
       here -- confirm against parser_cb's contract. */
    parser_cb(parser, t->type, t, ctx);
    if (ctx->error)
      goto err;
  }

  retval = 0;
 err:
  parser_free_cb(parser, g_free);
  return retval;
}
コード例 #8
0
ファイル: token.c プロジェクト: BLepers/yt_history
/*
 * Parse an access token out of the JSON response buffered in `buf`.
 *
 * On success stores a newly allocated token in *tokenp and returns 0;
 * otherwise returns an errno-style error code (ENOMEM, EINVAL on
 * unparseable JSON, or whatever build_token_into() reports).
 */
int token_parse_json(struct access_token **tokenp, struct evbuffer *buf)
{
	char cbuf[1024];
	int removed;
	int ret = 0;

	struct access_token *token;

	struct json_tokener *tokener;
	enum json_tokener_error jerr;
	struct json_object *obj;

	tokener = json_tokener_new();
	if (tokener == NULL) {
		return ENOMEM;
	}

	/* Feed the buffer to the tokener in chunks until it yields an
	 * object, reports a hard error, or the input runs out. */
	do {
		removed = evbuffer_remove(buf, cbuf, sizeof(cbuf));
		obj = json_tokener_parse_ex(tokener, cbuf, removed);
		jerr = json_tokener_get_error(tokener);
		verbose(FIREHOSE, "%s(): Passed %d bytes, result %p (%s), remaining %zd\n",
			__func__, removed, obj, json_tokener_error_desc(jerr),
		       evbuffer_get_length(buf));
	} while (obj == NULL && jerr == json_tokener_continue && evbuffer_get_length(buf) > 0);

	json_tokener_free(tokener);

	if (obj != NULL) {
		token = malloc(sizeof(*token));
		if (token == NULL) {
			ret = ENOMEM;
		} else {
			memset(token, 0, sizeof(*token));
			ret = build_token_into(token, obj);
			if (ret != 0) {
				token_free(token);
			}
		}
		/* Drop our reference; build_token_into() has copied what it
		 * needs out of the object. */
		json_object_put(obj);
	} else {
		verbose(FIREHOSE, "%s(): json tokener reported: %s\n",
			__func__, json_tokener_error_desc(jerr));
		/* BUG FIX: the original left ret uninitialized on this path and
		 * then both read and returned it -- undefined behavior. */
		ret = EINVAL;
	}

	if (ret == 0) {
		*tokenp = token;
	}

	return ret;
}
コード例 #9
0
ファイル: main.c プロジェクト: fyra/fribid
/**
 * Called when a token has been added or removed.  Added tokens are
 * registered with the platform layer; removed tokens are deregistered
 * and then freed.  TokenChange_Changed is deliberately ignored (not
 * supported).
 */
static void notifyCallback(Token *token, TokenChange change) {
    if (change == TokenChange_Added) {
        platform_addToken(token);
    } else if (change == TokenChange_Removed) {
        platform_removeToken(token);
        token_free(token);
    }
}
コード例 #10
0
ファイル: token.c プロジェクト: BlurryRoots/Lq
/*
 * Release every token owned by someList along with its backing array,
 * then reset the list to an empty state.  A NULL list, or one whose
 * token array is already gone, is left untouched.
 */
void
token_list_free( token_list_t* someList )
{
    if( someList == NULL || someList->tokens == NULL )
    {
        return;
    }

    for( size_t idx = 0; idx < someList->size; ++idx )
    {
        token_free( &someList->tokens[idx] );
    }
    free( someList->tokens );

    someList->tokens = NULL;
    someList->size = 0;
}
コード例 #11
0
ファイル: parserapi.c プロジェクト: DevL/ponyc
// Advance the parser to the next lexer token, recording the previous
// token's line and optionally freeing it.
static void fetch_next_lexer_token(parser_t* parser, bool free_prev_token)
{
  token_t* prev = parser->token;
  token_t* next = lexer_next(parser->lexer);

  if(prev != NULL)
  {
    parser->last_token_line = token_line_number(prev);

    // Give an EOF token the previous token's location so end-of-file
    // errors point somewhere useful
    if(token_get_id(next) == TK_EOF)
    {
      token_set_pos(next, token_source(prev),
        token_line_number(prev), token_line_position(prev));
    }
  }

  if(free_prev_token)
    token_free(prev);

  parser->token = next;
}
コード例 #12
0
ファイル: ast.c プロジェクト: awaidmann/ponyc
// Recursively destroy an AST node: all children first, then the type
// annotation, any node-kind-specific payload, the token, the symbol
// table, and finally the node itself.  NULL is accepted and ignored.
void ast_free(ast_t* ast)
{
  if(ast == NULL)
    return;

  // Grab each sibling link before destroying the node it hangs off
  for(ast_t* child = ast->child; child != NULL;)
  {
    ast_t* sibling = child->sibling;
    ast_free(child);
    child = sibling;
  }

  ast_free(ast->type);

  // Some node kinds own extra data that must be torn down with them
  switch(token_get_id(ast->t))
  {
    case TK_PROGRAM:
      program_free((program_t*)ast->data);
      break;

    case TK_PACKAGE:
      package_free((package_t*)ast->data);
      break;

    case TK_MODULE:
      source_close((source_t*)ast->data);
      break;

    default:
      break;
  }

  token_free(ast->t);
  symtab_free(ast->symtab);
  POOL_FREE(ast_t, ast);
}
コード例 #13
0
ファイル: builder.c プロジェクト: Potpourri/ponyc
// Get the next token ready for when we need it
static void get_next_token(build_parser_t* builder)
{
  assert(builder != NULL);

  if(builder->have_token)
    return;

  if(builder->token != NULL)
    token_free(builder->token);

  builder->token = lexer_next(builder->lexer);
  assert(builder->token != NULL);
  ast_token_id id;

  switch(token_get_id(builder->token))
  {
    case TK_LPAREN_NEW:
    case TK_LPAREN:     id = AT_LPAREN;  break;
    case TK_RPAREN:     id = AT_RPAREN;  break;
    case TK_LSQUARE_NEW:
    case TK_LSQUARE:    id = AT_LSQUARE; break;
    case TK_RSQUARE:    id = AT_RSQUARE; break;
    case TK_LBRACE:     id = AT_LBRACE;  break;
    case TK_RBRACE:     id = AT_RBRACE;  break;
    case TK_EOF:        id = AT_EOF;     break;
    case TK_LEX_ERROR:  id = AT_ERROR;   break;
    case TK_ID:         id = AT_ID;      break;
    case TK_STRING:     id = AT_STRING;  break;
    default:            id = AT_TOKEN;   break;
  }

  //printf("Got token %s %d -> %d\n", token_print(builder->token),
  //  token_get_id(builder->token), id);
  builder->id = id;
  builder->have_token = true;
  builder->line = token_line_number(builder->token);
  builder->pos = token_line_position(builder->token);
}
コード例 #14
0
ファイル: lisp.c プロジェクト: nikuuchi/sukima
/*
 * Lex, parse, compile and execute the Lisp source in `file`, then
 * release every intermediate structure.  Exits with status 1 on open or
 * allocation failure.
 */
void lisp_main(char *file)
{
	FILE *fp;
	if((fp=fopen(file,"r")) == NULL){
		printf("file open error\n");
		exit(1);	/* BUG FIX: was exit(0), reporting success on failure */
	}

	cons_t *root = Cons_New();
	token_t *lex_buf = malloc(sizeof(token_t));
	if(lex_buf == NULL){
		/* BUG FIX: the original passed an unchecked NULL into startLex() */
		printf("out of memory\n");
		fclose(fp);
		exit(1);
	}

	//--Lexer
	startLex(lex_buf,fp);

	//--Parser
	parse(lex_buf,root);
	dumpCons_t(root); //debug

	//--run
	printf("\n");
	bytecode_t *bytecode = Bytecode_New();
	hash_table_t *hash = HashTable_init();
	int esp = 1;

	compile(root,bytecode,hash);
	vm_exec(bytecode,esp,hash,0);

	HashTable_free(hash);
	Bytecode_free(bytecode);
	token_free(lex_buf);
	freeCons_t(root);
	fclose(fp);
}
コード例 #15
0
ファイル: xenddo.c プロジェクト: wjlei1990/WORKFLOW
/** 
 * Parse the action command "ENDDO"
 * 
 * Ends one pass of the innermost DO/WHILE loop in the command file:
 * for a WHILE, the file is rewound to the WHILE statement; for a DO,
 * the next loop variable is fetched, or the loop level is torn down
 * when the list is exhausted.
 *
 * @param nerr 
 *   Error Return Flag
 *   - 0 on Success
 *   - 1 if not currently inside a DO/WHILE loop
 *
 * @note Global Variables
 *   - ndolevel: Decremented by one.
 *
 * @date   870817:  Original version.
 *
 */
void 
xenddo(int *nerr) {

  FILE *nun;
  
  *nerr = 0;
  if( cnd.ndolevel > 0 ){
    /* Logical unit of the command file being replayed. */
    getclun( &nun, nerr );
    
    if( Ndotype[cnd.ndolevel] == 1 ){
      /* -- End of WHILE, backup to WHILE statement */
      backspace(nun,Ndolines[cnd.ndolevel] + 1);
      cnd.ndolevel = cnd.ndolevel - 1;
    } else {
      /* -- End of DO, get next loop variable */
      if( ldolist( nerr ) ){
        backspace(nun, Ndolines[cnd.ndolevel]);
      } else {
        /*    No more variables in this do list */
        /*
        deletev( (char*)kcnd.kdovar[cnd.ndolevel - 1], MCPFN+1, 
                 (char*)kcnd.kdolist[cnd.ndolevel - 1],MCPFN+1, 
                 nerr );
        */
        /* Release the saved loop token for this level, if any. */
        if(do_token[cnd.ndolevel-1]) {
          token_free(do_token[cnd.ndolevel-1]);
          do_token[cnd.ndolevel-1] = NULL;
        }
        cnd.ndolevel = cnd.ndolevel - 1;
      }
    }
  } else {
    /* - Raise error condition if not in an do condition. */
    *nerr = 1;
  }
  return;
}
コード例 #16
0
ファイル: lisp.c プロジェクト: nikuuchi/sukima
/*
 * Interactive read-eval-print loop: lex, parse, compile and execute
 * each line obtained from readline() until EOF (Ctrl-D) or allocation
 * failure.
 */
void lisp_repl()
{
	char *line;
	using_history();

	hash_table_t *hash = HashTable_init();
	int esp = 1;

	while((line = readline("Sukima>")) != NULL) {
		add_history(line);
		token_t *lex_buf = malloc(sizeof(token_t));
		if(lex_buf == NULL){
			/* BUG FIX: the original dereferenced an unchecked NULL via
			   lex_current->type below */
			printf("out of memory\n");
			free(line);
			break;
		}
		token_t *lex_current = lex_buf;

		lex_current = lex(lex_current,line,strlen(line));

		/* lex() returns the last token produced; mark it end-of-line */
		lex_current->type = TY_EOL;

		cons_t *root = Cons_New();
		parse(lex_buf,root);
		dumpCons_t(root); //debug
		printf("\n");

		bytecode_t *bytecode = Bytecode_New();
		compile(root,bytecode,hash);
		vm_exec(bytecode,esp,hash,0);
		Bytecode_free(bytecode);
		token_free(lex_buf);
		freeCons_t(root);
		free(line);
	}

	HashTable_free(hash);

}
コード例 #17
0
ファイル: synexp.c プロジェクト: Pitel/IFJ2007
/**
 * Expression analyser
 *
 * @param stream Stream for loading next token
 * @param token Current token
 * @param symtab Symbol table (AVL root)
 * @param cons_counter number for original naming of item in symbol table
 */
Tnode_ptr syntax_synexp(FILE *stream, Tlex_token *token,
			Tnode_ptr *symtab, size_t *cons_counter,
			Tthree_adr **ta)
{
    assert(stream != NULL);
    assert(token != NULL);
    assert(symtab != NULL);
    assert(cons_counter != NULL);

    size_t precedence_table[][SYN_TABLE_SIZE] = {
//	   1 ,  2 ,  3 ,  4 ,  5 ,  6 ,  7 ,  8 ,  9 , 10 , 11 , 12 , 13 , 14 , 15 , 16 , 17 , 18 , 19 , 20 , 21 , 22 , 23 , 24 , 25 , 26
/*1*/	{ P_E, P_E, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*2*/	{ P_E, P_E, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*3*/	{ P_E, P_E, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*4*/	{ P_E, P_E, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*5*/	{ P_L, P_L, P_L, P_L, P_M, P_M, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*6*/	{ P_L, P_L, P_L, P_L, P_M, P_M, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*7*/	{ P_L, P_L, P_L, P_L, P_M, P_M, P_M, P_M, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*8*/	{ P_L, P_L, P_L, P_L, P_M, P_M, P_M, P_M, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*9*/	{ P_L, P_L, P_L, P_L, P_M, P_M, P_M, P_M, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*10*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*11*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_Q, P_E, P_E, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*12*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*13*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*14*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_L, P_L, P_L, P_L, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*15*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_L, P_L, P_L, P_L, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*16*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*17*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*18*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*19*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_L, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*20*/	{ P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_E, P_E, P_E, P_E, P_L, P_L, P_L, P_L, P_L, P_L, P_L, P_EQ,P_E, P_E, P_E, P_E, P_E } ,
/*21*/	{ P_E, P_E, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_M, P_M, P_M, P_M, P_M, P_M, P_E, P_M, P_E, P_E, P_E, P_E, P_E } ,
/*22*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*23*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*24*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*25*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
/*26*/	{ P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E, P_E } ,
    };

    Tstack stack;
    stack_init(&stack);
    Tstack_syntax *stack_data = NULL;
    char *cons_name_tmp;
    Tnode_ptr operand1 = NULL;
    Tnode_ptr operand2 = NULL;
    Tdata_union value_tmp;
    Tnode_ptr SearchSymbol = NULL;
    Tnode_ptr Result = NULL;
    size_t type_tmp = 0;
    size_t type_tmp2 = 0;

    stack_data = malloc(sizeof(*stack_data));
    if (stack_data == NULL) {
	error(ERROR_NO_MEMORY);
	return NULL;
    }
    str_init(&stack_data->stoken.lexeme);
    str_strcatCh(&stack_data->stoken.lexeme, '$');
    stack_data->stoken.token_type = TOKEN_DOLAR;
    stack_data->symtab_ptr = NULL;
    stack_push(&stack, (void *) &stack_data);
    do {
	if (stack_data->stoken.token_type == TOKEN_SYNEXP_E) {	// jump over TOKEN_SYNTAX_E
	    stack_lookatnext(&stack, (void *) &stack_data);
	}
	switch (precedence_table
		[(size_t) stack_data->stoken.token_type -
		 1][(size_t) token->token_type - 1]) {
	    case P_EQ:
		stack_data = malloc(sizeof(*stack_data));
		if (stack_data == NULL) {	// TODO: Cleanup stack, test it
		    error(ERROR_NO_MEMORY);
		    syn_free_stack(&stack, stack_data);
		    return NULL;
		}
		stack_data->stoken.lexeme = token->lexeme;
		stack_data->stoken.token_type = token->token_type;
		stack_push(&stack, (void *) &stack_data);
		lex(stream, token);
		if (get_error()) {
		    syn_free_stack(&stack, stack_data);
		    return NULL;
		}
		break;
	    case P_L:		// Copy and push
		stack_data = malloc(sizeof(*stack_data));
		if (stack_data == NULL) {	// TODO: Cleanup stack, test it
		    error(ERROR_NO_MEMORY);
		    syn_free_stack(&stack, stack_data);
		    return NULL;
		}
		stack_data->stoken.lexeme = token->lexeme;
		stack_data->stoken.token_type = token->token_type;
		stack_push(&stack, (void *) &stack_data);
		lex(stream, token);
		if (get_error()) {
		    syn_free_stack(&stack, stack_data);
		    return NULL;
		}
		break;
	    case P_M:
		stack_top(&stack, (void *) &stack_data);
		SearchSymbol = NULL;
		switch (stack_data->stoken.token_type) {
		    case TOKEN_SYMBOL:
			SearchSymbol = NULL;
			avl_search_node(*symtab,
					stack_data->stoken.lexeme.data,
					&SearchSymbol);
			if (SearchSymbol == NULL) {
			    error(ERROR_SEMANTIC);
			    syn_free_stack(&stack, stack_data);
			    return NULL;
			} else {
			    str_free(&(stack_data)->stoken.lexeme);
			    stack_pop(&stack);
			    stack_data->stoken.token_type = TOKEN_SYNEXP_E;
			    stack_data->symtab_ptr = SearchSymbol;
			    stack_push(&stack, (void *) &stack_data);
			}
			break;
		    case TOKEN_INT:
			cons_name_tmp = syn_next_cons_name(cons_counter);
			Tdata_union integer;
			integer.i =
			    (int) strtol(stack_data->stoken.lexeme.data,
					 NULL, 10);
			avl_insert(symtab, cons_name_tmp, SYMBOL_INT,
				   &integer);
			avl_search_node(*symtab, cons_name_tmp,
					&SearchSymbol);
			str_free(&(stack_data)->stoken.lexeme);
			stack_pop(&stack);
			stack_data->stoken.token_type = TOKEN_SYNEXP_E;
			stack_data->symtab_ptr = SearchSymbol;
			stack_push(&stack, (void *) &stack_data);
			break;
		    case TOKEN_DOUBLE:
			cons_name_tmp = syn_next_cons_name(cons_counter);
			Tdata_union Double;
			Double.d =
			    strtod(stack_data->stoken.lexeme.data, NULL);
			avl_insert(symtab, cons_name_tmp, SYMBOL_DOUBLE,
				   &Double);
			avl_search_node(*symtab, cons_name_tmp,
					&SearchSymbol);
			str_free(&(stack_data)->stoken.lexeme);
			stack_pop(&stack);
			stack_data->stoken.token_type = TOKEN_SYNEXP_E;
			stack_data->symtab_ptr = SearchSymbol;
			stack_push(&stack, (void *) &stack_data);
			break;
		    case TOKEN_STRING:
			cons_name_tmp = syn_next_cons_name(cons_counter);
			Tdata_union String;
			str_init(&String.s);
			str_strcatStr(&String.s,
				      &(stack_data->stoken.lexeme));
			avl_insert(symtab, cons_name_tmp, SYMBOL_STRING,
				   &String);
			avl_search_node(*symtab, cons_name_tmp,
					&SearchSymbol);
			str_free(&(stack_data)->stoken.lexeme);
			stack_pop(&stack);
			stack_data->stoken.token_type = TOKEN_SYNEXP_E;
			stack_data->symtab_ptr = SearchSymbol;
			stack_push(&stack, (void *) &stack_data);
			break;
		    case TOKEN_R_BRACKET:
			str_free(&(stack_data)->stoken.lexeme);
			free(stack_data);
			stack_pop(&stack);
			stack_top(&stack, (void *) &stack_data);
			if (stack_data->stoken.token_type ==
			    TOKEN_SYNEXP_E) {
			    operand1 = stack_data->symtab_ptr;
			    free(stack_data);
			    stack_pop(&stack);
			    stack_top(&stack, (void *) &stack_data);
			    if (stack_data->stoken.token_type ==
				TOKEN_L_BRACKET) {
				str_free(&(stack_data)->stoken.lexeme);
				stack_pop(&stack);
				stack_data->stoken.token_type =
				    TOKEN_SYNEXP_E;
				stack_data->symtab_ptr = operand1;
				stack_push(&stack, (void *) &stack_data);
			    } else {	// otestovat
				error(ERROR_SYNTAX);
				syn_free_stack(&stack, stack_data);
				return NULL;
			    }
			} else {	// otestovat
			    error(ERROR_SYNTAX);
			    syn_free_stack(&stack, stack_data);
			    return NULL;
			}
			break;
		    case TOKEN_SYNEXP_E:
			operand2 = stack_data->symtab_ptr;
			free(stack_data);
			stack_pop(&stack);
			stack_top(&stack, (void *) &stack_data);
			switch (stack_data->stoken.token_type) {
			    case TOKEN_DIVIDE:
				str_free(&(stack_data)->stoken.lexeme);
				free(stack_data);
				stack_pop(&stack);
				stack_top(&stack, (void *) &stack_data);
				if (stack_data->stoken.token_type ==
				    TOKEN_SYNEXP_E) {
				    operand1 = stack_data->symtab_ptr;
				    cons_name_tmp =
					syn_next_cons_name(cons_counter);
				    switch (operand1->type) {
					case TOKEN_INT:
					case TOKEN_DOUBLE:
					    if (operand2->type !=
						TOKEN_STRING) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_STRING:
					    error(ERROR_SEMANTIC);
					    free(cons_name_tmp);
					    syn_free_stack(&stack,
							   stack_data);
					    return NULL;
					    break;
					default:
					    break;
				    }
				    stack_pop(&stack);
				    stack_data->stoken.token_type =
					TOKEN_SYNEXP_E;
				    avl_insert(symtab, cons_name_tmp,
					       type_tmp, &value_tmp);
				    avl_search_node(*symtab, cons_name_tmp,
						    &SearchSymbol);
				    stack_data->symtab_ptr = SearchSymbol;
				    if (SearchSymbol == NULL) {
					error(ERROR_SYNTAX);
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				    ta_insert_last(TA_DIV, &operand1,
						   &operand2,
						   &SearchSymbol, ta);
				    stack_push(&stack,
					       (void *) &stack_data);
				    if (get_error()) {
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				} else {
				    error(ERROR_SYNTAX);
				    syn_free_stack(&stack, stack_data);
				    return NULL;
				}
				break;
			    case TOKEN_EQUAL:
			    case TOKEN_NOT_EQUAL:
			    case TOKEN_LESS:
			    case TOKEN_LESS_EQUAL:
			    case TOKEN_MORE:
			    case TOKEN_MORE_EQUAL:
				type_tmp2 = stack_data->stoken.token_type;
				str_free(&(stack_data)->stoken.lexeme);
				free(stack_data);
				stack_pop(&stack);
				stack_top(&stack, (void *) &stack_data);
				if (stack_data->stoken.token_type ==
				    TOKEN_SYNEXP_E) {
				    operand1 = stack_data->symtab_ptr;
				    cons_name_tmp =
					syn_next_cons_name(cons_counter);
				    switch (operand1->type) {
					case TOKEN_INT:
					    if (operand2->type !=
						SYMBOL_STRING) {
						value_tmp.i = -1;
						type_tmp = SYMBOL_INT;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_DOUBLE:
					    if (operand2->type !=
						SYMBOL_STRING) {
						value_tmp.i = -1;
						type_tmp = SYMBOL_INT;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_STRING:
					    if (operand2->type ==
						SYMBOL_STRING) {
						value_tmp.i = -1;
						type_tmp = SYMBOL_INT;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					default:
					    break;
				    }
				    stack_pop(&stack);
				    stack_data->stoken.token_type =
					TOKEN_SYNEXP_E;
				    avl_insert(symtab, cons_name_tmp,
					       type_tmp, &value_tmp);
				    avl_search_node(*symtab, cons_name_tmp,
						    &SearchSymbol);
				    stack_data->symtab_ptr = SearchSymbol;
				    if (SearchSymbol == NULL) {
					error(ERROR_SYNTAX);
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				    switch (type_tmp2) {
					case TOKEN_EQUAL:
					    ta_insert_last(TA_EQUAL,
							   &operand1,
							   &operand2,
							   &SearchSymbol,
							   ta);
					    break;
					case TOKEN_NOT_EQUAL:
					    ta_insert_last(TA_NOT_EQUAL,
							   &operand1,
							   &operand2,
							   &SearchSymbol,
							   ta);
					    break;
					case TOKEN_LESS:
					    ta_insert_last(TA_LESS,
							   &operand1,
							   &operand2,
							   &SearchSymbol,
							   ta);
					    break;
					case TOKEN_LESS_EQUAL:
					    ta_insert_last(TA_LESS_EQUAL,
							   &operand1,
							   &operand2,
							   &SearchSymbol,
							   ta);
					    break;
					case TOKEN_MORE:
					    ta_insert_last(TA_MORE,
							   &operand1,
							   &operand2,
							   &SearchSymbol,
							   ta);
					    break;
					case TOKEN_MORE_EQUAL:
					    ta_insert_last(TA_MORE_EQUAL,
							   &operand1,
							   &operand2,
							   &SearchSymbol,
							   ta);
					    break;
				    }
				    stack_push(&stack,
					       (void *) &stack_data);
				} else {
				    error(ERROR_SYNTAX);
				    syn_free_stack(&stack, stack_data);
				    return NULL;
				}
				break;
			    case TOKEN_MINUS:
				str_free(&(stack_data)->stoken.lexeme);
				free(stack_data);
				stack_pop(&stack);
				stack_top(&stack, (void *) &stack_data);
				if (stack_data->stoken.token_type ==
				    TOKEN_SYNEXP_E) {
				    operand1 = stack_data->symtab_ptr;
				    cons_name_tmp =
					syn_next_cons_name(cons_counter);
				    switch (operand1->type) {
					case TOKEN_INT:
					    if (operand2->type ==
						TOKEN_INT) {
						value_tmp.i = 0;
						type_tmp = SYMBOL_INT;
					    } else if (operand2->type ==
						       TOKEN_DOUBLE) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_DOUBLE:
					    if (operand2->type !=
						TOKEN_STRING) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_STRING:
					    error(ERROR_SEMANTIC);
					    free(cons_name_tmp);
					    syn_free_stack(&stack,
							   stack_data);
					    return NULL;
					    break;
					default:
					    break;
				    }
				    stack_pop(&stack);
				    stack_data->stoken.token_type =
					TOKEN_SYNEXP_E;
				    avl_insert(symtab, cons_name_tmp,
					       type_tmp, &value_tmp);
				    avl_search_node(*symtab, cons_name_tmp,
						    &SearchSymbol);
				    stack_data->symtab_ptr = SearchSymbol;
				    if (SearchSymbol == NULL) {
					error(ERROR_SYNTAX);
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				    ta_insert_last(TA_SUB, &operand1,
						   &operand2,
						   &SearchSymbol, ta);
				    stack_push(&stack,
					       (void *) &stack_data);
				} else {
				    error(ERROR_SYNTAX);
				    syn_free_stack(&stack, stack_data);
				    return NULL;
				}
				break;
			    case TOKEN_MULTIPLY:
				str_free(&(stack_data)->stoken.lexeme);
				free(stack_data);
				stack_pop(&stack);
				stack_top(&stack, (void *) &stack_data);
				if (stack_data->stoken.token_type ==
				    TOKEN_SYNEXP_E) {
				    operand1 = stack_data->symtab_ptr;
				    cons_name_tmp =
					syn_next_cons_name(cons_counter);
				    switch (operand1->type) {
					case TOKEN_INT:
					    if (operand2->type ==
						TOKEN_INT) {
						value_tmp.i = 0;
						type_tmp = SYMBOL_INT;
					    } else if (operand2->type ==
						       TOKEN_DOUBLE) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_DOUBLE:
					    if (operand2->type !=
						TOKEN_STRING) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_STRING:
					    error(ERROR_SEMANTIC);
					    free(cons_name_tmp);
					    syn_free_stack(&stack,
							   stack_data);
					    return NULL;
					default:
					    break;
				    }
				    stack_pop(&stack);
				    stack_data->stoken.token_type =
					TOKEN_SYNEXP_E;
				    avl_insert(symtab, cons_name_tmp,
					       type_tmp, &value_tmp);
				    avl_search_node(*symtab, cons_name_tmp,
						    &SearchSymbol);
				    stack_data->symtab_ptr = SearchSymbol;
				    if (SearchSymbol == NULL) {	// LADENI
					error(ERROR_SYNTAX);
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				    ta_insert_last(TA_MUL, &operand1,
						   &operand2,
						   &SearchSymbol, ta);
				    stack_push(&stack,
					       (void *) &stack_data);
				} else {
				    error(ERROR_SYNTAX);
				    syn_free_stack(&stack, stack_data);
				    return NULL;
				}
				break;
			    case TOKEN_OVER:
				str_free(&(stack_data)->stoken.lexeme);
				free(stack_data);
				stack_pop(&stack);
				stack_top(&stack, (void *) &stack_data);
				if (stack_data->stoken.token_type ==
				    TOKEN_SYNEXP_E) {
				    operand1 = stack_data->symtab_ptr;
				    cons_name_tmp =
					syn_next_cons_name(cons_counter);
				    switch (operand1->type) {
					case TOKEN_INT:
					case TOKEN_DOUBLE:
					    if (operand2->type !=
						TOKEN_STRING) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_STRING:
					    error(ERROR_SEMANTIC);
					    free(cons_name_tmp);
					    syn_free_stack(&stack,
							   stack_data);
					    return NULL;
					    break;
					default:
					    break;
				    }
				    stack_pop(&stack);
				    stack_data->stoken.token_type =
					TOKEN_SYNEXP_E;
				    avl_insert(symtab, cons_name_tmp,
					       type_tmp, &value_tmp);
				    avl_search_node(*symtab, cons_name_tmp,
						    &SearchSymbol);
				    if (get_error()) {
					error(ERROR_SYNTAX);
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				    stack_data->symtab_ptr = SearchSymbol;
				    ta_insert_last(TA_OVER, &operand1,
						   &operand2,
						   &SearchSymbol, ta);
				    stack_push(&stack,
					       (void *) &stack_data);
				} else {
				    error(ERROR_SYNTAX);
				    syn_free_stack(&stack, stack_data);
				    return NULL;
				}
				break;
			    case TOKEN_PLUS:
				str_free(&(stack_data)->stoken.lexeme);
				free(stack_data);
				stack_pop(&stack);
				stack_top(&stack, (void *) &stack_data);
				if (stack_data->stoken.token_type ==
				    TOKEN_SYNEXP_E) {
				    operand1 = stack_data->symtab_ptr;
				    cons_name_tmp =
					syn_next_cons_name(cons_counter);
				    switch (operand1->type) {
					case TOKEN_INT:
					    if (operand2->type ==
						TOKEN_INT) {
						value_tmp.i = 0;
						type_tmp = SYMBOL_INT;
					    } else if (operand2->type ==
						       TOKEN_DOUBLE) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_DOUBLE:
					    if (operand2->type !=
						TOKEN_STRING) {
						value_tmp.d = 0;
						type_tmp = SYMBOL_DOUBLE;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					case TOKEN_STRING:
					    if (operand2->type ==
						TOKEN_STRING) {
						str_init(&value_tmp.s);
						type_tmp = SYMBOL_STRING;
					    } else {
						error(ERROR_SEMANTIC);
						free(cons_name_tmp);
						syn_free_stack(&stack,
							       stack_data);
						return NULL;
					    }
					    break;
					default:
					    break;
				    }
				    stack_pop(&stack);
				    stack_data->stoken.token_type =
					TOKEN_SYNEXP_E;
				    avl_insert(symtab, cons_name_tmp,
					       type_tmp, &value_tmp);
				    avl_search_node(*symtab, cons_name_tmp,
						    &SearchSymbol);
				    if (get_error()) {
					error(ERROR_SYNTAX);
					syn_free_stack(&stack, stack_data);
					return NULL;
				    }
				    stack_data->symtab_ptr = SearchSymbol;
				    ta_insert_last(TA_ADD, &operand1,
						   &operand2,
						   &SearchSymbol, ta);
				    stack_push(&stack,
					       (void *) &stack_data);
				} else {
				    error(ERROR_SYNTAX);
				    syn_free_stack(&stack, stack_data);
				    return NULL;
				}
				break;
			    default:
				error(ERROR_SYNTAX);
				syn_free_stack(&stack, stack_data);
				return NULL;
				break;
			}
			break;
		    default:
			error(ERROR_SYNTAX);
			syn_free_stack(&stack, stack_data);
			return NULL;
			break;
		}
	    case P_Q:		// end of do-while
		break;
	    case P_E:		// continue in default
	    default:		// Unexpected Error, this status will not come. Maybe replace by case P_Q.
		error(ERROR_SYNTAX);
		syn_free_stack(&stack, stack_data);
		return NULL;
		break;
	}
    } while (token->token_type != TOKEN_DOLAR
	     || stack_data->stoken.token_type != TOKEN_DOLAR);

    stack_top(&stack, (void *) &stack_data);
    if (stack_data->symtab_ptr == NULL) {
	error(ERROR_SYNTAX);
	token_free(&(stack_data)->stoken);
	free(stack_data);
	stack_pop(&stack);	// dealloc $ in stack
	token_free(token);	// dealloc $ in buffer
	return NULL;
    }
    Result = stack_data->symtab_ptr;
    free(stack_data);
    stack_pop(&stack);		// dealloc last exp
    stack_top(&stack, (void *) &stack_data);
    token_free(&(stack_data)->stoken);
    free(stack_data);
    stack_pop(&stack);		// dealloc $ in stack
    token_free(token);		// dealloc $ in buffer
    lex(stream, token);		// token for syntax.c
    if (get_error()) {
	return NULL;
    }
    return Result;
}
コード例 #18
0
ファイル: check_token.c プロジェクト: mikewest/css-parser
/*
 * Test-fixture teardown: releases the token under test.
 * NOTE(review): t1 is presumably a file-scope fixture allocated by the
 * matching setup() elsewhere in this test file — confirm against it.
 *
 * Fix: declare as (void) — an empty parameter list `()` means
 * "unspecified parameters" in pre-C23 C (obsolescent; error in C23).
 */
void teardown(void) {
    token_free( t1 );
}
コード例 #19
0
ファイル: token.c プロジェクト: BlurryRoots/Lq
/*
    Creates and appends a new token to given token list.
*/
/*
 * Appends a new token (deep copy of someSymbol plus type/line/column)
 * to someList, growing the backing array by one slot.
 *
 * Returns 1 on success, 0 on allocation/copy failure.  On failure the
 * list keeps its previous size; if the initial or grown allocation
 * fails, the list's previous contents are left intact.
 */
int
token_list_append(
    token_list_t* someList,
    const string_t* someSymbol,
    token_type someType,
    size_t someLine,
    size_t someColumn )
{
    size_t i;
    token_t* oldOnes;

    if( ! someList->tokens )
    {
        someList->tokens = calloc( 1, sizeof( token_t ) );
        /*
         * Bug fix: the original never checked this calloc, so a failed
         * first allocation dereferenced NULL below (CERT MEM32-C).
         */
        if( ! someList->tokens )
        {
            return 0;
        }
    }
    else
    {
        oldOnes = someList->tokens;
        someList->tokens = calloc(
            someList->size + 1,
            sizeof( token_t )
        );
        if( ! someList->tokens )
        {
            /* Old array is still valid; restore it and report failure. */
            someList->tokens = oldOnes;
            return 0;
        }

        /*
         * Deep-copy each old token: token_free() below releases the old
         * symbol strings, so the symbol pointers cannot simply be moved.
         * NOTE(review): string_new()/string_copy() failures inside this
         * loop go undetected, matching the original behavior — consider
         * propagating them too.
         */
        for( i = 0; i < someList->size; ++i )
        {
            someList->tokens[i].type = oldOnes[i].type;
            someList->tokens[i].line = oldOnes[i].line;
            someList->tokens[i].column = oldOnes[i].column;

            someList->tokens[i].symbol = string_new();
            string_copy( someList->tokens[i].symbol,
                         oldOnes[i].symbol );

            token_free( & oldOnes[i] );
        }
        free( oldOnes );
    }

    /* Fill the new slot; size is only bumped once everything succeeded. */
    someList->tokens[someList->size].symbol = string_new();

    if( someList->tokens[someList->size].symbol == NULL
     || ! string_copy(
            someList->tokens[someList->size].symbol,
            someSymbol ) )
    {
        return 0;
    }

    someList->tokens[someList->size].type = someType;
    someList->tokens[someList->size].line = someLine;
    someList->tokens[someList->size].column = someColumn;

    ++someList->size;

    return 1;
}
コード例 #20
0
ファイル: token.c プロジェクト: brendan-rius/12sh
END_TEST

START_TEST (test_token_free_null_ptr)
{
  token_free(NULL);
}