Example #1
accept_t lexer_read_file_list(struct lexer_book *bk)
{
    int count = 0;

    lexer_discard_white_space(bk);

    struct token *t;
    do {
        t = lexer_read_file(bk);
        if(t->type == NEWLINE && count == 0) {
            return NO;
        } else if(t->type != NEWLINE && count == 0) {
            /* Add file list start marker */
            lexer_push_token(bk, lexer_pack_token(bk, FILES));
        }

        lexer_push_token(bk, t);
        count++;
    } while(t->type != NEWLINE);

    if(count > 1)
        return YES;
    else
        return NO;
}
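For orientation, here is a minimal sketch of how this routine might be driven; bk is assumed to be an already-initialized struct lexer_book, and the caller shown is hypothetical rather than taken from the project.

    /* Hypothetical caller; bk is assumed to be an initialized struct lexer_book. */
    accept_t status = lexer_read_file_list(bk);
    if(status == YES) {
        /* A FILES marker, the file tokens, and the terminating NEWLINE
           have been queued on bk via lexer_push_token. */
    } else {
        /* NO: a NEWLINE was found before any file name. */
    }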
Example #2
File: lexer.c Project: brenden/cctools
struct token *lexer_read_file(struct lexer_book *bk)
{
	int c = lexer_next_peek(bk);

	switch(c)
	{
	case CHAR_EOF:
		lexer_report_error(bk, "Found end of file while completing file list.\n");
		return NULL;
		break;
	case '\n' :
		lexer_next_char(bk);           /* Jump \n */
		lexer_add_to_lexeme(bk, c);
		return lexer_pack_token(bk, NEWLINE);
		break;
	case '#' :
		lexer_discard_comments(bk);
		lexer_add_to_lexeme(bk, '\n');
		return lexer_pack_token(bk, NEWLINE);
	case ':' :
		lexer_next_char(bk);           /* Jump : */
		return lexer_pack_token(bk, COLON);
		break;
	case ' ' :
	case '\t':
		/* Discard white-space and try again */
		lexer_discard_white_space(bk);
		return lexer_read_file(bk);
		break;
	case '$'  :
		return lexer_read_substitution(bk);
		break;
	case '\'':
		lexer_add_to_lexeme(bk, '\'');
		lexer_read_literal_quoted(bk);
		lexer_add_to_lexeme(bk, '\'');
		return lexer_pack_token(bk, LITERAL);
		break;
	case '-' :
		if( lexer_peek_remote_rename_syntax(bk) )
		{
			lexer_next_char(bk);           /* Jump -> */
			lexer_next_char(bk);           
			return lexer_pack_token(bk, REMOTE_RENAME);
		}
		/* Else fall through */
	default:
		return lexer_read_filename(bk);
		break;
	}
}
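As a quick illustration of the contract, a minimal sketch of a single call, assuming bk is an initialized struct lexer_book; Example #1 above shows the full loop that keeps reading until a NEWLINE token is produced.

	/* Minimal sketch; lexer_read_file returns NULL on the CHAR_EOF path. */
	struct token *t = lexer_read_file(bk);
	if(t != NULL)
		lexer_push_token(bk, t);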
Example #3
File: lexer.c Project: brenden/cctools
void lexer_append_all_tokens(struct lexer_book *bk, struct lexer_book *bk_s)
{
	struct token *head_s;
	
	bk_s->substitution_mode = bk->substitution_mode;
	while( !bk_s->eof )
	{
		if(lexer_next_peek(bk_s) == CHAR_EOF)
		{
			/* Found end of string while completing command */
			bk_s->lexeme_end++;
			bk_s->eof = 1;
		}
		else
		{
			switch(bk_s->substitution_mode)
			{
			case CHAR_EOF:
			case COMMAND:
				head_s = lexer_read_command_argument(bk_s);
				break;
			case FILES:
				head_s = lexer_read_file(bk_s);
				break;
			case SYNTAX:
				lexer_read_expandable(bk_s, CHAR_EOF);
				head_s = lexer_pack_token(bk_s, LITERAL);
				break;
			default:
				lexer_read_line(bk_s);
				continue;
				break;
			}

			if(head_s)
				lexer_push_token(bk_s, head_s);
		}
	}

	/* Transfer the queued tokens to the head of bk's queue, preserving their original order. */
	while( (head_s = list_pop_tail(bk_s->token_queue)) != NULL )
		list_push_head(bk->token_queue, head_s);
}
Example #4
File: lexer.c Project: NeilB879/cctools
struct list *lexer_read_file_list_aux(struct lexer *lx)
{
	struct list *tokens = list_create();

	lexer_discard_white_space(lx);

	while(1) {
		struct token *t = lexer_read_file(lx);
		if(!t) break;

		// Do substitution recursively
		if(t->type == TOKEN_SUBSTITUTION) {
			tokens = list_splice(tokens, lexer_expand_substitution(lx, t, lexer_read_file_list_aux));
			lexer_free_token(t);
			continue;
		} else {
			list_push_tail(tokens, t);
			if(t->type == TOKEN_NEWLINE)
				break;
		}
	}

	return tokens;
}
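A hypothetical consumer of the returned list might look like the sketch below; lx is assumed to be an initialized struct lexer, and list_pop_head is assumed to exist in the project's list API alongside the list_pop_tail and list_push_head calls seen in Example #3.

	struct list *files = lexer_read_file_list_aux(lx);
	struct token *t;
	while((t = list_pop_head(files)) != NULL) {
		/* Each t is a file token; the list ends with a TOKEN_NEWLINE
		   when a complete line was read. Freeing the list itself is
		   omitted in this sketch. */
		lexer_free_token(t);
	}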