/* Demo driver: builds two token/int bindings, combines them with the
 * override and disjoint-union operations, then exercises tuple and list
 * construction (push/pop) while printing each intermediate result. */
int main(void) {
    ACData key_a = make_token("barf");
    ACData val_a = make_int(3);
    ACData bind_a = give_binding(key_a, val_a);

    ACData key_b = make_token("frab");
    ACData val_b = make_int(-3);
    ACData bind_b = give_binding(key_b, val_b);

    /* Combine the two bindings both ways. */
    ACData merged = give_overriding(bind_a, bind_b);
    ACData unioned = give_disjoint_union(merged, bind_b);
    (void)unioned; /* computed, as before, but not printed */

    print_ac_data(merged);

    ACData pair = make_tuple(key_a, merged);
    print_ac_data(pair);
    printf("\n");

    ACData lst = make_list(key_a, merged);
    print_ac_data(lst);
    printf("\n");

    /* Push the list onto itself, then pop it back off. */
    lst = lpush(lst, lst);
    print_ac_data(lst);
    printf("\n");
    lst = lpop(lst);
    print_ac_data(lst);
    printf("\n");

    return 0;
}
/*
 * Reads function-like macro arguments. Returns true if the argument list ends
 * with "...". Otherwise false.
 *
 * Each named parameter is stored in `param` keyed by its spelling and mapped
 * to a TOKTYPE_MACRO_PARAM token whose value is the parameter's position.
 * A trailing "..." is stored under the key "__VA_ARGS__".
 */
static bool read_funclike_define_args(CppContext *ctx, Dict *param) {
    for (;;) {
        Token *tok = read_cpp_token(ctx);
        /* ')' terminates the list; no variadic marker was seen. */
        if (is_punct(tok, ')'))
            return false;
        /* After the first parameter, a ',' separator is required. */
        if (dict_size(param)) {
            if (!is_punct(tok, ','))
                error_token(tok, "',' expected, but got '%s'", token_to_string(tok));
            tok = read_cpp_token(ctx);
        }
        /* Hitting end-of-line (or no token) means the ')' is missing. */
        if (!tok || tok->toktype == TOKTYPE_NEWLINE)
            error_token(tok, "missing ')' in macro parameter list");
        if (is_punct(tok, KEYWORD_THREEDOTS)) {
            /* "..." must be the last parameter and be followed by ')'. */
            Token *subst = make_token(ctx, TOKTYPE_MACRO_PARAM, (TokenValue)dict_size(param));
            dict_put(param, to_string("__VA_ARGS__"), subst);
            Token *tok1 = read_cpp_token(ctx);
            if (!is_punct(tok1, ')'))
                error_token(tok1, "')' expected, but got '%s'", token_to_string(tok1));
            return true;
        }
        if (tok->toktype != TOKTYPE_IDENT)
            error_token(tok, "identifier expected, but got '%s'", token_to_string(tok));
        /* Record the parameter under its own name, with its index as value. */
        Token *subst = make_token(ctx, TOKTYPE_MACRO_PARAM, (TokenValue)dict_size(param));
        dict_put(param, tok->val.str, subst);
    }
}
/* Split the command line in `buf` into tokens and enqueue them on `qb`.
 * Runs of whitespace collapse into one token; "&&" and "||" are kept
 * together while a single '&' or '|' is passed through untouched.
 *
 * Fix: the whitespace-collapsing test read *(cur - 1) even when cur was
 * still at the start of the buffer, an out-of-bounds read.  It now only
 * looks back once at least one character has been consumed. */
void cmd_build_list(queue_t *qb, char *buf) {
    char *cur = buf, *start = NULL, *fin = NULL;
    ui_token_t *t;

    q_init(qb);
    start = cur;
    while (*cur != '\0') {
        if (*cur == '&' && *(cur + 1) != '&') {
            /* Do nothing if we have only one & */
        } else if (*cur == '|' && *(cur + 1) != '|') {
            /* Do nothing if we have only one | */
        } else if (cur != buf &&
                   ((*cur == ' ') || (*cur == '\t')) &&
                   ((*(cur - 1) == ' ') || (*(cur - 1) == '\t'))) {
            /* Make one big token for white space */
        } else {
            if (strchr(tokenbreaks, *cur)) {
                /* Separator character: close out any token in progress. */
                if (cur != buf) {
                    fin = cur;
                    t = make_token(start, fin - start);
                    q_enqueue(qb, &(t->qb));
                    start = cur; /* Start new token */
                }
            } else {
                /* If we are on a normal character but the last character was
                 * a special char we need to start a new token. */
                if ((cur > buf) && strchr(tokenbreaks, *(cur - 1))) {
                    fin = cur;
                    t = make_token(start, fin - start);
                    q_enqueue(qb, &(t->qb));
                    start = cur; /* Start new token */
                }
                /* Otherwise keep extending the current token. */
            }
        }
        cur++;
    }
    /* Flush the final token, if any characters remain unconsumed. */
    fin = cur;
    if (fin - start > 0) {
        t = make_token(start, fin - start);
        q_enqueue(qb, &(t->qb));
    }
    return;
}
// Process a block comment the leading / * for which has been seen, but not // consumed static token_t* nested_comment(lexer_t* lexer) { consume_chars(lexer, 2); // Leading / * size_t depth = 1; while(depth > 0) { if(lexer->len <= 1) { lex_error(lexer, "Nested comment doesn't terminate"); lexer->ptr += lexer->len; lexer->len = 0; return make_token(lexer, TK_LEX_ERROR); } if(look(lexer) == '*' && lookn(lexer, 2) == '/') { consume_chars(lexer, 2); depth--; } else if(look(lexer) == '/' && lookn(lexer, 2) == '*') { consume_chars(lexer, 2); depth++; } else { consume_chars(lexer, 1); } } lexer->newline = false; return NULL; }
uint32_t expr(char *e, bool *success) { int i; if(!make_token(e)) { *success = false; return 0; } #ifdef DDEBUG printf("%d\n",nr_token); for (i=0;i<nr_token;i++) printf("%d\t",tokens[i].type); #endif /* TODO: Insert codes to evaluate the expression. */ //int p=0,q=nr_token; for (i=0;i<nr_token;i++){ if(tokens[i].type=='*'&&(i==0||(tokens[i-1].type!=DEC&&tokens[i-1].type!=HEX&&tokens[i-1].type!=REG&&tokens[i-1].type!=')'))) tokens[i].type=POINT; } for (i=0;i<nr_token;i++){ if(tokens[i].type=='-'&&(i==0||(tokens[i-1].type!=DEC&&tokens[i-1].type!=HEX&&tokens[i-1].type!=REG&&tokens[i-1].type!=')'))) tokens[i].type=NEG; } uint32_t result=eval(0,nr_token-1); if (!valid) *success=false; return result; panic("please implement me"); return 0; }
/* Smoke test for token.h: build one comment-class token and dump every
 * field so the layout can be eyeballed. */
void token_demo() {
    printf("Demo token.h\n");
    token_t *tok = make_token(TC_CMT, "blabla", 6, 1);
    printf("t{%d, %s, %d, %d}\n",
           tok->token_class, tok->yytext, tok->yyleng, tok->kw_setting);
}
// Make a token with the specified ID and current token text static token_t* make_token_with_text(lexer_t* lexer, token_id id) { token_t* t = make_token(lexer, id); append_to_token(lexer, '\0'); token_set_string(t, stringtab(lexer->buffer)); return t; }
uint32_t expr(char *e, bool *success) { if(!make_token(e)) { *success = false; return 0; } /* TODO: Insert codes to evaluate the expression. */ int i; for(i=0;i<nr_token;i++) { if(tokens[i].type=='*' && (i==0 || Isop(i-1))) tokens[i].type=DEREF; } int p=0,q=nr_token-1; uint32_t result=eval(p,q); //printf("test 0x%2x\n",result); if(!reg_right) { *success=false; return 0; } if(!is_obj) { *success=false; return 0; } return result; }
// Report that the current literal token doesn't terminate static token_t* literal_doesnt_terminate(lexer_t* lexer) { lex_error(lexer, "Literal doesn't terminate"); lexer->ptr += lexer->len; lexer->len = 0; return make_token(lexer, TK_LEX_ERROR); }
/* Tokenize and evaluate the expression e.
 * *success reports whether the expression was tokenized; returns 0 on
 * failure, otherwise the evaluated value.
 *
 * Fix: *success is now explicitly set to true on the good path.  It was
 * previously left untouched, so a caller that did not pre-initialize it
 * would read an indeterminate value. */
uint32_t expr(char *e, bool *success) {
    if (!make_token(e)) {
        *success = false;
        return 0;
    }
    *success = true;
    return eval(0, nr_token - 1);
}
/* Tokenize and evaluate the expression e; *success reports whether the
 * tokenizer accepted it.  Returns 0 when tokenizing fails. */
uint32_t expr(char *e, bool *success) {
    if (make_token(e)) {
        *success = true;
        return eval(0, nr_token - 1);
    }
    *success = false;
    return 0;
}
/* Tokenize the expression e.  Evaluation is not implemented yet: any
 * successfully tokenized input panics.  *success is cleared only on
 * tokenizer failure. */
uint32_t expr(char *e, bool *success) {
    if(!make_token(e)) {
        *success = false;
        return 0;
    }
    /* TODO: Insert codes to evaluate the expression. */
    panic("please implement me");
    return 0;
}
void test_parse_number() { struct token_list *tkl = make_token_list(); struct token *tk = make_token(tok_number, NULL, 0.0, 42); append_token_list(tkl, tk); struct ast_node *result = parse_number(tkl); EXPECT_EQ(result->val, tk); EXPECT_EQ(result->num_children, 0); destroy_token_list(tkl); delete_node(result); }
/* Finish the word accumulated in word_buffer and append it to the parse
 * list — as a string node in MODE_STRING, otherwise as a token — then
 * grow a fresh cons cell and reset the accumulator. */
void convert(int the_mode) {
    word_buffer[word_size] = '\0'; /* terminate the accumulated word */
    current_node->car = (the_mode == MODE_STRING)
                            ? make_string(word_buffer)
                            : make_token(word_buffer);
    current_node->cdr = cons(NULL, NULL);
    current_node = current_node->cdr;
    reset();
}
/* Editor callback: open the root directory of the edit at revision rev.
 * Allocates a directory token, announces the open over the wire, and
 * hands back a baton wrapping that token. */
static svn_error_t *ra_svn_open_root(void *edit_baton, svn_revnum_t rev,
                                     apr_pool_t *pool, void **root_baton) {
    ra_svn_edit_baton_t *eb = edit_baton;
    const char *dir_token = make_token('d', eb, pool);

    SVN_ERR(check_for_error(eb, pool));
    SVN_ERR(svn_ra_svn__write_cmd_open_root(eb->conn, pool, rev, dir_token));
    *root_baton = ra_svn_make_baton(eb->conn, pool, eb, dir_token);
    return SVN_NO_ERROR;
}
// Process a slash, which has been seen, but not consumed static token_t* slash(lexer_t* lexer) { if(lookn(lexer, 2) == '*') return nested_comment(lexer); if(lookn(lexer, 2) == '/') return line_comment(lexer); consume_chars(lexer, 1); return make_token(lexer, TK_DIVIDE); }
// Make a token with the specified ID and current token text static token_t* make_token_with_text(lexer_t* lexer, token_id id) { token_t* t = make_token(lexer, id); if(lexer->buffer == NULL) // No text for token token_set_string(t, stringtab(""), 0); else token_set_string(t, stringtab_len(lexer->buffer, lexer->buflen), lexer->buflen); return t; }
/* Editor callback: open the existing directory `path` (child of
 * parent_baton) at revision rev, producing a new child baton. */
static svn_error_t *ra_svn_open_dir(const char *path, void *parent_baton,
                                    svn_revnum_t rev, apr_pool_t *pool,
                                    void **child_baton) {
    ra_svn_baton_t *parent = parent_baton;
    const char *dir_token = make_token('d', parent->eb, pool);

    SVN_ERR(check_for_error(parent->eb, pool));
    SVN_ERR(svn_ra_svn__write_cmd_open_dir(parent->conn, pool, path,
                                           parent->token, dir_token, rev));
    *child_baton = ra_svn_make_baton(parent->conn, pool, parent->eb, dir_token);
    return SVN_NO_ERROR;
}
void test_parse_paren() { struct token_list *tkl = make_token_list(); struct token *tk0 = make_token(tok_punc, "(", 0.0, 0); struct token *tk1 = make_token(tok_number, NULL, 0.0, 42); struct token *tk2 = make_token(tok_punc, "+", 0.0, 0); struct token *tk3 = make_token(tok_number, NULL, 0.0, 24); struct token *tk4 = make_token(tok_punc, ")", 0.0, 0); append_token_list(tkl, tk0); append_token_list(tkl, tk1); append_token_list(tkl, tk2); append_token_list(tkl, tk3); append_token_list(tkl, tk4); struct ast_node *result = parse_paren(tkl); EXPECT_EQ(result->val, tk2); EXPECT_EQ(result->num_children, 2); EXPECT_EQ(result->children[0]->val, tk1); EXPECT_EQ(result->children[1]->val, tk3); EXPECT_EQ(result->children[0]->num_children, 0); EXPECT_EQ(result->children[1]->num_children, 0); destroy_token_list(tkl); delete_node(result); }
/* Return the next token starting at *position in `input`, advancing
 * *position past it.  Returns NULL for invalid arguments, an
 * END_OF_FILE token at end of input, and exits the process on an
 * unrecognized character.
 *
 * Fixes: the NULL check now precedes strlen(input), which previously
 * dereferenced a NULL pointer; isspace/isdigit receive an unsigned
 * char, avoiding UB for negative char values. */
struct token_t* get_next_token(char* input, int* position) {
    printf("----Entering get_next_token\n");
    if (input == NULL || position == NULL || (*position) < 0) {
        printf("RETURN NULL\n");
        return NULL;
    }
    int len = strlen(input);
    if (len < (*position)) {
        printf("RETURN NULL\n");
        return NULL;
    }
    /* NOTE(review): unreachable — *position > len was already rejected
     * above; kept to preserve the original contract. */
    if ((*position) > len) {
        printf("RETURN EOF\n");
        return make_token(END_OF_FILE, "");
    }
    char current_char = input[(*position)];
    while (current_char != '\0') {
        if (isspace((unsigned char)current_char)) {
            /* Skip whitespace. */
            (*position)++;
            current_char = input[(*position)];
        } else if (isdigit((unsigned char)current_char)) {
            struct token_t* ret = read_int(input, position);
            printf("after read_int position is: %d\n", *position);
            (*position)++;
            return ret;
        } else if (current_char == '*') {
            (*position)++;
            return make_token(MUL, "*");
        } else if (current_char == '/') {
            (*position)++;
            return make_token(DIV, "/");
        } else {
            printf("Got invalid input: %s\n", substr(*position, strlen(input), input));
            exit(1);
        }
    }
    return make_token(END_OF_FILE, "");
}
/* Tokenize e, check that its parentheses are balanced, then evaluate.
 * *success is cleared on either failure; eval() may clear it too. */
uint32_t expr(char *e, bool *success) {
    /* Tokenizing runs first; the bracket check only sees valid tokens. */
    if (!make_token(e) || !check_parentheses_matched(0, nr_token - 1)) {
        *success = false;
        return 0;
    }
    return eval(0, nr_token - 1, success);
}
/* Editor callback: add a new directory `path` under parent_baton,
 * optionally copied from copy_path@copy_rev. */
static svn_error_t *ra_svn_add_dir(const char *path, void *parent_baton,
                                   const char *copy_path,
                                   svn_revnum_t copy_rev, apr_pool_t *pool,
                                   void **child_baton) {
    ra_svn_baton_t *parent = parent_baton;
    const char *dir_token = make_token('d', parent->eb, pool);

    /* A copy source must be either fully specified or fully absent. */
    SVN_ERR_ASSERT((copy_path && SVN_IS_VALID_REVNUM(copy_rev)) ||
                   (!copy_path && !SVN_IS_VALID_REVNUM(copy_rev)));
    SVN_ERR(check_for_error(parent->eb, pool));
    SVN_ERR(svn_ra_svn__write_cmd_add_dir(parent->conn, pool, path,
                                          parent->token, dir_token,
                                          copy_path, copy_rev));
    *child_baton = ra_svn_make_baton(parent->conn, pool, parent->eb, dir_token);
    return SVN_NO_ERROR;
}
/* Tokenize and evaluate e; *success reports whether it tokenized.
 *
 * Fix: removed the unreachable panic()/return that followed the final
 * return statement. */
uint32_t expr(char *e, bool *success) {
    if (!make_token(e)) {
        *success = false;
        return 0;
    }
    *success = true;
    /* Rewrite binary operators as unary ones when they appear first or
     * follow a token whose prior[] class (> -2) cannot end an operand:
     * MUL becomes a pointer dereference, SUB a negation. */
    int i;
    for (i = 0; i < nr_token; i++) {
        if (tokens[i].type == MUL && (i == 0 || prior[tokens[i - 1].type] > -2))
            tokens[i].type = PTR;
        if (tokens[i].type == SUB && (i == 0 || prior[tokens[i - 1].type] > -2))
            tokens[i].type = NEG;
    }
    return eval(0, nr_token - 1);
}
/* Walk a tokenqueue through push / peek / drop, printing length and
 * emptiness after each step; the final line checks that peeking the
 * head of the now-modified queue compares against the pushed token. */
void tqueue_demo() {
    printf("Demo tokenqueue\n");
    tqueue_t queue;
    tqueue_init(&queue);
    token_t *tok = make_token(TC_CMT, "blabla", 6, 1);

    printf("tq.length %d, tqueue_empty %d\n", queue.length, tqueue_empty(&queue));
    tqueue_push_back(&queue, tok);
    tqueue_peek_n(&queue, 0);
    printf("tq.length %d, tqueue_empty %d\n", queue.length, tqueue_empty(&queue));
    tqueue_drop_head(&queue);
    printf("tq.length %d, tqueue_empty %d\n", queue.length, tqueue_empty(&queue));
    printf("'tqueue_peek_n(&tq, 0) == token' %d\n", tqueue_peek_n(&queue, 0) == tok);
}
/* Allocate a T_FUNCTION tree node named `fname` whose argument list is
 * the subtree `right`.  Exits the process if allocation fails. */
static TREE *create_fun_token(CSOUND *csound, TREE *right, char *fname)
{
    TREE *node = (TREE*)csound->Malloc(csound, sizeof(TREE));
    if (UNLIKELY(node == NULL))
        exit(1);

    node->type = T_FUNCTION;
    node->value = make_token(csound, fname);
    node->value->type = T_FUNCTION;   /* token carries the function type too */
    node->left = NULL;
    node->right = right;
    node->next = NULL;
    node->len = 0;
    node->markup = NULL;
    node->rate = -1;
    return node;
}
/*
 * Parses and keeps a list of commands with pointers to their arguments
 * and then carries out the requested actions. Returns 0 on success.
 *
 * Fix: both look-behind tests at index token_str_length-1 now require a
 * non-empty token; the original read tmp_token_string[-1] /
 * input_current_start[-1] whenever a delimiter appeared first.
 */
int process_input(char *input) {
    struct SLList tokens;
    init_list(&tokens);
    char *input_current_start = input;
    const char special_delims[] = "<>|";

    // break up tokens into the list and collect argument lists for them.
    do {
        int token_str_length = strcspn(input_current_start, special_delims);
        char *tmp_token_string = (char *)malloc(token_str_length + 1);
        if (!tmp_token_string) {
            fprintf(stderr, "error: could not allocate memory to process token, %s\n", strerror(errno));
            return -1;
        }
        strncpy(tmp_token_string, input_current_start, token_str_length);
        tmp_token_string[token_str_length] = '\0';
        // if this is the last token, strip the trailing '\n'.
        if (token_str_length > 0 &&
            tmp_token_string[token_str_length - 1] == '\n') {
            tmp_token_string[token_str_length - 1] = '\0';
        }

        char modifier = input_current_start[token_str_length];
        // "2>" redirects stderr: record it as modifier 'e' and drop the '2'.
        if (token_str_length > 0 &&
            input_current_start[token_str_length - 1] == '2' &&
            input_current_start[token_str_length] == '>') {
            modifier = 'e';
            tmp_token_string[token_str_length - 1] = '\0';
        }
        add_back(&tokens, make_token(tmp_token_string, modifier));

        input_current_start += token_str_length + 1;
        free(tmp_token_string);
        // NOTE(review): this termination test can read before the start of
        // `input` for inputs shorter than 2 chars — confirm callers always
        // pass a newline-terminated line.
    } while (*(input_current_start - 2) != '\n');

    // carry out the instructions formed by the tokens.
    evaluate(&tokens);

    // TODO: add any necessary clean up.
    return 0;
}
/* Tokenize e, rewrite dereference stars, and evaluate the expression.
 * *success is cleared on tokenizer failure; eval() may clear it too. */
uint32_t expr(char *e, bool *success) {
    if (!make_token(e)) {
        *success = false;
        return 0;
    }
    /* A '*' that starts the expression or follows another operator is a
     * pointer dereference, not multiplication. */
    for (int idx = 0; idx < nr_token; idx++) {
        if (tokens[idx].type == '*' && (idx == 0 || is_op(tokens[idx - 1].type)))
            tokens[idx].type = DEREF;
    }
    return eval(0, nr_token - 1, success);
}
uint32_t expr(char *e/*, bool *success*/) { if(!make_token(e)) { //*success = false; return 0; } int i; for(i=0;i<nr_token;i++) { if(tokens[i].type==MUL&&((i==0)||tokens[i-1].type<RP)) { tokens[i].type=POINTER; } if(tokens[i].type==SUB&&((i==0)||tokens[i-1].type!=RP)) { tokens[i].type=NEG; } } return eval(0,nr_token-1); }
/* Tokenize and evaluate e; *success reports whether tokenizing worked.
 *
 * Fixes: the result now stays in a uint32_t instead of being funneled
 * through a signed int before the uint32_t return (values above
 * INT_MAX hit implementation-defined conversion); removed a large
 * block of commented-out token-dump debug code. */
uint32_t expr(char *e, bool *success) {
    if (!make_token(e)) {
        *success = false;
        printf("Error:make_token() func run error\n");
        return 0;
    }
    *success = true;
    uint32_t result = eval(0, nr_token - 1);
    return result;
}
static int xor_login(char *name) { size_t secret_size = sizeof(secret) - 1; size_t token_size, sig_size, signed_token_size; char *token, *signed_token; char sig[secret_size]; if ((token = make_token(name)) == NULL) return -1; token_size = cgc_strlen(token); signed_token_size = token_size + 2 * secret_size + 2; if ((signed_token = realloc(token, signed_token_size)) == NULL) { free(token); return -1; } if ((sig_size = xor_sig(signed_token, token_size, sig)) == 0) { free(signed_token); return -1; } signed_token[token_size] = '|'; bin_to_hex(signed_token + token_size + 1, sig, sig_size); signed_token[signed_token_size - 1] = '\n'; if (write_all(STDOUT, signed_token, signed_token_size) != signed_token_size) return -1; cgc_memset(sig, '\x00', sig_size); cgc_memset(signed_token, '\x00', signed_token_size); free(signed_token); return 0; }