/* Exercises expand_readmacro() on the reader macros ' ` , and ,@ as well
 * as nested and dotted lists.  Apart from an initial tokenizer sanity
 * check this is a smoke test: results are printed via print_tokens()
 * rather than asserted.  Returns true if all asserts pass (assert()
 * aborts otherwise).
 *
 * Fix: the original declared `char *s` and dead-stored `s = (char*)car(l)`
 * in the quasiquote case without ever reading it; the unused local and
 * dead store are removed. */
bool test_expandreadmacro() {
    list *l;

    /* tokenizer sanity check */
    l = tokenize(" abc 123 ");
    assert(list_length(l) == 2);
    delete_tokens(l);

    /* nested list, no reader macros involved */
    l = tokenize(" (( 1 2 3) (hello world) )");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* quote */
    l = tokenize("'x");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* quasiquote */
    l = tokenize("`x");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* unquote */
    l = tokenize(",x");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* unquote-splicing */
    l = tokenize(",@x");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* quasiquote over a list containing unquote and unquote-splicing */
    l = tokenize("`(xxx ,y ,@a )");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* nested quasiquote with a dotted tail */
    l = tokenize("`(xxx (,y (,@a) . z ))");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    /* quoted dotted pair */
    l = tokenize("'(1 . 2)");
    l = expand_readmacro(l);
    print_tokens(l);
    delete_tokens(l);

    return true;
}
/* Run one shell command line: split COMMAND into tokens, and if
 * tokenization produced something runnable, spawn the process and then
 * release the token storage.  A command that fails to tokenize is
 * silently ignored. */
void execute(char* command) {
    if (!create_tokens(command))
        return;
    create_process();
    delete_tokens();
}
/* Parse the textual [Chip] header section of a CDF file into mycdf->header.
 * Reads the Name, Rows, Cols, NumberOfUnits, MaxUnit, NumQCUnits and
 * ChipReference fields, one "Key=Value" line at a time; linebuffer is
 * caller-supplied scratch space reused for every findStartsWith() read.
 * NOTE(review): this block is truncated in the visible source — the else
 * branch of the ChipReference check continues past the end of this view. */
static void read_cdf_header(FILE *infile, cdf_text *mycdf, char* linebuffer){
  tokenset *cur_tokenset;
  /* move to the Chip section */
  AdvanceToSection(infile,"[Chip]",linebuffer);
  findStartsWith(infile,"Name",linebuffer);
  /* Read the Name */
  cur_tokenset = tokenize(linebuffer,"=\r\n");
  /* copy token 1 (the value after '=') into freshly allocated storage */
  mycdf->header.name = Calloc(strlen(get_token(cur_tokenset,1))+1,char);
  strcpy(mycdf->header.name,get_token(cur_tokenset,1));
  delete_tokens(cur_tokenset);
  /* Read the Rows and Cols, Number of units etc */
  findStartsWith(infile,"Rows",linebuffer);
  cur_tokenset = tokenize(linebuffer,"=");
  mycdf->header.rows = atoi(get_token(cur_tokenset,1));
  delete_tokens(cur_tokenset);
  findStartsWith(infile,"Cols",linebuffer);
  cur_tokenset = tokenize(linebuffer,"=");
  mycdf->header.cols = atoi(get_token(cur_tokenset,1));
  delete_tokens(cur_tokenset);
  findStartsWith(infile,"NumberOfUnits",linebuffer);
  cur_tokenset = tokenize(linebuffer,"=");
  mycdf->header.numberofunits = atoi(get_token(cur_tokenset,1));
  delete_tokens(cur_tokenset);
  findStartsWith(infile,"MaxUnit",linebuffer);
  cur_tokenset = tokenize(linebuffer,"=");
  mycdf->header.maxunit = atoi(get_token(cur_tokenset,1));
  delete_tokens(cur_tokenset);
  findStartsWith(infile,"NumQCUnits",linebuffer);
  cur_tokenset = tokenize(linebuffer,"=");
  mycdf->header.NumQCUnits = atoi(get_token(cur_tokenset,1));
  delete_tokens(cur_tokenset);
  findStartsWith(infile,"ChipReference",linebuffer);
  /* ChipReference may be absent: with "=\r\n" delimiters an empty value
     yields a single token (the key alone), hence the n > 1 check */
  cur_tokenset = tokenize(linebuffer,"=\r\n");
  if (cur_tokenset->n > 1){
    mycdf->header.chipreference = Calloc(strlen(get_token(cur_tokenset,1))+1,char);
    strcpy(mycdf->header.chipreference,get_token(cur_tokenset,1));
  } else {
/* Unit test for tokenize(): checks token counts for plain and nested
 * input, and verifies that the reader-macro characters ' ` , and ,@
 * survive tokenization as the leading characters of their own tokens.
 * Returns 1 on success (assert() aborts on failure). */
int test_tokenize() {
    /* two plain words, surrounding whitespace discarded */
    list *toks = tokenize(" abc 123 ");
    char *tok;
    assert(list_length(toks) == 2);
    delete_tokens(toks);

    /* nested parenthesized input */
    toks = tokenize(" (( 1 2 3) (hello world) )");
    assert(list_length(toks) == 11);
    delete_tokens(toks);

    /* quote */
    toks = tokenize("'x");
    tok = (char*)car(toks);
    assert(tok[0] == '\'');
    delete_tokens(toks);

    /* quasiquote: macro char plus symbol makes two tokens */
    toks = tokenize("`x");
    tok = (char*)car(toks);
    assert(tok[0] == '`');
    assert(list_length(toks) == 2);
    delete_tokens(toks);

    /* unquote */
    toks = tokenize(",x");
    tok = (char*)car(toks);
    assert(tok[0] == ',');
    assert(list_length(toks) == 2);
    delete_tokens(toks);

    /* unquote-splicing: ",@" stays a single token */
    toks = tokenize(",@x");
    tok = (char*)car(toks);
    assert(tok[0] == ',');
    assert(tok[1] == '@');
    delete_tokens(toks);

    /* quasiquoted list containing unquote and unquote-splicing */
    toks = tokenize("`(xxx ,y ,@a )");
    assert(list_length(toks) == 8);
    delete_tokens(toks);

    return 1;
}
/* Tear down a Rule: release the condition list first, then the token
 * storage backing it.  Order matters if conditions reference tokens —
 * preserved from the original. */
Rule::~Rule()
{
    delete_conditions();
    delete_tokens();
}