// Tokenizes `line` into m_tokens using a two-pass strategy:
// pass 1 counts tokens (m_nt), pass 2 re-scans the same line and stores them.
// Returns 0 on success, non-zero on error.
int LineParser::parse(char *line, int ignore_escaping/*=0*/) // returns -1 on error
{
  freetokens();
  // Save the block-comment state: the counting pass below advances it, and
  // the second (collecting) pass must start from the identical state or the
  // two scans would disagree.
  bool bPrevCB=m_bCommentBlock;
  // Pass 1: count tokens only (presumably doline just counts while m_tokens
  // is NULL — confirm against doline's implementation).
  int n=doline(line, ignore_escaping);
  if (n) return n;
  if (m_nt)
  {
    m_bCommentBlock=bPrevCB; // rewind comment state for the second scan
    // NOTE(review): malloc result is not checked for NULL before pass 2
    // writes into m_tokens — confirm doline tolerates allocation failure.
    m_tokens=(char**)malloc(sizeof(char*)*m_nt);
    // Pass 2: same scan again, this time recording token pointers.
    n=doline(line, ignore_escaping);
    if (n)
    {
      freetokens();
      return -1;
    }
  }
  return 0;
}
/*
 * Recursively releases a token trie rooted at `root`.
 *
 * Walks every hash slot of the node, frees each chained `tref` (and recurses
 * into the subtree it references), then drops this node's reference count and
 * frees the node itself once no references remain.
 *
 * @param root Trie node to release; NULL is a no-op.
 */
void freetokens(tnode * root)
{
    tnode * node = root;
    int i;

    /* Fix: the original skipped the loop for a NULL node (the `node &&`
       guard) but still executed `--node->refcount`, dereferencing NULL. */
    if (node == NULL) {
        return;
    }
    for (i = 0; i != NODEHASHSIZE; ++i) {
        if (node->next[i]) {
            tref * ref = node->next[i];
            while (ref) {
                /* Save the chain link before freeing the current ref. */
                tref * next = ref->nexthash;
                freetokens(ref->node);
                free(ref);
                ref = next;
            }
            node->next[i] = 0;
        }
    }
    /* Shared nodes are only freed when the last reference disappears. */
    if (--node->refcount == 0) {
        free(node);
    }
}
// Destructor: releases any token array still held from the last parse().
LineParser::~LineParser()
{
  freetokens();
}
int main(int argc, char **argv) { char *filenamein = NULL; char *filenameout = NULL; char *inputstr = NULL; token *tokenlist = NULL; term *ast = NULL; int numtokens = 0; /* If argc < 3, we don't have 2 files at least as arguments */ if(argc < 3) { /* Show usage for the uninformed. */ help(argv[0]); exit(-1); } else if(argc > 3) /* If we have more than 3, there's probably flags specified */ { getargs(argc, argv); } /* If args >= 3, the last two must be the input file and the output file. */ filenamein = argv[argc - 2]; filenameout = argv[argc - 1]; inputstr = getfile(filenamein); /* Lex the input string. If numtokens < 0, there's an issue. */ numtokens = lex(inputstr, &tokenlist); free(inputstr); if(numtokens == -1 || (tokenlist == NULL && numtokens > 0)) { puts("A lexing error occured."); exit(-1); } /* If -l is specified (currently default) show the result of the lex */ if(FLAGS & SHOWLEX) { printtokens(tokenlist, numtokens); } /*parse(tokenlist, numtokens, &ast);*/ int perr = prog(tokenlist, numtokens, &ast); if(perr) puts("parsing error"); if(FLAGS & SHOWPARSE) { printast(ast, numtokens); } codegen(ast, filenameout); if(tokenlist != NULL) { freetokens(&tokenlist, numtokens); } if(ast != NULL) { freeast(&ast); } exit(0); }
/* Small lexical analyzer used for tokenizing a buffer of a data. This is used for tokenizing graph data from the input file and tokenizing commands entered by the user at runtime. Lexer tokenizes based on the following regex: token: id | num id: (a...Z)+ (a...Z | 0...9)* num: magnit | - magnit magnit: (0...9)+ (dot (0...9)*)? | (dot (0...9)+) @param buf Pointer to the buffer that is tokenized. @return Returns a pointer to a linked list of tokens on success, and NULL if there is a lexical error. */ gtoken_s *lex (unsigned char *buf) { unsigned char backup, gotnum, *bckptr; gtoken_s *curr; stream_ = NULL; gotnum = 0; for (curr = NULL, bckptr = buf; *buf != UEOF;) { switch (*buf) { case ',': curr = gtoken_s_ (curr, ",", T_COMMA); buf++; break; case '=': curr = gtoken_s_ (curr, "=", T_EQU); buf++; break; case '{': curr = gtoken_s_ (curr, "{", T_OPENBRACE); buf++; break; case '}': buf++; curr = gtoken_s_ (curr, "}", T_CLOSEBRACE); break; default: if (*buf <= ' ') while(*++buf <= ' '); else if ((*buf >= 'A' && *buf <= 'Z') || (*buf >= 'a' && *buf <= 'z')) { for (bckptr = buf, ++buf; (*buf >= 'A' && *buf <= 'Z') || (*buf >= 'a' && *buf <= 'z') || (*buf >= '0' && *buf <= '9'); buf++) { if (buf - bckptr == MAXLEXLEN) { throw_exception(); } } backup = *buf; *buf = '\0'; curr = gtoken_s_ (curr, bckptr, T_ID); *buf = backup; } else if ((*buf >= '0' && *buf <= '9') || *buf == '.' 
|| *buf == '-') { bckptr = buf; if (*buf >= '0' && *buf <= '9') gotnum = 1; else if (*buf == '-') { buf++; if ((*buf < '0' || *buf > '9') && *buf != '.') { printf ("Symbol Error %.15s\n", bckptr); throw_exception(); } else if ((*buf >= '0' && *buf <= '9')) gotnum = 1; } if (*buf != '.') { for (buf++; (*buf >= '0' && *buf <= '9'); buf++) { gotnum = 1; if (buf - bckptr == MAXLEXLEN) { printf("Too Long ID: %.15s", bckptr); throw_exception(); } } } if (*buf == '.') { for (buf++; (*buf >='0' && *buf <= '9'); buf++) { gotnum = 1; if (buf - bckptr == MAXLEXLEN) { printf("Too Long ID: %.15s", bckptr); throw_exception(); } } } if (gotnum) { backup = *buf; *buf = '\0'; gotnum = 0; curr = gtoken_s_ (curr, bckptr, T_NUM); *buf = backup; } else { printf("Symbol Error %c\n", *bckptr); throw_exception(); } } else { printf("Symbol Error %c\n", *bckptr); throw_exception(); } break; } } curr = gtoken_s_ (curr, "$", T_EOF); return stream_; exception_: freetokens (stream_); stream_ = NULL; return NULL; }