/*
 * Format the lexer's current token as a human-readable description in str.
 *
 * lex: lexer context; lex->tk selects the formatting.
 * str: caller-supplied output buffer.
 * len: total size of str in bytes.
 *
 * BUGFIX: the original strncpy/strncat pair was unsafe — strncpy does not
 * guarantee NUL-termination, and strncat was passed the TOTAL buffer size
 * rather than the remaining space, allowing writes past the end of str.
 * snprintf is bounded by len and always NUL-terminates (when len > 0),
 * truncating long token text instead of overflowing.
 */
void lexer_get_token_str(be_jse_lex_ctx_t *lex, char *str, size_t len)
{
    if (lex->tk == BE_TOKEN_ID) {
        snprintf(str, len, "ID:%s", lexer_get_token(lex));
    } else if (lex->tk == BE_TOKEN_STR) {
        snprintf(str, len, "String:'%s'", lexer_get_token(lex));
    } else {
        /* Non-ID, non-string tokens have their own bounded formatter. */
        lexer_token_to_str(lex->tk, str, len);
    }
}
/*
 * Driver: lex the file named in argv[1] and print every token until EOF.
 *
 * BUGFIXES vs. original:
 *  - malloc result was never checked; lexer_init would dereference NULL on OOM.
 *  - error paths (missing argument, lexer_init failure) returned 0 (success);
 *    they now return 1.
 *  - removed the unnecessary cast of malloc and used `sizeof *lex` so the
 *    allocation tracks the pointer's type.
 *  - free(NULL) is a no-op, so the `if (lex)` guard was redundant.
 */
int main (int argc, char *argv[])
{
    int rc = 1;
    struct lexer *lex = malloc (sizeof *lex);
    struct token *tok = NULL;

    if (lex == NULL) {
        fprintf (stderr, "Out of memory\n");
        return 1;
    }
    if (argc <= 1) {
        fprintf (stderr, "No input file\n");
        goto cleanup;
    }
    if (!lexer_init (lex, argv[1]))
        goto cleanup;

    /* Print and release each token; the loop stops on (and below frees)
       the EOF token itself. */
    while ((tok = lexer_get_token (lex))->tok_class != tok_eof) {
        token_print (tok);
        token_free (tok);
    }
    token_free (tok);
    lexer_finalize (lex);
    rc = 0;

cleanup:
    free (lex);
    return rc;
}
/*
 * Entry point: hands control straight to the interactive shell.
 * The #if 0 region below is an old manual test driver kept for reference.
 */
int main(void)
{
#if 0
    /* Disabled interactive lexer smoke test.
     * NOTE(review): do NOT re-enable as-is —
     *  - gets() is an unbounded read (removed in C11); use fgets() instead;
     *  - fflush(stdin) is undefined behavior per the C standard;
     *  - relies on file-scope `buf` and `token[]` that are not visible in
     *    this chunk — confirm they still exist before reviving this code. */
    int i;
    printf("enter text: ");
    fflush(stdout);
    gets(buf);
    fflush(stdin);
    printf(" text entered: %s\n", buf);
    lexer_init(buf);
    /* Pull at most 8 tokens, displaying each as it is produced. */
    for(i = 0, lexer_process(); lexer_has_token() && i < 8; i++, lexer_process()) {
        printf("\thas token\n");
        token[i] = lexer_get_token();
        token_display(&token[i]);
    }
#endif
    /* Live code path. */
    shell_do();
    return 0;
}
int main(int argc, char const *argv[]) { for (--argc, ++argv; 0 < argc; --argc, ++argv) { lexer_t *lexer = NULL; char *buffer = NULL; FILE *fp = NULL; long length = -1; long readlen = 0; if (!(*argv)) { fprintf(stderr, "NULL argument to bmxlex\n"); return 1; } fp = fopen(*argv, "rb"); if (fp == NULL) { fprintf(stderr, "Failed to open file %s\n", *argv); return 1; } if (fseek(fp, 0, SEEK_END)) { fprintf(stderr, "Failed to seek to the end of %s\n", *argv); goto ERROR_CLOSE_FILE; } length = ftell(fp); if (length == -1) { fprintf(stderr, "Failed to get the length of %s\n", *argv); goto ERROR_CLOSE_FILE; } if (fseek(fp, 0, SEEK_SET)) { fprintf(stderr, "Failed to seek to the start of %s\n", *argv); goto ERROR_CLOSE_FILE; } buffer = (char *)malloc(length); if (buffer == NULL) { fprintf(stderr, "Failed to allocate memory for source buffer for %s\n", *argv); goto ERROR_CLOSE_FILE; } if ((readlen = fread(buffer, 1, length, fp)) != length) { fprintf(stderr, "Failed to read file %s to source buffer (read %ld)\n", *argv, readlen); goto ERROR_CLOSE_FREE; } fclose(fp); lexer = lexer_new(buffer, buffer + length); int err = lexer_run(lexer); if (err) { fprintf(stderr, "Failed to lex %s\nError: %s\n", *argv, lexer_get_error(lexer)); // free(buffer); // return 1; } { size_t num_tokens = lexer_get_num_tokens(lexer); size_t index = 0; for (; index < num_tokens; ++index) { token_t token; char *str = NULL; char *trapped = NULL; size_t len = 0; size_t trapped_len = 0; size_t idx = 0; size_t trapped_idx = 0; lexer_get_token(lexer, index, &token); if (token.kind == TOK_INVALID) { free(buffer); fprintf(stderr, "Invalid token encountered at %d:%d in %s\n", token.line, token.column, *argv); return 1; } str = token_to_string(&token); len = strlen(str); for (; idx < len; ++idx) trapped_len += ((str[idx] == '\n') ? 
2 : 1); trapped = calloc(trapped_len + 1, 1); for (idx = 0; idx < len; ++idx, ++trapped_idx) { if (str[idx] == '\n') { trapped[trapped_idx++] = '\\'; trapped[trapped_idx] = 'n'; } else { trapped[trapped_idx] = str[idx]; } } fprintf(stdout, "\"%s\"[%d:%d]:[%d:%s]:%s\n", *argv, token.line, token.column, token.kind, g_token_names[token.kind], trapped); free(str); free(trapped); } } lexer_destroy(lexer); free(buffer); continue; // error gotos ERROR_CLOSE_FREE: free(buffer); ERROR_CLOSE_FILE: fclose(fp); return 1; } return 0; }