/* Lexes 'input' to the end, re-formats every token into 'output', and
 * round-trips each formatted token through a second lexer to verify that
 * formatting is lossless.  Finally prints the space-separated re-formatted
 * token stream on stdout. */
static void test_lex(const char *input)
{
    struct ds output;

    ds_init(&output);
    struct lexer lexer;

    lexer_init(&lexer, input);
    ds_clear(&output);
    while (lexer_get(&lexer) != LEX_T_END) {
        size_t len = output.length;  /* offset where this token's text begins */

        lex_token_format(&lexer.token, &output);

        /* Check that the formatted version can really be parsed back
         * losslessly. */
        if (lexer.token.type != LEX_T_ERROR) {
            const char *s = ds_cstr(&output) + len;  /* just this token's text */
            struct lexer l2;

            lexer_init(&l2, s);
            lexer_get(&l2);
            compare_token(&lexer.token, &l2.token);
            lexer_destroy(&l2);
        }
        ds_put_char(&output, ' ');
    }
    lexer_destroy(&lexer);

    /* Drop the trailing separator before printing. */
    ds_chomp(&output, ' ');
    puts(ds_cstr(&output));
    ds_destroy(&output);
}
int cmd_lex(FILE *file, const char *filename) { struct token token; buffer_t token_value; token_value = buffer_create(1024); lexer_init(file); log_set_unit(basename(filename)); printf("Line\tText\tValue\tType\n"); fflush(stdout); while (lexer_next_token(&token)) { lexer_token_value(&token, token_value); printf("%d:%d\t%s\t%s\t%s\n", token.line, token.column, token.text, buffer_data(token_value), lexer_token_type_name(token.type)); fflush(stdout); lexer_token_free_data(&token); } buffer_free(token_value); lexer_destroy(); log_close(); return EXIT_SUCCESS; }
/* Program entry point: hand control to the interactive shell.
 *
 * Fix: removed an "#if 0" scratch block that called gets() (removed from
 * the language in C11) and fflush(stdin) (undefined behavior).  Dead code
 * is deleted rather than left to rot. */
int main(void)
{
    shell_do();
    return 0;
}
/* Parse config 'file' into 'sections'.  If 'always_open' is set, the file
 * is created when it does not exist.  Returns CONFIG_FILENOTFOUND when the
 * file cannot be opened, otherwise CONFIG_SUCCESS (malformed input simply
 * ends parsing early).
 *
 * Fix: two "&section_name" expressions had been corrupted into
 * "§ion_name" ("&sect" is the HTML entity for '§'), which does not
 * compile; the address-of expressions are restored. */
static int config_parse(struct darray *sections, const char *file,
                        bool always_open)
{
    char *file_data;
    struct lexer lex;
    struct base_token token;
    struct strref section_name;
    FILE *f;

    f = os_fopen(file, "rb");
    if (always_open && !f)
        f = os_fopen(file, "w+");
    if (!f)
        return CONFIG_FILENOTFOUND;

    os_fread_utf8(f, &file_data);
    fclose(f);

    if (!file_data)
        return CONFIG_SUCCESS;

    lexer_init(&lex);
    lexer_start_move(&lex, file_data); /* lexer takes ownership of file_data */

    base_token_clear(&token);

    while (lexer_getbasetoken(&lex, &token, PARSE_WHITESPACE)) {
        struct config_section *section;

        /* Skip runs of whitespace tokens. */
        while (token.type == BASETOKEN_WHITESPACE) {
            if (!lexer_getbasetoken(&lex, &token, PARSE_WHITESPACE))
                goto complete;
        }

        /* Anything that is not a '[section]' header is skipped through
         * the end of its line. */
        if (*token.text.array != '[') {
            while (!is_newline(*token.text.array)) {
                if (!lexer_getbasetoken(&lex, &token, PARSE_WHITESPACE))
                    goto complete;
            }
            continue;
        }

        strref_clear(&section_name);
        config_parse_string(&lex, &section_name, ']');
        if (!section_name.len)
            break;

        section = darray_push_back_new(sizeof(struct config_section),
                                       sections);
        section->name = bstrdup_n(section_name.array, section_name.len);
        config_parse_section(section, &lex);
    }

complete:
    lexer_free(&lex);
    return CONFIG_SUCCESS;
}
/* Run yyparse() over strategy 'text', populating 'symtab'.  The working
 * directory is saved beforehand and restored afterwards.  Returns the
 * yyparse() status (0 on success); on failure the symbol table is
 * cleaned again before returning. */
int parse_strategy (struct symtab *symtab, const char *text)
{
    str_t saved_cwd;
    int rc;

    str_init (saved_cwd, 64);
    str_getcwd (saved_cwd);

    clean_error_msgs ();
    lexer_init (text);
    symbol_table_clean (symtab);
    config_set_default (symtab->config_table);

    rc = yyparse (symtab);

    /* Restore the directory saved above — parsing presumably may chdir(). */
    chdir (CSTR(saved_cwd));
    str_free (saved_cwd);

    if (rc != 0)
        symbol_table_clean (symtab);
    return rc;
}
/* Load config 'file' and parse it into 'sections'.  When 'always_open'
 * is set, the file is created if missing.  Returns CONFIG_FILENOTFOUND
 * when the file cannot be opened, otherwise CONFIG_SUCCESS. */
static int config_parse_file(struct darray *sections, const char *file,
                             bool always_open)
{
    struct lexer lex;
    char *data;
    FILE *f = os_fopen(file, "rb");

    if (!f && always_open)
        f = os_fopen(file, "w+");
    if (!f)
        return CONFIG_FILENOTFOUND;

    os_fread_utf8(f, &data);
    fclose(f);

    if (!data)
        return CONFIG_SUCCESS;

    lexer_init(&lex);
    lexer_start_move(&lex, data); /* lexer takes ownership of the buffer */
    parse_config_data(sections, &lex);
    lexer_free(&lex);
    return CONFIG_SUCCESS;
}
/* Tokenize the file named in argv[1], printing each token until EOF.
 *
 * Fixes: malloc() was cast (unnecessary in C) and never checked, so an
 * allocation failure would have crashed inside lexer_init(); switched to
 * the sizeof-*ptr idiom; dropped the redundant "if (lex)" guard around
 * free(). */
int main (int argc, char *argv[])
{
    struct lexer *lex = malloc (sizeof *lex);
    struct token *tok = NULL;

    if (!lex)
    {
        fprintf (stderr, "Out of memory\n");
        return 1;
    }
    if (argc <= 1)
    {
        fprintf (stderr, "No input file\n");
        goto cleanup;
    }
    if (!lexer_init (lex, argv[1]))
        goto cleanup;

    while ((tok = lexer_get_token (lex))->tok_class != tok_eof)
    {
        token_print (tok);
        token_free (tok);
    }
    token_free (tok); /* free the EOF token too */
    lexer_finalize (lex);

cleanup:
    free (lex);
    return 0;
}
/* Assemble one or more source files (argv[2..]) into the binary argv[1].
 *
 * Fix: a usage error previously called exit(0), reporting success to the
 * shell; it now exits non-zero.
 *
 * NOTE(review): only the token list from the final source file is freed;
 * earlier iterations' lists leak unless parser_parse() takes ownership —
 * confirm before moving lexer_tokens_delete() into the loop. */
int main (int argc, char * argv[])
{
    int source_i;
    char * text;
    struct _token * tokens = NULL;
    struct _parser parser;

    memset(&parser, 0, sizeof(struct _parser));

    if (argc < 3) {
        fprintf(stderr, "usage: %s <binary_out> <assembly source file>s\n",
                argv[0]);
        exit(EXIT_FAILURE);
    }

    lexer_init();

    for (source_i = 2; source_i < argc; source_i++) {
        text = assembler_read_file(argv[source_i]);
        tokens = lexer(text);
        free(text);
        parser_parse(&parser, tokens);
    }

    assembler_memory_definition_labels(&parser);
    assemble(&parser, argv[1]);

    lexer_tokens_delete(tokens);
    return 0;
}
/* Parse "((((1)+2)))" and verify the flattened postfix node stream:
 * const 1, const 2, ADD. */
void TestParseExpr_3(CuTest *tc)
{
    char sqlexpr[] = "((((1)+2)))";
    db_eetnode_t *expr;
    db_lexer_t lexer;
    db_query_mm_t mm;
    unsigned char segment[1000];

    lexer_init(&lexer, sqlexpr);
    init_query_mm(&mm, segment, sizeof(segment));

    parseexpression(&expr, &lexer, 0, strlen(sqlexpr), &mm, 0);

    /* Walk the node stream. */
    db_eetnode_t *cur = expr;
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBINT ==
                         ((db_eetnode_dbint_t *)cur)->base.type);
    CuAssertTrue(tc, 1 == ((db_eetnode_dbint_t *)cur)->integer);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbint_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBINT ==
                         ((db_eetnode_dbint_t *)cur)->base.type);
    CuAssertTrue(tc, 2 == ((db_eetnode_dbint_t *)cur)->integer);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbint_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_OP_ADD == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
}
/* Parse "MAX(LENGTH('apple'))" and verify the node stream:
 * 'apple' constant, LENGTH function, then a MAX aggregate temporary. */
void TestParseExpr_16(CuTest *tc)
{
    char sqlexpr[] = "MAX(LENGTH('apple'))";
    db_eetnode_t *expr;
    db_lexer_t lexer;
    db_query_mm_t mm;
    unsigned char segment[1000];

    lexer_init(&lexer, sqlexpr);
    init_query_mm(&mm, segment, sizeof(segment));

    parseexpression(&expr, &lexer, 0, strlen(sqlexpr), &mm, 0);

    db_eetnode_t *cur = expr;
    printf("np->type: %d\n", cur->type);
    fflush(stdout);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBSTRING == cur->type);
    CuAssertTrue(tc,
                 0 == strcmp(((db_eetnode_dbstring_t *)cur)->string, "apple"));
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbstring_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_FUNC_LENGTH_DBSTRING == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_AGGR_TEMP == cur->type);
    CuAssertTrue(tc,
                 DB_AGGR_MAX == ((db_eetnode_aggr_temp_t *)cur)->aggr_type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_aggr_temp_t), db_eetnode_t *);
}
/* Lex and parse one input line.  Returns the syntax tree, or NULL on
 * error; both failure paths print a diagnostic.  Any leftover token in
 * the module-level buffer is released before returning. */
Node* analyze_line(String* str)
{
    Node* tree;

    st_token_buf = NULL;
    lexer_init(str);
    parser_init();

    tree = parse_acceptable();

    switch (parser_state()) {
    case PS_ACCEPT:
        if (tree == NULL)
            fprintf(stderr, "parse error\n");
        break;

    case PS_ERROR:
        parser_print_error();
        if (tree != NULL) {
            delete_tree(tree);
            tree = NULL;
        }
        break;
    }

    if (st_token_buf != NULL) {
        delete_token(st_token_buf);
        st_token_buf = NULL;
    }
    return tree;
}
/* Put 'lex' into a pristine state: fresh base lexer, empty token array,
 * and all bookkeeping pointers/flags cleared. */
void cf_lexer_init(struct cf_lexer *lex)
{
    lexer_init(&lex->base_lexer);
    da_init(lex->tokens);

    lex->file = NULL;
    lex->reformatted = NULL;
    lex->write_offset = NULL;
    lex->unexpected_eof = false;
}
/* Test second and third phases of expression parsing.  The SQL below is
 * semantically meaningless (LENGTH with two arguments) but yields a node
 * stream that covers strings, functions, attributes, and arithmetic:
 * 'string1' LENGTH T.a 2 * 1 3 / + LENGTH. */
void TestParseExpr_17(CuTest *tc)
{
    char sqlexpr[] = "LENGTH(LENGTH('string1'),T.a*2+1/3)";
    db_eetnode_t *expr;
    db_lexer_t lexer;
    db_query_mm_t mm;
    unsigned char segment[1000];

    lexer_init(&lexer, sqlexpr);
    init_query_mm(&mm, segment, sizeof(segment));

    parseexpression(&expr, &lexer, 0, strlen(sqlexpr), &mm, 0);

    /* Walk the flattened postfix expression and confirm each node. */
    db_eetnode_t *cur = expr;
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBSTRING == cur->type);
    CuAssertTrue(tc, 0 == strcmp(((db_eetnode_dbstring_t *)cur)->string,
                                 "string1"));
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbstring_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_FUNC_LENGTH_DBSTRING == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_ATTR == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_attr_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBINT ==
                         ((db_eetnode_dbint_t *)cur)->base.type);
    CuAssertTrue(tc, 2 == ((db_eetnode_dbint_t *)cur)->integer);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbint_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_OP_MULT == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBINT ==
                         ((db_eetnode_dbint_t *)cur)->base.type);
    CuAssertTrue(tc, 1 == ((db_eetnode_dbint_t *)cur)->integer);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbint_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBINT ==
                         ((db_eetnode_dbint_t *)cur)->base.type);
    CuAssertTrue(tc, 3 == ((db_eetnode_dbint_t *)cur)->integer);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbint_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_OP_DIV == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_OP_ADD == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_FUNC_LENGTH_DBSTRING == cur->type);
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_t), db_eetnode_t *);
}
/* bogolexer entry point: lex each input message and either echo tokens
 * verbatim (passthrough mode) or print "get_token: <class> <text>" lines,
 * followed by a total token count. */
int main(int argc, char **argv)
{
    token_t t;

    fpo = stdout;
    mbox_mode = true; /* to allow multiple messages */

    process_arglist(argc, argv);
    process_config_files(false, longopts_bogolexer);

    if (encoding == E_UNKNOWN)
        encoding = E_DEFAULT;

    textblock_init();

    if (!passthrough) {
        if (quiet)
            fprintf(fpo, "quiet mode.\n");
        else
            fprintf(fpo, "normal mode.\n");
    }

    bogoreader_init(argc, (const char * const *) argv);

    /* One outer iteration per message supplied by the reader. */
    while ((*reader_more)()) {
        word_t token;

        lexer_init(); /* reset lexer state for each message */

        while ((t = get_token( &token )) != NONE) {
            count += 1; /* 'count' is declared outside this function;
                         * it accumulates across all messages */
            if (passthrough) {
                fprintf(fpo, "%s\n", token.u.text);
            }
            else if (!quiet)
                fprintf(fpo, "get_token: %d \"%s\"\n", (int)t, token.u.text);
        }
    }

    if ( !passthrough )
        fprintf(fpo, "%d tokens read.\n", count);

    /* cleanup storage */
    token_cleanup();
    mime_cleanup();
    textblock_free();

    MEMDISPLAY;
    return 0;
}
/* Like actions_parse(), but the actions are taken from the string 's'.
 * Returns NULL on success or a malloc()'d error message the caller owns. */
char * OVS_WARN_UNUSED_RESULT
actions_parse_string(const char *s, const struct action_params *ap,
                     struct ofpbuf *ofpacts, struct expr **prereqsp)
{
    struct lexer lexer;
    char *error;

    lexer_init(&lexer, s);
    lexer_get(&lexer); /* prime the first token */

    error = actions_parse(&lexer, ap, ofpacts, prereqsp);

    lexer_destroy(&lexer);
    return error;
}
/* One-time interpreter startup: select the input source (file argument or
 * interactive prompt), build the literal-constant hash table, then
 * initialize the lexer and the VM.  Exits with status 1 if the named file
 * cannot be opened. */
void init_all(int argc, char **argv)
{
    // process command line arguments
    char *filename = NULL;
    FILE *fp = stdin;
    EmObject *ob;
    if (argc > 1)
        filename = argv[1];

    if (filename != NULL) {
        if ((fp = fopen(filename, "r")) == NULL) {
            fprintf(stderr, "Cannot open file %s\n", filename);
            exit(1);
        }
        source.type = SOURCE_TYPE_FILE;
    } else {
        source.type = SOURCE_TYPE_PROMPT;
    }
    source.filename = filename;
    source.fp = fp;

    // Constant hash table
    literalTable = newhashobject();

    // Add commonly used literals.  Each DECREF drops our local reference
    // once the table holds its own.
    ob = newintobject(1);
    hashobject_insert_by_string(literalTable, "1", ob);
    DECREF(ob);
    ob = newintobject(-1);
    hashobject_insert_by_string(literalTable, "-1", ob);
    DECREF(ob);
    ob = newintobject(0);
    hashobject_insert_by_string(literalTable, "0", ob);
    DECREF(ob);
    ob = newstringobject("*");
    hashobject_insert_by_string(literalTable, "*", ob);
    DECREF(ob);
    // The shared null object goes in without a DECREF — presumably a
    // singleton that must outlive the table; confirm against nulobj's
    // definition.
    hashobject_insert_by_string(literalTable, "null", &nulobj);

    // initialize the lexer
    lexer_init();

    // initialize the VM
    vm_init();
}
/* Lex the file at argv[1], consuming tokens while the lexer stays READY.
 *
 * Fix: the status returned by lexer_init() was immediately overwritten
 * inside the do/while, so a failed initialization still ran one
 * lexer_next() on an unusable lexer.  The loop now tests the init status
 * before the first lexer_next() call. */
int main(int argc, char *argv[])
{
    struct Lexer l;
    lexer_state s;

    if (argc != 2) {
        fprintf(stderr, "Usage: %s path\n", argv[0]);
        return EXIT_FAILURE;
    }

    s = lexer_init(&l, argv[1]);
    while (s == LEXER_READY) {
        s = lexer_next(&l);
    }
    return EXIT_SUCCESS;
}
/* Allocate and set up a PARSER for 'file'/'io'.  Aborts the process on
 * allocation failure; returns NULL if the lexer cannot be created.
 * NOTE(review): the 'reload' parameter is currently unused here. */
static PARSER* parser_init(const char *file, FILE *io, int reload)
{
    PARSER *p = calloc(1, sizeof(PARSER));

    if (!p) {
        pgr_abort(ABORT_MEMFAIL);
    }

    p->backends = make_backend(NULL);
    p->f = parse_top;

    p->l = lexer_init(file, io);
    if (!p->l) {
        parser_free(p);
        return NULL;
    }

    return p;
}
/**
 * @ingroup mslibs_dotconf_core_test
 * @return Pointer to the test suite for the Lexer class.
 */
ut::test_unit * ts_lexer()
{
    ut::test_suite * ts = BOOST_TEST_SUITE("lexer");

    // Register the fixed and parameterized lexer test cases.
    ts->add( BOOST_TEST_CASE( &tc_empty ) );
    ts->add( BOOST_PARAM_TEST_CASE( &tc_endl , endl_data , endl_end ) );
    ts->add( BOOST_PARAM_TEST_CASE( &tc_bslash , bslash_data , bslash_end ) );
    ts->add( BOOST_PARAM_TEST_CASE( &tc_quote , quote_data , quote_end ) );
    ts->add( BOOST_PARAM_TEST_CASE( &tc_eqsign , eqsign_data , eqsign_end ) );
    ts->add( BOOST_PARAM_TEST_CASE ( &tc_comment , comment_data , comment_end ) );
    ts->add( BOOST_PARAM_TEST_CASE( &tc_node , node_data , node_end ) );

    // lexer_init() runs before the iterators below are captured —
    // presumably it populates lexer_data; confirm before reordering.
    lexer_init();
    ts->add( BOOST_PARAM_TEST_CASE ( &tc_lexer, lexer_data.begin() , lexer_data.end() ) );

    return ts ;
}
int cmd_parse_expr(FILE *file, const char *filename, const char *cmd) { log_set_unit(basename(filename)); lexer_init(file); parser_init(); generator_init(); struct node* node = NULL; symtable_t symtable = NULL; code_t code = NULL; if (strcmp(cmd, "parse_expr") == 0) { parser_flags_set(0); node = parser_parse_expr(); print_node(node, 0, 0); } else if (strcmp(cmd, "parse_stmt") == 0) { parser_flags_set(0); node = parser_parse_statement(); print_node(node, 0, 0); } else if (strcmp(cmd, "parse") == 0) { parser_flags_set(PF_RESOLVE_NAMES); symtable = parser_parse(); print_symtable(symtable, 0); } else if (strcmp(cmd, "compile") == 0) { symtable = parser_parse(); if (symtable != NULL) { code = generator_process(symtable); optimizer_optimize(code); generator_print_code(code); } else { print_symtable(symtable, 0); } } parser_free_node(node); symtable_destroy(symtable, 1); generator_free_code(code); generator_destroy(); parser_destroy(); lexer_destroy(); log_close(); return EXIT_SUCCESS; }
void test_lexer(void) { Lexer lexer; SSlice data; SSliceStatus sstatus; LexerStatus lstatus; sstatus = sslice_assign_validate(&data, REAL_TEMPLATE); if (sstatus != SSLICE_OK) { die("Error loading template into SSlice: %d", sstatus); } lexer_init(&lexer); lexer_set_data(&lexer, &data); while (true) { Token *token; char *token_as_string; lstatus = lexer_load_next(&lexer); if (lstatus != LEXER_OK) { break; } token = lexer_get_current_token(&lexer); if (token->type == TOKEN_UNKNOWN) { die("Got unknown token\n"); } token_as_string = token_to_string(token); printf("Token: %s [%s]\n", TokenTypes[token->type], token_as_string); free(token_as_string); } if (lstatus == LEXER_END) { puts("End of lexer"); } else { die("Bad lexer status: %d\n", lstatus); } }
/* Like actions_parse(), but the actions are taken from the string 's'.
 * Returns NULL on success or a malloc()'d error message the caller owns. */
char * OVS_WARN_UNUSED_RESULT
actions_parse_string(const char *s, const struct shash *symtab,
                     const struct simap *ports, const struct simap *ct_zones,
                     uint8_t first_table, uint8_t n_tables, uint8_t cur_table,
                     uint8_t output_table, struct ofpbuf *ofpacts,
                     struct expr **prereqsp)
{
    struct lexer lexer;
    char *error;

    lexer_init(&lexer, s);
    lexer_get(&lexer); /* prime the first token */

    error = actions_parse(&lexer, symtab, ports, ct_zones, first_table,
                          n_tables, cur_table, output_table, ofpacts,
                          prereqsp);

    lexer_destroy(&lexer);
    return error;
}
/* Create a new config object parsed from the string 'str'.  Returns
 * CONFIG_SUCCESS, or CONFIG_ERROR on a NULL 'config' pointer or
 * allocation failure (in which case *config is NULL). */
int config_open_string(config_t **config, const char *str)
{
    struct lexer lex;
    config_t *cfg;

    if (!config)
        return CONFIG_ERROR;

    cfg = bzalloc(sizeof(struct config_data));
    *config = cfg;
    if (!cfg)
        return CONFIG_ERROR;

    cfg->file = NULL;

    lexer_init(&lex);
    lexer_start(&lex, str);
    parse_config_data(&cfg->sections, &lex);
    lexer_free(&lex);

    return CONFIG_SUCCESS;
}
/* Read all of 'stm' into a single heap buffer, then lex and parse it.
 *
 * Fixes: malloc() was unchecked, and the classic `p = realloc(p, n)`
 * pattern both leaked the buffer and dereferenced NULL on allocation
 * failure.  Both allocations are now checked; on failure we report and
 * return without parsing. */
static void read_from_stream(FILE *stm)
{
    assert(stm);
#define BUF_SIZE 1024
    char buf[BUF_SIZE];
    size_t input_size = 1; /* room for the terminating '\0' */
    char *input = malloc(BUF_SIZE);

    if (!input) {
        fprintf(stderr, "out of memory\n");
        return;
    }
    input[0] = buf[0] = '\0';

    while (fgets(buf, BUF_SIZE, stm)) {
        char *grown;

        input_size += strlen(buf);
        grown = realloc(input, input_size);
        if (!grown) {
            fprintf(stderr, "out of memory\n");
            free(input);
            return;
        }
        input = grown;
        strcat(input, buf);
    }

    lexer_init(input);
    parse();
    free(input);
}
int main(int argc, char **argv) { primsigs = primitives(); Parser ps; Lexer lexer; FILE *ic, *oc; int err = 0; getopts(argc, argv); ic = fopen(input, "r"); if(ic == NULL){ printf("Can not open the file.\n"); return 1; } parser_init(&ps); lexer_init(&lexer); lexer_setin(&lexer, ic); err |= yyparse(&ps, &lexer, "input"); fclose(ic); if(proc_compile && err == 0){ oc = fopen(compiled_file, "w"); err |= compile(oc, &ps.prog); fclose(oc); } if(proc_assemble && err == 0){ char cmd[100]; sprintf(cmd, "gcc -o %s %s -L%s %s", output, compiled_file, libdir, LIBS); system(cmd); } return 0; }
/* Parse the bare string literal "'apple'" and verify it becomes a single
 * string-constant node. */
void TestParseExpr_15(CuTest *tc)
{
    char sqlexpr[] = "'apple'";
    db_eetnode_t *expr;
    db_lexer_t lexer;
    db_query_mm_t mm;
    unsigned char segment[1000];

    lexer_init(&lexer, sqlexpr);
    init_query_mm(&mm, segment, sizeof(segment));

    parseexpression(&expr, &lexer, 0, strlen(sqlexpr), &mm, 0);

    db_eetnode_t *cur = expr;
    CuAssertTrue(tc, (db_uint8)DB_EETNODE_CONST_DBSTRING == cur->type);
    CuAssertTrue(tc,
                 0 == strcmp(((db_eetnode_dbstring_t *)cur)->string, "apple"));
    MOVEPOINTERNBYTES(cur, cur, sizeof(db_eetnode_dbstring_t), db_eetnode_t *);
}
// Default constructor: put the embedded C lexer member 'lex' (declared in
// the enclosing class, outside this view) into its initial state.
inline BaseLexer() {lexer_init(&lex);}
/* Tokenize input text and record each word in the wordhash_t table 'wh',
 * updating per-word frequency/count statistics.  BOGO_LEX_LINE tokens are
 * pre-parsed wordlist lines of the form "word" bad good, so the quoted
 * word is extracted in place and the trailing counts are parsed into the
 * word's properties.  (An older version of this header claimed a boolean
 * return; the function is void.) */
void collect_words(wordhash_t *wh)
{
    if (DEBUG_WORDLIST(2))
        fprintf(dbgout, "### collect_words() begins\n");

    lexer_init();

    for (;;) {
        wordprop_t *wp;
        word_t token;
        token_t cls = get_token( &token );

        if (cls == NONE)
            break;

        if (cls == BOGO_LEX_LINE) {
            /* Strip the surrounding quote marks: shift the text left by
             * one and terminate at the closing quote. */
            char *beg = (char *)token.u.text+1; /* skip leading quote mark */
            char *end = strchr(beg, '"');
            assert(end);
            token.leng = end - beg;
            memmove(token.u.text, token.u.text + 1, token.leng + 1);
            token.u.text[token.leng] = '\0'; /* ensure nul termination */
        }

        wp = wordhash_insert(wh, &token, sizeof(wordprop_t), &wordprop_init);
        if (wh->type != WH_CNTS)
            wp->freq = 1;

        /******* EK **********/
        /* NOTE(review): this block reads token.text while the rest of the
         * function uses token.u.text — likely stale; confirm it still
         * compiles when CP866 is defined. */
#ifdef CP866 /* mime charset hack */
        {
            static bool hasCharset=false;
            if (hasCharset) /* prev token == charset */
            {
                if (token.leng > 5 && !strncmp(token.text, "mime:", 5))
                    set_charset(token.text+5);
            }
            hasCharset = 0;
            if (token.leng == 5+7) {
                if (!strncmp(token.text, "mime:", 5) &&
                    !strncasecmp(token.text+5, "charset", 7))
                    hasCharset = true;
            }
        }
#endif
        /******* end of EK addition **********/

        if (DEBUG_WORDLIST(3)) {
            fprintf(dbgout, "%3d ", (int) wh->count);
            word_puts(&token, 0, dbgout);
            fputc('\n', dbgout);
        }

        if (cls == BOGO_LEX_LINE) {
            /* Parse the trailing ' bad good' counts of the wordlist line;
             * token.leng + 2 skips the word plus closing quote and space. */
            char *s = (char *)token.u.text;
            s += token.leng + 2;
            wp->cnts.bad = atoi(s);
            s = strchr(s+1, ' ') + 1;
            wp->cnts.good = atoi(s);
            wp->cnts.msgs_good = msgs_good;
            wp->cnts.msgs_bad = msgs_bad;
        }
    }

    if (DEBUG_WORDLIST(2))
        fprintf(dbgout, "### collect_words() ends\n");

    return;
}
/* Table-driven LR parse of the text at 'input_start'.
 *
 * Returns the number of lines parsed on success, or the negated line
 * number on a reduce-action failure, syntax error, or parse-stack
 * overflow.  On success 'root' is set to the root parse-tree node.
 *
 * The tables consulted (Bm/Br/Bc/Bmask, Tm/Tr/Tc, Nm/Nr/Nc, Rr/Rm/Rc,
 * PL, tact_numb/tact_func) are declared elsewhere in the class —
 * presumably emitted by a parser generator; their exact encoding is not
 * visible here. */
int calc_parser::parse (char* input_start)
{
    int p;                                 // Production (rule) number.
    int t;                                 // Terminal symbol number.
    int x = 0;                             // State number.

    lexer_init (input_start);              // Initialize lexer.
    PS_end = P_stack + STKSIZE;            // Set parse-stack end pointer.
    PS = P_stack;                          // Set parse-stack pointer.

Read:
    t = get_token ();                      // Get incoming token.
    prt_line ();
    token.sti = -t;                        // Symbol-table index = -t.
    if (tact_numb[t] >= 0)                 // If token action ...
    {
        token.sti = (*tact_func [tact_numb[t]])(t); // Call token-action function.
    }

Shft:
    if (Bm [Br[x] + Bc[t]] & Bmask[t])     // Check B-matrix for shift action.
    {
        if (++PS >= PS_end) goto Over;     // Check for stack overflow.
        PS->state = x;                     // Put current state on stack.
        PS->sti = token.sti;               // Put token symbol table index on stack.
        PS->line = token.linenumb;         // Put its line number on stack.
        PS->node = 0;                      // Set node on stack to zero.
        x = Tm [Tr[x] + Tc[t]];            // Get next state from terminal transition matrix.
        while (x <= 0)                     // While shift-reduce actions.
        {
            p = -x;                        // Reduce stack by production p.
            PS -= PL[p];                   // Reduce stack ptr by production length.
            if (!reduce(p)) return -linenumb; // Call reduce action with rule number.
            x = Nm [Nr[PS->state] + Nc[p]]; // Get next state from nonterminal transition.
        }
        goto Read;                         // Go to read next token.
    }

    if ((p = Rr[x]) > 0 || (p = Rm [Rc[t] - p]) > 0) // Get reduction?
    {
        PS -= PL[p];                       // Reduce parse stack ptr by rule length - 1.
        if (PL[p] < 0)                     // Null production?
        {
            if (PS >= PS_end) goto Over;   // Check for overflow.
            PS->node = 0;                  // Clear node pointer.
            PS->state = x;                 // Stack current state, replacing old state.
        }
        while (1)
        {
            if (!reduce(p)) return -linenumb; // Call reduce action with rule number.
            x = Nm [Nr[PS->state] + Nc[p]]; // Get next state from nonterminal transition.
            if (x > 0) goto Shft;          // Continue parsing.
            p = -x;                        // Set production number.
            PS -= PL[p];                   // Reduce parse stack ptr by rule length - 1.
        }
    }

    if (x == ACCEPT_STATE)                 // If Goal production.
    {
        PS -= PL[0];                       // Reduce parse stack ptr by rule length - 1.
        if (!reduce(0)) return -linenumb;  // Call reduce action with rule number.
        // NOTE(review): "linenumb = --linenumb;" is a redundant
        // self-assignment — a plain "linenumb--;" would be clearer.
        if (linenumb > 0) linenumb = --linenumb; // Reduce line number by one.
        root = PS[0].node;                 // Define root node.
        return linenumb;                   // Return number of lines parsed.
    }

    prt_error (term_symb[t]);              // Print syntax error message.
    return -linenumb;                      // Return negative number of lines for failure.

Over:
    printf ("\nParser stack overflow.\n\n");
    return -linenumb;                      // Return negative number of lines for failure.
}