void reduction_step(reduction_list *temp_reduction_list, uint32_t red_list_node,
                    token_node *list_itr, parsing_ctx *ctx)
{
    uint32_t node, offset;
    const uint32_t *itr, *end;

    if (is_terminal(list_itr->token)) {
        itr = &ctx->gr_token_alloc[token_value(list_itr->token)];
        end = itr + 1;
    } else {
        offset = rewrite_rules[list_itr->token];
        itr = &(rewrite_rules[offset]);
        end = itr + *itr + 1;
        ++itr;
    }

    /* For each token in the current stack element token list. */
    for (; itr != end; ++itr) {
        node = get_son_with(vect_reduction_tree, red_list_node, *itr);
        /* If the current position has a son corresponding to the current
         * token, navigate the tree. */
        if (node != 0) {
            append_position_on_reduction_list(temp_reduction_list, node);
        }
    }
}
uint32_t get_son_with(const uint16_t *tree, uint32_t offset, gr_token label)
{
    uint32_t itr;

    /* Children of the node at `offset` are stored as (token value, son offset)
     * pairs starting at offset + 2; tree[offset + 1] holds the number of
     * uint16_t entries in that child list (two per son). */
    for (itr = offset + 2; itr < offset + tree[offset + 1] + 2; itr += 2) {
        if (tree[itr] == token_value(label)) {
            return tree[itr + 1];
        }
    }
    /* No son with the requested label. */
    return 0;
}
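/* Hypothetical sketch (assumption, not from the original tables): a tiny tree
 * laid out the way get_son_with() reads it.  tree[offset + 1] counts the
 * uint16_t entries of the child list (two per son); each son is a
 * (token value, son offset) pair starting at offset + 2.  tree[offset] itself
 * is not read by get_son_with() and is left 0 here. */
static const uint16_t example_tree[] = {
    /* node at offset 0: two sons */
    0, 4,  5, 6,  7, 8,
    /* node at offset 6: leaf reached through token value 5 */
    0, 0,
    /* node at offset 8: leaf reached through token value 7 */
    0, 0
};
/* get_son_with(example_tree, 0, tok) returns 6 when token_value(tok) == 5,
 * 8 when token_value(tok) == 7, and 0 for any other label. */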
static int next_bdf(char **str, int *seg, int *bus, int *dev, int *func)
{
    char *token;

    if ( !(*str) || !strchr(*str, ',') )
        return 0;

    /* Parse one "seg,bus,dev,func" group and advance *str past it. */
    token = *str;
    *seg  = token_value(token);
    token = strchr(token, ',') + 1;
    *bus  = token_value(token);
    token = strchr(token, ',') + 1;
    *dev  = token_value(token);
    token = strchr(token, ',') + 1;
    *func = token_value(token);
    token = strchr(token, ',');
    *str  = token ? token + 1 : NULL;

    return 1;
}
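/* Hypothetical usage sketch (assumption, not from the original source): walk a
 * whole "seg,bus,dev,func,seg,bus,dev,func,..." option string.  parse_bdf_list()
 * and record_device() are invented names for illustration only. */
extern void record_device(int seg, int bus, int dev, int func);

static void parse_bdf_list(char *opt)
{
    int seg, bus, dev, func;

    /* next_bdf() advances opt past each group and returns 0 once opt is NULL
     * or the remaining string no longer contains a comma. */
    while ( next_bdf(&opt, &seg, &bus, &dev, &func) )
        record_device(seg, bus, dev, func);
}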
int b_exit(t_context *context, t_list *args)
{
    size_t arg_count;
    int    exit_code;

    arg_count = ft_lstsize(args);
    if (arg_count > 2)
        return (builtin_error(EXIT_BUILTIN, ERR_TOO_MANY_ARGS, 0));
    /* The optional second list element (after the command name) supplies the
     * numeric exit status. */
    exit_code = EXIT_SUCCESS;
    if (arg_count == 2)
        exit_code = ft_atoi(token_value(args, 2));
    shell_exit(context, exit_code);
    return (BUILTIN_SUCCESS);
}
int b_unsetenv(t_context *context, t_list *args)
{
    size_t arg_count;
    size_t i;

    arg_count = ft_lstsize(args);
    if (arg_count == 1)
        return (builtin_error(UNSETENV_BUILTIN, ERR_TOO_FEW_ARGS, 0));
    /* Element 1 is the command name itself; unset every following argument. */
    i = 1;
    while (i < arg_count)
    {
        environ_remove(context, token_value(args, i + 1));
        ++i;
    }
    return (BUILTIN_SUCCESS);
}
uint8_t rewrite_to_axiom(gr_token token)
{
    uint32_t offset;
    const uint32_t *end, *ptr;

    /* rewrite_rules[token] holds the offset of a length-prefixed list of
     * tokens this symbol can rewrite to. */
    ptr = rewrite_rules;
    offset = ptr[token_value(token)];
    ptr = &ptr[offset];
    end = ptr + *ptr + 1;
    /* Return 1 if that list contains the axiom __S, 0 otherwise. */
    for (++ptr; ptr != end; ++ptr) {
        if (*ptr == __S) {
            return 1;
        }
    }
    return 0;
}
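/* Hypothetical sketch (assumption, not from the original grammar tables):
 * rewrite_rules as rewrite_to_axiom() reads it.  The first entries form an
 * offset table indexed by token value; each offset points at a length-prefixed
 * list of tokens the symbol can rewrite to, and __S marks the axiom.
 * TOK_A, TOK_B and the value standing in for __S are invented. */
enum { TOK_A = 0, TOK_B = 1 };

static const uint32_t example_rewrite_rules[] = {
    /* offset table    */ 2, 5,
    /* rules for TOK_A */ 2, /* __S */ 99, TOK_B,   /* contains the axiom */
    /* rules for TOK_B */ 1, TOK_A                  /* no axiom */
};
/* If rewrite_rules pointed at this table and __S were 99, then
 * rewrite_to_axiom(TOK_A) would return 1 and rewrite_to_axiom(TOK_B) 0. */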
Token *token_from_lexeme(Token *lexeme)
{
    char *word = string_value(lexeme_value(lexeme));
    void *value = NULL;
    char first_char = word[0];

    if ( char_is_line_end(first_char) ) {
        token_type(lexeme) = FX_TOKEN_LINE_END;
        string_free(token_value(lexeme));
    } else if ( char_is_statement_end(first_char) ) {
        token_type(lexeme) = FX_TOKEN_STATEMENT_END;
        string_free(token_value(lexeme));
    } else if ( char_is_method_selector(first_char) ) {
        token_type(lexeme) = FX_TOKEN_ATTRIBUTE_SELECTOR;
        string_free(token_value(lexeme));
    } else if ( char_opens_group(first_char) ) {
        token_type(lexeme) = FX_TOKEN_GROUP_START;
        string_free(token_value(lexeme));
    } else if ( char_closes_group(first_char) ) {
        token_type(lexeme) = FX_TOKEN_GROUP_END;
        string_free(token_value(lexeme));
    } else if ( char_is_separator(first_char) ) {
        token_type(lexeme) = FX_TOKEN_COMMA;
        string_free(token_value(lexeme));
    } else if ( char_opens_block(first_char) ) {
        token_type(lexeme) = FX_TOKEN_BLOCK_START;
        string_free(token_value(lexeme));
    } else if ( char_closes_block(first_char) ) {
        token_type(lexeme) = FX_TOKEN_BLOCK_END;
        string_free(token_value(lexeme));
    } else if ( char_is_setter(first_char) && lexeme_length(lexeme) == 1 ) {
        token_type(lexeme) = FX_TOKEN_LOCAL_SETTER;
        string_free(token_value(lexeme));
    } else if ( char_is_colon(first_char) && lexeme_length(lexeme) == 1 ) {
        token_type(lexeme) = FX_TOKEN_ATTRIBUTE_SETTER;
        string_free(token_value(lexeme));
    } else if ( char_is_deferred_arg(first_char) && lexeme_length(lexeme) == 1 ) {
        token_type(lexeme) = FX_TOKEN_DEFERRED_ARGUMENT;
        string_free(token_value(lexeme));
    } else if ( word_is_block_declaration(word) ) {
        token_type(lexeme) = FX_TOKEN_BLOCK_DECLARATION;
        string_free(token_value(lexeme));
    } else if ( char_is_regex_bookend(first_char) ) {
        token_type(lexeme) = FX_TOKEN_REGEX;
    } else if ( char_is_string_bookend(first_char) ) {
        token_type(lexeme) = FX_TOKEN_STRING;
        word[lexeme_length(lexeme) - 1] = '\0'; // shorten the string contents to remove the quotation marks
        value = String_create(word + 1);
        check(value, "token string value is NULL");
        string_free(token_value(lexeme));
        token_value(lexeme) = value;
    } else if ( lexed_word_is_number(word) ) {
        token_type(lexeme) = FX_TOKEN_NUMBER;
        value = Number_create(word);
        check(value, "token number is NULL");
        string_free(token_value(lexeme));
        token_value(lexeme) = value;
    } else if ( char_is_capitalized(first_char) ) {
        token_type(lexeme) = FX_TOKEN_GLOBAL_ID;
    } else if ( char_is_colon(first_char) ) {
        token_type(lexeme) = FX_TOKEN_ATOM;
        value = String_create(word + 1);
        check(value, "token string value is NULL");
        string_free(token_value(lexeme));
        token_value(lexeme) = value;
    } else {
        token_type(lexeme) = FX_TOKEN_ID;
    }

    return lexeme;
error:
    return NULL;
}
static int eqntoken_eval(eqneval_t *data)
{
    int loop, toend;
    int r = 0;
    float a, b;
    eqntoken_t *token, *lp;
    int i;
    static const int precidence[] = OPprecidence;
    static const int type[] = OPtype;
    int types[4] = { -10, -10, -10, -10 };
    int precs[4] = { -10, -10, -10, -10 };
    int stack_entry = data->stackp;

    toend = 0;
    do {
        itemop_t ops[4] = { OPeolist, OPeolist, OPeolist, OPeolist };
        eqntoken_t *tokens[4] = { NULL, NULL, NULL, NULL };

        loop = 0;
        token = data->list;
        data->list = eqntoken_next(data->list);
        if (data->list == token)
            toend++;    /* catch endings */

        /* lookahead */
        for (lp = token, i = 0; (i < 4); i++, lp = eqntoken_next(lp)) {
            ops[i] = lp->op;
            tokens[i] = lp;
            assert(ops[i] >= 0);
            assert((ops[i] * sizeof(int)) < sizeof(precidence));
            assert((ops[i] * sizeof(int)) < sizeof(type));
            types[i] = type[ops[i]];
            precs[i] = precidence[ops[i]];
            if (OP_ISEND(lp->op))   /* we did the last token, stop */
                break;
        }

        D(8, fprintf(stderr, "Eval: lookahead = "));
        for (i = 0; i < 4; i++) {
            char *syms[] = OPsyms;
            D(8, fprintf(stderr, " %s,t%i,p%i", syms[ops[i]], types[i], precs[i]));
        }
        D(8, fprintf(stderr, "\n"));

        if (OP_ISANYV(token->op)) {
            r = token_value(token, &a);
            D(5, fprintf(stderr, "Value: %g\n", a));
            PUSH(a);
        } else if (types[0] == 2) {     /* binary op */
            a = 1.0;
            b = 1.0;
            POP(a);
            if (/* a literal, followed by a higher-precedence operator that is
                   not an open/close */
                (OP_ISV(ops[1]) && (precs[2] > precs[0]) && (types[2] != 4)) ||
                /* an operator (i.e. open/close) with higher precedence */
                ((types[1] >= 2) && (precs[1] > precs[0]))) {
                /* SHIFT */
                D(6, fprintf(stderr, "Shift: recursion\n"));
                eqntoken_eval(data);
                POP(b);
            } else {
                /* REDUCE */
                D(6, fprintf(stderr, "Reduce: \n"));
                if (OP_ISANYV(ops[1])) {
                    data->list = eqntoken_next(data->list);
                    r += token_value(tokens[1], &b);
                } else {
                    /* error, need a literal */
                    assert(0);
                }
            }
            D(6, fprintf(stderr, "Doing Binary Op, %g op %g \n", a, b));
            switch (token->op) {
            case OPadd: a = a + b; break;
            case OPsub: a = a - b; break;
            case OPmul: a = a * b; break;
            case OPdiv: if (b != 0.0) a = a / b; else a = 0.0; break;
            case OPexp: a = pow(a, b); break;
            default: assert(0);
            }
            PUSH(a);
        } else if (types[0] == 4) {     /* open/close */
            if (token->op == OPopen) {
                D(6, fprintf(stderr, "open { : \n"));
                loop = 1;
            } else if (token->op == OPclose) {
                D(6, fprintf(stderr, "close } : returning \n"));
                return 0;
            }
        } else if (token->op == OPeolist) {
            /* end-of-list marker: nothing to do */
        }

        if (toend) {
            D(6, fprintf(stderr, "Ending: \n"));
            return 0;
        } else {
            D(6, fprintf(stderr, "Stack: entry=%i now=%i %s \n",
                         stack_entry, data->stackp,
                         ((stack_entry < data->stackp) || loop) ? "looping" : "returning"));
        }
    } while ((stack_entry < data->stackp) || loop);

    return r;
}
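/* Hypothetical, self-contained sketch (not part of the original evaluator):
 * the same shift/reduce-by-precedence idea eqntoken_eval() uses, reduced to a
 * tiny recursive evaluator over single-digit operands in a string.  All names
 * and the input format are invented for illustration. */
#include <math.h>
#include <stdio.h>

static int prec(char op)
{
    switch (op) {
    case '+': case '-': return 1;
    case '*': case '/': return 2;
    case '^':           return 3;
    default:            return 0;
    }
}

static double apply(double a, char op, double b)
{
    switch (op) {
    case '+': return a + b;
    case '-': return a - b;
    case '*': return a * b;
    case '/': return b != 0.0 ? a / b : 0.0;  /* same divide-by-zero policy as eqntoken_eval */
    default:  return pow(a, b);               /* '^' */
    }
}

/* Read one operand, then "shift" (recurse) while the next operator binds
 * tighter than min_prec, otherwise "reduce" by applying the operator. */
static double eval(const char **s, int min_prec)
{
    double a = *(*s)++ - '0';          /* operand: a single digit */

    while (**s && prec(**s) > min_prec) {
        char op = *(*s)++;
        double b = eval(s, prec(op));  /* shift: the right side binds tighter */
        a = apply(a, op, b);           /* reduce */
    }
    return a;
}

int main(void)
{
    const char *expr = "2+3*4";
    printf("%s = %g\n", expr, eval(&expr, 0));   /* prints 2+3*4 = 14 */
    return 0;
}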