static const out_val *lea_assign_lhs(const expr *e, out_ctx *octx)
{
	/* Emit the assignment's right-hand side (e->expr) first and
	 * discard its value, then compute and return the address of
	 * the left-hand side - the struct identifier being assigned to. */
	const out_val *assigned = gen_expr(e->expr, octx);

	out_val_consume(octx, assigned);

	return gen_expr(e->lhs, octx);
}
/* Generate code for a for-statement:
 *
 *   init -> test -+-> body -> inc -> test ...
 *                 `-> end
 *
 * Four basic blocks are created; the increment block is separate from
 * the body so `continue` can target it (see stmt_init_blks below). */
void gen_stmt_for(const stmt *s, out_ctx *octx)
{
	struct out_dbg_lbl *el[2][2];
	out_blk *blk_test = out_blk_new(octx, "for_test"),
	*blk_body = out_blk_new(octx, "for_body"),
	*blk_end = out_blk_new(octx, "for_end"),
	*blk_inc = out_blk_new(octx, "for_inc");

	/* debug-label bookkeeping for the loop's scope */
	flow_gen(s->flow, s->flow->for_init_symtab, el, octx);

	/* don't else-if, possible to have both (comma-exp for init) */
	if(s->flow->for_init){
		/* init runs once, before the first test; value discarded */
		out_val_consume(octx, gen_expr(s->flow->for_init, octx));
		out_comment(octx, "for-init");
	}

	out_ctrl_transfer_make_current(octx, blk_test);
	if(s->flow->for_while){
		/* conditional loop: branch on the test expression */
		const out_val *for_cond;
		for_cond = gen_expr(s->flow->for_while, octx);
		out_ctrl_branch(octx, for_cond, blk_body, blk_end);
	}else{
		/* no condition, e.g. for(;;) - fall straight into the body */
		out_ctrl_transfer(octx, blk_body, NULL, NULL);
	}

	/* register continue (-> blk_inc) and break (-> blk_end) targets */
	stmt_init_blks(s, blk_inc, blk_end);

	out_current_blk(octx, blk_body);
	{
		gen_stmt(s->lhs, octx); /* the loop body statement */
		out_ctrl_transfer(octx, blk_inc, NULL, NULL);
	}

	out_current_blk(octx, blk_inc);
	{
		if(s->flow->for_inc)
			out_val_consume(octx, gen_expr(s->flow->for_inc, octx));
		out_ctrl_transfer(octx, blk_test, NULL, NULL); /* loop back */
	}

	out_current_blk(octx, blk_end);

	flow_end(s->flow, s->flow->for_init_symtab, el, octx);
}
static const out_val *consume_one(
		out_ctx *octx,
		const out_val *const ret,
		const out_val *const a,
		const out_val *const b)
{
	/* Keep 'ret' (which is one of 'a'/'b') and release the other. */
	const out_val *discard;

	if(ret == a)
		discard = b;
	else
		discard = a;

	out_val_consume(octx, discard);

	return ret;
}
static out_val *try_const_fold(
		out_ctx *octx,
		enum op_type binop,
		const out_val *lhs,
		const out_val *rhs)
{
	/* Attempt to evaluate 'lhs <binop> rhs' at compile time.
	 * On success, both operands are consumed and a fresh literal
	 * holding the result is returned. On failure (const_op_exec
	 * sets 'err'), NULL is returned and the operands are untouched. */
	const char *err = NULL;
	const integral_t eval = const_op_exec(
			lhs->bits.val_i, &rhs->bits.val_i,
			binop, lhs->t, &err);
	type *t;

	if(err)
		return NULL;

	t = lhs->t; /* capture the result type before releasing lhs */

	out_val_consume(octx, lhs);
	out_val_consume(octx, rhs);

	return out_new_l(octx, t, eval);
}
static void gen_expr_compound_lit_code(const expr *e, out_ctx *octx)
{
	expr *initexp;

	/* already mid-generation - don't recurse */
	if(e->expr_comp_lit_cgen)
		return;

	initexp = e->bits.complit.decl->bits.var.init.expr;

	/* mark the literal as in-progress so the nested gen_expr()
	 * call below doesn't come back in here when it emits
	 * references to the compound literal's symbol.
	 *
	 * the flag is cleared again afterwards to permit re-entry on
	 * a later, non-recursive gen_expr() call - function inlining,
	 * for example, can cause this expression to be generated
	 * more than once */
	GEN_CONST_CAST(expr *, e)->expr_comp_lit_cgen = 1;

	if(initexp)
		out_val_consume(octx, gen_expr(initexp, octx));

	GEN_CONST_CAST(expr *, e)->expr_comp_lit_cgen = 0;
}
/* Try to fold "pointer +/- integer-constant" directly into the
 * pointer value's static offset (label offset or register
 * displacement), avoiding an explicit add/sub instruction.
 *
 * Returns the adjusted value (vconst consumed) on success,
 * or NULL if no folding applies (nothing consumed). */
static out_val *try_mem_offset(
		out_ctx *octx,
		enum op_type binop,
		const out_val *vconst,
		const out_val *vregp_or_lbl,
		const out_val *rhs)
{
	int step;

	/* if it's a minus, we enforce an order: only ptr - const is
	 * foldable, so the constant must be on the right-hand side.
	 * label arithmetic is only performed when the symbol-arith
	 * option is enabled, and calc_ptr_step() must yield a valid
	 * element size (-1 means "can't step this type"). */
	if((binop == op_plus || (binop == op_minus && vconst == rhs))
	&& (vregp_or_lbl->type != V_LBL || (fopt_mode & FOPT_SYMBOL_ARITH))
	&& (step = calc_ptr_step(vregp_or_lbl->t)) != -1)
	{
		/* get a mutable copy (or reuse) of the pointer value */
		out_val *mut_vregp_or_lbl = v_dup_or_reuse(
				octx, vregp_or_lbl, vregp_or_lbl->t);
		long *p;

		/* pick the offset field matching the value's kind */
		switch(mut_vregp_or_lbl->type){
			case V_LBL:
				p = &mut_vregp_or_lbl->bits.lbl.offset;
				break;
			case V_REG:
				p = &mut_vregp_or_lbl->bits.regoff.offset;
				break;
			default:
				assert(0);
		}

		/* scale the constant by the pointee size; negate for minus */
		*p += (binop == op_minus ? -1 : 1) * vconst->bits.val_i * step;

		out_val_consume(octx, vconst);

		return mut_vregp_or_lbl;
	}

	return NULL;
}