/*
 * Semantic fold for a compound assignment: "lhs op= rhs".
 *
 * Folds both operands, verifies the lhs is a modifiable lvalue, applies
 * integer promotion to the rhs, and records any promotion/cast types needed
 * for code-gen. On success e->tree_type is set to the (unpromoted) lhs type.
 */
void fold_expr_assign_compound(expr *e, symtable *stab)
{
	const char *const desc = "compound assignment";

	/* shorthand for the assignee throughout this function */
#define lvalue e->lhs

	fold_inc_writes_if_sym(lvalue, stab);

	/* lhs stays an lvalue (no decay); rhs is fully folded/decayed */
	fold_expr_nodecay(e->lhs, stab);
	FOLD_EXPR(e->rhs, stab);

	/* struct/union operands are invalid for arithmetic compound ops */
	fold_check_expr(e->lhs, FOLD_CHK_NO_ST_UN, desc);
	fold_check_expr(e->rhs, FOLD_CHK_NO_ST_UN, desc);

	/* skip the addr we inserted */
	if(!expr_must_lvalue(lvalue, desc)){
		/* not an lvalue: still give e a type so later passes don't
		 * ICE via type_size(vla), etc */
		e->tree_type = lvalue->tree_type;
		return;
	}

	expr_assign_const_check(lvalue, &e->where);

	fold_check_restrict(lvalue, e->rhs, desc, &e->where);

	UCC_ASSERT(op_can_compound(e->bits.compoundop.op), "non-compound op in compound expr");

	/*expr_promote_int_if_smaller(&e->lhs, stab);
	 * lhs int promotion is handled in code-gen */
	expr_promote_int_if_smaller(&e->rhs, stab);

	{
		type *tlhs, *trhs;

		/* compute the usual-arithmetic-conversion result; tlhs/trhs are
		 * set when the respective operand needs casting to that type */
		type *resolved = op_required_promotion(e->bits.compoundop.op, lvalue, e->rhs, &e->where, desc, &tlhs, &trhs);

		if(tlhs){
			/* must cast the lvalue, then down cast once the operation is done
			 * special handling for expr_kind(e->lhs, cast) is done in the gen-code */
			e->bits.compoundop.upcast_ty = tlhs;

		}else if(trhs){
			/* only the rhs needs converting - insert the cast now */
			fold_insert_casts(trhs, &e->rhs, stab);
		}

		/* result type of the whole expression is the lhs's type */
		e->tree_type = lvalue->tree_type;

		(void)resolved;
		/*type_free_1(resolved); XXX: memleak */
	}

	/* type check is done in op_required_promotion() */
#undef lvalue
}
/*
 * Semantic fold for an address-of expression.
 *
 * Two forms:
 *   - GNU address-of-label ("&&label", indicated by e->bits.lbl.spel):
 *     resolves/creates the label in the current function and types the
 *     expression as void*.
 *   - ordinary "&operand": checks the operand is addressable (and not a
 *     register variable or bitfield) and types the expression as a pointer
 *     to the operand's type.
 */
void fold_expr_addr(expr *e, symtable *stab)
{
	if(e->bits.lbl.spel){
		/* "&&label" - only valid inside a function body */
		decl *in_func = symtab_func(stab);
		if(!in_func)
			die_at(&e->where, "address-of-label outside a function");

		/* taking a label's address from a static context forces the
		 * function to be flagged for code-gen purposes */
		if(e->bits.lbl.static_ctx)
			in_func->bits.func.contains_static_label_addr = 1;

		/* resolve (or forward-declare) the label and count this use */
		(e->bits.lbl.label =
		 symtab_label_find_or_new(
			 stab, e->bits.lbl.spel, &e->where))
			->uses++;

		/* address of label - void * */
		e->tree_type = type_ptr_to(type_nav_btype(cc1_type_nav, type_void));
	}else{
		/* if it's an identifier, act as a read */
		fold_inc_writes_if_sym(e->lhs, stab);

		/* fold without decay - we want the array/function itself */
		fold_expr_nodecay(e->lhs, stab);

		e->tree_type = type_ptr_to(e->lhs->tree_type);

		/* can address: lvalues, arrays and functions */
		if(!expr_is_addressable(e->lhs)){
			warn_at_print_error(&e->where, "can't take the address of %s (%s)",
					expr_str_friendly(e->lhs), type_to_str(e->lhs->tree_type));
			fold_had_error = 1;
			return;
		}

		/* &register_var is a constraint violation */
		if(expr_kind(e->lhs, identifier)){
			sym *sym = e->lhs->bits.ident.bits.ident.sym;

			if(sym){
				decl *d = sym->decl;

				if((d->store & STORE_MASK_STORE) == store_register)
					die_at(&e->lhs->where, "can't take the address of register");
			}
		}

		/* &void_expr is allowed; &bitfield is not */
		fold_check_expr(e->lhs,
				FOLD_CHK_ALLOW_VOID | FOLD_CHK_NO_BITFIELD,
				"address-of");
	}
}
/*
 * Semantic fold for a simple assignment: "lhs = rhs".
 *
 * Folds both sides, rejects void-typed rhs, enforces lvalue/const/restrict
 * rules, type-checks (inserting an implicit cast on the rhs if needed), and
 * handles two special cases: bitfield truncation warnings and struct/union
 * copies (rewritten as a memcpy builtin). The expression's type is the
 * unqualified lhs type.
 */
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);

	/* struct copies keep the rhs as an lvalue (for memcpy below);
	 * everything else gets the usual lvalue-to-rvalue conversion */
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		/* fall back to int so later passes have a usable type */
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	/* const-assignment is fine when this is really an initialisation */
	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* the only way to get a value into a bitfield (aside from memcpy / indirection) is via this
	 * hence we're fine doing the truncation check here */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}

	if(is_struct_cpy){
		/* rewrite the struct assignment as a call to the memcpy builtin,
		 * copying sizeof(rhs type) bytes */
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set is_lval, so we can participate in struct-copy chains
		 * FIXME: don't interpret as an lvalue, e.g. (a = b) = c;
		 * this is currently special cased in expr_is_lval()
		 *
		 * CHECK THIS */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}