/*
 * Reject assignment through a const-qualified lvalue, or to a
 * struct/union that (transitively) contains a const member.
 * Emits an error at `w` and sets fold_had_error on violation.
 */
void expr_assign_const_check(expr *e, where *w)
{
	struct_union_enum_st *su;

	if(type_is_const(e->tree_type)){
		warn_at_print_error(w, "can't modify const expression %s",
				expr_str_friendly(e));
		fold_had_error = 1;
		return;
	}

	/* not directly const - check for an aggregate with a const member */
	su = type_is_s_or_u(e->tree_type);
	if(su && su->contains_const){
		warn_at_print_error(w, "can't assign struct - contains const member");
		fold_had_error = 1;
	}
}
/*
 * Parse the argument of __attribute__((cleanup(fn))):
 *     '(' identifier ')'
 * The identifier is looked up in `scope`; on success a new
 * attr_cleanup attribute holding the resolved decl is returned,
 * otherwise an error is printed and NULL is returned.
 * `ident` (the attribute's own spelling) is unused here.
 */
static attribute *parse_attr_cleanup(symtable *scope, const char *ident)
{
	char *sp;
	where ident_loc;
	attribute *attr = NULL;
	struct symtab_entry ent;

	(void)ident;

	EAT(token_open_paren);

	if(curtok != token_identifier)
		die_at(NULL, "identifier expected for cleanup function");

	/* remember the identifier's location before consuming it,
	 * so the not-found diagnostic points at the right place */
	where_cc1_current(&ident_loc);
	sp = token_current_spel();
	EAT(token_identifier);

	if(symtab_search(scope, sp, NULL, &ent)
	&& ent.type == SYMTAB_ENT_DECL)
	{
		attr = attribute_new(attr_cleanup);
		attr->bits.cleanup = ent.bits.decl;
	}
	else
	{
		warn_at_print_error(&ident_loc, "function '%s' not found", sp);
		fold_had_error = 1;
	}

	/* always consume the closing paren, even on error,
	 * to keep the token stream in sync */
	EAT(token_close_paren);

	return attr;
}
/*
 * Fold a labelled statement: find or create the label entry in the
 * statement's symtab, refresh its location/scope/target statement,
 * diagnose duplicate definitions, then fold the attached statement.
 */
void fold_stmt_label(stmt *s)
{
	label *lbl = symtab_label_find_or_new(
			s->symtab, s->bits.lbl.spel, &s->where);

	/* refresh the label's recorded position, scope and target */
	memcpy_safe(&lbl->where, &s->where);
	lbl->scope = s->symtab;
	lbl->next_stmt = s;

	if(!lbl->complete){
		/* first definition */
		lbl->complete = 1;
	}else{
		warn_at_print_error(&s->where,
				"duplicate label '%s'", s->bits.lbl.spel);
		fold_had_error = 1;
	}

	s->bits.lbl.label = lbl;
	lbl->unused = s->bits.lbl.unused;

	/* fold the (compound) statement the label is attached to */
	fold_stmt(s->lhs);
}
/*
 * Consume the expected token `t` from the token stream.
 *
 * On mismatch: print a parse error (quoting the identifier's spelling
 * when the unexpected token is one), record parse_had_error, and exit
 * if `die` is set or the error limit is exhausted; otherwise continue
 * as if the expected token had been present.
 *
 * On match: either replay a previously saved token (curtok_save) or
 * advance the lexer.
 *
 * `fnam`/`line` identify the calling parser location for diagnostics.
 */
void eat2(enum token t, const char *fnam, int line, int die)
{
	if(t != curtok){
		const int ident = curtok == token_identifier;

		parse_had_error = 1;
		warn_at_print_error(NULL,
				"expecting token %s, got %s %s%s%s(%s:%d)",
				token_to_str(t), token_to_str(curtok),
				ident ? "\"" : "",
				ident ? token_current_spel_peek() : "",
				ident ? "\" " : "",
				fnam, line);

		/* note: when `die` is set the limit isn't decremented
		 * (short-circuit) - we exit regardless */
		if(die || --cc1_error_limit <= 0)
			exit(1);

		/* XXX: we continue here, assuming we had the token anyway */
	}else{
		if(curtok_save != token_unknown){
			/* replay the pushed-back token instead of lexing */
			curtok = curtok_save;
			curtok_save = token_unknown;
		}else{
			nexttoken();
		}
	}
}
/*
 * Check the number of call arguments against the declaration.
 *
 * For prototyped functions, a count mismatch (or too few args to a
 * variadic function) is a hard error, except for old-style
 * (unprototyped) declarations where it is only a warning.
 * Implicitly-(void) functions called with arguments get a warning.
 *
 * Returns 1 if the call is erroneous and folding should stop,
 * 0 otherwise.
 */
static int check_arg_counts(
		funcargs *args_from_decl,
		unsigned count_decl,
		expr **exprargs,
		expr *fnexpr, char *sp)
{
	where *const loc = &fnexpr->where;

	/* this block is purely count checking */
	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl)){
		const unsigned count_arg = dynarray_count(exprargs);

		/* variadic: only too-few is a mismatch */
		if(count_decl != count_arg
		&& (args_from_decl->variadic ? count_arg < count_decl : 1))
		{
			decl *call_decl;

			/* may be args_old_proto but also args_void if copied from
			 * another prototype elsewhere */
			int warn = args_from_decl->args_old_proto
				&& !args_from_decl->args_void;
			int warning_emitted = 1;

			/* shared between the warning and the error path below */
#define common_warning \
			"too %s arguments to function %s%s(got %d, need %d)",\
			count_arg > count_decl ? "many" : "few", \
			sp ? sp : "", \
			sp ? " " : "", \
			count_arg, count_decl

			if(warn){
				warning_emitted = cc1_warn_at(loc, funcall_argcount, common_warning);
			}else{
				warn_at_print_error(loc, common_warning);
			}
#undef common_warning

			/* point at the callee's declaration, if we can find it */
			if(warning_emitted
			&& (call_decl = expr_to_declref(fnexpr->expr, NULL)))
			{
				note_at(&call_decl->where, "'%s' declared here", call_decl->spel);
			}

			if(!warn){
				fold_had_error = 1;
				return 1;
			}
		}
	}else if(args_from_decl->args_void_implicit && exprargs){
		cc1_warn_at(loc, funcall_argcount,
				"too many arguments to implicitly (void)-function");
	}

	return 0;
}
/*
 * Fold an address-of expression.
 *
 * Two forms: the GNU address-of-label extension (&&label), which
 * yields void*, and the regular &lvalue, which yields a pointer to
 * the operand's type.  Diagnoses non-addressable operands and
 * taking the address of a register variable.
 */
void fold_expr_addr(expr *e, symtable *stab)
{
	if(e->bits.lbl.spel){
		/* GNU &&label - only valid inside a function */
		decl *in_func = symtab_func(stab);
		if(!in_func)
			die_at(&e->where, "address-of-label outside a function");

		/* presumably influences how the backend lays out the
		 * function when a label address escapes a static context */
		if(e->bits.lbl.static_ctx)
			in_func->bits.func.contains_static_label_addr = 1;

		(e->bits.lbl.label =
		 symtab_label_find_or_new(
			 stab, e->bits.lbl.spel, &e->where))
			->uses++;

		/* address of label - void * */
		e->tree_type = type_ptr_to(type_nav_btype(cc1_type_nav, type_void));
	}else{
		/* if it's an identifier, act as a read */
		fold_inc_writes_if_sym(e->lhs, stab);

		/* nodecay: &array / &function must not decay the operand */
		fold_expr_nodecay(e->lhs, stab);

		e->tree_type = type_ptr_to(e->lhs->tree_type);

		/* can address: lvalues, arrays and functions */
		if(!expr_is_addressable(e->lhs)){
			warn_at_print_error(&e->where,
					"can't take the address of %s (%s)",
					expr_str_friendly(e->lhs),
					type_to_str(e->lhs->tree_type));
			fold_had_error = 1;
			return;
		}

		if(expr_kind(e->lhs, identifier)){
			sym *sym = e->lhs->bits.ident.bits.ident.sym;

			if(sym){
				decl *d = sym->decl;

				/* C11 6.5.3.2p1: no & on register storage */
				if((d->store & STORE_MASK_STORE) == store_register)
					die_at(&e->lhs->where, "can't take the address of register");
			}
		}

		fold_check_expr(e->lhs,
				FOLD_CHK_ALLOW_VOID | FOLD_CHK_NO_BITFIELD,
				"address-of");
	}
}
/*
 * Require `e` to be a user-assignable lvalue (and not an array).
 * On failure, emit an error mentioning `desc` (e.g. "assignment"),
 * set fold_had_error and return 0; otherwise return 1.
 */
int expr_must_lvalue(expr *e, const char *desc)
{
	const int assignable = expr_is_lval(e) == LVALUE_USER_ASSIGNABLE;

	if(assignable && !type_is_array(e->tree_type))
		return 1;

	/* arrays are lvalues but can't be assigned to */
	warn_at_print_error(&e->where, "%s to %s - %s",
			desc,
			type_to_str(e->tree_type),
			assignable ? "arrays not assignable" : "not an lvalue");
	fold_had_error = 1;

	return 0;
}
/*
 * Type-check each call argument against the corresponding parameter
 * declaration, inserting implicit conversions via
 * fold_type_chk_and_cast_ty().  Also diagnoses incomplete parameter
 * types and validates `[static n]` array parameters.
 *
 * Iterates over the NULL-terminated arglist; argument expressions may
 * run out earlier than the decl list (old-style functions), in which
 * case the remaining decls are only completeness-checked.
 */
static void check_arg_types(
		funcargs *args_from_decl,
		expr **exprargs, symtable *stab,
		char *sp, where *const exprloc)
{
	if(exprargs && args_from_decl->arglist){
		int i;
		char buf[64];
		int finished_expr_args = 0;

		for(i = 0; ; i++){
			decl *decl_arg = args_from_decl->arglist[i];

			if(!decl_arg)
				break;

			if(!type_is_complete(decl_arg->ref)){
				warn_at_print_error(&decl_arg->where,
						"incomplete parameter type '%s'",
						type_to_str(decl_arg->ref));
				fold_had_error = 1;
				note_at(exprloc, "in call here");
			}

			/* exprargs[i] may be NULL - old style function */
			if(finished_expr_args || !exprargs[i]){
				finished_expr_args = 1;
				continue;
			}

			/* describe the argument position for diagnostics */
			ARG_BUF(buf, i, sp);

			fold_type_chk_and_cast_ty(
					decl_arg->ref, &exprargs[i],
					stab, &exprargs[i]->where, buf);

			/* f(int [static 5]) check */
			static_array_check(decl_arg, exprargs[i]);
		}
	}
}
/*
 * Determine the result type of a conditional expression whose
 * operands involve pointers (C11 6.5.15 p6), trying in order:
 *   1. compatible pointer types    -> merged qualified pointer
 *   2. pointer vs null ptr const   -> the pointer side's type
 *   3. two incompatible pointers   -> void* (relaxed, with warning)
 *   4. otherwise                   -> error, fall back to void
 * Each stage only runs if the previous one left e->tree_type unset.
 */
static void try_pointer_propagate(
		expr *e, enum type_cmp cmp,
		type *const tt_l, type *const tt_r)
{
	/* 6.5.15 p6 */
	int l_ptr = !!type_is_ptr_or_block(tt_l);
	int r_ptr = !!type_is_ptr_or_block(tt_r);

	/* if both the second and third operands are pointers */
	if(l_ptr && r_ptr){
		int allowed = TYPE_EQUAL_ANY
			| TYPE_QUAL_ADD
			| TYPE_QUAL_SUB
			| TYPE_QUAL_POINTED_ADD
			| TYPE_QUAL_POINTED_SUB;

		/* compatible (modulo qualifiers): merge qualifiers */
		if(cmp & allowed){
			e->tree_type = pointer_to_qualified(
					type_next(tt_l), tt_l, tt_r);
		}
	}

	if(!e->tree_type && (l_ptr || r_ptr)){
		/* or one is a null pointer constant and the other is a pointer */
		int l_ptr_null = expr_is_null_ptr(
				e->lhs ? e->lhs : e->expr, NULL_STRICT_INT);

		int r_ptr_null = expr_is_null_ptr(e->rhs, NULL_STRICT_INT);

		/* both may still be pointers here */
		if((l_ptr && r_ptr_null) || (r_ptr && l_ptr_null)){
			type *pointed_to;

			if(l_ptr_null != r_ptr_null){
				/* only one is an int - pick the other side */
				pointed_to = type_next(l_ptr_null ? tt_r : tt_l);
			}else{
				/* both are pointers, pick either side */
				pointed_to = type_next(l_ptr ? tt_l : tt_r);
			}

			e->tree_type = pointer_to_qualified(
					pointed_to,
					l_ptr ? tt_l : NULL,
					r_ptr ? tt_r : NULL);
		}
	}

	if(!e->tree_type && l_ptr && r_ptr){
		e->tree_type = pointer_to_qualified(
				type_nav_btype(cc1_type_nav, type_void),
				tt_l, tt_r);

		/* gcc/clang relax the rule here.
		 * 0 ? (A *)0 : (B *)0
		 * becomes a void pointer too */
		if(!type_is_void_ptr(tt_l) && !type_is_void_ptr(tt_r)){
			char buf[TYPE_STATIC_BUFSIZ];

			cc1_warn_at(&e->where, mismatch_conditional,
					"conditional type mismatch (%s vs %s)",
					type_to_str(tt_l), type_to_str_r(buf, tt_r));
		}
	}

	if(!e->tree_type){
		/* nothing matched - hard error, but give the expression a
		 * type so folding can continue */
		char buf[TYPE_STATIC_BUFSIZ];

		warn_at_print_error(&e->where,
				"conditional type mismatch (%s vs %s)",
				type_to_str(tt_l), type_to_str_r(buf, tt_r));

		fold_had_error = 1;
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);
	}
}
/*
 * Fold a (simple) assignment expression: fold both sides, verify the
 * lhs is a modifiable lvalue, type-check/convert the rhs, and handle
 * the special cases of struct copies (lowered to a memcpy builtin)
 * and bitfield stores (truncation check).
 *
 * The result type is the unqualified lhs type (see comment below).
 */
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);

	/* struct copies keep the rhs as an lvalue for the memcpy below */
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		/* recover with int so folding can continue */
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	/* initialisation may legitimately write to a const object */
	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* the only way to get a value into a bitfield (aside from memcpy /
	 * indirection) is via this
	 * hence we're fine doing the truncation check here */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}

	if(is_struct_cpy){
		/* lower the aggregate assignment to a builtin memcpy */
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set is_lval, so we can participate in struct-copy chains
		 * FIXME: don't interpret as an lvalue, e.g. (a = b) = c;
		 * this is currently special cased in expr_is_lval()
		 *
		 * CHECK THIS */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}
/*
 * Fold a function call expression: resolve implicit declarations,
 * verify the callee is callable, check argument counts and types,
 * apply default argument promotions, and run attribute-driven checks
 * (format strings, sentinels, warn_unused, standard-function checks).
 *
 * The call's tree_type becomes the callee's return type (int on
 * error, so folding can continue).
 */
void fold_expr_funcall(expr *e, symtable *stab)
{
	type *func_ty;
	funcargs *args_from_decl;
	char *sp = NULL;
	unsigned count_decl;

	check_implicit_funcall(e, stab, &sp);

	FOLD_EXPR(e->expr, stab);
	func_ty = e->expr->tree_type;

	if(!type_is_callable(func_ty)){
		/* fixed: expr_str_friendly() takes a single argument at the
		 * other call sites in this file - the stray second argument
		 * here was an arity mismatch */
		warn_at_print_error(&e->expr->where,
				"%s-expression (type '%s') not callable",
				expr_str_friendly(e->expr),
				type_to_str(func_ty));

		fold_had_error = 1;
		/* recover with int so folding can continue */
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	e->tree_type = type_func_call(func_ty, &args_from_decl);

	/* func count comparison, only if the func has arg-decls, or the func is f(void) */
	UCC_ASSERT(args_from_decl, "no funcargs for decl %s", sp);

	count_decl = dynarray_count(args_from_decl->arglist);

	if(check_arg_counts(args_from_decl, count_decl, e->funcargs, e, sp))
		return;

	if(e->funcargs){
		check_arg_voidness_and_nonnulls(
				e, stab,
				args_from_decl, count_decl,
				e->funcargs, sp);
	}

	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl))
		check_arg_types(args_from_decl, e->funcargs, stab, sp, &e->where);

	if(e->funcargs)
		default_promote_args(e->funcargs, count_decl, stab);

	if(type_is_s_or_u(e->tree_type)){
		/* handled transparently by the backend */
		e->f_islval = expr_is_lval_struct;

		cc1_warn_at(&e->expr->where,
				aggregate_return,
				"called function returns aggregate (%s)",
				type_to_str(e->tree_type));
	}

	/* attr */
	{
		type *fnty = e->expr->tree_type;

		/* look through decays */
		if(expr_kind(e->expr, cast) && expr_cast_is_lval2rval(e->expr))
			fnty = expr_cast_child(e->expr)->tree_type;

		format_check_call(fnty, e->funcargs, args_from_decl->variadic);

		sentinel_check(
				&e->where, e,
				e->funcargs,
				args_from_decl->variadic,
				count_decl, stab);
	}

	/* check the subexp tree type to get the funcall attributes */
	if(func_or_builtin_attr_present(e, attr_warn_unused))
		e->freestanding = 0; /* needs use */

	if(sp && !cc1_fopt.freestanding)
		check_standard_funcs(sp, e->funcargs);
}