static int check_arg_counts(
		funcargs *args_from_decl,
		unsigned count_decl,
		expr **exprargs,
		expr *fnexpr, char *sp)
{
	where *const loc = &fnexpr->where;

	/* this block is purely count checking */
	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl)){
		const unsigned count_arg  = dynarray_count(exprargs);

		if(count_decl != count_arg
		&& (args_from_decl->variadic ? count_arg < count_decl : 1))
		{
			decl *call_decl;
			/* may be args_old_proto but also args_void if copied from
			 * another prototype elsewhere */
			int warn = args_from_decl->args_old_proto
				&& !args_from_decl->args_void;
			int warning_emitted = 1;

#define common_warning                                         \
					"too %s arguments to function %s%s(got %d, need %d)",\
					count_arg > count_decl ? "many" : "few",             \
					sp ? sp : "",                                        \
					sp ? " " : "",                                       \
					count_arg, count_decl

			if(warn){
				warning_emitted = cc1_warn_at(loc, funcall_argcount, common_warning);
			}else{
				warn_at_print_error(loc, common_warning);
			}

#undef common_warning

			if(warning_emitted
			&& (call_decl = expr_to_declref(fnexpr->expr, NULL)))
			{
				note_at(&call_decl->where, "'%s' declared here", call_decl->spel);
			}

			if(!warn){
				fold_had_error = 1;
				return 1;
			}
		}
	}else if(args_from_decl->args_void_implicit && exprargs){
		cc1_warn_at(loc, funcall_argcount,
				"too many arguments to implicitly (void)-function");
	}
	return 0;
}
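
As a rough illustration of the paths above (hypothetical input, names invented for the sketch): a count mismatch against a full prototype takes the hard-error branch, a mismatch against an old-style (K&R) declaration only warns via cc1_warn_at(), and a variadic prototype is only diagnosed when fewer than the fixed arguments are passed (the count_arg < count_decl test).

/* hypothetical translation unit exercising check_arg_counts() */
int add(int a, int b);            /* full prototype */
int old(a) int a; { return a; }   /* K&R-style definition: args_old_proto is set */

void caller(void)
{
	add(1, 2, 3); /* hard error: "too many arguments to function add (got 3, need 2)" */
	old(1, 2);    /* warning only, via the cc1_warn_at(funcall_argcount, ...) path */
}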
Example #2
void sanitize_boundscheck(
    expr *elhs, expr *erhs,
    out_ctx *octx,
    const out_val *lhs, const out_val *rhs)
{
    decl *array_decl = NULL;
    type *array_ty;
    expr *expr_sz;
    consty sz;
    const out_val *val;

    if(!(cc1_sanitize & CC1_UBSAN))
        return;

    /* whichever operand is the pointer names the array;
     * the other operand is the index value to range-check */
    if(type_is_ptr(elhs->tree_type))
        array_decl = expr_to_declref(elhs, NULL), val = rhs;
    else if(type_is_ptr(erhs->tree_type))
        array_decl = expr_to_declref(erhs, NULL), val = lhs;

    if(!array_decl)
        return;

    /* only decls of array type carry a bound - plain pointers can't be checked */
    if(!(array_ty = type_is(array_decl->ref, type_array)))
        return;

    /* the bound must fold to an integral constant */
    expr_sz = array_ty->bits.array.size;
    if(!expr_sz)
        return;
    const_fold(expr_sz, &sz);

    if(sz.type != CONST_NUM)
        return;

    if(!K_INTEGRAL(sz.bits.num))
        return;

    /* force unsigned compare, which catches negative indexes */
    sanitize_assert_order(val, op_le, sz.bits.num.val.i, uintptr_ty(), octx, "bounds");
}
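
By way of example, here is the kind of access this pass instruments (a sketch, assuming CC1_UBSAN is enabled via the sanitizer flag): the bound must come from a visible array declaration with an integral constant size, and the index is then compared unsigned, which also catches negative values.

/* hypothetical input for sanitize_boundscheck() */
int get(int *p, int i)
{
	int buf[4] = { 0 };
	p[i] = 1;      /* not instrumented: p carries no visible array bound */
	return buf[i]; /* instrumented: i is compared unsigned against the bound 4 */
}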
Example #3
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;
	expr *rhs_nocast;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay
	 *
	 * (see the same code in expr_assign_compound.c)
	 */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* assignment is the only way to get a value into a bitfield
	 * (aside from memcpy / indirection), hence it's sufficient to
	 * do the truncation check here
	 */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}

	rhs_nocast = expr_skip_implicit_casts(e->rhs);
	if(expr_kind(rhs_nocast, funcall)){
		expr *callexpr = rhs_nocast;
		decl *rhs_call_decl = expr_to_declref(callexpr->expr, NULL);

		if(rhs_call_decl && rhs_call_decl->spel && !strcmp(rhs_call_decl->spel, "malloc")){
			c_func_check_malloc(callexpr, e->lhs->tree_type);
		}
	}

	if(is_struct_cpy){
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set f_islval, so we can participate in struct-copy chains
		 * - the result still isn't usable as an lvalue, e.g. (a = b) = c; */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}
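
A few hypothetical assignments exercising the branches above (names invented for the sketch): a void-typed rhs takes the hard-error path, a bitfield store gets the truncation check, a struct assignment is rewritten to a builtin memcpy, and a malloc() call on the rhs is checked against the destination type.

/* hypothetical inputs for fold_expr_assign() */
#include <stdlib.h>

struct S { unsigned bf : 3; int x; };

void f(struct S a, struct S b, int *p)
{
	a.bf = 200;            /* bitfield_trunc_check(): 200 doesn't fit in 3 bits */
	a = b;                 /* struct copy: lowered via builtin_new_memcpy(..., sizeof(struct S)) */
	p = malloc(sizeof *p); /* rhs is a call to "malloc": c_func_check_malloc() runs */
	/* *p = (void)0;          would be the "assignment from void expression" error */
}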