Example 1
static void gen_expr_struct_lea(expr *e)
{
	ASSERT_NOT_DOT();

	gen_expr(e->lhs);

	/* cast for void* arithmetic */
	out_change_type(type_ptr_to(type_nav_btype(cc1_type_nav, type_void)));
	out_push_l(type_nav_btype(cc1_type_nav, type_intptr_t), struct_offset(e)); /* integral offset */
	out_op(op_plus);

	if(fopt_mode & FOPT_VERBOSE_ASM)
		out_comment("struct member %s", e->bits.struct_mem.d->spel);


	{
		decl *d = e->bits.struct_mem.d;

		out_change_type(type_ptr_to(d->ref));

	/* if we're a bitfield, record it so that out_deref() and out_store()
	 * (i.e. read and write) handle it
	 */
		if(d->bits.var.field_width){
			unsigned w = const_fold_val_i(d->bits.var.field_width);
			out_set_bitfield(d->bits.var.struct_offset_bitfield, w);
			out_comment("struct bitfield lea");
		}
	}
}
Example 2
static void test_quals(void)
{
	type *tint = type_nav_btype(cc1_type_nav, type_int);
	type *tconstint = type_qualify(tint, qual_const);

	test(tconstint == type_qualify(tconstint, qual_const));
}
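A hedged companion check, not from the original suite: a sketch that assumes the same test() harness and that type_qual() reads the qualifier bits back, as it is used in the fold_expr_struct example further down.

static void test_quals_readback(void)
{
	/* hypothetical extra check: the qualifier applied above should be
	 * visible via type_qual() (assumed from its use in fold_expr_struct) */
	type *tconstint = type_qualify(
			type_nav_btype(cc1_type_nav, type_int), qual_const);

	test((type_qual(tconstint) & qual_const) != 0);
}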
Example 3
type *type_sign(struct type_nav *root, type *ty, int make_signed)
{
	enum type_primitive prim = type_get_primitive(ty);
	int is_signed;

	assert(prim != type_unknown);
	assert(type_intrank(prim) != -1);

	is_signed = type_primitive_is_signed(prim, 1);

	if(make_signed){
		if(!is_signed){
			if(TYPE_PRIMITIVE_IS_CHAR(prim)){
				prim = type_schar;
			}else{
				prim = TYPE_PRIMITIVE_TO_SIGNED(prim);
			}
		}
	}else if(is_signed){
		if(TYPE_PRIMITIVE_IS_CHAR(prim)){
			prim = type_uchar;
		}else{
			prim = TYPE_PRIMITIVE_TO_UNSIGNED(prim);
		}
	}

	return type_nav_btype(root, prim);
}
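A minimal usage sketch for type_sign(), assuming the shared cc1_type_nav root and the type_nchar primitive seen in the other examples; illustrative only.

static void example_type_sign(void)
{
	/* int -> its unsigned counterpart (make_signed == 0) */
	type *tuint = type_sign(
			cc1_type_nav,
			type_nav_btype(cc1_type_nav, type_int),
			0);

	/* plain char -> signed char, via the TYPE_PRIMITIVE_IS_CHAR branch above */
	type *tschar = type_sign(
			cc1_type_nav,
			type_nav_btype(cc1_type_nav, type_nchar),
			1);

	(void)tuint;
	(void)tschar;
}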
Example 4
type *type_nav_MAX_FOR(struct type_nav *root, unsigned sz)
{
	enum type_primitive p = type_primitive_not_less_than_size(sz);
	if(p != type_unknown)
		return type_nav_btype(root, p);
	assert(0 && "no type max");
}
Example 5
static void sanitize_assert(const out_val *cond, out_ctx *octx, const char *desc)
{
    out_blk *land = out_blk_new(octx, "san_end");
    out_blk *blk_undef = out_blk_new(octx, "san_bad");

    out_ctrl_branch(octx,
                    cond,
                    land,
                    blk_undef);

    out_current_blk(octx, blk_undef);
    out_comment(octx, "sanitizer for %s", desc);
    if(cc1_sanitize_handler_fn) {
        type *voidty = type_nav_btype(cc1_type_nav, type_void);
        funcargs *args = funcargs_new();
        type *fnty_noptr = type_func_of(voidty, args, NULL);
        type *fnty_ptr = type_ptr_to(fnty_noptr);
        char *mangled = func_mangle(cc1_sanitize_handler_fn, fnty_noptr);

        const out_val *fn = out_new_lbl(octx, fnty_ptr, mangled, 0);

        out_val_release(octx, out_call(octx, fn, NULL, fnty_ptr));

        if(mangled != cc1_sanitize_handler_fn)
            free(mangled);
    }
    out_ctrl_end_undefined(octx);

    out_current_blk(octx, land);
}
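A call-site sketch, assuming octx is the usual out_ctx handle; overflow_ok is a hypothetical const out_val * holding the condition, true when the checked operation is well-defined (so control falls through to san_end).

    /* emit a runtime check after some generated arithmetic */
    sanitize_assert(overflow_ok, octx, "signed integer overflow");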
Example 6
type *type_nav_MAX_FOR(struct type_nav *root, unsigned sz, int is_signed)
{
	enum type_primitive p = type_primitive_not_less_than_size(sz, is_signed);

	UCC_ASSERT(p != type_unknown, "no type max for %u", sz);

	return type_nav_btype(root, p);
}
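A brief usage sketch for this three-argument revision, assuming platform_word_size() as used in the register-overlay example below and an unsigned result (is_signed == 0).

static type *word_max_type(void)
{
	/* hypothetical helper: an unsigned integer type at least one machine word wide */
	return type_nav_MAX_FOR(cc1_type_nav, platform_word_size(), 0);
}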
Example 7
static void check_implicit_funcall(expr *e, symtable *stab, char **const psp)
{
	struct symtab_entry ent;
	funcargs *args;
	decl *df, *owning_func;
	type *func_ty;

	if(e->expr->in_parens
	|| !expr_kind(e->expr, identifier)
	/* not folded yet, hence no 'e->expr->bits.ident.type != IDENT_NORM' */
	/* get the spel that parse stashes in the identifier expr: */
	|| !((*psp) = e->expr->bits.ident.bits.ident.spel))
	{
		return;
	}

	/* check for implicit function */
	if(symtab_search(stab, *psp, NULL, &ent)
	&& ent.type == SYMTAB_ENT_DECL)
	{
		e->expr->bits.ident.bits.ident.sym = ent.bits.decl->sym;
		return;
	}

	args = funcargs_new();

	/* set up the funcargs as if it's "x()" - i.e. any args */
	funcargs_empty(args);

	func_ty = type_func_of(
			type_nav_btype(cc1_type_nav, type_int),
			args,
			symtab_new(stab, &e->where) /*new symtable for args*/);

	cc1_warn_at(&e->expr->where, implicit_func,
			"implicit declaration of function \"%s\"", *psp);

	df = decl_new();
	memcpy_safe(&df->where, &e->where);
	df->ref = func_ty;
	df->spel = e->expr->bits.ident.bits.ident.spel;
	df->flags |= DECL_FLAGS_IMPLICIT;

	fold_decl(df, stab); /* e.g. to update the calling convention */

	df->sym->type = sym_global;

	e->expr->bits.ident.bits.ident.sym = df->sym;
	e->expr->tree_type = func_ty;

	owning_func = symtab_func(stab);
	if(owning_func)
		symtab_insert_before(symtab_root(stab), owning_func, df);
	else
		symtab_add_to_scope(symtab_root(stab), df); /* function call at global scope */
}
Example 8
void v_stack_adj(out_ctx *octx, v_stackt amt, int sub)
{
	out_flush_volatile(
			octx,
			out_op(
				octx, sub ? op_minus : op_plus,
				v_new_sp(octx, NULL),
				out_new_l(
					octx,
					type_nav_btype(cc1_type_nav, type_intptr_t),
					amt)));
}
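A hedged call-site sketch: it assumes octx is an out_ctx as elsewhere, and that sub == 1 (op_minus on the stack pointer) allocates on a downward-growing stack.

	/* reserve 32 bytes of stack, emit code that uses it, then release it */
	v_stack_adj(octx, 32, 1 /* sub: allocate */);
	/* ... generated code using the space ... */
	v_stack_adj(octx, 32, 0 /* add: release */);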
Example 9
void fold_expr_addr(expr *e, symtable *stab)
{
	if(e->bits.lbl.spel){
		decl *in_func = symtab_func(stab);

		if(!in_func)
			die_at(&e->where, "address-of-label outside a function");

		if(e->bits.lbl.static_ctx)
			in_func->bits.func.contains_static_label_addr = 1;

		(e->bits.lbl.label =
		 symtab_label_find_or_new(
			 stab, e->bits.lbl.spel, &e->where))
			->uses++;

		/* address of label - void * */
		e->tree_type = type_ptr_to(type_nav_btype(cc1_type_nav, type_void));

	}else{
		/* if it's an identifier, act as a read */
		fold_inc_writes_if_sym(e->lhs, stab);

		fold_expr_nodecay(e->lhs, stab);

		e->tree_type = type_ptr_to(e->lhs->tree_type);

		/* can address: lvalues, arrays and functions */
		if(!expr_is_addressable(e->lhs)){
			warn_at_print_error(&e->where, "can't take the address of %s (%s)",
					expr_str_friendly(e->lhs), type_to_str(e->lhs->tree_type));
			fold_had_error = 1;
			return;
		}

		if(expr_kind(e->lhs, identifier)){
			sym *sym = e->lhs->bits.ident.bits.ident.sym;
			if(sym){
				decl *d = sym->decl;

				if((d->store & STORE_MASK_STORE) == store_register)
					die_at(&e->lhs->where, "can't take the address of register");
			}
		}

		fold_check_expr(e->lhs, FOLD_CHK_ALLOW_VOID | FOLD_CHK_NO_BITFIELD,
				"address-of");
	}
}
Example 10
const out_val *out_aalloc(
		out_ctx *octx, unsigned sz, unsigned align, type *in_ty)
{
	type *ty = type_ptr_to(in_ty
		? in_ty : type_nav_btype(cc1_type_nav, type_nchar));

	align_sz(&sz, align);

	/* packing takes care of everything */
	pack_next(&octx->cur_stack_sz, NULL, sz, align);

	v_set_cur_stack_sz(octx, octx->cur_stack_sz);

	return v_new_bp3_below(octx, NULL, ty, octx->cur_stack_sz);
}
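A usage sketch, assuming octx is an out_ctx as in the other backend examples, and that a NULL in_ty falls back to type_nchar as the code above shows; the size and alignment are illustrative.

	/* allocate an 8-byte, 8-aligned scratch slot, typed as char * */
	const out_val *slot = out_aalloc(octx, 8, 8, NULL);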
Example 11
void fold_expr_str(expr *e, symtable *stab)
{
	const stringlit *const strlit = e->bits.strlit.lit_at.lit;
	expr *sz;

	sz = expr_new_val(strlit->len);
	FOLD_EXPR(sz, stab);

	/* (const? char []) */
	e->tree_type = type_array_of(
			type_qualify(
				type_nav_btype(
					cc1_type_nav,
					strlit->wide ? type_wchar : type_nchar),
				e->bits.strlit.is_func ? qual_const : qual_none),
			sz);
}
Example 12
void fold_expr_stmt(expr *e, symtable *stab)
{
	stmt *last_stmt;
	int last;

	(void)stab;

	last = dynarray_count(e->code->bits.code.stmts);
	if(last){
		last_stmt = e->code->bits.code.stmts[last - 1];
		last_stmt->freestanding = 1; /* allow the final statement to be freestanding */
		last_stmt->expr_no_pop = 1;
	}

	fold_stmt(e->code); /* symtab should've been set by parse */

	if(last && stmt_kind(last_stmt, expr)){
		expr *last_expr = last_stmt->expr;

		e->tree_type = last_expr->tree_type;
		if(fold_check_expr(e,
				FOLD_CHK_ALLOW_VOID,
				"({ ... }) statement"))
		{
			return;
		}

		switch(expr_is_lval(last_expr)){
			case LVALUE_NO:
				break;
			case LVALUE_STRUCT:
			case LVALUE_USER_ASSIGNABLE:
				e->f_islval = expr_is_lval_struct;
		}
	}else{
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);
	}

	e->freestanding = 1; /* ({ ... }) on its own is freestanding */
}
Example 13
static type *uintptr_ty(void)
{
    return type_nav_btype(cc1_type_nav, type_uintptr_t);
}
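A small usage sketch, assuming out_change_type(octx, ...) as in the register-overlay example is a consumer of this helper; as_uintptr is a hypothetical wrapper.

static const out_val *as_uintptr(out_ctx *octx, const out_val *v)
{
    /* reinterpret a pointer-typed value as uintptr_t for integer arithmetic */
    return out_change_type(octx, v, uintptr_ty());
}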
Example 14
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay
	 */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* the only way to get a value into a bitfield (aside from memcpy / indirection)
	 * is via this assignment, hence we're fine doing the truncation check here
	 */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}


	if(is_struct_cpy){
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set is_lval, so we can participate in struct-copy chains
		 * FIXME: don't interpret as an lvalue, e.g. (a = b) = c;
		 * this is currently special cased in expr_is_lval()
		 *
		 * CHECK THIS
		 */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}
Example 15
void fold_expr_struct(expr *e, symtable *stab)
{
	/*
	 * lhs = any ptr-to-struct expr
	 * rhs = struct member ident
	 */
	const int ptr_expect = !e->expr_is_st_dot;
	struct_union_enum_st *sue;
	char *spel;

	fold_expr_nodecay(e->lhs, stab);
	/* don't fold the rhs - just a member name */

	if(e->rhs){
		UCC_ASSERT(expr_kind(e->rhs, identifier),
				"struct/union member not identifier (%s)", e->rhs->f_str());

		UCC_ASSERT(!e->bits.struct_mem.d, "already have a struct-member");

		spel = e->rhs->bits.ident.spel;
	}else{
		UCC_ASSERT(e->bits.struct_mem.d, "no member specified already?");
		spel = NULL;
	}

	/* we access a struct, of the right ptr depth */
	{
		type *r = e->lhs->tree_type;

		if(ptr_expect){
			type *rtest = type_is(r, type_ptr);

			if(!rtest && !(rtest = type_is(r, type_array)))
				goto err;

			r = rtest->ref; /* safe - rtest is a pointer or array */
		}

		if(!(sue = type_is_s_or_u(r))){
err:
			die_at(&e->lhs->where, "'%s' (%s-expr) is not a %sstruct or union (member %s)",
					type_to_str(e->lhs->tree_type),
					e->lhs->f_str(),
					ptr_expect ? "pointer to " : "",
					spel);
		}
	}

	if(!sue_complete(sue)){
		char wbuf[WHERE_BUF_SIZ];

		die_at(&e->lhs->where, "%s incomplete type (%s)\n"
				"%s: note: forward declared here",
				ptr_expect
					? "dereferencing pointer to"
					: "accessing member of",
				type_to_str(e->lhs->tree_type),
				where_str_r(wbuf, &sue->where));
	}

	if(spel){
		/* found the struct, find the member */
		decl *d_mem = struct_union_member_find(sue, spel,
				&e->bits.struct_mem.extra_off, NULL);

		if(!d_mem)
			die_at(&e->where, "%s %s has no member named \"%s\"",
					sue_str(sue), sue->spel, spel);

		e->rhs->tree_type = (e->bits.struct_mem.d = d_mem)->ref;
	}/* else already have the member */

	/*
	 * if it's a.b, convert to (&a)->b for asm gen
	 * e = { lhs = "a", rhs = "b", type = dot }
	 * e = {
	 *   type = ptr,
	 *   lhs = { cast<void *>, expr = { expr = "a", type = addr } },
	 *   rhs = "b",
	 * }
	 */
	if(!ptr_expect){
		expr *cast, *addr;

		addr = expr_new_addr(e->lhs);
		cast = expr_new_cast(addr,
				type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
				1);

		e->lhs = cast;
		e->expr_is_st_dot = 0;

		FOLD_EXPR(e->lhs, stab);
	}

	/* pull qualifiers from the struct to the member */
	e->tree_type = type_qualify(
			e->bits.struct_mem.d->ref,
			type_qual(e->lhs->tree_type));
}
Example 16
static void apply_ptr_step(
		out_ctx *octx,
		const out_val **lhs, const out_val **rhs,
		const out_val **div_out)
{
	int l_ptr = !!type_is((*lhs)->t, type_ptr);
	int r_ptr = !!type_is((*rhs)->t, type_ptr);
	int ptr_step;

	if(!l_ptr && !r_ptr)
		return;

	ptr_step = calc_ptr_step((l_ptr ? *lhs : *rhs)->t);

	if(l_ptr ^ r_ptr){
		/* ptr +/- int, adjust the non-ptr by sizeof *ptr */
		const out_val **incdec = (l_ptr ? rhs : lhs);
		out_val *mut_incdec;

		*incdec = mut_incdec = v_dup_or_reuse(octx, *incdec, (*incdec)->t);

		switch(mut_incdec->type){
			case V_CONST_I:
				if(ptr_step == -1){
					*incdec = out_op(octx, op_multiply,
							*incdec,
							vla_size(
								type_next((l_ptr ? *lhs : *rhs)->t),
								octx));

					mut_incdec = NULL; /* safety */
				}else{
					mut_incdec->bits.val_i *= ptr_step;
				}
				break;

			case V_CONST_F:
				assert(0 && "float pointer inc?");

			case V_LBL:
			case V_FLAG:
			case V_REG_SPILT:
				assert(mut_incdec->retains == 1);
				*incdec = (out_val *)v_to_reg(octx, *incdec);
				/* fall through */

			case V_REG:
			{
				const out_val *n;
				if(ptr_step == -1){
					n = vla_size(
							type_next((l_ptr ? *lhs : *rhs)->t),
							octx);
				}else{
					n = out_new_l(
						octx,
						type_nav_btype(cc1_type_nav, type_intptr_t),
						ptr_step);
				}

				*incdec = (out_val *)out_op(octx, op_multiply, *incdec, n);
				break;
			}
		}

	}else if(l_ptr && r_ptr){
		/* difference - divide afterwards */
		if(ptr_step == -1){
			*div_out = vla_size(type_next((*lhs)->t), octx);
		}else{
			*div_out = out_new_l(octx,
					type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
					ptr_step);
		}
	}
}
Example 17
void fold_expr_if(expr *e, symtable *stab)
{
	const char *desc = "?:";
	consty konst;
	type *tt_l, *tt_r;

	FOLD_EXPR(e->expr, stab);
	const_fold(e->expr, &konst);

	fold_check_expr(e->expr, FOLD_CHK_NO_ST_UN, desc);

	if(e->lhs){
		e->lhs = fold_expr_nonstructdecay(e->lhs, stab);
		fold_check_expr(e->lhs,
				FOLD_CHK_ALLOW_VOID,
				"?: left operand");
	}

	e->rhs = fold_expr_nonstructdecay(e->rhs, stab);
	fold_check_expr(e->rhs,
			FOLD_CHK_ALLOW_VOID,
			"?: right operand");

	e->freestanding = (e->lhs ? e->lhs : e->expr)->freestanding || e->rhs->freestanding;

	/*

	Arithmetic                             Arithmetic                           Arithmetic type after usual arithmetic conversions
	Structure or union type                Compatible structure or union type   Structure or union type with all the qualifiers on both operands
	void                                   void                                 void
	Pointer to compatible type             Pointer to compatible type           Pointer to type with all the qualifiers specified for the type
	Pointer to type                        NULL pointer (the constant 0)        Pointer to type
	Pointer to object or incomplete type   Pointer to void                      Pointer to void with all the qualifiers specified for the type

	GCC and Clang seem to relax the last rule:
		a) resolve if either is any pointer, not just (void *)
		b) resolve to a pointer to the incomplete-type
	*/

	tt_l = (e->lhs ? e->lhs : e->expr)->tree_type;
	tt_r = e->rhs->tree_type;


	/* C11 6.5.15 */
	if(type_is_arith(tt_l) && type_is_arith(tt_r)){
		/* 6.5.15 p4 */
		expr **middle_op = e->lhs ? &e->lhs : &e->expr;

		expr_check_sign(desc, *middle_op, e->rhs, &e->where);

		e->tree_type = op_promote_types(
				op_unknown,
				middle_op, &e->rhs, stab,
				&e->where, desc);

	}else if(type_is_void(tt_l) || type_is_void(tt_r)){
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);

	}else{
		const enum type_cmp cmp = type_cmp(tt_l, tt_r, 0);

		if((cmp & (TYPE_EQUAL_ANY | TYPE_QUAL_ADD | TYPE_QUAL_SUB))
		&& type_is_s_or_u(tt_l))
		{
			e->f_islval = expr_is_lval_struct;
			e->tree_type = type_qualify(tt_l, type_qual(tt_l) | type_qual(tt_r));

		}else{
			try_pointer_propagate(e, cmp, tt_l, tt_r);
		}
	}
}
Example 18
static void impl_overlay_mem_reg(
		out_ctx *octx,
		unsigned memsz, unsigned nregs,
		struct vreg regs[], int mem2reg,
		const out_val *ptr)
{
	const unsigned pws = platform_word_size();
	struct vreg *cur_reg = regs;
	unsigned reg_i = 0;

	if(memsz == 0){
		out_val_release(octx, ptr);
		return;
	}

	UCC_ASSERT(
			nregs * pws >= memsz,
			"not enough registers for memory overlay");

	out_comment(octx,
			"overlay, %s2%s(%u)",
			mem2reg ? "mem" : "reg",
			mem2reg ? "reg" : "mem",
			memsz);

	if(!mem2reg){
		/* reserve all registers so we don't accidentally wipe before the spill */
		for(reg_i = 0; reg_i < nregs; reg_i++)
			v_reserve_reg(octx, &regs[reg_i]);
	}

	for(;; cur_reg++, reg_i++){
		/* read/write whatever size is required */
		type *this_ty;
		unsigned this_sz;

		if(cur_reg->is_float){
			UCC_ASSERT(memsz >= 4, "float for memsz %u?", memsz);

			this_ty = type_nav_btype(
					cc1_type_nav,
					memsz > 4 ? type_double : type_float);

		}else{
			this_ty = type_nav_MAX_FOR(cc1_type_nav, memsz);
		}
		this_sz = type_size(this_ty, NULL);

		UCC_ASSERT(this_sz <= memsz, "reading/writing too much memory");

		ptr = out_change_type(octx, ptr, type_ptr_to(this_ty));

		out_val_retain(octx, ptr);

		if(mem2reg){
			const out_val *fetched;

			/* can use impl_deref, as we have a register already,
			 * and know that the memory is an lvalue and not a bitfield
			 *
			 * this means we can load straight into the desired register
			 */
			fetched = impl_deref(octx, ptr, cur_reg);

			UCC_ASSERT(reg_i < nregs, "reg oob");

			if(fetched->type != V_REG || !vreg_eq(&fetched->bits.regoff.reg, cur_reg)){
				/* move to register */
				v_freeup_reg(octx, cur_reg);
				fetched = v_to_reg_given(octx, fetched, cur_reg);
			}
			out_flush_volatile(octx, fetched);
			v_reserve_reg(octx, cur_reg); /* prevent changes */

		}else{
			const out_val *vreg = v_new_reg(octx, NULL, this_ty, cur_reg);

			out_store(octx, ptr, vreg);
		}

		memsz -= this_sz;

		/* early termination */
		if(memsz == 0)
			break;

		/* increment our memory pointer */
		ptr = out_change_type(
				octx,
				ptr,
				type_ptr_to(type_nav_btype(cc1_type_nav, type_uchar)));

		ptr = out_op(octx, op_plus,
				ptr,
				out_new_l(
					octx,
					type_nav_btype(cc1_type_nav, type_intptr_t),
					pws));
	}

	out_val_release(octx, ptr);

	/* done, unreserve all registers */
	for(reg_i = 0; reg_i < nregs; reg_i++)
		v_unreserve_reg(octx, &regs[reg_i]);
}
Example 19
void fold_expr_funcall(expr *e, symtable *stab)
{
	type *func_ty;
	funcargs *args_from_decl;
	char *sp = NULL;
	unsigned count_decl;

	check_implicit_funcall(e, stab, &sp);

	FOLD_EXPR(e->expr, stab);
	func_ty = e->expr->tree_type;

	if(!type_is_callable(func_ty)){
		warn_at_print_error(&e->expr->where,
				"%s-expression (type '%s') not callable",
				expr_str_friendly(e->expr, 0),
				type_to_str(func_ty));

		fold_had_error = 1;

		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	e->tree_type = type_func_call(func_ty, &args_from_decl);

	/* func count comparison, only if the func has arg-decls, or the func is f(void) */
	UCC_ASSERT(args_from_decl, "no funcargs for decl %s", sp);

	count_decl = dynarray_count(args_from_decl->arglist);

	if(check_arg_counts(args_from_decl, count_decl, e->funcargs, e, sp))
		return;

	if(e->funcargs){
		check_arg_voidness_and_nonnulls(
				e, stab,
				args_from_decl, count_decl,
				e->funcargs, sp);
	}

	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl))
		check_arg_types(args_from_decl, e->funcargs, stab, sp, &e->where);

	if(e->funcargs)
		default_promote_args(e->funcargs, count_decl, stab);

	if(type_is_s_or_u(e->tree_type)){
		/* handled transparently by the backend */
		e->f_islval = expr_is_lval_struct;

		cc1_warn_at(&e->expr->where,
				aggregate_return,
				"called function returns aggregate (%s)",
				type_to_str(e->tree_type));
	}

	/* attr */
	{
		type *fnty = e->expr->tree_type;

		/* look through decays */
		if(expr_kind(e->expr, cast) && expr_cast_is_lval2rval(e->expr))
			fnty = expr_cast_child(e->expr)->tree_type;

		format_check_call(fnty, e->funcargs, args_from_decl->variadic);

		sentinel_check(
				&e->where, e,
				e->funcargs, args_from_decl->variadic,
				count_decl, stab);
	}

	/* check the subexp tree type to get the funcall attributes */
	if(func_or_builtin_attr_present(e, attr_warn_unused))
		e->freestanding = 0; /* needs use */

	if(sp && !cc1_fopt.freestanding)
		check_standard_funcs(sp, e->funcargs);
}
Example 20
static void try_pointer_propagate(
		expr *e, enum type_cmp cmp,
		type *const tt_l, type *const tt_r)
{
	/* 6.5.15 p6 */
	int l_ptr = !!type_is_ptr_or_block(tt_l);
	int r_ptr = !!type_is_ptr_or_block(tt_r);

	/* if both the second and third operands are pointers */
	if(l_ptr && r_ptr){
		int allowed = TYPE_EQUAL_ANY
				| TYPE_QUAL_ADD
				| TYPE_QUAL_SUB
				| TYPE_QUAL_POINTED_ADD
				| TYPE_QUAL_POINTED_SUB;

		if(cmp & allowed){
			e->tree_type = pointer_to_qualified(type_next(tt_l), tt_l, tt_r);
		}
	}

	if(!e->tree_type && (l_ptr || r_ptr)){
		/* or one is a null pointer constant and the other is a pointer */
		int l_ptr_null = expr_is_null_ptr(
				e->lhs ? e->lhs : e->expr, NULL_STRICT_INT);

		int r_ptr_null = expr_is_null_ptr(e->rhs, NULL_STRICT_INT);

		/* both may still be pointers here */
		if((l_ptr && r_ptr_null) || (r_ptr && l_ptr_null)){
			type *pointed_to;

			if(l_ptr_null != r_ptr_null){
				/* only one is an int - pick the other side */
				pointed_to = type_next(l_ptr_null ? tt_r : tt_l);
			}else{
				/* both are pointers, pick either side */
				pointed_to = type_next(l_ptr ? tt_l : tt_r);
			}

			e->tree_type = pointer_to_qualified(
					pointed_to,
					l_ptr ? tt_l : NULL,
					r_ptr ? tt_r : NULL);
		}
	}

	if(!e->tree_type && l_ptr && r_ptr){
		e->tree_type = pointer_to_qualified(
					type_nav_btype(cc1_type_nav, type_void),
					tt_l, tt_r);

		/* gcc/clang relax the rule here.
		 * 0 ? (A *)0 : (B *)0
		 * becomes a void pointer too */
		if(!type_is_void_ptr(tt_l) && !type_is_void_ptr(tt_r)){
			char buf[TYPE_STATIC_BUFSIZ];

			cc1_warn_at(&e->where,
					mismatch_conditional,
					"conditional type mismatch (%s vs %s)",
					type_to_str(tt_l), type_to_str_r(buf, tt_r));
		}
	}

	if(!e->tree_type){
		char buf[TYPE_STATIC_BUFSIZ];

		warn_at_print_error(&e->where, "conditional type mismatch (%s vs %s)",
				type_to_str(tt_l), type_to_str_r(buf, tt_r));

		fold_had_error = 1;

		e->tree_type = type_nav_btype(cc1_type_nav, type_void);
	}
}
Example 21
void fold_expr_if(expr *e, symtable *stab)
{
	consty konst;
	type *tt_l, *tt_r;

	FOLD_EXPR(e->expr, stab);
	const_fold(e->expr, &konst);

	fold_check_expr(e->expr, FOLD_CHK_NO_ST_UN, "if-expr");

	if(e->lhs){
		FOLD_EXPR(e->lhs, stab);
		fold_check_expr(e->lhs,
				FOLD_CHK_NO_ST_UN | FOLD_CHK_ALLOW_VOID,
				"if-lhs");
	}

	FOLD_EXPR(e->rhs, stab);
	fold_check_expr(e->rhs,
			FOLD_CHK_NO_ST_UN | FOLD_CHK_ALLOW_VOID,
			"if-rhs");


	/*

	Arithmetic                             Arithmetic                           Arithmetic type after usual arithmetic conversions
	// Structure or union type                Compatible structure or union type   Structure or union type with all the qualifiers on both operands
	void                                   void                                 void
	Pointer to compatible type             Pointer to compatible type           Pointer to type with all the qualifiers specified for the type
	Pointer to type                        NULL pointer (the constant 0)        Pointer to type
	Pointer to object or incomplete type   Pointer to void                      Pointer to void with all the qualifiers specified for the type

	GCC and Clang seem to relax the last rule:
		a) resolve if either is any pointer, not just (void *)
		b) resolve to a pointer to the incomplete-type
	*/

	tt_l = (e->lhs ? e->lhs : e->expr)->tree_type;
	tt_r = e->rhs->tree_type;

	if(type_is_integral(tt_l) && type_is_integral(tt_r)){
		expr **middle_op = e->lhs ? &e->lhs : &e->expr;

		expr_check_sign("?:", *middle_op, e->rhs, &e->where);

		e->tree_type = op_promote_types(
				op_unknown,
				middle_op, &e->rhs, &e->where, stab);

	}else if(type_is_void(tt_l) || type_is_void(tt_r)){
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);

	}else if(type_cmp(tt_l, tt_r, 0) & TYPE_EQUAL_ANY){
		/* pointer to 'compatible' type */
		e->tree_type = type_qualify(tt_l,
				type_qual(tt_l) | type_qual(tt_r));

	}else{
		/* brace yourself. */
		int l_ptr_null = expr_is_null_ptr(
				e->lhs ? e->lhs : e->expr, NULL_STRICT_VOID_PTR);

		int r_ptr_null = expr_is_null_ptr(e->rhs, NULL_STRICT_VOID_PTR);

		int l_complete = !l_ptr_null && type_is_complete(tt_l);
		int r_complete = !r_ptr_null && type_is_complete(tt_r);

		if((l_complete && r_ptr_null) || (r_complete && l_ptr_null)){
			e->tree_type = l_ptr_null ? tt_r : tt_l;

		}else{
			int l_ptr = l_ptr_null || type_is(tt_l, type_ptr);
			int r_ptr = r_ptr_null || type_is(tt_r, type_ptr);

			if(l_ptr || r_ptr){
				fold_type_chk_warn(
						tt_l, tt_r, &e->where, "?: pointer type mismatch");

				/* qualified void * */
				e->tree_type = type_qualify(
						type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
						type_qual(tt_l) | type_qual(tt_r));

			}else{
				char buf[TYPE_STATIC_BUFSIZ];

				warn_at(&e->where, "conditional type mismatch (%s vs %s)",
						type_to_str(tt_l), type_to_str_r(buf, tt_r));

				e->tree_type = type_nav_btype(cc1_type_nav, type_void);
			}
		}
	}

	e->freestanding = (e->lhs ? e->lhs : e->expr)->freestanding || e->rhs->freestanding;
}