Example #1
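/*
 * fold a cast expression: fold the operand, reject struct/union operands and
 * cast targets, and if the cast's tree_type is still unknown (a tdef or
 * typeof is involved), adopt the operand's type
 */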
void fold_expr_cast(expr *e, symtable *stab)
{
    fold_expr(e->expr, stab);

    fold_disallow_st_un(e->expr, "cast-expr");

    /*
     * if we don't have a valid tree_type, get one -
     * this only happens when a tdef or typeof is involved
     */

    if(e->tree_type->type->primitive == type_unknown) {
        decl_free(e->tree_type);
        e->tree_type = decl_copy(e->expr->tree_type);
    }

    fold_decl(e->tree_type, stab); /* struct lookup, etc */

    fold_disallow_st_un(e, "cast-target");

#ifdef CAST_COLLAPSE
    if(expr_kind(e->expr, cast)) {
        /* get rid of e->expr, replace with e->expr->rhs */
        expr *del = e->expr;

        e->expr = e->expr->expr;

        /*decl_free(del->tree_type); XXX: memleak */
        expr_free(del);

        fold_expr_cast(e, stab);
    }
#endif
}
Example #2
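/*
 * generate code for a function call: either emit an inline __asm__ string
 * verbatim, or push the arguments in reverse order, call the function
 * (directly by name, or indirectly via a computed address in rax), pop the
 * arguments off the stack and push the return value
 */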
void gen_expr_funcall(expr *e, symtable *stab)
{
	const char *const fname = e->expr->spel;
	expr **iter;
	int nargs = 0;

	if(fopt_mode & FOPT_ENABLE_ASM && fname && !strcmp(fname, ASM_INLINE_FNAME)){
		const char *str;
		expr *arg1;
		int i;
		char ch; /* declared here so the goto below doesn't jump over an initialiser */

		if(!e->funcargs || e->funcargs[1] || !expr_kind(e->funcargs[0], addr))
			die_at(&e->where, "invalid __asm__ arguments");

		arg1 = e->funcargs[0];
		str = arg1->array_store->data.str;
		for(i = 0; i < arg1->array_store->len - 1; i++){
			ch = str[i];
			if(!isprint(ch) && !isspace(ch))
invalid:
				die_at(&arg1->where, "invalid __asm__ string (character %d)", ch);
		}

		/* the string must be nul-terminated - report the stray character if not */
		if((ch = str[i]) != '\0')
			goto invalid;

		asm_temp(0, "; start manual __asm__");
		fprintf(cc_out[SECTION_TEXT], "%s\n", arg1->array_store->data.str);
		asm_temp(0, "; end manual __asm__");
	}else{
		/* continue with normal funcall */

		if(e->funcargs){
			/* need to push on in reverse order */
			for(iter = e->funcargs; *iter; iter++);
			for(iter--; iter >= e->funcargs; iter--){
				gen_expr(*iter, stab);
				nargs++;
			}
		}

		if(e->sym && !e->sym->decl->decl_ptr && e->sym->decl->spel){
			/* simple */
			asm_temp(1, "call %s", e->sym->decl->spel);
		}else{
			gen_expr(e->expr, stab);
			asm_temp(1, "pop rax  ; function address");
			asm_temp(1, "call rax ; duh");
		}

		if(nargs)
			asm_temp(1, "add rsp, %d ; %d arg%s",
					nargs * platform_word_size(),
					nargs,
					nargs == 1 ? "" : "s");

		asm_temp(1, "push rax ; ret");
	}
}
Example #3
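/*
 * handle a call through a bare identifier with no declaration in scope:
 * warn, synthesise an implicit "int f()" declaration accepting any
 * arguments, and attach its sym to the identifier expression
 */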
static void check_implicit_funcall(expr *e, symtable *stab, char **const psp)
{
	struct symtab_entry ent;
	funcargs *args;
	decl *df, *owning_func;
	type *func_ty;

	if(e->expr->in_parens
	|| !expr_kind(e->expr, identifier)
	/* not folded yet, hence no 'e->expr->bits.ident.type != IDENT_NORM' */
	/* get the spel that parse stashes in the identifier expr: */
	|| !((*psp) = e->expr->bits.ident.bits.ident.spel))
	{
		return;
	}

	/* check for implicit function */
	if(symtab_search(stab, *psp, NULL, &ent)
	&& ent.type == SYMTAB_ENT_DECL)
	{
		e->expr->bits.ident.bits.ident.sym = ent.bits.decl->sym;
		return;
	}

	args = funcargs_new();

	/* set up the funcargs as if it's "x()" - i.e. any args */
	funcargs_empty(args);

	func_ty = type_func_of(
			type_nav_btype(cc1_type_nav, type_int),
			args,
			symtab_new(stab, &e->where) /*new symtable for args*/);

	cc1_warn_at(&e->expr->where, implicit_func,
			"implicit declaration of function \"%s\"", *psp);

	df = decl_new();
	memcpy_safe(&df->where, &e->where);
	df->ref = func_ty;
	df->spel = e->expr->bits.ident.bits.ident.spel;
	df->flags |= DECL_FLAGS_IMPLICIT;

	fold_decl(df, stab); /* e.g. to update the calling convention */

	df->sym->type = sym_global;

	e->expr->bits.ident.bits.ident.sym = df->sym;
	e->expr->tree_type = func_ty;

	owning_func = symtab_func(stab);
	if(owning_func)
		symtab_insert_before(symtab_root(stab), owning_func, df);
	else
		symtab_add_to_scope(symtab_root(stab), df); /* function call at global scope */
}
Example #4
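/*
 * fold an address-of expression: either a GNU address-of-label (&&lbl),
 * whose type is void *, or &expr, where the operand must be addressable
 * and must not be a register variable or a bitfield
 */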
void fold_expr_addr(expr *e, symtable *stab)
{
	if(e->bits.lbl.spel){
		decl *in_func = symtab_func(stab);

		if(!in_func)
			die_at(&e->where, "address-of-label outside a function");

		if(e->bits.lbl.static_ctx)
			in_func->bits.func.contains_static_label_addr = 1;

		(e->bits.lbl.label =
		 symtab_label_find_or_new(
			 stab, e->bits.lbl.spel, &e->where))
			->uses++;

		/* address of label - void * */
		e->tree_type = type_ptr_to(type_nav_btype(cc1_type_nav, type_void));

	}else{
		/* if it's an identifier, act as a read */
		fold_inc_writes_if_sym(e->lhs, stab);

		fold_expr_nodecay(e->lhs, stab);

		e->tree_type = type_ptr_to(e->lhs->tree_type);

		/* can address: lvalues, arrays and functions */
		if(!expr_is_addressable(e->lhs)){
			warn_at_print_error(&e->where, "can't take the address of %s (%s)",
					expr_str_friendly(e->lhs), type_to_str(e->lhs->tree_type));
			fold_had_error = 1;
			return;
		}

		if(expr_kind(e->lhs, identifier)){
			sym *sym = e->lhs->bits.ident.bits.ident.sym;
			if(sym){
				decl *d = sym->decl;

				if((d->store & STORE_MASK_STORE) == store_register)
					die_at(&e->lhs->where, "can't take the address of a register variable");
			}
		}

		fold_check_expr(e->lhs, FOLD_CHK_ALLOW_VOID | FOLD_CHK_NO_BITFIELD,
				"address-of");
	}
}
Example #5
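/*
 * an expression is an lvalue if it can generate an address (f_lea),
 * except for struct/union assignment results and array-typed expressions
 */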
int expr_is_lval(expr *e)
{
	if(!e->f_lea)
		return 0;

	/* special case:
	 * (a = b) = c
	 * ^~~~~~~ not an lvalue, but internally we handle it as one
	 */
	if(expr_kind(e, assign) && type_is_s_or_u(e->tree_type))
		return 0;

	if(type_is_array(e->tree_type))
		return 0;

	return 1;
}
Example #6
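/*
 * warn when a constant stored to a bitfield doesn't fit in the field width
 * and so alters the value, e.g. 9 stored into "int x : 3" becomes 1
 */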
void bitfield_trunc_check(decl *mem, expr *from)
{
	consty k;

	if(expr_kind(from, cast)){
		/* we'll warn about bitfield truncation, prevent warnings
		 * about cast truncation
		 */
		from->expr_cast_implicit = 0;
	}

	const_fold(from, &k);

	if(k.type == CONST_NUM){
		const sintegral_t kexp = k.bits.num.val.i;
		/* highest may be -1, i.e. when kexp is zero */
		const int highest = integral_high_bit(k.bits.num.val.i, from->tree_type);
		const int is_signed = type_is_signed(mem->bits.var.field_width->tree_type);

		const_fold(mem->bits.var.field_width, &k);

		UCC_ASSERT(k.type == CONST_NUM, "bitfield size not val?");
		UCC_ASSERT(K_INTEGRAL(k.bits.num), "fp bitfield size?");

		if(highest > (sintegral_t)k.bits.num.val.i
		|| (is_signed && highest == (sintegral_t)k.bits.num.val.i))
		{
			sintegral_t kexp_to = kexp & ~(-1UL << k.bits.num.val.i);

			cc1_warn_at(&from->where,
					bitfield_trunc,
					"truncation in store to bitfield alters value: "
					"%" NUMERIC_FMT_D " -> %" NUMERIC_FMT_D,
					kexp, kexp_to);
		}
	}
}
Example #7
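/*
 * emit static initialisation data for a type: pad out null and flex-array
 * inits, walk struct members (grouping bitfields, padding between members
 * and up to the struct size), recurse over array elements, initialise the
 * single active union member, and finally emit scalar values
 */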
static void asm_declare_init(enum section_type sec, decl_init *init, type *tfor)
{
	type *r;

	if(init == DYNARRAY_NULL)
		init = NULL;

	if(!init){
		/* don't initialise flex-arrays */
		if(!type_is_incomplete_array(tfor)){
			asm_declare_pad(sec, type_size(tfor, NULL),
					"null init"/*, type_to_str(tfor)*/);
		}else{
			asm_out_section(sec, ASM_COMMENT " flex array init skipped\n");
		}

	}else if((r = type_is_primitive(tfor, type_struct))){
		/* array of stmts for each member
		 * assumes the ->bits.inits order is member order
		 */
		struct_union_enum_st *const sue = r->bits.type->sue;
		sue_member **mem;
		decl_init **i;
		unsigned end_of_last = 0;
		struct bitfield_val *bitfields = NULL;
		unsigned nbitfields = 0;
		decl *first_bf = NULL;
		expr *copy_from_exp;

		UCC_ASSERT(init->type == decl_init_brace, "unbraced struct");

#define DEBUG(s, ...) /*fprintf(f, "\033[35m" s "\033[m\n", __VA_ARGS__)*/

		i = init->bits.ar.inits;

		/* check for compound-literal copy-init */
		if((copy_from_exp = decl_init_is_struct_copy(init, sue))){
			decl_init *copy_from_init;

			copy_from_exp = expr_skip_lval2rval(copy_from_exp);

			/* the only struct-expression that's possible
			 * in static context is a compound literal */
			assert(expr_kind(copy_from_exp, compound_lit)
					&& "unhandled expression init");

			copy_from_init = copy_from_exp->bits.complit.decl->bits.var.init.dinit;
			assert(copy_from_init->type == decl_init_brace);

			i = copy_from_init->bits.ar.inits;
		}

		/* iterate using members, not inits */
		for(mem = sue->members;
				mem && *mem;
				mem++)
		{
			decl *d_mem = (*mem)->struct_member;
			decl_init *di_to_use = NULL;

			if(i){
				int inc = 1;

				if(*i == NULL)
					inc = 0;
				else if(*i != DYNARRAY_NULL)
					di_to_use = *i;

				if(inc){
					i++;
					if(!*i)
						i = NULL; /* reached end */
				}
			}

			DEBUG("init for %ld/%s, %s",
					mem - sue->members, d_mem->spel,
					di_to_use ? di_to_use->bits.expr->f_str() : NULL);

			/* only pad if we're not on a bitfield or we're on the first bitfield */
			if(!d_mem->bits.var.field_width || !first_bf){
				DEBUG("prev padding, offset=%d, end_of_last=%d",
						d_mem->struct_offset, end_of_last);

				UCC_ASSERT(
						d_mem->bits.var.struct_offset >= end_of_last,
						"negative struct pad, sue %s, member %s "
						"offset %u, end_of_last %u",
						sue->spel, decl_to_str(d_mem),
						d_mem->bits.var.struct_offset, end_of_last);

				asm_declare_pad(sec,
						d_mem->bits.var.struct_offset - end_of_last,
						"prev struct padding");
			}

			if(d_mem->bits.var.field_width){
				if(!first_bf || d_mem->bits.var.first_bitfield){
					if(first_bf){
						DEBUG("new bitfield group (%s is new boundary), old:",
								d_mem->spel);
						/* next bitfield group - store the current */
						bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					}
					first_bf = d_mem;
				}

				bitfields = bitfields_add(
						bitfields, &nbitfields,
						d_mem, di_to_use);

			}else{
				if(nbitfields){
					DEBUG("at non-bitfield, prev-bitfield out:", 0);
					bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					first_bf = NULL;
				}

				DEBUG("normal init for %s:", d_mem->spel);
				asm_declare_init(sec, di_to_use, d_mem->ref);
			}

			if(type_is_incomplete_array(d_mem->ref)){
				UCC_ASSERT(!mem[1], "flex-arr not at end");
			}else if(!d_mem->bits.var.field_width || d_mem->bits.var.first_bitfield){
				unsigned last_sz = type_size(d_mem->ref, NULL);

				end_of_last = d_mem->bits.var.struct_offset + last_sz;
				DEBUG("done with member \"%s\", end_of_last = %d",
						d_mem->spel, end_of_last);
			}
		}

		if(nbitfields)
			bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
		free(bitfields);

		/* need to pad to struct size */
		asm_declare_pad(sec,
				sue_size(sue, NULL) - end_of_last,
				"struct tail");

	}else if((r = type_is(tfor, type_array))){
		size_t i, len;
		decl_init **p;
		type *next = type_next(tfor);

		UCC_ASSERT(init->type == decl_init_brace, "unbraced array init");

		if(type_is_incomplete_array(tfor)){
			len = dynarray_count(init->bits.ar.inits);
		}else{
			UCC_ASSERT(type_is_complete(tfor), "incomplete array/type init");
			len = type_array_len(tfor);
		}

		for(i = len, p = init->bits.ar.inits;
				i > 0;
				i--)
		{
			decl_init *this = NULL;
			if(*p){
				this = *p++;

				if(this != DYNARRAY_NULL && this->type == decl_init_copy){
					/*fprintf(f, "# copy from %lu\n", DECL_INIT_COPY_IDX(this, init));*/
					struct init_cpy *icpy = *this->bits.range_copy;
					/* resolve the copy */
					this = icpy->range_init;
				}
			}

			asm_declare_init(sec, this, next);
		}

	}else if((r = type_is_primitive(tfor, type_union))){
		/* union inits are decl_init_brace with DYNARRAY_NULL gaps up to the
		 * initialised member's init, then NULL/end of the init-array */
		struct_union_enum_st *sue = type_is_s_or_u(r);
		unsigned i, sub = 0;
		decl_init *u_init;

		UCC_ASSERT(init->type == decl_init_brace, "brace init expected");

		/* skip the empty slots until we reach the initialised member */
		for(i = 0; init->bits.ar.inits[i] == DYNARRAY_NULL; i++);

		if((u_init = init->bits.ar.inits[i])){
			decl *mem = sue->members[i]->struct_member;
			type *mem_r = mem->ref;

			/* union init, member at index `i' */
			if(mem->bits.var.field_width){
				/* we know it's integral */
				struct bitfield_val bfv;

				ASSERT_SCALAR(u_init);

				bitfield_val_set(&bfv, u_init->bits.expr, mem->bits.var.field_width);

				asm_declare_init_bitfields(sec, &bfv, 1, mem_r);
			}else{
				asm_declare_init(sec, u_init, mem_r);
			}

			sub = type_size(mem_r, NULL);
		} /* else null union init */

		asm_declare_pad(sec,
				type_size(r, NULL) - sub,
				"union extra");

	}else{
		/* scalar */
		expr *exp = init->bits.expr;

		UCC_ASSERT(init->type == decl_init_scalar, "scalar init expected");

		/* exp->tree_type should match tfor */
		{
			char buf[TYPE_STATIC_BUFSIZ];

			UCC_ASSERT(
					type_cmp(exp->tree_type, tfor, TYPE_CMP_ALLOW_TENATIVE_ARRAY) != TYPE_NOT_EQUAL,
					"mismatching init types: %s and %s",
					type_to_str_r(buf, exp->tree_type),
					type_to_str(tfor));
		}

		/* use tfor, since "abc" has type (char[]){(int)'a', (int)'b', ...} */
		DEBUG("  scalar init for %s:", type_to_str(tfor));
		static_val(sec, tfor, exp);
	}
}
Example #8
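/*
 * fold a function call: resolve implicit declarations, check the callee is
 * callable, then check argument counts, void/null arguments and argument
 * types, apply the default promotions, warn on aggregate returns and run
 * the format, sentinel, warn_unused and standard-function checks
 */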
void fold_expr_funcall(expr *e, symtable *stab)
{
	type *func_ty;
	funcargs *args_from_decl;
	char *sp = NULL;
	unsigned count_decl;

	check_implicit_funcall(e, stab, &sp);

	FOLD_EXPR(e->expr, stab);
	func_ty = e->expr->tree_type;

	if(!type_is_callable(func_ty)){
		warn_at_print_error(&e->expr->where,
				"%s-expression (type '%s') not callable",
				expr_str_friendly(e->expr, 0),
				type_to_str(func_ty));

		fold_had_error = 1;

		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	e->tree_type = type_func_call(func_ty, &args_from_decl);

	/* func count comparison, only if the func has arg-decls, or the func is f(void) */
	UCC_ASSERT(args_from_decl, "no funcargs for decl %s", sp);

	count_decl = dynarray_count(args_from_decl->arglist);

	if(check_arg_counts(args_from_decl, count_decl, e->funcargs, e, sp))
		return;

	if(e->funcargs){
		check_arg_voidness_and_nonnulls(
				e, stab,
				args_from_decl, count_decl,
				e->funcargs, sp);
	}

	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl))
		check_arg_types(args_from_decl, e->funcargs, stab, sp, &e->where);

	if(e->funcargs)
		default_promote_args(e->funcargs, count_decl, stab);

	if(type_is_s_or_u(e->tree_type)){
		/* handled transparently by the backend */
		e->f_islval = expr_is_lval_struct;

		cc1_warn_at(&e->expr->where,
				aggregate_return,
				"called function returns aggregate (%s)",
				type_to_str(e->tree_type));
	}

	/* attr */
	{
		type *fnty = e->expr->tree_type;

		/* look through decays */
		if(expr_kind(e->expr, cast) && expr_cast_is_lval2rval(e->expr))
			fnty = expr_cast_child(e->expr)->tree_type;

		format_check_call(fnty, e->funcargs, args_from_decl->variadic);

		sentinel_check(
				&e->where, e,
				e->funcargs, args_from_decl->variadic,
				count_decl, stab);
	}

	/* check the subexp tree type to get the funcall attributes */
	if(func_or_builtin_attr_present(e, attr_warn_unused))
		e->freestanding = 0; /* needs use */

	if(sp && !cc1_fopt.freestanding)
		check_standard_funcs(sp, e->funcargs);
}
Example #9
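/*
 * fold a function call: implicitly declare unknown functions as
 * "extern int f()", collapse a function-pointer dereference, then compare
 * the argument count and types against the declaration
 */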
void fold_expr_funcall(expr *e, symtable *stab)
{
	decl *df;
	funcargs *args_exp;

	if(expr_kind(e->expr, identifier) && e->expr->spel){
		char *const sp = e->expr->spel;

		e->sym = symtab_search(stab, sp);
		if(!e->sym){
			df = decl_new_where(&e->where);

			df->type->primitive = type_int;
			df->type->spec     |= spec_extern;

			cc1_warn_at(&e->where, 0, WARN_IMPLICIT_FUNC, "implicit declaration of function \"%s\"", sp);

			df->spel = sp;

			df->funcargs = funcargs_new();

			if(e->funcargs)
				/* set up the funcargs as if it's "x()" - i.e. any args */
				function_empty_args(df->funcargs);

			e->sym = symtab_add(symtab_root(stab), df, sym_global, SYMTAB_WITH_SYM, SYMTAB_PREPEND);
		}else{
			df = e->sym->decl;
		}

		fold_expr(e->expr, stab);
	}else{
		fold_expr(e->expr, stab);

		/*
		 * convert int (*)() to remove the deref
		 */
		if(decl_is_func_ptr(e->expr->tree_type)){
			/* XXX: memleak */
			e->expr = e->expr->lhs;
			fprintf(stderr, "FUNCPTR\n");
		}else{
			fprintf(stderr, "decl %s\n", decl_to_str(e->expr->tree_type));
		}

		df = e->expr->tree_type;

		if(!decl_is_callable(df)){
			die_at(&e->expr->where, "expression %s (%s) not callable",
					e->expr->f_str(),
					decl_to_str(df));
		}
	}

	e->tree_type = decl_copy(df);
	/*
	 * int (*x)();
	 * (*x)();
	 * evaluates to tree_type = int;
	 */
	decl_func_deref(e->tree_type);


	if(e->funcargs){
		expr **iter;
		for(iter = e->funcargs; *iter; iter++)
			fold_expr(*iter, stab);
	}

	/* func count comparison, only if the func has arg-decls, or the func is f(void) */
	args_exp = decl_funcargs(e->tree_type);

	UCC_ASSERT(args_exp, "no funcargs for decl %s", df->spel);

	if(args_exp->arglist || args_exp->args_void){
		expr **iter_arg;
		decl **iter_decl;
		int count_decl, count_arg;

		count_decl = count_arg = 0;

		for(iter_arg  = e->funcargs;       iter_arg  && *iter_arg;  iter_arg++,  count_arg++);
		for(iter_decl = args_exp->arglist; iter_decl && *iter_decl; iter_decl++, count_decl++);

		if(count_decl != count_arg && (args_exp->variadic ? count_arg < count_decl : 1)){
			die_at(&e->where, "too %s arguments to function %s (got %d, need %d)",
					count_arg > count_decl ? "many" : "few",
					df->spel, count_arg, count_decl);
		}

		if(e->funcargs){
			funcargs *argument_decls = funcargs_new();

			for(iter_arg = e->funcargs; *iter_arg; iter_arg++)
				dynarray_add((void ***)&argument_decls->arglist, (*iter_arg)->tree_type);

			fold_funcargs_equal(args_exp, argument_decls, 1, &e->where, "argument", df->spel);
			funcargs_free(argument_decls, 0);
		}
	}
}
Example #10
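/*
 * fold a struct/union member access (a.b or a->b): locate the member in the
 * (complete) struct or union, rewrite a.b as (&a)->b for code generation,
 * and give the result the member's type plus the aggregate's qualifiers
 */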
void fold_expr_struct(expr *e, symtable *stab)
{
	/*
	 * lhs = any ptr-to-struct expr
	 * rhs = struct member ident
	 */
	const int ptr_expect = !e->expr_is_st_dot;
	struct_union_enum_st *sue;
	char *spel;

	fold_expr_no_decay(e->lhs, stab);
	/* don't fold the rhs - just a member name */

	if(e->rhs){
		UCC_ASSERT(expr_kind(e->rhs, identifier),
				"struct/union member not identifier (%s)", e->rhs->f_str());

		UCC_ASSERT(!e->bits.struct_mem.d, "already have a struct-member");

		spel = e->rhs->bits.ident.spel;
	}else{
		UCC_ASSERT(e->bits.struct_mem.d, "no member specified already?");
		spel = NULL;
	}

	/* we access a struct, of the right ptr depth */
	{
		type *r = e->lhs->tree_type;

		if(ptr_expect){
			type *rtest = type_is(r, type_ptr);

			if(!rtest && !(rtest = type_is(r, type_array)))
				goto err;

			r = rtest->ref; /* safe - rtest is a pointer or array */
		}

		if(!(sue = type_is_s_or_u(r))){
err:
			die_at(&e->lhs->where, "'%s' (%s-expr) is not a %sstruct or union (member %s)",
					type_to_str(e->lhs->tree_type),
					e->lhs->f_str(),
					ptr_expect ? "pointer to " : "",
					spel);
		}
	}

	if(!sue_complete(sue)){
		char wbuf[WHERE_BUF_SIZ];

		die_at(&e->lhs->where, "%s incomplete type (%s)\n"
				"%s: note: forward declared here",
				ptr_expect
					? "dereferencing pointer to"
					: "accessing member of",
				type_to_str(e->lhs->tree_type),
				where_str_r(wbuf, &sue->where));
	}

	if(spel){
		/* found the struct, find the member */
		decl *d_mem = struct_union_member_find(sue, spel,
				&e->bits.struct_mem.extra_off, NULL);

		if(!d_mem)
			die_at(&e->where, "%s %s has no member named \"%s\"",
					sue_str(sue), sue->spel, spel);

		e->rhs->tree_type = (e->bits.struct_mem.d = d_mem)->ref;
	}/* else already have the member */

	/*
	 * if it's a.b, convert to (&a)->b for asm gen
	 * e = { lhs = "a", rhs = "b", type = dot }
	 * e = {
	 *   type = ptr,
	 *   lhs = { cast<void *>, expr = { expr = "a", type = addr } },
	 *   rhs = "b",
	 * }
	 */
	if(!ptr_expect){
		expr *cast, *addr;

		addr = expr_new_addr(e->lhs);
		cast = expr_new_cast(addr,
				type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
				1);

		e->lhs = cast;
		e->expr_is_st_dot = 0;

		FOLD_EXPR(e->lhs, stab);
	}

	/* pull qualifiers from the struct to the member */
	e->tree_type = type_qualify(
			e->bits.struct_mem.d->ref,
			type_qual(e->lhs->tree_type));
}
Example #11
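/*
 * fold an assignment: fold both sides, require a modifiable lvalue target,
 * strip qualifiers from the result type, type-check and cast the rhs, check
 * bitfield truncation and malloc() results, and lower struct assignment to
 * a memcpy builtin
 */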
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;
	expr *rhs_nocast;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay
	 *
	 * (see the same code in expr_assign_compound.c)
	 */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* the only way to get a value into a bitfield (aside from memcpy / indirection)
	 * is via assignment, hence we're fine doing the truncation check here
	 */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}

	rhs_nocast = expr_skip_implicit_casts(e->rhs);
	if(expr_kind(rhs_nocast, funcall)){
		expr *callexpr = rhs_nocast;
		decl *rhs_call_decl = expr_to_declref(callexpr->expr, NULL);

		if(rhs_call_decl && rhs_call_decl->spel && !strcmp(rhs_call_decl->spel, "malloc")){
			c_func_check_malloc(callexpr, e->lhs->tree_type);
		}
	}

	if(is_struct_cpy){
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set is_lval, so we can participate in struct-copy chains
		 * - this isn't interpreted as an lvalue, e.g. (a = b) = c; */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}
Example #12
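/* walk through any chain of casts to the underlying expression */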
expr *expr_skip_casts(expr *e)
{
	while(expr_kind(e, cast))
		e = e->expr;
	return e;
}