Example No. 1
void expr_assign_const_check(expr *e, where *w)
{
	struct_union_enum_st *su;

	if(type_is_const(e->tree_type)){
		fold_had_error = 1;
		warn_at_print_error(w, "can't modify const expression %s",
				expr_str_friendly(e));
	}else if((su = type_is_s_or_u(e->tree_type)) && su->contains_const){
		fold_had_error = 1;
		warn_at_print_error(w, "can't assign struct - contains const member");
	}
}
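A minimal, hypothetical C snippet (not taken from the compiler's tests) showing the two situations this check reports; neither assignment is accepted, so the file intentionally does not compile:

struct S { const int x; int y; };

void f(void)
{
	const int c = 1;
	struct S a = { 1, 2 }, b = { 3, 4 };

	c = 2; /* "can't modify const expression ..." */
	a = b; /* "can't assign struct - contains const member" */
}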
Example No. 2
int expr_is_lval(expr *e)
{
	if(!e->f_lea)
		return 0;

	/* special case:
	 * (a = b) = c
	 * ^~~~~~~ not an lvalue, but internally we handle it as one
	 */
	if(expr_kind(e, assign) && type_is_s_or_u(e->tree_type))
		return 0;

	if(type_is_array(e->tree_type))
		return 0;

	return 1;
}
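An illustrative (hypothetical) sketch of what each early return corresponds to at the source level; the rejected expressions are left in comments:

struct P { int x; };

void g(struct P a, struct P b, struct P c)
{
	int arr[3];

	/* 42 = 1;       -- a constant has no address (f_lea is NULL), so it is not an lvalue */
	/* (a = b) = c;  -- a struct-assignment result is handled internally as an lvalue for
	 *                  struct-copy chains, but expr_is_lval() still reports 0 for it     */
	/* arr = ...;    -- an array expression is not a modifiable lvalue                    */
	(void)a; (void)b; (void)c; (void)arr;
}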
Example No. 3
const out_val *gen_expr_assign(const expr *e, out_ctx *octx)
{
	const out_val *val, *store;

	UCC_ASSERT(!e->assign_is_post, "assign_is_post set for non-compound assign");

	assert(!type_is_s_or_u(e->tree_type));

	val = gen_expr(e->rhs, octx);
	store = gen_expr(e->lhs, octx);
	out_val_retain(octx, store);

	out_store(octx, store, val);

	/* re-read from the store,
	 * e.g. if the value has undergone bitfield truncation */
	return out_deref(octx, store);
}
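The final out_deref() is observable from C when the stored value is narrowed on the way into a bitfield; a small standalone example (relying only on standard C semantics, not on this backend):

#include <stdio.h>

struct B { unsigned b : 3; }; /* 3-bit field holds 0..7 */

int main(void)
{
	struct B s;
	unsigned v = (s.b = 9); /* 9 is truncated to 1 in the field; the assignment's
	                         * value is re-read from the store, so v is 1, not 9 */
	printf("%u\n", v);      /* prints 1 */
	return 0;
}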
Example No. 4
unsigned type_align(type *r, const where *from)
{
	struct_union_enum_st *sue;
	type *test;
	attribute *align;

	align = type_attr_present(r, attr_aligned);

	if(align){
		if(align->bits.align){
			consty k;

			const_fold(align->bits.align, &k);

			assert(k.type == CONST_NUM && K_INTEGRAL(k.bits.num));

			return k.bits.num.val.i;
		}

		return platform_align_max();
	}

	if((sue = type_is_s_or_u(r)))
		/* safe - can't have an instance without a ->sue */
		return sue->align;

	if(type_is(r, type_ptr)
	|| type_is(r, type_block))
	{
		return platform_word_size();
	}

	if((test = type_is(r, type_btype)))
		return btype_align(test->bits.type, from);

	if((test = type_is(r, type_array)))
		return type_align(test->ref, from);

	return 1;
}
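A sketch of which source forms reach each branch, assuming the front end accepts the usual __attribute__((aligned)) spelling; the concrete numbers are typical of a 64-bit target and are not guaranteed:

struct A { char c; int i; };                        /* struct/union: sue->align               */
typedef int aligned16 __attribute__((aligned(16))); /* attribute with an argument: that value */
typedef int aligned_any __attribute__((aligned));   /* bare attribute: platform_align_max()   */

int *p;        /* pointer or block: platform_word_size(), e.g. 8 */
double d;      /* basic type: btype_align()                      */
char buf[32];  /* array: alignment of the element type           */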
Example No. 5
void fold_expr_if(expr *e, symtable *stab)
{
	const char *desc = "?:";
	consty konst;
	type *tt_l, *tt_r;

	FOLD_EXPR(e->expr, stab);
	const_fold(e->expr, &konst);

	fold_check_expr(e->expr, FOLD_CHK_NO_ST_UN, desc);

	if(e->lhs){
		e->lhs = fold_expr_nonstructdecay(e->lhs, stab);
		fold_check_expr(e->lhs,
				FOLD_CHK_ALLOW_VOID,
				"?: left operand");
	}

	e->rhs = fold_expr_nonstructdecay(e->rhs, stab);
	fold_check_expr(e->rhs,
			FOLD_CHK_ALLOW_VOID,
			"?: right operand");

	e->freestanding = (e->lhs ? e->lhs : e->expr)->freestanding || e->rhs->freestanding;

	/*
	Left operand                           Right operand                        Result

	Arithmetic                             Arithmetic                           Arithmetic type after usual arithmetic conversions
	Structure or union type                Compatible structure or union type   Structure or union type with all the qualifiers on both operands
	void                                   void                                 void
	Pointer to compatible type             Pointer to compatible type           Pointer to type with all the qualifiers specified for the type
	Pointer to type                        NULL pointer (the constant 0)        Pointer to type
	Pointer to object or incomplete type   Pointer to void                      Pointer to void with all the qualifiers specified for the type

	GCC and Clang seem to relax the last rule:
	  a) resolve if either is any pointer, not just (void *)
	  b) resolve to a pointer to the incomplete-type
	*/

	tt_l = (e->lhs ? e->lhs : e->expr)->tree_type;
	tt_r = e->rhs->tree_type;


	/* C11 6.5.15 */
	if(type_is_arith(tt_l) && type_is_arith(tt_r)){
		/* 6.5.15 p4 */
		expr **middle_op = e->lhs ? &e->lhs : &e->expr;

		expr_check_sign(desc, *middle_op, e->rhs, &e->where);

		e->tree_type = op_promote_types(
				op_unknown,
				middle_op, &e->rhs, stab,
				&e->where, desc);

	}else if(type_is_void(tt_l) || type_is_void(tt_r)){
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);

	}else{
		const enum type_cmp cmp = type_cmp(tt_l, tt_r, 0);

		if((cmp & (TYPE_EQUAL_ANY | TYPE_QUAL_ADD | TYPE_QUAL_SUB))
		&& type_is_s_or_u(tt_l))
		{
			e->f_islval = expr_is_lval_struct;
			e->tree_type = type_qualify(tt_l, type_qual(tt_l) | type_qual(tt_r));

		}else{
			try_pointer_propagate(e, cmp, tt_l, tt_r);
		}
	}
}
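The cases in the comment's table correspond to conditional expressions like these (illustrative only; the GNU "x ?: y" form is the path taken when e->lhs is missing):

struct P;

void h(int i, long l, struct P *sp1, struct P *sp2, void *vp)
{
	long r1 = i ? i : l;           /* arithmetic operands: usual arithmetic conversions  */
	struct P *r2 = i ? sp1 : sp2;  /* compatible pointers: pointer to that type          */
	struct P *r3 = i ? sp1 : 0;    /* pointer vs null pointer constant: the pointer type */
	void *r4 = i ? sp1 : vp;       /* object pointer vs void *: void * (plus qualifiers) */
	int r5 = i ?: 42;              /* GNU extension: condition reused as middle operand  */

	(void)r1; (void)r2; (void)r3; (void)r4; (void)r5;
}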
Example No. 6
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay
	 */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* the only way to get a value into a bitfield (aside from memcpy / indirection)
	 * is via this assignment, hence we're fine doing the truncation check here
	 */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}


	if(is_struct_cpy){
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set is_lval, so we can participate in struct-copy chains
		 * FIXME: don't interpret as an lvalue, e.g. (a = b) = c;
		 * this is currently special cased in expr_is_lval()
		 *
		 * CHECK THIS
		 */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}
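A sketch of the assignments this fold distinguishes (hypothetical snippet; the diagnostic quoted is the one issued above):

struct Big { int a[4]; };

void k(struct Big *d, struct Big *s, int i)
{
	*d = *s;   /* struct copy: rewritten into a memcpy builtin of
	            * type_size(struct Big) bytes, usable in a copy chain     */
	i = i + 1; /* scalar: rhs is lval2rval'd, then checked/cast to the lhs type */

	/* i = (void)0;  -- "assignment from void expression" */
	(void)i;
}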
Example No. 7
static void asm_declare_init(enum section_type sec, decl_init *init, type *tfor)
{
	type *r;

	if(init == DYNARRAY_NULL)
		init = NULL;

	if(!init){
		/* don't initialise flex-arrays */
		if(!type_is_incomplete_array(tfor)){
			asm_declare_pad(sec, type_size(tfor, NULL),
					"null init"/*, type_to_str(tfor)*/);
		}else{
			asm_out_section(sec, ASM_COMMENT " flex array init skipped\n");
		}

	}else if((r = type_is_primitive(tfor, type_struct))){
		/* array of stmts for each member
		 * assumes the ->bits.inits order is member order
		 */
		struct_union_enum_st *const sue = r->bits.type->sue;
		sue_member **mem;
		decl_init **i;
		unsigned end_of_last = 0;
		struct bitfield_val *bitfields = NULL;
		unsigned nbitfields = 0;
		decl *first_bf = NULL;
		expr *copy_from_exp;

		UCC_ASSERT(init->type == decl_init_brace, "unbraced struct");

#define DEBUG(s, ...) /*fprintf(f, "\033[35m" s "\033[m\n", __VA_ARGS__)*/

		i = init->bits.ar.inits;

		/* check for compound-literal copy-init */
		if((copy_from_exp = decl_init_is_struct_copy(init, sue))){
			decl_init *copy_from_init;

			copy_from_exp = expr_skip_lval2rval(copy_from_exp);

			/* the only struct-expression that's possible
			 * in static context is a compound literal */
			assert(expr_kind(copy_from_exp, compound_lit)
					&& "unhandled expression init");

			copy_from_init = copy_from_exp->bits.complit.decl->bits.var.init.dinit;
			assert(copy_from_init->type == decl_init_brace);

			i = copy_from_init->bits.ar.inits;
		}

		/* iterate using members, not inits */
		for(mem = sue->members;
				mem && *mem;
				mem++)
		{
			decl *d_mem = (*mem)->struct_member;
			decl_init *di_to_use = NULL;

			if(i){
				int inc = 1;

				if(*i == NULL)
					inc = 0;
				else if(*i != DYNARRAY_NULL)
					di_to_use = *i;

				if(inc){
					i++;
					if(!*i)
						i = NULL; /* reached end */
				}
			}

			DEBUG("init for %ld/%s, %s",
					mem - sue->members, d_mem->spel,
					di_to_use ? di_to_use->bits.expr->f_str() : NULL);

			/* only pad if we're not on a bitfield or we're on the first bitfield */
			if(!d_mem->bits.var.field_width || !first_bf){
				DEBUG("prev padding, offset=%d, end_of_last=%d",
						d_mem->struct_offset, end_of_last);

				UCC_ASSERT(
						d_mem->bits.var.struct_offset >= end_of_last,
						"negative struct pad, sue %s, member %s "
						"offset %u, end_of_last %u",
						sue->spel, decl_to_str(d_mem),
						d_mem->bits.var.struct_offset, end_of_last);

				asm_declare_pad(sec,
						d_mem->bits.var.struct_offset - end_of_last,
						"prev struct padding");
			}

			if(d_mem->bits.var.field_width){
				if(!first_bf || d_mem->bits.var.first_bitfield){
					if(first_bf){
						DEBUG("new bitfield group (%s is new boundary), old:",
								d_mem->spel);
						/* next bitfield group - store the current */
						bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					}
					first_bf = d_mem;
				}

				bitfields = bitfields_add(
						bitfields, &nbitfields,
						d_mem, di_to_use);

			}else{
				if(nbitfields){
					DEBUG("at non-bitfield, prev-bitfield out:", 0);
					bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					first_bf = NULL;
				}

				DEBUG("normal init for %s:", d_mem->spel);
				asm_declare_init(sec, di_to_use, d_mem->ref);
			}

			if(type_is_incomplete_array(d_mem->ref)){
				UCC_ASSERT(!mem[1], "flex-arr not at end");
			}else if(!d_mem->bits.var.field_width || d_mem->bits.var.first_bitfield){
				unsigned last_sz = type_size(d_mem->ref, NULL);

				end_of_last = d_mem->bits.var.struct_offset + last_sz;
				DEBUG("done with member \"%s\", end_of_last = %d",
						d_mem->spel, end_of_last);
			}
		}

		if(nbitfields)
			bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
		free(bitfields);

		/* need to pad to struct size */
		asm_declare_pad(sec,
				sue_size(sue, NULL) - end_of_last,
				"struct tail");

	}else if((r = type_is(tfor, type_array))){
		size_t i, len;
		decl_init **p;
		type *next = type_next(tfor);

		UCC_ASSERT(init->type == decl_init_brace, "unbraced array");

		if(type_is_incomplete_array(tfor)){
			len = dynarray_count(init->bits.ar.inits);
		}else{
			UCC_ASSERT(type_is_complete(tfor), "incomplete array/type init");
			len = type_array_len(tfor);
		}

		for(i = len, p = init->bits.ar.inits;
				i > 0;
				i--)
		{
			decl_init *this = NULL;
			if(*p){
				this = *p++;

				if(this != DYNARRAY_NULL && this->type == decl_init_copy){
					/*fprintf(f, "# copy from %lu\n", DECL_INIT_COPY_IDX(this, init));*/
					struct init_cpy *icpy = *this->bits.range_copy;
					/* resolve the copy */
					this = icpy->range_init;
				}
			}

			asm_declare_init(sec, this, next);
		}

	}else if((r = type_is_primitive(tfor, type_union))){
		/* union inits are decl_init_brace with spaces up to the first union init,
		 * then NULL/end of the init-array */
		struct_union_enum_st *sue = type_is_s_or_u(r);
		unsigned i, sub = 0;
		decl_init *u_init;

		UCC_ASSERT(init->type == decl_init_brace, "brace init expected");

		/* skip the empties until we get to one */
		for(i = 0; init->bits.ar.inits[i] == DYNARRAY_NULL; i++);

		if((u_init = init->bits.ar.inits[i])){
			decl *mem = sue->members[i]->struct_member;
			type *mem_r = mem->ref;

			/* union init, member at index `i' */
			if(mem->bits.var.field_width){
				/* we know it's integral */
				struct bitfield_val bfv;

				ASSERT_SCALAR(u_init);

				bitfield_val_set(&bfv, u_init->bits.expr, mem->bits.var.field_width);

				asm_declare_init_bitfields(sec, &bfv, 1, mem_r);
			}else{
				asm_declare_init(sec, u_init, mem_r);
			}

			sub = type_size(mem_r, NULL);
		} /* else null union init */

		asm_declare_pad(sec,
				type_size(r, NULL) - sub,
				"union extra");

	}else{
		/* scalar */
		expr *exp = init->bits.expr;

		UCC_ASSERT(init->type == decl_init_scalar, "scalar init expected");

		/* exp->tree_type should match tfor */
		{
			char buf[TYPE_STATIC_BUFSIZ];

			UCC_ASSERT(
					type_cmp(exp->tree_type, tfor, TYPE_CMP_ALLOW_TENATIVE_ARRAY) != TYPE_NOT_EQUAL,
					"mismatching init types: %s and %s",
					type_to_str_r(buf, exp->tree_type),
					type_to_str(tfor));
		}

		/* use tfor, since "abc" has type (char[]){(int)'a', (int)'b', ...} */
		DEBUG("  scalar init for %s:", type_to_str(tfor));
		static_val(sec, tfor, exp);
	}
}
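Static initialisers of the kind each branch emits (illustrative; the compound-literal copy is the extension the code above explicitly handles):

/* struct: members in order, with padding and bitfield grouping */
struct S { char c; int bf1 : 3, bf2 : 5; int tail; };
static struct S s = { 'a', 1, 2, 3 };

/* copy-init from a compound literal (accepted here as in GCC) */
static struct S t = (struct S){ 'b', 4, 5, 6 };

/* array: one recursive call per element, missing elements padded */
static int arr[4] = { 1, 2 };

/* union: skip to the initialised member, pad up to the union size */
union U { char c; long l; };
static union U u = { 'x' };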
Example No. 8
static enum type_cmp type_cmp_r(
		type *const orig_a,
		type *const orig_b,
		enum type_cmp_opts opts)
{
	enum type_cmp ret;
	type *a, *b;
	int subchk = 1;

	if(!orig_a || !orig_b)
		return orig_a == orig_b ? TYPE_EQUAL : TYPE_NOT_EQUAL;

	a = type_skip_all(orig_a);
	b = type_skip_all(orig_b);

	/* array/func decay takes care of any array->ptr checks */
	if(a->type != b->type){
		/* allow _Bool <- pointer */
		if(type_is_primitive(a, type__Bool) && type_is_ptr(b))
			return TYPE_CONVERTIBLE_IMPLICIT;

		/* allow int <-> ptr (or block) */
		if((type_is_ptr_or_block(a) && type_is_integral(b))
		|| (type_is_ptr_or_block(b) && type_is_integral(a)))
		{
			return TYPE_CONVERTIBLE_EXPLICIT;
		}

		/* allow void <- anything */
		if(type_is_void(a))
			return TYPE_CONVERTIBLE_IMPLICIT;

		/* allow block <-> fnptr */
		if((type_is_fptr(a) && type_is(b, type_block))
		|| (type_is_fptr(b) && type_is(a, type_block)))
		{
			return TYPE_CONVERTIBLE_EXPLICIT;
		}

		return TYPE_NOT_EQUAL;
	}

	switch(a->type){
		case type_auto:
			ICE("__auto_type");

		case type_btype:
			subchk = 0;
			ret = btype_cmp(a->bits.type, b->bits.type);
			break;

		case type_array:
			if(a->bits.array.is_vla || b->bits.array.is_vla){
				/* fine, pretend they're equal even if different expressions */
				ret = TYPE_EQUAL_TYPEDEF;

			}else{
				const int a_has_sz = !!a->bits.array.size;
				const int b_has_sz = !!b->bits.array.size;

				if(a_has_sz && b_has_sz){
					integral_t av = const_fold_val_i(a->bits.array.size);
					integral_t bv = const_fold_val_i(b->bits.array.size);

					if(av != bv)
						return TYPE_NOT_EQUAL;
				}else if(a_has_sz != b_has_sz){
					if((opts & TYPE_CMP_ALLOW_TENATIVE_ARRAY) == 0)
						return TYPE_NOT_EQUAL;
				}
			}

			/* next */
			break;

		case type_block:
		case type_ptr:
			break;

		case type_cast:
		case type_tdef:
		case type_attr:
		case type_where:
			ICE("should've been skipped");

		case type_func:
			switch(funcargs_cmp(a->bits.func.args, b->bits.func.args)){
				case FUNCARGS_EXACT_EQUAL:
				case FUNCARGS_IMPLICIT_CONV:
					break;
				default:
					/* "void (int)" and "void (int, int)" aren't equal,
					 * but a cast can soon fix it */
					return TYPE_CONVERTIBLE_EXPLICIT;
			}
			break;
	}

	if(subchk)
		ret = type_cmp_r(a->ref, b->ref, opts);

	if(ret == TYPE_NOT_EQUAL
	&& a->type == type_func)
	{
		/* "int (int)" and "void (int)" aren't equal - but castable */
		ret = TYPE_CONVERTIBLE_EXPLICIT;
	}

	if(ret == TYPE_NOT_EQUAL
	&& a->type == type_ptr
	&& fopt_mode & FOPT_PLAN9_EXTENSIONS)
	{
		/* allow b to be an anonymous member of a, if pointers */
		struct_union_enum_st *a_sue = type_is_s_or_u(a),
		                     *b_sue = type_is_s_or_u(b);

		if(a_sue && b_sue /* already know they aren't equal */){
			/* b_sue has an a_sue,
			 * the implicit cast adjusts to return said a_sue */
			if(struct_union_member_find_sue(b_sue, a_sue))
				return TYPE_CONVERTIBLE_IMPLICIT;
		}
	}

	/* allow ptr <-> ptr */
	if(ret == TYPE_NOT_EQUAL && type_is_ptr(a) && type_is_ptr(b))
		ret = TYPE_CONVERTIBLE_EXPLICIT;

	/* char * and int * are explicitly conv.,
	 * even though char and int are implicit */
	if(ret == TYPE_CONVERTIBLE_IMPLICIT && a->type == type_ptr)
		ret = TYPE_CONVERTIBLE_EXPLICIT;

	if(a->type == type_ptr || a->type == type_block){
		switch(ret){
#define MAP(a, b) case a: ret = b; break
			MAP(TYPE_QUAL_ADD, TYPE_QUAL_POINTED_ADD);
			MAP(TYPE_QUAL_SUB, TYPE_QUAL_POINTED_SUB);
			MAP(TYPE_QUAL_POINTED_ADD, TYPE_QUAL_NESTED_CHANGE);
			MAP(TYPE_QUAL_POINTED_SUB, TYPE_QUAL_NESTED_CHANGE);
#undef MAP
			default:
				break;
		}
	}

	if(ret & TYPE_EQUAL_ANY){
		enum type_qualifier a_qual = type_qual(orig_a);
		enum type_qualifier b_qual = type_qual(orig_b);

		if(a_qual && b_qual){
			switch(type_qual_cmp(a_qual, b_qual)){
				case -1:
					/* a has more */
					ret = TYPE_QUAL_ADD;
					break;
				case 1:
					/* b has more */
					ret = TYPE_QUAL_SUB;
					break;
			}
		}else if(a_qual){
			ret = TYPE_QUAL_ADD;
		}else if(b_qual){
			ret = TYPE_QUAL_SUB;
		} /* else neither are casts */
	}

	if(ret == TYPE_EQUAL){
		int at = orig_a->type == type_tdef;
		int bt = orig_b->type == type_tdef;

		if(at != bt){
			/* one is a typedef */
			ret = TYPE_EQUAL_TYPEDEF;
		}else if(at){
			/* both typedefs */
			if(orig_a->bits.tdef.decl != orig_b->bits.tdef.decl){
				ret = TYPE_EQUAL_TYPEDEF;
			}
		}
		/* else no typedefs */
	}

	return ret;
}
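A rough, non-exhaustive map from type pairs to the kind of result the comparison produces, written as a comment so the enumerators above can be read against concrete C types:

/*
 *   int            vs  int                 TYPE_EQUAL
 *   int            vs  typedef'd int       TYPE_EQUAL_TYPEDEF
 *   const int      vs  int                 TYPE_QUAL_ADD (first operand has more qualifiers)
 *   char *         vs  int *               TYPE_CONVERTIBLE_EXPLICIT (implicit conv. bumped for pointers)
 *   _Bool          vs  void *              TYPE_CONVERTIBLE_IMPLICIT (_Bool <- pointer)
 *   int            vs  void *              TYPE_CONVERTIBLE_EXPLICIT (int <-> pointer)
 *   void (int)     vs  void (int, int)     TYPE_CONVERTIBLE_EXPLICIT (argument-count mismatch)
 *   int[3]         vs  int[]               TYPE_NOT_EQUAL unless TYPE_CMP_ALLOW_TENATIVE_ARRAY is set
 */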
Example No. 9
void fold_expr_funcall(expr *e, symtable *stab)
{
	type *func_ty;
	funcargs *args_from_decl;
	char *sp = NULL;
	unsigned count_decl;

	check_implicit_funcall(e, stab, &sp);

	FOLD_EXPR(e->expr, stab);
	func_ty = e->expr->tree_type;

	if(!type_is_callable(func_ty)){
		warn_at_print_error(&e->expr->where,
				"%s-expression (type '%s') not callable",
				expr_str_friendly(e->expr, 0),
				type_to_str(func_ty));

		fold_had_error = 1;

		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	e->tree_type = type_func_call(func_ty, &args_from_decl);

	/* func count comparison, only if the func has arg-decls, or the func is f(void) */
	UCC_ASSERT(args_from_decl, "no funcargs for decl %s", sp);

	count_decl = dynarray_count(args_from_decl->arglist);

	if(check_arg_counts(args_from_decl, count_decl, e->funcargs, e, sp))
		return;

	if(e->funcargs){
		check_arg_voidness_and_nonnulls(
				e, stab,
				args_from_decl, count_decl,
				e->funcargs, sp);
	}

	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl))
		check_arg_types(args_from_decl, e->funcargs, stab, sp, &e->where);

	if(e->funcargs)
		default_promote_args(e->funcargs, count_decl, stab);

	if(type_is_s_or_u(e->tree_type)){
		/* handled transparently by the backend */
		e->f_islval = expr_is_lval_struct;

		cc1_warn_at(&e->expr->where,
				aggregate_return,
				"called function returns aggregate (%s)",
				type_to_str(e->tree_type));
	}

	/* attr */
	{
		type *fnty = e->expr->tree_type;

		/* look through decays */
		if(expr_kind(e->expr, cast) && expr_cast_is_lval2rval(e->expr))
			fnty = expr_cast_child(e->expr)->tree_type;

		format_check_call(fnty, e->funcargs, args_from_decl->variadic);

		sentinel_check(
				&e->where, e,
				e->funcargs, args_from_decl->variadic,
				count_decl, stab);
	}

	/* check the subexp tree type to get the funcall attributes */
	if(func_or_builtin_attr_present(e, attr_warn_unused))
		e->freestanding = 0; /* needs use */

	if(sp && !cc1_fopt.freestanding)
		check_standard_funcs(sp, e->funcargs);
}
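Hypothetical call sites exercising the main diagnostics (messages quoted from the code above):

struct Agg { int a[8]; };
struct Agg returns_aggregate(void);
int not_a_function;

void caller(void)
{
	struct Agg r = returns_aggregate(); /* warning: "called function returns aggregate (struct Agg)";
	                                     * the struct return itself is handled by the backend */
	/* not_a_function();                -- error: "...-expression (type 'int') not callable"  */
	(void)r;
}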
Example No. 10
void fold_expr_struct(expr *e, symtable *stab)
{
	/*
	 * lhs = any ptr-to-struct expr
	 * rhs = struct member ident
	 */
	const int ptr_expect = !e->expr_is_st_dot;
	struct_union_enum_st *sue;
	char *spel;

	fold_expr_no_decay(e->lhs, stab);
	/* don't fold the rhs - just a member name */

	if(e->rhs){
		UCC_ASSERT(expr_kind(e->rhs, identifier),
				"struct/union member not identifier (%s)", e->rhs->f_str());

		UCC_ASSERT(!e->bits.struct_mem.d, "already have a struct-member");

		spel = e->rhs->bits.ident.spel;
	}else{
		UCC_ASSERT(e->bits.struct_mem.d, "no member specified already?");
		spel = NULL;
	}

	/* we access a struct, of the right ptr depth */
	{
		type *r = e->lhs->tree_type;

		if(ptr_expect){
			type *rtest = type_is(r, type_ptr);

			if(!rtest && !(rtest = type_is(r, type_array)))
				goto err;

			r = rtest->ref; /* safe - rtest is a pointer or array */
		}

		if(!(sue = type_is_s_or_u(r))){
err:
			die_at(&e->lhs->where, "'%s' (%s-expr) is not a %sstruct or union (member %s)",
					type_to_str(e->lhs->tree_type),
					e->lhs->f_str(),
					ptr_expect ? "pointer to " : "",
					spel);
		}
	}

	if(!sue_complete(sue)){
		char wbuf[WHERE_BUF_SIZ];

		die_at(&e->lhs->where, "%s incomplete type (%s)\n"
				"%s: note: forward declared here",
				ptr_expect
					? "dereferencing pointer to"
					: "accessing member of",
				type_to_str(e->lhs->tree_type),
				where_str_r(wbuf, &sue->where));
	}

	if(spel){
		/* found the struct, find the member */
		decl *d_mem = struct_union_member_find(sue, spel,
				&e->bits.struct_mem.extra_off, NULL);

		if(!d_mem)
			die_at(&e->where, "%s %s has no member named \"%s\"",
					sue_str(sue), sue->spel, spel);

		e->rhs->tree_type = (e->bits.struct_mem.d = d_mem)->ref;
	}/* else already have the member */

	/*
	 * if it's a.b, convert to (&a)->b for asm gen
	 * e = { lhs = "a", rhs = "b", type = dot }
	 * e = {
	 *   type = ptr,
	 *   lhs = { cast<void *>, expr = { expr = "a", type = addr } },
	 *   rhs = "b",
	 * }
	 */
	if(!ptr_expect){
		expr *cast, *addr;

		addr = expr_new_addr(e->lhs);
		cast = expr_new_cast(addr,
				type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
				1);

		e->lhs = cast;
		e->expr_is_st_dot = 0;

		FOLD_EXPR(e->lhs, stab);
	}

	/* pull qualifiers from the struct to the member */
	e->tree_type = type_qualify(
			e->bits.struct_mem.d->ref,
			type_qual(e->lhs->tree_type));
}
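The dot-to-arrow rewrite and the error paths in source terms (illustrative):

struct Point { int x, y; };

int member(struct Point p, struct Point *pp)
{
	int a = p.x;   /* dot access: folded as (&p)->x via the cast-to-void* rewrite above */
	int b = pp->y; /* arrow access: lhs must be a pointer (or array) to the struct      */

	/* p.z;    -- "struct Point has no member named \"z\""          */
	/* (1).x;  -- "'int' (...) is not a struct or union (member x)" */
	return a + b;
}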