Example #1
void flow_fold(stmt_flow *flow, symtable **pstab)
{
	if(flow){
		decl **i;

		*pstab = flow->for_init_symtab;

		fold_shadow_dup_check_block_decls(*pstab);

		/* sanity check on _flow_ vars only */
		for(i = symtab_decls(*pstab); i && *i; i++){
			decl *const d = *i;

			switch((enum decl_storage)(d->store & STORE_MASK_STORE)){
				case store_auto:
				case store_default:
				case store_register:
					break;
				default:
					die_at(&d->where, "%s variable in statement-initialisation",
							decl_store_to_str(d->store));
			}

			/* block decls/for-init decls must be complete */
			fold_check_decl_complete(d);

			if(d->bits.var.init.expr)
				FOLD_EXPR(d->bits.var.init.expr, *pstab);
		}
	}
}
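A user-level sketch of what flow_fold accepts and rejects in statement-initialisation declarations (illustrative only, not part of the compiler source; the rejected cases are kept under #if 0):

void example_flow_decls(void)
{
	for(int i = 0; i < 3; i++)          /* ok: store_default */
		;
	for(register int j = 0; j < 3; j++) /* ok: store_register */
		;
#if 0
	for(static int k = 0; k < 3; k++)   /* rejected: "static variable in
	                                     * statement-initialisation" */
		;
	for(struct not_defined x; ; )       /* rejected by fold_check_decl_complete */
		;
#endif
}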
static void check_arg_voidness_and_nonnulls(
		expr *callexpr, symtable *stab,
		funcargs *args_from_decl, unsigned count_decl,
		expr **exprargs, char *sp)
{
	/* this block folds the args and type-checks */
	unsigned long nonnulls = 0;
	unsigned i;
	attribute *da;

	if((da = func_or_builtin_attr_present(callexpr, attr_nonnull)))
		nonnulls = da->bits.nonnull_args;

	for(i = 0; exprargs[i]; i++){
		expr *arg = FOLD_EXPR(exprargs[i], stab);
		char buf[64];

		ARG_BUF(buf, i, sp);

		if(fold_check_expr(arg, FOLD_CHK_NO_ST_UN, buf))
			continue;

		if(i < count_decl && (nonnulls & (1 << i))
		&& type_is_ptr(args_from_decl->arglist[i]->ref)
		&& expr_is_null_ptr(arg, NULL_STRICT_INT))
		{
			cc1_warn_at(&arg->where,
					attr_nonnull,
					"null passed where non-null required (arg %d)",
					i + 1);
		}
	}
}
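A hypothetical caller showing the diagnostic emitted above; the declaration and its nonnull index are invented for illustration:

__attribute__((nonnull(1)))
extern void fill_buffer(char *dst, unsigned len);

void example_nonnull(void)
{
	fill_buffer((char *)0, 16); /* "null passed where non-null required (arg 1)" */
}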
Example #3
static void parse_test_init_expr(stmt *t, struct stmt_ctx *ctx)
{
	where here;

	where_cc1_current(&here);

	EAT(token_open_paren);

	/* if C99, we create a new scope here, for e.g.
	 * if(5 > (enum { a, b })a){ return a; } return b;
	 * "return b" can't see 'b' since its scope is only the if
	 *
	 * C90 drags the scope of the enum up to the enclosing block
	 */
	if(cc1_std >= STD_C99){
		ctx->scope = t->symtab = symtab_new(t->symtab, &here);
	}

	if(parse_at_decl(ctx->scope, 1)){
		decl *d;

		/* if we are at a type, push a scope for it, for
		 * for(int i ...), if(int i = ...) etc
		 */
		symtable *init_scope = symtab_new(t->symtab, &here);

		t->flow = stmt_flow_new(init_scope);

		d = parse_decl(
				DECL_SPEL_NEED, 0,
				init_scope, init_scope);

		UCC_ASSERT(d, "at decl, but no decl?");

		UCC_ASSERT(
				t->flow->for_init_symtab == init_scope,
				"wrong scope for stmt-init");

		flow_fold(t->flow, &t->symtab);
		ctx->scope = t->symtab;

		/* `d' is added to the scope implicitly */

		if(accept(token_comma)){
			/* if(int i = 5, i > f()){ ... } */
			t->expr = parse_expr_exp(ctx->scope, 0);
		}else{
			/* if(int i = 5) -> if(i) */
			t->expr = expr_new_identifier(d->spel);
		}
	}else{
		t->expr = parse_expr_exp(t->symtab, 0);
	}
	FOLD_EXPR(t->expr, t->symtab);

	EAT(token_close_paren);
}
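A sketch of the scoping rule and the declaration-in-condition handling described in the comments above, kept in a comment since each snippet is only valid under a particular standard or extension:

/*
 * C99 vs C90 scope of a type declared in the controlling expression:
 *
 *   if(5 > (enum { a, b })a){ return a; }  // 'a' and 'b' declared here
 *   return b;                              // C90: still in scope
 *                                          // C99: error - scope ended with the if
 *
 * The parse_at_decl() branch handles the declaration-in-condition extension:
 *
 *   if(int i = compute())         // folded as if(i), with i in its own scope
 *   if(int i = compute(), i > 5)  // comma form: the second expression is the test
 */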
void fold_expr_assign_compound(expr *e, symtable *stab)
{
	const char *const desc = "compound assignment";
#define lvalue e->lhs

	fold_inc_writes_if_sym(lvalue, stab);

	fold_expr_nodecay(e->lhs, stab);
	FOLD_EXPR(e->rhs, stab);

	fold_check_expr(e->lhs, FOLD_CHK_NO_ST_UN, desc);
	fold_check_expr(e->rhs, FOLD_CHK_NO_ST_UN, desc);

	/* skip the addr we inserted */
	if(!expr_must_lvalue(lvalue, desc)){
		/* prevent ICE from type_size(vla), etc */
		e->tree_type = lvalue->tree_type;
		return;
	}

	expr_assign_const_check(lvalue, &e->where);

	fold_check_restrict(lvalue, e->rhs, desc, &e->where);

	UCC_ASSERT(op_can_compound(e->bits.compoundop.op), "non-compound op in compound expr");

	/*expr_promote_int_if_smaller(&e->lhs, stab);
	 * lhs int promotion is handled in code-gen */
	expr_promote_int_if_smaller(&e->rhs, stab);

	{
		type *tlhs, *trhs;
		type *resolved = op_required_promotion(
				e->bits.compoundop.op, lvalue, e->rhs,
				&e->where, desc,
				&tlhs, &trhs);

		if(tlhs){
			/* must cast the lvalue, then down cast once the operation is done
			 * special handling for expr_kind(e->lhs, cast) is done in the gen-code
			 */
			e->bits.compoundop.upcast_ty = tlhs;

		}else if(trhs){
			fold_insert_casts(trhs, &e->rhs, stab);
		}

		e->tree_type = lvalue->tree_type;

		(void)resolved;
		/*type_free_1(resolved); XXX: memleak */
	}

	/* type check is done in op_required_promotion() */
#undef lvalue
}
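What the tlhs/upcast_ty handling corresponds to at the source level, as a rough sketch (the exact cast placement lives in code-gen):

void example_compound_assign(void)
{
	unsigned char c = 200;

	/* conceptually c = (unsigned char)((int)c + 100):
	 * the lvalue is read once, upcast to the promoted operand type
	 * (upcast_ty), the addition is performed, and the result is cast
	 * back down on the store */
	c += 100;

	(void)c;
}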
static expr *optional_parened_expr(symtable *scope)
{
	if(accept(token_open_paren)){
		expr *e;

		if(accept(token_close_paren))
			goto out;

		e = PARSE_EXPR_NO_COMMA(scope, 0);
		FOLD_EXPR(e, scope);

		EAT(token_close_paren);

		return e;
	}
out:
	return NULL;
}
void fold_stmt_case_range(stmt *s)
{
	integral_t lv, rv;

	FOLD_EXPR(s->expr,  s->symtab);
	FOLD_EXPR(s->expr2, s->symtab);

	fold_check_expr(s->expr,
			FOLD_CHK_INTEGRAL | FOLD_CHK_CONST_I,
			"case-range");
	lv = const_fold_val_i(s->expr);

	fold_check_expr(s->expr2,
			FOLD_CHK_INTEGRAL | FOLD_CHK_CONST_I,
			"case-range");
	rv = const_fold_val_i(s->expr2);

	if(lv >= rv)
		die_at(&s->where, "case range equal or inverse");

	s->bits.case_lbl = out_label_case(CASE_RANGE, lv);
	fold_stmt_and_add_to_curswitch(s, &s->bits.case_lbl);
}
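The GNU case-range extension this folds, at the source level (illustrative; the lv >= rv check above rejects empty and single-value ranges):

int example_case_range(int ch)
{
	switch(ch){
		case '0' ... '9': /* lv = '0', rv = '9' */
			return 1;
		default:
			return 0;
	}
}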
static void sentinel_check(where *w, expr *e, expr **args,
		const int variadic, const int nstdargs, symtable *stab)
{
#define ATTR_WARN_RET(w, ...) \
	do{ cc1_warn_at(w, attr_sentinel, __VA_ARGS__); return; }while(0)

	attribute *attr = func_or_builtin_attr_present(e, attr_sentinel);
	int i, nvs;
	expr *sentinel;

	if(!attr)
		return;

	if(!variadic)
		return; /* warning emitted elsewhere, on the decl */

	if(attr->bits.sentinel){
		consty k;

		FOLD_EXPR(attr->bits.sentinel, stab);
		const_fold(attr->bits.sentinel, &k);

		if(k.type != CONST_NUM || !K_INTEGRAL(k.bits.num))
			die_at(&attr->where, "sentinel attribute not reducible to integer constant");

		i = k.bits.num.val.i;
	}else{
		i = 0;
	}

	nvs = dynarray_count(args) - nstdargs;

	if(nvs == 0)
		ATTR_WARN_RET(w, "not enough variadic arguments for a sentinel");

	UCC_ASSERT(nvs >= 0, "too few args");

	if(i >= nvs)
		ATTR_WARN_RET(w, "sentinel index is not a variadic argument");

	sentinel = args[(nstdargs + nvs - 1) - i];

	/* must be of a pointer type, printf("%p\n", 0) is undefined */
	if(!expr_is_null_ptr(sentinel, NULL_STRICT_ANY_PTR))
		ATTR_WARN_RET(&sentinel->where, "sentinel argument expected (got %s)",
				type_to_str(sentinel->tree_type));

#undef ATTR_WARN_RET
}
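A typical execl-style declaration this check applies to; the function name is invented for illustration:

__attribute__((sentinel)) /* sentinel index 0: the last variadic argument */
extern int spawn(const char *path, const char *arg0, ...);

void example_sentinel(void)
{
	spawn("/bin/sh", "sh", (char *)0); /* ok: trailing argument is a null pointer */
#if 0
	spawn("/bin/sh", "sh", 0);         /* warned: plain 0 is an int, not a
	                                    * pointer-typed null - the same reason
	                                    * printf("%p\n", 0) is undefined */
#endif
}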
Example #8
void fold_stmt_goto(stmt *s)
{
	if(!symtab_func(s->symtab))
		die_at(&s->where, "goto outside of a function");

	if(s->expr){
		FOLD_EXPR(s->expr, s->symtab);
	}else{
		(s->bits.lbl.label =
		 symtab_label_find_or_new(
			 s->symtab, s->bits.lbl.spel, &s->where))
			->uses++;

		dynarray_add(&s->bits.lbl.label->jumpers, s);
	}
}
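The two goto forms folded here; the s->expr branch is GNU computed goto (labels-as-values), sketched below:

void example_goto(int n)
{
	void *target = n ? &&done : &&again; /* GNU labels-as-values */

again:
	if(n-- > 0)
		goto *target; /* the s->expr branch: the target expression is folded */
	goto done;        /* the label branch: use-count and jumpers list are updated */
done:
	return;
}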
void fold_expr_assign_compound(expr *e, symtable *stab)
{
	expr *const lvalue = e->lhs;

	fold_inc_writes_if_sym(lvalue, stab);

	fold_expr_no_decay(e->lhs, stab);
	FOLD_EXPR(e->rhs, stab);

	fold_check_expr(e->lhs, FOLD_CHK_NO_ST_UN, "compound assignment");
	fold_check_expr(e->rhs, FOLD_CHK_NO_ST_UN, "compound assignment");

	/* skip the addr we inserted */
	expr_must_lvalue(lvalue);

	expr_assign_const_check(lvalue, &e->where);

	fold_check_restrict(lvalue, e->rhs, "compound assignment", &e->where);

	UCC_ASSERT(op_can_compound(e->op), "non-compound op in compound expr");

	{
		type *tlhs, *trhs;
		type *resolved = op_required_promotion(e->op, lvalue, e->rhs, &e->where, &tlhs, &trhs);

		if(tlhs){
			/* must cast the lvalue, then down cast once the operation is done
			 * special handling for expr_kind(e->lhs, cast) is done in the gen-code
			 */
			fold_insert_casts(tlhs, &e->lhs, stab);

			/* casts may be inserted anyway, and don't want to rely on
			 * .implicit_cast stuff */
			e->bits.compound_upcast = 1;

		}else if(trhs){
			fold_insert_casts(trhs, &e->rhs, stab);
		}

		e->tree_type = lvalue->tree_type;

		(void)resolved;
		/*type_free_1(resolved); XXX: memleak */
	}

	/* type check is done in op_required_promotion() */
}
Example #10
void fold_expr_str(expr *e, symtable *stab)
{
	const stringlit *const strlit = e->bits.strlit.lit_at.lit;
	expr *sz;

	sz = expr_new_val(strlit->len);
	FOLD_EXPR(sz, stab);

	/* (const? char []) */
	e->tree_type = type_array_of(
			type_qualify(
				type_nav_btype(
					cc1_type_nav,
					strlit->wide ? type_wchar : type_nchar),
				e->bits.strlit.is_func ? qual_const : qual_none),
			sz);
}
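The resulting types at the source level; the is_func case covers __func__, whose element type is const-qualified:

void example_strlit(void)
{
	/* "hi" has type (char [3]): e->tree_type above is array-of-char, length strlit->len */
	char narrow[sizeof "hi"] = "hi";

	/* __func__ is the is_func case: (const char []) */
	const char *name = __func__;

	(void)narrow;
	(void)name;
}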
Example #11
void fold_expr_if(expr *e, symtable *stab)
{
	const char *desc = "?:";
	consty konst;
	type *tt_l, *tt_r;

	FOLD_EXPR(e->expr, stab);
	const_fold(e->expr, &konst);

	fold_check_expr(e->expr, FOLD_CHK_NO_ST_UN, desc);

	if(e->lhs){
		e->lhs = fold_expr_nonstructdecay(e->lhs, stab);
		fold_check_expr(e->lhs,
				FOLD_CHK_ALLOW_VOID,
				"?: left operand");
	}

	e->rhs = fold_expr_nonstructdecay(e->rhs, stab);
	fold_check_expr(e->rhs,
			FOLD_CHK_ALLOW_VOID,
			"?: right operand");

	e->freestanding = (e->lhs ? e->lhs : e->expr)->freestanding || e->rhs->freestanding;

	/*

	One operand                            Other operand                         Result
	Arithmetic                             Arithmetic                            Arithmetic type after usual arithmetic conversions
	Structure or union type                Compatible structure or union type    Structure or union type with all the qualifiers on both operands
	void                                   void                                  void
	Pointer to compatible type             Pointer to compatible type            Pointer to type with all the qualifiers specified for the type
	Pointer to type                        NULL pointer (the constant 0)         Pointer to type
	Pointer to object or incomplete type   Pointer to void                       Pointer to void with all the qualifiers specified for the type

	GCC and Clang seem to relax the last rule:
	  a) resolve if either is any pointer, not just (void *)
	  b) resolve to a pointer to the incomplete-type
	*/

	tt_l = (e->lhs ? e->lhs : e->expr)->tree_type;
	tt_r = e->rhs->tree_type;


	/* C11 6.5.15 */
	if(type_is_arith(tt_l) && type_is_arith(tt_r)){
		/* 6.5.15 p4 */
		expr **middle_op = e->lhs ? &e->lhs : &e->expr;

		expr_check_sign(desc, *middle_op, e->rhs, &e->where);

		e->tree_type = op_promote_types(
				op_unknown,
				middle_op, &e->rhs, stab,
				&e->where, desc);

	}else if(type_is_void(tt_l) || type_is_void(tt_r)){
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);

	}else{
		const enum type_cmp cmp = type_cmp(tt_l, tt_r, 0);

		if((cmp & (TYPE_EQUAL_ANY | TYPE_QUAL_ADD | TYPE_QUAL_SUB))
		&& type_is_s_or_u(tt_l))
		{
			e->f_islval = expr_is_lval_struct;
			e->tree_type = type_qualify(tt_l, type_qual(tt_l) | type_qual(tt_r));

		}else{
			try_pointer_propagate(e, cmp, tt_l, tt_r);
		}
	}
}
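Source-level counterparts of the branches above (a sketch; a ?: b with the middle operand omitted is the GNU extension corresponding to e->lhs being NULL):

struct pair { int a, b; };

void example_cond(int pick, struct pair s1, struct pair s2)
{
	int i = 1;
	long l = 2;
	const int *cp = &i;
	volatile int *vp = &i;

	/* arithmetic operands: usual arithmetic conversions, result is long */
	long r1 = pick ? i : l;

	/* GNU a ?: b - the middle operand (e->lhs) is omitted */
	long r2 = pick ?: l;

	/* compatible struct operands: result is the struct type (f_islval set above) */
	struct pair r3 = pick ? s1 : s2;

	/* compatible pointers: qualifiers merge, result is const volatile int * */
	const volatile int *r4 = pick ? cp : vp;

	(void)r1; (void)r2; (void)r3; (void)r4;
}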
Example #12
void fold_expr_assign(expr *e, symtable *stab)
{
	sym *lhs_sym = NULL;
	int is_struct_cpy = 0;

	lhs_sym = fold_inc_writes_if_sym(e->lhs, stab);

	fold_expr_nodecay(e->lhs, stab);
	fold_expr_nodecay(e->rhs, stab);

	if(lhs_sym)
		lhs_sym->nreads--; /* cancel the read that fold_ident thinks it got */

	is_struct_cpy = !!type_is_s_or_u(e->lhs->tree_type);
	if(!is_struct_cpy)
		FOLD_EXPR(e->rhs, stab); /* lval2rval the rhs */

	if(type_is_primitive(e->rhs->tree_type, type_void)){
		fold_had_error = 1;
		warn_at_print_error(&e->where, "assignment from void expression");
		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	expr_must_lvalue(e->lhs, "assignment");

	if(!e->assign_is_init)
		expr_assign_const_check(e->lhs, &e->where);

	fold_check_restrict(e->lhs, e->rhs, "assignment", &e->where);

	/* this makes sense, but it's also critical for code-gen:
	 * if we assign to a volatile lvalue, we don't want the volatile-ness
	 * to propagate, as we are now an rvalue, and don't want our value read
	 * as we decay
	 */
	e->tree_type = type_unqualify(e->lhs->tree_type);

	/* type check */
	fold_type_chk_and_cast_ty(
			e->lhs->tree_type, &e->rhs,
			stab, &e->where, "assignment");

	/* the only way to get a value into a bitfield (aside from memcpy / indirection) is via this
	 * hence we're fine doing the truncation check here
	 */
	{
		decl *mem;
		if(expr_kind(e->lhs, struct)
		&& (mem = e->lhs->bits.struct_mem.d) /* maybe null from s->non_present_memb */
		&& mem->bits.var.field_width)
		{
			bitfield_trunc_check(mem, e->rhs);
		}
	}


	if(is_struct_cpy){
		e->expr = builtin_new_memcpy(
				e->lhs, e->rhs,
				type_size(e->rhs->tree_type, &e->rhs->where));

		FOLD_EXPR(e->expr, stab);

		/* set is_lval, so we can participate in struct-copy chains
		 * FIXME: don't interpret as an lvalue, e.g. (a = b) = c;
		 * this is currently special cased in expr_is_lval()
		 *
		 * CHECK THIS
		 */
		if(cc1_backend == BACKEND_ASM)
			e->f_gen = lea_assign_lhs;
		e->f_islval = expr_is_lval_struct;
	}
}
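Source-level view of the two special cases handled above: struct assignment is rewritten as a builtin memcpy, and stores into bitfields are checked for truncation (a rough sketch):

struct flags { unsigned mode : 2; };
struct blob { char buf[16]; };

void example_assign(struct blob *dst, const struct blob *src)
{
	struct flags f;

	/* is_struct_cpy: rewritten as a memcpy of sizeof *src bytes */
	*dst = *src;

	/* bitfield store: bitfield_trunc_check() can warn that 5 does not
	 * fit in the 2-bit field */
	f.mode = 5;

	(void)f;
}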
void fold_expr_funcall(expr *e, symtable *stab)
{
	type *func_ty;
	funcargs *args_from_decl;
	char *sp = NULL;
	unsigned count_decl;

	check_implicit_funcall(e, stab, &sp);

	FOLD_EXPR(e->expr, stab);
	func_ty = e->expr->tree_type;

	if(!type_is_callable(func_ty)){
		warn_at_print_error(&e->expr->where,
				"%s-expression (type '%s') not callable",
				expr_str_friendly(e->expr, 0),
				type_to_str(func_ty));

		fold_had_error = 1;

		e->tree_type = type_nav_btype(cc1_type_nav, type_int);
		return;
	}

	e->tree_type = type_func_call(func_ty, &args_from_decl);

	/* func count comparison, only if the func has arg-decls, or the func is f(void) */
	UCC_ASSERT(args_from_decl, "no funcargs for decl %s", sp);

	count_decl = dynarray_count(args_from_decl->arglist);

	if(check_arg_counts(args_from_decl, count_decl, e->funcargs, e, sp))
		return;

	if(e->funcargs){
		check_arg_voidness_and_nonnulls(
				e, stab,
				args_from_decl, count_decl,
				e->funcargs, sp);
	}

	if(!FUNCARGS_EMPTY_NOVOID(args_from_decl))
		check_arg_types(args_from_decl, e->funcargs, stab, sp, &e->where);

	if(e->funcargs)
		default_promote_args(e->funcargs, count_decl, stab);

	if(type_is_s_or_u(e->tree_type)){
		/* handled transparently by the backend */
		e->f_islval = expr_is_lval_struct;

		cc1_warn_at(&e->expr->where,
				aggregate_return,
				"called function returns aggregate (%s)",
				type_to_str(e->tree_type));
	}

	/* attr */
	{
		type *fnty = e->expr->tree_type;

		/* look through decays */
		if(expr_kind(e->expr, cast) && expr_cast_is_lval2rval(e->expr))
			fnty = expr_cast_child(e->expr)->tree_type;

		format_check_call(fnty, e->funcargs, args_from_decl->variadic);

		sentinel_check(
				&e->where, e,
				e->funcargs, args_from_decl->variadic,
				count_decl, stab);
	}

	/* check the subexp tree type to get the funcall attributes */
	if(func_or_builtin_attr_present(e, attr_warn_unused))
		e->freestanding = 0; /* needs use */

	if(sp && !cc1_fopt.freestanding)
		check_standard_funcs(sp, e->funcargs);
}
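Declarations exercising the aggregate-return and unused-result checks above; the GCC-style attribute spelling warn_unused_result is assumed here and the names are invented for illustration:

struct big { int v[8]; };

__attribute__((warn_unused_result))
extern struct big make_big(void);

void example_funcall(void)
{
	/* "called function returns aggregate (struct big)" */
	struct big b = make_big();

	/* the attr_warn_unused check clears e->freestanding, so discarding
	 * the result can be diagnosed */
	make_big();

	(void)b;
}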
Example #14
void fold_expr_struct(expr *e, symtable *stab)
{
	/*
	 * lhs = any ptr-to-struct expr
	 * rhs = struct member ident
	 */
	const int ptr_expect = !e->expr_is_st_dot;
	struct_union_enum_st *sue;
	char *spel;

	fold_expr_no_decay(e->lhs, stab);
	/* don't fold the rhs - just a member name */

	if(e->rhs){
		UCC_ASSERT(expr_kind(e->rhs, identifier),
				"struct/union member not identifier (%s)", e->rhs->f_str());

		UCC_ASSERT(!e->bits.struct_mem.d, "already have a struct-member");

		spel = e->rhs->bits.ident.spel;
	}else{
		UCC_ASSERT(e->bits.struct_mem.d, "no member specified already?");
		spel = NULL;
	}

	/* we access a struct, of the right ptr depth */
	{
		type *r = e->lhs->tree_type;

		if(ptr_expect){
			type *rtest = type_is(r, type_ptr);

			if(!rtest && !(rtest = type_is(r, type_array)))
				goto err;

			r = rtest->ref; /* safe - rtest is a pointer or array */
		}

		if(!(sue = type_is_s_or_u(r))){
err:
			die_at(&e->lhs->where, "'%s' (%s-expr) is not a %sstruct or union (member %s)",
					type_to_str(e->lhs->tree_type),
					e->lhs->f_str(),
					ptr_expect ? "pointer to " : "",
					spel);
		}
	}

	if(!sue_complete(sue)){
		char wbuf[WHERE_BUF_SIZ];

		die_at(&e->lhs->where, "%s incomplete type (%s)\n"
				"%s: note: forward declared here",
				ptr_expect
					? "dereferencing pointer to"
					: "accessing member of",
				type_to_str(e->lhs->tree_type),
				where_str_r(wbuf, &sue->where));
	}

	if(spel){
		/* found the struct, find the member */
		decl *d_mem = struct_union_member_find(sue, spel,
				&e->bits.struct_mem.extra_off, NULL);

		if(!d_mem)
			die_at(&e->where, "%s %s has no member named \"%s\"",
					sue_str(sue), sue->spel, spel);

		e->rhs->tree_type = (e->bits.struct_mem.d = d_mem)->ref;
	}/* else already have the member */

	/*
	 * if it's a.b, convert to (&a)->b for asm gen
	 * e = { lhs = "a", rhs = "b", type = dot }
	 * e = {
	 *   type = ptr,
	 *   lhs = { cast<void *>, expr = { expr = "a", type = addr } },
	 *   rhs = "b",
	 * }
	 */
	if(!ptr_expect){
		expr *cast, *addr;

		addr = expr_new_addr(e->lhs);
		cast = expr_new_cast(addr,
				type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
				1);

		e->lhs = cast;
		e->expr_is_st_dot = 0;

		FOLD_EXPR(e->lhs, stab);
	}

	/* pull qualifiers from the struct to the member */
	e->tree_type = type_qualify(
			e->bits.struct_mem.d->ref,
			type_qual(e->lhs->tree_type));
}
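The dot-to-arrow rewrite described in the comment above, seen from the source level (the cast to void * is an internal detail of the fold):

struct point { int x, y; };

int example_member(const struct point p, struct point *q)
{
	/* p.x is folded into the equivalent of (&p)->x for code-gen;
	 * the const on the struct is pulled onto the member's type */
	return p.x + q->y;
}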
Example #15
void fold_expr_if(expr *e, symtable *stab)
{
	consty konst;
	type *tt_l, *tt_r;

	FOLD_EXPR(e->expr, stab);
	const_fold(e->expr, &konst);

	fold_check_expr(e->expr, FOLD_CHK_NO_ST_UN, "if-expr");

	if(e->lhs){
		FOLD_EXPR(e->lhs, stab);
		fold_check_expr(e->lhs,
				FOLD_CHK_NO_ST_UN | FOLD_CHK_ALLOW_VOID,
				"if-lhs");
	}

	FOLD_EXPR(e->rhs, stab);
	fold_check_expr(e->rhs,
			FOLD_CHK_NO_ST_UN | FOLD_CHK_ALLOW_VOID,
			"if-rhs");


	/*

	One operand                            Other operand                         Result
	Arithmetic                             Arithmetic                            Arithmetic type after usual arithmetic conversions
	// Structure or union type             Compatible structure or union type    Structure or union type with all the qualifiers on both operands
	void                                   void                                  void
	Pointer to compatible type             Pointer to compatible type            Pointer to type with all the qualifiers specified for the type
	Pointer to type                        NULL pointer (the constant 0)         Pointer to type
	Pointer to object or incomplete type   Pointer to void                       Pointer to void with all the qualifiers specified for the type

	GCC and Clang seem to relax the last rule:
	  a) resolve if either is any pointer, not just (void *)
	  b) resolve to a pointer to the incomplete-type
	*/

	tt_l = (e->lhs ? e->lhs : e->expr)->tree_type;
	tt_r = e->rhs->tree_type;

	if(type_is_integral(tt_l) && type_is_integral(tt_r)){
		expr **middle_op = e->lhs ? &e->lhs : &e->expr;

		expr_check_sign("?:", *middle_op, e->rhs, &e->where);

		e->tree_type = op_promote_types(
				op_unknown,
				middle_op, &e->rhs, &e->where, stab);

	}else if(type_is_void(tt_l) || type_is_void(tt_r)){
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);

	}else if(type_cmp(tt_l, tt_r, 0) & TYPE_EQUAL_ANY){
		/* pointer to 'compatible' type */
		e->tree_type = type_qualify(tt_l,
				type_qual(tt_l) | type_qual(tt_r));

	}else{
		/* brace yourself. */
		int l_ptr_null = expr_is_null_ptr(
				e->lhs ? e->lhs : e->expr, NULL_STRICT_VOID_PTR);

		int r_ptr_null = expr_is_null_ptr(e->rhs, NULL_STRICT_VOID_PTR);

		int l_complete = !l_ptr_null && type_is_complete(tt_l);
		int r_complete = !r_ptr_null && type_is_complete(tt_r);

		if((l_complete && r_ptr_null) || (r_complete && l_ptr_null)){
			e->tree_type = l_ptr_null ? tt_r : tt_l;

		}else{
			int l_ptr = l_ptr_null || type_is(tt_l, type_ptr);
			int r_ptr = r_ptr_null || type_is(tt_r, type_ptr);

			if(l_ptr || r_ptr){
				fold_type_chk_warn(
						tt_l, tt_r, &e->where, "?: pointer type mismatch");

				/* qualified void * */
				e->tree_type = type_qualify(
						type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
						type_qual(tt_l) | type_qual(tt_r));

			}else{
				char buf[TYPE_STATIC_BUFSIZ];

				warn_at(&e->where, "conditional type mismatch (%s vs %s)",
						type_to_str(tt_l), type_to_str_r(buf, tt_r));

				e->tree_type = type_nav_btype(cc1_type_nav, type_void);
			}
		}
	}

	e->freestanding = (e->lhs ? e->lhs : e->expr)->freestanding || e->rhs->freestanding;
}
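The pointer fallback cases of the final else-branch, at the source level (a sketch; the mismatched-pointer case is a constraint violation in standard C that is degraded to a warning here, as in GCC and Clang):

void example_cond_ptrs(int pick, int *ip, void *vp, char *cp)
{
	/* pointer vs null pointer constant: the pointer type wins */
	int *r1 = pick ? ip : 0;

	/* pointer vs void *: result is a (qualified) void * */
	void *r2 = pick ? ip : vp;

	/* mismatched object pointers: "?: pointer type mismatch" is warned
	 * and the result degrades to void * */
	void *r3 = pick ? ip : cp;

	(void)r1; (void)r2; (void)r3;
}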