Example #1
static void blk_codegen(out_blk *blk, struct flush_state *st)
{
	char **i;

	/* before any instructions, if we have a pending jmpto and
	 * we aren't the target branch, we need to cut off the last
	 * block with a jump to said jmpto */
	if(st->jmpto){
		if(st->jmpto != blk)
			blk_jmpthread(st);
		else if(cc1_fopt.verbose_asm)
			asm_out_section(NULL, "\t# implicit jump to next line\n");
		st->jmpto = NULL;
	}

	asm_out_section(NULL, "%s: # %s\n", blk->lbl, blk->desc);
	if(blk->force_lbl)
		asm_out_section(NULL, "%s: # mustgen_spel\n", blk->force_lbl);

	out_dbg_labels_emit_release_v(&blk->labels.start);

	for(i = blk->insns; i && *i; i++)
		asm_out_section(NULL, "%s", *i);

	out_dbg_labels_emit_release_v(&blk->labels.end);
}
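
The pattern above — a NULL-terminated char ** of pre-formatted instruction strings flushed under a block label — can be shown in isolation. A minimal sketch, using hypothetical stand-in types (demo_blk, demo_codegen) rather than the real out_blk/flush_state internals:

#include <stdio.h>

struct demo_blk
{
	const char *lbl, *desc;
	const char **insns; /* NULL-terminated, as in out_blk */
};

static void demo_codegen(const struct demo_blk *blk)
{
	const char **i;

	/* label line, then each pre-formatted instruction string */
	printf("%s: # %s\n", blk->lbl, blk->desc);
	for(i = blk->insns; i && *i; i++)
		printf("%s", *i);
}

int main(void)
{
	const char *insns[] = { "\tmovl $1, %eax\n", "\tret\n", NULL };
	struct demo_blk blk = { ".Lblk1", "demo block", insns };

	demo_codegen(&blk);
	return 0;
}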
Example #2
void asm_nam_begin3(enum section_type sec, const char *lbl, unsigned align)
{
	/* emit the alignment directive, then the symbol's label */
	asm_out_section(sec,
			".align %u\n"
			"%s:\n",
			align, lbl);
}
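
asm_out_section itself is used throughout these examples as a printf-style writer aimed at a section. A plausible minimal shape for such a helper — assuming it forwards its varargs to the section's output stream; the real function also accepts a NULL section, as Example #1 shows — is:

#include <stdarg.h>
#include <stdio.h>

/* hypothetical stand-in: write formatted text into a section's stream */
static void demo_out_section(FILE *sec, const char *fmt, ...)
{
	va_list l;

	va_start(l, fmt);
	vfprintf(sec, fmt, l);
	va_end(l);
}

int main(void)
{
	/* same shape as asm_nam_begin3: alignment directive, then label */
	demo_out_section(stdout, ".align %u\n%s:\n", 4u, "my_var");
	return 0;
}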
Example #3
void asm_declare_decl_init(decl *d)
{
	enum section_type sec;

	if((d->store & STORE_MASK_STORE) == store_extern){
		asm_predeclare_extern(d);
		return;
	}

	sec = type_is_const(d->ref) ? SECTION_RODATA : SECTION_DATA;

	if(d->bits.var.init.dinit && !decl_init_is_zero(d->bits.var.init.dinit)){
		asm_nam_begin(sec, d);
		asm_declare_init(sec, d->bits.var.init.dinit, d->ref);
		asm_out_section(sec, "\n");

	}else if(d->bits.var.init.compiler_generated && fopt_mode & FOPT_COMMON){
		const char *common_prefix = "comm ";

		/* section doesn't matter */
		sec = SECTION_BSS;

		if(decl_linkage(d) == linkage_internal){
			if(AS_SUPPORTS_LOCAL_COMMON){
				asm_out_section(sec, ".local %s\n", decl_asm_spel(d));
			}else{
				common_prefix = "zerofill __DATA,__bss,";
			}
		}

		asm_out_section(sec, ".%s%s,%u,%u\n",
				common_prefix,
				decl_asm_spel(d), decl_size(d), decl_align(d));

	}else{
		/* always resB, since we use decl_size() */
		asm_nam_begin(SECTION_BSS, d);
		asm_reserve_bytes(SECTION_BSS, decl_size(d));
	}
}
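
Which section a definition lands in is decided up front from const-ness and the presence of a non-zero initialiser. A compact stand-in for that dispatch (string section names instead of the enum; illustrative only, and it ignores the -fcommon branch):

#include <stdio.h>

static const char *pick_section(int is_const, int has_init)
{
	if(!has_init)
		return ".bss"; /* reserved, zeroed bytes, like the resB branch */
	return is_const ? ".rodata" : ".data";
}

int main(void)
{
	printf("const int x = 1; -> %s\n", pick_section(1, 1));
	printf("int x = 1;       -> %s\n", pick_section(0, 1));
	printf("int x;           -> %s\n", pick_section(0, 0));
	return 0;
}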
Example #4
static void asm_declare_init_bitfields(
		enum section_type sec,
		struct bitfield_val *vals, unsigned n,
		type *ty)
{
#define BITFIELD_DBG(...) /*fprintf(stderr, __VA_ARGS__)*/
	integral_t v = 0;
	unsigned width = 0;
	unsigned i;

	BITFIELD_DBG("bitfield out -- new\n");
	for(i = 0; i < n; i++){
		integral_t this = integral_truncate_bits(
				vals[i].val, vals[i].width, NULL);

		width += vals[i].width;

		BITFIELD_DBG("bitfield out: 0x%llx << %u gives ",
				this, vals[i].offset);

		v |= this << vals[i].offset;

		BITFIELD_DBG("0x%llx\n", v);
	}

	BITFIELD_DBG("bitfield done with 0x%llx\n", v);

	if(width > 0){
		asm_declare_init_type(sec, ty);
		asm_out_section(sec, "%" NUMERIC_FMT_D "\n", v);
	}else{
		asm_out_section(sec,
				ASM_COMMENT " skipping zero length bitfield%s init\n",
				n == 1 ? "" : "s");
	}
}
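
The packing loop is plain truncate-shift-OR. A self-contained demonstration of the same arithmetic for a hypothetical pair of fields, with each value cut down to its declared width before being OR'd in at its bit offset:

#include <stdio.h>
#include <stdint.h>

/* keep only the low `width` bits of v, like integral_truncate_bits() */
static uint64_t truncate_bits(uint64_t v, unsigned width)
{
	return width >= 64 ? v : v & ((UINT64_C(1) << width) - 1);
}

int main(void)
{
	/* struct { unsigned a : 3; unsigned b : 5; } x = { 5, 0xff }; */
	uint64_t v = 0;

	v |= truncate_bits(5, 3) << 0;    /* a occupies bits 0..2 */
	v |= truncate_bits(0xff, 5) << 3; /* b truncates to 0x1f, bits 3..7 */

	printf("packed: 0x%llx\n", (unsigned long long)v); /* 0xfd */
	return 0;
}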
Example #5
void asm_out_fp(enum section_type sec, type *ty, floating_t f)
{
	switch(type_primitive(ty)){
		case type_float:
			{
				union { float f; unsigned u; } u;
				u.f = f;
				asm_out_section(sec, ".long %u # float %f\n", u.u, u.f);
				break;
			}

		case type_double:
			{
				union { double d; unsigned long ul; } u;
				u.d = f;
				asm_out_section(sec, ".quad %lu # double %f\n", u.ul, u.d);
				break;
			}
		case type_ldouble:
			ICE("TODO");
		default:
			ICE("bad float type");
	}
}
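
The unions reinterpret the float's bit pattern as an integer so it can be emitted as a plain .long or .quad. A standalone version of the same trick — note the double case assumes unsigned long is 64 bits wide, as the original does on LP64 targets:

#include <stdio.h>

int main(void)
{
	union { float f; unsigned u; } uf;
	union { double d; unsigned long ul; } ud;

	uf.f = 1.5f; /* bit pattern 0x3fc00000 */
	ud.d = 1.5;  /* bit pattern 0x3ff8000000000000 */

	printf(".long %u # float %f\n", uf.u, uf.f);
	printf(".quad %lu # double %f\n", ud.ul, ud.d);
	return 0;
}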
Example #6
static void bfs_block(out_blk *blk, struct flush_state *st)
{
	if(blk->emitted || !blk->reachable)
		return;
	blk->emitted = 1;

	if(blk->merge_preds){
		out_blk **i;
		for(i = blk->merge_preds; *i; i++){
			bfs_block(*i, st);
		}
	}

	switch(blk->type){
		case BLK_UNINIT:
			assert(0 && "uninitialised block type");

		case BLK_TERMINAL:
		case BLK_NEXT_EXPR:
		case BLK_NEXT_BLOCK:
			blk_codegen(blk, st);

			if(blk->type == BLK_NEXT_BLOCK){
				blk_jmpnext(blk->bits.next, st);
				bfs_block(blk->bits.next, st);
			}
			break;

		case BLK_COND:
			blk_codegen(blk, st);
			asm_out_section(NULL, "\t%s\n", blk->bits.cond.insn);

			/* we always jump to the true block if the conditional failed */
			blk_jmpnext(blk->bits.cond.if_1_blk, st);

			/* if it's unlikely, we want the false block already in the pipeline */
			if(blk->bits.cond.unlikely){
				bfs_block(blk->bits.cond.if_0_blk, st);
				bfs_block(blk->bits.cond.if_1_blk, st);
			}else{
				bfs_block(blk->bits.cond.if_1_blk, st);
				bfs_block(blk->bits.cond.if_0_blk, st);
			}
			break;
	}
}
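
Despite the name, bfs_block is a recursive, depth-first walk: the emitted/reachable flags stop revisits, and the unlikely flag only swaps which successor is laid out first so the hot path falls through. The guard-and-recurse skeleton on a toy block type:

#include <stdio.h>

struct toy_blk
{
	const char *lbl;
	int emitted;
	struct toy_blk *next; /* fall-through successor, if any */
};

static void walk(struct toy_blk *b)
{
	if(!b || b->emitted)
		return; /* same early-out as blk->emitted above */
	b->emitted = 1;

	printf("%s:\n", b->lbl);
	walk(b->next);
}

int main(void)
{
	struct toy_blk b = { ".Lb", 0, NULL };
	struct toy_blk a = { ".La", 0, &b };

	walk(&a);
	walk(&b); /* already emitted via a's walk: skipped */
	return 0;
}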
Example #7
static void blk_jmpthread(struct flush_state *st)
{
	out_blk *to = st->jmpto;

	if(cc1_fopt.thread_jumps){
		int lim = 0;

		while(!to->insns && to->type == BLK_NEXT_BLOCK && lim < JMP_THREAD_LIM){
			to = to->bits.next;
			lim++; /* prevent circulars */
		}

		if(lim && cc1_fopt.verbose_asm)
			asm_out_section(NULL, "\t# jump threaded through %d blocks\n", lim);
	}

	impl_jmp(to->lbl);
}
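
Jump threading here just follows a chain of empty fall-through blocks so the final jump targets the first block that does real work, with the loop bound guarding against cycles. The same walk on a toy chain:

#include <stdio.h>

#define JMP_THREAD_LIM 10

struct chain_blk
{
	const char *lbl;
	int has_insns;
	struct chain_blk *next; /* fall-through successor, if any */
};

static const char *thread_target(struct chain_blk *to)
{
	int lim = 0;

	/* skip empty fall-through blocks, bounded to stop circulars */
	while(!to->has_insns && to->next && lim < JMP_THREAD_LIM){
		to = to->next;
		lim++;
	}
	return to->lbl;
}

int main(void)
{
	struct chain_blk c = { ".Lc", 1, NULL };
	struct chain_blk b = { ".Lb", 0, &c };
	struct chain_blk a = { ".La", 0, &b };

	printf("jmp %s\n", thread_target(&a)); /* jmp .Lc */
	return 0;
}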
Example #8
static void static_val(enum section_type sec, type *ty, expr *e)
{
	consty k;

	memset(&k, 0, sizeof k);

	const_fold(e, &k);

	switch(k.type){
		case CONST_NEED_ADDR:
		case CONST_NO:
			ICE("non-constant expr-%s const=%d%s",
					e->f_str(),
					k.type,
					k.type == CONST_NEED_ADDR ? " (needs addr)" : "");
			break;

		case CONST_NUM:
			if(K_FLOATING(k.bits.num)){
				/* asm fp const */
				asm_out_fp(sec, ty, k.bits.num.val.f);
			}else{
				char buf[INTEGRAL_BUF_SIZ];
				asm_declare_init_type(sec, ty);
				integral_str(buf, sizeof buf, k.bits.num.val.i, e->tree_type);
				asm_out_section(sec, "%s", buf);
			}
			break;

		case CONST_ADDR:
			asm_declare_init_type(sec, ty);
			if(k.bits.addr.is_lbl)
				asm_out_section(sec, "%s", k.bits.addr.bits.lbl);
			else
				asm_out_section(sec, "%ld", k.bits.addr.bits.memaddr);
			break;

		case CONST_STRK:
			stringlit_use(k.bits.str->lit); /* must be before the label access */
			asm_declare_init_type(sec, ty);
			asm_out_section(sec, "%s", k.bits.str->lit->lbl);
			break;
	}

	/* offset in bytes, no mul needed */
	if(k.offset)
		asm_out_section(sec, " + %ld", k.offset);
	asm_out_section(sec, "\n");
}
Example #9
void blk_flushall(out_ctx *octx, out_blk *first, char *end_dbg_lbl)
{
	struct flush_state st = { 0 };
	out_blk **must_i;

	if(cc1_fopt.dump_basic_blocks)
		dot_blocks(first);

	mark_reachable_blocks(first);
	for(must_i = octx->mustgen; must_i && *must_i; must_i++)
		mark_reachable_blocks(*must_i);

	bfs_block(first, &st);

	for(must_i = octx->mustgen; must_i && *must_i; must_i++)
		bfs_block(*must_i, &st);

	if(st.jmpto)
		impl_jmp(st.jmpto->lbl);

	asm_out_section(NULL, "%s:\n", end_dbg_lbl);

	out_dbg_labels_emit_release_v(&octx->pending_lbls);
}
Example #10
static void asm_declare_init_type(enum section_type sec, type *ty)
{
	asm_out_section(sec, ".%s ", asm_type_directive(ty));
}
Example #11
static void asm_declare_pad(enum section_type sec, unsigned pad, const char *why)
{
	if(pad)
		asm_out_section(sec, ".space %u " ASM_COMMENT " %s\n", pad, why);
}
Example #12
static void asm_predecl(const char *type, decl *d)
{
	asm_out_section(SECTION_TEXT, ".%s %s\n", type, decl_asm_spel(d));
}
Example #13
static void asm_declare_init(enum section_type sec, decl_init *init, type *tfor)
{
	type *r;

	if(init == DYNARRAY_NULL)
		init = NULL;

	if(!init){
		/* don't initialise flex-arrays */
		if(!type_is_incomplete_array(tfor)){
			asm_declare_pad(sec, type_size(tfor, NULL),
					"null init"/*, type_to_str(tfor)*/);
		}else{
			asm_out_section(sec, ASM_COMMENT " flex array init skipped\n");
		}

	}else if((r = type_is_primitive(tfor, type_struct))){
		/* one init per member
		 * assumes the ->bits.ar.inits order is member order
		 */
		struct_union_enum_st *const sue = r->bits.type->sue;
		sue_member **mem;
		decl_init **i;
		unsigned end_of_last = 0;
		struct bitfield_val *bitfields = NULL;
		unsigned nbitfields = 0;
		decl *first_bf = NULL;
		expr *copy_from_exp;

		UCC_ASSERT(init->type == decl_init_brace, "unbraced struct");

#define DEBUG(s, ...) /*fprintf(f, "\033[35m" s "\033[m\n", __VA_ARGS__)*/

		i = init->bits.ar.inits;

		/* check for compound-literal copy-init */
		if((copy_from_exp = decl_init_is_struct_copy(init, sue))){
			decl_init *copy_from_init;

			copy_from_exp = expr_skip_lval2rval(copy_from_exp);

			/* the only struct-expression that's possible
			 * in static context is a compound literal */
			assert(expr_kind(copy_from_exp, compound_lit)
					&& "unhandled expression init");

			copy_from_init = copy_from_exp->bits.complit.decl->bits.var.init.dinit;
			assert(copy_from_init->type == decl_init_brace);

			i = copy_from_init->bits.ar.inits;
		}

		/* iterate using members, not inits */
		for(mem = sue->members;
				mem && *mem;
				mem++)
		{
			decl *d_mem = (*mem)->struct_member;
			decl_init *di_to_use = NULL;

			if(i){
				int inc = 1;

				if(*i == NULL)
					inc = 0;
				else if(*i != DYNARRAY_NULL)
					di_to_use = *i;

				if(inc){
					i++;
					if(!*i)
						i = NULL; /* reached end */
				}
			}

			DEBUG("init for %ld/%s, %s",
					mem - sue->members, d_mem->spel,
					di_to_use ? di_to_use->bits.expr->f_str() : NULL);

			/* only pad if we're not on a bitfield or we're on the first bitfield */
			if(!d_mem->bits.var.field_width || !first_bf){
				DEBUG("prev padding, offset=%d, end_of_last=%d",
						d_mem->struct_offset, end_of_last);

				UCC_ASSERT(
						d_mem->bits.var.struct_offset >= end_of_last,
						"negative struct pad, sue %s, member %s "
						"offset %u, end_of_last %u",
						sue->spel, decl_to_str(d_mem),
						d_mem->bits.var.struct_offset, end_of_last);

				asm_declare_pad(sec,
						d_mem->bits.var.struct_offset - end_of_last,
						"prev struct padding");
			}

			if(d_mem->bits.var.field_width){
				if(!first_bf || d_mem->bits.var.first_bitfield){
					if(first_bf){
						DEBUG("new bitfield group (%s is new boundary), old:",
								d_mem->spel);
						/* next bitfield group - store the current */
						bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					}
					first_bf = d_mem;
				}

				bitfields = bitfields_add(
						bitfields, &nbitfields,
						d_mem, di_to_use);

			}else{
				if(nbitfields){
					DEBUG("at non-bitfield, prev-bitfield out:", 0);
					bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					first_bf = NULL;
				}

				DEBUG("normal init for %s:", d_mem->spel);
				asm_declare_init(sec, di_to_use, d_mem->ref);
			}

			if(type_is_incomplete_array(d_mem->ref)){
				UCC_ASSERT(!mem[1], "flex-arr not at end");
			}else if(!d_mem->bits.var.field_width || d_mem->bits.var.first_bitfield){
				unsigned last_sz = type_size(d_mem->ref, NULL);

				end_of_last = d_mem->bits.var.struct_offset + last_sz;
				DEBUG("done with member \"%s\", end_of_last = %d",
						d_mem->spel, end_of_last);
			}
		}

		if(nbitfields)
			bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
		free(bitfields);

		/* need to pad to struct size */
		asm_declare_pad(sec,
				sue_size(sue, NULL) - end_of_last,
				"struct tail");

	}else if((r = type_is(tfor, type_array))){
		size_t i, len;
		decl_init **p;
		type *next = type_next(tfor);

		UCC_ASSERT(init->type == decl_init_brace, "unbraced array");

		if(type_is_incomplete_array(tfor)){
			len = dynarray_count(init->bits.ar.inits);
		}else{
			UCC_ASSERT(type_is_complete(tfor), "incomplete array/type init");
			len = type_array_len(tfor);
		}

		for(i = len, p = init->bits.ar.inits;
				i > 0;
				i--)
		{
			decl_init *this = NULL;
			if(*p){
				this = *p++;

				if(this != DYNARRAY_NULL && this->type == decl_init_copy){
					/*fprintf(f, "# copy from %lu\n", DECL_INIT_COPY_IDX(this, init));*/
					struct init_cpy *icpy = *this->bits.range_copy;
					/* resolve the copy */
					this = icpy->range_init;
				}
			}

			asm_declare_init(sec, this, next);
		}

	}else if((r = type_is_primitive(tfor, type_union))){
		/* union inits are decl_init_brace with DYNARRAY_NULL gaps up to
		 * the single real init, then NULL/end of the init-array */
		struct_union_enum_st *sue = type_is_s_or_u(r);
		unsigned i, sub = 0;
		decl_init *u_init;

		UCC_ASSERT(init->type == decl_init_brace, "brace init expected");

		/* skip the empty slots until we reach the real init */
		for(i = 0; init->bits.ar.inits[i] == DYNARRAY_NULL; i++);

		if((u_init = init->bits.ar.inits[i])){
			decl *mem = sue->members[i]->struct_member;
			type *mem_r = mem->ref;

			/* union init, member at index `i' */
			if(mem->bits.var.field_width){
				/* we know it's integral */
				struct bitfield_val bfv;

				ASSERT_SCALAR(u_init);

				bitfield_val_set(&bfv, u_init->bits.expr, mem->bits.var.field_width);

				asm_declare_init_bitfields(sec, &bfv, 1, mem_r);
			}else{
				asm_declare_init(sec, u_init, mem_r);
			}

			sub = type_size(mem_r, NULL);
		} /* else null union init */

		asm_declare_pad(sec,
				type_size(r, NULL) - sub,
				"union extra");

	}else{
		/* scalar */
		expr *exp = init->bits.expr;

		UCC_ASSERT(init->type == decl_init_scalar, "scalar init expected");

		/* exp->tree_type should match tfor */
		{
			char buf[TYPE_STATIC_BUFSIZ];

			UCC_ASSERT(
					type_cmp(exp->tree_type, tfor, TYPE_CMP_ALLOW_TENATIVE_ARRAY) != TYPE_NOT_EQUAL,
					"mismatching init types: %s and %s",
					type_to_str_r(buf, exp->tree_type),
					type_to_str(tfor));
		}

		/* use tfor, since "abc" has type (char[]){(int)'a', (int)'b', ...} */
		DEBUG("  scalar init for %s:", type_to_str(tfor));
		static_val(sec, tfor, exp);
	}
}
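
Most of the .space padding this routine emits comes straight from ABI struct layout: each member lands at bits.var.struct_offset, and any gap since end_of_last becomes "prev struct padding". A self-contained look at where such a gap comes from, using offsetof on a struct with an alignment hole:

#include <stdio.h>
#include <stddef.h>

struct padded
{
	char c; /* offset 0, size 1 */
	int i;  /* offset 4 on common ABIs: a 3-byte hole before it */
};

int main(void)
{
	/* the hole is what asm_declare_pad() would emit for a static
	 * initialiser, e.g. ".space 3 # prev struct padding" */
	printf("offsetof(i) = %zu, gap after c = %zu\n",
			offsetof(struct padded, i),
			offsetof(struct padded, i) - sizeof(char));
	return 0;
}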