Example 1
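/* cg_lab: invoked via walktree() over the label table. For a label that is reachable from outside the
 * routine (l->gbl) and resolves to a generated line (l->ml), emit a label table entry and define a global
 * symbol in the code psect; "base" is the offset of the line number table, so the stored offset locates
 * the label's line number entry.
 */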
void	cg_lab (mlabel *l, int4 base)
{
	mstr		glob_name;
	lab_tabent	lent;

	if (l->ml && l->gbl)
	{
		lent.lab_name.len = l->mvname.len;
		lent.lab_name.addr = (char *)(l->mvname.addr - (char *)stringpool.base);
		lent.LABENT_LNR_OFFSET = (SIZEOF(lnr_tabent) * l->ml->line_number) + base;
		lent.has_parms = (NO_FORMALLIST != l->formalcnt);	/* Flag to indicate a formallist */
		emit_immed((char *)&lent, SIZEOF(lent));
		mlabel2xtern(&glob_name, &int_module_name, &l->mvname);
		define_symbol(GTM_CODE, &glob_name, lent.LABENT_LNR_OFFSET);
	}
}
Example 2
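/* obj_code: drive creation of the object file for the routine just compiled. The code is first generated in
 * a sizing pass, the routine header is filled in with the offsets and lengths of the code, variable, label,
 * and line number tables, and then the object file is written: machine code, tables, padding, and literals.
 */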
void	obj_code (uint4 src_lines, void *checksum_ctx)
{
	int		status;
	rhdtyp		rhead;
	mline		*mlx, *mly;
	var_tabent	*vptr;
	int4		lnr_pad_len;
	intrpt_state_t	prev_intrpt_state;
	DCL_THREADGBL_ACCESS;

	SETUP_THREADGBL_ACCESS;
	assert(!run_time);
	obj_init();
	/* Define the routine name global symbol. */
	define_symbol(GTM_MODULE_DEF_PSECT, (mstr *)&int_module_name, 0);
	memset(&rhead, 0, SIZEOF(rhead));
	alloc_reg();
	jmp_opto();
	curr_addr = SIZEOF(rhdtyp);
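	/* First code_gen() pass runs in the approximate-address phase purely to measure the size of the generated code. */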
	cg_phase = CGP_APPROX_ADDR;
	cg_phase_last = CGP_NOSTATE;
	code_gen();
	code_size = curr_addr;
	cg_phase = CGP_ADDR_OPT;
	shrink_jmps();
	comp_lits(&rhead);
	if ((cmd_qlf.qlf & CQ_MACHINE_CODE))
	{
		cg_phase = CGP_ASSEMBLY;
		code_gen();
	}
	if (!(cmd_qlf.qlf & CQ_OBJECT))
		return;
	rhead.ptext_ptr = SIZEOF(rhead);
	set_rtnhdr_checksum(&rhead, (gtm_rtn_src_chksum_ctx *)checksum_ctx);
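	/* The variable, label, and line number tables follow the generated code; each *_ptr field below is a
	 * byte offset from the start of the routine header.
	 */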
	rhead.vartab_ptr = code_size;
	rhead.vartab_len = mvmax;
	code_size += mvmax * SIZEOF(var_tabent);
	rhead.labtab_ptr = code_size;
	rhead.labtab_len = mlmax;
	code_size += mlmax * SIZEOF(lab_tabent);
	rhead.lnrtab_ptr = code_size;
	rhead.lnrtab_len = src_lines;
	rhead.compiler_qlf = cmd_qlf.qlf;
	if (cmd_qlf.qlf & CQ_EMBED_SOURCE)
	{
		rhead.routine_source_offset = TREF(routine_source_offset);
		rhead.routine_source_length = (uint4)(stringpool.free - stringpool.base) - TREF(routine_source_offset);
	}
	rhead.temp_mvals = sa_temps[TVAL_REF];
	rhead.temp_size = sa_temps_offset[TCAD_REF];
	code_size += src_lines * SIZEOF(int4);
	lnr_pad_len = PADLEN(code_size, SECTION_ALIGN_BOUNDARY);
	code_size += lnr_pad_len;
	DEFER_INTERRUPTS(INTRPT_IN_OBJECT_FILE_COMPILE, prev_intrpt_state);
	create_object_file(&rhead);
	ENABLE_INTERRUPTS(INTRPT_IN_OBJECT_FILE_COMPILE, prev_intrpt_state);
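	/* Final code_gen() pass emits the actual machine code into the object file. */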
	cg_phase = CGP_MACHINE;
	code_gen();
	/* Variable table: */
	vptr = (var_tabent *)mcalloc(mvmax * SIZEOF(var_tabent));
	if (mvartab)
		walktree(mvartab, cg_var, (char *)&vptr);
	else
		assert(0 == mvmax);
	emit_immed((char *)vptr, mvmax * SIZEOF(var_tabent));
	/* Label table: */
	if (mlabtab)
		walktree((mvar *)mlabtab, cg_lab, (char *)rhead.lnrtab_ptr);
	else
		assert(0 == mlmax);
	/* External entry definitions: */
	emit_immed((char *)&(mline_root.externalentry->rtaddr), SIZEOF(mline_root.externalentry->rtaddr));	/* line 0 */
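	/* Non-recursive walk of the mline tree: descend to a child first, else move to a sibling, else climb back
	 * up until an ancestor has an unvisited sibling; every line with generated code (mlx->table) contributes
	 * its runtime address to the line number table.
	 */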
	for (mlx = mline_root.child; mlx; mlx = mly)
	{
		if (mlx->table)
			emit_immed((char *)&(mlx->externalentry->rtaddr), SIZEOF(mlx->externalentry->rtaddr));
		if (0 == (mly = mlx->child))				/* note assignment */
			if (0 == (mly = mlx->sibling))			/* note assignment */
				for (mly = mlx;  ;  )
				{
					if (0 == (mly = mly->parent))	/* note assignment */
						break;
					if (mly->sibling)
					{
						mly = mly->sibling;
						break;
					}
				}
	}
	if (0 != lnr_pad_len) /* emit padding so literal text pool starts on proper boundary */
		emit_immed(PADCHARS, lnr_pad_len);
#	if !defined(__MVS__) && !defined(__s390__)	/* assert not valid for instructions on OS390 */
	assert(code_size == psect_use_tab[GTM_CODE]);
#	endif
	emit_literals();
	DEFER_INTERRUPTS(INTRPT_IN_OBJECT_FILE_COMPILE, prev_intrpt_state);
	finish_object_file();
	ENABLE_INTERRUPTS(INTRPT_IN_OBJECT_FILE_COMPILE, prev_intrpt_state);
	CLOSE_OBJECT_FILE(object_file_des, status);
	if (-1 == status)
		rts_error_csa(CSA_ARG(NULL) VARLSTCNT(8) ERR_SYSCALL, 5, RTS_ERROR_LITERAL("close()"), CALLFROM, errno);
	/* Ready to make object visible. Rename from tmp name to real routine name */
	RENAME_TMP_OBJECT_FILE(object_file_name);
}
Example 3
void	obj_code (uint4 src_lines, uint4 checksum)
{
	rhdtyp		rhead;
	mline		*mlx, *mly;
	var_tabent	*vptr;
	int4		lnr_pad_len;

	assert(!run_time);
	obj_init();
	/* Define the routine name global symbol. */
	define_symbol(GTM_MODULE_DEF_PSECT, (mstr *)&int_module_name, 0);
	memset(&rhead, 0, SIZEOF(rhead));
	alloc_reg();
	jmp_opto();
	curr_addr = SIZEOF(rhdtyp);
	cg_phase = CGP_APPROX_ADDR;
	cg_phase_last = CGP_NOSTATE;
	code_gen();
	code_size = curr_addr;
	cg_phase = CGP_ADDR_OPT;
	shrink_jmps();
	comp_lits(&rhead);
	if ((cmd_qlf.qlf & CQ_MACHINE_CODE))
	{
		cg_phase = CGP_ASSEMBLY;
		code_gen();
	}
	if (!(cmd_qlf.qlf & CQ_OBJECT))
		return;
	rhead.ptext_ptr = SIZEOF(rhead);
	rhead.checksum = checksum;
	rhead.vartab_ptr = code_size;
	rhead.vartab_len = mvmax;
	code_size += mvmax * SIZEOF(var_tabent);
	rhead.labtab_ptr = code_size;
	rhead.labtab_len = mlmax;
	code_size += mlmax * SIZEOF(lab_tabent);
	rhead.lnrtab_ptr = code_size;
	rhead.lnrtab_len = src_lines;
	rhead.compiler_qlf = cmd_qlf.qlf;
	rhead.temp_mvals = sa_temps[TVAL_REF];
	rhead.temp_size = sa_temps_offset[TCAD_REF];
	code_size += src_lines * SIZEOF(int4);
	lnr_pad_len = PADLEN(code_size, SECTION_ALIGN_BOUNDARY);
	code_size += lnr_pad_len;
	create_object_file(&rhead);
	cg_phase = CGP_MACHINE;
	code_gen();
	/* Variable table: */
	vptr = (var_tabent *)mcalloc(mvmax * SIZEOF(var_tabent));
	if (mvartab)
		walktree(mvartab, cg_var, (char *)&vptr);
	else
		assert(0 == mvmax);
	emit_immed((char *)vptr, mvmax * SIZEOF(var_tabent));
	/* Label table: */
	if (mlabtab)
		walktree((mvar *)mlabtab, cg_lab, (char *)rhead.lnrtab_ptr);
	else
		assert(0 == mlmax);
	/* External entry definitions: */
	emit_immed((char *)&(mline_root.externalentry->rtaddr), SIZEOF(mline_root.externalentry->rtaddr));	/* line 0 */
	for (mlx = mline_root.child; mlx; mlx = mly)
	{
		if (mlx->table)
			emit_immed((char *)&(mlx->externalentry->rtaddr), SIZEOF(mlx->externalentry->rtaddr));
		if (0 == (mly = mlx->child))				/* note assignment */
			if (0 == (mly = mlx->sibling))			/* note assignment */
				for (mly = mlx;  ;  )
				{
					if (0 == (mly = mly->parent))	/* note assignment */
						break;
					if (mly->sibling)
					{
						mly = mly->sibling;
						break;
					}
				}
	}
	if (0 != lnr_pad_len) /* emit padding so literal text pool starts on proper boundary */
		emit_immed(PADCHARS, lnr_pad_len);
#if !defined(__MVS__) && !defined(__s390__)	/* assert not valid for instructions on OS390 */
	assert(code_size == psect_use_tab[GTM_CODE]);
#endif
	emit_literals();
	close_object_file();
}
Example 4
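/* emit_vax_inst: translate one VAX-style pseudo-instruction from the intermediate instruction stream into
 * i386 machine code, buffering the generated bytes in code_buf, and return a pointer just past the consumed
 * pseudo-instruction words.
 */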
short *emit_vax_inst(short *inst, oprtype **fst_opr, oprtype **lst_opr)
     /* fst_opr and lst_opr are triple operands */
{
	short	sav_in;
	bool	oc_int;
	int4	cnt;
	oprtype *opr;
	triple	*ct;

	code_idx = 0;
	force_32 = 0;
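	/* code_buf accumulates the i386 bytes generated for this pseudo-instruction; code_idx indexes the next free slot. */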

	switch  (cg_phase)
	{
	case CGP_ADDR_OPT:
	case CGP_APPROX_ADDR:
	case CGP_MACHINE:
		switch ((sav_in = *inst++))
		{
			case VXI_BEQL:
			case VXI_BGEQ:
			case VXI_BGTR:
			case VXI_BLEQ:
			case VXI_BLSS:
			case VXI_BNEQ:
			case VXI_BRB:
			case VXI_BRW:
				emit_jmp(sav_in, &inst);
				break;
			case VXI_BLBC:
			case VXI_BLBS:
				assert(VXT_REG == *inst);
				inst++;
				inst++;
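				/* Low-bit branch: call the xf_dt_get transfer table entry, compare its result in eAX with
				 * zero, and branch on the outcome (BLBC branches if the bit is clear, BLBS if it is set).
				 */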
				emit_xfer(4*xf_dt_get);
				code_buf[code_idx++] = I386_INS_CMP_eAX_Iv;
				*((int4 *)&code_buf[code_idx]) = 0;
				code_idx += SIZEOF(int4);
				if (sav_in == VXI_BLBC)
					emit_jmp(VXI_BEQL, &inst);
				else
				{
					assert(sav_in == VXI_BLBS);
					emit_jmp(VXI_BNEQ, &inst);
				}
				break;
			case VXI_BICB2:
			case VXI_BISB2:
				assert(VXT_LIT == *inst);
				inst++;
				assert(1 == *inst);
				inst++;
				assert(VXT_REG == *inst);
				inst++;
				inst++;
				if (sav_in == VXI_BICB2)
					emit_xfer(4*xf_dt_false);
				else
				{
					assert(sav_in == VXI_BISB2);
					emit_xfer(4*xf_dt_true);
				}
				break;
			case VXI_CALLS:
				oc_int = TRUE;
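				/* Push the argument count (a literal or a computed value), call through the transfer table
				 * entry, then clean up the stack with an LEA on ESP.
				 */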
				if (VXT_LIT == *inst)
				{
					inst++;
					cnt = (int4) *inst++;
				} else
				{
					assert(VXT_VAL == *inst);
					inst++;
					opr = *(fst_opr + *inst);
					assert(opr->oprclass == TRIP_REF);
					ct = opr->oprval.tref;
					if (ct->destination.oprclass)
					{
						opr = &ct->destination;
					}
					if (opr->oprclass == TRIP_REF)
					{
						assert(ct->opcode == OC_ILIT);
						cnt = ct->operand[0].oprval.ilit;
						if (cnt >= -128  &&  cnt <= 127)
						{
							code_buf[code_idx++] = I386_INS_PUSH_Ib;
							code_buf[code_idx++] = cnt & 0xff;
						}
						else
						{
							code_buf[code_idx++] = I386_INS_PUSH_Iv;
							*((int4 *)&code_buf[code_idx]) = cnt;
							code_idx += SIZEOF(int4);
						}
						cnt++;
						inst++;
					}
					else
					{
						assert(opr->oprclass == TINT_REF);
						oc_int = FALSE;
						opr = *(fst_opr + *inst++);
						emit_trip(PUSH, opr, TRUE, 0);
					}
				}
				assert(VXT_XFER == *inst);
				inst++;
				emit_xfer(*inst++);
				if (oc_int)
				{
					if (cnt)
					{
						code_buf[code_idx++] = I386_INS_LEA_Gv_M;
						emit_base_offset(I386_REG_ESP, I386_REG_ESP, 4*cnt);
					}
				}
				else
				{
					emit_trip(LOAD, opr, TRUE, I386_REG_EDX);

					code_buf[code_idx++] = I386_INS_LEA_Gv_M;
					emit_base_offset(I386_REG_ESP, I386_REG_ESP, 4);
				}
				break;
			case VXI_CLRL:
				assert(VXT_VAL == *inst);
				inst++;
				emit_trip(CLEAR, *(fst_opr + *inst++), TRUE, 0);
				break;
			case VXI_CMPL:
				assert(VXT_VAL == *inst);
				inst++;
				emit_trip(LOAD, *(fst_opr + *inst++), TRUE, I386_REG_EDX);
				assert(VXT_VAL == *inst);
				inst++;
				emit_trip(COMPARE, *(fst_opr + *inst++), TRUE, I386_REG_EDX);
				break;
			case VXI_INCL:
				assert(VXT_VAL == *inst);
				inst++;
				emit_trip(INCREMENT, *(fst_opr + *inst++), TRUE, 0);
				break;
			case VXI_JMP:
				if (VXT_VAL == *inst)
				{
					inst++;
					emit_trip(JUMP, *(fst_opr + *inst++), FALSE, 0);
				}
				else
				{
					emit_jmp(sav_in, &inst);
				}
				break;
			case VXI_JSB:
				assert(VXT_XFER == *inst);
				inst++;
				emit_xfer(*inst++);
				break;
			case VXI_MOVAB:
				if (VXT_JMP == *inst)
				{
					inst += 2;
					emit_pcrel(LOAD_ADDRESS, I386_REG_EAX);
					assert(VXT_ADDR == *inst);
					inst++;
					emit_trip(STORE, *(fst_opr + *inst++), FALSE, I386_REG_EAX);
				} else if ((VXT_ADDR == *inst) || (VXT_VAL == *inst))
				{
					bool	addr;
					unsigned char reg;
					short	save_inst;

					addr = (VXT_VAL == *inst);
					inst++;
					save_inst = *inst++;
					assert(VXT_REG == *inst);
					inst++;
					reg = ((*inst++ & 0x01) ? I386_REG_EDX : I386_REG_EAX); /* r0 and r1 are only ones used */
					emit_trip(LOAD_ADDRESS, *(fst_opr + save_inst), addr, reg);
				} else
					assertpro(FALSE && *inst);
				break;
			case VXI_MOVC3:
				assert(VXT_LIT == *inst);
				inst += 2;
				assert(VXT_VAL == *inst);
				inst++;
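				/* Block move of an mval: save ESI/EDI, load the source address (via ECX) into ESI and the
				 * destination address into EDI, set ECX to SIZEOF(mval), and copy with REP MOVSB.
				 */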
				code_buf[code_idx++] = I386_INS_PUSH_eSI;
				code_buf[code_idx++] = I386_INS_PUSH_eDI;

				emit_trip(LOAD_ADDRESS, *(fst_opr + *inst++), TRUE, I386_REG_ECX);

				assert(VXT_VAL == *inst);
				inst++;
				emit_trip(LOAD_ADDRESS, *(fst_opr + *inst++), TRUE, I386_REG_EDI);

				code_buf[code_idx++] = I386_INS_MOV_Gv_Ev;
				modrm_byte.modrm.reg_opcode = I386_REG_ESI;
				modrm_byte.modrm.mod = I386_MOD32_REGISTER;
				modrm_byte.modrm.r_m = I386_REG_ECX;
				code_buf[code_idx++] = modrm_byte.byte;

				code_buf[code_idx++] = I386_INS_MOV_eCX;
				*((int4 *)&code_buf[code_idx]) = (int4)SIZEOF(mval);
				code_idx += SIZEOF(int4);

				code_buf[code_idx++] = I386_INS_REP_E_Prefix;
				code_buf[code_idx++] = I386_INS_MOVSB_Xb_Yb;

				code_buf[code_idx++] = I386_INS_POP_eDI;
				code_buf[code_idx++] = I386_INS_POP_eSI;
				break;
			case VXI_MOVL:
				if (VXT_REG == *inst)
				{
					inst++;
					if (*inst > 0x5f)	/* OC_CURRHD */  /* any mode >= 6 (deferred), any register */
					{
						inst++;
						assert(VXT_ADDR == *inst);
						inst++;

						emit_xfer(4*xf_get_msf);
						emit_op_base_offset(LOAD, I386_REG_EAX, 0, I386_REG_EAX);
						emit_trip(STORE, *(fst_opr + *inst++), FALSE, I386_REG_EAX);
					} else
					{
						bool addr;

						assert(0x50 == *inst);  /* register mode: R0 */
						inst++;
						if ((VXT_VAL == *inst) || (VXT_ADDR == *inst))
						{
							addr = (VXT_VAL == *inst);
							inst++;
							emit_trip(STORE, *(fst_opr + *inst++), addr, I386_REG_EAX);
						}
						else if (VXT_REG == *inst)
						{
							unsigned char	reg;

							inst++;
							if ((*inst & 0x0f) == 10)	/* VAX $TEST */
							{
								code_buf[code_idx++] = I386_INS_PUSH_eAX;
								emit_xfer(4*xf_dt_store);
								code_buf[code_idx++] = I386_INS_POP_eAX;
							}
							else
							{
								code_buf[code_idx++] = I386_INS_MOV_Ev_Gv;
								modrm_byte.modrm.reg_opcode = I386_REG_EAX;
								modrm_byte.modrm.mod = I386_MOD32_REGISTER;
								modrm_byte.modrm.r_m = i386_reg(*inst);
								code_buf[code_idx++] = modrm_byte.byte;
							}
							inst++;
						} else
							assertpro(FALSE && *inst);
					}
				} else if (VXT_VAL == *inst)
				{
					inst++;
					emit_trip(LOAD, *(fst_opr + *inst++), TRUE, I386_REG_EDX);
					assert(VXT_REG == *inst);
					inst++;
					assert(0x51 == *inst);  /* register mode: R1 */
					inst++;
				} else
					assertpro(FALSE && *inst);
				break;
			case VXT_IREPAB:
				assert(VXT_VAL == *inst);
				inst += 2;
				emit_trip(PUSH_ADDRESS, *lst_opr, TRUE, 0);
				break;
			case VXI_PUSHAB:
				if (VXT_JMP == *inst)
				{
					inst += 2;
					emit_pcrel(PUSH_ADDRESS, 0);
				} else if (VXT_VAL == *inst)
				{
					inst++;
					emit_trip(PUSH_ADDRESS, *(fst_opr + *inst++), TRUE, 0);
				} else
					assertpro(FALSE && *inst);
				break;
			case VXT_IREPL:
				assert(VXT_VAL == *inst);
				inst += 2;
				emit_trip(PUSH, *lst_opr, TRUE, 0);
				break;
			case VXI_PUSHL:
				if (VXT_LIT == *inst)
				{
					int4	lit;

					inst++;
					lit = *inst++;
					if (lit >= -128  &&  lit <= 127)
					{
						code_buf[code_idx++] = I386_INS_PUSH_Ib;
						code_buf[code_idx++] = lit & 0xff;
					}
					else
					{
						code_buf[code_idx++] = I386_INS_PUSH_Iv;
						*((int4 *)&code_buf[code_idx]) = lit;
						code_idx += SIZEOF(int4);
					}
				} else if (VXT_ADDR == *inst)
				{
					inst++;
					emit_trip(PUSH, *(fst_opr + *inst++), FALSE, 0);
				} else if (VXT_VAL == *inst)
				{
					inst++;
					emit_trip(PUSH, *(fst_opr + *inst++), TRUE, 0);
				} else
					assertpro(FALSE && *inst);
				break;
			case VXI_TSTL:
				if (VXT_VAL == *inst)
				{
					inst++;
					emit_trip(TEST, *(fst_opr + *inst++), TRUE, 0);
				}
				else if (VXT_REG == *inst)
				{
					inst++;
					code_buf[code_idx++] = I386_INS_CMP_eAX_Iv;
					assert(I386_REG_EAX == i386_reg(*inst));	/* VAX R0 */
					inst++;
					*((int4 *)&code_buf[code_idx]) = 0;	/* 32 bit immediate 0 */
					code_idx += SIZEOF(int4);
				} else
					assertpro(FALSE && *inst);
				break;
			default:
				assertpro(FALSE && sav_in);
		}
		break;
	default:
		assertpro(FALSE && cg_phase);
		break;
	}
	assert(code_idx < BUFFERED_CODE_SIZE);
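	/* In the machine phase the buffered bytes are written to the object file; in the sizing phases only the
	 * byte counts and addresses are advanced.
	 */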
	if (cg_phase == CGP_MACHINE)
	{
		generated_code_size += code_idx;
		emit_immed ((char *)&code_buf[0], SIZEOF(unsigned char) * code_idx);
	} else if (cg_phase != CGP_ASSEMBLY)
	{
		if (cg_phase == CGP_APPROX_ADDR)
		{
			calculated_code_size += code_idx;
		}
		curr_addr += SIZEOF(unsigned char) * code_idx;
	}
	code_reference += SIZEOF(unsigned char) * code_idx;
	jmp_offset -= SIZEOF(unsigned char) * code_idx;
	return inst;
}