Example #1
void op_jcxz_rel8_impl()
{
	isa_target = isa_regs->eip + isa_inst.imm.b;
	if (!isa_load_reg(x86_reg_cx))
		isa_regs->eip = isa_target;
	x86_uinst_new(x86_uinst_branch, x86_dep_ecx, 0, 0, 0, 0, 0, 0);
}
Example #2
void x86_isa_jcxz_rel8_impl(X86Context *ctx)
{
	struct x86_regs_t *regs = ctx->regs;

	ctx->target_eip = regs->eip + ctx->inst.imm.b;
	if (!X86ContextLoadReg(ctx, X86InstRegCx))
		regs->eip = ctx->target_eip;
	x86_uinst_new(ctx, x86_uinst_branch, x86_dep_ecx, 0, 0, 0, 0, 0, 0);
}
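
Examples #1 and #2 implement the same JCXZ rel8 semantics against two versions of the simulator interface: the branch is taken only when the CX register is zero, and the target is the next instruction's address plus the sign-extended 8-bit displacement. A minimal standalone sketch of just that arithmetic, independent of the simulator (the function name and types below are illustrative, not part of the codebase):

#include <stdint.h>

/* Standalone sketch of JCXZ rel8: the jump is taken only if CX is zero.
 * 'next_eip' is the address of the following instruction and 'rel8' is the
 * sign-extended 8-bit displacement taken from the immediate. */
static uint32_t jcxz_target(uint32_t next_eip, int8_t rel8, uint16_t cx)
{
	return cx == 0 ? next_eip + (int32_t) rel8 : next_eip;
}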
Example #3
/* Run the emulation of one x86 macro-instruction and create its uops.
 * If any of the uops is a control uop, this uop will be the return value of
 * the function. Otherwise, the first decoded uop is returned. */
static struct x86_uop_t *X86ThreadFetchInst(X86Thread *self,
		int fetch_trace_cache) {
	X86Cpu *cpu = self->cpu;
	X86Core *core = self->core;
	X86Context *ctx = self->ctx;

	struct x86_uop_t *uop;
	struct x86_uop_t *ret_uop;

	struct x86_uinst_t *uinst;
	int uinst_count;
	int uinst_index;

	/* Functional simulation */
	self->fetch_eip = self->fetch_neip;
	X86ContextSetEip(ctx, self->fetch_eip);
	X86ContextExecute(ctx);
	self->fetch_neip = self->fetch_eip + ctx->inst.size;

	/* If no micro-instruction was generated by this instruction, create a
	 * 'nop' micro-instruction. This makes sure that there is always a micro-
	 * instruction representing the regular control flow of macro-instructions
	 * of the program. It is important for the traces stored in the trace
	 * cache. */
	if (!x86_uinst_list->count)
		x86_uinst_new(ctx, x86_uinst_nop, 0, 0, 0, 0, 0, 0, 0);

	/* Micro-instructions created by the x86 instructions can be found now
	 * in 'x86_uinst_list'. */
	uinst_count = list_count(x86_uinst_list);
	uinst_index = 0;
	ret_uop = NULL;
	while (list_count(x86_uinst_list)) {
		/* Get uinst from head of list */
		uinst = list_remove_at(x86_uinst_list, 0);

		/* Create uop */
		uop = x86_uop_create();
		uop->uinst = uinst;
		assert(uinst->opcode >= 0 && uinst->opcode < x86_uinst_opcode_count);
		uop->flags = x86_uinst_info[uinst->opcode].flags;
		uop->id = cpu->uop_id_counter++;
		uop->id_in_core = core->uop_id_counter++;

		uop->ctx = ctx;
		uop->thread = self;

		uop->mop_count = uinst_count;
		uop->mop_size = ctx->inst.size;
		uop->mop_id = uop->id - uinst_index;
		uop->mop_index = uinst_index;

		uop->eip = self->fetch_eip;
		uop->in_fetch_queue = 1;
		uop->trace_cache = fetch_trace_cache;
		uop->specmode = X86ContextGetState(ctx, X86ContextSpecMode);
		uop->fetch_address = self->fetch_address;
		uop->fetch_access = self->fetch_access;
		uop->neip = ctx->regs->eip;
		uop->pred_neip = self->fetch_neip;
		uop->target_neip = ctx->target_eip;

		/* Process uop dependences and classify them in integer, floating-point,
		 * flags, etc. */
		x86_uop_count_deps(uop);

		/* Calculate physical address of a memory access */
		if (uop->flags & X86_UINST_MEM) {
			if (uinst->address == ctx->mem_mod_low && ctx->mem_mod_low != 0) {
				/* Access matches the recorded low memory-module boundary:
				 * keep the address and set 'mem_low' to 'uop->data' rounded
				 * down to the data module's block boundary. */
				ctx->mem_low = uop->data - (uop->data & (self->data_mod->block_size - 1));
				uop->addr = uinst->address;
			} else if (uinst->address == ctx->mem_mod_high && ctx->mem_mod_high != 0) {
				/* Access matches the recorded high memory-module boundary:
				 * keep the address and set 'mem_high' to 'uop->data' rounded
				 * up to the last byte of its block. */
				ctx->mem_high = uop->data | (self->data_mod->block_size - 1);
				uop->addr = uinst->address;
			} else if (!FPGARegCheck(ctx, uop, uinst->address)) {
				if (self->standalone) {
					uop->phy_addr = uinst->address;
					uop->addr = uinst->address;
					mem_read_copy(ctx->mem, uop->addr, 4, &(uop->data));
				} else {
					uop->phy_addr = mmu_translate(
							self->ctx->address_space_index, uinst->address);
					uop->addr = uinst->address;
					mem_read_copy(ctx->mem, uop->addr, 4, &(uop->data));
				}
			}
		}

		/* Trace */
		if (x86_tracing()) {
			char str[MAX_STRING_SIZE];
			char inst_name[MAX_STRING_SIZE];
			char uinst_name[MAX_STRING_SIZE];

			char *str_ptr;

			int str_size;

			str_ptr = str;
			str_size = sizeof str;

			/* Command */
			str_printf(&str_ptr, &str_size, "x86.new_inst id=%lld core=%d",
					uop->id_in_core, core->id);

			/* Speculative mode */
			if (uop->specmode)
				str_printf(&str_ptr, &str_size, " spec=\"t\"");

			/* Macro-instruction name */
			if (!uinst_index) {
				x86_inst_dump_buf(&ctx->inst, inst_name, sizeof inst_name);
				str_printf(&str_ptr, &str_size, " asm=\"%s\"", inst_name);
			}

			/* Rest */
			x86_uinst_dump_buf(uinst, uinst_name, sizeof uinst_name);
			str_printf(&str_ptr, &str_size, " uasm=\"%s\" stg=\"fe\"",
					uinst_name);

			/* Dump */
			x86_trace("%s\n", str);
		}

		/* Select as returned uop */
		if (!ret_uop || (uop->flags & X86_UINST_CTRL))
			ret_uop = uop;

		/* Insert into fetch queue */
		list_add(self->fetch_queue, uop);
		if (fetch_trace_cache)
			self->trace_cache_queue_occ++;

		/* Statistics */
		cpu->num_fetched_uinst++;
		self->num_fetched_uinst++;
		if (fetch_trace_cache)
			self->trace_cache->num_fetched_uinst++;

		/* Next uinst */
		uinst_index++;

	}

	/* Increase fetch queue occupancy if instruction does not come from
	 * trace cache, and return. */
	if (ret_uop && !fetch_trace_cache)
		self->fetchq_occ += ret_uop->mop_size;
	return ret_uop;
}
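
The block-boundary arithmetic in the memory-access branch above rounds a value down to the first byte of its cache block, or up to the last byte of that block, using the data module's block size. A standalone sketch of that arithmetic (assuming, as is the case for cache blocks, that the block size is a power of two; the names below are illustrative):

#include <assert.h>
#include <stdint.h>

/* Round an address down to the first byte of its block, or up to the
 * last byte of its block. Assumes block_size is a power of two. */
static uint32_t block_low(uint32_t addr, uint32_t block_size)
{
	return addr - (addr & (block_size - 1));
}

static uint32_t block_high(uint32_t addr, uint32_t block_size)
{
	return addr | (block_size - 1);
}

int main(void)
{
	assert(block_low(0x1234, 64) == 0x1200);   /* 0x1234 & 0x3f == 0x34 */
	assert(block_high(0x1234, 64) == 0x123f);  /* last byte of the 64-byte block */
	return 0;
}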