/* Emit native code for the MIPS CFC1 instruction (move from FPU control
 * register to GPR).  nrd == 31 selects FCR31 (control/status); any other
 * nrd reads FCR0.  The 32-bit value is stored into the low word of rt and
 * then sign-extended into the high word.  Dual-arch version: x86-64 uses
 * RIP/base-relative accessors, 32-bit x86 uses absolute moffs forms. */
void gencfc1(void)
{
#ifdef INTERPRET_CFC1
   /* Fall back to the cached interpreter for this opcode. */
   gencallinterp((native_type)cached_interpreter_table.CFC1, 0);
#else
   /* Emit the COP1-usable check; traps if the FPU is disabled. */
   gencheck_cop1_unusable();
#ifdef __x86_64__
   if (dst->f.r.nrd == 31) mov_xreg32_m32rel(EAX, (unsigned int*)&FCR31);
   else mov_xreg32_m32rel(EAX, (unsigned int*)&FCR0);
   mov_m32rel_xreg32((unsigned int*)dst->f.r.rt, EAX);
   /* Arithmetic shift by 31 replicates the sign bit into all of EAX,
      giving the high word of the sign-extended 64-bit rt value. */
   sar_reg32_imm8(EAX, 31);
   mov_m32rel_xreg32(((unsigned int*)dst->f.r.rt)+1, EAX);
#else
   if (dst->f.r.nrd == 31) mov_eax_memoffs32((unsigned int*)&FCR31);
   else mov_eax_memoffs32((unsigned int*)&FCR0);
   mov_memoffs32_eax((unsigned int*)dst->f.r.rt);
   /* Sign-extend into the upper 32 bits of rt, as above. */
   sar_reg32_imm8(EAX, 31);
   mov_memoffs32_eax(((unsigned int*)dst->f.r.rt)+1);
#endif
#endif
}
/* Emit native code for the MIPS CFC1 instruction (move from FPU control
 * register to GPR), x86-64-only variant.  nrd == 31 selects FCR31
 * (control/status); any other nrd reads FCR0.  The value is stored into
 * the low word of rt and sign-extended into the high word.
 * NOTE(review): a second gencfc1 definition appears earlier in this
 * chunk — these look like two revisions of the same file concatenated;
 * confirm only one is actually compiled. */
void gencfc1(void)
{
#if defined(COUNT_INSTR)
   /* Optional per-opcode execution counter (slot 113 for CFC1). */
   inc_m32rel(&instr_count[113]);
#endif
#ifdef INTERPRET_CFC1
   /* Fall back to the cached interpreter for this opcode. */
   gencallinterp((unsigned long long)cached_interpreter_table.CFC1, 0);
#else
   /* Emit the COP1-usable check; traps if the FPU is disabled. */
   gencheck_cop1_unusable();
   if(dst->f.r.nrd == 31) mov_xreg32_m32rel(EAX, (unsigned int*)&FCR31);
   else mov_xreg32_m32rel(EAX, (unsigned int*)&FCR0);
   mov_m32rel_xreg32((unsigned int*)dst->f.r.rt, EAX);
   /* Arithmetic shift by 31 replicates the sign bit, producing the high
      word of the sign-extended 64-bit rt value. */
   sar_reg32_imm8(EAX, 31);
   mov_m32rel_xreg32(((unsigned int*)dst->f.r.rt)+1, EAX);
#endif
}
void gendmfc1(void) { #if defined(COUNT_INSTR) inc_m32rel(&instr_count[112]); #endif #ifdef INTERPRET_DMFC1 gencallinterp((unsigned long long)cached_interpreter_table.DMFC1, 0); #else gencheck_cop1_unusable(); mov_xreg64_m64rel(RAX, (unsigned long long *) (®_cop1_double[dst->f.r.nrd])); mov_reg32_preg64(EBX, RAX); mov_reg32_preg64pimm32(ECX, RAX, 4); mov_m32rel_xreg32((unsigned int*)dst->f.r.rt, EBX); mov_m32rel_xreg32(((unsigned int*)dst->f.r.rt)+1, ECX); #endif }
/* Emit native code for the MIPS CTC1 instruction (move GPR to FPU control
 * register), x86-64-only variant.  Only FCR31 is writable; any other nrd
 * emits nothing.  After storing FCR31, the emitted code inspects the low
 * two bits (the MIPS rounding-mode field) and loads the matching x87
 * control word via FLDCW.
 *
 * The "// N" trailing comments are the byte lengths of the emitted host
 * instructions; the jne_rj / jmp_imm_short relative-jump distances are
 * hand-summed from them.  Do NOT reorder or change any emission here
 * without recomputing those offsets. */
void genctc1(void)
{
#if defined(COUNT_INSTR)
   /* Optional per-opcode execution counter (slot 116 for CTC1). */
   inc_m32rel(&instr_count[116]);
#endif
#ifdef INTERPRET_CTC1
   /* Fall back to the cached interpreter for this opcode. */
   gencallinterp((unsigned long long)cached_interpreter_table.CTC1, 0);
#else
   /* Emit the COP1-usable check; traps if the FPU is disabled. */
   gencheck_cop1_unusable();
   /* Only control register 31 (FCR31) is handled. */
   if (dst->f.r.nrd != 31) return;
   mov_xreg32_m32rel(EAX, (unsigned int*)dst->f.r.rt);
   mov_m32rel_xreg32((unsigned int*)&FCR31, EAX);
   /* Isolate the 2-bit rounding-mode field and dispatch on its value
      (0..3), each case storing an x87 control-word constant —
      presumably mapping the MIPS RM field to the equivalent x87
      rounding-control bits; confirm against the x87 CW layout. */
   and_eax_imm32(3);
   cmp_eax_imm32(0);
   jne_rj(13);
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0x33F); // 11
   jmp_imm_short(51); // 2
   cmp_eax_imm32(1); // 5
   jne_rj(13); // 2
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0xF3F); // 11
   jmp_imm_short(31); // 2
   cmp_eax_imm32(2); // 5
   jne_rj(13); // 2
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0xB3F); // 11
   jmp_imm_short(11); // 2
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0x73F); // 11
   /* Load the selected control word into the x87 FPU. */
   fldcw_m16rel((unsigned short*)&rounding_mode);
#endif
}
/* Emit native code for the MIPS MFC1 instruction: move the low 32 bits of
 * the single-precision FPU register nrd into GPR rt, sign-extended to
 * 64 bits.  This variant uses the g_dev.r4300 accessor API
 * (r4300_cp1_regs_simple) rather than direct globals — it appears to be
 * from a newer revision of the core than the neighboring functions. */
void genmfc1(void)
{
#if defined(COUNT_INSTR)
   /* Optional per-opcode execution counter (slot 111 for MFC1). */
   inc_m32rel(&instr_count[111]);
#endif
#ifdef INTERPRET_MFC1
   /* Fall back to the cached interpreter for this opcode. */
   gencallinterp((unsigned long long)cached_interpreter_table.MFC1, 0);
#else
   /* Emit the COP1-usable check; traps if the FPU is disabled. */
   gencheck_cop1_unusable();
   /* RAX <- host pointer into the simple (single-precision) FPU register
      table; EBX <- the 32-bit value it points at. */
   mov_xreg64_m64rel(RAX, (unsigned long long *)(&(r4300_cp1_regs_simple())[g_dev.r4300.recomp.dst->f.r.nrd]));
   mov_reg32_preg64(EBX, RAX);
   mov_m32rel_xreg32((unsigned int*)g_dev.r4300.recomp.dst->f.r.rt, EBX);
   /* Arithmetic shift by 31 replicates the sign bit, producing the high
      word of the sign-extended 64-bit rt value. */
   sar_reg32_imm8(EBX, 31);
   mov_m32rel_xreg32(((unsigned int*)g_dev.r4300.recomp.dst->f.r.rt)+1, EBX);
#endif
}
/* Emit native code for the MIPS CTC1 instruction (move GPR to FPU control
 * register), dual-arch variant.  Only FCR31 is writable; any other nrd
 * emits nothing.  After storing FCR31, the emitted code inspects the low
 * two bits (the MIPS rounding-mode field) and loads the matching x87
 * control word via FLDCW.
 *
 * The "// N" trailing comments are the byte lengths of the emitted host
 * instructions; the jne_rj / jmp_imm_short relative-jump distances are
 * hand-summed from them and DIFFER between the 64-bit and 32-bit paths
 * (rel-addressed stores are one byte longer).  Do NOT reorder or change
 * any emission here without recomputing those offsets.
 * NOTE(review): a second genctc1 definition appears earlier in this
 * chunk — these look like two revisions of the same file concatenated;
 * confirm only one is actually compiled. */
void genctc1(void)
{
#ifdef INTERPRET_CTC1
   /* Fall back to the cached interpreter for this opcode. */
   gencallinterp((native_type)cached_interpreter_table.CTC1, 0);
#else
   /* Emit the COP1-usable check; traps if the FPU is disabled. */
   gencheck_cop1_unusable();
   /* Only control register 31 (FCR31) is handled. */
   if (dst->f.r.nrd != 31) return;
#ifdef __x86_64__
   mov_xreg32_m32rel(EAX, (unsigned int*)dst->f.r.rt);
   mov_m32rel_xreg32((unsigned int*)&FCR31, EAX);
   /* Dispatch on the 2-bit rounding-mode field (0..3); each case stores
      an x87 control-word constant — presumably mapping the MIPS RM field
      to x87 rounding-control bits; confirm against the x87 CW layout. */
   and_eax_imm32(3);
   cmp_eax_imm32(0);
   jne_rj(13);
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0x33F); // 11
   jmp_imm_short(51); // 2
   cmp_eax_imm32(1); // 5
   jne_rj(13); // 2
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0xF3F); // 11
   jmp_imm_short(31); // 2
   cmp_eax_imm32(2); // 5
   jne_rj(13); // 2
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0xB3F); // 11
   jmp_imm_short(11); // 2
   mov_m32rel_imm32((unsigned int*)&rounding_mode, 0x73F); // 11
   /* Load the selected control word into the x87 FPU. */
   fldcw_m16rel((unsigned short*)&rounding_mode);
#else
   mov_eax_memoffs32((unsigned int*)dst->f.r.rt);
   mov_memoffs32_eax((unsigned int*)&FCR31);
   /* Same dispatch as the 64-bit path, with 10-byte absolute stores,
      hence the shorter jump distances. */
   and_eax_imm32(3);
   cmp_eax_imm32(0);
   jne_rj(12);
   mov_m32_imm32((unsigned int*)&rounding_mode, 0x33F); // 10
   jmp_imm_short(48); // 2
   cmp_eax_imm32(1); // 5
   jne_rj(12); // 2
   mov_m32_imm32((unsigned int*)&rounding_mode, 0xF3F); // 10
   jmp_imm_short(29); // 2
   cmp_eax_imm32(2); // 5
   jne_rj(12); // 2
   mov_m32_imm32((unsigned int*)&rounding_mode, 0xB3F); // 10
   jmp_imm_short(10); // 2
   mov_m32_imm32((unsigned int*)&rounding_mode, 0x73F); // 10
   /* Load the selected control word into the x87 FPU. */
   fldcw_m16((unsigned short*)&rounding_mode);
#endif
#endif
}
void gendmfc1(void) { #ifdef INTERPRET_DMFC1 gencallinterp((native_type)cached_interpreter_table.DMFC1, 0); #else gencheck_cop1_unusable(); #ifdef __x86_64__ mov_xreg64_m64rel(RAX, (unsigned long long *) (®_cop1_double[dst->f.r.nrd])); mov_reg32_preg64(EBX, RAX); mov_reg32_preg64pimm32(ECX, RAX, 4); mov_m32rel_xreg32((unsigned int*)dst->f.r.rt, EBX); mov_m32rel_xreg32(((unsigned int*)dst->f.r.rt)+1, ECX); #else mov_eax_memoffs32((unsigned int*)(®_cop1_double[dst->f.r.nrd])); mov_reg32_preg32(EBX, EAX); mov_reg32_preg32pimm32(ECX, EAX, 4); mov_m32_reg32((unsigned int*)dst->f.r.rt, EBX); mov_m32_reg32(((unsigned int*)dst->f.r.rt)+1, ECX); #endif #endif }
/* Emit native code for the MIPS JALR instruction (jump-and-link register):
 * save the return address (pc+8, i.e. dst->addr+4 here) sign-extended into
 * rd, execute the delay slot, then jump to the address held in rs — either
 * directly into the current compiled block when the target falls in the
 * same 4 KiB page, or through the generic jump_to_func dispatcher.
 *
 * The emitted code is position- and length-sensitive: jne_rj(11) and the
 * "// N" byte counts are hand-summed host-instruction lengths.  Do NOT
 * reorder or change emissions without recomputing them. */
void genjalr(void)
{
#if defined(COUNT_INSTR)
   /* Optional per-opcode execution counter (slot 62 for JALR). */
   inc_m32rel(&instr_count[62]);
#endif
#ifdef INTERPRET_JALR
   /* Fall back to the cached interpreter for this opcode. */
   gencallinterp((unsigned long long)cached_interpreter_table.JALR, 0);
#else
   /* Byte offsets into a precomp_instr entry, used below to index the
      per-instruction metadata table of the target block. */
   static unsigned int precomp_instr_size = sizeof(precomp_instr);
   unsigned int diff = (unsigned int) offsetof(precomp_instr, local_addr);
   unsigned int diff_need = (unsigned int) offsetof(precomp_instr, reg_cache_infos.need_map);
   unsigned int diff_wrap = (unsigned int) offsetof(precomp_instr, reg_cache_infos.jump_wrapper);
   /* Bail to the interpreter when the delay slot would cross a page
      boundary in a TLB-mapped region, or when compiled jumps are
      globally disabled. */
   if (((dst->addr & 0xFFF) == 0xFFC &&
       (dst->addr < 0x80000000 || dst->addr >= 0xC0000000))||no_compiled_jump)
   {
      gencallinterp((unsigned long long)cached_interpreter_table.JALR, 1);
      return;
   }
   free_registers_move_start();
   /* Latch rs into local_rs BEFORE the delay slot runs, since the delay
      slot may overwrite rs. */
   mov_xreg32_m32rel(EAX, (unsigned int *)dst->f.r.rs);
   mov_m32rel_xreg32((unsigned int *)&local_rs, EAX);
   gendelayslot();
   /* Store the 64-bit sign-extended return address into rd of the JALR
      instruction itself ((dst-1) after gendelayslot advanced dst). */
   mov_m32rel_imm32((unsigned int *)(dst-1)->f.r.rd, dst->addr+4);
   if ((dst->addr+4) & 0x80000000)
     mov_m32rel_imm32(((unsigned int *)(dst-1)->f.r.rd)+1, 0xFFFFFFFF);
   else
     mov_m32rel_imm32(((unsigned int *)(dst-1)->f.r.rd)+1, 0);
   /* last_addr <- jump target, then emit the interrupt check. */
   mov_xreg32_m32rel(EAX, (unsigned int *)&local_rs);
   mov_m32rel_xreg32((unsigned int *)&last_addr, EAX);
   gencheck_interupt_reg();
   /* EBX <- full target address; EAX <- its 4 KiB page, compared against
      this block's page to decide between the intra-block fast path and
      the generic dispatcher. */
   mov_xreg32_m32rel(EAX, (unsigned int *)&local_rs);
   mov_reg32_reg32(EBX, EAX);
   and_eax_imm32(0xFFFFF000);
   cmp_eax_imm32(dst_block->start & 0xFFFFF000);
   je_near_rj(0);      /* placeholder; patched by jump_start/end_rel32 */
   jump_start_rel32();
   /* Slow path: store the target and tail-call the generic dispatcher. */
   mov_m32rel_xreg32(&jump_to_address, EBX);
   mov_reg64_imm64(RAX, (unsigned long long) (dst+1));
   mov_m64rel_xreg64((unsigned long long *)(&PC), RAX);
   mov_reg64_imm64(RAX, (unsigned long long) jump_to_func);
   call_reg64(RAX);  /* will never return from call */
   jump_end_rel32();
   /* Fast path: index this block's precomp_instr table with
      (target - block_start) / 4 to find the target instruction's entry. */
   mov_reg64_imm64(RSI, (unsigned long long) dst_block->block);
   mov_reg32_reg32(EAX, EBX);
   sub_eax_imm32(dst_block->start);
   shr_reg32_imm8(EAX, 2);
   mul_m32rel((unsigned int *)(&precomp_instr_size));
   /* If the entry's need_map flag is 1, enter through its register-cache
      jump wrapper; otherwise jump straight to its local_addr offset
      within the block's code buffer. */
   mov_reg32_preg64preg64pimm32(EBX, RAX, RSI, diff_need);
   cmp_reg32_imm32(EBX, 1);
   jne_rj(11);
   add_reg32_imm32(EAX, diff_wrap); // 6
   add_reg64_reg64(RAX, RSI); // 3
   jmp_reg64(RAX); // 2
   mov_reg32_preg64preg64pimm32(EBX, RAX, RSI, diff);
   mov_rax_memoffs64((unsigned long long *) &dst_block->code);
   add_reg64_reg64(RAX, RBX);
   jmp_reg64(RAX);
#endif
}