// Generates the "virtual call" trampoline used when the callee of a call site
// is not predictable: it type-checks the callee, makes sure code has been
// generated for this CodeSpecializationKind, and jumps to the callee's
// arity-check entry point.
static MacroAssemblerCodeRef virtualForGenerator(VM* vm, FunctionPtr compile, FunctionPtr notJSFunction, const char* name, CodeSpecializationKind kind)
{
    JSInterfaceJIT jit;

    JSInterfaceJIT::JumpList slowCase;

#if USE(JSVALUE64)
    slowCase.append(jit.emitJumpIfNotJSCell(JSInterfaceJIT::regT0));
#else // USE(JSVALUE64)
    slowCase.append(jit.branch32(JSInterfaceJIT::NotEqual, JSInterfaceJIT::regT1, JSInterfaceJIT::TrustedImm32(JSValue::CellTag)));
#endif // USE(JSVALUE64)
    slowCase.append(jit.emitJumpIfNotType(JSInterfaceJIT::regT0, JSInterfaceJIT::regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT0, JSFunction::offsetOfScopeChain()), JSInterfaceJIT::regT1);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    // A negative parameter count means no code has been generated for this
    // specialization kind yet; call out to compile it.
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT0, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT2);
    JSInterfaceJIT::Jump hasCodeBlock1 = jit.branch32(JSInterfaceJIT::GreaterThanOrEqual, JSInterfaceJIT::Address(JSInterfaceJIT::regT2, FunctionExecutable::offsetOfNumParametersFor(kind)), JSInterfaceJIT::TrustedImm32(0));
    jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT3);
    jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame);
    jit.restoreArgumentReference();
    JSInterfaceJIT::Call callCompile = jit.call();
    jit.restoreReturnAddressBeforeReturn(JSInterfaceJIT::regT3);
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT0, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT2);

    hasCodeBlock1.link(&jit);
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT2, FunctionExecutable::offsetOfJITCodeWithArityCheckFor(kind)), JSInterfaceJIT::regT0);
#if !ASSERT_DISABLED
    JSInterfaceJIT::Jump ok = jit.branchTestPtr(JSInterfaceJIT::NonZero, JSInterfaceJIT::regT0);
    jit.breakpoint();
    ok.link(&jit);
#endif
    jit.jump(JSInterfaceJIT::regT0);

    slowCase.link(&jit);
    JSInterfaceJIT::Call callNotJSFunction = generateSlowCaseFor(vm, jit);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    patchBuffer.link(callCompile, compile);
    patchBuffer.link(callNotJSFunction, notJSFunction);

    return FINALIZE_CODE(patchBuffer, ("virtual %s trampoline", name));
}
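
// Generates the "link call" trampoline used for a call site's first execution:
// after type-checking the callee, it calls out to the lazyLink helper to
// compile the callee (if needed) and link the call site, then jumps to the
// entry point the helper returns in regT0.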
static MacroAssemblerCodeRef linkForGenerator(VM* vm, FunctionPtr lazyLink, FunctionPtr notJSFunction, const char* name)
{
    JSInterfaceJIT jit;

    JSInterfaceJIT::JumpList slowCase;

#if USE(JSVALUE64)
    slowCase.append(jit.emitJumpIfNotJSCell(JSInterfaceJIT::regT0));
    slowCase.append(jit.emitJumpIfNotType(JSInterfaceJIT::regT0, JSInterfaceJIT::regT1, JSFunctionType));
#else // USE(JSVALUE64)
    slowCase.append(jit.branch32(JSInterfaceJIT::NotEqual, JSInterfaceJIT::regT1, JSInterfaceJIT::TrustedImm32(JSValue::CellTag)));
    slowCase.append(jit.emitJumpIfNotType(JSInterfaceJIT::regT0, JSInterfaceJIT::regT1, JSFunctionType));
#endif // USE(JSVALUE64)

    // Finish canonical initialization before JS function call.
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT0, JSFunction::offsetOfScopeChain()), JSInterfaceJIT::regT1);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT3);
    jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT3, JSStack::ReturnPC);

    jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame);
    jit.restoreArgumentReference();
    JSInterfaceJIT::Call callLazyLink = jit.call();
    jit.restoreReturnAddressBeforeReturn(JSInterfaceJIT::regT3);
    jit.jump(JSInterfaceJIT::regT0);

    slowCase.link(&jit);
    JSInterfaceJIT::Call callNotJSFunction = generateSlowCaseFor(vm, jit);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    patchBuffer.link(callLazyLink, lazyLink);
    patchBuffer.link(callNotJSFunction, notJSFunction);

    return FINALIZE_CODE(patchBuffer, ("link %s trampoline", name));
}
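
// Generates the trampoline for calling a host (native) function: it fills in
// the call frame fields a native callee may inspect (CodeBlock, ScopeChain,
// ReturnPC), passes ExecState* in the first argument register of each
// platform's C calling convention, calls through
// NativeExecutable::offsetOfNativeFunctionFor(kind), and on return checks
// vm->exception before returning to the caller.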
static MacroAssemblerCodeRef nativeForGenerator(VM* vm, CodeSpecializationKind kind)
{
    int executableOffsetToFunction = NativeExecutable::offsetOfNativeFunctionFor(kind);

    JSInterfaceJIT jit;

    jit.emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, JSInterfaceJIT::regT0);
    jit.emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, JSInterfaceJIT::regT1, JSInterfaceJIT::regT0);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    jit.peek(JSInterfaceJIT::regT1);
    jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    jit.move(JSInterfaceJIT::callFrameRegister, X86Registers::ecx);

    jit.subPtr(JSInterfaceJIT::TrustedImm32(16 - sizeof(void*)), JSInterfaceJIT::stackPointerRegister); // Align stack after call.

    // call the function
    jit.emitGetFromCallFrameHeaderPtr(JSStack::Callee, JSInterfaceJIT::regT1);
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT1);
    jit.move(JSInterfaceJIT::regT0, JSInterfaceJIT::callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, executableOffsetToFunction));

    jit.addPtr(JSInterfaceJIT::TrustedImm32(16 - sizeof(void*)), JSInterfaceJIT::stackPointerRegister);

#elif CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, JSInterfaceJIT::regT0);
    jit.emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, JSInterfaceJIT::regT1, JSInterfaceJIT::regT0);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    jit.peek(JSInterfaceJIT::regT1);
    jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ReturnPC);

#if !OS(WINDOWS)
    // Calling convention:      f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    jit.move(JSInterfaceJIT::callFrameRegister, X86Registers::edi);

    jit.subPtr(JSInterfaceJIT::TrustedImm32(16 - sizeof(int64_t)), JSInterfaceJIT::stackPointerRegister); // Align stack after call.

    jit.emitGetFromCallFrameHeaderPtr(JSStack::Callee, X86Registers::esi);
    jit.loadPtr(JSInterfaceJIT::Address(X86Registers::esi, JSFunction::offsetOfExecutable()), X86Registers::r9);
    jit.move(JSInterfaceJIT::regT0, JSInterfaceJIT::callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    jit.call(JSInterfaceJIT::Address(X86Registers::r9, executableOffsetToFunction));

    jit.addPtr(JSInterfaceJIT::TrustedImm32(16 - sizeof(int64_t)), JSInterfaceJIT::stackPointerRegister);
#else
    // Calling convention:      f(ecx, edx, r8, r9, ...);
    // Host function signature: f(ExecState*);
    jit.move(JSInterfaceJIT::callFrameRegister, X86Registers::ecx);

    // Leave space for the callee parameter home addresses and align the stack.
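    // (The Win64 convention requires the caller to reserve 32 bytes of "home
    // space" for the four register parameters, i.e. the 4 * sizeof(int64_t)
    // below; the additional 16 - sizeof(int64_t) keeps the stack 16-byte
    // aligned across the call that follows.)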
    jit.subPtr(JSInterfaceJIT::TrustedImm32(4 * sizeof(int64_t) + 16 - sizeof(int64_t)), JSInterfaceJIT::stackPointerRegister);

    jit.emitGetFromCallFrameHeaderPtr(JSStack::Callee, X86Registers::edx);
    jit.loadPtr(JSInterfaceJIT::Address(X86Registers::edx, JSFunction::offsetOfExecutable()), X86Registers::r9);
    jit.move(JSInterfaceJIT::regT0, JSInterfaceJIT::callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    jit.call(JSInterfaceJIT::Address(X86Registers::r9, executableOffsetToFunction));

    jit.addPtr(JSInterfaceJIT::TrustedImm32(4 * sizeof(int64_t) + 16 - sizeof(int64_t)), JSInterfaceJIT::stackPointerRegister);
#endif

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, JSInterfaceJIT::regT2);
    jit.emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, JSInterfaceJIT::regT1, JSInterfaceJIT::regT2);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT3); // Callee preserved
    jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT3, JSStack::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    jit.move(JSInterfaceJIT::callFrameRegister, ARMRegisters::r0);

    jit.emitGetFromCallFrameHeaderPtr(JSStack::Callee, ARMRegisters::r1);
    jit.move(JSInterfaceJIT::regT2, JSInterfaceJIT::callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    jit.loadPtr(JSInterfaceJIT::Address(ARMRegisters::r1, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT2);
    jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT2, executableOffsetToFunction));

    jit.restoreReturnAddressBeforeReturn(JSInterfaceJIT::regT3);

#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, JSInterfaceJIT::regT2);
    jit.emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, JSInterfaceJIT::regT1, JSInterfaceJIT::regT2);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT3); // Callee preserved
    jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT3, JSStack::ReturnPC);

    // Calling convention:      f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    jit.move(JSInterfaceJIT::callFrameRegister, JSInterfaceJIT::regT4);

    jit.emitGetFromCallFrameHeaderPtr(JSStack::Callee, JSInterfaceJIT::regT5);
    jit.move(JSInterfaceJIT::regT2, JSInterfaceJIT::callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT5, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT2);

    jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT2, executableOffsetToFunction), JSInterfaceJIT::regT0);
    jit.restoreReturnAddressBeforeReturn(JSInterfaceJIT::regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, JSInterfaceJIT::regT0);
    jit.emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, JSInterfaceJIT::regT1, JSInterfaceJIT::regT0);
    jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);

    jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT3); // Callee preserved
    jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT3, JSStack::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    jit.subPtr(JSInterfaceJIT::TrustedImm32(16), JSInterfaceJIT::stackPointerRegister);

    // Setup arg0
    jit.move(JSInterfaceJIT::callFrameRegister, MIPSRegisters::a0);

    // Call
    jit.emitGetFromCallFrameHeaderPtr(JSStack::Callee, MIPSRegisters::a2);
    jit.loadPtr(JSInterfaceJIT::Address(MIPSRegisters::a2, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT2);
    jit.move(JSInterfaceJIT::regT0, JSInterfaceJIT::callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT2, executableOffsetToFunction));

    // Restore stack space
    jit.addPtr(JSInterfaceJIT::TrustedImm32(16), JSInterfaceJIT::stackPointerRegister);

    jit.restoreReturnAddressBeforeReturn(JSInterfaceJIT::regT3);
#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
#if USE(JSVALUE64)
    jit.load64(&(vm->exception), JSInterfaceJIT::regT2);
    JSInterfaceJIT::Jump exceptionHandler = jit.branchTest64(JSInterfaceJIT::NonZero, JSInterfaceJIT::regT2);
#else
    JSInterfaceJIT::Jump exceptionHandler = jit.branch32(
        JSInterfaceJIT::NotEqual,
        JSInterfaceJIT::AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)),
        JSInterfaceJIT::TrustedImm32(JSValue::EmptyValueTag));
#endif

    // Return.
    jit.ret();

    // Handle an exception
    exceptionHandler.link(&jit);

    // Grab the return address.
    jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT1);

    jit.move(JSInterfaceJIT::TrustedImmPtr(&vm->exceptionLocation), JSInterfaceJIT::regT2);
    jit.storePtr(JSInterfaceJIT::regT1, JSInterfaceJIT::regT2);

    jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame);
    jit.move(JSInterfaceJIT::TrustedImmPtr(FunctionPtr(ctiVMThrowTrampolineSlowpath).value()), JSInterfaceJIT::regT1);
    jit.jump(JSInterfaceJIT::regT1);

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("native %s trampoline", toCString(kind).data()));
}
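
// A minimal sketch of how these static generators are typically wrapped for
// export. The slow-path helpers passed in (shown here as cti_*-style stubs)
// are defined elsewhere; the helper names below are assumptions for
// illustration, not taken from this file.
MacroAssemblerCodeRef linkCallGenerator(VM* vm)
{
    // Hypothetical helper names: lazy-link and not-a-JSFunction slow paths.
    return linkForGenerator(vm, FunctionPtr(cti_vm_lazyLinkCall), FunctionPtr(cti_op_call_NotJSFunction), "call");
}

MacroAssemblerCodeRef nativeCallGenerator(VM* vm)
{
    return nativeForGenerator(vm, CodeForCall);
}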