void vm_perform_shutdown_actions() { // Warning: do not call 'exit_globals()' here. All threads are still running. // Calling 'exit_globals()' will disable thread-local-storage and cause all // kinds of assertions to trigger in debug mode. if (is_init_completed()) { Thread* thread = Thread::current(); if (thread->is_Java_thread()) { // We are leaving the VM, set state to native (in case any OS exit // handlers call back to the VM) JavaThread* jt = (JavaThread*)thread; // Must always be walkable or have no last_Java_frame when in // thread_in_native jt->frame_anchor()->make_walkable(jt); jt->set_thread_state(_thread_in_native); } } notify_vm_shutdown(); }
// Native-method entry point for Zero's C++ interpreter.
//
// Builds a libffi argument list from the Java locals, transitions the
// thread to _thread_in_native, invokes the native function via ffi_call,
// then performs the native->Java transition by hand (fence/serialize,
// safepoint and async-exception checks) before unlocking, unwinding the
// zero frame and pushing the result onto the Zero stack.
//
//  method - the native, non-abstract method to invoke
//  UNUSED - unused slot; presumably keeps this signature parallel with
//           the other interpreter entries — TODO confirm
//  TRAPS  - standard HotSpot exception-propagation macro (current thread)
//
// Returns 0, meaning no deoptimized frames are on the stack.
int CppInterpreter::native_entry(methodOop method, intptr_t UNUSED, TRAPS) {
  // Make sure method is native and not abstract
  assert(method->is_native() && !method->is_abstract(), "should be");

  JavaThread *thread = (JavaThread *) THREAD;
  ZeroStack *stack = thread->zero_stack();

  // Allocate and initialize our frame
  InterpreterFrame *frame = InterpreterFrame::build(method, CHECK_0);
  thread->push_zero_frame(frame);
  interpreterState istate = frame->interpreter_state();
  intptr_t *locals = istate->locals();

  // Update the invocation counter
  if ((UseCompiler || CountCompiledCalls) && !method->is_synchronized()) {
    InvocationCounter *counter = method->invocation_counter();
    counter->increment();
    if (counter->reached_InvocationLimit()) {
      CALL_VM_NOCHECK(
        InterpreterRuntime::frequency_counter_overflow(thread, NULL));
      if (HAS_PENDING_EXCEPTION)
        goto unwind_and_return;
    }
  }

  // Lock if necessary.  Fast path: CAS a displaced header into the
  // object's mark word; on failure either the current thread already
  // owns the lock (recursive case) or we fall into the runtime.
  BasicObjectLock *monitor;
  monitor = NULL;
  if (method->is_synchronized()) {
    monitor = (BasicObjectLock*) istate->stack_base();
    oop lockee = monitor->obj();
    markOop disp = lockee->mark()->set_unlocked();

    monitor->lock()->set_displaced_header(disp);
    if (Atomic::cmpxchg_ptr(monitor, lockee->mark_addr(), disp) != disp) {
      if (thread->is_lock_owned((address) disp->clear_lock_bits())) {
        // Recursive lock: a NULL displaced header marks the re-entry.
        monitor->lock()->set_displaced_header(NULL);
      }
      else {
        CALL_VM_NOCHECK(InterpreterRuntime::monitorenter(thread, monitor));
        if (HAS_PENDING_EXCEPTION)
          goto unwind_and_return;
      }
    }
  }

  // Get the signature handler, creating it on first call if necessary.
  InterpreterRuntime::SignatureHandler *handler;
  {
    address handlerAddr = method->signature_handler();
    if (handlerAddr == NULL) {
      CALL_VM_NOCHECK(InterpreterRuntime::prepare_native_call(thread, method));
      if (HAS_PENDING_EXCEPTION)
        goto unlock_unwind_and_return;

      handlerAddr = method->signature_handler();
      assert(handlerAddr != NULL, "eh?");
    }
    if (handlerAddr == (address) InterpreterRuntime::slow_signature_handler) {
      CALL_VM_NOCHECK(handlerAddr =
        InterpreterRuntime::slow_signature_handler(thread, method, NULL, NULL));
      if (HAS_PENDING_EXCEPTION)
        goto unlock_unwind_and_return;
    }
    handler =
      InterpreterRuntime::SignatureHandler::from_handlerAddr(handlerAddr);
  }

  // Get the native function entry point
  address function;
  function = method->native_function();
  assert(function != NULL, "should be set if signature handler is");

  // Build the argument list.  Worst case per argument is two stack
  // words (a spilled oop slot plus the pointer-to-pointer), hence * 2.
  stack->overflow_check(handler->argument_count() * 2, THREAD);
  if (HAS_PENDING_EXCEPTION)
    goto unlock_unwind_and_return;

  void **arguments;
  void *mirror;
  {
    // NOTE(review): sizeof(void **) is used where sizeof(void *) reads
    // more naturally; the sizes are identical on supported targets.
    arguments =
      (void **) stack->alloc(handler->argument_count() * sizeof(void **));
    void **dst = arguments;

    // First implicit argument: the JNIEnv pointer.
    void *env = thread->jni_environment();
    *(dst++) = &env;

    // Second implicit argument for static methods: the class mirror,
    // stashed in oop_temp so the GC can see (and update) it.
    if (method->is_static()) {
      istate->set_oop_temp(
        method->constants()->pool_holder()->java_mirror());
      mirror = istate->oop_temp_addr();
      *(dst++) = &mirror;
    }

    // Walk the Java locals; src is decremented, so locals appear to be
    // laid out at decreasing addresses on the Zero stack.
    intptr_t *src = locals;
    for (int i = dst - arguments; i < handler->argument_count(); i++) {
      ffi_type *type = handler->argument_type(i);
      if (type == &ffi_type_pointer) {
        // Oop argument: pass a JNI handle (pointer to the oop slot),
        // or NULL unchanged.  The handle is spilled onto the Zero
        // stack so its address is stable across the call.
        if (*src) {
          stack->push((intptr_t) src);
          *(dst++) = stack->sp();
        }
        else {
          *(dst++) = src;
        }
        src--;
      }
      else if (type->size == 4) {
        *(dst++) = src--;
      }
      else if (type->size == 8) {
        // Two-slot (long/double) value: skip to its low-address slot.
        src--;
        *(dst++) = src--;
      }
      else {
        ShouldNotReachHere();
      }
    }
  }

  // Set up the Java frame anchor
  thread->set_last_Java_frame();

  // Change the thread state to _thread_in_native
  ThreadStateTransition::transition_from_java(thread, _thread_in_native);

  // Make the call.  The result buffer is 8 bytes on both 32- and
  // 64-bit targets (4 - LogBytesPerWord words), enough for a jlong.
  intptr_t result[4 - LogBytesPerWord];
  ffi_call(handler->cif(), (void (*)()) function, result, arguments);

  // Change the thread state back to _thread_in_Java.
  // ThreadStateTransition::transition_from_native() cannot be used
  // here because it does not check for asynchronous exceptions.
  // We have to manage the transition ourself.
  thread->set_thread_state(_thread_in_native_trans);

  // Make sure new state is visible in the GC thread
  if (os::is_MP()) {
    if (UseMembar) {
      OrderAccess::fence();
    }
    else {
      InterfaceSupport::serialize_memory(thread);
    }
  }

  // Handle safepoint operations, pending suspend requests,
  // and pending asynchronous exceptions.
  if (SafepointSynchronize::do_call_back() ||
      thread->has_special_condition_for_native_trans()) {
    JavaThread::check_special_condition_for_native_trans(thread);
    CHECK_UNHANDLED_OOPS_ONLY(thread->clear_unhandled_oops());
  }

  // Finally we can change the thread state to _thread_in_Java.
  thread->set_thread_state(_thread_in_Java);
  fixup_after_potential_safepoint();

  // Clear the frame anchor
  thread->reset_last_Java_frame();

  // If the result was an oop then unbox it and store it in
  // oop_temp where the garbage collector can see it before
  // we release the handle it might be protected by.
  if (handler->result_type() == &ffi_type_pointer) {
    if (result[0])
      istate->set_oop_temp(*(oop *) result[0]);
    else
      istate->set_oop_temp(NULL);
  }

  // Reset handle block
  thread->active_handles()->clear();

 unlock_unwind_and_return:

  // Unlock if necessary.  A NULL displaced header means a recursive
  // lock and needs no CAS; otherwise try the fast-path release and
  // fall into the runtime (restoring the monitor's obj first) if the
  // mark word no longer points at our lock.
  if (monitor) {
    BasicLock *lock = monitor->lock();
    markOop header = lock->displaced_header();
    oop rcvr = monitor->obj();
    monitor->set_obj(NULL);

    if (header != NULL) {
      if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), lock) != lock) {
        monitor->set_obj(rcvr);
        {
          HandleMark hm(thread);
          CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(thread, monitor));
        }
      }
    }
  }

 unwind_and_return:

  // Unwind the current activation
  thread->pop_zero_frame();

  // Pop our parameters
  stack->set_sp(stack->sp() + method->size_of_parameters());

  // Push our result.  On big-endian machines reinterpreting result as
  // a sub-word type reads the high-order bytes, so the value is first
  // shifted left into place; T_BOOLEAN is also normalized to 0/1.
  if (!HAS_PENDING_EXCEPTION) {
    BasicType type = result_type_of(method);
    stack->set_sp(stack->sp() - type2size[type]);

    switch (type) {
    case T_VOID:
      break;

    case T_BOOLEAN:
#ifndef VM_LITTLE_ENDIAN
      result[0] <<= (BitsPerWord - BitsPerByte);
#endif
      SET_LOCALS_INT(*(jboolean *) result != 0, 0);
      break;

    case T_CHAR:
#ifndef VM_LITTLE_ENDIAN
      result[0] <<= (BitsPerWord - BitsPerShort);
#endif
      SET_LOCALS_INT(*(jchar *) result, 0);
      break;

    case T_BYTE:
#ifndef VM_LITTLE_ENDIAN
      result[0] <<= (BitsPerWord - BitsPerByte);
#endif
      SET_LOCALS_INT(*(jbyte *) result, 0);
      break;

    case T_SHORT:
#ifndef VM_LITTLE_ENDIAN
      result[0] <<= (BitsPerWord - BitsPerShort);
#endif
      SET_LOCALS_INT(*(jshort *) result, 0);
      break;

    case T_INT:
#ifndef VM_LITTLE_ENDIAN
      result[0] <<= (BitsPerWord - BitsPerInt);
#endif
      SET_LOCALS_INT(*(jint *) result, 0);
      break;

    case T_LONG:
      SET_LOCALS_LONG(*(jlong *) result, 0);
      break;

    case T_FLOAT:
      SET_LOCALS_FLOAT(*(jfloat *) result, 0);
      break;

    case T_DOUBLE:
      SET_LOCALS_DOUBLE(*(jdouble *) result, 0);
      break;

    case T_OBJECT:
    case T_ARRAY:
      // Oop results were stored in oop_temp above, where the GC could
      // keep them current across the state transition.
      SET_LOCALS_OBJECT(istate->oop_temp(), 0);
      break;

    default:
      ShouldNotReachHere();
    }
  }

  // No deoptimized frames on the stack
  return 0;
}