/**
 * Walks all frames of the given stack iterator and reports each frame's
 * GC roots to the TI heap-iteration state held in @a ti_env.
 *
 * For a JITted frame the roots come from the JIT's stack-frame root set;
 * for a native (M2N) frame they come from the frame's local handle block.
 * Consumes the iterator: it is advanced to past-the-end and then freed.
 *
 * @param ti_env  TI environment whose iteration_state receives the
 *                current method / root-kind / depth for each frame.
 * @param si      stack iterator positioned at the innermost frame;
 *                freed before return.
 */
static void ti_enumerate_thread_stack(TIEnv* ti_env, StackIterator* si)
{
    ASSERT_NO_INTERPRETER

    TIIterationState* state = ti_env->iteration_state;
    state->depth = 0;

    for (; !si_is_past_end(si); si_goto_previous(si)) {
        CodeChunkInfo* chunk = si_get_code_chunk_info(si);
        if (NULL == chunk) {
            // Native (M2N) frame: roots live in its local object handles.
            state->method = (jmethodID)m2n_get_method(si_get_m2n(si));
            state->root_kind = JVMTI_HEAP_ROOT_JNI_LOCAL;
            oh_enumerate_handles(m2n_get_local_handles(si_get_m2n(si)));
        } else {
            // Managed (JITted) frame: ask the owning JIT for the root set.
            state->method = (jmethodID)chunk->get_method();
            state->root_kind = JVMTI_HEAP_ROOT_STACK_LOCAL;
            // FIXME: set up frame base (platform dependent!)
            chunk->get_jit()->get_root_set_from_stack_frame(
                chunk->get_method(), 0, si_get_jit_context(si));
        }
        state->depth += 1;
    }

    si_free(si);
}
// function can be safe point & should be called with disable reqursion = 1 void exn_throw_for_JIT(ManagedObject* exn_obj, Class_Handle exn_class, Method_Handle exn_constr, U_8* jit_exn_constr_args, jvalue* vm_exn_constr_args) { /* * !!!! NO LOGGER IS ALLOWED IN THIS FUNCTION !!! * !!!! RELEASE BUILD WILL BE BROKEN !!! * !!!! NO TRACE2, INFO, WARN, ECHO, ASSERT, ... */ assert(!hythread_is_suspend_enabled()); if(exn_raised()) { return; } ASSERT_NO_INTERPRETER ASSERT_RAISE_AREA; if ((exn_obj == NULL) && (exn_class == NULL)) { exn_class = VM_Global_State::loader_env->java_lang_NullPointerException_Class; } ManagedObject* local_exn_obj = exn_obj; StackIterator* si = (StackIterator*) STD_ALLOCA(si_size()); si_fill_from_native(si); if (exn_raised()) { return; } #ifndef _IPF_ assert(is_gc_frame_before_m2n_frame()); #endif // _IPF_ assert(!exn_raised()); if (si_is_past_end(si)) { //FIXME LAZY EXCEPTION (2006.05.12) // should be replaced by lazy version set_exception_object_internal(local_exn_obj); return; } si_transfer_all_preserved_registers(si); assert(!exn_raised()); DebugUtilsTI* ti = VM_Global_State::loader_env->TI; exn_obj = exn_propagate_exception(si, &local_exn_obj, exn_class, exn_constr, jit_exn_constr_args, vm_exn_constr_args); if (exn_raised()) { //si_free(si); return; } M2nFrame* m2nFrame = m2n_get_last_frame(); ObjectHandles* last_m2n_frame_handles = m2n_get_local_handles(m2nFrame); if (last_m2n_frame_handles) { free_local_object_handles2(last_m2n_frame_handles); } set_exception_object_internal(exn_obj); if (ti->get_global_capability(DebugUtilsTI::TI_GC_ENABLE_EXCEPTION_EVENT)) { Registers regs = {0}; VM_thread *thread = p_TLS_vmthread; NativeCodePtr callback = (NativeCodePtr) jvmti_exception_catch_callback; si_copy_to_registers(si, ®s); vm_set_exception_registers(thread, regs); si_set_callback(si, &callback); } else if (p_TLS_vmthread->restore_guard_page) { Registers regs = {0}; VM_thread *thread = p_TLS_vmthread; NativeCodePtr callback = (NativeCodePtr) 
exception_catch_callback; si_copy_to_registers(si, ®s); vm_set_exception_registers(thread, regs); si_set_callback(si, &callback); } // don't put any call here si_transfer_control(si); } //exn_throw_for_JIT
// function can be safe point & should be called with disable recursion = 1 static ManagedObject * exn_propagate_exception( StackIterator * si, ManagedObject ** exn_obj, Class_Handle exn_class, Method_Handle exn_constr, U_8 * jit_exn_constr_args, jvalue* vm_exn_constr_args) { assert(!hythread_is_suspend_enabled()); ASSERT_RAISE_AREA; ASSERT_NO_INTERPRETER; assert(*exn_obj || exn_class); // Save the throw context StackIterator *throw_si = (StackIterator*) STD_ALLOCA(si_size()); memcpy(throw_si, si, si_size()); // Skip first frame if it is an M2nFrame (which is always a transition from managed to the throw code). // The M2nFrame will be removed from the thread's M2nFrame list but transfer control or copy to registers. if (si_is_native(si)) { si_goto_previous(si); } Method *interrupted_method; NativeCodePtr interrupted_method_location; JIT *interrupted_method_jit; bool restore_guard_page = p_TLS_vmthread->restore_guard_page; if (!si_is_native(si)) { CodeChunkInfo *interrupted_cci = si_get_code_chunk_info(si); assert(interrupted_cci); interrupted_method = interrupted_cci->get_method(); interrupted_method_location = si_get_ip(si); interrupted_method_jit = interrupted_cci->get_jit(); } else { interrupted_method = m2n_get_method(si_get_m2n(si)); interrupted_method_location = 0; interrupted_method_jit = 0; } if (NULL != *exn_obj) { // Gregory - When *exn_obj is NULL it means we're called from exn_athrow_regs // which means that IP points exactly to the right location. But // when *exn_obj is not NULL, it means that we're called from exn_throw_for_JIT // where *exn_obj is already constructed and is thrown by code via athrow. // So in this case IP reported by stack iterator is past the athrow bytecode // and should be moved back to be inside of bytecode location for interrupted // method. interrupted_method_location = (NativeCodePtr)((POINTER_SIZE_INT)interrupted_method_location - 1); // Determine the type of the exception for the type tests below. 
exn_class = (*exn_obj)->vt()->clss; } #ifdef VM_STATS assert(exn_class); exn_class->class_thrown(); UNSAFE_REGION_START VM_Statistics::get_vm_stats().num_exceptions++; UNSAFE_REGION_END #endif // VM_STATS // Remove single step breakpoints which could have been set on the // exception bytecode DebugUtilsTI *ti = VM_Global_State::loader_env->TI; if (ti->isEnabled() && ti->is_single_step_enabled()) { jvmti_thread_t jvmti_thread = jthread_self_jvmti(); ti->vm_brpt->lock(); if (NULL != jvmti_thread->ss_state) { jvmti_remove_single_step_breakpoints(ti, jvmti_thread); } ti->vm_brpt->unlock(); } // When VM is in shutdown stage we need to execute "finally" clause to // release monitors and propagate an exception to the upper frames. Class_Handle search_exn_class = !VM_Global_State::loader_env->IsVmShutdowning() ? exn_class : VM_Global_State::loader_env->JavaLangObject_Class; if (!si_is_native(si)) { bool same_frame = true; while (!si_is_past_end(si) && !si_is_native(si)) { CodeChunkInfo *cci = si_get_code_chunk_info(si); assert(cci); Method *method = cci->get_method(); JIT *jit = cci->get_jit(); assert(method && jit); NativeCodePtr ip = si_get_ip(si); bool is_ip_past = !!si_get_jit_context(si)->is_ip_past; #ifdef VM_STATS cci->num_throws++; #endif // VM_STATS // Examine this frame's exception handlers looking for a match unsigned num_handlers = cci->get_num_target_exception_handlers(); for (unsigned i = 0; i < num_handlers; i++) { Target_Exception_Handler_Ptr handler = cci->get_target_exception_handler_info(i); if (!handler) continue; if (handler->is_in_range(ip, is_ip_past) && handler->is_assignable(search_exn_class)) { // Found a handler that catches the exception. 
#ifdef VM_STATS cci->num_catches++; if (same_frame) { VM_Statistics::get_vm_stats().num_exceptions_caught_same_frame++; } if (handler->is_exc_obj_dead()) { VM_Statistics::get_vm_stats().num_exceptions_dead_object++; if (!*exn_obj) { VM_Statistics::get_vm_stats().num_exceptions_object_not_created++; } } #endif // VM_STATS if (restore_guard_page) { bool res = check_stack_size_enough_for_exception_catch(si_get_sp(si)); //must always be enough. otherwise program behavior is unspecified: finally blocks, monitor exits are not executed assert(res); if (!res) { break; } } // Setup handler context jit->fix_handler_context(method, si_get_jit_context(si)); si_set_ip(si, handler->get_handler_ip(), false); // Start single step in exception handler if (ti->isEnabled() && ti->is_single_step_enabled()) { jvmti_thread_t jvmti_thread = jthread_self_jvmti(); ti->vm_brpt->lock(); if (NULL != jvmti_thread->ss_state) { uint16 bc; NativeCodePtr ip = handler->get_handler_ip(); OpenExeJpdaError UNREF result = jit->get_bc_location_for_native(method, ip, &bc); assert(EXE_ERROR_NONE == result); jvmti_StepLocation method_start = {(Method *)method, ip, bc, false}; jvmti_set_single_step_breakpoints(ti, jvmti_thread, &method_start, 1); } ti->vm_brpt->unlock(); } // Create exception if necessary if (!*exn_obj && !handler->is_exc_obj_dead()) { assert(!exn_raised()); *exn_obj = create_lazy_exception(exn_class, exn_constr, jit_exn_constr_args, vm_exn_constr_args); } if (jvmti_is_exception_event_requested()) { // Create exception if necessary if (NULL == *exn_obj) { *exn_obj = create_lazy_exception(exn_class, exn_constr, jit_exn_constr_args, vm_exn_constr_args); } // Reload exception object pointer because it could have // moved while calling JVMTI callback *exn_obj = jvmti_jit_exception_event_callback_call(*exn_obj, interrupted_method_jit, interrupted_method, interrupted_method_location, jit, method, handler->get_handler_ip()); } CTRACE(("setting return pointer to %d", exn_obj)); 
si_set_return_pointer(si, (void **) exn_obj); //si_free(throw_si); return NULL; } } // No appropriate handler found, undo synchronization vm_monitor_exit_synchronized_method(si); jvalue ret_val = {(jlong)0}; jvmti_process_method_exception_exit_event( reinterpret_cast<jmethodID>(method), JNI_TRUE, ret_val, si); // Goto previous frame si_goto_previous(si); same_frame = false; } } // Exception propagates to the native code assert(si_is_native(si)); // The current thread exception is set to the exception and we return 0/NULL to the native code if (*exn_obj == NULL) { *exn_obj = create_lazy_exception(exn_class, exn_constr, jit_exn_constr_args, vm_exn_constr_args); } assert(!hythread_is_suspend_enabled()); CodeChunkInfo *catch_cci = si_get_code_chunk_info(si); Method *catch_method = NULL; if (catch_cci) catch_method = catch_cci->get_method(); // Reload exception object pointer because it could have // moved while calling JVMTI callback if (exn_raised()) { //si_free(throw_si); return NULL; } *exn_obj = jvmti_jit_exception_event_callback_call(*exn_obj, interrupted_method_jit, interrupted_method, interrupted_method_location, NULL, NULL, NULL); //si_free(throw_si); return *exn_obj; } //exn_propagate_exception