NativeCodePtr compile_me(Method* method)
{
    ASSERT_RAISE_AREA;
    ASSERT_NO_INTERPRETER;
    TRACE("compile_me " << method);

    // Protect the method's incoming arguments from GC while compiling
    GcFrame gc;
    compile_protect_arguments(method, &gc);

    if (exn_raised()) {
        return NULL;
    }

    tmn_suspend_enable();

    if (method->is_abstract()) {
        compile_raise_exception("java/lang/AbstractMethodError", "", method);
        tmn_suspend_disable();
        return NULL;
    }

    DebugUtilsTI *ti = VM_Global_State::loader_env->TI;
    JIT_Result res = compile_do_compilation(method);

    if (res != JIT_SUCCESS) {
        INFO2("compile", "Cannot compile " << method);
        if (!exn_raised()) {
            compile_raise_exception("java/lang/InternalError",
                "Cannot compile ", method);
        }
        tmn_suspend_disable();
        return NULL;
    }
    tmn_suspend_disable();

    NativeCodePtr entry_point = method->get_code_addr();
    INFO2("compile.code", "Compiled method " << method
        << ", entry " << entry_point);

    if (method->get_pending_breakpoints() != 0)
        jvmti_set_pending_breakpoints(method);

    if (ti->isEnabled() && ti->is_single_step_enabled()
        && !method->is_native()) {
        jvmti_thread_t jvmti_thread = jthread_self_jvmti();
        assert(jvmti_thread);
        jvmti_set_single_step_breakpoints_for_method(ti, jvmti_thread, method);
    }

    return entry_point;
}   // compile_me
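
// exn_propagate_exception() walks the managed frames below the throw point
// with a StackIterator. For each frame it scans the JIT's target exception
// handler table; on a match it fixes up the handler context, points the
// iterator at the handler and returns NULL. Frames without a matching
// handler are unwound one by one: synchronized methods release their
// monitor and a JVMTI method-exit event is posted before moving on. If no
// managed frame catches, the exception object is (lazily) created and
// returned so it can escape to native code.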
// This function can be a safepoint and should be called with disable recursion = 1
static ManagedObject *
exn_propagate_exception(StackIterator * si,
                        ManagedObject ** exn_obj,
                        Class_Handle exn_class,
                        Method_Handle exn_constr,
                        U_8 * jit_exn_constr_args,
                        jvalue * vm_exn_constr_args)
{
    assert(!hythread_is_suspend_enabled());
    ASSERT_RAISE_AREA;
    ASSERT_NO_INTERPRETER;
    assert(*exn_obj || exn_class);

    // Save the throw context
    StackIterator *throw_si = (StackIterator*) STD_ALLOCA(si_size());
    memcpy(throw_si, si, si_size());

    // Skip the first frame if it is an M2nFrame (which is always a transition
    // from managed code to the throw code). The M2nFrame will be removed from
    // the thread's M2nFrame list when control is transferred or the context
    // is copied to registers.
    if (si_is_native(si)) {
        si_goto_previous(si);
    }

    Method *interrupted_method;
    NativeCodePtr interrupted_method_location;
    JIT *interrupted_method_jit;
    bool restore_guard_page = p_TLS_vmthread->restore_guard_page;

    if (!si_is_native(si)) {
        CodeChunkInfo *interrupted_cci = si_get_code_chunk_info(si);
        assert(interrupted_cci);
        interrupted_method = interrupted_cci->get_method();
        interrupted_method_location = si_get_ip(si);
        interrupted_method_jit = interrupted_cci->get_jit();
    }
    else {
        interrupted_method = m2n_get_method(si_get_m2n(si));
        interrupted_method_location = 0;
        interrupted_method_jit = 0;
    }

    if (NULL != *exn_obj) {
        // Gregory - When *exn_obj is NULL it means we're called from
        // exn_athrow_regs, where the IP points exactly to the right location.
        // But when *exn_obj is not NULL, we're called from exn_throw_for_JIT,
        // where *exn_obj is already constructed and is thrown by code via
        // athrow. In that case the IP reported by the stack iterator is past
        // the athrow bytecode and must be moved back so it falls inside the
        // bytecode range of the interrupted method.
        interrupted_method_location =
            (NativeCodePtr)((POINTER_SIZE_INT)interrupted_method_location - 1);

        // Determine the type of the exception for the type tests below.
        exn_class = (*exn_obj)->vt()->clss;
    }

#ifdef VM_STATS
    assert(exn_class);
    exn_class->class_thrown();
    UNSAFE_REGION_START
    VM_Statistics::get_vm_stats().num_exceptions++;
    UNSAFE_REGION_END
#endif // VM_STATS

    // Remove single step breakpoints which could have been set on the
    // exception bytecode
    DebugUtilsTI *ti = VM_Global_State::loader_env->TI;
    if (ti->isEnabled() && ti->is_single_step_enabled()) {
        jvmti_thread_t jvmti_thread = jthread_self_jvmti();
        ti->vm_brpt->lock();
        if (NULL != jvmti_thread->ss_state) {
            jvmti_remove_single_step_breakpoints(ti, jvmti_thread);
        }
        ti->vm_brpt->unlock();
    }

    // When the VM is shutting down we still need to execute "finally"
    // clauses to release monitors and to propagate the exception to the
    // upper frames, so search for handlers that catch java/lang/Object.
    Class_Handle search_exn_class = !VM_Global_State::loader_env->IsVmShutdowning()
        ? exn_class : VM_Global_State::loader_env->JavaLangObject_Class;
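
    // Walk managed frames starting at the throw point. A handler matches
    // when its code range covers the current IP (with the is_ip_past
    // adjustment) and its catch type is assignable from the searched class.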
    if (!si_is_native(si)) {
        bool same_frame = true;
        while (!si_is_past_end(si) && !si_is_native(si)) {
            CodeChunkInfo *cci = si_get_code_chunk_info(si);
            assert(cci);
            Method *method = cci->get_method();
            JIT *jit = cci->get_jit();
            assert(method && jit);
            NativeCodePtr ip = si_get_ip(si);
            bool is_ip_past = !!si_get_jit_context(si)->is_ip_past;

#ifdef VM_STATS
            cci->num_throws++;
#endif // VM_STATS

            // Examine this frame's exception handlers looking for a match
            unsigned num_handlers = cci->get_num_target_exception_handlers();
            for (unsigned i = 0; i < num_handlers; i++) {
                Target_Exception_Handler_Ptr handler =
                    cci->get_target_exception_handler_info(i);
                if (!handler)
                    continue;
                if (handler->is_in_range(ip, is_ip_past)
                    && handler->is_assignable(search_exn_class)) {
                    // Found a handler that catches the exception.
#ifdef VM_STATS
                    cci->num_catches++;
                    if (same_frame) {
                        VM_Statistics::get_vm_stats().num_exceptions_caught_same_frame++;
                    }
                    if (handler->is_exc_obj_dead()) {
                        VM_Statistics::get_vm_stats().num_exceptions_dead_object++;
                        if (!*exn_obj) {
                            VM_Statistics::get_vm_stats().num_exceptions_object_not_created++;
                        }
                    }
#endif // VM_STATS
                    if (restore_guard_page) {
                        bool res = check_stack_size_enough_for_exception_catch(si_get_sp(si));
                        // There must always be enough stack; otherwise program
                        // behavior is unspecified: "finally" blocks and
                        // monitor exits would not be executed.
                        assert(res);
                        if (!res) {
                            break;
                        }
                    }

                    // Setup handler context
                    jit->fix_handler_context(method, si_get_jit_context(si));
                    si_set_ip(si, handler->get_handler_ip(), false);

                    // Start single stepping in the exception handler
                    if (ti->isEnabled() && ti->is_single_step_enabled()) {
                        jvmti_thread_t jvmti_thread = jthread_self_jvmti();
                        ti->vm_brpt->lock();
                        if (NULL != jvmti_thread->ss_state) {
                            uint16 bc;
                            NativeCodePtr handler_ip = handler->get_handler_ip();
                            OpenExeJpdaError UNREF result =
                                jit->get_bc_location_for_native(method, handler_ip, &bc);
                            assert(EXE_ERROR_NONE == result);
                            jvmti_StepLocation method_start =
                                {(Method *)method, handler_ip, bc, false};
                            jvmti_set_single_step_breakpoints(ti, jvmti_thread,
                                &method_start, 1);
                        }
                        ti->vm_brpt->unlock();
                    }

                    // Create the exception object if necessary
                    if (!*exn_obj && !handler->is_exc_obj_dead()) {
                        assert(!exn_raised());
                        *exn_obj = create_lazy_exception(exn_class, exn_constr,
                            jit_exn_constr_args, vm_exn_constr_args);
                    }

                    if (jvmti_is_exception_event_requested()) {
                        // Create the exception object if necessary
                        if (NULL == *exn_obj) {
                            *exn_obj = create_lazy_exception(exn_class, exn_constr,
                                jit_exn_constr_args, vm_exn_constr_args);
                        }

                        // Reload the exception object pointer because it could
                        // have moved while calling the JVMTI callback
                        *exn_obj = jvmti_jit_exception_event_callback_call(*exn_obj,
                            interrupted_method_jit, interrupted_method,
                            interrupted_method_location,
                            jit, method, handler->get_handler_ip());
                    }
                    CTRACE(("setting return pointer to %p", (void*)exn_obj));

                    si_set_return_pointer(si, (void **) exn_obj);
                    //si_free(throw_si);
                    return NULL;
                }
            }

            // No appropriate handler found, undo synchronization
            vm_monitor_exit_synchronized_method(si);

            jvalue ret_val = {(jlong)0};
            jvmti_process_method_exception_exit_event(
                reinterpret_cast<jmethodID>(method), JNI_TRUE, ret_val, si);

            // Go to the previous frame
            si_goto_previous(si);
            same_frame = false;
        }
    }

    // The exception propagates to native code
    assert(si_is_native(si));
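
    // Lazy exception creation: allocation of the exception object is
    // deferred through the whole handler search, so a handler whose
    // exception operand is dead (is_exc_obj_dead()) never forces an
    // allocation. Once the exception escapes to native code it must exist,
    // because it becomes the thread's current exception object.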
    // The current thread's exception is set to this object, and NULL is
    // returned to the native code
    if (*exn_obj == NULL) {
        *exn_obj = create_lazy_exception(exn_class, exn_constr,
            jit_exn_constr_args, vm_exn_constr_args);
    }
    assert(!hythread_is_suspend_enabled());

    CodeChunkInfo *catch_cci = si_get_code_chunk_info(si);
    Method *catch_method = NULL;
    if (catch_cci)
        catch_method = catch_cci->get_method();

    if (exn_raised()) {
        //si_free(throw_si);
        return NULL;
    }

    // Reload the exception object pointer because it could have moved while
    // calling the JVMTI callback
    *exn_obj = jvmti_jit_exception_event_callback_call(*exn_obj,
        interrupted_method_jit, interrupted_method,
        interrupted_method_location, NULL, NULL, NULL);

    //si_free(throw_si);
    return *exn_obj;
}   // exn_propagate_exception
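
// Illustrative caller sketch (an assumption about the surrounding throw
// path, not code from this file): a thrower builds a StackIterator over the
// interrupted context, lets exn_propagate_exception() position it, and then
// either transfers control to the managed handler or hands the exception
// object back to native code:
//
//   ManagedObject* exn = exn_propagate_exception(si, &exn_obj, exn_class,
//                                                NULL, NULL, NULL);
//   if (si_is_native(si)) {
//       // Escaped all managed frames: exn (if non-NULL) becomes the
//       // thread's current exception.
//   } else {
//       si_transfer_control(si);   // assumed StackIterator helper that
//                                  // resumes execution at the handler IP
//   }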