M2nFrame* m2n_push_suspended_frame(VM_thread* thread, Registers* regs)
{
    M2nFrame* m2nf = (M2nFrame*)STD_MALLOC(sizeof(M2nFrame));
    assert(m2nf);
    m2n_push_suspended_frame(thread, m2nf, regs);
    return m2nf;
}
// exception catch support for JVMTI; also restores the stack after a StackOverflowError
void jvmti_exception_catch_callback()
{
    Registers regs = {0};

    VM_thread *thread = p_TLS_vmthread;
    assert(thread);

    if (thread->regs) {
        regs = *(Registers*)thread->regs;
    }

    M2nFrame* m2n = (M2nFrame *) STD_ALLOCA(m2n_get_size());
    m2n_push_suspended_frame(thread, m2n, &regs);
    M2nFrame* prev_m2n = m2n_get_previous_frame(m2n);

    StackIterator* si = (StackIterator*) STD_ALLOCA(si_size());
    si_fill_from_registers(si, &regs, false, prev_m2n);

    // si_fill_from_registers uses large stack space,
    // so the guard page is restored after its invocation
    // but before the TI agent callback is invoked,
    // because the callback should run with the guard page protection restored.
    if (p_TLS_vmthread->restore_guard_page) {
        int res = port_thread_restore_guard_page();
        if (res != 0) {
            Global_Env *env = VM_Global_State::loader_env;

            if (si_is_native(si)) {
                m2n_set_last_frame(prev_m2n);

                if ((interpreter_enabled() || (!prev_m2n) ||
                        (m2n_get_frame_type(prev_m2n) & FRAME_NON_UNWINDABLE))) {
                    exn_raise_by_class(env->java_lang_StackOverflowError_Class);
                } else {
                    //si_free(si);
                    exn_throw_by_class(env->java_lang_StackOverflowError_Class);
                }
            } else {
                //si_free(si);
                exn_throw_by_class(env->java_lang_StackOverflowError_Class);
            }
        }
        p_TLS_vmthread->restore_guard_page = false;
    }

    if (!si_is_native(si)) {
        CodeChunkInfo* catch_cci = si_get_code_chunk_info(si);
        assert(catch_cci);
        Method* catch_method = catch_cci->get_method();
        NativeCodePtr catch_method_location = si_get_ip(si);
        JIT* catch_method_jit = catch_cci->get_jit();

        ManagedObject** exn_obj = (ManagedObject**) si_get_return_pointer(si);
        *exn_obj = jvmti_jit_exception_catch_event_callback_call(*exn_obj,
                catch_method_jit, catch_method, catch_method_location);
    }

    si_transfer_control(si);
}
// exception catch callback to restore the stack after a StackOverflowError
void exception_catch_callback()
{
    Registers regs = {0};

    VM_thread *thread = p_TLS_vmthread;
    assert(thread);

    if (thread->regs) {
        regs = *(Registers*)thread->regs;
    }

    M2nFrame* m2n = (M2nFrame *) STD_ALLOCA(m2n_get_size());
    m2n_push_suspended_frame(thread, m2n, &regs);
    M2nFrame* prev_m2n = m2n_get_previous_frame(m2n);

    StackIterator* si = (StackIterator*) STD_ALLOCA(si_size());
    si_fill_from_registers(si, &regs, false, prev_m2n);

    // si_fill_from_registers uses large stack space,
    // so the guard page is restored after its invocation.
    if (p_TLS_vmthread->restore_guard_page) {
        int res = port_thread_restore_guard_page();
        if (res != 0) {
            Global_Env *env = VM_Global_State::loader_env;

            if (si_is_native(si)) {
                m2n_set_last_frame(prev_m2n);

                if ((interpreter_enabled() || (!prev_m2n) ||
                        (m2n_get_frame_type(prev_m2n) & FRAME_NON_UNWINDABLE))) {
                    exn_raise_by_class(env->java_lang_StackOverflowError_Class);
                } else {
                    //si_free(si);
                    exn_throw_by_class(env->java_lang_StackOverflowError_Class);
                }
            } else {
                //si_free(si);
                exn_throw_by_class(env->java_lang_StackOverflowError_Class);
            }
        }
        p_TLS_vmthread->restore_guard_page = false;
    }

    si_transfer_control(si);
}
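// Note: the two catch callbacks above are not called directly. When
// exn_athrow_regs (below) finds a catch handler, it saves the unwound
// context with vm_set_exception_registers and installs one of the callbacks
// via si_set_callback, so that si_transfer_control lands in the callback on
// the destination frame's stack. There the guard page can be restored (and,
// for JVMTI, the ExceptionCatch event delivered) before execution resumes
// in the handler.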
void m2n_push_suspended_frame(M2nFrame* m2nf, Registers* regs)
{
    m2n_push_suspended_frame(p_TLS_vmthread, m2nf, regs);
}
M2nFrame* m2n_push_suspended_frame(Registers* regs)
{
    return m2n_push_suspended_frame(p_TLS_vmthread, regs);
}
// This function can be a safe point and should be called with disable recursion = 1
void exn_athrow_regs(Registers * regs, Class_Handle exn_class, bool java_code, bool transfer_control)
{
    assert(!hythread_is_suspend_enabled());
    assert(exn_class);

#ifndef _IPF_
    M2nFrame *cur_m2nf = (M2nFrame *) STD_ALLOCA(m2n_get_size());
    M2nFrame *unw_m2nf;
    ManagedObject *exn_obj = NULL;
    StackIterator *si;
    DebugUtilsTI* ti = VM_Global_State::loader_env->TI;
    VM_thread* vmthread = p_TLS_vmthread;

    if (java_code)
        m2n_push_suspended_frame(vmthread, cur_m2nf, regs);
    else
        // Gregory -
        // Initialize the cur_m2nf pointer in case we've crashed in native code
        // that is unwindable, e.g. in the code that sets the non-unwindable
        // state for the native code area
        cur_m2nf = m2n_get_last_frame();

    BEGIN_RAISE_AREA;

    si = (StackIterator*) STD_ALLOCA(si_size());
    si_fill_from_native(si);
    ManagedObject *local_exn_obj = NULL;
    exn_obj = exn_propagate_exception(si, &local_exn_obj, exn_class, NULL, NULL, NULL);

    // free local handles
    ObjectHandles* last_m2n_frame_handles = m2n_get_local_handles(cur_m2nf);
    if (last_m2n_frame_handles) {
        free_local_object_handles2(last_m2n_frame_handles);
    }

    if (ti->get_global_capability(DebugUtilsTI::TI_GC_ENABLE_EXCEPTION_EVENT)) {
        VM_thread *thread = p_TLS_vmthread;
        NativeCodePtr callback = (NativeCodePtr) jvmti_exception_catch_callback;
        si_copy_to_registers(si, regs);
        vm_set_exception_registers(thread, *regs);
        si_set_callback(si, &callback);
    } else if (p_TLS_vmthread->restore_guard_page) {
        VM_thread *thread = p_TLS_vmthread;
        NativeCodePtr callback = (NativeCodePtr) exception_catch_callback;
        si_copy_to_registers(si, regs);
        vm_set_exception_registers(thread, *regs);
        si_set_callback(si, &callback);
    }

    si_copy_to_registers(si, regs);

    if (transfer_control) {
        // Let NCAI continue single stepping in the exception handler
        ncai_setup_signal_step(&vmthread->jvmti_thread, (NativeCodePtr)regs->get_ip());

        set_exception_object_internal(exn_obj);
        si_transfer_control(si);
        assert(!"si_transfer_control should not return");
    }

    unw_m2nf = si_get_m2n(si);
    //si_free(si);

    END_RAISE_AREA;

    set_exception_object_internal(exn_obj);
    m2n_set_last_frame(unw_m2nf);
#endif
} // exn_athrow_regs
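// A minimal sketch of a typical caller (illustration only; not how the port
// layer is actually wired). A stack-overflow signal handler would capture
// the faulting context into a Registers struct and delegate to
// exn_athrow_regs with transfer_control = true, so control never returns to
// the handler. fill_registers_from_context is an assumed helper name; the
// real handler must also run with thread suspension disabled (see the
// assert at the top of exn_athrow_regs).
#if 0
static void fill_registers_from_context(Registers* regs, void* os_context); // assumed helper

static void stack_overflow_handler_sketch(void* os_context)
{
    Registers regs = {0};
    fill_registers_from_context(&regs, os_context);

    // Request that the guard page be re-protected once control is
    // transferred to the catch handler; exception_catch_callback above
    // performs the actual port_thread_restore_guard_page() call.
    p_TLS_vmthread->restore_guard_page = true;

    Global_Env* env = VM_Global_State::loader_env;
    // java_code = true: the fault occurred in managed code, so a suspended
    // M2nFrame is pushed for it; transfer_control = true: does not return.
    exn_athrow_regs(&regs, env->java_lang_StackOverflowError_Class, true, true);
}
#endif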