Example No. 1
static void ti_enumerate_thread_stack(TIEnv* ti_env, StackIterator* si)
{
    ASSERT_NO_INTERPRETER

    TIIterationState *state = ti_env->iteration_state;
    state->depth = 0;

    while (!si_is_past_end(si)) {

        CodeChunkInfo* cci = si_get_code_chunk_info(si);
        if (cci) {
            // Managed (JIT-compiled) frame: report its stack locals through the JIT
            state->method = (jmethodID)cci->get_method();
            state->root_kind = JVMTI_HEAP_ROOT_STACK_LOCAL;
            // FIXME: set up frame base (platform dependent!)
            cci->get_jit()->get_root_set_from_stack_frame(cci->get_method(), 0, si_get_jit_context(si));
        } else {
            // Native (M2N) frame: its roots are the JNI local handles attached to it
            state->method = (jmethodID)m2n_get_method(si_get_m2n(si));
            state->root_kind = JVMTI_HEAP_ROOT_JNI_LOCAL;
            oh_enumerate_handles(m2n_get_local_handles(si_get_m2n(si)));
        }
        }
        state->depth += 1;
        si_goto_previous(si);
    }
    si_free(si);
}
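For orientation, the loop above only touches three fields of the per-environment iteration state. A minimal sketch of what those fields might look like is shown below; the real TIIterationState in the VM carries additional bookkeeping, so treat this purely as an illustration of what the walker relies on.

struct TIIterationState_sketch {  // illustration only, not the VM's declaration
    jmethodID         method;     // method owning the frame currently being reported
    jvmtiHeapRootKind root_kind;  // JVMTI_HEAP_ROOT_STACK_LOCAL for JIT frames,
                                  // JVMTI_HEAP_ROOT_JNI_LOCAL for M2N (native) frames
    int               depth;      // 0 for the innermost frame, incremented as we unwind
};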
Example No. 2
static IDATA rt_jthread_monitor_enter(ManagedObject* monitor) {
    const unsigned handles_size = (unsigned)(sizeof(ObjectHandlesNew)+sizeof(ManagedObject*)*4);
    // Reserve a small, stack-allocated handle frame (room for 4 handles).
    ObjectHandlesNew* handles = (ObjectHandlesNew *)STD_ALLOCA(handles_size);
    handles->capacity = 4;
    handles->size = 0;
    handles->next = NULL;

    // Attach the handles to the topmost M2N frame so stack enumeration can see them.
    m2n_set_local_handles(m2n_get_last_frame(), (ObjectHandles *) handles);

    // Wrap the raw monitor object in a local handle before a call that may trigger GC.
    ObjectHandle monitorJavaObj = oh_allocate_local_handle();
    monitorJavaObj->object = monitor;

    IDATA result = jthread_monitor_enter(monitorJavaObj);

    // Release the temporary handles and detach them from the frame.
    free_local_object_handles2(m2n_get_local_handles(m2n_get_last_frame()));
    m2n_set_local_handles(m2n_get_last_frame(), NULL);

    return result;
}
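The handles_size computation above reserves the ObjectHandlesNew header plus four pointer-sized slots in a single STD_ALLOCA block, i.e. the handle storage is assumed to follow the header in the same stack allocation. Roughly, the layout being relied on looks like this (a sketch only; the real ObjectHandlesNew declaration lives in the VM's object-handle headers and may differ in field types):

// Sketch of the layout assumed by the allocation above -- illustration only.
//
//   +----------------------+
//   | capacity (= 4)       |  slots available after the header
//   | size     (= 0)       |  slots currently in use
//   | next     (= NULL)    |  link to the next handle block, if any
//   +----------------------+
//   | 4 x ManagedObject*   |  handle slots handed out by oh_allocate_local_handle()
//   +----------------------+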
void
interp_ti_enumerate_root_set_single_thread_on_stack(jvmtiEnv* ti_env, VM_thread *thread) {
    TRACE2("enumeration", "interp_enumerate_root_set_single_thread_on_stack()");
    StackIterator_interp* si = interp_si_create_from_native(thread);
    
    int i;
    int depth;
    DEBUG_GC("\n\nGC enumeration in interpreter stack:\n");
    for (depth = 0; !interp_si_is_past_end(si); depth++) {
        Method* method = (Method*)interp_si_get_method(si);
        jmethodID method_id = (jmethodID)method;
        int slot = 0;

        // The receiver ('this') and a pending exception, if any, are stack-local roots
        if (si->This) {
            vm_ti_enumerate_stack_root(ti_env,
                    (void**)&si->This, si->This,
                    JVMTI_HEAP_ROOT_STACK_LOCAL,
                    depth, method_id, slot++);
            DEBUG_GC("  [THIS]: " << si->This);
        }

        if (si->exc) {
            vm_ti_enumerate_stack_root(ti_env,
                (void**)&si->exc, si->exc,
                JVMTI_HEAP_ROOT_STACK_LOCAL,
                depth, method_id, slot++);
            DEBUG_GC("  [EXCEPTION]: " << si->exc);
        }

        if (method->is_native()) {
            DEBUG_GC("[METHOD <native>]: " << method);
            interp_si_goto_previous(si);
            continue;
        }

        DEBUG_GC("[METHOD "<< si->stack.size << " " << (int)si->locals.varNum << "]: "
                << method);

        // Operand-stack slots tagged FLAG_OBJECT hold (possibly compressed) references
        if (si->stack.size)
            for(i = 0; i <= si->stack.index; i++) {
                if (si->stack.refs[i] == FLAG_OBJECT) {
                    DEBUG_GC("  Stack[" << i << "] ");
                    REF* ref = &si->stack.data[i].ref;
                    ManagedObject *obj = UNCOMPRESS_INTERP(*ref);
                    if (obj == 0) {
                        DEBUG_GC("NULL");
                    } else {
                        DEBUG_GC(obj);
                        vm_ti_enumerate_stack_root(ti_env,
                            ref, (Managed_Object_Handle)obj, 
                            JVMTI_HEAP_ROOT_STACK_LOCAL,
                            depth, method_id, slot++);
                    }
                }
            }

        unsigned j;
        // Local-variable slots are tagged the same way as operand-stack slots
        if (si->locals.varNum)
            for(j = 0; j < si->locals.varNum; j++) {
                if (si->locals.refs[j] == FLAG_OBJECT) {
                    DEBUG_GC("  Locals[" << j << "] ");
                    REF* ref = &si->locals.vars[j].ref;
                    ManagedObject *obj = UNCOMPRESS_INTERP(*ref);
                    if (obj == 0) {
                        DEBUG_GC("NULL\n");
                    } else {
                        DEBUG_GC(obj);
                        vm_ti_enumerate_stack_root(ti_env,
                            ref, (Managed_Object_Handle)obj, 
                            JVMTI_HEAP_ROOT_STACK_LOCAL,
                            depth, method_id, slot++);
                    }
                }
            }
        // Monitors locked by this frame are roots as well
        MonitorList *ml = si->locked_monitors;
        while(ml) {
            vm_ti_enumerate_stack_root(ti_env,
                    &ml->monitor, ml->monitor,
                    JVMTI_HEAP_ROOT_MONITOR,
                    depth, method_id, slot++);
            ml = ml->next;
        }
        interp_si_goto_previous(si);
    }

    // enumerate m2n frames
    M2nFrame *m2n = m2n_get_last_frame(thread);
    while(m2n) {
        oh_enumerate_handles(m2n_get_local_handles(m2n));
        m2n = m2n_get_previous_frame(m2n);
    }
}
void
interp_enumerate_root_set_single_thread_on_stack(VM_thread *thread) {
    TRACE2("enumeration", "interp_enumerate_root_set_single_thread_on_stack()");
    StackIterator_interp* si = interp_si_create_from_native(thread);
    
    int i;
    DEBUG_GC("\n\nGC enumeration in interpreter stack:\n");
    while(!interp_si_is_past_end(si)) {
        Method* method = (Method*)interp_si_get_method(si);
        method = method;

        if (si->This) {
            vm_enumerate_root_reference((void**)&si->This, FALSE);
            DEBUG_GC("  [THIS]: " << si->This);
        }

        if (si->exc) {
            vm_enumerate_root_reference((void**)&si->exc, FALSE);
            DEBUG_GC("  [EXCEPTION]: " << si->exc);
        }

        if (method->is_native()) {
            DEBUG_GC("[METHOD <native>]: " << method);
            interp_si_goto_previous(si);
            continue;
        }

        DEBUG_GC("[METHOD "<< si->stack.size << " " << (int)si->locals.varNum << "]: "
                << method);

        if (si->stack.size)
            for(i = 0; i <= si->stack.index; i++) {
                if (si->stack.refs[i] == FLAG_OBJECT) {
                    DEBUG_GC("  Stack[" << i << "] ");
                    REF* ref = &si->stack.data[i].ref;
                    ManagedObject *obj = UNCOMPRESS_INTERP(*ref);
                    if (obj == 0) {
                        DEBUG_GC("NULL");
                    } else {
                        DEBUG_GC(obj);
                        vm_enumerate(ref, FALSE); // CHECK!!! can we enumerate uncompressed ref in compressed mode
                    }
                }
            }

        unsigned j;
        if (si->locals.varNum)
            for(j = 0; j < si->locals.varNum; j++) {
                if (si->locals.refs[j] == FLAG_OBJECT) {
                    DEBUG_GC("  Locals[" << j << "] ");
                    REF* ref = &si->locals.vars[j].ref;
                    ManagedObject *obj = UNCOMPRESS_INTERP(*ref);
                    if (obj == 0) {
                        DEBUG_GC("NULL\n");
                    } else {
                        DEBUG_GC(obj);
                        vm_enumerate(ref, FALSE); // CHECK!!! can we enumerate uncompressed ref in compressed mode
                    }
                }
            }
        MonitorList *ml = si->locked_monitors;
        while(ml) {
            vm_enumerate_root_reference((void**)&ml->monitor, FALSE);
            ml = ml->next;
        }
        interp_si_goto_previous(si);
    }

    // enumerate m2n frames
    M2nFrame *m2n = m2n_get_last_frame(thread);
    while(m2n) {
        oh_enumerate_handles(m2n_get_local_handles(m2n));
        m2n = m2n_get_previous_frame(m2n);
    }
}
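Both interpreter walkers above depend on the same frame layout: the operand stack and the local variables each keep a tag array parallel to the value array, and a slot whose tag equals FLAG_OBJECT holds a (possibly compressed) reference that UNCOMPRESS_INTERP expands to a ManagedObject*. Sketched as comments (the real StackIterator_interp declaration is in the interpreter headers; this only names the pieces the loops touch):

// Pieces of the interpreter frame used by the loops above -- illustration only.
//
//   si->stack.size       number of operand-stack slots (0 => nothing to scan)
//   si->stack.index      index of the topmost occupied slot
//   si->stack.data[i]    value slot; .ref holds a REF when the tag says so
//   si->stack.refs[i]    tag; FLAG_OBJECT marks slot i as holding a reference
//
//   si->locals.varNum    number of local-variable slots
//   si->locals.vars[j]   value slot; .ref holds a REF when the tag says so
//   si->locals.refs[j]   tag; FLAG_OBJECT marks local j as holding a reference
//
//   UNCOMPRESS_INTERP(ref) turns the stored REF into a ManagedObject* (NULL for a null slot).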
Example No. 5
// function can be a safe point & should be called with disable recursion = 1
void exn_athrow_regs(Registers * regs, Class_Handle exn_class, bool java_code, bool transfer_control)
{
    assert(!hythread_is_suspend_enabled());
    assert(exn_class);

#ifndef _IPF_
    M2nFrame *cur_m2nf = (M2nFrame *) STD_ALLOCA(m2n_get_size());
    M2nFrame *unw_m2nf;
    ManagedObject *exn_obj = NULL;
    StackIterator *si;
    DebugUtilsTI* ti = VM_Global_State::loader_env->TI;
    VM_thread* vmthread = p_TLS_vmthread;

    if (java_code)
        m2n_push_suspended_frame(vmthread, cur_m2nf, regs);
    else
        // Gregory -
        // Initialize cur_m2nf pointer in case we've crashed in native code that is unwindable,
        // e.g. in the code that sets non-unwindable state for the native code area
        cur_m2nf = m2n_get_last_frame();

    BEGIN_RAISE_AREA;

    si = (StackIterator*) STD_ALLOCA(si_size());
    si_fill_from_native(si);
    ManagedObject *local_exn_obj = NULL;
    exn_obj = exn_propagate_exception(si, &local_exn_obj, exn_class, NULL, NULL, NULL);

    //free local handles
    ObjectHandles* last_m2n_frame_handles = m2n_get_local_handles(cur_m2nf);

    if (last_m2n_frame_handles) {
        free_local_object_handles2(last_m2n_frame_handles);
    }

    if (ti->get_global_capability(DebugUtilsTI::TI_GC_ENABLE_EXCEPTION_EVENT)) {
        VM_thread *thread = p_TLS_vmthread;
        NativeCodePtr callback = (NativeCodePtr)
                jvmti_exception_catch_callback;

        si_copy_to_registers(si, regs);
        vm_set_exception_registers(thread, *regs);
        si_set_callback(si, &callback);
    } else if (p_TLS_vmthread->restore_guard_page) {
        VM_thread *thread = p_TLS_vmthread;
        NativeCodePtr callback = (NativeCodePtr)
                exception_catch_callback;
        si_copy_to_registers(si, regs);
        vm_set_exception_registers(thread, *regs);
        si_set_callback(si, &callback);
    }

    si_copy_to_registers(si, regs);

    if (transfer_control) {
        // Let NCAI to continue single stepping in exception handler
        ncai_setup_signal_step(&vmthread->jvmti_thread, (NativeCodePtr)regs->get_ip());

        set_exception_object_internal(exn_obj);
        si_transfer_control(si);
        assert(!"si_transfer_control should not return");
    } 

    unw_m2nf = si_get_m2n(si);
    //si_free(si);

    END_RAISE_AREA;

    set_exception_object_internal(exn_obj);
    m2n_set_last_frame(unw_m2nf);
#endif
}   //exn_athrow_regs
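A typical caller of exn_athrow_regs() already holds the faulting register context, e.g. a hardware-exception handler that converts a null dereference in JIT-compiled code into a java.lang.NullPointerException. The call site below is hypothetical (the handler name and how it obtains regs are made up), but the argument usage matches the signature above:

// Hypothetical call site -- handler name and surrounding plumbing are made up.
static void convert_null_dereference(Registers* regs)
{
    Class_Handle npe =
        VM_Global_State::loader_env->java_lang_NullPointerException_Class;

    // The fault happened in managed (JIT) code, so push an M2N frame for it
    // (java_code = true) and resume execution in the catch handler
    // (transfer_control = true).
    exn_athrow_regs(regs, npe, /*java_code*/ true, /*transfer_control*/ true);
}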
Example No. 6
// function can be a safe point & should be called with disable recursion = 1
void exn_throw_for_JIT(ManagedObject* exn_obj, Class_Handle exn_class,
    Method_Handle exn_constr, U_8* jit_exn_constr_args, jvalue* vm_exn_constr_args)
{
/*
 * !!!! NO LOGGER IS ALLOWED IN THIS FUNCTION !!!
 * !!!! RELEASE BUILD WILL BE BROKEN          !!!
 * !!!! NO TRACE2, INFO, WARN, ECHO, ASSERT, ...
 */
    assert(!hythread_is_suspend_enabled());

    if(exn_raised()) {
        return;
    }

    ASSERT_NO_INTERPRETER
    ASSERT_RAISE_AREA;

    if ((exn_obj == NULL) && (exn_class == NULL)) {
        exn_class = VM_Global_State::loader_env->java_lang_NullPointerException_Class;
    }
    ManagedObject* local_exn_obj = exn_obj;
    StackIterator* si = (StackIterator*) STD_ALLOCA(si_size());
    si_fill_from_native(si);

    if (exn_raised()) {
        return;
    }

#ifndef _IPF_
    assert(is_gc_frame_before_m2n_frame());
#endif // _IPF_

    assert(!exn_raised());

    if (si_is_past_end(si)) {
        //FIXME LAZY EXCEPTION (2006.05.12)
        // should be replaced by lazy version
        set_exception_object_internal(local_exn_obj);
        return;
    }

    si_transfer_all_preserved_registers(si);
    assert(!exn_raised());

    DebugUtilsTI* ti = VM_Global_State::loader_env->TI;
    exn_obj = exn_propagate_exception(si, &local_exn_obj, exn_class, exn_constr,
        jit_exn_constr_args, vm_exn_constr_args);

    if (exn_raised()) {
        //si_free(si);
        return;
    }

    M2nFrame* m2nFrame = m2n_get_last_frame();
    ObjectHandles* last_m2n_frame_handles = m2n_get_local_handles(m2nFrame);

    if (last_m2n_frame_handles) {
        free_local_object_handles2(last_m2n_frame_handles);
    }
    set_exception_object_internal(exn_obj);

    if (ti->get_global_capability(DebugUtilsTI::TI_GC_ENABLE_EXCEPTION_EVENT)) {
        Registers regs = {0};
        VM_thread *thread = p_TLS_vmthread;
        NativeCodePtr callback = (NativeCodePtr)
                jvmti_exception_catch_callback;

        si_copy_to_registers(si, &regs);
        vm_set_exception_registers(thread, regs);
        si_set_callback(si, &callback);
    } else if (p_TLS_vmthread->restore_guard_page) {
        Registers regs = {0};
        VM_thread *thread = p_TLS_vmthread;
        NativeCodePtr callback = (NativeCodePtr)
                exception_catch_callback;
        si_copy_to_registers(si, &regs);
        vm_set_exception_registers(thread, regs);
        si_set_callback(si, &callback);
    }

    // don't put any call here: si_transfer_control() does not return
    si_transfer_control(si);
}   //exn_throw_for_JIT