/*
 * Returns the declaring Class of each method on the current call stack, after
 * skipping this bridge method, its caller and skipNum additional frames.
 * A negative maxDepth means "no limit".
 */
ObjectArray* Java_aura_rt_VM_getStackClasses(Env* env, Class* c, jint skipNum, jint maxDepth) {
    CallStack* callStack = rvmCaptureCallStack(env);
    if (!callStack) return NULL;

    jint index = 0;
    rvmGetNextCallStackMethod(env, callStack, &index); // Skip VM.getStackClasses()
    rvmGetNextCallStackMethod(env, callStack, &index); // Skip caller of VM.getStackClasses()
    while (skipNum > 0) {
        CallStackFrame* frame = rvmGetNextCallStackMethod(env, callStack, &index);
        if (!frame) return NULL;
        skipNum--;
    }

    // First pass: count the remaining frames to size the result array.
    jint first = index;
    jint depth = 0;
    while (rvmGetNextCallStackMethod(env, callStack, &index)) {
        depth++;
    }
    if (maxDepth > -1 && maxDepth < depth) {
        depth = maxDepth;
    }

    ObjectArray* result = rvmNewObjectArray(env, depth, java_lang_Class, NULL, NULL);
    if (!result) return NULL;

    // Second pass: rewind to the first unskipped frame and collect the
    // declaring class of each method.
    jint i;
    index = first;
    for (i = 0; i < depth; i++) {
        CallStackFrame* frame = rvmGetNextCallStackMethod(env, callStack, &index);
        result->values[i] = (Object*) frame->method->clazz;
    }
    return result;
}
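/*
 * Minimal native-side sketch (an assumption added for illustration; in the VM
 * this entry point is normally reached from Java through the bridge rather
 * than called directly): capture at most 16 caller classes, skipping no extra
 * frames. The Class* parameter is unused by the implementation above, so NULL
 * is passed, and the array is assumed to carry its element count in a
 * 'length' field as object arrays do elsewhere in this runtime.
 */
static void sketchLogCallerClasses(Env* env) {
    ObjectArray* classes = Java_aura_rt_VM_getStackClasses(env, NULL, 0, 16);
    if (!classes) return; // capture or allocation failed; an exception may be pending
    for (jint i = 0; i < classes->length; i++) {
        Object* clazz = classes->values[i];
        (void) clazz; // hand each java.lang.Class off to logging/inspection here
    }
}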
/*
 * Captures the call stack of an arbitrary thread. The current thread is
 * delegated to rvmCaptureCallStack(); any other thread is dumped into a
 * shared, lock-protected scratch buffer which is then copied into a
 * right-sized CallStack.
 */
CallStack* rvmCaptureCallStackForThread(Env* env, Thread* thread) {
    if (thread == env->currentThread) {
        return rvmCaptureCallStack(env);
    }

    // dumpThreadStackTrace() must not be called concurrently
    obtainThreadStackTraceLock();

    if (!shared_callStack) {
        shared_callStack = rvmAllocateMemoryAtomicUncollectable(env,
            sizeof(CallStack) + sizeof(CallStackFrame) * MAX_CALL_STACK_LENGTH);
        if (!shared_callStack) {
            releaseThreadStackTraceLock();
            return NULL;
        }
    }
    memset(shared_callStack, 0, sizeof(CallStack) + sizeof(CallStackFrame) * MAX_CALL_STACK_LENGTH);

    dumpThreadStackTrace(env, thread, shared_callStack);
    if (rvmExceptionOccurred(env)) {
        releaseThreadStackTraceLock();
        return NULL;
    }

    // Make a copy of the CallStack that is just big enough
    CallStack* copy = allocateCallStackFrames(env, shared_callStack->length);
    if (!copy) {
        releaseThreadStackTraceLock();
        return NULL;
    }
    memcpy(copy, shared_callStack, sizeof(CallStack) + sizeof(CallStackFrame) * shared_callStack->length);

    releaseThreadStackTraceLock();
    return copy;
}
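/*
 * Usage sketch (a hypothetical caller, not part of this file): counting the
 * frames of another thread with the same iteration primitive used above.
 * Ownership of the returned copy is left to the VM's usual allocation rules
 * and is not shown.
 */
static jint sketchCountFramesForThread(Env* env, Thread* thread) {
    CallStack* callStack = rvmCaptureCallStackForThread(env, thread);
    if (!callStack) return -1; // capture failed; an exception may be pending
    jint index = 0;
    jint depth = 0;
    while (rvmGetNextCallStackMethod(env, callStack, &index)) {
        depth++;
    }
    return depth;
}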
jlong Java_java_lang_Throwable_nativeFillInStackTrace(Env* env, Object* thiz) {
    if (rvmIsCriticalOutOfMemoryError(env, thiz)) {
        // nativeFillInStackTrace() was called on the shared criticalOutOfMemoryError.
        // Don't try to capture the call stack since it will most likely just
        // lead to another OOM and more recursion.
        return 0;
    }
    return PTR_TO_LONG(rvmCaptureCallStack(env));
}
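/*
 * Sketch of the reverse mapping (illustrative assumption: PTR_TO_LONG() is a
 * plain pointer-width widening cast, and the stack-trace builders that consume
 * this value live elsewhere in the VM): the jlong handed back to
 * java.lang.Throwable is just the captured CallStack pointer, so it can be
 * recovered with a cast when the trace is later materialized.
 */
static CallStack* sketchCallStackFromStackState(jlong stackState) {
    return (CallStack*) (uintptr_t) stackState; // uintptr_t from <stdint.h>
}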