Array* rvmAllocateMemoryForArray(Env* env, Class* arrayClass, jint length) {
    jint elementSize = rvmGetArrayElementSize(env, arrayClass);
    if (elementSize == 0) {
        return NULL;
    }
    jlong size = (jlong) sizeof(Array) + (jlong) length * (jlong) elementSize;
    if (size != (jlong) (size_t) size) {
        // The requested size doesn't fit in a size_t on this platform.
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    Array* m = NULL;
    if (CLASS_IS_PRIMITIVE(arrayClass->componentType)) {
        // Primitive array objects contain no pointers except for the Class
        // pointer and possibly a fat monitor. Those are allocated uncollectably
        // and will be reachable even if we allocate this atomically.
        m = (Array*) gcAllocateKind((size_t) size, atomicObjectGCKind);
    } else if (length < 30) {
        // TODO: Use GC bitmap descriptor for small Object arrays.
        m = (Array*) gcAllocateKind((size_t) size, objectGCKind);
    } else {
        // Large Object array. Conservatively scanned. Only the lock (if thin)
        // and the length fields could become a problem if they look like
        // pointers into the heap.
        m = (Array*) gcAllocateKind((size_t) size, largeArrayGCKind);
    }
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
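// Illustrative caller sketch (an assumption, not part of the VM source): the
// allocator above throws the appropriate error itself, so callers only need a
// NULL check. allocIntArrayExample() and its intArrayClass parameter are
// hypothetical names; setting array->length mirrors the length field mentioned
// in the large-array comment above.
static Array* allocIntArrayExample(Env* env, Class* intArrayClass, jint length) {
    Array* array = rvmAllocateMemoryForArray(env, intArrayClass, length);
    if (!array) {
        return NULL; // OutOfMemoryError (or another error) has already been thrown
    }
    array->length = length; // assumed: filling in the length is the caller's job
    return array;
}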
Object* rvmAllocateMemoryForObject(Env* env, Class* clazz) {
    Object* m = NULL;
    if (CLASS_IS_FINALIZABLE(clazz) || CLASS_IS_REFERENCE(clazz)
            || (clazz->superclass && clazz->superclass == org_robovm_rt_bro_Struct)
            || (clazz->superclass && clazz->superclass == java_nio_MemoryBlock)
            || (clazz == java_nio_MemoryBlock)) {
        // These types of objects must be marked specially. We could probably
        // do this using GC bitmap descriptors instead. Instances of
        // java.lang.Throwable must also be marked specially, but Throwable has
        // at least one reference field and will thus not be allocated atomically.
        m = (Object*) gcAllocateKind(clazz->instanceDataSize, objectGCKind);
    } else if (CLASS_IS_REF_FREE(clazz)) {
        // Objects with no instance reference fields contain no pointers except
        // for the Class pointer and possibly a fat monitor. Those are allocated
        // uncollectably and will be reachable even if we allocate this atomically.
        m = (Object*) gcAllocateKind(clazz->instanceDataSize, atomicObjectGCKind);
    } else {
        // TODO: Use GC bitmap descriptors for small Objects.
        m = (Object*) gcAllocateKind(clazz->instanceDataSize, objectGCKind);
    }
    if (!m) {
        if (clazz == java_lang_OutOfMemoryError) {
            // We can't even allocate an OutOfMemoryError object. Prevent
            // infinite recursion by returning the shared criticalOutOfMemoryError
            // object.
            return criticalOutOfMemoryError;
        }
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
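// Illustrative caller sketch (an assumption, not part of the VM source):
// newInstanceExample() is a hypothetical name, and the real instantiation path
// certainly does more initialization than shown. It demonstrates the contract
// of rvmAllocateMemoryForObject(): on failure the error has already been
// thrown (or the shared criticalOutOfMemoryError returned), so the caller
// simply checks for NULL.
static Object* newInstanceExample(Env* env, Class* clazz) {
    Object* obj = rvmAllocateMemoryForObject(env, clazz);
    if (!obj) {
        return NULL; // OutOfMemoryError has already been thrown
    }
    obj->clazz = clazz; // assumed: tie the raw memory to its class
    return obj;
}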
void* rvmAllocateMemoryAtomic(Env* env, jint size) {
    void* m = gcAllocateAtomic(size);
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
void* rvmAllocateMemoryUncollectable(Env* env, jint size) {
    void* m = gcAllocateUncollectable(size);
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
void* rvmSaveSignals(Env* env) {
    SavedSignals* state = malloc(sizeof(SavedSignals));
    if (!state) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return state;
}
void* rvmAllocateMemory(Env* env, size_t size) {
    void* m = gcAllocate(size);
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
void* allocateMemoryOfKind(Env* env, size_t size, uint32_t kind) {
    void* m = gcAllocateKind(size, kind);
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
Class* rvmAllocateMemoryForClass(Env* env, jint classDataSize) {
    Class* m = (Class*) gcAllocateKind(classDataSize, objectGCKind);
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    return m;
}
jlong rvmStartThread(Env* env, JavaThread* threadObj) {
    Env* newEnv = rvmCreateEnv(env->vm);
    if (!newEnv) {
        // rvmCreateEnv() doesn't throw OutOfMemoryError if allocation fails
        rvmThrowOutOfMemoryError(env);
        return 0;
    }
    rvmLockThreadsList();
    if (threadObj->threadPtr != 0) {
        rvmThrowIllegalStateException(env, "thread has already been started");
        rvmUnlockThreadsList();
        return 0;
    }
    Thread* thread = allocThread(env);
    if (!thread) {
        rvmUnlockThreadsList();
        return 0;
    }
    size_t stackSize = (size_t) threadObj->stackSize;
    if (stackSize == 0) {
        stackSize = THREAD_DEFAULT_STACK_SIZE;
    } else if (stackSize < THREAD_MIN_STACK_SIZE) {
        stackSize = THREAD_MIN_STACK_SIZE;
    }
    stackSize += THREAD_SIGNAL_STACK_SIZE;
    // Round the stack size up to the nearest multiple of THREAD_STACK_SIZE_MULTIPLE.
    stackSize = (stackSize + THREAD_STACK_SIZE_MULTIPLE - 1) & ~(THREAD_STACK_SIZE_MULTIPLE - 1);
    pthread_attr_t threadAttr;
    pthread_attr_init(&threadAttr);
    pthread_attr_setdetachstate(&threadAttr, PTHREAD_CREATE_DETACHED);
    pthread_attr_setstacksize(&threadAttr, stackSize);
    pthread_attr_setguardsize(&threadAttr, THREAD_STACK_GUARD_SIZE);
    ThreadEntryPointArgs args = {0};
    args.env = newEnv;
    args.thread = thread;
    args.threadObj = threadObj;
    int err = 0;
    if ((err = pthread_create(&thread->pThread, &threadAttr, startThreadEntryPoint, &args)) != 0) {
        rvmUnlockThreadsList();
        rvmThrowInternalErrorErrno(env, err);
        return 0;
    }
    // Wait for the new thread to report that it has started up.
    while (thread->status != THREAD_STARTING) {
        pthread_cond_wait(&threadStartCond, &threadsLock);
    }
    DL_PREPEND(threads, thread);
    pthread_cond_broadcast(&threadsChangedCond);
    thread->status = THREAD_VMWAIT;
    pthread_cond_broadcast(&threadStartCond);
    rvmUnlockThreadsList();
    return PTR_TO_LONG(thread);
}
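// Worked example of the stack size rounding above, assuming
// THREAD_STACK_SIZE_MULTIPLE is 4096 (the real constant may differ, but the
// bit trick requires a power of two): (70000 + 4095) & ~4095 == 73728, the
// next multiple of 4096. roundUpExample() is a hypothetical helper.
static size_t roundUpExample(size_t size, size_t multiple) {
    // multiple must be a power of two for the mask to work
    return (size + multiple - 1) & ~(multiple - 1);
}
// e.g. roundUpExample(70000, 4096) == 73728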
jlong Java_aura_rt_VM_malloc(Env* env, Class* c, jint size) {
    void* m = malloc(size);
    if (!m) {
        rvmThrowOutOfMemoryError(env);
        return 0;
    }
    memset(m, 0, size);
    return PTR_TO_LONG(m);
}
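// Aside (a sketch, not a suggested change to the method above): the
// malloc() + memset() pair is behaviorally equivalent to a single
// zero-initializing calloc() call. zeroedAllocExample() is hypothetical.
static void* zeroedAllocExample(size_t size) {
    return calloc(1, size); // zero-filled on success, NULL on failure
}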
ObjectArray* Java_java_net_NetworkInterface_getInterfaceNames(Env* env, JClass* cls) {
    if (!java_lang_String_array) {
        java_lang_String_array = rvmFindClassUsingLoader(env, "[Ljava/lang/String;", NULL);
        if (!java_lang_String_array) {
            return NULL;
        }
    }
    struct if_nameindex* ifs = if_nameindex();
    if (!ifs) {
        // Assume out of memory
        rvmThrowOutOfMemoryError(env);
        return NULL;
    }
    jint count = 0;
    while (ifs[count].if_index > 0) {
        count++;
    }
    ObjectArray* result = rvmNewObjectArray(env, count, NULL, java_lang_String_array, NULL);
    if (!result) {
        goto done;
    }
    jint i = 0;
    for (i = 0; i < count; i++) {
        Object* name = rvmNewStringUTF(env, ifs[i].if_name, -1);
        if (!name) {
            goto done;
        }
        result->values[i] = name;
    }
done:
    if_freenameindex(ifs);
    return result;
}
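// Sketch of the if_nameindex() contract relied on above (POSIX): the returned
// array is terminated by a sentinel entry whose if_index is 0 and whose
// if_name is NULL, which is why the counting loop stops at if_index > 0. A
// plain C traversal, independent of the VM (printInterfacesExample() is a
// hypothetical name):
#include <stdio.h>
static void printInterfacesExample(void) {
    struct if_nameindex* ifs = if_nameindex();
    if (!ifs) {
        return;
    }
    for (struct if_nameindex* p = ifs; p->if_index != 0; p++) {
        printf("%u: %s\n", p->if_index, p->if_name);
    }
    if_freenameindex(ifs); // always release the list
}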