/* Mark all objects referred to by the ClassObject.
 */
static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
{
    LOGV_SCAN("---------> %s\n", clazz->name);

    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* We're an array; mark the class object of the contents
         * of the array.
         *
         * Note that we won't necessarily reach the array's element
         * class by scanning the array contents; the array may be
         * zero-length, or may only contain null objects.
         */
        markObjectNonNull((Object *)clazz->elementClass, ctx);
    }

    /* We scan these explicitly in case the only remaining
     * reference to a particular class object is via a data
     * object; we may not be guaranteed to reach all
     * live class objects via a classloader.
     */
    markObject((Object *)clazz->super, ctx);  // may be NULL (java.lang.Object)
    markObject(clazz->classLoader, ctx);      // may be NULL

    scanStaticFields(clazz, ctx);
    markInterfaces(clazz, ctx);
}
/*
 * Callback applied to root references. If the root location contains
 * a white reference it is pushed on the mark stack and grayed.
 */
static void markObjectVisitor(void *addr, void *arg)
{
    Object *obj;

    assert(addr != NULL);
    assert(arg != NULL);
    obj = *(Object **)addr;
    if (obj != NULL) {
        markObjectNonNull(obj, arg, true);
    }
}
/*
 * Callback applied to root references during root remarking. Marks
 * white objects and pushes them on the mark stack.
 */
static void rootReMarkObjectVisitor(void *addr, u4 thread, RootType type,
                                    void *arg)
{
    assert(addr != NULL);
    assert(arg != NULL);
    Object *obj = *(Object **)addr;
    GcMarkContext *ctx = (GcMarkContext *)arg;
    if (obj != NULL) {
        markObjectNonNull(obj, ctx, true);
    }
}
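/* Illustrative sketch (not part of the VM): how a root visitor such as
 * markObjectVisitor() above is applied. The caller walks every root slot
 * (a location that may hold an object reference) and hands the *address*
 * of each slot to the callback, so the visitor can load the reference
 * itself. visitRootSlotsExample() and its parameters are hypothetical;
 * the real walk lives in the root-visiting code, not in this file.
 */
#if 0   /* example only, never compiled */
static void visitRootSlotsExample(Object **slots, size_t count,
                                  void (*visitor)(void *addr, void *arg),
                                  void *arg)
{
    size_t i;

    for (i = 0; i < count; i++) {
        /* Pass the address of the slot, not the object it holds. */
        (*visitor)(&slots[i], arg);
    }
}
#endif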
/* Mark all of a ClassObject's interfaces.
 */
static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
{
    ClassObject **interfaces;
    int interfaceCount;
    int i;

    /* Mark all interfaces.
     */
    interfaces = clazz->interfaces;
    interfaceCount = clazz->interfaceCount;
    for (i = 0; i < interfaceCount; i++) {
        markObjectNonNull((Object *)*interfaces, ctx);
        interfaces++;
    }
}
/* All objects for stronger reference levels have been
 * marked before this is called.
 */
void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
{
    Object *reference;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;
    const int offVmData = gDvm.offJavaLangRefReference_vmData;
    const int offReferent = gDvm.offJavaLangRefReference_referent;
    bool workRequired = false;
    size_t numCleared = 0;
    size_t numEnqueued = 0;

    reference = refListHead;
    while (reference != NULL) {
        Object *next;
        Object *referent;

        /* Pull the interesting fields out of the Reference object.
         */
        next = dvmGetFieldObject(reference, offVmData);
        referent = dvmGetFieldObject(reference, offReferent);

        //TODO: when handling REF_PHANTOM, unlink any references
        //      that fail this initial if(). We need to re-walk
        //      the list, and it would be nice to avoid the extra
        //      work.
        if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
            bool schedClear, schedEnqueue;

            /* This is the strongest reference that refers to referent.
             * Do the right thing.
             */
            switch (refType) {
            case REF_SOFT:
            case REF_WEAK:
                schedClear = clearReference(reference);
                schedEnqueue = enqueueReference(reference);
                break;
            case REF_PHANTOM:
                /* PhantomReferences are not cleared automatically.
                 * Until someone clears it (or the reference itself
                 * is collected), the referent must remain alive.
                 *
                 * It's necessary to fully mark the referent because
                 * it will still be present during the next GC, and
                 * all objects that it points to must be valid.
                 * (The referent will be marked outside of this loop,
                 * after handling all references of this strength, in
                 * case multiple references point to the same object.)
                 */
                schedClear = false;

                /* A PhantomReference is only useful with a
                 * queue, but since it's possible to create one
                 * without a queue, we need to check.
                 */
                schedEnqueue = enqueueReference(reference);
                break;
            default:
                assert(!"Bad reference type");
                schedClear = false;
                schedEnqueue = false;
                break;
            }
            numCleared += schedClear ? 1 : 0;
            numEnqueued += schedEnqueue ? 1 : 0;

            if (schedClear || schedEnqueue) {
                uintptr_t workBits;

                /* Stuff the clear/enqueue bits in the bottom of
                 * the pointer. Assumes that objects are 8-byte
                 * aligned.
                 *
                 * Note that we are adding the *Reference* (which
                 * is by definition already marked at this point) to
                 * this list; we're not adding the referent (which
                 * has already been cleared).
                 */
                assert(((intptr_t)reference & 3) == 0);
                assert(((WORKER_CLEAR | WORKER_ENQUEUE) & ~3) == 0);
                workBits = (schedClear ? WORKER_CLEAR : 0) |
                           (schedEnqueue ? WORKER_ENQUEUE : 0);
                if (!dvmHeapAddRefToLargeTable(
                        &gDvm.gcHeap->referenceOperations,
                        (Object *)((uintptr_t)reference | workBits)))
                {
                    LOGE_HEAP("dvmMalloc(): no room for any more "
                            "reference operations\n");
                    dvmAbort();
                }
                workRequired = true;
            }

            if (refType != REF_PHANTOM) {
                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }   // else this is handled later for REF_PHANTOM

        }   // else there was a stronger reference to the referent.

        reference = next;
    }

#define refType2str(r) \
    ((r) == REF_SOFT ? "soft" : ( \
     (r) == REF_WEAK ? "weak" : ( \
     (r) == REF_PHANTOM ? "phantom" : "UNKNOWN" )))

    LOGD_HEAP("dvmHeapHandleReferences(): cleared %zd, "
            "enqueued %zd %s references\n",
            numCleared, numEnqueued, refType2str(refType));

    /* Walk through the reference list again, and mark any non-clear/marked
     * referents. Only PhantomReferences can have non-clear referents
     * at this point.
     */
    if (refType == REF_PHANTOM) {
        bool scanRequired = false;

        HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
        reference = refListHead;
        while (reference != NULL) {
            Object *next;
            Object *referent;

            /* Pull the interesting fields out of the Reference object.
             */
            next = dvmGetFieldObject(reference, offVmData);
            referent = dvmGetFieldObject(reference, offReferent);

            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), markContext))
            {
                markObjectNonNull(referent, markContext);
                scanRequired = true;

                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }

            reference = next;
        }
        HPROF_CLEAR_GC_SCAN_STATE();

        if (scanRequired) {
            processMarkStack(markContext);
        }
    }

    if (workRequired) {
        dvmSignalHeapWorker(false);
    }
}
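/* Illustrative sketch (not part of the VM): how the clear/enqueue work
 * bits used by dvmHeapHandleReferences() above can be packed into and
 * recovered from the low bits of an aligned pointer. Because objects are
 * at least 4-byte aligned, the two low bits of a Reference pointer are
 * always zero and can carry the flags without extra storage. The names
 * encodeWorkRefExample()/decodeWorkRefExample() and the EXAMPLE_WORKER_*
 * values are hypothetical stand-ins; the real WORKER_* constants live in
 * the heap worker code.
 */
#if 0   /* example only, never compiled */
#include <stdint.h>

enum {
    EXAMPLE_WORKER_CLEAR   = 1 << 0,
    EXAMPLE_WORKER_ENQUEUE = 1 << 1
};

static void *encodeWorkRefExample(void *ref, int doClear, int doEnqueue)
{
    uintptr_t bits = (doClear ? EXAMPLE_WORKER_CLEAR : 0) |
                     (doEnqueue ? EXAMPLE_WORKER_ENQUEUE : 0);
    return (void *)((uintptr_t)ref | bits);     /* low bits were zero */
}

static void *decodeWorkRefExample(void *tagged, int *doClear, int *doEnqueue)
{
    uintptr_t v = (uintptr_t)tagged;
    *doClear = (v & EXAMPLE_WORKER_CLEAR) != 0;
    *doEnqueue = (v & EXAMPLE_WORKER_ENQUEUE) != 0;
    return (void *)(v & ~(uintptr_t)3);          /* strip the tag bits */
}
#endif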
/* Mark all objects that obj refers to.
 *
 * Called on every object in markList.
 */
static void scanObject(const Object *obj, GcMarkContext *ctx)
{
    ClassObject *clazz;

    assert(dvmIsValidObject(obj));
    LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);

#if WITH_HPROF
    if (gDvm.gcHeap->hprofContext != NULL) {
        hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
    }
#endif

    /* Get and mark the class object for this particular instance.
     */
    clazz = obj->clazz;
    if (clazz == NULL) {
        /* This can happen if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT(). The object
         * won't contain any references yet, so we can
         * just skip it.
         */
        return;
    } else if (clazz == gDvm.unlinkedJavaLangClass) {
        /* This class hasn't been linked yet. We're guaranteed
         * that the object doesn't contain any references that
         * aren't already tracked, so we can skip scanning it.
         *
         * NOTE: unlinkedJavaLangClass is not on the heap, so
         * it's very important that we don't try marking it.
         */
        return;
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = obj;
    if (ptr2chunk(obj)->scanGeneration == gGeneration) {
        LOGE("object 0x%08x was already scanned this generation\n",
                (uintptr_t)obj);
        dvmAbort();
    }
    ptr2chunk(obj)->oldScanGeneration = ptr2chunk(obj)->scanGeneration;
    ptr2chunk(obj)->scanGeneration = gGeneration;
    ptr2chunk(obj)->scanCount++;
#endif

    assert(dvmIsValidObject((Object *)clazz));
    markObjectNonNull((Object *)clazz, ctx);

    /* Mark any references in this object.
     */
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* It's an array object.
         */
        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
            /* It's an array of object references.
             */
            scanObjectArray((ArrayObject *)obj, ctx);
        }
        // else there's nothing else to scan
    } else {
        /* It's a DataObject-compatible object.
         */
        scanInstanceFields((DataObject *)obj, clazz, ctx);

        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
            GcHeap *gcHeap = gDvm.gcHeap;
            Object *referent;

            /* It's a subclass of java/lang/ref/Reference.
             * The fields in this class have been arranged
             * such that scanInstanceFields() did not actually
             * mark the "referent" field; we need to handle
             * it specially.
             *
             * If the referent already has a strong mark (isMarked(referent)),
             * we don't care about its reference status.
             */
            referent = dvmGetFieldObject(obj,
                    gDvm.offJavaLangRefReference_referent);
            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), &gcHeap->markContext))
            {
                u4 refFlags;

                if (gcHeap->markAllReferents) {
                    LOG_REF("Hard-marking a reference\n");

                    /* Don't bother with normal reference-following
                     * behavior, just mark the referent. This should
                     * only be used when following objects that just
                     * became scheduled for finalization.
                     */
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* See if this reference was handled by a previous GC.
                 */
                if (dvmGetFieldObject(obj,
                        gDvm.offJavaLangRefReference_vmData) ==
                        SCHEDULED_REFERENCE_MAGIC)
                {
                    LOG_REF("Skipping scheduled reference\n");

                    /* Don't reschedule it, but make sure that its
                     * referent doesn't get collected (in case it's
                     * a PhantomReference and wasn't cleared automatically).
                     */
                    //TODO: Mark these after handling all new refs of
                    //      this strength, in case the new refs refer
                    //      to the same referent. Not a very common
                    //      case, though.
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* Find out what kind of reference is pointing
                 * to referent.
                 */
                refFlags = GET_CLASS_FLAG_GROUP(clazz,
                        CLASS_ISREFERENCE |
                        CLASS_ISWEAKREFERENCE |
                        CLASS_ISPHANTOMREFERENCE);

            /* We use the vmData field of Reference objects
             * as a next pointer in a singly-linked list.
             * That way, we don't need to allocate any memory
             * while we're doing a GC.
             */
#define ADD_REF_TO_LIST(list, ref) \
            do { \
                Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
                dvmSetFieldObject(ARTL_ref_, \
                        gDvm.offJavaLangRefReference_vmData, list); \
                list = ARTL_ref_; \
            } while (false)

                /* At this stage, we just keep track of all of
                 * the live references that we've seen. Later,
                 * we'll walk through each of these lists and
                 * deal with the referents.
                 */
                if (refFlags == CLASS_ISREFERENCE) {
                    /* It's a soft reference. Depending on the state,
                     * we'll attempt to collect all of them, some of
                     * them, or none of them.
                     */
                    if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_NONE)
                    {
                sr_collect_none:
                        markObjectNonNull(referent, ctx);
                    } else if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_ALL)
                    {
                sr_collect_all:
                        ADD_REF_TO_LIST(gcHeap->softReferences, obj);
                    } else {
                        /* We'll only try to collect half of the
                         * referents.
                         */
                        if (gcHeap->softReferenceColor++ & 1) {
                            goto sr_collect_none;
                        }
                        goto sr_collect_all;
                    }
                } else {
                    /* It's a weak or phantom reference.
                     * Clearing CLASS_ISREFERENCE will reveal which.
                     */
                    refFlags &= ~CLASS_ISREFERENCE;
                    if (refFlags == CLASS_ISWEAKREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
                    } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
                    } else {
                        assert(!"Unknown reference type");
                    }
                }
#undef ADD_REF_TO_LIST
            }
        }

    skip_reference:
        /* If this is a class object, mark various other things that
         * its internals point to.
         *
         * All class objects are instances of java.lang.Class,
         * including the java.lang.Class class object.
         */
        if (clazz == gDvm.classJavaLangClass) {
            scanClassObject((ClassObject *)obj, ctx);
        }
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = NULL;
#endif
}
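/* Illustrative sketch (not part of the VM): the reference lists built by
 * ADD_REF_TO_LIST() above are singly-linked through the vmData field of
 * each Reference object, so no memory is allocated during the GC.
 * walkReferenceListExample() is a hypothetical helper showing roughly how
 * such a list is traversed; the real traversal is the loop at the top of
 * dvmHeapHandleReferences().
 */
#if 0   /* example only, never compiled */
static void walkReferenceListExample(Object *listHead)
{
    Object *reference = listHead;

    while (reference != NULL) {
        /* For the duration of the GC, vmData holds the next Reference
         * in this list rather than its normal contents. */
        Object *next = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_vmData);

        /* ... examine the referent, clear/enqueue as needed ... */

        reference = next;
    }
}
#endif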
/* Find unreachable objects that need to be finalized,
 * and schedule them for finalization.
 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
        //TODO: mark all finalizable refs and hope that
        //      we can schedule them next time. Watch out,
        //      because we may be expecting to free up space
        //      by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            DvmHeapChunk *hc;

            hc = ptr2chunk(*ref);
            if (!isMarked(hc, markContext)) {
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    //      a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.

        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmClearReferenceTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
                &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved; we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        markObjectNonNull(*ref, markContext);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();

    /* Set markAllReferents so that we don't collect referents whose
     * only references are in final-reachable objects.
     * TODO: eventually provide normal reference behavior by properly
     *       marking these references.
     */
    gDvm.gcHeap->markAllReferents = true;
    processMarkStack(markContext);
    gDvm.gcHeap->markAllReferents = false;

    dvmSignalHeapWorker(false);
}
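/* Illustrative sketch (not part of the VM): the gapRef/ref loop above is
 * an in-place compaction. Entries that stay on finalizableRefs (marked
 * refs) are copied down over the gaps left by entries that were moved to
 * the pending table. compactTableExample() shows the same idea on a plain
 * array of pointers; keepEntry() is a hypothetical stand-in for the
 * isMarked() test.
 */
#if 0   /* example only, never compiled */
#include <stddef.h>

static size_t compactTableExample(void **table, size_t count,
                                  int (*keepEntry)(void *))
{
    size_t src, dst = 0;

    for (src = 0; src < count; src++) {
        if (keepEntry(table[src])) {
            table[dst++] = table[src];   /* slide survivors down */
        }
    }
    return dst;                          /* new entry count */
}
#endif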
/* If the object hasn't already been marked, mark it and
 * schedule it to be scanned for references.
 *
 * obj may not be NULL. The macro dvmMarkObject() should
 * be used in situations where a reference may be NULL.
 *
 * This function may only be called when marking the root
 * set. When recursing, use the internal markObject().
 */
void dvmMarkObjectNonNull(const Object *obj)
{
    assert(obj != NULL);
    markObjectNonNull(obj, &gDvm.gcHeap->markContext, false);
}
/* Used to mark objects when recursing. Recursion is done by moving
 * the finger across the bitmaps in address order and marking child
 * objects. Any newly-marked objects whose addresses are lower than
 * the finger won't be visited by the bitmap scan, so those objects
 * need to be added to the mark stack.
 */
static void markObject(const Object *obj, GcMarkContext *ctx)
{
    if (obj != NULL) {
        markObjectNonNull(obj, ctx, true);
    }
}
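/* Illustrative sketch (not part of the VM): the "finger" optimization the
 * comment above describes, assuming the mark context exposes the current
 * scan position as ctx->finger. While the mark bitmap is scanned in
 * address order, a newly-marked object whose address the scan has already
 * passed (obj < finger) must be pushed on the mark stack or it would
 * never be scanned; objects at or above the finger will be reached by the
 * bitmap walk itself. The helpers setMarkBitExample() and
 * pushOnMarkStackExample() are hypothetical.
 */
#if 0   /* example only, never compiled */
static void markChildExample(const Object *obj, GcMarkContext *ctx)
{
    /* setMarkBitExample() returns true if the bit was already set. */
    bool wasAlreadyMarked = setMarkBitExample(ctx, obj);

    if (!wasAlreadyMarked && (const void *)obj < ctx->finger) {
        /* The bitmap scan has already passed this address, so queue the
         * object explicitly for scanning. */
        pushOnMarkStackExample(ctx, obj);
    }
    /* Newly-marked objects at or above the finger are picked up when the
     * bitmap walk reaches their address; nothing more to do here. */
}
#endif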