void trace(JSObject* map, JS::GCCellPtr key, JS::GCCellPtr value) override {
    JSObject* kdelegate = nullptr;
    if (key.is<JSObject>())
        kdelegate = js::GetWeakmapKeyDelegate(&key.as<JSObject>());

    fprintf(output, "WeakMapEntry map=%p key=%p keyDelegate=%p value=%p\n",
            map, key.asCell(), kdelegate, value.asCell());
}
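/*
 * A minimal sketch of how a js::WeakMapTracer subclass like the one above can
 * be driven: js::TraceWeakMaps is the jsfriendapi entry point that invokes
 * trace() once per live weak map entry. The DumpHeapTracer constructor
 * signature shown here is an assumption for illustration.
 */
static void
DumpWeakMapEntries(JSRuntime* rt, FILE* fp)
{
    DumpHeapTracer dtrc(fp, rt); // assumed (FILE*, JSRuntime*) constructor
    js::TraceWeakMaps(&dtrc);    // calls dtrc.trace() for each live entry
}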
void
DumpHeapTracer::onChild(const JS::GCCellPtr& thing)
{
    if (gc::IsInsideNursery(thing.asCell()))
        return;

    char buffer[1024];
    getTracingEdgeName(buffer, sizeof(buffer));
    fprintf(output, "%s%p %c %s\n", prefix, thing.asCell(),
            MarkDescriptor(thing.asCell()), buffer);
}
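/*
 * For reference, a sketch of kicking off a whole-heap dump from embedder
 * code. js::DumpHeap is the friend-API wrapper that constructs a
 * DumpHeapTracer internally and walks the roots and zones; the
 * nursery-behaviour enum value is quoted from memory and should be checked
 * against jsfriendapi.h.
 */
void
DumpMyHeap(JSRuntime* rt)
{
    js::DumpHeap(rt, stderr, js::CollectNurseryBeforeDump);
}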
void
ObjectGroupCycleCollectorTracer::onChild(const JS::GCCellPtr& thing)
{
    if (thing.is<JSObject>() || thing.is<JSScript>()) {
        // Invoke the inner cycle collector callback on this child. It will not
        // recurse back into TraceChildren.
        innerTracer->onChild(thing);
        return;
    }

    if (thing.is<ObjectGroup>()) {
        // If this group is required to be in an ObjectGroup chain, trace it
        // via the provided worklist rather than continuing to recurse.
        ObjectGroup& group = thing.as<ObjectGroup>();
        if (group.maybeUnboxedLayout()) {
            for (size_t i = 0; i < seen.length(); i++) {
                if (seen[i] == &group)
                    return;
            }
            if (seen.append(&group) && worklist.append(&group)) {
                return;
            } else {
                // If append fails, keep tracing normally. The worst that will
                // happen is we end up overrecursing.
            }
        }
    }

    TraceChildren(this, thing.asCell(), thing.kind());
}
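/*
 * A sketch of the driver loop this tracer is designed for: trace the initial
 * group, then drain the worklist so chains of unboxed-layout groups are
 * followed iteratively instead of recursively. The constructor argument and
 * the popCopy() helper are assumptions inferred from how the worklist is used
 * above.
 */
void
TraceGroupCycleCollectorChildren(JS::CallbackTracer* innerTracer, ObjectGroup* group)
{
    ObjectGroupCycleCollectorTracer tracer(innerTracer); // assumed constructor
    TraceChildren(&tracer, group, JS::TraceKind::ObjectGroup);

    while (!tracer.worklist.empty()) {
        ObjectGroup* innerGroup = tracer.worklist.popCopy();
        TraceChildren(&tracer, innerGroup, JS::TraceKind::ObjectGroup);
    }
}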
void
CheckHeapTracer::onChild(const JS::GCCellPtr& thing)
{
    Cell* cell = thing.asCell();
    if (visited.lookup(cell))
        return;

    if (!visited.put(cell)) {
        oom = true;
        return;
    }

    if (!IsGCThingValidAfterMovingGC(cell)) {
        failures++;
        fprintf(stderr, "Stale pointer %p\n", cell);
        const char* name = contextName();
        for (int index = parentIndex; index != -1; index = stack[index].parentIndex) {
            const WorkItem& parent = stack[index];
            cell = parent.thing.asCell();
            fprintf(stderr, " from %s %p %s edge\n",
                    GCTraceKindToAscii(cell->getTraceKind()), cell, name);
            name = parent.name;
        }
        fprintf(stderr, " from root %s\n", name);
        return;
    }

    WorkItem item(thing, contextName(), parentIndex);
    if (!stack.append(item))
        oom = true;
}
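/*
 * onChild only pushes work items; a separate loop consumes them. Below is a
 * sketch of that drain loop, assuming WorkItem carries a 'processed' flag and
 * that parentIndex is the tracer field used to reconstruct the failure path
 * above. The method name drainWorkStack is hypothetical.
 */
void
CheckHeapTracer::drainWorkStack()
{
    while (!stack.empty()) {
        WorkItem item = stack.back();
        if (item.processed) {
            stack.popBack();
        } else {
            // Record where children of this item should point back to.
            parentIndex = stack.length() - 1;
            TraceChildren(this, item.thing.asCell(), item.thing.kind());
            stack.back().processed = true;
        }
    }
}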
/*
 * This function builds up the heap snapshot by adding edges to the current
 * node.
 */
void
VerifyPreTracer::onChild(const JS::GCCellPtr& thing)
{
    MOZ_ASSERT(!IsInsideNursery(thing.asCell()));

    edgeptr += sizeof(EdgeValue);
    if (edgeptr >= term) {
        edgeptr = term;
        return;
    }

    VerifyNode* node = curnode;
    uint32_t i = node->count;

    node->edges[i].thing = thing.asCell();
    node->edges[i].kind = thing.kind();
    node->edges[i].label = contextName();
    node->count++;
}
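/*
 * The snapshot structures this function writes into, reconstructed from the
 * field accesses above; the exact layout in the verifier may differ. Nodes
 * live contiguously in one byte buffer with 'term' marking its end, which is
 * why edgeptr saturates to term on overflow instead of reallocating.
 */
struct EdgeValue
{
    void* thing;        // target cell of this edge
    JS::TraceKind kind; // trace kind of the target
    const char* label;  // edge name captured via contextName()
};

struct VerifyNode
{
    void* thing;        // source cell
    JS::TraceKind kind;
    uint32_t count;     // number of edges recorded so far
    EdgeValue edges[1]; // variable-length tail of recorded edges
};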
/*
 * This function is called by EndVerifyBarriers for every heap edge. If the edge
 * already existed in the original snapshot, we "cancel it out" by overwriting
 * it with nullptr. EndVerifyBarriers later asserts that the remaining
 * non-nullptr edges (i.e., the ones from the original snapshot that must have
 * been modified) must point to marked objects.
 */
void
CheckEdgeTracer::onChild(const JS::GCCellPtr& thing)
{
    /* Avoid n^2 behavior. */
    if (node->count > MAX_VERIFIER_EDGES)
        return;

    for (uint32_t i = 0; i < node->count; i++) {
        if (node->edges[i].thing == thing.asCell()) {
            MOZ_ASSERT(node->edges[i].kind == thing.kind());
            node->edges[i].thing = nullptr;
            return;
        }
    }
}
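/*
 * A sketch of the post-pass that gives the cancelled edges their meaning: any
 * edge left non-null was recorded in the original snapshot but not found when
 * re-tracing, so it was overwritten during the verification period and the
 * pre-barrier must have marked its old target. IsMarkedOrAllocated is a
 * hypothetical helper standing in for the verifier's real mark check.
 */
static void
AssertSurvivingEdgesMarked(VerifyNode* node)
{
    for (uint32_t i = 0; i < node->count; i++) {
        if (node->edges[i].thing)
            MOZ_ASSERT(IsMarkedOrAllocated(static_cast<Cell*>(node->edges[i].thing)));
    }
}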
Node::Node(const JS::GCCellPtr& thing)
{
    DispatchTraceKindTyped(ConstructFunctor(), thing.asCell(), thing.kind(), this);
}
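/*
 * A plausible shape for ConstructFunctor, assuming the dispatch helper
 * downcasts the cell to its concrete type T (per the trace kind) and forwards
 * the trailing Node* argument to the functor. Node::construct<T> is assumed
 * here to be ubi::Node's typed initializer.
 */
struct ConstructFunctor
{
    template <typename T>
    void operator()(T* thing, JS::ubi::Node* node) {
        node->construct(thing); // assumed typed initializer on ubi::Node
    }
};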