// Copy-on-write constructor: builds a generator function shell with no
// backing script function attached yet (scriptFunction is set to nullptr
// and presumably filled in by the copy machinery — TODO confirm caller).
JavascriptGeneratorFunction::JavascriptGeneratorFunction(DynamicType* type) :
    ScriptFunctionBase(type, &functionInfo),
    scriptFunction(nullptr)
{
    // Constructor used during copy on write.
    DebugOnly(VerifyEntryPoint());
}
// Debug-only consistency check for a finalizable heap bucket: runs the base
// bucket verification, then (DBG builds only) walks the pending-dispose block
// list and verifies each block's list bookkeeping and contents.
void SmallFinalizableHeapBucketBaseT<TBlockType>::Verify()
{
    BaseT::Verify();
#if DBG
    RecyclerVerifyListConsistencyData recyclerVerifyListConsistencyData;
    // Set up the expected-state record for the matching block size class.
    // (Arguments are (nullptr, false, true) — assumed to describe the expected
    // allocator/dispose state of pending-dispose blocks; TODO confirm meanings.)
    if (TBlockType::HeapBlockAttributes::IsSmallBlock)
    {
        recyclerVerifyListConsistencyData.SetupVerifyListConsistencyDataForSmallBlock(nullptr, false, true);
    }
    else if (TBlockType::HeapBlockAttributes::IsMediumBlock)
    {
        recyclerVerifyListConsistencyData.SetupVerifyListConsistencyDataForMediumBlock(nullptr, false, true);
    }
    else
    {
        // Only small and medium block attributes are expected here.
        Assert(false);
    }

    HeapBlockList::ForEach(this->pendingDisposeList, [this, &recyclerVerifyListConsistencyData](TBlockType * heapBlock)
    {
        DebugOnly(this->VerifyBlockConsistencyInList(heapBlock, recyclerVerifyListConsistencyData));
        heapBlock->Verify(true);
    });
#endif
}
// Converts this scope to an object scope (captured variables live on a scope
// object instead of a slot array/registers). Idempotent: returns immediately
// if the scope is already an object scope.
void Scope::SetIsObject()
{
    if (this->isObject)
    {
        return;
    }
    this->isObject = true;

    // We might set the scope to be object after we have process the symbol
    // (e.g. "With" scope referencing a symbol in an outer scope).
    // If we have func assignment, we need to mark the function to not do stack nested function
    // as these are now assigned to a scope object.
    FuncInfo * funcInfo = this->GetFunc();
    if (funcInfo && !funcInfo->HasMaybeEscapedNestedFunc())
    {
        // Stop at the first symbol with a function assignment; one is enough
        // to force the maybe-escaped marking (ForEachSymbolUntil stops on true).
        this->ForEachSymbolUntil([funcInfo](Symbol * const sym)
        {
            if (sym->GetHasFuncAssignment())
            {
                funcInfo->SetHasMaybeEscapedNestedFunc(DebugOnly(_u("DelayedObjectScopeAssignment")));
                return true;
            }
            return false;
        });
    }
}
//===================================================================================================== // Free //===================================================================================================== void LargeHeapBucket::ExplicitFree(void * object, size_t sizeCat) { Assert(HeapInfo::GetMediumObjectAlignedSizeNoCheck(sizeCat) == this->sizeCat); LargeObjectHeader * header = LargeHeapBlock::GetHeaderFromAddress(object); Assert(header->GetAttributes(this->heapInfo->recycler->Cookie) == ObjectInfoBits::NoBit || header->GetAttributes(this->heapInfo->recycler->Cookie) == ObjectInfoBits::LeafBit); Assert(!header->isExplicitFreed); DebugOnly(header->isExplicitFreed = true); Assert(header->objectSize >= sizeCat); #if DBG HeapBlock* heapBlock = this->GetRecycler()->FindHeapBlock(object); Assert(heapBlock != nullptr); Assert(heapBlock->IsLargeHeapBlock()); LargeHeapBlock * largeHeapBlock = (LargeHeapBlock *)heapBlock; LargeObjectHeader * dbgHeader; Assert(largeHeapBlock->GetObjectHeader(object, &dbgHeader)); Assert(dbgHeader == header); #endif FreeObject * freeObject = (FreeObject *)object; freeObject->SetNext(this->explicitFreeList); this->explicitFreeList = freeObject; header->SetAttributes(this->heapInfo->recycler->Cookie, ObjectInfoBits::LeafBit); // We can stop scanning it now. }
// Reserves the next scope-slot index and returns it.
// When the running count reaches the maximum slot count that the byte code
// can encode, the enclosing function is flagged as having a maybe-escaped
// nested function (disabling stack allocation of nested functions).
int Scope::AddScopeSlot()
{
    const int allocatedSlot = scopeSlotCount;
    ++scopeSlotCount;
    const bool hitEncodingLimit = (scopeSlotCount == Js::ScopeSlots::MaxEncodedSlotCount);
    if (hitEncodingLimit)
    {
        this->GetEnclosingFunc()->SetHasMaybeEscapedNestedFunc(DebugOnly(_u("TooManySlots")));
    }
    return allocatedSlot;
}
// Copy-on-write constructor: creates an empty bound-function shell with no
// target, no bound 'this', and no bound arguments.
BoundFunction::BoundFunction(DynamicType * type) :
    JavascriptFunction(type, &functionInfo),
    targetFunction(nullptr),
    boundThis(nullptr),
    count(0),
    boundArgs(nullptr)
{
    // Constructor used during copy on write.
    DebugOnly(VerifyEntryPoint());
}
// Allocates a fresh WeakMapKeyMap and stores it on 'key' under the
// WeakMapKeyMap internal property, returning the new map.
// Precondition (DBG-checked): the key has no existing non-null map.
JavascriptWeakMap::WeakMapKeyMap* JavascriptWeakMap::AddWeakMapKeyMapToKey(DynamicObject* key)
{
    // The internal property may exist on an object that has had DynamicObject::ResetObject called on itself.
    // In that case the value stored in the property slot should be null.
    // (Both the declaration and the GetInternalProperty probe exist only in
    // DBG builds — the Assert body is compiled out in retail.)
    DebugOnly(Var unused = nullptr);
    Assert(!key->GetInternalProperty(key, InternalPropertyIds::WeakMapKeyMap, &unused, nullptr, nullptr) || unused == nullptr);

    WeakMapKeyMap* weakMapKeyData = RecyclerNew(GetScriptContext()->GetRecycler(), WeakMapKeyMap, GetScriptContext()->GetRecycler());
    // PropertyOperation_Force: set unconditionally, bypassing normal checks.
    BOOL success = key->SetInternalProperty(InternalPropertyIds::WeakMapKeyMap, weakMapKeyData, PropertyOperation_Force, nullptr);
    Assert(success);

    return weakMapKeyData;
}
// Marks this function as having a nested function that may escape, which
// disables stack allocation of its nested functions. The 'reason' parameter
// exists only in DBG builds (DebugOnly macro in the signature) and is used
// purely for trace output.
void FuncInfo::SetHasMaybeEscapedNestedFunc(DebugOnly(char16 const * reason))
{
    // Trace only on the first transition (flag not yet set) when the
    // StackFunc test-trace phase is enabled.
    if (PHASE_TESTTRACE(Js::StackFuncPhase, this->byteCodeFunction) && !hasEscapedUseNestedFunc)
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        // In retail builds 'reason' does not exist, so default to an empty
        // string and overwrite it in DBG builds only.
        char16 const * r = _u("");
        DebugOnly(r = reason);
        Output::Print(_u("HasMaybeEscapedNestedFunc (%s): %s (function %s)\n"), r, this->byteCodeFunction->GetDisplayName(), this->byteCodeFunction->GetDebugNumberSet(debugStringBuffer));
        Output::Flush();
    }
    hasEscapedUseNestedFunc = true;
}
// Marks this symbol as having a use through which a nested function might
// escape. If the symbol also has a function assignment, the owning function
// is flagged so nested functions are not stack-allocated.
// Fix: use the portable _u() char16 literal macro (file convention; see the
// sibling Scope/FuncInfo methods) instead of raw L"" wide literals, which are
// wchar_t — a different, 32-bit type on non-Windows platforms.
void Symbol::SetHasMaybeEscapedUseInternal(ByteCodeGenerator * byteCodeGenerator)
{
    // Only meaningful on the first transition; formals are handled elsewhere.
    Assert(!hasMaybeEscapedUse);
    Assert(!this->GetIsFormal());
    hasMaybeEscapedUse = true;
    if (PHASE_TESTTRACE(Js::StackFuncPhase, byteCodeGenerator->TopFuncInfo()->byteCodeFunction))
    {
        Output::Print(_u("HasMaybeEscapedUse: %s\n"), this->GetName().GetBuffer());
        Output::Flush();
    }
    if (this->GetHasFuncAssignment())
    {
        // The reason string exists only in DBG builds (DebugOnly macro).
        this->GetScope()->GetFunc()->SetHasMaybeEscapedNestedFunc(
            DebugOnly(this->symbolType == STFunction ? _u("MaybeEscapedUseFuncDecl") : _u("MaybeEscapedUse")));
    }
}
// Records that this scope has (or may have) a local captured by a closure.
// Either propagates the fact to the owning function, or forces this scope to
// be instantiated at runtime.
void Scope::SetHasLocalInClosure(bool has)
{
    // (Note: if any catch var is closure-captured, we won't merge the catch scope with the function scope.
    // So don't mark the function scope "has local in closure".)
    bool notCatch = this->scopeType != ScopeType_Catch && this->scopeType != ScopeType_CatchParamPattern;
    // NOTE(review): '&&' binds tighter than '||', so this condition parses as
    //   (has && (bodyScope || paramScope)) || (GetCanMerge() && notCatch)
    // meaning a mergeable non-catch scope takes the first branch even when
    // 'has' is false. Confirm this grouping is intended and not a missing
    // parenthesization around the whole '||' expression.
    if (has && (this == func->GetBodyScope() || this == func->GetParamScope()) || (GetCanMerge() && notCatch))
    {
        func->SetHasLocalInClosure(true);
    }
    else
    {
        // A cross-scope function assignment into a scope that must be
        // instantiated separately forces the maybe-escaped marking.
        if (hasCrossScopeFuncAssignment)
        {
            func->SetHasMaybeEscapedNestedFunc(DebugOnly(_u("InstantiateScopeWithCrossScopeAssignment")));
        }
        SetMustInstantiate(true);
    }
}
// Marks this symbol as being assigned a function. If the symbol already has a
// maybe-escaped use, or lives in an object scope, the current function is
// flagged as having a maybe-escaped nested function.
// Fix: use the portable _u() char16 literal macro (file convention) instead
// of raw L"" wide literals, which are wchar_t — a different, 32-bit type on
// non-Windows platforms. Also reuse the already-computed 'top' FuncInfo
// instead of calling TopFuncInfo() a second time (same value, same behavior).
void Symbol::SetHasFuncAssignmentInternal(ByteCodeGenerator * byteCodeGenerator)
{
    Assert(!hasFuncAssignment);
    hasFuncAssignment = true;
    FuncInfo * top = byteCodeGenerator->TopFuncInfo();
    if (PHASE_TESTTRACE(Js::StackFuncPhase, top->byteCodeFunction))
    {
        Output::Print(_u("HasFuncAssignment: %s\n"), this->GetName().GetBuffer());
        Output::Flush();
    }
    if (this->GetHasMaybeEscapedUse() || this->GetScope()->GetIsObject())
    {
        // The reason string exists only in DBG builds (DebugOnly macro).
        top->SetHasMaybeEscapedNestedFunc(DebugOnly(
            this->GetIsFormal() ? _u("FormalAssignment") :
            this->GetScope()->GetIsObject() ? _u("ObjectScopeAssignment") :
            _u("MaybeEscapedUse")));
    }
}
// Accounts for bytes allocated in a heap block since the last collection.
// In partial-collect mode the bytes are added to the partial-GC heuristics;
// otherwise the block's counters are simply reset for the next GC.
// Note: the #if straddles the else — when ENABLE_PARTIAL_GC is off, only the
// unconditional clearing branch is compiled.
void RecyclerSweep::AddUnaccountedNewObjectAllocBytes(SmallHeapBlockT<TBlockAttributes> * heapBlock)
{
#if ENABLE_PARTIAL_GC
    // Only need to update the unaccounted alloc bytes if we are in partial collect mode
    if (recycler->inPartialCollectMode)
    {
        uint unaccountedAllocBytes = heapBlock->GetAndClearUnaccountedAllocBytes();
        // At most one of the two counters may be non-zero at this point.
        Assert(heapBlock->lastUncollectedAllocBytes == 0 || unaccountedAllocBytes == 0);
        DebugOnly(heapBlock->lastUncollectedAllocBytes += unaccountedAllocBytes);
        recycler->partialUncollectedAllocBytes += unaccountedAllocBytes;
        this->nextPartialUncollectedAllocBytes += unaccountedAllocBytes;
    }
    else
#endif
    {
        // We don't care, clear the unaccounted to start tracking for new object for next GC
        heapBlock->ClearAllAllocBytes();
    }
}
// Builds a bound function over 'targetFunction' with a pre-bound 'this' and a
// copy of the supplied bound arguments. The arguments are copied into
// recycler-allocated memory so their lifetime is tied to this object.
BoundFunction::BoundFunction(RecyclableObject* targetFunction, Var boundThis, Var* args, uint argsCount, DynamicType * type) :
    JavascriptFunction(type, &functionInfo), count(argsCount), boundArgs(nullptr)
{
    DebugOnly(VerifyEntryPoint());
    this->targetFunction = targetFunction;
    this->boundThis = boundThis;

    if (argsCount == 0)
    {
        // Nothing to capture; boundArgs stays null.
        return;
    }

    this->boundArgs = RecyclerNewArray(this->GetScriptContext()->GetRecycler(), Var, argsCount);
    for (uint argIndex = 0; argIndex < argsCount; argIndex++)
    {
        this->boundArgs[argIndex] = args[argIndex];
    }
}
// Constructs the per-function byte-code-generation state.
// Wires this FuncInfo into its body/param scopes, and propagates the
// maybe-escaped-nested-func flag from the parse node if the parser already
// determined that a nested function escapes.
FuncInfo::FuncInfo(
    const char16 *name,
    ArenaAllocator *alloc,
    Scope *paramScope,
    Scope *bodyScope,
    ParseNode *pnode,
    Js::ParseableFunctionInfo* byteCodeFunction)
    :
    // Allocator and register/argument counters.
    alloc(alloc),
    varRegsCount(0),
    constRegsCount(2), // starts at 2 — presumably reserved constants; TODO confirm which
    inArgsCount(0),
    innerScopeCount(0),
    currentInnerScopeIndex((uint)-1),
    firstTmpReg(Js::Constants::NoRegister),
    curTmpReg(Js::Constants::NoRegister),
    outArgsMaxDepth(0),
    outArgsCurrentExpr(0),
#if DBG
    outArgsDepth(0),
#endif
    name(name),
    // Well-known value/scope registers, all unassigned initially.
    nullConstantRegister(Js::Constants::NoRegister),
    undefinedConstantRegister(Js::Constants::NoRegister),
    trueConstantRegister(Js::Constants::NoRegister),
    falseConstantRegister(Js::Constants::NoRegister),
    thisPointerRegister(Js::Constants::NoRegister),
    superRegister(Js::Constants::NoRegister),
    superCtorRegister(Js::Constants::NoRegister),
    newTargetRegister(Js::Constants::NoRegister),
    envRegister(Js::Constants::NoRegister),
    frameObjRegister(Js::Constants::NoRegister),
    frameSlotsRegister(Js::Constants::NoRegister),
    paramSlotsRegister(Js::Constants::NoRegister),
    frameDisplayRegister(Js::Constants::NoRegister),
    funcObjRegister(Js::Constants::NoRegister),
    localClosureReg(Js::Constants::NoRegister),
    yieldRegister(Js::Constants::NoRegister),
    // Scopes and parse tree.
    paramScope(paramScope),
    bodyScope(bodyScope),
    funcExprScope(nullptr),
    root(pnode),
    capturedSyms(nullptr),
    capturedSymMap(nullptr),
    currentChildFunction(nullptr),
    currentChildScope(nullptr),
    // Boolean analysis flags, all false until proven otherwise.
    callsEval(false),
    childCallsEval(false),
    hasArguments(false),
    hasHeapArguments(false),
    isTopLevelEventHandler(false),
    hasLocalInClosure(false),
    hasClosureReference(false),
    hasGlobalReference(false),
    hasCachedScope(false),
    funcExprNameReference(false),
    applyEnclosesArgs(false),
    escapes(false),
    hasDeferredChild(false),
    childHasWith(false),
    hasLoop(false),
    hasEscapedUseNestedFunc(false),
    needEnvRegister(false),
    hasCapturedThis(false),
#if DBG
    isReused(false),
#endif
    staticFuncId(-1),
    inlineCacheMap(nullptr),
    slotProfileIdMap(alloc),
    argsPlaceHolderSlotCount(0),
    // Scope-slot assignments for lexically captured values.
    thisScopeSlot(Js::Constants::NoProperty),
    innerThisScopeSlot(Js::Constants::NoProperty),
    superScopeSlot(Js::Constants::NoProperty),
    innerSuperScopeSlot(Js::Constants::NoProperty),
    superCtorScopeSlot(Js::Constants::NoProperty),
    innerSuperCtorScopeSlot(Js::Constants::NoProperty),
    newTargetScopeSlot(Js::Constants::NoProperty),
    innerNewTargetScopeSlot(Js::Constants::NoProperty),
    isThisLexicallyCaptured(false),
    isSuperLexicallyCaptured(false),
    isSuperCtorLexicallyCaptured(false),
    isNewTargetLexicallyCaptured(false),
    // Inline-cache bookkeeping.
    inlineCacheCount(0),
    rootObjectLoadInlineCacheCount(0),
    rootObjectLoadMethodInlineCacheCount(0),
    rootObjectStoreInlineCacheCount(0),
    isInstInlineCacheCount(0),
    referencedPropertyIdCount(0),
    argumentsSymbol(nullptr),
    innerArgumentsSymbol(nullptr),
    nonUserNonTempRegistersToInitialize(alloc),
    // Constant-to-register maps (17 — presumably an initial bucket count).
    constantToRegister(alloc, 17),
    stringToRegister(alloc, 17),
    doubleConstantToRegister(alloc, 17),
    stringTemplateCallsiteRegisterMap(alloc, 17),
    targetStatements(alloc),
    nextForInLoopLevel(0),
    maxForInLoopLevel(0)
{
    this->byteCodeFunction = byteCodeFunction;
    // Wire this FuncInfo into its scopes (paramScope may be absent).
    bodyScope->SetFunc(this);
    if (paramScope != nullptr)
    {
        paramScope->SetFunc(this);
    }
    // If the parser already knows a nested function escapes, propagate that.
    if (pnode && pnode->sxFnc.NestedFuncEscapes())
    {
        this->SetHasMaybeEscapedNestedFunc(DebugOnly(_u("Child")));
    }
}
// Attempts to satisfy an allocation from this bucket's explicit free list
// (objects previously released via ExplicitFree). Scans the list for the
// first entry whose recorded object size is large enough, unlinks it,
// restores its attributes, and returns its address; returns nullptr if no
// suitable entry exists.
char * LargeHeapBucket::TryAllocFromExplicitFreeList(Recycler * recycler, size_t sizeCat, ObjectInfoBits attributes)
{
    // Only internal attribute bits may be requested here.
    Assert((attributes & InternalObjectInfoBitMask) == attributes);

    FreeObject * currFreeObject = this->explicitFreeList;
    FreeObject * prevFreeObject = nullptr;
    while (currFreeObject != nullptr)
    {
        char * memBlock = (char *)currFreeObject;
        LargeObjectHeader * header = LargeHeapBlock::GetHeaderFromAddress(memBlock);
        // Everything on this list was explicitly freed and belongs to this
        // bucket's size category.
        Assert(header->isExplicitFreed);
        Assert(HeapInfo::GetMediumObjectAlignedSizeNoCheck(header->objectSize) == this->sizeCat);

        // Skip entries too small for the requested size.
        if (header->objectSize < sizeCat)
        {
            prevFreeObject = currFreeObject;
            currFreeObject = currFreeObject->GetNext();
            continue;
        }

        // Found a fit: clear the DBG-only freed flag and unlink it.
        DebugOnly(header->isExplicitFreed = false);
        if (prevFreeObject)
        {
            prevFreeObject->SetNext(currFreeObject->GetNext());
        }
        else
        {
            this->explicitFreeList = currFreeObject->GetNext();
        }

#ifdef RECYCLER_MEMORY_VERIFY
        // Verify the block/header relationship and scrub the free-list link.
        HeapBlock* heapBlock = recycler->FindHeapBlock(memBlock);
        Assert(heapBlock != nullptr);
        Assert(heapBlock->IsLargeHeapBlock());
        LargeHeapBlock * largeHeapBlock = (LargeHeapBlock *)heapBlock;
        LargeObjectHeader * dbgHeader;
        Assert(largeHeapBlock->GetObjectHeader(memBlock, &dbgHeader));
        Assert(dbgHeader == header);
        ((FreeObject *)memBlock)->DebugFillNext();
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
        // TODO: large heap block doesn't separate leaf object on to different page allocator.
        // so all the memory should still be zeroed.
        memset(memBlock, 0, sizeof(FreeObject));
#endif
        // Stamp the requested attributes (only the storable bits).
        header->SetAttributes(recycler->Cookie, (attributes & StoredObjectInfoBitMask));

        // A finalizable allocation must be counted on its owning block.
        if ((attributes & ObjectInfoBits::FinalizeBit) != 0)
        {
            LargeHeapBlock* heapBlock = (LargeHeapBlock *)recycler->FindHeapBlock(memBlock);
            heapBlock->finalizeCount++;
#ifdef RECYCLER_FINALIZE_CHECK
            heapInfo->liveFinalizableObjectCount++;
            heapInfo->newFinalizableObjectCount++;
#endif
        }

        return memBlock;
    }

    return nullptr;
}
// Default external-function shell: no native method or initializer attached
// yet; uses the standard external-function thunk as the entry point.
JavascriptExternalFunction::JavascriptExternalFunction(DynamicType *type) :
    RuntimeFunction(type, &EntryInfo::ExternalFunctionThunk),
    nativeMethod(nullptr),
    signature(nullptr),
    callbackState(nullptr),
    initMethod(nullptr),
    oneBit(1),
    typeSlots(0),
    hasAccessors(0),
    callCount(0),
    prototypeTypeId(-1),
    flags(0)
{
    DebugOnly(VerifyEntryPoint());
}
// External function with a native entry point and deferred type
// initialization: 'method' runs on first use to populate 'deferredSlotCount'
// slots (and accessors, if requested).
JavascriptExternalFunction::JavascriptExternalFunction(ExternalMethod entryPoint, DynamicType* type, InitializeMethod method, unsigned short deferredSlotCount, bool accessors) :
    RuntimeFunction(type, &EntryInfo::ExternalFunctionThunk),
    nativeMethod(entryPoint),
    signature(nullptr),
    callbackState(nullptr),
    initMethod(method),
    oneBit(1),
    typeSlots(deferredSlotCount),
    hasAccessors(accessors),
    callCount(0),
    prototypeTypeId(-1),
    flags(0)
{
    DebugOnly(VerifyEntryPoint());
}
// Detaches this allocator from its current heap block (or drains its explicit
// free list when no block is attached), returning unconsumed free space to
// the block and updating allocation-accounting. Statement order matters here
// for cross-thread visibility — do not reorder.
void SmallHeapBlockAllocator<TBlockType>::Clear()
{
    TBlockType * heapBlock = this->heapBlock;
    if (heapBlock != nullptr)
    {
        Assert(heapBlock->isInAllocator);
        heapBlock->isInAllocator = false;
        FreeObject * remainingFreeObjectList = nullptr;
        if (this->endAddress != nullptr)
        {
            // Bump-allocation mode: everything between freeObjectList and
            // endAddress is unconsumed space being handed back.
#ifdef RECYCLER_TRACK_NATIVE_ALLOCATED_OBJECTS
            TrackNativeAllocatedObjects();
            lastNonNativeBumpAllocatedBlock = nullptr;
#endif
#ifdef PROFILE_RECYCLER_ALLOC
            // Need to tell the tracker
            this->bucket->heapInfo->recycler->TrackUnallocated((char *)this->freeObjectList, this->endAddress, this->bucket->sizeCat);
#endif
            RecyclerMemoryTracking::ReportUnallocated(this->heapBlock->heapBucket->heapInfo->recycler, (char *)this->freeObjectList, this->endAddress, heapBlock->heapBucket->sizeCat);
#ifdef RECYCLER_PERF_COUNTERS
            size_t unallocatedObjects = heapBlock->objectCount - ((char *)this->freeObjectList - heapBlock->address) / heapBlock->objectSize;
            size_t unallocatedObjectBytes = unallocatedObjects * heapBlock->GetObjectSize();
            RECYCLER_PERF_COUNTER_ADD(LiveObject, unallocatedObjects);
            RECYCLER_PERF_COUNTER_ADD(LiveObjectSize, unallocatedObjectBytes);
            RECYCLER_PERF_COUNTER_SUB(FreeObjectSize, unallocatedObjectBytes);
            RECYCLER_PERF_COUNTER_ADD(SmallHeapBlockLiveObject, unallocatedObjects);
            RECYCLER_PERF_COUNTER_ADD(SmallHeapBlockLiveObjectSize, unallocatedObjectBytes);
            RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockFreeObjectSize, unallocatedObjectBytes);
#endif
            Assert(heapBlock->freeObjectList == nullptr);
            this->endAddress = nullptr;
        }
        else
        {
            // Free-list mode: hand any remaining free entries back to the block.
            remainingFreeObjectList = this->freeObjectList;
            heapBlock->freeObjectList = remainingFreeObjectList;
        }
        this->freeObjectList = nullptr;

        // this->freeObjectList and this->lastFreeCount are accessed in
        // SmallHeapBlock::ResetMarks. The order of access there is: first we
        // see if lastFreeCount = 0, and if it is, we assert that
        // freeObjectList = null. Because of ARM's memory model, we need to
        // insert barriers so that the two variables can be accessed correctly
        // across threads. Here, after we write to this->freeObjectList, we
        // insert a write barrier so that if this->lastFreeCount is 0,
        // this->freeObjectList must have been set to null. On the other end,
        // we stick a read barrier. We use the MemoryBarrier macro because of
        // ARM's lack of a separate read barrier.
#if defined(_M_ARM32_OR_ARM64)
#if DBG
        MemoryBarrier();
#endif
#endif

        if (remainingFreeObjectList == nullptr)
        {
            // The whole free run was consumed this cycle; fold it into the
            // uncollected-alloc-bytes heuristic.
            uint lastFreeCount = heapBlock->GetAndClearLastFreeCount();
            heapBlock->heapBucket->heapInfo->uncollectedAllocBytes += lastFreeCount * heapBlock->GetObjectSize();
            Assert(heapBlock->lastUncollectedAllocBytes == 0);
            DebugOnly(heapBlock->lastUncollectedAllocBytes = lastFreeCount * heapBlock->GetObjectSize());
        }
        else
        {
            DebugOnly(heapBlock->SetIsClearedFromAllocator(true));
        }
        this->heapBlock = nullptr;

        // 'heapBlock' here is the local copy; the member is already null.
        RECYCLER_SLOW_CHECK(heapBlock->CheckDebugFreeBitVector(false));
    }
    else if (this->freeObjectList != nullptr)
    {
        // Explicit Free Object List
#ifdef RECYCLER_MEMORY_VERIFY
        // Clear the per-object explicit-free verification bits before dropping
        // the list.
        FreeObject* freeObject = this->freeObjectList;
        while (freeObject)
        {
            HeapBlock* heapBlock = this->bucket->GetRecycler()->FindHeapBlock((void*) freeObject);
            Assert(heapBlock != nullptr);
            Assert(!heapBlock->IsLargeHeapBlock());
            TBlockType* smallBlock = (TBlockType*)heapBlock;
            smallBlock->ClearExplicitFreeBitForObject((void*) freeObject);
            freeObject = freeObject->GetNext();
        }
#endif
        this->freeObjectList = nullptr;
    }
}
// External function backed by a std-call native method; uses the std-call
// thunk rather than the regular external-function thunk.
// NOTE(review): unlike the ExternalFunctionThunk constructors, this one
// initializes deferredLength but not callCount/prototypeTypeId — confirm
// those members are unused (or unioned) for std-call functions.
JavascriptExternalFunction::JavascriptExternalFunction(StdCallJavascriptMethod entryPoint, DynamicType* type) :
    RuntimeFunction(type, &EntryInfo::StdCallExternalFunctionThunk),
    stdCallNativeMethod(entryPoint),
    signature(nullptr),
    callbackState(nullptr),
    initMethod(nullptr),
    oneBit(1),
    typeSlots(0),
    hasAccessors(0),
    flags(0),
    deferredLength(0)
{
    DebugOnly(VerifyEntryPoint());
}
// Wrapper over another external function: calls through the wrapped-function
// thunk to 'entryPoint'.
// NOTE(review): like the std-call overload, callCount/prototypeTypeId are not
// initialized here — confirm they are unused for wrapped functions.
JavascriptExternalFunction::JavascriptExternalFunction(JavascriptExternalFunction* entryPoint, DynamicType* type) :
    RuntimeFunction(type, &EntryInfo::WrappedFunctionThunk),
    wrappedMethod(entryPoint),
    callbackState(nullptr),
    initMethod(nullptr),
    oneBit(1),
    typeSlots(0),
    hasAccessors(0),
    flags(0),
    deferredLength(0)
{
    DebugOnly(VerifyEntryPoint());
}
// Async functions are built on the generator-function machinery: delegate to
// the JavascriptGeneratorFunction constructor with the async functionInfo.
JavascriptAsyncFunction::JavascriptAsyncFunction(DynamicType* type, GeneratorVirtualScriptFunction* scriptFunction) :
    JavascriptGeneratorFunction(type, &functionInfo, scriptFunction)
{
    DebugOnly(VerifyEntryPoint());
}
// Full constructor: binds the generator function to its FunctionInfo and the
// virtual script function that provides the generator body.
JavascriptGeneratorFunction::JavascriptGeneratorFunction(DynamicType* type, FunctionInfo* functionInfo, GeneratorVirtualScriptFunction* scriptFunction) :
    ScriptFunctionBase(type, functionInfo),
    scriptFunction(scriptFunction)
{
    DebugOnly(VerifyEntryPoint());
}
//
// Load persisted scope info.
//
// Restores a previously persisted scope onto 'scope': copies the saved scope
// attributes, then (when called with a parser) recreates each persisted
// symbol. When called without a parser (byte-code path) the scope is assumed
// already populated and is only pushed onto the scope stack.
void ScopeInfo::GetScopeInfo(Parser *parser, ByteCodeGenerator* byteCodeGenerator, FuncInfo* funcInfo, Scope* scope)
{
    ScriptContext* scriptContext;
    ArenaAllocator* alloc;

    // Load scope attributes and push onto scope stack.
    scope->SetIsDynamic(this->isDynamic);
    if (this->isObject)
    {
        scope->SetIsObject();
    }
    scope->SetMustInstantiate(this->mustInstantiate);
    if (!this->GetCanMergeWithBodyScope())
    {
        scope->SetCannotMergeWithBodyScope();
    }
    scope->SetHasOwnLocalInClosure(this->hasLocalInClosure);

    if (parser)
    {
        scriptContext = parser->GetScriptContext();
        alloc = parser->GetAllocator();
    }
    else
    {
        // No parser: byte-code generation path. Push the (already-populated)
        // scope and return without recreating symbols.
        TRACE_BYTECODE(_u("\nRestore ScopeInfo: %s #symbols: %d %s\n"), funcInfo->name, symbolCount, isObject ? _u("isObject") : _u(""));
        Assert(!this->isCached || scope == funcInfo->GetBodyScope());
        funcInfo->SetHasCachedScope(this->isCached);
        byteCodeGenerator->PushScope(scope);
        // The scope is already populated, so we're done.
        return;
    }

    // Load scope symbols
    // On first access to the scopeinfo, replace the ID's with PropertyRecord*'s to save the dictionary lookup
    // on later accesses. Replace them all before allocating Symbol's to prevent inconsistency on OOM.
    if (!this->areNamesCached && !PHASE_OFF1(Js::CacheScopeInfoNamesPhase))
    {
        for (int i = 0; i < symbolCount; i++)
        {
            PropertyId propertyId = GetSymbolId(i);
            if (propertyId != 0) // There may be empty slots, e.g. "arguments" may have no slot
            {
                PropertyRecord const* name = scriptContext->GetPropertyName(propertyId);
                this->SetPropertyName(i, name);
            }
        }
        this->areNamesCached = true;
    }

    for (int i = 0; i < symbolCount; i++)
    {
        PropertyRecord const* name = nullptr;
        if (this->areNamesCached)
        {
            name = this->GetPropertyName(i);
        }
        else
        {
            PropertyId propertyId = GetSymbolId(i);
            if (propertyId != 0) // There may be empty slots, e.g. "arguments" may have no slot
            {
                name = scriptContext->GetPropertyName(propertyId);
            }
        }

        if (name != nullptr)
        {
            // Recreate the symbol with its persisted type, slot, and flags.
            SymbolType symbolType = GetSymbolType(i);
            SymbolName symName(name->GetBuffer(), name->GetLength());
            Symbol *sym = Anew(alloc, Symbol, symName, nullptr, symbolType);
            sym->SetScopeSlot(static_cast<PropertyId>(i));
            sym->SetIsBlockVar(GetIsBlockVariable(i));
            if (GetHasFuncAssignment(i))
            {
                sym->RestoreHasFuncAssignment();
            }
            scope->AddNewSymbol(sym);
            if (parser)
            {
                parser->RestorePidRefForSym(sym);
            }
            TRACE_BYTECODE(_u("%12s %d\n"), sym->GetName().GetBuffer(), sym->GetScopeSlot());
        }
    }

    this->scope = scope;
    DebugOnly(scope->isRestored = true);
}
// Constructs a bound function from a Function.prototype.bind-style argument
// list: args[0] is the target function, args[1] (if present) the bound
// 'this', and the rest the bound arguments.
BoundFunction::BoundFunction(Arguments args, DynamicType * type) :
    JavascriptFunction(type, &functionInfo),
    count(0),
    boundArgs(nullptr)
{
    DebugOnly(VerifyEntryPoint());
    AssertMsg(args.Info.Count > 0, "wrong number of args in BoundFunction");

    ScriptContext *scriptContext = this->GetScriptContext();
    targetFunction = RecyclableObject::FromVar(args[0]);

    // Let proto be targetFunction.[[GetPrototypeOf]]().
    RecyclableObject* proto = JavascriptOperators::GetPrototype(targetFunction);
    if (proto != type->GetPrototype())
    {
        // Don't mutate a shared type; switch to an unshared one first.
        if (type->GetIsShared())
        {
            this->ChangeType();
            type = this->GetDynamicType();
        }
        type->SetPrototype(proto);
    }
    // If targetFunction is proxy, need to make sure that traps are called in right order as per 19.2.3.2 in RC#4 dated April 3rd 2015.
    // Here although we won't use value of length, this is just to make sure that we call traps involved with HasOwnProperty(Target, "length") and Get(Target, "length")
    if (JavascriptProxy::Is(targetFunction))
    {
        if (JavascriptOperators::HasOwnProperty(targetFunction, PropertyIds::length, scriptContext) == TRUE)
        {
            // 'len' is deliberately unused — the property reads above/below
            // exist solely to fire the proxy traps in the mandated order.
            int len = 0;
            Var varLength;
            if (targetFunction->GetProperty(targetFunction, PropertyIds::length, &varLength, nullptr, scriptContext))
            {
                len = JavascriptConversion::ToInt32(varLength, scriptContext);
            }
        }
        GetTypeHandler()->EnsureObjectReady(this);
    }

    if (args.Info.Count > 1)
    {
        boundThis = args[1];

        // function object and "this" arg
        const uint countAccountedFor = 2;
        count = args.Info.Count - countAccountedFor;

        // Store the args excluding function obj and "this" arg
        if (args.Info.Count > 2)
        {
            boundArgs = RecyclerNewArray(scriptContext->GetRecycler(), Var, count);
            for (uint i=0; i<count; i++)
            {
                boundArgs[i] = args[i+countAccountedFor];
            }
        }
    }
    else
    {
        // If no "this" is passed, "undefined" is used
        boundThis = scriptContext->GetLibrary()->GetUndefined();
    }
}
// Initializes this RecyclerSweep for a new sweep pass: notifies the host,
// zeroes and registers the sweep state, flushes pending disposed blocks,
// captures the new-heap-block lists for later merging, and decides whether
// partial collect is enabled for this sweep.
// Note: the stray #endif below closes an #if whose alternate (partial-GC)
// signature — carrying the rescanRootBytes/adjustPartialHeuristics parameters
// referenced later in this function — lies above the visible chunk.
void RecyclerSweep::BeginSweep(Recycler * recycler)
#endif
{
    {
        // We are about to sweep, give the runtime a chance to see the now-immutable state of the world.
        // And clean up all the cache not monitor by the GC (e.g. inline caches)
        AUTO_NO_EXCEPTION_REGION;
        recycler->collectionWrapper->PreSweepCallback();
    }

    Assert(!recycler->IsSweeping());
    Assert(recycler->recyclerSweep == nullptr);

    // Reset all sweep state, then register this sweep on the recycler.
    memset(this, 0, sizeof(RecyclerSweep));
    this->recycler = recycler;
    recycler->recyclerSweep = this;

    // We might still have block that has disposed but not put back into the allocable
    // heap block list yet, which happens if we finish disposing object during concurrent
    // reset mark and can't modify the heap block lists
    // CONCURRENT-TODO: Consider doing it during FinishDisposeObjects to get these block
    // available sooner as well. We will still need it here as we only always get to
    // finish dispose before sweep.
    this->FlushPendingTransferDisposedObjects();

#if ENABLE_CONCURRENT_GC
    // Take the small heap block new heap block list and store in RecyclerSweep temporary
    // We get merge later before we start sweeping the bucket.
    leafData.pendingMergeNewHeapBlockList = recycler->autoHeap.newLeafHeapBlockList;
    normalData.pendingMergeNewHeapBlockList = recycler->autoHeap.newNormalHeapBlockList;
#ifdef RECYCLER_WRITE_BARRIER
    withBarrierData.pendingMergeNewHeapBlockList = recycler->autoHeap.newNormalWithBarrierHeapBlockList;
    finalizableWithBarrierData.pendingMergeNewHeapBlockList = recycler->autoHeap.newFinalizableWithBarrierHeapBlockList;
#endif
    finalizableData.pendingMergeNewHeapBlockList = recycler->autoHeap.newFinalizableHeapBlockList;
#ifdef RECYCLER_VISITED_HOST
    recyclerVisitedHostData.pendingMergeNewHeapBlockList = recycler->autoHeap.newRecyclerVisitedHostHeapBlockList;
#endif
    mediumLeafData.pendingMergeNewHeapBlockList = recycler->autoHeap.newMediumLeafHeapBlockList;
    mediumNormalData.pendingMergeNewHeapBlockList = recycler->autoHeap.newMediumNormalHeapBlockList;
#ifdef RECYCLER_WRITE_BARRIER
    mediumWithBarrierData.pendingMergeNewHeapBlockList = recycler->autoHeap.newMediumNormalWithBarrierHeapBlockList;
    mediumFinalizableWithBarrierData.pendingMergeNewHeapBlockList = recycler->autoHeap.newMediumFinalizableWithBarrierHeapBlockList;
#endif
    mediumFinalizableData.pendingMergeNewHeapBlockList = recycler->autoHeap.newMediumFinalizableHeapBlockList;
#ifdef RECYCLER_VISITED_HOST
    mediumRecyclerVisitedHostData.pendingMergeNewHeapBlockList = recycler->autoHeap.newMediumRecyclerVisitedHostHeapBlockList;
#endif

    // The lists are now owned by this sweep; detach them from the heap so new
    // allocations start fresh lists.
    recycler->autoHeap.newLeafHeapBlockList = nullptr;
    recycler->autoHeap.newNormalHeapBlockList = nullptr;
    recycler->autoHeap.newFinalizableHeapBlockList = nullptr;
#ifdef RECYCLER_VISITED_HOST
    recycler->autoHeap.newRecyclerVisitedHostHeapBlockList = nullptr;
#endif
#ifdef RECYCLER_WRITE_BARRIER
    recycler->autoHeap.newNormalWithBarrierHeapBlockList = nullptr;
    recycler->autoHeap.newFinalizableWithBarrierHeapBlockList = nullptr;
#endif
    recycler->autoHeap.newMediumLeafHeapBlockList = nullptr;
    recycler->autoHeap.newMediumNormalHeapBlockList = nullptr;
    recycler->autoHeap.newMediumFinalizableHeapBlockList = nullptr;
#ifdef RECYCLER_VISITED_HOST
    recycler->autoHeap.newMediumRecyclerVisitedHostHeapBlockList = nullptr;
#endif
#ifdef RECYCLER_WRITE_BARRIER
    recycler->autoHeap.newMediumNormalWithBarrierHeapBlockList = nullptr;
    recycler->autoHeap.newMediumFinalizableWithBarrierHeapBlockList = nullptr;
#endif
#endif

#if ENABLE_PARTIAL_GC
    Assert(recycler->clientTrackedObjectList.Empty());

    // We should not have partialUncollectedAllocBytes unless we are in partial collect at this point
    Assert(recycler->partialUncollectedAllocBytes == 0 || recycler->inPartialCollectMode);
    Assert(recycler->autoHeap.uncollectedAllocBytes >= recycler->partialUncollectedAllocBytes);

    // if the cost of rescan is too high, we want to disable partial GC starting from the
    // upcoming Sweep. We basically move the check up from AdjustPartialHeuristics to here
    // such that we can have the decision before sweep.
    this->rescanRootBytes = rescanRootBytes;
    RECYCLER_STATS_SET(recycler, rescanRootBytes, rescanRootBytes);
    if (this->DoPartialCollectMode())
    {
        // enable partial collect for sweep & next round of GC
        DebugOnly(this->partial = true);
        // REVIEW: is adjustPartialHeuristicsMode the same as in PartialCollectMode?
        this->adjustPartialHeuristics = adjustPartialHeuristics;
        this->StartPartialCollectMode();
    }
    else
    {
        // disable partial collect
        if (recycler->inPartialCollectMode)
        {
            recycler->FinishPartialCollect();
        }
        Assert(recycler->partialUncollectedAllocBytes == 0);
        Assert(!recycler->inPartialCollectMode);
    }

    if (this->inPartialCollect)
    {
        // We just did a partial collect.
        // We only want to count objects that survived this collect towards the next full GC.
        // Thus, clear out uncollectedAllocBytes here; we will adjust to account for objects that
        // survived this partial collect in EndSweep.
        recycler->ResetHeuristicCounters();
    }
    else
#endif
    {
        Assert(!this->inPartialCollect);
        // We just did a full collect.
        // We reset uncollectedAllocBytes when we kicked off the collection,
        // so don't reset it here (but do reset partial heuristics).
        recycler->ResetPartialHeuristicCounters();
    }
}