/**
 * Decide whether the object currently addressed by _scanPtr should be
 * returned by the iterator, caching its classification as a side effect.
 *
 * Side effects: sets _isDeadObject; when the object is dead, also sets
 * _isSingleSlotHole and _deadObjectSize.
 *
 * @return true for a live object; _includeDeadObjects for a dead object
 *         (hole); _includeForwardedObjects for a strictly forwarded object
 *         (concurrent scavenger builds only); false when the scan pointer
 *         has reached _scanPtrTop.
 */
bool
GC_ObjectHeapIteratorAddressOrderedList::shouldReturnCurrentObject() {
	if(_scanPtr < _scanPtrTop) {
#if defined(OMR_VALGRIND_MEMCHECK)
		/* Under memcheck, heap memory not registered as a live object is
		 * marked no-access. Temporarily mark the header defined around each
		 * read below, then restore no-access, so the peek does not raise a
		 * Valgrind error. */
		bool scanPtrObjExists = valgrindCheckObjectInPool(_extensions,(uintptr_t) _scanPtr);
		if(!scanPtrObjExists) valgrindMakeMemDefined(((uintptr_t) _scanPtr),sizeof(omrobjectptr_t));
#endif /* defined(OMR_VALGRIND_MEMCHECK) */		
		_isDeadObject = _extensions->objectModel.isDeadObject(_scanPtr);
#if defined(OMR_VALGRIND_MEMCHECK)
		if(!scanPtrObjExists) valgrindMakeMemNoaccess(((uintptr_t) _scanPtr),sizeof(omrobjectptr_t));
#endif /* defined(OMR_VALGRIND_MEMCHECK) */		
		if (_isDeadObject) {
#if defined(OMR_VALGRIND_MEMCHECK)			
	/* Second header peek (single-slot check) needs the same defined/no-access bracket. */
	if(!scanPtrObjExists) valgrindMakeMemDefined(((uintptr_t) _scanPtr),sizeof(omrobjectptr_t));
#endif /* defined(OMR_VALGRIND_MEMCHECK) */					
			_isSingleSlotHole = _extensions->objectModel.isSingleSlotDeadObject(_scanPtr);
#if defined(OMR_VALGRIND_MEMCHECK)			
	if(!scanPtrObjExists) valgrindMakeMemNoaccess(((uintptr_t) _scanPtr),sizeof(omrobjectptr_t));
#endif /* defined(OMR_VALGRIND_MEMCHECK) */					
			_deadObjectSize = computeDeadObjectSize();
			/* Dead objects (holes) are yielded only if the caller asked for them. */
			return _includeDeadObjects;
#if defined(OMR_GC_CONCURRENT_SCAVENGER)
		} else if (MM_ForwardedHeader(_scanPtr).isStrictlyForwardedPointer()) {
			/* Object has been moved; yield the stale copy only if requested. */
			return _includeForwardedObjects;
#endif /* OMR_GC_CONCURRENT_SCAVENGER */
		} else {
			/* Ordinary live object: always returned. */
			return true;
		}
	}

	/* Scan pointer has passed the end of the scannable region. */
	return false;
}
/* (removed extraction artifact: "示例#2" / "0" example-separator lines, not code) */
/**
 * Drop Valgrind tracking for every registered object that falls inside the
 * given heap range, then mark the whole range as no-access.
 *
 * @param extensions  GC extensions holding the memcheck hash table and its mutex.
 * @param baseAddress first byte of the range being cleared.
 * @param size        number of bytes in the range; zero is a no-op.
 */
void valgrindClearRange(MM_GCExtensionsBase *extensions, uintptr_t baseAddress, uintptr_t size)
{
    if (0 == size)
    {
        return;
    }
    const uintptr_t lastByteAddr = baseAddress + size - 1;

#if defined(VALGRIND_REQUEST_LOGS)
    VALGRIND_PRINTF_BACKTRACE("Clearing objects in range b/w 0x%lx and  0x%lx\n", baseAddress, lastByteAddr);
#endif /* defined(VALGRIND_REQUEST_LOGS) */

    /* Walk the tracking table under its mutex, freeing every tracked
     * object whose address lies within [baseAddress, lastByteAddr]. */
    MUTEX_ENTER(extensions->memcheckHashTableMutex);
    GC_HashTableIterator tableIterator(extensions->memcheckHashTable);
    for (uintptr_t *slot = (uintptr_t *)tableIterator.nextSlot();
         NULL != slot;
         slot = (uintptr_t *)tableIterator.nextSlot())
    {
        const uintptr_t objectAddr = *slot;
        if ((objectAddr >= baseAddress) && (objectAddr <= lastByteAddr))
        {
            valgrindFreeObjectDirect(extensions, objectAddr);
            tableIterator.removeSlot();
        }
    }
    MUTEX_EXIT(extensions->memcheckHashTableMutex);

    /* Valgrind already marks freed objects no-access; covering the whole
     * region also hides any bytes that were never tracked as objects. */
    valgrindMakeMemNoaccess(baseAddress, size);
}
/**
 * @see GC_ObjectHeapIterator::nextObjectNoAdvance()
 *
 * Returns the next object that satisfies shouldReturnCurrentObject(),
 * advancing _scanPtr past each object/hole examined, or NULL when the
 * scan reaches _scanPtrTop. The very first call may return the object
 * _scanPtr already addresses, without advancing.
 */
omrobjectptr_t
GC_ObjectHeapIteratorAddressOrderedList::nextObjectNoAdvance() {
	/* First call: try the current position before any advancement. */
	if(!_pastFirstObject) {
		_pastFirstObject = true;
		if (shouldReturnCurrentObject()) {
			return _scanPtr;
		}
	}

#if defined(OMR_VALGRIND_MEMCHECK)			
	bool scanPtrObjExists;
#endif /* defined(OMR_VALGRIND_MEMCHECK) */						
	while(_scanPtr < _scanPtrTop) {
#if defined(OMR_VALGRIND_MEMCHECK)				
		/* Under memcheck, untracked heap memory is no-access: bracket the
		 * header reads below with make-defined / restore-no-access. */
		scanPtrObjExists = valgrindCheckObjectInPool(_extensions,(uintptr_t) _scanPtr);
		if(!scanPtrObjExists) valgrindMakeMemDefined(((uintptr_t) _scanPtr),sizeof(omrobjectptr_t));
#endif /* defined(OMR_VALGRIND_MEMCHECK) */							
		/* These flags were set before we returned the last object, but the object might have changed. */
		_isDeadObject = _extensions->objectModel.isDeadObject(_scanPtr);
		_isSingleSlotHole = _isDeadObject ? _extensions->objectModel.isSingleSlotDeadObject(_scanPtr) : false;
#if defined(OMR_VALGRIND_MEMCHECK)				
	if(!scanPtrObjExists) valgrindMakeMemNoaccess(((uintptr_t) _scanPtr),sizeof(omrobjectptr_t));
#endif /* defined(OMR_VALGRIND_MEMCHECK) */					
		
		if(!_isDeadObject) {
#if defined(OMR_GC_CONCURRENT_SCAVENGER)
			MM_ForwardedHeader header(_scanPtr);
			if (header.isStrictlyForwardedPointer()) {
				/* Forwarded object: its pre-move size must be read from the
				 * forwarded (destination) copy to step over the old location. */
				uintptr_t sizeInBytesBeforeMove = _extensions->objectModel.getConsumedSizeInBytesWithHeaderBeforeMove(header.getForwardedObject());
				_scanPtr = (omrobjectptr_t) ( ((uintptr_t)_scanPtr) + sizeInBytesBeforeMove );
			} else
#endif /* OMR_GC_CONCURRENT_SCAVENGER */
			{
				/* either regular object, or self forwarded */
						_scanPtr = (omrobjectptr_t) ( ((uintptr_t)_scanPtr) + _extensions->objectModel.getConsumedSizeInBytesWithHeader(_scanPtr) );
				}
		} else {
			/* Dead object (hole): step over it using the hole's recorded size. */
			_deadObjectSize = computeDeadObjectSize();
			advanceScanPtr( _deadObjectSize );
		}

		if (shouldReturnCurrentObject()) {
			return _scanPtr;
		}
	}

	/* Iteration exhausted. */
	return NULL;
}
/* (removed extraction artifact: "示例#4" / "0" example-separator lines, not code) */
/**
 * Notify interested parties that a memory range has been added to the heap.
 *
 * Forwards the range to the global collector (if one exists) and to the
 * identity-hash bookkeeping; under Valgrind memcheck the fresh memory is
 * marked no-access since it holds no live objects yet.
 *
 * @param env         calling thread's environment.
 * @param subspace    memory subspace the range belongs to.
 * @param size        size of the range in bytes.
 * @param lowAddress  lowest address of the range.
 * @param highAddress highest address of the range.
 * @return the global collector's verdict, or true when no collector is set.
 */
bool
MM_HeapVirtualMemory::heapAddRange(MM_EnvironmentBase* env, MM_MemorySubSpace* subspace, uintptr_t size, void* lowAddress, void* highAddress)
{
	bool result = true;

	/* The global collector gets first notification; its verdict is the result. */
	MM_Collector* collector = env->getExtensions()->getGlobalCollector();
	if (NULL != collector) {
		result = collector->heapAddRange(env, subspace, size, lowAddress, highAddress);
	}

	env->getExtensions()->identityHashDataAddRange(env, subspace, size, lowAddress, highAddress);

#if defined(OMR_VALGRIND_MEMCHECK)
	/* Newly added heap memory contains no objects yet; hide it from access. */
	valgrindMakeMemNoaccess((uintptr_t)lowAddress, size);
#endif /* defined(OMR_VALGRIND_MEMCHECK) */

	return result;
}