/** Replacement for malloc. */
static void *rtMemReplacementMalloc(size_t cb)
{
    /* Round the request up per the replacement heap's alignment policy and
       try the IPRT tracking allocator first, tagging the block with the
       caller's return address for diagnostics. */
    size_t const cbAligned = RTMEM_REPLACMENT_ALIGN(cb);
    void *pvRet = rtR3MemAlloc("r-malloc", RTMEMTYPE_RTMEMALLOC, cb, cbAligned, "heap", ASMReturnAddress(), RT_SRC_POS);
    if (pvRet)
        return pvRet;
    /* Fall back to the original (un-hooked) malloc on failure. */
    return g_pfnOrgMalloc(cb);
}
/**
 * Enters the underlying critical section, i.e. acquires the write lock.
 *
 * LOCKVAL_SRC_POS_DECL carries the caller's source position when lock
 * validation is compiled in; otherwise it expands to nothing.
 */
/*virtual*/ void WriteLockHandle::lockWrite(LOCKVAL_SRC_POS_DECL)
{
#ifdef VBOX_WITH_MAIN_LOCK_VALIDATION
    /* Validation builds pass the caller's return address and source position
       so the IPRT lock validator can report lock-order violations. */
    RTCritSectEnterDebug(&m->sem, (uintptr_t)ASMReturnAddress(), RT_SRC_POS_ARGS);
#else
    RTCritSectEnter(&m->sem);
#endif
}
/**
 * Acquires the lock in shared (read) mode.
 *
 * The backing primitive is selected at compile time: an RTCritSectRw when
 * GLUE_USE_CRITSECTRW is defined, otherwise an RTSemRW (waited on with
 * RT_INDEFINITE_WAIT, i.e. blocking until acquired).
 */
/*virtual*/ void RWLockHandle::lockRead(LOCKVAL_SRC_POS_DECL)
{
#ifdef GLUE_USE_CRITSECTRW
# ifdef VBOX_WITH_MAIN_LOCK_VALIDATION
    /* Validation builds record the caller's return address and source
       position for the lock validator. */
    int vrc = RTCritSectRwEnterSharedDebug(&m->CritSect, (uintptr_t)ASMReturnAddress(), RT_SRC_POS_ARGS);
# else
    int vrc = RTCritSectRwEnterShared(&m->CritSect);
# endif
#else
# ifdef VBOX_WITH_MAIN_LOCK_VALIDATION
    int vrc = RTSemRWRequestReadDebug(m->sem, RT_INDEFINITE_WAIT, (uintptr_t)ASMReturnAddress(), RT_SRC_POS_ARGS);
# else
    int vrc = RTSemRWRequestRead(m->sem, RT_INDEFINITE_WAIT);
# endif
#endif
    /* Acquisition is expected to succeed; only assert in strict builds. */
    AssertRC(vrc);
}
/** Replacement for free(). */ static void rtMemReplacementFree(void *pv) { if (pv) { /* We're not strict about where the memory was allocated. */ PRTMEMBLOCK pBlock = rtmemBlockGet(pv); if (pBlock) rtR3MemFree("r-free", RTMEMTYPE_RTMEMFREE, pv, ASMReturnAddress(), RT_SRC_POS); else g_pfnOrgFree(pv); } }
/**
 * Free memory allocated using MMHyperAlloc().
 * The caller validates the parameters of this request.
 *
 * @returns VBox status code.
 * @param   pVM     The VM to operate on.
 * @param   pv      The memory to free.
 * @remark  Try avoid free hyper memory.
 */
static int mmHyperFreeInternal(PVM pVM, void *pv)
{
    Log2(("MMHyperFree: pv=%p\n", pv));
    /* Freeing NULL is a successful no-op. */
    if (!pv)
        return VINF_SUCCESS;
    AssertMsgReturn(RT_ALIGN_P(pv, MMHYPER_HEAP_ALIGN_MIN) == pv,
                    ("Invalid pointer %p!\n", pv),
                    VERR_INVALID_POINTER);

    /*
     * Get the heap and stats.
     * Validate the chunk at the same time.
     */
    /* The chunk header immediately precedes the user pointer. */
    PMMHYPERCHUNK pChunk = (PMMHYPERCHUNK)((PMMHYPERCHUNK)pv - 1);

    /* NOTE(review): this condition is satisfied whenever offNext doesn't
       overflow OR is misaligned; it looks like the intent may have been
       '&&' with '==' (no overflow AND properly aligned) — verify before
       relying on this check. Left as-is. */
    AssertMsgReturn(    (uintptr_t)pChunk + pChunk->offNext >= (uintptr_t)pChunk
                    ||  RT_ALIGN_32(pChunk->offNext, MMHYPER_HEAP_ALIGN_MIN) != pChunk->offNext,
                    ("%p: offNext=%#RX32\n", pv, pChunk->offNext),
                    VERR_INVALID_POINTER);

    /* Double frees / freeing of free chunks are rejected here. */
    AssertMsgReturn(MMHYPERCHUNK_ISUSED(pChunk),
                    ("%p: Not used!\n", pv),
                    VERR_INVALID_POINTER);

    /* offPrev is a non-positive offset to the previous chunk; check direction
       and alignment of its magnitude. */
    int32_t offPrev = MMHYPERCHUNK_GET_OFFPREV(pChunk);
    AssertMsgReturn(    (uintptr_t)pChunk + offPrev <= (uintptr_t)pChunk
                    && !((uint32_t)-offPrev & (MMHYPER_HEAP_ALIGN_MIN - 1)),
                    ("%p: offPrev=%#RX32!\n", pv, offPrev),
                    VERR_INVALID_POINTER);

    /* statistics */
#ifdef VBOX_WITH_STATISTICS
    /* Used chunks must carry a valid (non-zero, aligned) offset to their
       per-tag statistics record in statistics builds... */
    PMMHYPERSTAT pStat = (PMMHYPERSTAT)((uintptr_t)pChunk + pChunk->offStat);
    AssertMsgReturn(    RT_ALIGN_P(pStat, MMHYPER_HEAP_ALIGN_MIN) == (void *)pStat
                    && pChunk->offStat,
                    ("%p: offStat=%#RX32!\n", pv, pChunk->offStat),
                    VERR_INVALID_POINTER);
#else
    /* ...and must have no statistics offset otherwise. */
    AssertMsgReturn(!pChunk->offStat,
                    ("%p: offStat=%#RX32!\n", pv, pChunk->offStat),
                    VERR_INVALID_POINTER);
#endif

    /* The heap structure.  offHeap points from the chunk back to the owning
       heap; the heap must be page aligned and carry the right magic. */
    PMMHYPERHEAP pHeap = (PMMHYPERHEAP)((uintptr_t)pChunk + pChunk->offHeap);
    AssertMsgReturn(    !((uintptr_t)pHeap & PAGE_OFFSET_MASK)
                    &&  pChunk->offHeap,
                    ("%p: pHeap=%#x offHeap=%RX32\n", pv, pHeap->u32Magic, pChunk->offHeap),
                    VERR_INVALID_POINTER);
    AssertMsgReturn(pHeap->u32Magic == MMHYPERHEAP_MAGIC,
                    ("%p: u32Magic=%#x\n", pv, pHeap->u32Magic),
                    VERR_INVALID_POINTER);
    /* Sanity: there is only one hyper heap per VM in the current context. */
    Assert(pHeap == pVM->mm.s.CTX_SUFF(pHyperHeap));

    /* Some more verifications using additional info from pHeap. */
    AssertMsgReturn((uintptr_t)pChunk + offPrev >= (uintptr_t)pHeap->CTX_SUFF(pbHeap),
                    ("%p: offPrev=%#RX32!\n", pv, offPrev),
                    VERR_INVALID_POINTER);
    AssertMsgReturn(pChunk->offNext < pHeap->cbHeap,
                    ("%p: offNext=%#RX32!\n", pv, pChunk->offNext),
                    VERR_INVALID_POINTER);
    /* The pointer must lie within the heap's page-aligned area. */
    AssertMsgReturn(   (uintptr_t)pv - (uintptr_t)pHeap->CTX_SUFF(pbHeap) <= pHeap->offPageAligned,
                    ("Invalid pointer %p! (heap: %p-%p)\n", pv, pHeap->CTX_SUFF(pbHeap),
                     (char *)pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned),
                    VERR_INVALID_POINTER);

#ifdef MMHYPER_HEAP_STRICT
    /* Strict builds walk/verify the whole heap before mutating it. */
    mmHyperHeapCheck(pHeap);
#endif

#if defined(VBOX_WITH_STATISTICS) || defined(MMHYPER_HEAP_FREE_POISON)
    /* calc block size.  offNext == 0 means this is the last chunk, so the
       size runs to the end of the page-aligned heap area. */
    const uint32_t cbChunk = pChunk->offNext
        ? pChunk->offNext
        : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
#endif
#ifdef MMHYPER_HEAP_FREE_POISON
    /* poison the block */
    memset(pChunk + 1, MMHYPER_HEAP_FREE_POISON, cbChunk - sizeof(*pChunk));
#endif

#ifdef MMHYPER_HEAP_FREE_DELAY
# ifdef MMHYPER_HEAP_FREE_POISON
    /*
     * Check poison.
     */
    /* Verify that no delayed-freed block was written to after it was
       poisoned (0xCB is presumably the poison byte value — matches the
       memset above only if MMHYPER_HEAP_FREE_POISON == 0xCB; confirm). */
    unsigned i = RT_ELEMENTS(pHeap->aDelayedFrees);
    while (i-- > 0)
        if (pHeap->aDelayedFrees[i].offChunk)
        {
            PMMHYPERCHUNK pCur = (PMMHYPERCHUNK)((uintptr_t)pHeap + pHeap->aDelayedFrees[i].offChunk);
            const size_t cb = pCur->offNext
                ? pCur->offNext - sizeof(*pCur)
                : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pCur - sizeof(*pCur);
            uint8_t *pab = (uint8_t *)(pCur + 1);
            for (unsigned off = 0; off < cb; off++)
                AssertReleaseMsg(pab[off] == 0xCB,
                                 ("caller=%RTptr cb=%#zx off=%#x: %.*Rhxs\n",
                                  pHeap->aDelayedFrees[i].uCaller, cb, off, RT_MIN(cb - off, 32), &pab[off]));
        }
# endif /* MMHYPER_HEAP_FREE_POISON */

    /*
     * Delayed freeing.
     */
    /* Ring buffer of pending frees: actually free the chunk this slot held,
       then park the current chunk in the slot and advance the cursor. */
    int rc = VINF_SUCCESS;
    if (pHeap->aDelayedFrees[pHeap->iDelayedFree].offChunk)
    {
        PMMHYPERCHUNK pChunkFree = (PMMHYPERCHUNK)((uintptr_t)pHeap + pHeap->aDelayedFrees[pHeap->iDelayedFree].offChunk);
        rc = mmHyperFree(pHeap, pChunkFree);
    }
    pHeap->aDelayedFrees[pHeap->iDelayedFree].offChunk = (uintptr_t)pChunk - (uintptr_t)pHeap;
    pHeap->aDelayedFrees[pHeap->iDelayedFree].uCaller  = (uintptr_t)ASMReturnAddress();
    pHeap->iDelayedFree = (pHeap->iDelayedFree + 1) % RT_ELEMENTS(pHeap->aDelayedFrees);

#else  /* !MMHYPER_HEAP_FREE_POISON */
    /*
     * Call the worker.
     */
    int rc = mmHyperFree(pHeap, pChunk);
#endif /* !MMHYPER_HEAP_FREE_POISON */

    /*
     * Update statistics.
     */
#ifdef VBOX_WITH_STATISTICS
    /* Note: with delayed freeing the stats are updated as if the free
       happened now, even though the worker runs later for this chunk. */
    pStat->cFrees++;
    if (RT_SUCCESS(rc))
    {
        pStat->cbFreed        += cbChunk;
        pStat->cbCurAllocated -= cbChunk;
    }
    else
        pStat->cFailures++;
#endif

    return rc;
}
/** Replacement for realloc. */
static void *rtMemReplacementRealloc(void *pvOld, size_t cbNew)
{
    /* realloc(NULL, cb) degenerates into a plain allocation. */
    if (!pvOld)
        return rtMemReplacementMalloc(cbNew);

    /* We're not strict about where the memory was allocated; blocks the
       tracking heap doesn't know about go to the original realloc. */
    PRTMEMBLOCK pBlock = rtmemBlockGet(pvOld);
    if (!pBlock)
        return g_pfnOrgRealloc(pvOld, cbNew);

    /* Tracked block: resize via the IPRT worker with the aligned size. */
    size_t const cbAligned = RTMEM_REPLACMENT_ALIGN(cbNew);
    return rtR3MemRealloc("r-realloc", RTMEMTYPE_RTMEMREALLOC, pvOld, cbAligned, "heap", ASMReturnAddress(), RT_SRC_POS);
}
/** Replacement for calloc. */
static void *rtMemReplacementCalloc(size_t cbItem, size_t cItems)
{
    /* A conforming calloc must fail when cbItem * cItems overflows size_t;
       the unchecked multiply would otherwise silently hand out a zeroed
       block that is far too small. */
    if (cbItem > 0 && cItems > ~(size_t)0 / cbItem)
        return NULL;
    size_t cb = cbItem * cItems;
    size_t cbAligned = RTMEM_REPLACMENT_ALIGN(cb);
    /* Try the zeroing IPRT tracking allocator first... */
    void *pv = rtR3MemAlloc("r-calloc", RTMEMTYPE_RTMEMALLOCZ, cb, cbAligned, "heap", ASMReturnAddress(), RT_SRC_POS);
    if (!pv)
        pv = g_pfnOrgCalloc(cbItem, cItems); /* ...and fall back to the original calloc. */
    return pv;
}