/**
 * Create a User-kernel heap.
 *
 * This does not require SUPLib to be initialized as we'll lazily allocate the
 * kernel accessible memory on the first alloc call.
 *
 * @returns VBox status.
 * @param   pUVM    The user mode VM structure the heap should be associated
 *                  with.
 * @param   ppHeap  Where to store the heap pointer on success.
 */
int mmR3UkHeapCreateU(PUVM pUVM, PMMUKHEAP *ppHeap)
{
    PMMUKHEAP pHeap = (PMMUKHEAP)MMR3HeapAllocZU(pUVM, MM_TAG_MM, sizeof(MMUKHEAP));
    if (pHeap)
    {
        int rc = RTCritSectInit(&pHeap->Lock);
        if (RT_SUCCESS(rc))
        {
            /*
             * Initialize the global stat record.
             */
            pHeap->pUVM = pUVM;
#ifdef MMUKHEAP_WITH_STATISTICS
            PMMUKHEAPSTAT pStat = &pHeap->Stat;
            STAMR3RegisterU(pUVM, &pStat->cAllocations,   STAMTYPE_U64, STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cAllocations",   STAMUNIT_CALLS, "Number or MMR3UkHeapAlloc() calls.");
            STAMR3RegisterU(pUVM, &pStat->cReallocations, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cReallocations", STAMUNIT_CALLS, "Number of MMR3UkHeapRealloc() calls.");
            STAMR3RegisterU(pUVM, &pStat->cFrees,         STAMTYPE_U64, STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cFrees",         STAMUNIT_CALLS, "Number of MMR3UkHeapFree() calls.");
            STAMR3RegisterU(pUVM, &pStat->cFailures,      STAMTYPE_U64, STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cFailures",      STAMUNIT_COUNT, "Number of failures.");
            STAMR3RegisterU(pUVM, &pStat->cbCurAllocated,
                            sizeof(pStat->cbCurAllocated) == sizeof(uint32_t) ? STAMTYPE_U32 : STAMTYPE_U64,
                            STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cbCurAllocated", STAMUNIT_BYTES, "Number of bytes currently allocated.");
            STAMR3RegisterU(pUVM, &pStat->cbAllocated,    STAMTYPE_U64, STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cbAllocated",    STAMUNIT_BYTES, "Total number of bytes allocated.");
            STAMR3RegisterU(pUVM, &pStat->cbFreed,        STAMTYPE_U64, STAMVISIBILITY_ALWAYS, "/MM/UkHeap/cbFreed",        STAMUNIT_BYTES, "Total number of bytes freed.");
#endif
            *ppHeap = pHeap;
            return VINF_SUCCESS;
        }

        /* Propagate the critsect init status instead of falling through to the
           out-of-memory path (which asserted with a misleading message and
           returned VERR_NO_MEMORY). */
        AssertRC(rc);
        MMR3HeapFree(pHeap);
        return rc;
    }
    AssertMsgFailed(("failed to allocate heap structure\n"));
    return VERR_NO_MEMORY;
}
/**
 * EMT worker function for DBGFR3OSRegister.
 *
 * @returns VBox status code.
 * @param   pUVM    The user mode VM handle.
 * @param   pReg    The registration structure.
 */
static DECLCALLBACK(int) dbgfR3OSRegister(PUVM pUVM, PDBGFOSREG pReg)
{
    /* more validations. */
    DBGF_OS_READ_LOCK(pUVM);
    PDBGFOS pOS;
    for (pOS = pUVM->dbgf.s.pOSHead; pOS; pOS = pOS->pNext)
        if (!strcmp(pOS->pReg->szName, pReg->szName))
        {
            DBGF_OS_READ_UNLOCK(pUVM);
            Log(("dbgfR3OSRegister: %s -> VERR_ALREADY_LOADED\n", pReg->szName));
            return VERR_ALREADY_LOADED;
        }
    DBGF_OS_READ_UNLOCK(pUVM);

    /*
     * Allocate a new structure, call the constructor and link it into the list.
     */
    pOS = (PDBGFOS)MMR3HeapAllocZU(pUVM, MM_TAG_DBGF_OS, RT_OFFSETOF(DBGFOS, abData[pReg->cbData]));
    AssertReturn(pOS, VERR_NO_MEMORY);
    pOS->pReg = pReg;

    int rc = pOS->pReg->pfnConstruct(pUVM, pOS->abData);
    if (RT_SUCCESS(rc))
    {
        DBGF_OS_WRITE_LOCK(pUVM);
        pOS->pNext = pUVM->dbgf.s.pOSHead;
        pUVM->dbgf.s.pOSHead = pOS;
        DBGF_OS_WRITE_UNLOCK(pUVM);
    }
    else
    {
        if (pOS->pReg->pfnDestruct)
            pOS->pReg->pfnDestruct(pUVM, pOS->abData);
        MMR3HeapFree(pOS);
        /* Previously fell through and returned VINF_SUCCESS even though the
           constructor failed and pOS was freed - report the real status. */
        return rc;
    }

    return VINF_SUCCESS;
}
/** * Allocate memory from the heap. * * @returns Pointer to allocated memory. * @param pHeap Heap handle. * @param enmTag Statistics tag. Statistics are collected on a per tag * basis in addition to a global one. Thus we can easily * identify how memory is used by the VM. * @param cb Size of the block. * @param fZero Whether or not to zero the memory block. * @param pR0Ptr Where to return the ring-0 pointer. */ static void *mmR3UkHeapAlloc(PMMUKHEAP pHeap, MMTAG enmTag, size_t cb, bool fZero, PRTR0PTR pR0Ptr) { if (pR0Ptr) *pR0Ptr = NIL_RTR0PTR; RTCritSectEnter(&pHeap->Lock); #ifdef MMUKHEAP_WITH_STATISTICS /* * Find/alloc statistics nodes. */ pHeap->Stat.cAllocations++; PMMUKHEAPSTAT pStat = (PMMUKHEAPSTAT)RTAvlULGet(&pHeap->pStatTree, (AVLULKEY)enmTag); if (pStat) pStat->cAllocations++; else { pStat = (PMMUKHEAPSTAT)MMR3HeapAllocZU(pHeap->pUVM, MM_TAG_MM, sizeof(MMUKHEAPSTAT)); if (!pStat) { pHeap->Stat.cFailures++; AssertMsgFailed(("Failed to allocate heap stat record.\n")); RTCritSectLeave(&pHeap->Lock); return NULL; } pStat->Core.Key = (AVLULKEY)enmTag; RTAvlULInsert(&pHeap->pStatTree, &pStat->Core); pStat->cAllocations++; /* register the statistics */ PUVM pUVM = pHeap->pUVM; const char *pszTag = mmGetTagName(enmTag); STAMR3RegisterFU(pUVM, &pStat->cbCurAllocated, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Number of bytes currently allocated.", "/MM/UkHeap/%s", pszTag); STAMR3RegisterFU(pUVM, &pStat->cAllocations, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS, "Number or MMR3UkHeapAlloc() calls.", "/MM/UkHeap/%s/cAllocations", pszTag); STAMR3RegisterFU(pUVM, &pStat->cReallocations, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS, "Number of MMR3UkHeapRealloc() calls.", "/MM/UkHeap/%s/cReallocations", pszTag); STAMR3RegisterFU(pUVM, &pStat->cFrees, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS, "Number of MMR3UkHeapFree() calls.", "/MM/UkHeap/%s/cFrees", pszTag); STAMR3RegisterFU(pUVM, &pStat->cFailures, STAMTYPE_U64, 
STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Number of failures.", "/MM/UkHeap/%s/cFailures", pszTag); STAMR3RegisterFU(pUVM, &pStat->cbAllocated, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Total number of bytes allocated.", "/MM/UkHeap/%s/cbAllocated", pszTag); STAMR3RegisterFU(pUVM, &pStat->cbFreed, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Total number of bytes freed.", "/MM/UkHeap/%s/cbFreed", pszTag); } #endif /* * Validate input. */ if (cb == 0) { #ifdef MMUKHEAP_WITH_STATISTICS pStat->cFailures++; pHeap->Stat.cFailures++; #endif RTCritSectLeave(&pHeap->Lock); return NULL; } /* * Allocate heap block. */ cb = RT_ALIGN_Z(cb, MMUKHEAP_SIZE_ALIGNMENT); void *pv = NULL; PMMUKHEAPSUB pSubHeapPrev = NULL; PMMUKHEAPSUB pSubHeap = pHeap->pSubHeapHead; while (pSubHeap) { if (fZero) pv = RTHeapSimpleAllocZ(pSubHeap->hSimple, cb, MMUKHEAP_SIZE_ALIGNMENT); else pv = RTHeapSimpleAlloc(pSubHeap->hSimple, cb, MMUKHEAP_SIZE_ALIGNMENT); if (pv) { /* Move the sub-heap with free memory to the head. */ if (pSubHeapPrev) { pSubHeapPrev->pNext = pSubHeap->pNext; pSubHeap->pNext = pHeap->pSubHeapHead; pHeap->pSubHeapHead = pSubHeap; } break; } pSubHeapPrev = pSubHeap; pSubHeap = pSubHeap->pNext; } if (RT_UNLIKELY(!pv)) { /* * Add another sub-heap. 
*/ pSubHeap = mmR3UkHeapAddSubHeap(pHeap, RT_MAX(RT_ALIGN_Z(cb, PAGE_SIZE) + PAGE_SIZE * 16, _256K)); if (pSubHeap) { if (fZero) pv = RTHeapSimpleAllocZ(pSubHeap->hSimple, cb, MMUKHEAP_SIZE_ALIGNMENT); else pv = RTHeapSimpleAlloc(pSubHeap->hSimple, cb, MMUKHEAP_SIZE_ALIGNMENT); } if (RT_UNLIKELY(!pv)) { AssertMsgFailed(("Failed to allocate heap block %d, enmTag=%x(%.4s).\n", cb, enmTag, &enmTag)); #ifdef MMUKHEAP_WITH_STATISTICS pStat->cFailures++; pHeap->Stat.cFailures++; #endif RTCritSectLeave(&pHeap->Lock); return NULL; } } /* * Update statistics */ #ifdef MMUKHEAP_WITH_STATISTICS size_t cbActual = RTHeapSimpleSize(pSubHeap->hSimple, pv); pStat->cbAllocated += cbActual; pStat->cbCurAllocated += cbActual; pHeap->Stat.cbAllocated += cbActual; pHeap->Stat.cbCurAllocated += cbActual; #endif if (pR0Ptr) *pR0Ptr = (uintptr_t)pv - (uintptr_t)pSubHeap->pv + pSubHeap->pvR0; RTCritSectLeave(&pHeap->Lock); return pv; }
/** * Walks the entire stack allocating memory as we walk. */ static DECLCALLBACK(int) dbgfR3StackWalkCtxFull(PUVM pUVM, VMCPUID idCpu, PCCPUMCTXCORE pCtxCore, RTDBGAS hAs, DBGFCODETYPE enmCodeType, PCDBGFADDRESS pAddrFrame, PCDBGFADDRESS pAddrStack, PCDBGFADDRESS pAddrPC, DBGFRETURNTYPE enmReturnType, PCDBGFSTACKFRAME *ppFirstFrame) { /* alloc first frame. */ PDBGFSTACKFRAME pCur = (PDBGFSTACKFRAME)MMR3HeapAllocZU(pUVM, MM_TAG_DBGF_STACK, sizeof(*pCur)); if (!pCur) return VERR_NO_MEMORY; /* * Initialize the frame. */ pCur->pNextInternal = NULL; pCur->pFirstInternal = pCur; int rc = VINF_SUCCESS; if (pAddrPC) pCur->AddrPC = *pAddrPC; else if (enmCodeType != DBGFCODETYPE_GUEST) DBGFR3AddrFromFlat(pUVM, &pCur->AddrPC, pCtxCore->rip); else rc = DBGFR3AddrFromSelOff(pUVM, idCpu, &pCur->AddrPC, pCtxCore->cs.Sel, pCtxCore->rip); if (RT_SUCCESS(rc)) { if (enmReturnType == DBGFRETURNTYPE_INVALID) switch (pCur->AddrPC.fFlags & DBGFADDRESS_FLAGS_TYPE_MASK) { case DBGFADDRESS_FLAGS_FAR16: pCur->enmReturnType = DBGFRETURNTYPE_NEAR16; break; case DBGFADDRESS_FLAGS_FAR32: pCur->enmReturnType = DBGFRETURNTYPE_NEAR32; break; case DBGFADDRESS_FLAGS_FAR64: pCur->enmReturnType = DBGFRETURNTYPE_NEAR64; break; case DBGFADDRESS_FLAGS_RING0: pCur->enmReturnType = HC_ARCH_BITS == 64 ? DBGFRETURNTYPE_NEAR64 : DBGFRETURNTYPE_NEAR32; break; default: pCur->enmReturnType = DBGFRETURNTYPE_NEAR32; break; /// @todo 64-bit guests } uint64_t fAddrMask; if (enmCodeType == DBGFCODETYPE_RING0) fAddrMask = HC_ARCH_BITS == 64 ? 
UINT64_MAX : UINT32_MAX; else if (enmCodeType == DBGFCODETYPE_HYPER) fAddrMask = UINT32_MAX; else if (DBGFADDRESS_IS_FAR16(&pCur->AddrPC)) fAddrMask = UINT16_MAX; else if (DBGFADDRESS_IS_FAR32(&pCur->AddrPC)) fAddrMask = UINT32_MAX; else if (DBGFADDRESS_IS_FAR64(&pCur->AddrPC)) fAddrMask = UINT64_MAX; else { PVMCPU pVCpu = VMMGetCpuById(pUVM->pVM, idCpu); CPUMMODE CpuMode = CPUMGetGuestMode(pVCpu); if (CpuMode == CPUMMODE_REAL) fAddrMask = UINT16_MAX; else if ( CpuMode == CPUMMODE_PROTECTED || !CPUMIsGuestIn64BitCode(pVCpu)) fAddrMask = UINT32_MAX; else fAddrMask = UINT64_MAX; } if (pAddrStack) pCur->AddrStack = *pAddrStack; else if (enmCodeType != DBGFCODETYPE_GUEST) DBGFR3AddrFromFlat(pUVM, &pCur->AddrStack, pCtxCore->rsp & fAddrMask); else rc = DBGFR3AddrFromSelOff(pUVM, idCpu, &pCur->AddrStack, pCtxCore->ss.Sel, pCtxCore->rsp & fAddrMask); if (pAddrFrame) pCur->AddrFrame = *pAddrFrame; else if (enmCodeType != DBGFCODETYPE_GUEST) DBGFR3AddrFromFlat(pUVM, &pCur->AddrFrame, pCtxCore->rbp & fAddrMask); else if (RT_SUCCESS(rc)) rc = DBGFR3AddrFromSelOff(pUVM, idCpu, &pCur->AddrFrame, pCtxCore->ss.Sel, pCtxCore->rbp & fAddrMask); } else pCur->enmReturnType = enmReturnType; /* * The first frame. */ if (RT_SUCCESS(rc)) rc = dbgfR3StackWalk(pUVM, idCpu, hAs, pCur); if (RT_FAILURE(rc)) { DBGFR3StackWalkEnd(pCur); return rc; } /* * The other frames. */ DBGFSTACKFRAME Next = *pCur; while (!(pCur->fFlags & (DBGFSTACKFRAME_FLAGS_LAST | DBGFSTACKFRAME_FLAGS_MAX_DEPTH | DBGFSTACKFRAME_FLAGS_LOOP))) { /* try walk. */ rc = dbgfR3StackWalk(pUVM, idCpu, hAs, &Next); if (RT_FAILURE(rc)) break; /* add the next frame to the chain. 
*/ PDBGFSTACKFRAME pNext = (PDBGFSTACKFRAME)MMR3HeapAllocU(pUVM, MM_TAG_DBGF_STACK, sizeof(*pNext)); if (!pNext) { DBGFR3StackWalkEnd(pCur); return VERR_NO_MEMORY; } *pNext = Next; pCur->pNextInternal = pNext; pCur = pNext; Assert(pCur->pNextInternal == NULL); /* check for loop */ for (PCDBGFSTACKFRAME pLoop = pCur->pFirstInternal; pLoop && pLoop != pCur; pLoop = pLoop->pNextInternal) if (pLoop->AddrFrame.FlatPtr == pCur->AddrFrame.FlatPtr) { pCur->fFlags |= DBGFSTACKFRAME_FLAGS_LOOP; break; } /* check for insane recursion */ if (pCur->iFrame >= 2048) pCur->fFlags |= DBGFSTACKFRAME_FLAGS_MAX_DEPTH; } *ppFirstFrame = pCur->pFirstInternal; return rc; }