/**
 * Checks whether every page overlapping the range [pvBase, pvBase + cjSize)
 * is mapped and accessible.
 *
 * @param pvBase  Start of the memory range to probe.
 * @param cjSize  Size of the range in bytes.
 * @return TRUE if all overlapping pages are valid, FALSE otherwise.
 */
BOOLEAN
KdbIsMemoryValid(PVOID pvBase, ULONG cjSize)
{
    PUCHAR pjPage;
    PUCHAR pjEnd = (PUCHAR)pvBase + cjSize;

    /* Probe each page that intersects the range, starting at the
       page-aligned address at or below pvBase */
    for (pjPage = ALIGN_DOWN_POINTER_BY(pvBase, PAGE_SIZE);
         pjPage < pjEnd;
         pjPage += PAGE_SIZE)
    {
        if (!MmIsAddressValid(pjPage))
            return FALSE;
    }

    return TRUE;
}
/**
 * Walks the whole heap and returns every fully-free page back to the
 * loader's page lookup table (marks them LoaderFree), so the memory can
 * be reused after the heap is no longer needed.
 *
 * Heap layout assumed by this walk (inferred from the code — the block
 * array starts at Heap->Blocks; each HEAP_BLOCK header is followed by
 * Block->Size payload units; a block with Size == 0 terminates the heap;
 * Tag == 0 marks a free block):
 *
 * @param HeapHandle  Opaque heap pointer, actually a PHEAP.
 */
VOID HeapRelease( PVOID HeapHandle) {
    PHEAP Heap = HeapHandle;
    PHEAP_BLOCK Block;
    PUCHAR StartAddress, EndAddress;
    PFN_COUNT FreePages, AllFreePages = 0;
    TRACE("HeapRelease(%p)\n", HeapHandle);
    /* Loop all heap chunks. Note: the increment expression re-reads
       Block->Size AFTER the body may have advanced Block past a run of
       free blocks, so it steps over the in-use (or terminator) block the
       inner walk stopped on. */
    for (Block = &Heap->Blocks; Block->Size != 0; Block = Block + 1 + Block->Size)
    {
        /* Continue, if its not free (Tag != 0 means in use) */
        if (Block->Tag != 0)
        {
#ifdef FREELDR_HEAP_VERIFIER
            /* Verify size and redzones */
            ASSERT(*REDZONE_SIZE(Block) <= Block->Size * sizeof(HEAP_BLOCK));
            ASSERT(*REDZONE_LOW(Block) == REDZONE_MARK);
            ASSERT(*REDZONE_HI(Block) == REDZONE_MARK);
#endif
            continue;
        }

        /* Calculate page aligned start address of the free region.
           Aligning UP ensures we never free a page still partially
           covered by the preceding in-use block or this block's header. */
        StartAddress = ALIGN_UP_POINTER_BY(Block->Data, PAGE_SIZE);

        /* Walk over adjacent free blocks, coalescing them into one
           region. NOTE(review): this assumes the Size == 0 terminator
           block has a nonzero Tag, otherwise this loop would not stop
           at the end of the heap — verify against the heap creation code. */
        while (Block->Tag == 0) Block = Block + Block->Size + 1;

        /* Check if this was the last block */
        if (Block->Size == 0)
        {
            /* Align the end address up to cover the end of the heap */
            EndAddress = ALIGN_UP_POINTER_BY(Block->Data, PAGE_SIZE);
        }
        else
        {
            /* Align the end address down to not cover any allocations */
            EndAddress = ALIGN_DOWN_POINTER_BY(Block->Data, PAGE_SIZE);
        }

        /* Check if we have free pages (a small free run may not span
           even one whole page, in which case nothing is released) */
        if (EndAddress > StartAddress)
        {
            /* Calculate the size of the free region in pages */
            FreePages = (PFN_COUNT)((EndAddress - StartAddress) / MM_PAGE_SIZE);
            AllFreePages += FreePages;

            /* Now mark the pages free */
            MmMarkPagesInLookupTable(PageLookupTableAddress,
                                     (ULONG_PTR)StartAddress / MM_PAGE_SIZE,
                                     FreePages,
                                     LoaderFree);
        }

        /* bail out, if it was the last block (the for-increment would
           otherwise dereference past the terminator) */
        if (Block->Size == 0) break;
    }

    /* NOTE(review): %ld with PFN_COUNT — if PFN_COUNT is an unsigned
       type, %lu (or a width-matched specifier) would be more accurate;
       confirm the TRACE implementation's expectations. */
    TRACE("HeapRelease() done, freed %ld pages\n", AllFreePages);
}