/*!	Finds a free slot for \a area of \a size bytes within [\a start, \a end]
	according to \a addressSpec, sets the area's base and size, and inserts it
	into the base-sorted fAreas list.

	\a end is the last usable byte (inclusive) — see the
	"start + (size - 1) > end" sanity check below.

	Must be called with this address space's write lock held.

	\return \c B_OK on success; \c B_BAD_ADDRESS if the requested range lies
		outside [fBase, fEndAddress] or \a size is 0; \c B_BAD_VALUE for an
		unknown \a addressSpec or an unsatisfiable B_EXACT_ADDRESS request;
		\c B_NO_MEMORY if no sufficiently large hole exists.
*/
status_t
VMUserAddressSpace::_InsertAreaSlot(addr_t start, addr_t size, addr_t end,
	uint32 addressSpec, VMUserArea* area, uint32 allocationFlags)
{
	// "last"/"next" bracket the insertion point: the new area goes directly
	// after "last" (or at the list head when last == NULL).
	VMUserArea* last = NULL;
	VMUserArea* next;
	bool foundSpot = false;

	TRACE(("VMUserAddressSpace::_InsertAreaSlot: address space %p, start "
		"0x%lx, size %ld, end 0x%lx, addressSpec %ld, area %p\n", this, start,
		size, end, addressSpec, area));

	// do some sanity checking
	if (start < fBase || size == 0 || end > fEndAddress
		|| start + (size - 1) > end)
		return B_BAD_ADDRESS;

	if (addressSpec == B_EXACT_ADDRESS && area->id != RESERVED_AREA_ID) {
		// search for a reserved area that already covers the requested slot;
		// B_OK (placed) and B_BAD_VALUE (conflict) are both final answers
		status_t status = _InsertAreaIntoReservedRegion(start, size, area,
			allocationFlags);
		if (status == B_OK || status == B_BAD_VALUE)
			return status;

		// There was no reserved area, and the slot doesn't seem to be used
		// already
		// TODO: this could be further optimized.
	}

	size_t alignment = B_PAGE_SIZE;
	if (addressSpec == B_ANY_KERNEL_BLOCK_ADDRESS) {
		// align the memory to the next power of two of the size
		while (alignment < size)
			alignment <<= 1;
	}

	start = ROUNDUP(start, alignment);

	// walk up to the spot where we should start searching
	// (B_BASE_ADDRESS re-enters here after widening the search — see the
	// "goto second_chance" below)
second_chance:
	VMUserAreaList::Iterator it = fAreas.GetIterator();
	while ((next = it.Next()) != NULL) {
		if (next->Base() > start + (size - 1)) {
			// we have a winner
			break;
		}

		last = next;
	}

	// find the right spot depending on the address specification - the area
	// will be inserted directly after "last" ("next" is not referenced anymore)

	switch (addressSpec) {
		case B_ANY_ADDRESS:
		case B_ANY_KERNEL_ADDRESS:
		case B_ANY_KERNEL_BLOCK_ADDRESS:
		{
			// find a hole big enough for a new area
			if (last == NULL) {
				// see if we can build it at the beginning of the virtual map
				addr_t alignedBase = ROUNDUP(fBase, alignment);
				if (is_valid_spot(fBase, alignedBase, size,
						next == NULL ? end : next->Base())) {
					foundSpot = true;
					area->SetBase(alignedBase);
					break;
				}

				last = next;
				next = it.Next();
			}

			// keep walking the gaps between consecutive existing areas
			while (next != NULL) {
				addr_t alignedBase = ROUNDUP(last->Base() + last->Size(),
					alignment);
				if (is_valid_spot(last->Base() + (last->Size() - 1),
						alignedBase, size, next->Base())) {
					foundSpot = true;
					area->SetBase(alignedBase);
					break;
				}

				last = next;
				next = it.Next();
			}

			if (foundSpot)
				break;

			// finally, try the gap between the last area and "end"
			addr_t alignedBase = ROUNDUP(last->Base() + last->Size(),
				alignment);
			if (is_valid_spot(last->Base() + (last->Size() - 1), alignedBase,
					size, end)) {
				// got a spot
				foundSpot = true;
				area->SetBase(alignedBase);
				break;
			} else if (area->id != RESERVED_AREA_ID) {
				// We didn't find a free spot - if there are any reserved areas,
				// we can now test those for free space
				// TODO: it would make sense to start with the biggest of them
				it.Rewind();
				next = it.Next();
				for (last = NULL; next != NULL; next = it.Next()) {
					if (next->id != RESERVED_AREA_ID) {
						last = next;
						continue;
					}

					// TODO: take free space after the reserved area into
					// account!
					addr_t alignedBase = ROUNDUP(next->Base(), alignment);
					if (next->Base() == alignedBase && next->Size() == size) {
						// The reserved area is entirely covered, and thus,
						// removed; it is destroyed manually and released via
						// free_etc() (matching how it was allocated —
						// NOTE(review): confirm against the allocation path)
						fAreas.Remove(next);
						foundSpot = true;
						area->SetBase(alignedBase);
						next->~VMUserArea();
						free_etc(next, allocationFlags);
						break;
					}

					if ((next->protection & RESERVED_AVOID_BASE) == 0
						&& alignedBase == next->Base()
						&& next->Size() >= size) {
						// The new area will be placed at the beginning of the
						// reserved area and the reserved area will be offset
						// and resized
						foundSpot = true;
						next->SetBase(next->Base() + size);
						next->SetSize(next->Size() - size);
						area->SetBase(alignedBase);
						break;
					}

					if (is_valid_spot(next->Base(), alignedBase, size,
							next->Base() + (next->Size() - 1))) {
						// The new area will be placed at the end of the
						// reserved area, and the reserved area will be resized
						// to make space
						alignedBase = ROUNDDOWN(
							next->Base() + next->Size() - size, alignment);
						foundSpot = true;
						next->SetSize(alignedBase - next->Base());
						area->SetBase(alignedBase);
						// insert after the (shrunken) reserved area
						last = next;
						break;
					}

					last = next;
				}
			}
			break;
		}

		case B_BASE_ADDRESS:
		{
			// find a hole big enough for a new area beginning with "start"
			if (last == NULL) {
				// see if we can build it at the beginning of the specified
				// start
				if (next == NULL || next->Base() > start + (size - 1)) {
					foundSpot = true;
					area->SetBase(start);
					break;
				}

				last = next;
				next = it.Next();
			}

			// keep walking
			while (next != NULL) {
				if (next->Base() - (last->Base() + last->Size()) >= size) {
					// we found a spot (it'll be filled up below)
					break;
				}

				last = next;
				next = it.Next();
			}

			addr_t lastEnd = last->Base() + (last->Size() - 1);
			if (next != NULL || end - lastEnd >= size) {
				// got a spot
				foundSpot = true;
				// place at "start" itself if the preceding area ends below
				// it, otherwise directly after the preceding area
				if (lastEnd < start)
					area->SetBase(start);
				else
					area->SetBase(lastEnd + 1);
				break;
			}

			// we didn't find a free spot in the requested range, so we'll
			// try again without any restrictions
			start = fBase;
			addressSpec = B_ANY_ADDRESS;
			last = NULL;
			goto second_chance;
		}

		case B_EXACT_ADDRESS:
			// see if we can create it exactly here
			if ((last == NULL || last->Base() + (last->Size() - 1) < start)
				&& (next == NULL || next->Base() > start + (size - 1))) {
				foundSpot = true;
				area->SetBase(start);
				break;
			}
			break;
		default:
			return B_BAD_VALUE;
	}

	if (!foundSpot)
		return addressSpec == B_EXACT_ADDRESS ? B_BAD_VALUE : B_NO_MEMORY;

	// commit: size the area and link it in after "last" (or at the head)
	area->SetSize(size);
	if (last)
		fAreas.Insert(fAreas.GetNext(last), area);
	else
		fAreas.Insert(fAreas.Head(), area);

	IncrementChangeCount();
	return B_OK;
}
/*!	Locates a free — or, if \a allowReservedRange, reserved — range that can
	hold \a size bytes aligned to \a alignment, honoring \a addressSpec.

	On success the chosen range is returned and \a _foundAddress is set to the
	aligned base address within it; on failure NULL is returned and
	\a _foundAddress is left untouched.
*/
VMKernelAddressSpace::Range*
VMKernelAddressSpace::_FindFreeRange(addr_t start, size_t size,
	size_t alignment, uint32 addressSpec, bool allowReservedRange,
	addr_t& _foundAddress)
{
	TRACE(" VMKernelAddressSpace::_FindFreeRange(start: %#" B_PRIxADDR
		", size: %#" B_PRIxSIZE ", alignment: %#" B_PRIxSIZE ", addressSpec: %#"
		B_PRIx32 ", reserved allowed: %d)\n", start, size, alignment,
		addressSpec, allowReservedRange);

	switch (addressSpec) {
		case B_BASE_ADDRESS:
		{
			// We have to iterate through the range list starting at the given
			// address. This is the most inefficient case.
			Range* range = fRangeTree.FindClosest(start, true);
			while (range != NULL) {
				if (range->type == Range::RANGE_FREE) {
					addr_t alignedBase = ROUNDUP(range->base, alignment);
					if (is_valid_spot(start, alignedBase, size,
							range->base + (range->size - 1))) {
						_foundAddress = alignedBase;
						return range;
					}
				}
				range = fRangeList.GetNext(range);
			}

			// We didn't find a free spot in the requested range, so we'll
			// try again without any restrictions.
			start = fBase;
			addressSpec = B_ANY_ADDRESS;
			// fall through (intentional): retry as B_ANY_ADDRESS
		}

		case B_ANY_ADDRESS:
		case B_ANY_KERNEL_ADDRESS:
		case B_ANY_KERNEL_BLOCK_ADDRESS:
		{
			// We want to allocate from the first non-empty free list that is
			// guaranteed to contain the size. Finding a free range is O(1),
			// unless there are constraints (min base address, alignment).
			// ld() is presumably floor(log2) — the "size * 2 - 1" rounds up
			// so every range in the chosen list is at least "size" pages.
			int freeListIndex = ld((size * 2 - 1) >> PAGE_SHIFT);

			for (int32 i = freeListIndex; i < fFreeListCount; i++) {
				RangeFreeList& freeList = fFreeLists[i];
				if (freeList.IsEmpty())
					continue;

				for (RangeFreeList::Iterator it = freeList.GetIterator();
						Range* range = it.Next();) {
					addr_t alignedBase = ROUNDUP(range->base, alignment);
					if (is_valid_spot(start, alignedBase, size,
							range->base + (range->size - 1))) {
						_foundAddress = alignedBase;
						return range;
					}
				}
			}

			if (!allowReservedRange)
				return NULL;

			// We haven't found any free ranges, but we're supposed to look
			// for reserved ones, too. Iterate through the range list starting
			// at the given address.
			Range* range = fRangeTree.FindClosest(start, true);
			while (range != NULL) {
				if (range->type == Range::RANGE_RESERVED) {
					addr_t alignedBase = ROUNDUP(range->base, alignment);
					if (is_valid_spot(start, alignedBase, size,
							range->base + (range->size - 1))) {
						// allocation from the back might be preferred
						// -- adjust the base accordingly
						if ((range->reserved.flags & RESERVED_AVOID_BASE)
								!= 0) {
							alignedBase = ROUNDDOWN(
								range->base + (range->size - size), alignment);
						}

						_foundAddress = alignedBase;
						return range;
					}
				}
				range = fRangeList.GetNext(range);
			}

			return NULL;
		}

		case B_EXACT_ADDRESS:
		{
			// the requested span must lie entirely within a single
			// non-area range
			Range* range = fRangeTree.FindClosest(start, true);
			TRACE(" B_EXACT_ADDRESS: range: %p\n", range);
			if (range == NULL || range->type == Range::RANGE_AREA
				|| range->base + (range->size - 1) < start + (size - 1)) {
				// TODO: Support allocating if the area range covers multiple
				// free and reserved ranges!
				TRACE(" -> no suitable range\n");
				return NULL;
			}

			if (range->type != Range::RANGE_FREE && !allowReservedRange) {
				TRACE(" -> reserved range not allowed\n");
				return NULL;
			}

			_foundAddress = start;
			return range;
		}

		default:
			return NULL;
	}
}