// Reserve a fixed, caller-chosen region inside this memory block.
// The request is expanded to 4K page granularity: the start address is
// rounded down and the size rounded up so the whole touched page range
// is covered. Returns false (without allocating) if the aligned range
// falls outside this block or intersects an existing allocation.
bool DynamicMemoryBlockBase::AllocFixed(u32 addr, u32 size)
{
	assert(size);

	size = PAGE_4K(size + (addr & 4095)); // align size up to whole pages
	addr &= ~4095; // align start address down to a page boundary

	if (!IsInMyRange(addr, size))
	{
		assert(0);
		return false;
	}

	std::lock_guard<std::mutex> lock(Memory.mutex);

	for (u32 i = 0; i < m_allocated.size(); ++i)
	{
		// Fix: reject any intersection of [addr, addr+size) with an existing
		// allocation. The previous check only caught the case where the new
		// START address fell inside an existing block, so a request starting
		// below an allocation but extending into it was wrongly accepted.
		if (addr <= m_allocated[i].addr + m_allocated[i].size - 1 &&
			m_allocated[i].addr <= addr + size - 1)
		{
			return false;
		}
	}

	AppendMem(addr, size);

	return true;
}
// Remove the mapping whose virtual start address equals `addr`.
// Returns the size of the removed mapping, or 0 if no mapping starts
// at `addr` (or the recorded mapping is outside this block's range).
u32 VirtualMemoryBlock::UnmapAddress(u64 addr)
{
	for (auto it = m_mapped_memory.begin(); it != m_mapped_memory.end(); ++it)
	{
		const bool starts_here = (it->addr == addr);

		if (starts_here && IsInMyRange(it->addr, it->size))
		{
			const u32 unmapped_size = it->size;
			m_mapped_memory.erase(it);
			return unmapped_size;
		}
	}

	return 0;
}
// Remove the mapping whose virtual start address equals `addr`.
// On success, writes the removed mapping's size to `size` and returns
// true; otherwise leaves `size` untouched and returns false.
bool VirtualMemoryBlock::UnmapAddress(u32 addr, u32& size)
{
	for (auto it = m_mapped_memory.begin(); it != m_mapped_memory.end(); ++it)
	{
		if (it->addr != addr)
		{
			continue;
		}

		if (IsInMyRange(it->addr, it->size))
		{
			size = it->size;
			m_mapped_memory.erase(it);
			return true;
		}
	}

	return false;
}
// Map `size` bytes of real memory at `realaddr` to the fixed virtual
// address `addr` inside this block. Returns false if the target range
// is outside the block or conflicts with an existing mapping.
bool VirtualMemoryBlock::Map(u32 realaddr, u32 size, u32 addr)
{
	assert(size);

	if (!IsInMyRange(addr, size))
	{
		return false;
	}

	for (u32 i = 0; i < m_mapped_memory.size(); ++i)
	{
		// Fix: reject ANY intersection of [addr, addr+size) with an existing
		// mapping. The previous condition only fired when the new range was
		// fully contained within an existing one, so partially overlapping
		// mappings were wrongly accepted.
		if (addr <= m_mapped_memory[i].addr + m_mapped_memory[i].size - 1 &&
			m_mapped_memory[i].addr <= addr + size - 1)
		{
			return false;
		}
	}

	m_mapped_memory.emplace_back(addr, realaddr, size);

	return true;
}
// Map `size` bytes of real memory at `realaddr` into this virtual block.
// If `addr` is non-zero, the mapping is placed at that fixed address and
// `addr` is returned on success (0 on failure). If `addr` is zero, the
// block is scanned for the first free gap large enough, and the chosen
// address is returned (0 if no gap was found).
u64 VirtualMemoryBlock::Map(u64 realaddr, u32 size, u64 addr)
{
	if(addr)
	{
		// NOTE(review): this guard only rejects when the range is NOT fully
		// inside the block AND at least one endpoint is still "mine" — i.e. a
		// range straddling the block boundary. A range entirely outside the
		// block, or one overlapping an existing mapping, is accepted as-is.
		// Looks intentional (callers may map outside the block?) — confirm.
		if(!IsInMyRange(addr, size) && (IsMyAddress(addr) || IsMyAddress(addr + size - 1)))
			return 0;
		m_mapped_memory.emplace_back(addr, realaddr, size);
		return addr;
	}
	else
	{
		// First-fit search: walk candidate start addresses; on collision,
		// skip directly past the conflicting mapping and retry.
		// (This inner `addr` deliberately shadows the zero parameter.)
		for(u64 addr = GetStartAddr(); addr <= GetEndAddr() - GetReservedAmount() - size;)
		{
			bool is_good_addr = true;
			// check if address is already mapped
			for(u32 i=0; i<m_mapped_memory.size(); ++i)
			{
				// Overlap test: candidate range [addr, addr+size) intersects
				// mapping i's range in either direction.
				if((addr >= m_mapped_memory[i].addr && addr < m_mapped_memory[i].addr + m_mapped_memory[i].size) ||
					(m_mapped_memory[i].addr >= addr && m_mapped_memory[i].addr < addr + size))
				{
					is_good_addr = false;
					// Jump past the conflicting mapping instead of stepping.
					addr = m_mapped_memory[i].addr + m_mapped_memory[i].size;
					break;
				}
			}
			if(!is_good_addr) continue;
			m_mapped_memory.emplace_back(addr, realaddr, size);
			return addr;
		}
		// No free gap large enough was found.
		return 0;
	}
}
// Reserve a fixed, caller-chosen region inside this memory block
// (u64-address variant). The request is expanded to 4K page granularity
// before validation. Returns false (without allocating) if the aligned
// range is outside this block or intersects an existing allocation.
bool DynamicMemoryBlockBase::AllocFixed(u64 addr, u32 size)
{
	assert(size); // consistency with the u32 overload: zero-size requests are a caller bug

	size = PAGE_4K(size + (addr & 4095)); // align size up to whole pages
	addr &= ~4095; // align start address down to a page boundary

	if (!IsInMyRange(addr, size))
	{
		assert(0);
		return false;
	}

	LV2_LOCK(0);

	for (u32 i = 0; i < m_allocated.size(); ++i)
	{
		// Fix: reject any intersection of [addr, addr+size) with an existing
		// allocation. The previous check only caught the case where the new
		// START address fell inside an existing block, so a request starting
		// below an allocation but extending into it was wrongly accepted.
		if (addr <= m_allocated[i].addr + m_allocated[i].size - 1 &&
			m_allocated[i].addr <= addr + size - 1)
		{
			return false;
		}
	}

	AppendMem(addr, size);

	return true;
}
bool VirtualMemoryBlock::IsInMyRange(const u64 addr, const u32 size) { return IsInMyRange(addr) && IsInMyRange(addr + size - 1); }
// An address belongs to this block exactly when it falls in the block's range.
bool DynamicMemoryBlockBase::IsMyAddress(const u64 addr)
{
	const bool in_range = IsInMyRange(addr);

	return in_range;
}
bool DynamicMemoryBlockBase::IsInMyRange(const u64 addr, const u32 size) { return IsInMyRange(addr) && IsInMyRange(addr + size - 1); }