// Dispatches AHardwareBuffer import to the GL or Vulkan path based on the
// context's backend. Returns an invalid GrBackendTexture if the context is
// abandoned or (without SK_VULKAN) the backend is Vulkan.
static GrBackendTexture make_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    // A dead context can't create any backend objects.
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    // Only honor the protected-content request when the context can actually
    // import protected images.
    const bool createProtectedImage =
            isProtectedContent && can_import_protected_content(context);

    if (kOpenGL_GrBackend == context->contextPriv().getBackend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, config,
                                       deleteProc, deleteCtx, createProtectedImage,
                                       backendFormat);
    }

    SkASSERT(kVulkan_GrBackend == context->contextPriv().getBackend());
#ifdef SK_VULKAN
    // Currently we don't support protected images on vulkan
    SkASSERT(!createProtectedImage);
    return make_vk_backend_texture(context, hardwareBuffer, width, height, config,
                                   deleteProc, deleteCtx, createProtectedImage,
                                   backendFormat);
#else
    return GrBackendTexture();
#endif
}
// Imports an AHardwareBuffer into GL: wraps the buffer in an EGLImage, binds
// it to a freshly generated GL_TEXTURE_EXTERNAL_OES texture, and returns that
// texture as a GrBackendTexture. On success *deleteProc/*deleteCtx are filled
// in so the caller can later release the texture, EGLImage, and buffer ref.
// Returns an invalid GrBackendTexture on any EGL/GL failure, after cleaning
// up whatever was created so far.
static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    // Drain any pre-existing GL errors so the checks below only see our own.
    while (GL_NO_ERROR != glGetError()) {} //clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    // For non-protected content the two EGL_PROTECTED_CONTENT_EXT slots become
    // EGL_NONE, which terminates the attribute list early.
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError() );
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    glBindTexture(GL_TEXTURE_EXTERNAL_OES, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    // Attach the EGLImage as the bound texture's storage.
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }

    // We changed the GL texture binding behind Ganesh's back; mark the cached
    // texture-binding state dirty so the context re-syncs it.
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = *backendFormat.getGLTarget();
    textureInfo.fFormat = *backendFormat.getGLFormat();

    // The cleanup helper carries texID/image/display; the matching
    // DeleteGLTexture proc uses it to release them.
    *deleteProc = GrAHardwareBufferImageGenerator::DeleteGLTexture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}
// Creates a mock backend texture for tests. The pixel data, render-target
// flag, and row bytes are accepted for interface parity but are not consumed
// by the mock backend.
GrBackendTexture GrMockGpu::createTestingOnlyBackendTexture(const void* pixels, int w, int h,
                                                            GrColorType colorType, bool isRT,
                                                            GrMipMapped mipMapped,
                                                            size_t rowBytes) {
    // Map the color type onto a pixel config and reject anything the caps
    // report as non-texturable.
    const GrPixelConfig pixelConfig = GrColorTypeToPixelConfig(colorType, GrSRGBEncoded::kNo);
    if (!this->caps()->isConfigTexturable(pixelConfig)) {
        return GrBackendTexture();  // invalid
    }

    GrMockTextureInfo info;
    info.fConfig = pixelConfig;
    info.fID = NextExternalTextureID();
    // Track the ID so tests can verify the texture is eventually deleted.
    fOutstandingTestingOnlyTextureIDs.add(info.fID);
    return GrBackendTexture(w, h, mipMapped, info);
}
// Transfers ownership of the underlying GL texture to the caller and abandons
// this object's handle on it. Always succeeds for GL.
bool GrGLTexture::onStealBackendTexture(GrBackendTexture* backendTexture,
                                        SkImage::BackendTextureReleaseProc* releaseProc) {
    // GL needs no special cleanup once ownership has moved, so hand the caller
    // a no-op release proc.
    *releaseProc = [](GrBackendTexture) {};
    *backendTexture = GrBackendTexture(width(), height(), config(), fInfo);

    // It's important that we only abandon this texture's objects, not subclass objects such as
    // those held by GrGLTextureRenderTarget. Those objects are not being stolen and need to be
    // cleaned up by us.
    this->GrGLTexture::onAbandon();
    return true;
}
// Creates a mock backend texture from a backend format. Renderability, pixel
// data, row bytes, and clear color are accepted for interface parity but
// ignored by the mock backend.
GrBackendTexture GrMockGpu::createBackendTexture(int w, int h, const GrBackendFormat& format,
                                                 GrMipMapped mipMapped,
                                                 GrRenderable /* renderable */,
                                                 const void* /* pixels */,
                                                 size_t /* rowBytes */,
                                                 const SkColor4f& /* color */) {
    // The format must carry a mock pixel config, and that config must be
    // texturable; short-circuit keeps the dereference safe.
    const GrPixelConfig* pixelConfig = format.getMockFormat();
    if (!pixelConfig || !this->caps()->isConfigTexturable(*pixelConfig)) {
        return GrBackendTexture();  // invalid
    }

    GrMockTextureInfo info;
    info.fConfig = *pixelConfig;
    info.fID = NextExternalTextureID();
    // Track the ID so tests can verify the texture is eventually deleted.
    fOutstandingTestingOnlyTextureIDs.add(info.fID);
    return GrBackendTexture(w, h, mipMapped, info);
}
// Packages this texture's GL info and current mip state as a GrBackendTexture.
GrBackendTexture GrGLTexture::getBackendTexture() const {
    const GrMipMapped mipState = this->texturePriv().mipMapped();
    return GrBackendTexture(this->width(), this->height(), mipState, fInfo);
}
// Imports an AHardwareBuffer into Vulkan: creates a VkImage whose backing
// memory is a dedicated allocation imported from the buffer, and returns it
// as a GrBackendTexture. On success *deleteProc/*deleteCtx are filled in so
// the caller can later destroy the image and free the memory. Returns an
// invalid GrBackendTexture on any failure, destroying any partially created
// Vulkan objects first.
//
// Fixes vs. original: SkASSERT(gpu) ran only AFTER gpu had already been
// dereferenced (physicalDevice()/device()), making the check useless — it is
// now hoisted before first use. A dead-store local (heapIndex, written in the
// memory-type loop but never read) was removed.
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    SkASSERT(context->contextPriv().getBackend() == kVulkan_GrBackend);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
    // Validate the gpu pointer before it is dereferenced below.
    SkASSERT(gpu);
    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    // Query the buffer's properties; the import below must use exactly the
    // reported allocation size and one of the reported memory types.
    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    SkASSERT(format == hwbFormatProps.format);
    SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
             SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
             SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
        nullptr,                                                             // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    // TODO: Check the supported tilings vkGetPhysicalDeviceImageFormatProperties2 to see if we
    // have to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,        // sType
        &externalMemoryImageInfo,                   // pNext
        0,                                          // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                           // VkImageType
        format,                                     // VkFormat
        { (uint32_t)width, (uint32_t)height, 1 },   // VkExtent3D
        1,                                          // mipLevels
        1,                                          // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                      // samples
        tiling,                                     // VkImageTiling
        usageFlags,                                 // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                  // VkSharingMode
        0,                                          // queueFamilyCount
        0,                                          // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,                  // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkImageMemoryRequirementsInfo2 memReqsInfo;
    memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
    memReqsInfo.pNext = nullptr;
    memReqsInfo.image = image;

    VkMemoryDedicatedRequirements dedicatedMemReqs;
    dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
    dedicatedMemReqs.pNext = nullptr;

    VkMemoryRequirements2 memReqs;
    memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
    memReqs.pNext = &dedicatedMemReqs;

    VK_CALL(GetImageMemoryRequirements2(device, &memReqsInfo, &memReqs));
    // AHardwareBuffer imports require a dedicated allocation.
    SkASSERT(VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation);

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    // Find a device-local memory type the buffer can be imported into.
    uint32_t typeIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    // Import the AHardwareBuffer as a dedicated allocation tied to the image.
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,     // sType
        &dedicatedAllocInfo,                        // pNext
        hwbProps.allocationSize,                    // allocationSize
        typeIndex,                                  // memoryTypeIndex
    };

    VkDeviceMemory memory;
    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;
    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
    // support that extension. Or if we know the source of the AHardwareBuffer is not from a
    // "foreign" device we can leave them as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;

    // The cleanup helper carries the gpu/image/memory triple; the matching
    // DeleteVkImage proc uses it to release them.
    *deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}