static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    while (GL_NO_ERROR != glGetError()) {}  // clear any pending GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError() );
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    // We bound a texture to the GL context behind Ganesh's back, so mark the context's
    // texture-binding state as dirty.
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = *backendFormat.getGLTarget();
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteGLTexture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}
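
A hypothetical caller sketch (not part of the generator source above): assuming a GrContext, an AHardwareBuffer, and a matching GrBackendFormat are already in hand, the GrBackendTexture returned by make_gl_backend_texture might be wrapped into an SkImage roughly as follows. The helper name, origin, and color type are illustrative, not Skia API.

static sk_sp<SkImage> wrap_ahb_as_gl_image_sketch(GrContext* context,
                                                  AHardwareBuffer* buffer,
                                                  int width, int height,
                                                  GrPixelConfig config,
                                                  const GrBackendFormat& backendFormat) {
    GrAHardwareBufferImageGenerator::DeleteImageProc deleteProc = nullptr;
    GrAHardwareBufferImageGenerator::DeleteImageCtx deleteCtx = nullptr;
    GrBackendTexture backendTex = make_gl_backend_texture(
            context, buffer, width, height, config, &deleteProc, &deleteCtx,
            /*isProtectedContent=*/false, backendFormat);
    if (!backendTex.isValid()) {
        return nullptr;
    }
    // The origin and color type would normally be derived from the AHardwareBuffer description;
    // kTopLeft/RGBA_8888 are placeholders. deleteProc(deleteCtx) must eventually be invoked to
    // free the GL texture and EGLImage created above.
    return SkImage::MakeFromTexture(context, backendTex, kTopLeft_GrSurfaceOrigin,
                                    kRGBA_8888_SkColorType, kPremul_SkAlphaType, nullptr);
}
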
Example #2
SkSurfaceCharacterization GrContextThreadSafeProxy::createCharacterization(
                                     size_t cacheMaxResourceBytes,
                                     const SkImageInfo& ii, const GrBackendFormat& backendFormat,
                                     int sampleCnt, GrSurfaceOrigin origin,
                                     const SkSurfaceProps& surfaceProps,
                                     bool isMipMapped, bool willUseGLFBO0, bool isTextureable) {
    if (!backendFormat.isValid()) {
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    if (GrBackendApi::kOpenGL != backendFormat.backend() && willUseGLFBO0) {
        // The willUseGLFBO0 flag can only be used with a GL backend.
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    if (!this->caps()->mipMapSupport()) {
        isMipMapped = false;
    }

    GrPixelConfig config = this->caps()->getConfigFromBackendFormat(backendFormat, ii.colorType());
    if (config == kUnknown_GrPixelConfig) {
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    if (!SkSurface_Gpu::Valid(this->caps(), config, ii.colorSpace())) {
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    sampleCnt = this->caps()->getRenderTargetSampleCount(sampleCnt, config);
    if (!sampleCnt) {
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    GrFSAAType FSAAType = GrFSAAType::kNone;
    if (sampleCnt > 1) {
        FSAAType = this->caps()->usesMixedSamples() ? GrFSAAType::kMixedSamples
                                                    : GrFSAAType::kUnifiedMSAA;
    }

    if (willUseGLFBO0 && isTextureable) {
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    if (isTextureable && !this->caps()->isConfigTexturable(config)) {
        // Skia doesn't agree that this is textureable.
        return SkSurfaceCharacterization(); // return an invalid characterization
    }

    return SkSurfaceCharacterization(sk_ref_sp<GrContextThreadSafeProxy>(this),
                                     cacheMaxResourceBytes, ii,
                                     origin, config, FSAAType, sampleCnt,
                                     SkSurfaceCharacterization::Textureable(isTextureable),
                                     SkSurfaceCharacterization::MipMapped(isMipMapped),
                                     SkSurfaceCharacterization::UsesGLFBO0(willUseGLFBO0),
                                     SkSurfaceCharacterization::VulkanSecondaryCBCompatible(false),
                                     surfaceProps);
}
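
A minimal consumer sketch, assuming the characterization is obtained through SkSurface::characterize() on a GPU-backed surface: the characterization is what lets an SkDeferredDisplayList be recorded without a GrContext and replayed later on a matching surface. The helper name is illustrative.

static void record_and_replay_ddl_sketch(sk_sp<SkSurface> surface) {
    SkSurfaceCharacterization characterization;
    if (!surface->characterize(&characterization)) {
        return;  // e.g. a raster surface cannot be characterized
    }
    // Recording requires no GrContext, so this part can run on another thread.
    SkDeferredDisplayListRecorder recorder(characterization);
    SkCanvas* canvas = recorder.getCanvas();
    canvas->clear(SK_ColorWHITE);
    std::unique_ptr<SkDeferredDisplayList> ddl = recorder.detach();
    // Replay on the GPU thread, on the surface that produced the characterization.
    surface->draw(ddl.get());
}
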
Example #3
GrBackendTexture GrMockGpu::createBackendTexture(int w, int h,
                                                 const GrBackendFormat& format,
                                                 GrMipMapped mipMapped,
                                                 GrRenderable /* renderable */,
                                                 const void* /* pixels */,
                                                 size_t /* rowBytes */,
                                                 const SkColor4f& /* color */) {

    const GrPixelConfig* pixelConfig = format.getMockFormat();
    if (!pixelConfig) {
        return GrBackendTexture();  // invalid
    }

    if (!this->caps()->isConfigTexturable(*pixelConfig)) {
        return GrBackendTexture();  // invalid
    }

    GrMockTextureInfo info;
    info.fConfig = *pixelConfig;
    info.fID = NextExternalTextureID();
    fOutstandingTestingOnlyTextureIDs.add(info.fID);
    return GrBackendTexture(w, h, mipMapped, info);
}
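
A short sketch of the mock-format round trip this function depends on, assuming the GrPixelConfig-based GrBackendFormat::MakeMock available in this revision:

GrBackendFormat mockFormat = GrBackendFormat::MakeMock(kRGBA_8888_GrPixelConfig);
const GrPixelConfig* mockConfig = mockFormat.getMockFormat();
SkASSERT(mockConfig && kRGBA_8888_GrPixelConfig == *mockConfig);  // the config round-trips
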
Example #4
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    SkASSERT(context->contextPriv().getBackend() == kVulkan_GrBackend);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
    SkASSERT(gpu);

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    SkASSERT(format == hwbFormatProps.format);
    SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
             SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
             SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
        VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
        nullptr, // pNext
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
    };
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    // TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if we
    // have to use linear tiling. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,         // sType
        &externalMemoryImageInfo,                    // pNext
        0,                                           // VkImageCreateFlags
        VK_IMAGE_TYPE_2D,                            // VkImageType
        format,                                      // VkFormat
        { (uint32_t)width, (uint32_t)height, 1 },    // VkExtent3D
        1,                                           // mipLevels
        1,                                           // arrayLayers
        VK_SAMPLE_COUNT_1_BIT,                       // samples
        tiling,                                      // VkImageTiling
        usageFlags,                                  // VkImageUsageFlags
        VK_SHARING_MODE_EXCLUSIVE,                   // VkSharingMode
        0,                                           // queueFamilyIndexCount
        nullptr,                                     // pQueueFamilyIndices
        VK_IMAGE_LAYOUT_UNDEFINED,                   // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkImageMemoryRequirementsInfo2 memReqsInfo;
    memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
    memReqsInfo.pNext = nullptr;
    memReqsInfo.image = image;

    VkMemoryDedicatedRequirements dedicatedMemReqs;
    dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
    dedicatedMemReqs.pNext = nullptr;

    VkMemoryRequirements2 memReqs;
    memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
    memReqs.pNext = &dedicatedMemReqs;

    VK_CALL(GetImageMemoryRequirements2(device, &memReqsInfo, &memReqs));
    SkASSERT(VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation);

    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,      // sType
        &dedicatedAllocInfo,                         // pNext
        hwbProps.allocationSize,                     // allocationSize
        typeIndex,                                   // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;

    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT, but current Adreno devices do not
    // support that extension. Alternatively, if we know the AHardwareBuffer does not come from a
    // "foreign" device, we can leave the queue family as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
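
The generator presumably picks between the GL and Vulkan paths based on the context's backend; below is a sketch of such a dispatcher, assuming the same contextPriv()/getBackend() accessors used above and the usual SK_VULKAN guard.

static GrBackendTexture make_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    const auto backend = context->contextPriv().getBackend();
    if (kOpenGL_GrBackend == backend) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, config,
                                       deleteProc, deleteCtx, isProtectedContent, backendFormat);
    }
#ifdef SK_VULKAN
    if (kVulkan_GrBackend == backend) {
        return make_vk_backend_texture(context, hardwareBuffer, width, height, config,
                                       deleteProc, deleteCtx, isProtectedContent, backendFormat);
    }
#endif
    return GrBackendTexture();
}
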
Example #5
DEF_GPUTEST_FOR_RENDERING_CONTEXTS(PromiseImageTest, reporter, ctxInfo) {
    const int kWidth = 10;
    const int kHeight = 10;

    GrContext* ctx = ctxInfo.grContext();
    GrGpu* gpu = ctx->contextPriv().getGpu();

    for (bool releaseImageEarly : {true, false}) {
        GrBackendTexture backendTex = gpu->createTestingOnlyBackendTexture(
                nullptr, kWidth, kHeight, kRGBA_8888_GrPixelConfig, true, GrMipMapped::kNo);
        REPORTER_ASSERT(reporter, backendTex.isValid());

        GrBackendFormat backendFormat = backendTex.format();
        REPORTER_ASSERT(reporter, backendFormat.isValid());

        PromiseTextureChecker promiseChecker(backendTex);
        GrSurfaceOrigin texOrigin = kTopLeft_GrSurfaceOrigin;
        sk_sp<SkImage> refImg(
                SkImage_Gpu::MakePromiseTexture(ctx, backendFormat, kWidth, kHeight,
                                                GrMipMapped::kNo, texOrigin,
                                                kRGBA_8888_SkColorType, kPremul_SkAlphaType,
                                                nullptr,
                                                PromiseTextureChecker::Fulfill,
                                                PromiseTextureChecker::Release,
                                                PromiseTextureChecker::Done,
                                                &promiseChecker));

        SkImageInfo info = SkImageInfo::MakeN32Premul(kWidth, kHeight);
        sk_sp<SkSurface> surface = SkSurface::MakeRenderTarget(ctx, SkBudgeted::kNo, info);
        SkCanvas* canvas = surface->getCanvas();

        int expectedFulfillCnt = 0;
        int expectedReleaseCnt = 0;
        int expectedDoneCnt = 0;

        canvas->drawImage(refImg, 0, 0);
        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 true,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 true,
                                                                 expectedDoneCnt,
                                                                 reporter));

        bool isVulkan = kVulkan_GrBackend == ctx->contextPriv().getBackend();
        canvas->flush();
        expectedFulfillCnt++;
        expectedReleaseCnt++;
        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 !isVulkan,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 !isVulkan,
                                                                 expectedDoneCnt,
                                                                 reporter));

        gpu->testingOnly_flushGpuAndSync();
        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 true,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 true,
                                                                 expectedDoneCnt,
                                                                 reporter));

        canvas->drawImage(refImg, 0, 0);
        canvas->drawImage(refImg, 0, 0);

        canvas->flush();
        expectedFulfillCnt++;
        expectedReleaseCnt++;

        gpu->testingOnly_flushGpuAndSync();
        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 true,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 true,
                                                                 expectedDoneCnt,
                                                                 reporter));

        // Now test the code path on Vulkan where we have released the texture, but the GPU isn't
        // done with the resource yet and we do another draw. We should only call fulfill on the
        // first draw and use the cached GrBackendTexture on the second. Release should only be
        // called after the second draw is finished.
        canvas->drawImage(refImg, 0, 0);
        canvas->flush();
        expectedFulfillCnt++;
        expectedReleaseCnt++;
        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 !isVulkan,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 !isVulkan,
                                                                 expectedDoneCnt,
                                                                 reporter));

        canvas->drawImage(refImg, 0, 0);

        if (releaseImageEarly) {
            refImg.reset();
        }

        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 !isVulkan,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 !isVulkan,
                                                                 expectedDoneCnt,
                                                                 reporter));

        canvas->flush();
        expectedFulfillCnt++;

        gpu->testingOnly_flushGpuAndSync();
        expectedReleaseCnt++;
        if (releaseImageEarly) {
            expectedDoneCnt++;
        }
        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 true,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 !isVulkan,
                                                                 expectedDoneCnt,
                                                                 reporter));
        expectedFulfillCnt = promiseChecker.fFulfillCount;
        expectedReleaseCnt = promiseChecker.fReleaseCount;

        if (!releaseImageEarly) {
            refImg.reset();
            expectedDoneCnt++;
        }

        REPORTER_ASSERT(reporter, check_fulfill_and_release_cnts(promiseChecker,
                                                                 true,
                                                                 expectedFulfillCnt,
                                                                 expectedReleaseCnt,
                                                                 true,
                                                                 expectedDoneCnt,
                                                                 reporter));

        gpu->deleteTestingOnlyBackendTexture(backendTex);
    }
}
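
PromiseTextureChecker and check_fulfill_and_release_cnts are defined elsewhere in the test file and are not part of this excerpt. A minimal sketch of what the checker might look like, assuming the void*-context fulfill/release/done callback signatures taken by MakePromiseTexture in this revision (the real helper may track more state):

struct PromiseTextureChecker {
    explicit PromiseTextureChecker(const GrBackendTexture& tex)
            : fTexture(tex), fFulfillCount(0), fReleaseCount(0), fDoneCount(0) {}

    GrBackendTexture fTexture;
    int fFulfillCount;
    int fReleaseCount;
    int fDoneCount;

    // Hand the cached backend texture to Skia and count the fulfill.
    static void Fulfill(void* self, GrBackendTexture* outTexture) {
        auto* checker = static_cast<PromiseTextureChecker*>(self);
        checker->fFulfillCount++;
        *outTexture = checker->fTexture;
    }
    static void Release(void* self) {
        static_cast<PromiseTextureChecker*>(self)->fReleaseCount++;
    }
    static void Done(void* self) {
        static_cast<PromiseTextureChecker*>(self)->fDoneCount++;
    }
};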