// Pull the most recent video frame out of the QuickTime visual context and hand
// it to the compositing layer; when compositing is unavailable (or there is no
// video layer) fall back to asking the MediaPlayer to repaint in software.
void MediaPlayerPrivateQuickTimeVisualContext::retrieveCurrentImage() {
    if (!m_visualContext)
        return;
#if USE(ACCELERATED_COMPOSITING)
    if (m_qtVideoLayer) {
        // NOTE(review): imageForTime(0) appears to mean "frame for the current
        // time" — confirm against QTMovieVisualContext.
        QTPixelBuffer buffer = m_visualContext->imageForTime(0);
        if (!buffer.pixelBufferRef())
            return;

        WKCACFLayer* layer = static_cast<WKCACFLayer*>(m_qtVideoLayer->platformLayer());

        // lockBaseAddress() follows the CoreVideo convention of returning 0 on
        // success, hence the negation.  Everything inside this branch must run
        // with the base address locked; unlockBaseAddress() below releases it.
        if (!buffer.lockBaseAddress()) {
            if (requiredDllsAvailable()) {
                // Lazily create the image queue the first time we have a frame;
                // the third argument (30) is presumably the queue capacity —
                // TODO confirm against WKCAImageQueue.
                if (!m_imageQueue) {
                    m_imageQueue = new WKCAImageQueue(buffer.width(), buffer.height(), 30);
                    m_imageQueue->setFlags(WKCAImageQueue::Fill, WKCAImageQueue::Fill);
                    layer->setContents(m_imageQueue->get());
                }

                // Debug QuickTime links against a non-Debug version of CoreFoundation, so the
                // CFDictionary attached to the CVPixelBuffer cannot be directly passed on into the
                // CAImageQueue without being converted to a non-Debug CFDictionary. Additionally,
                // old versions of QuickTime used a non-AAS CoreFoundation, so the types are not
                // interchangeable even in the release case.
                RetainPtr<CFDictionaryRef> attachments(AdoptCF, QTCFDictionaryCreateCopyWithDataCallback(kCFAllocatorDefault, buffer.attachments(), &QTCFDictionaryCreateWithDataCallback));
                CFTimeInterval imageTime = QTMovieVisualContext::currentHostTime();

                // Let the queue reclaim slots for images it has finished with
                // before registering a new one.
                m_imageQueue->collect();

                uint64_t imageId = m_imageQueue->registerPixelBuffer(buffer.baseAddress(), buffer.dataSize(), buffer.bytesPerRow(), buffer.width(), buffer.height(), buffer.pixelFormatType(), attachments.get(), 0);

                // On successful insertion the queue will eventually invoke
                // imageQueueReleaseCallback on the pixel buffer, so balance
                // that with an extra retain now.
                if (m_imageQueue->insertImage(imageTime, WKCAImageQueue::Buffer, imageId, WKCAImageQueue::Opaque | WKCAImageQueue::Flush, &QTPixelBuffer::imageQueueReleaseCallback, buffer.pixelBufferRef())) {
                    // Retain the buffer one extra time so it doesn't disappear before CAImageQueue decides to release it:
                    QTPixelBuffer::retainCallback(buffer.pixelBufferRef());
                }
            } else {
                // Software path: wrap the pixel data in a CGImage and set it
                // directly as the layer contents.
                CGImageRef image = CreateCGImageFromPixelBuffer(buffer);
                layer->setContents(image);
                CGImageRelease(image);
            }

            buffer.unlockBaseAddress();
            layer->rootLayer()->setNeedsRender();
        }
    } else
#endif
        m_player->repaint();

    // Give the visual context a chance to perform its housekeeping.
    m_visualContext->task();
}
static CGImageRef CreateCGImageFromPixelBuffer(QTPixelBuffer buffer) { #if USE(ACCELERATED_COMPOSITING) CGDataProviderRef provider = 0; CGColorSpaceRef colorSpace = 0; CGImageRef image = 0; size_t bitsPerComponent = 0; size_t bitsPerPixel = 0; CGImageAlphaInfo alphaInfo = kCGImageAlphaNone; if (buffer.pixelFormatIs32BGRA()) { bitsPerComponent = 8; bitsPerPixel = 32; alphaInfo = (CGImageAlphaInfo)(kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little); } else if (buffer.pixelFormatIs32ARGB()) { bitsPerComponent = 8; bitsPerPixel = 32; alphaInfo = (CGImageAlphaInfo)(kCGImageAlphaNoneSkipLast | kCGBitmapByteOrder32Big); } else { // All other pixel formats are currently unsupported: ASSERT_NOT_REACHED(); } CGDataProviderDirectAccessCallbacks callbacks = { &QTPixelBuffer::dataProviderGetBytePointerCallback, &QTPixelBuffer::dataProviderReleaseBytePointerCallback, &QTPixelBuffer::dataProviderGetBytesAtPositionCallback, &QTPixelBuffer::dataProviderReleaseInfoCallback, }; // Colorspace should be device, so that Quartz does not have to do an extra render. colorSpace = CGColorSpaceCreateDeviceRGB(); require(colorSpace, Bail); provider = CGDataProviderCreateDirectAccess(buffer.pixelBufferRef(), buffer.dataSize(), &callbacks); require(provider, Bail); // CGDataProvider does not retain the buffer, but it will release it later, so do an extra retain here: QTPixelBuffer::retainCallback(buffer.pixelBufferRef()); image = CGImageCreate(buffer.width(), buffer.height(), bitsPerComponent, bitsPerPixel, buffer.bytesPerRow(), colorSpace, alphaInfo, provider, 0, false, kCGRenderingIntentDefault); Bail: // Once the image is created we can release our reference to the provider and the colorspace, they are retained by the image if (provider) CGDataProviderRelease(provider); if (colorSpace) CGColorSpaceRelease(colorSpace); return image; #else return 0; #endif }