Example #1
File: vda.c Project: AsamQi/vlc
static int Extract( vlc_va_t *external, picture_t *p_picture, void *opaque,
                    uint8_t *data )
{
    vlc_va_vda_t *p_va = vlc_va_vda_Get( external );
    CVPixelBufferRef cv_buffer = ( CVPixelBufferRef )data;

    if( !cv_buffer )
    {
        msg_Dbg( p_va->p_log, "Frame buffer is empty.");
        return VLC_EGENERIC;
    }
    if( CVPixelBufferGetDataSize(cv_buffer) == 0 )
    {
        msg_Dbg( p_va->p_log, "Empty frame buffer");
        return VLC_EGENERIC;
    }

    if( p_va->hw_ctx.cv_pix_fmt_type == kCVPixelFormatType_420YpCbCr8Planar )
    {
        if( !p_va->image_cache.buffer ) {
            CVPixelBufferRelease( cv_buffer );
            return VLC_EGENERIC;
        }

        vda_Copy420YpCbCr8Planar( p_picture,
                                  cv_buffer,
                                  p_va->hw_ctx.width,
                                  p_va->hw_ctx.height,
                                  &p_va->image_cache );
    }
    else
        vda_Copy422YpCbCr8( p_picture, cv_buffer );
    (void) opaque;
    return VLC_SUCCESS;
}
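
This extractor's opening guard reduces to: reject a NULL buffer or one whose CVPixelBufferGetDataSize() reports zero bytes. A minimal standalone sketch of that guard (helper name hypothetical):

#include <stdbool.h>
#include <CoreVideo/CoreVideo.h>

/* Hypothetical helper: true when the pixel buffer is missing or reports no
 * backing data. CVPixelBufferGetDataSize() returns a size_t, so an explicit
 * == 0 comparison is used rather than a negation. */
static bool cv_buffer_is_empty(CVPixelBufferRef cv_buffer)
{
    return cv_buffer == NULL || CVPixelBufferGetDataSize(cv_buffer) == 0;
}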
Example #2
void
dump_cvpixel_buffer (CVPixelBufferRef pixbuf)
{
  gsize left, right, top, bottom;

  GST_LOG ("buffer %p", pixbuf);
  if (CVPixelBufferLockBaseAddress (pixbuf, 0)) {
    GST_WARNING ("Couldn't lock base address on pixel buffer!");
    return;
  }
  GST_LOG ("Width:%" G_GSIZE_FORMAT " , Height:%" G_GSIZE_FORMAT,
      CVPixelBufferGetWidth (pixbuf), CVPixelBufferGetHeight (pixbuf));
  GST_LOG ("Format:%" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (CVPixelBufferGetPixelFormatType (pixbuf)));
  GST_LOG ("base address:%p", CVPixelBufferGetBaseAddress (pixbuf));
  GST_LOG ("Bytes per row:%" G_GSIZE_FORMAT,
      CVPixelBufferGetBytesPerRow (pixbuf));
  GST_LOG ("Data Size:%" G_GSIZE_FORMAT, CVPixelBufferGetDataSize (pixbuf));
  GST_LOG ("Plane count:%" G_GSIZE_FORMAT, CVPixelBufferGetPlaneCount (pixbuf));
  CVPixelBufferGetExtendedPixels (pixbuf, &left, &right, &top, &bottom);
  GST_LOG ("Extended pixels. left/right/top/bottom : %" G_GSIZE_FORMAT
      "/%" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT,
      left, right, top, bottom);
  CVPixelBufferUnlockBaseAddress (pixbuf, 0);
}
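
The dump above reads the top-level base address and bytes-per-row, which describe the buffer as a whole; for planar formats the per-plane accessors give the layout of each plane. A minimal sketch of a per-plane dump in plain C (printf in place of GST_LOG; function name hypothetical):

#include <stdio.h>
#include <CoreVideo/CoreVideo.h>

/* Hypothetical companion to dump_cvpixel_buffer(): logs the geometry of each
 * plane of a planar pixel buffer (plane count is 0 for chunky formats). */
static void
dump_cvpixel_buffer_planes (CVPixelBufferRef pixbuf)
{
  if (CVPixelBufferLockBaseAddress (pixbuf, kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess)
    return;

  size_t planes = CVPixelBufferGetPlaneCount (pixbuf);
  for (size_t i = 0; i < planes; i++) {
    printf ("plane %zu: %zux%zu, base %p, bytes per row %zu\n", i,
        CVPixelBufferGetWidthOfPlane (pixbuf, i),
        CVPixelBufferGetHeightOfPlane (pixbuf, i),
        CVPixelBufferGetBaseAddressOfPlane (pixbuf, i),
        CVPixelBufferGetBytesPerRowOfPlane (pixbuf, i));
  }

  CVPixelBufferUnlockBaseAddress (pixbuf, kCVPixelBufferLock_ReadOnly);
}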
Example #3
VideoFrame VideoDecoderVDA::frame()
{
    DPTR_D(VideoDecoderVDA);
    CVPixelBufferRef cv_buffer = (CVPixelBufferRef)d.frame->data[3];
    if (!cv_buffer) {
        qDebug("Frame buffer is empty.");
        return VideoFrame();
    }
    if (CVPixelBufferGetDataSize(cv_buffer) <= 0) {
        qDebug("Empty frame buffer");
        return VideoFrame();
    }
    VideoFormat::PixelFormat pixfmt = format_from_cv(CVPixelBufferGetPixelFormatType(cv_buffer));
    if (pixfmt == VideoFormat::Format_Invalid) {
        qWarning("unsupported vda pixel format: %#x", CVPixelBufferGetPixelFormatType(cv_buffer));
        return VideoFrame();
    }
    // we can map the cv buffer addresses to a video frame in SurfaceInteropCVBuffer (may also need VideoSurfaceInterop::mapToTexture())
    class SurfaceInteropCVBuffer Q_DECL_FINAL: public VideoSurfaceInterop {
        bool glinterop;
        CVPixelBufferRef cvbuf; // keep ref until video frame is destroyed
    public:
        SurfaceInteropCVBuffer(CVPixelBufferRef cv, bool gl) : glinterop(gl), cvbuf(cv) {
            //CVPixelBufferRetain(cvbuf);
        }
        ~SurfaceInteropCVBuffer() {
            CVPixelBufferRelease(cvbuf);
        }
        void* mapToHost(const VideoFormat &format, void *handle, int plane) {
            Q_UNUSED(plane);
            CVPixelBufferLockBaseAddress(cvbuf, 0);
            const VideoFormat fmt(format_from_cv(CVPixelBufferGetPixelFormatType(cvbuf)));
            if (!fmt.isValid()) {
                CVPixelBufferUnlockBaseAddress(cvbuf, 0);
                return NULL;
            }
            const int w = CVPixelBufferGetWidth(cvbuf);
            const int h = CVPixelBufferGetHeight(cvbuf);
            uint8_t *src[3];
            int pitch[3];
            for (int i = 0; i < fmt.planeCount(); ++i) {
                // getting the base address results in an internal copy
                src[i] = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(cvbuf, i);
                pitch[i] = CVPixelBufferGetBytesPerRowOfPlane(cvbuf, i);
            }
            CVPixelBufferUnlockBaseAddress(cvbuf, 0);
            //CVPixelBufferRelease(cv_buffer); // release when video frame is destroyed
            VideoFrame frame(VideoFrame::fromGPU(fmt, w, h, h, src, pitch));
            if (fmt != format)
                frame = frame.to(format);
            VideoFrame *f = reinterpret_cast<VideoFrame*>(handle);
            frame.setTimestamp(f->timestamp());
            frame.setDisplayAspectRatio(f->displayAspectRatio());
            *f = frame;
            return f;
        }
Example #4
size_t QTPixelBuffer::dataProviderGetBytesAtPositionCallback(void* refcon, void* buffer, size_t position, size_t count)
{
    char* data = (char*)CVPixelBufferGetBaseAddress(static_cast<CVPixelBufferRef>(refcon));
    size_t size = CVPixelBufferGetDataSize(static_cast<CVPixelBufferRef>(refcon));
    if (size - position < count)
        count = size - position;

    memcpy(buffer, data+position, count);
    return count;
}
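
This callback has the shape CoreGraphics expects from a direct-access data provider. A hedged sketch of how such a callback could be registered with CGDataProviderCreateDirect; the wrapper function, the refcon handling, and the off_t position type follow the public CGDataProvider header, not the surrounding project:

#include <CoreGraphics/CoreGraphics.h>
#include <CoreVideo/CoreVideo.h>
#include <string.h>

/* Hypothetical getBytesAtPosition callback, mirroring Example #4. The caller
 * is assumed to keep the pixel buffer's base address locked while the
 * provider is alive. */
static size_t getBytesAtPosition(void *refcon, void *buffer, off_t position, size_t count)
{
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)refcon;
    char *data = (char *)CVPixelBufferGetBaseAddress(pixbuf);
    size_t size = CVPixelBufferGetDataSize(pixbuf);
    if ((size_t)position >= size)
        return 0;
    if (size - (size_t)position < count)
        count = size - (size_t)position;
    memcpy(buffer, data + position, count);
    return count;
}

/* Hypothetical wrapper: exposes the pixel buffer's bytes as a CGDataProvider.
 * At least one access callback (getBytePointer or getBytesAtPosition) must be
 * non-NULL. */
static CGDataProviderRef createProviderForPixelBuffer(CVPixelBufferRef pixbuf)
{
    CGDataProviderDirectCallbacks callbacks = { 0, NULL, NULL, getBytesAtPosition, NULL };
    return CGDataProviderCreateDirect(pixbuf, (off_t)CVPixelBufferGetDataSize(pixbuf), &callbacks);
}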
Example #5
File: vda.c Project: Geal/vlc
static int Extract( vlc_va_t *va, picture_t *p_picture, uint8_t *data )
{
    vlc_va_sys_t *sys = va->sys;

    CVPixelBufferRef cv_buffer = (CVPixelBufferRef)data;

    if( !cv_buffer || !CVPixelBufferGetDataSize(cv_buffer) )
    {
        msg_Dbg( va, "Frame buffer is empty.");
        return VLC_EGENERIC;
    }

    copy420YpCbCr8Planar( p_picture, cv_buffer, sys->i_height );

    return VLC_SUCCESS;
}
Example #6
File: vda.c Project: Aakash-729/vlc
static int Extract( vlc_va_t *va, picture_t *p_picture, uint8_t *data )
{
    CVPixelBufferRef cv_buffer = (CVPixelBufferRef)data;

    if( !cv_buffer )
    {
        msg_Dbg( va, "Frame buffer is empty.");
        return VLC_EGENERIC;
    }
    if( CVPixelBufferGetDataSize(cv_buffer) == 0 )
    {
        msg_Dbg( va, "Empty frame buffer");
        return VLC_EGENERIC;
    }

    vda_Copy422YpCbCr8( p_picture, cv_buffer );

    return VLC_SUCCESS;
}
Example #7
static void
_frame_decompressed(void *decompressionTrackingRefCon, OSStatus err,
                    ICMDecompressionTrackingFlags dtf,
                    CVPixelBufferRef pixelBuffer, TimeValue64 displayTime,
                    TimeValue64 displayDuration,
                    ICMValidTimeFlags validTimeFlags, void *reserved,
                    void *sourceFrameRefCon)
{
    dbg_printf("[ vOE]  >> [%08lx] :: _frame_decompressed()\n", (UInt32) -1);
    if (!err) {
        StreamInfoPtr si = (StreamInfoPtr) decompressionTrackingRefCon;
        if (dtf & kICMDecompressionTracking_ReleaseSourceData) {
            // if we were responsible for managing source data buffers,
            //  we should release the source buffer here,
            //  using sourceFrameRefCon to identify it.
        }

        if ((dtf & kICMDecompressionTracking_EmittingFrame) && pixelBuffer) {
            ICMCompressionFrameOptionsRef frameOptions = NULL;
            OSType pf = CVPixelBufferGetPixelFormatType(pixelBuffer);

            dbg_printf("[ vOE]   > [%08lx] :: _frame_decompressed() = %ld; %ld,"
                       " %lld, %lld, %ld [%ld '%4.4s' (%ld x %ld)]\n",
                       (UInt32) -1, err,
                       dtf, displayTime, displayDuration, validTimeFlags,
                       CVPixelBufferGetDataSize(pixelBuffer), (char *) &pf,
                       CVPixelBufferGetWidth(pixelBuffer),
                       CVPixelBufferGetHeight(pixelBuffer));

            displayDuration = 25; //?

            // Feed the frame to the compression session.
            err = ICMCompressionSessionEncodeFrame(si->si_v.cs, pixelBuffer,
                                                   displayTime, displayDuration,
                                                   validTimeFlags, frameOptions,
                                                   NULL, NULL );
        }
    }

    dbg_printf("[ vOE] <   [%08lx] :: _frame_decompressed() = %ld\n",
               (UInt32) -1, err);
}
Example #8
void jit_gl_hap_draw_frame(void *jitob, CVImageBufferRef frame)
{
	t_jit_gl_hap * x = (t_jit_gl_hap*)jitob;
	CFTypeID imageType = CFGetTypeID(frame);
	OSType newPixelFormat;

	if(x->validframe)
		return;
		
	if (imageType == CVPixelBufferGetTypeID()) {

		// Update the texture
		CVBufferRetain(frame);
		
		if(x->buffer) {
			CVPixelBufferUnlockBaseAddress(x->buffer, kCVPixelBufferLock_ReadOnly);
			CVBufferRelease(x->buffer);
		}
		
		x->buffer = frame;
		CVPixelBufferLockBaseAddress(x->buffer, kCVPixelBufferLock_ReadOnly);
		
		x->dim[0] = CVPixelBufferGetWidth(x->buffer);
		x->dim[1] = CVPixelBufferGetHeight(x->buffer);

		newPixelFormat = CVPixelBufferGetPixelFormatType(x->buffer);

		if(x->buffer && x->hap_format==JIT_GL_HAP_PF_HAP) {
			size_t extraRight, extraBottom;
			unsigned int bitsPerPixel;
			size_t bytesPerRow;
			size_t actualBufferSize;

			CVPixelBufferGetExtendedPixels(x->buffer, NULL, &extraRight, NULL, &extraBottom);
			x->roundedWidth = x->dim[0] + extraRight;
			x->roundedHeight = x->dim[1] + extraBottom;
			if (x->roundedWidth % 4 != 0 || x->roundedHeight % 4 != 0) {
				x->validframe = 0;
				return;
			}			

			switch (newPixelFormat) {
				case kHapPixelFormatTypeRGB_DXT1:
					x->newInternalFormat = GL_COMPRESSED_RGB_S3TC_DXT1_EXT;
					bitsPerPixel = 4;
					break;
				case kHapPixelFormatTypeRGBA_DXT5:
				case kHapPixelFormatTypeYCoCg_DXT5:
					x->newInternalFormat = GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
					bitsPerPixel = 8;
					break;
				default:
					// we don't support non-DXT pixel buffers
					x->validframe = 0;
					return;
					break;
			}
			x->useshader = (newPixelFormat == kHapPixelFormatTypeYCoCg_DXT5);
			
			bytesPerRow = (x->roundedWidth * bitsPerPixel) / 8;
			x->newDataLength = bytesPerRow * x->roundedHeight; // usually not the full length of the buffer
			actualBufferSize = CVPixelBufferGetDataSize(x->buffer);
			
			// Check the buffer is as large as we expect it to be
			if (x->newDataLength > actualBufferSize) {
				x->validframe = 0;
				return;
			}

			// If we got this far we're good to go
			x->validframe = 1;
			x->target = GL_TEXTURE_2D;
			if(!x->flipped) {
				jit_attr_setlong(x->texoutput, gensym("flip"), 1);
				x->flipped = 1;
			}
			//x->drawhap = 1;
		}
		else if(x->buffer) { // uncompressed (non-DXT) pixel buffer
			if( newPixelFormat == k24RGBPixelFormat )
				x->newInternalFormat = GL_RGB8;
			else if( newPixelFormat == k32BGRAPixelFormat )
				x->newInternalFormat = GL_RGBA8;
			else {
				x->validframe = 0;
				return;
			}

			x->roundedWidth = x->dim[0];
			x->roundedHeight = x->dim[1];
			x->newDataLength = CVPixelBufferGetDataSize(x->buffer);
			x->rowLength = CVPixelBufferGetBytesPerRow( x->buffer ) / (x->hap_format==JIT_GL_HAP_PF_RGB ? 3 : 4);
			x->target = GL_TEXTURE_RECTANGLE_EXT;
			
			if(!x->flipped) {
				jit_attr_setlong(x->texoutput, gensym("flip"), 1);
				x->flipped = 1;
			}
			x->validframe = 1;
		}
	}
	else {
#ifdef MAC_VERSION
		CGSize imageSize = CVImageBufferGetEncodedSize(frame);
		bool flipped = CVOpenGLTextureIsFlipped(frame);
		x->texture = CVOpenGLTextureGetName(frame);
		x->useshader = 0;
		x->dim[0] = (t_atom_long)imageSize.width;
		x->dim[1] = (t_atom_long)imageSize.height;
		x->validframe = 1;
		x->target = GL_TEXTURE_RECTANGLE_ARB;
		if(x->flipped!=flipped) {
			jit_attr_setlong(x->texoutput, gensym("flip"), flipped);
			x->flipped = flipped;			
		}
#endif
	}
}
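
The size check in the DXT branch compares a computed payload length against CVPixelBufferGetDataSize(). The same arithmetic as a standalone helper (name hypothetical), assuming the dimensions have already been padded by the extended pixels as above:

#include <stdbool.h>
#include <CoreVideo/CoreVideo.h>

/* Hypothetical helper: true when the buffer holds at least the DXT payload
 * implied by the padded dimensions (bitsPerPixel is 4 for DXT1, 8 for DXT5). */
static bool hap_buffer_is_large_enough(CVPixelBufferRef buffer,
                                       size_t roundedWidth, size_t roundedHeight,
                                       unsigned int bitsPerPixel)
{
	size_t bytesPerRow = (roundedWidth * bitsPerPixel) / 8;
	size_t expectedLength = bytesPerRow * roundedHeight;
	return expectedLength <= CVPixelBufferGetDataSize(buffer);
}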
Example #9
size_t QTPixelBuffer::dataSize() const
{
    return CVPixelBufferGetDataSize(m_pixelBuffer);
}
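
Taken together, the examples share one pattern: obtain a CVPixelBufferRef, check CVPixelBufferGetDataSize(), and lock the base address before touching the bytes. A self-contained sketch of that pattern, assuming a locally allocated buffer rather than one handed over by a decoder:

#include <stdio.h>
#include <CoreVideo/CoreVideo.h>

int main(void)
{
    CVPixelBufferRef pixbuf = NULL;
    // Allocate a 640x480 BGRA buffer; real code would pass surface attributes as needed.
    if (CVPixelBufferCreate(kCFAllocatorDefault, 640, 480,
                            kCVPixelFormatType_32BGRA, NULL, &pixbuf) != kCVReturnSuccess)
        return 1;

    printf("data size: %zu, bytes per row: %zu\n",
           CVPixelBufferGetDataSize(pixbuf),
           CVPixelBufferGetBytesPerRow(pixbuf));

    if (CVPixelBufferLockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly) == kCVReturnSuccess) {
        // The base address is only valid while the buffer is locked.
        printf("base address: %p\n", CVPixelBufferGetBaseAddress(pixbuf));
        CVPixelBufferUnlockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    }

    CVPixelBufferRelease(pixbuf);
    return 0;
}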