Example #1
VideoFrame VideoDecoderVDA::frame()
{
    DPTR_D(VideoDecoderVDA);
    CVPixelBufferRef cv_buffer = (CVPixelBufferRef)d.frame->data[3];
    if (!cv_buffer) {
        qDebug("Frame buffer is empty.");
        return VideoFrame();
    }
    if (CVPixelBufferGetDataSize(cv_buffer) <= 0) {
        qDebug("Empty frame buffer");
        return VideoFrame();
    }
    VideoFormat::PixelFormat pixfmt = format_from_cv(CVPixelBufferGetPixelFormatType(cv_buffer));
    if (pixfmt == VideoFormat::Format_Invalid) {
        qWarning("unsupported vda pixel format: %#x", CVPixelBufferGetPixelFormatType(cv_buffer));
        return VideoFrame();
    }
    // We can map the CVPixelBuffer plane addresses to a video frame in SurfaceInteropCVBuffer (may need VideoSurfaceInterop::mapToTexture()).
    class SurfaceInteropCVBuffer Q_DECL_FINAL: public VideoSurfaceInterop {
        bool glinterop;
        CVPixelBufferRef cvbuf; // keep ref until video frame is destroyed
    public:
        SurfaceInteropCVBuffer(CVPixelBufferRef cv, bool gl) : glinterop(gl), cvbuf(cv) {
            //CVPixelBufferRetain(cvbuf);
        }
        ~SurfaceInteropCVBuffer() {
            CVPixelBufferRelease(cvbuf);
        }
        void* mapToHost(const VideoFormat &format, void *handle, int plane) {
            Q_UNUSED(plane);
            CVPixelBufferLockBaseAddress(cvbuf, 0);
            const VideoFormat fmt(format_from_cv(CVPixelBufferGetPixelFormatType(cvbuf)));
            if (!fmt.isValid()) {
                CVPixelBufferUnlockBaseAddress(cvbuf, 0);
                return NULL;
            }
            const int w = CVPixelBufferGetWidth(cvbuf);
            const int h = CVPixelBufferGetHeight(cvbuf);
            uint8_t *src[3];
            int pitch[3];
            for (int i = 0; i < fmt.planeCount(); ++i) {
                // getting the plane's base address may trigger an internal copy
                src[i] = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(cvbuf, i);
                pitch[i] = CVPixelBufferGetBytesPerRowOfPlane(cvbuf, i);
            }
            CVPixelBufferUnlockBaseAddress(cvbuf, 0);
            //CVPixelBufferRelease(cv_buffer); // release when video frame is destroyed
            VideoFrame frame(VideoFrame::fromGPU(fmt, w, h, h, src, pitch));
            if (fmt != format)
                frame = frame.to(format);
            VideoFrame *f = reinterpret_cast<VideoFrame*>(handle);
            frame.setTimestamp(f->timestamp());
            frame.setDisplayAspectRatio(f->displayAspectRatio());
            *f = frame;
            return f;
        }
Example #2
	bool getImage(const VideoFrame &videoFrame, void *dest, ImgScaler *nv12ToRGB32) override
	{
		{
			QMutexLocker locker(&m_buffersMutex);
			if (m_buffers.indexOf(videoFrame.surfaceId) < 0)
				return false;
		}

		CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)videoFrame.surfaceId;
		if (CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
		{
			CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

			const quint8 *srcData[2] = {
				(const quint8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0),
				(const quint8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1)
			};
			const qint32 srcLinesize[2] = {
				(qint32)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0),
				(qint32)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1)
			};

			nv12ToRGB32->scale((const void **)srcData, srcLinesize, dest);

			CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

			return true;
		}

		return false;
	}
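Every snippet in this collection brackets CPU access to the pixels between CVPixelBufferLockBaseAddress() and CVPixelBufferUnlockBaseAddress() by hand, which gets fragile once early returns are involved. A minimal RAII sketch of the same pattern — illustrative only, the PixelBufferLock class is hypothetical and not part of any project shown here:

#include <CoreVideo/CoreVideo.h>

// Locks the buffer's base address on construction and guarantees the
// matching unlock on scope exit, even on early returns.
class PixelBufferLock {
public:
    PixelBufferLock(CVPixelBufferRef buf, CVOptionFlags flags)
        : m_buf(buf), m_flags(flags),
          m_locked(CVPixelBufferLockBaseAddress(buf, flags) == kCVReturnSuccess) {}
    ~PixelBufferLock() {
        if (m_locked)
            CVPixelBufferUnlockBaseAddress(m_buf, m_flags);
    }
    bool locked() const { return m_locked; }
private:
    CVPixelBufferRef m_buf;
    CVOptionFlags m_flags;
    bool m_locked;
};

With a guard like this, getImage() above could take the lock once at the top and return from any branch without leaking it.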
Example #3
void
dump_cvpixel_buffer (CVPixelBufferRef pixbuf)
{
  gsize left, right, top, bottom;

  GST_LOG ("buffer %p", pixbuf);
  if (CVPixelBufferLockBaseAddress (pixbuf, 0)) {
    GST_WARNING ("Couldn't lock base adress on pixel buffer !");
    return;
  }
  GST_LOG ("Width:%" G_GSIZE_FORMAT " , Height:%" G_GSIZE_FORMAT,
      CVPixelBufferGetWidth (pixbuf), CVPixelBufferGetHeight (pixbuf));
  GST_LOG ("Format:%" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (CVPixelBufferGetPixelFormatType (pixbuf)));
  GST_LOG ("base address:%p", CVPixelBufferGetBaseAddress (pixbuf));
  GST_LOG ("Bytes per row:%" G_GSIZE_FORMAT,
      CVPixelBufferGetBytesPerRow (pixbuf));
  GST_LOG ("Data Size:%" G_GSIZE_FORMAT, CVPixelBufferGetDataSize (pixbuf));
  GST_LOG ("Plane count:%" G_GSIZE_FORMAT, CVPixelBufferGetPlaneCount (pixbuf));
  CVPixelBufferGetExtendedPixels (pixbuf, &left, &right, &top, &bottom);
  GST_LOG ("Extended pixels. left/right/top/bottom : %" G_GSIZE_FORMAT
      "/%" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT "/%" G_GSIZE_FORMAT,
      left, right, top, bottom);
  CVPixelBufferUnlockBaseAddress (pixbuf, 0);
}
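Example #3 relies on GStreamer's GST_FOURCC_FORMAT/GST_FOURCC_ARGS macros to print the format code. Outside GStreamer the OSType four-character code can be unpacked by hand; a small illustrative helper (fourcc_to_string and log_pixel_format are made-up names):

#include <CoreVideo/CoreVideo.h>
#include <stdio.h>

// Unpack a big-endian four-character code such as '2vuy' into a
// printable, NUL-terminated string; non-printable bytes become '?'.
static void fourcc_to_string(OSType fmt, char out[5])
{
    for (int i = 0; i < 4; ++i) {
        char c = (char)((fmt >> (8 * (3 - i))) & 0xFF);
        out[i] = (c >= 32 && c < 127) ? c : '?';
    }
    out[4] = '\0';
}

static void log_pixel_format(CVPixelBufferRef pixbuf)
{
    char name[5];
    fourcc_to_string(CVPixelBufferGetPixelFormatType(pixbuf), name);
    printf("pixel format '%s'\n", name);
}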
Example #4
void VdaMixer::adjust(VideoFormatData *data, const mp_image *mpi) {
	Q_ASSERT(data->imgfmt == IMGFMT_VDA);
	auto buffer = (CVPixelBufferRef)mpi->planes[3];
	switch (CVPixelBufferGetPixelFormatType(buffer)) {
	case kCVPixelFormatType_422YpCbCr8:
		data->type = IMGFMT_UYVY;
		break;
	case kCVPixelFormatType_422YpCbCr8_yuvs:
		data->type = IMGFMT_YUYV;
		break;
	case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
		data->type = IMGFMT_NV12;
		break;
	case kCVPixelFormatType_420YpCbCr8Planar:
		data->type = IMGFMT_420P;
		break;
	default:
		_Error("Not supported format.");
		data->type = IMGFMT_NONE;
	}
	auto desc = mp_imgfmt_get_desc(data->type);
	if (CVPixelBufferIsPlanar(buffer)) {
		data->planes = CVPixelBufferGetPlaneCount(buffer);
		Q_ASSERT(data->planes == desc.num_planes);
		for (int i=0; i<data->planes; ++i) {
			data->alignedByteSize[i].rwidth() = CVPixelBufferGetBytesPerRowOfPlane(buffer, i);
			data->alignedByteSize[i].rheight() = CVPixelBufferGetHeightOfPlane(buffer, i);
			data->bpp += desc.bpp[i] >> (desc.xs[i] + desc.ys[i]);
		}
	} else {
Example #5
static int vda_retrieve_data(AVCodecContext *s, AVFrame *frame)
{
    InputStream *ist = s->opaque;
    VDAContext  *vda = ist->hwaccel_ctx;
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)frame->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    CVReturn err;
    uint8_t *data[4] = { 0 };
    int linesize[4] = { 0 };
    int planes, ret, i;

    av_frame_unref(vda->tmp_frame);

    switch (pixel_format) {
    case kCVPixelFormatType_420YpCbCr8Planar: vda->tmp_frame->format = AV_PIX_FMT_YUV420P; break;
    case kCVPixelFormatType_422YpCbCr8:       vda->tmp_frame->format = AV_PIX_FMT_UYVY422; break;
    default:
        av_log(NULL, AV_LOG_ERROR,
               "Unsupported pixel format: %u\n", pixel_format);
        return AVERROR(ENOSYS);
    }

    vda->tmp_frame->width  = frame->width;
    vda->tmp_frame->height = frame->height;
    ret = av_frame_get_buffer(vda->tmp_frame, 32);
    if (ret < 0)
        return ret;

    err = CVPixelBufferLockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (err != kCVReturnSuccess) {
        av_log(NULL, AV_LOG_ERROR, "Error locking the pixel buffer.\n");
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {

        planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        data[0] = CVPixelBufferGetBaseAddress(pixbuf);
        linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    av_image_copy(vda->tmp_frame->data, vda->tmp_frame->linesize,
                  (const uint8_t **)data, linesize, vda->tmp_frame->format,
                  frame->width, frame->height);

    ret = av_frame_copy_props(vda->tmp_frame, frame);
    CVPixelBufferUnlockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (ret < 0)
        return ret;

    av_frame_unref(frame);
    av_frame_move_ref(frame, vda->tmp_frame);

    return 0;
}
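av_image_copy() hides the detail that makes this copy correct: CVPixelBufferGetBytesPerRowOfPlane() may report a row pitch wider than the visible row, so each plane must be copied row by row rather than as one block. A minimal sketch of a single plane's copy under that assumption (illustrative, not FFmpeg code):

#include <stdint.h>
#include <string.h>

// Copy one plane row by row; src_stride and dst_stride are bytes per
// row and may both be padded wider than the row_bytes actually used.
static void copy_plane(uint8_t *dst, int dst_stride,
                       const uint8_t *src, int src_stride,
                       int row_bytes, int rows)
{
    for (int y = 0; y < rows; ++y) {
        memcpy(dst, src, row_bytes);
        dst += dst_stride;
        src += src_stride;
    }
}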
Example #6
/* Decoder callback that adds the vda frame to the queue in display order. */
static void vda_decoder_callback (void *vda_hw_ctx,
                                  CFDictionaryRef user_info,
                                  OSStatus status,
                                  uint32_t infoFlags,
                                  CVImageBufferRef image_buffer)
{
    struct vda_context *vda_ctx = vda_hw_ctx;

    if (!image_buffer)
        return;

    if (vda_ctx->cv_pix_fmt_type != CVPixelBufferGetPixelFormatType(image_buffer))
        return;

    if (vda_ctx->use_sync_decoding) {
        vda_ctx->cv_buffer = CVPixelBufferRetain(image_buffer);
    } else {
        vda_frame *new_frame;
        vda_frame *queue_walker;

        if (!(new_frame = av_mallocz(sizeof(*new_frame))))
            return;

        new_frame->next_frame = NULL;
        new_frame->cv_buffer = CVPixelBufferRetain(image_buffer);
        new_frame->pts = vda_pts_from_dictionary(user_info);

        pthread_mutex_lock(&vda_ctx->queue_mutex);

        queue_walker = vda_ctx->queue;

        if (!queue_walker || (new_frame->pts < queue_walker->pts)) {
            /* we have an empty queue, or this frame is earlier than the current queue head */
            new_frame->next_frame = queue_walker;
            vda_ctx->queue = new_frame;
        } else {
            /* walk the queue and insert this frame where it belongs in display order */
            vda_frame *next_frame;

            while (1) {
                next_frame = queue_walker->next_frame;

                if (!next_frame || (new_frame->pts < next_frame->pts)) {
                    new_frame->next_frame = next_frame;
                    queue_walker->next_frame = new_frame;
                    break;
                }
                queue_walker = next_frame;
            }
        }

        pthread_mutex_unlock(&vda_ctx->queue_mutex);
    }
}
Example #7
/* Decoder callback that stores the decoded VDA frame in the context. */
static void vda_decoder_callback (void *vda_hw_ctx, CFDictionaryRef /*user_info*/, OSStatus /*status*/, uint32_t /*infoFlags*/, CVImageBufferRef image_buffer) {
	vda_context *vda_ctx = (vda_context*)vda_hw_ctx;
	if (!image_buffer)
		return;
	const auto fmt = CVPixelBufferGetPixelFormatType(image_buffer);
	if (!_Contains(cvpixfmts, fmt)) {
		qDebug() << "vda: not supported format!!";
		return;
	}
	vda_ctx->cv_pix_fmt_type = fmt;
	vda_ctx->cv_buffer = CVPixelBufferRetain(image_buffer);
}
Example #8
/* Decoder callback that stores the decoded VDA frame in the context. */
static void vda_decoder_callback(void *vda_hw_ctx,
                                 CFDictionaryRef user_info,
                                 OSStatus status,
                                 uint32_t infoFlags,
                                 CVImageBufferRef image_buffer)
{
    struct vda_context *vda_ctx = vda_hw_ctx;

    if (!image_buffer)
        return;

    if (vda_ctx->cv_pix_fmt_type != CVPixelBufferGetPixelFormatType(image_buffer))
        return;

    vda_ctx->cv_buffer = CVPixelBufferRetain(image_buffer);
}
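Both callback variants call CVPixelBufferRetain() before stashing the buffer, because CoreVideo may recycle image_buffer as soon as the callback returns. Whoever later consumes cv_buffer has to drop that reference. A sketch of the consumer side, using a trimmed-down stand-in for the real context struct:

#include <CoreVideo/CoreVideo.h>

// Stand-in for the real vda_context; only the field this sketch
// touches is shown.
struct vda_context_sketch {
    CVPixelBufferRef cv_buffer;
};

static void consume_stashed_frame(struct vda_context_sketch *ctx)
{
    CVPixelBufferRef buf = ctx->cv_buffer;
    if (!buf)
        return;
    ctx->cv_buffer = NULL;
    /* ... lock, read or copy the pixels here ... */
    CVPixelBufferRelease(buf); /* balances the retain in the callback */
}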
Example #9
static void
VDADecoderCallback (void *decompressionOutputRefCon, CFDictionaryRef frameInfo, OSStatus status, uint32_t infoFlags, CVImageBufferRef imageBuffer)
{
	MoonVDADecoder *decoder = (MoonVDADecoder *) decompressionOutputRefCon;
	VideoStream *vs = (VideoStream *) decoder->GetStream ();

	// FIXME: Is this always 1 thread? Can we optimize this?
	decoder->GetDeployment ()->RegisterThread ();

	Deployment::SetCurrent (decoder->GetDeployment ());

	if (imageBuffer == NULL) {
		return;
	}

	OSType format_type = CVPixelBufferGetPixelFormatType (imageBuffer);
	if (format_type != kCVPixelFormatType_422YpCbCr8) {
		g_warning ("Mismatched format in VDA");
		return;
	}

	MediaFrame *mf = (MediaFrame *) CFDictionaryGetValue (frameInfo, CFSTR ("MoonMediaFrame"));

	mf->AddState (MediaFrameVUY2);
	mf->FreeBuffer ();
	mf->SetBufLen (0);

	mf->srcSlideY = 0;
	mf->srcSlideH = vs->GetHeight ();

	mf->width = vs->GetWidth ();
	mf->height = vs->GetHeight ();

	CVPixelBufferLockBaseAddress (imageBuffer, 0);

	mf->data_stride [0] = (uint8_t *) CVPixelBufferGetBaseAddress (imageBuffer);
	mf->srcStride [0] = CVPixelBufferGetBytesPerRow (imageBuffer);

	mf->AddState (MediaFrameDecoded);

	mf->decoder_specific_data = imageBuffer;
	CVPixelBufferRetain (imageBuffer);

	decoder->ReportDecodeFrameCompleted (mf);

	mf->unref ();
}
Example #10
static void
_frame_decompressed(void *decompressionTrackingRefCon, OSStatus err,
                    ICMDecompressionTrackingFlags dtf,
                    CVPixelBufferRef pixelBuffer, TimeValue64 displayTime,
                    TimeValue64 displayDuration,
                    ICMValidTimeFlags validTimeFlags, void *reserved,
                    void *sourceFrameRefCon)
{
    dbg_printf("[ vOE]  >> [%08lx] :: _frame_decompressed()\n", (UInt32) -1);
    if (!err) {
        StreamInfoPtr si = (StreamInfoPtr) decompressionTrackingRefCon;
        if (dtf & kICMDecompressionTracking_ReleaseSourceData) {
            // if we were responsible for managing source data buffers,
            //  we should release the source buffer here,
            //  using sourceFrameRefCon to identify it.
        }

        if ((dtf & kICMDecompressionTracking_EmittingFrame) && pixelBuffer) {
            ICMCompressionFrameOptionsRef frameOptions = NULL;
            OSType pf = CVPixelBufferGetPixelFormatType(pixelBuffer);

            dbg_printf("[ vOE]   > [%08lx] :: _frame_decompressed() = %ld; %ld,"
                       " %lld, %lld, %ld [%ld '%4.4s' (%ld x %ld)]\n",
                       (UInt32) -1, err,
                       dtf, displayTime, displayDuration, validTimeFlags,
                       CVPixelBufferGetDataSize(pixelBuffer), (char *) &pf,
                       CVPixelBufferGetWidth(pixelBuffer),
                       CVPixelBufferGetHeight(pixelBuffer));

            displayDuration = 25; //?

            // Feed the frame to the compression session.
            err = ICMCompressionSessionEncodeFrame(si->si_v.cs, pixelBuffer,
                                                   displayTime, displayDuration,
                                                   validTimeFlags, frameOptions,
                                                   NULL, NULL );
        }
    }

    dbg_printf("[ vOE] <   [%08lx] :: _frame_decompressed() = %ld\n",
               (UInt32) -1, err);
}
Example #11
Surface8uRef convertCVPixelBufferToSurface( CVPixelBufferRef pixelBufferRef )
{
	CVPixelBufferLockBaseAddress( pixelBufferRef, 0 );
	uint8_t *ptr = reinterpret_cast<uint8_t*>( CVPixelBufferGetBaseAddress( pixelBufferRef ) );
	int32_t rowBytes = CVPixelBufferGetBytesPerRow( pixelBufferRef );
	OSType type = CVPixelBufferGetPixelFormatType( pixelBufferRef );
	size_t width = CVPixelBufferGetWidth( pixelBufferRef );
	size_t height = CVPixelBufferGetHeight( pixelBufferRef );
	SurfaceChannelOrder sco;
	if( type == k24RGBPixelFormat )
		sco = SurfaceChannelOrder::RGB;
	else if( type == k32ARGBPixelFormat )
		sco = SurfaceChannelOrder::ARGB;
	else if( type == k24BGRPixelFormat )
		sco = SurfaceChannelOrder::BGR;
	else if( type == k32BGRAPixelFormat )
		sco = SurfaceChannelOrder::BGRA;
	Surface8u *newSurface = new Surface8u( ptr, width, height, rowBytes, sco );
	return Surface8uRef( newSurface, [=] ( Surface8u *s ) { ::CVBufferRelease( pixelBufferRef ); delete s; } );
}
Example #12
Surface8u convertCVPixelBufferToSurface( CVPixelBufferRef pixelBufferRef )
{
	CVPixelBufferLockBaseAddress( pixelBufferRef, 0 );
	uint8_t *ptr = reinterpret_cast<uint8_t*>( CVPixelBufferGetBaseAddress( pixelBufferRef ) );
	int32_t rowBytes = CVPixelBufferGetBytesPerRow( pixelBufferRef );
	OSType type = CVPixelBufferGetPixelFormatType( pixelBufferRef );
	size_t width = CVPixelBufferGetWidth( pixelBufferRef );
	size_t height = CVPixelBufferGetHeight( pixelBufferRef );
	SurfaceChannelOrder sco;
	if( type == k24RGBPixelFormat )
		sco = SurfaceChannelOrder::RGB;
	else if( type == k32ARGBPixelFormat )
		sco = SurfaceChannelOrder::ARGB;
	else if( type == k24BGRPixelFormat )
		sco = SurfaceChannelOrder::BGR;
	else if( type == k32BGRAPixelFormat )
		sco = SurfaceChannelOrder::BGRA;
	Surface result( ptr, width, height, rowBytes, sco );
	result.setDeallocator( CVPixelBufferDealloc, pixelBufferRef );
	return result;
}
Example #13
static gboolean
gst_video_info_init_from_pixel_buffer (GstVideoInfo * info,
    CVPixelBufferRef pixel_buf)
{
  size_t width, height;
  OSType format_type;
  GstVideoFormat video_format;

  width = CVPixelBufferGetWidth (pixel_buf);
  height = CVPixelBufferGetHeight (pixel_buf);
  format_type = CVPixelBufferGetPixelFormatType (pixel_buf);
  video_format = gst_core_media_buffer_get_video_format (format_type);

  if (video_format == GST_VIDEO_FORMAT_UNKNOWN) {
    return FALSE;
  }

  gst_video_info_init (info);
  gst_video_info_set_format (info, video_format, width, height);

  return TRUE;
}
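gst_core_media_buffer_get_video_format() performs the same OSType-to-native-format mapping that Examples #4 and #5 write out as switch statements. A table-driven illustrative variant (the names and the own_format values are made up):

#include <CoreVideo/CoreVideo.h>
#include <stddef.h>

typedef struct {
    OSType cv_format;
    int own_format; /* whatever enum the caller uses; 0 = unknown */
} FormatMapEntry;

static const FormatMapEntry format_map[] = {
    { kCVPixelFormatType_422YpCbCr8,                   1 /* UYVY */ },
    { kCVPixelFormatType_420YpCbCr8Planar,             2 /* I420 */ },
    { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, 3 /* NV12 */ },
};

static int map_cv_format(OSType fmt)
{
    for (size_t i = 0; i < sizeof(format_map) / sizeof(format_map[0]); ++i)
        if (format_map[i].cv_format == fmt)
            return format_map[i].own_format;
    return 0;
}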
Example #14
bool QTPixelBuffer::pixelFormatIs32BGRA() const
{
    return CVPixelBufferGetPixelFormatType(m_pixelBuffer) == k32BGRAPixelFormat;
}
Example #15
void jit_gl_hap_draw_frame(void *jitob, CVImageBufferRef frame)
{
	t_jit_gl_hap * x = (t_jit_gl_hap*)jitob;
	CFTypeID imageType = CFGetTypeID(frame);
	OSType newPixelFormat;

	if(x->validframe)
		return;
		
	if (imageType == CVPixelBufferGetTypeID()) {
		// Update the texture
		CVBufferRetain(frame);
		
		if(x->buffer) {
			CVPixelBufferUnlockBaseAddress(x->buffer, kCVPixelBufferLock_ReadOnly);
			CVBufferRelease(x->buffer);
		}
		
		x->buffer = frame;
		CVPixelBufferLockBaseAddress(x->buffer, kCVPixelBufferLock_ReadOnly);
		
		x->dim[0] = CVPixelBufferGetWidth(x->buffer);
		x->dim[1] = CVPixelBufferGetHeight(x->buffer);

		newPixelFormat = CVPixelBufferGetPixelFormatType(x->buffer);

		if(x->buffer && x->hap_format==JIT_GL_HAP_PF_HAP) {
			size_t extraRight, extraBottom;
			unsigned int bitsPerPixel;
			size_t bytesPerRow;
			size_t actualBufferSize;

			CVPixelBufferGetExtendedPixels(x->buffer, NULL, &extraRight, NULL, &extraBottom);
			x->roundedWidth = x->dim[0] + extraRight;
			x->roundedHeight = x->dim[1] + extraBottom;
			if (x->roundedWidth % 4 != 0 || x->roundedHeight % 4 != 0) {
				x->validframe = 0;
				return;
			}			

			switch (newPixelFormat) {
				case kHapPixelFormatTypeRGB_DXT1:
					x->newInternalFormat = GL_COMPRESSED_RGB_S3TC_DXT1_EXT;
					bitsPerPixel = 4;
					break;
				case kHapPixelFormatTypeRGBA_DXT5:
				case kHapPixelFormatTypeYCoCg_DXT5:
					x->newInternalFormat = GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
					bitsPerPixel = 8;
					break;
				default:
					// we don't support non-DXT pixel buffers
					x->validframe = 0;
					return;
					break;
			}
			x->useshader = (newPixelFormat == kHapPixelFormatTypeYCoCg_DXT5);
			
			bytesPerRow = (x->roundedWidth * bitsPerPixel) / 8;
			x->newDataLength = bytesPerRow * x->roundedHeight; // usually not the full length of the buffer
			actualBufferSize = CVPixelBufferGetDataSize(x->buffer);
			
			// Check the buffer is as large as we expect it to be
			if (x->newDataLength > actualBufferSize) {
				x->validframe = 0;
				return;
			}

			// If we got this far we're good to go
			x->validframe = 1;
			x->target = GL_TEXTURE_2D;
			if(!x->flipped) {
				jit_attr_setlong(x->texoutput, gensym("flip"), 1);
				x->flipped = 1;
			}
			//x->drawhap = 1;
		}
		else if(x->buffer) {
			if( newPixelFormat == k24RGBPixelFormat )
				x->newInternalFormat = GL_RGB8;
			else if( newPixelFormat == k32BGRAPixelFormat )
				x->newInternalFormat = GL_RGBA8;
			else {
				x->validframe = 0;
				return;
			}

			x->roundedWidth = x->dim[0];
			x->roundedHeight = x->dim[1];
			x->newDataLength = CVPixelBufferGetDataSize(x->buffer);
			x->rowLength = CVPixelBufferGetBytesPerRow( x->buffer ) / (x->hap_format==JIT_GL_HAP_PF_RGB ? 3 : 4);
			x->target = GL_TEXTURE_RECTANGLE_EXT;
			
			if(!x->flipped) {
				jit_attr_setlong(x->texoutput, gensym("flip"), 1);
				x->flipped = 1;
			}
			x->validframe = 1;
		}
    }
	else {
#ifdef MAC_VERSION
		CGSize imageSize = CVImageBufferGetEncodedSize(frame);
		bool flipped = CVOpenGLTextureIsFlipped(frame);
		x->texture = CVOpenGLTextureGetName(frame);
		x->useshader = 0;
		x->dim[0] = (t_atom_long)imageSize.width;
		x->dim[1] = (t_atom_long)imageSize.height;
		x->validframe = 1;
		x->target = GL_TEXTURE_RECTANGLE_ARB;
		if(x->flipped!=flipped) {
			jit_attr_setlong(x->texoutput, gensym("flip"), flipped);
			x->flipped = flipped;			
		}
#endif
	}
}
Example #16
static GF_Err VTBDec_ProcessData(GF_MediaDecoder *ifcg,
                               char *inBuffer, u32 inBufferLength,
                               u16 ES_ID, u32 *CTS,
                               char *outBuffer, u32 *outBufferLength,
                               u8 PaddingBits, u32 mmlevel)
{
    OSStatus status;
    CMSampleBufferRef sample = NULL;
    CMBlockBufferRef block_buffer = NULL;
	OSType type;
	char *in_data;
	u32 in_data_size;
	
	GF_Err e;
	VTBDec *ctx = (VTBDec *)ifcg->privateStack;
	
	if (ctx->skip_mpeg4_vosh) {
		GF_M4VDecSpecInfo dsi;
		dsi.width = dsi.height = 0;
		e = gf_m4v_get_config(inBuffer, inBufferLength, &dsi);
		//found a vosh - remove it from payload, init decoder if needed
		if ((e==GF_OK) && dsi.width && dsi.height) {
			if (!ctx->vtb_session) {
				ctx->vosh = inBuffer;
				ctx->vosh_size = dsi.next_object_start;
				e = VTBDec_InitDecoder(ctx, GF_FALSE);
				if (e) return e;

				//enforce removal for all frames
				ctx->skip_mpeg4_vosh = GF_TRUE;
				
				if (ctx->out_size != *outBufferLength) {
					*outBufferLength = ctx->out_size;
					return GF_BUFFER_TOO_SMALL;
				}
			}
			ctx->vosh_size = dsi.next_object_start;
		} else if (!ctx->vtb_session) {
			*outBufferLength=0;
			return GF_OK;
		}
	}

	if (ctx->init_mpeg12) {
		GF_M4VDecSpecInfo dsi;
		dsi.width = dsi.height = 0;
		
		e = gf_mpegv12_get_config(inBuffer, inBufferLength, &dsi);
		if ((e==GF_OK) && dsi.width && dsi.height) {
			ctx->width = dsi.width;
			ctx->height = dsi.height;
			ctx->pixel_ar = dsi.par_num;
			ctx->pixel_ar <<= 16;
			ctx->pixel_ar |= dsi.par_den;
			
			e = VTBDec_InitDecoder(ctx, GF_FALSE);
			if (e) return e;

			if (ctx->out_size != *outBufferLength) {
				*outBufferLength = ctx->out_size;
				return GF_BUFFER_TOO_SMALL;
			}
		}

		if (!ctx->vtb_session) {
			*outBufferLength=0;
			return GF_OK;
		}
	}

	if (ctx->is_annex_b || (!ctx->vtb_session && ctx->nalu_size_length) ) {
		if (ctx->cached_annex_b) {
			in_data = ctx->cached_annex_b;
			in_data_size = ctx->cached_annex_b_size;
			ctx->cached_annex_b = NULL;
		} else {
			e = VTB_RewriteNALs(ctx, inBuffer, inBufferLength, &in_data, &in_data_size);
			if (e) return e;
		}
		
		if (ctx->out_size != *outBufferLength) {
			*outBufferLength = ctx->out_size;
			ctx->cached_annex_b = in_data;
			ctx->cached_annex_b_size = in_data_size;

			return GF_BUFFER_TOO_SMALL;
		}
	} else if (ctx->vosh_size) {
		in_data = inBuffer + ctx->vosh_size;
		in_data_size = inBufferLength - ctx->vosh_size;
		ctx->vosh_size = 0;
	} else {
		in_data = inBuffer;
		in_data_size = inBufferLength;
	}
	
	if (!ctx->vtb_session) {
		*outBufferLength=0;
		return GF_OK;
	}
	

	status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, in_data, in_data_size, kCFAllocatorNull, NULL, 0, in_data_size, 0, &block_buffer);

	if (status) {
		return GF_IO_ERR;
	}

	*outBufferLength=0;
	if (block_buffer == NULL)
		return GF_OK;
		
	
	status = CMSampleBufferCreate(kCFAllocatorDefault, block_buffer, TRUE, NULL, NULL, ctx->fmt_desc, 1, 0, NULL, 0, NULL, &sample);

    if (status || (sample==NULL)) {
		if (block_buffer)
			CFRelease(block_buffer);
		return GF_IO_ERR;
	}
	ctx->last_error = GF_OK;
    status = VTDecompressionSessionDecodeFrame(ctx->vtb_session, sample, 0, NULL, 0);
    if (!status)
		status = VTDecompressionSessionWaitForAsynchronousFrames(ctx->vtb_session);
	

	CFRelease(block_buffer);
	CFRelease(sample);
	if (ctx->cached_annex_b)
		gf_free(in_data);
	
	if (ctx->last_error) return ctx->last_error;
	if (status) return GF_NON_COMPLIANT_BITSTREAM;
	
	if (!ctx->frame) {
		*outBufferLength=0;
		return ctx->last_error;
	}
	
	*outBufferLength = ctx->out_size;
	
	status = CVPixelBufferLockBaseAddress(ctx->frame, kCVPixelBufferLock_ReadOnly);
    if (status != kCVReturnSuccess) {
        GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[VTB] Error locking frame data\n"));
        return GF_IO_ERR;
    }
	type = CVPixelBufferGetPixelFormatType(ctx->frame);
	
    if (CVPixelBufferIsPlanar(ctx->frame)) {
        u32 i, j, nb_planes = (u32) CVPixelBufferGetPlaneCount(ctx->frame);
		char *dst = outBuffer;
		Bool needs_stride=GF_FALSE;
		if ((type==kCVPixelFormatType_420YpCbCr8Planar)
			|| (type==kCVPixelFormatType_420YpCbCr8PlanarFullRange)
			|| (type==kCVPixelFormatType_422YpCbCr8_yuvs)
			|| (type==kCVPixelFormatType_444YpCbCr8)
			|| (type=='444v')
		) {
			u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 0);
			
			//TOCHECK - for now the 3 planes are consecutive in VideoToolbox
			if (stride==ctx->width) {
				char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 0);
				memcpy(dst, data, sizeof(char)*ctx->out_size);
			} else {
				for (i=0; i<nb_planes; i++) {
					char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, i);
					u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, i);
					u32 w, h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, i);
					w = ctx->width;
					if (i) {
						switch (ctx->pix_fmt) {
						case GF_PIXEL_YUV444:
							break;
						case GF_PIXEL_YUV422:
						case GF_PIXEL_YV12:
							w /= 2;
							break;
						}
					}
					if (stride != w) {
						needs_stride=GF_TRUE;
						for (j=0; j<h; j++) {
							memcpy(dst, data, sizeof(char)*w);
							dst += w;
							data += stride;
						}
					} else {
						memcpy(dst, data, sizeof(char)*h*stride);
						dst += sizeof(char)*h*stride;
					}
				}
			}
        } else if ((type==kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) || (type==kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) {
			char *dst_v;
			char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 0);
			u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 0);
			u32 i, h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, 0);

			if (stride==ctx->width) {
				memcpy(dst, data, sizeof(char)*h*stride);
				dst += sizeof(char)*h*stride;
			} else {
				for (i=0; i<h; i++) {
					memcpy(dst, data, sizeof(char)*ctx->width);
					dst += ctx->width;
					data += stride;
				}
				needs_stride=GF_TRUE;
			}
			
			data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 1);
			stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 1);
			h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, 1);
			dst_v = dst+sizeof(char) * h*stride/2;

			for (i=0; i<ctx->width * h / 2; i++) {
				*dst = data[0];
				*dst_v = data[1];
				data += 2;
				dst_v++;
				dst++;
				
				if (!(i%ctx->width)) data += (stride - ctx->width);

			}

		}
    }

    CVPixelBufferUnlockBaseAddress(ctx->frame, kCVPixelBufferLock_ReadOnly);

	return GF_OK;
}
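The biplanar branch above de-interleaves the NV12 chroma plane (interleaved Cb/Cr byte pairs) into separate U and V planes one pair at a time. The same operation in isolation, as a compact illustrative helper (assumes even dimensions and tightly packed destination planes):

#include <stdint.h>

// Split an interleaved CbCr plane (NV12 layout) into planar U and V.
// width is the luma width; chroma_h is the chroma plane height.
static void split_nv12_chroma(uint8_t *dst_u, uint8_t *dst_v,
                              const uint8_t *src, int src_stride,
                              int width, int chroma_h)
{
    for (int y = 0; y < chroma_h; ++y) {
        const uint8_t *row = src + y * src_stride;
        for (int x = 0; x < width / 2; ++x) {
            dst_u[x] = row[2 * x];     /* Cb */
            dst_v[x] = row[2 * x + 1]; /* Cr */
        }
        dst_u += width / 2;
        dst_v += width / 2;
    }
}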
Example #17
unsigned long QTPixelBuffer::pixelFormatType() const
{
    return CVPixelBufferGetPixelFormatType(m_pixelBuffer);
}
Example #18
void CDVDVideoCodecVDA::VDADecoderCallback(
  void               *decompressionOutputRefCon,
  CFDictionaryRef     frameInfo,
  OSStatus            status,
  uint32_t            infoFlags,
  CVImageBufferRef    imageBuffer)
{
  CCocoaAutoPool pool;
  // Warning, this is an async callback. There can be multiple frames in flight.
  CDVDVideoCodecVDA *ctx = (CDVDVideoCodecVDA*)decompressionOutputRefCon;

  if (imageBuffer == NULL)
  {
    //CLog::Log(LOGDEBUG, "%s - imageBuffer is NULL", __FUNCTION__);
    return;
  }
  OSType format_type = CVPixelBufferGetPixelFormatType(imageBuffer);
  if ((format_type != kCVPixelFormatType_422YpCbCr8) && (format_type != kCVPixelFormatType_32BGRA) )
  {
    CLog::Log(LOGERROR, "%s - imageBuffer format is not '2vuy' or 'BGRA', is reporting 0x%x",
      __FUNCTION__, format_type);
    return;
  }
  if (kVDADecodeInfo_FrameDropped & infoFlags)
  {
    CLog::Log(LOGDEBUG, "%s - frame dropped", __FUNCTION__);
    return;
  }

  // allocate a new frame and populate it with some information.
  // this pointer to a frame_queue type keeps track of the newest decompressed frame
  // and is then inserted into a linked list of frame pointers depending on the display time
  // parsed out of the bitstream and stored in the frameInfo dictionary by the client
  frame_queue *newFrame = (frame_queue*)calloc(1, sizeof(frame_queue));
  newFrame->nextframe = NULL;
  newFrame->pixel_buffer_format = format_type;
  newFrame->pixel_buffer_ref = CVPixelBufferRetain(imageBuffer);
  GetFrameDisplayTimeFromDictionary(frameInfo, newFrame);

  // if both dts or pts are good we use those, else use decoder insert time for frame sort
  if ((newFrame->pts != DVD_NOPTS_VALUE) || (newFrame->dts != DVD_NOPTS_VALUE))
  {
    // if pts is borked (stupid avi's), use dts for frame sort
    if (newFrame->pts == DVD_NOPTS_VALUE)
      newFrame->sort_time = newFrame->dts;
    else
      newFrame->sort_time = newFrame->pts;
  }

  // since the frames we get may be in decode order rather than presentation order
  // our hypothetical callback places them in a queue of frames which will
  // hold them in display order for display on another thread
  pthread_mutex_lock(&ctx->m_queue_mutex);
  //
  frame_queue *queueWalker = ctx->m_display_queue;
  if (!queueWalker || (newFrame->sort_time < queueWalker->sort_time))
  {
    // we have an empty queue, or this frame earlier than the current queue head.
    newFrame->nextframe = queueWalker;
    ctx->m_display_queue = newFrame;
  } else {
    // walk the queue and insert this frame where it belongs in display order.
    bool frameInserted = false;
    frame_queue *nextFrame = NULL;
    //
    while (!frameInserted)
    {
      nextFrame = queueWalker->nextframe;
      if (!nextFrame || (newFrame->sort_time < nextFrame->sort_time))
      {
        // if the next frame is the tail of the queue, or our new frame is earlier.
        newFrame->nextframe = nextFrame;
        queueWalker->nextframe = newFrame;
        frameInserted = true;
      }
      queueWalker = nextFrame;
    }
  }
  ctx->m_queue_depth++;
  //
  pthread_mutex_unlock(&ctx->m_queue_mutex);	
}
Example #19
static int videotoolbox_retrieve_data(AVCodecContext *s, AVFrame *frame)
{
    InputStream *ist = s->opaque;
    VTContext  *vt = ist->hwaccel_ctx;
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)frame->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    CVReturn err;
    uint8_t *data[4] = { 0 };
    int linesize[4] = { 0 };
    int planes, ret, i;
    char codec_str[32];

    av_frame_unref(vt->tmp_frame);

    switch (pixel_format) {
    case kCVPixelFormatType_420YpCbCr8Planar: vt->tmp_frame->format = AV_PIX_FMT_YUV420P; break;
    case kCVPixelFormatType_422YpCbCr8:       vt->tmp_frame->format = AV_PIX_FMT_UYVY422; break;
    case kCVPixelFormatType_32BGRA:           vt->tmp_frame->format = AV_PIX_FMT_BGRA; break;
#ifdef kCFCoreFoundationVersionNumber10_7
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: vt->tmp_frame->format = AV_PIX_FMT_NV12; break;
#endif
    default:
        av_get_codec_tag_string(codec_str, sizeof(codec_str), s->codec_tag);
        av_log(NULL, AV_LOG_ERROR,
               "%s: Unsupported pixel format: %s\n", codec_str, videotoolbox_pixfmt);
        return AVERROR(ENOSYS);
    }

    vt->tmp_frame->width  = frame->width;
    vt->tmp_frame->height = frame->height;
    ret = av_frame_get_buffer(vt->tmp_frame, 32);
    if (ret < 0)
        return ret;

    err = CVPixelBufferLockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (err != kCVReturnSuccess) {
        av_log(NULL, AV_LOG_ERROR, "Error locking the pixel buffer.\n");
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {

        planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        data[0] = CVPixelBufferGetBaseAddress(pixbuf);
        linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    av_image_copy(vt->tmp_frame->data, vt->tmp_frame->linesize,
                  (const uint8_t **)data, linesize, vt->tmp_frame->format,
                  frame->width, frame->height);

    ret = av_frame_copy_props(vt->tmp_frame, frame);
    CVPixelBufferUnlockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (ret < 0)
        return ret;

    av_frame_unref(frame);
    av_frame_move_ref(frame, vt->tmp_frame);

    return 0;
}
Example #20
void CDVDVideoCodecVDA::VDADecoderCallback(
  void              *decompressionOutputRefCon,
  CFDictionaryRef    frameInfo,
  OSStatus           status,
  uint32_t           infoFlags,
  CVImageBufferRef   imageBuffer)
{
  CCocoaAutoPool pool;
  // Warning, this is an async callback. There can be multiple frames in flight.
  CDVDVideoCodecVDA *ctx = (CDVDVideoCodecVDA*)decompressionOutputRefCon;

  if (imageBuffer == NULL)
  {
    //CLog::Log(LOGDEBUG, "%s - imageBuffer is NULL", __FUNCTION__);
    return;
  }
  OSType format_type = CVPixelBufferGetPixelFormatType(imageBuffer);
  if ((format_type != kCVPixelFormatType_422YpCbCr8) &&
      (format_type != kCVPixelFormatType_32BGRA) )
  {
    CLog::Log(LOGERROR, "%s - imageBuffer format is not '2vuy' or 'BGRA', is reporting 0x%x",
      __FUNCTION__, (unsigned int)format_type);
    return;
  }
  if (kVDADecodeInfo_FrameDropped & infoFlags)
  {
    if (g_advancedSettings.CanLogComponent(LOGVIDEO))
      CLog::Log(LOGDEBUG, "%s - frame dropped", __FUNCTION__);
    return;
  }

  // allocate a new frame and populate it with some information.
  // this pointer to a frame_queue type keeps track of the newest decompressed frame
  // and is then inserted into a linked list of frame pointers depending on the display time
  // parsed out of the bitstream and stored in the frameInfo dictionary by the client
  frame_queue *newFrame = (frame_queue*)calloc(1, sizeof(frame_queue));
  newFrame->nextframe = NULL;
  newFrame->pixel_buffer_format = format_type;
  newFrame->pixel_buffer_ref = CVBufferRetain(imageBuffer);
  GetFrameDisplayTimeFromDictionary(frameInfo, newFrame);

  // since the frames we get may be in decode order rather than presentation order
  // our hypothetical callback places them in a queue of frames which will
  // hold them in display order for display on another thread
  pthread_mutex_lock(&ctx->m_queue_mutex);

  frame_queue base;
  base.nextframe = ctx->m_display_queue;
  frame_queue *ptr = &base;
  for(; ptr->nextframe; ptr = ptr->nextframe)
  {
    if(ptr->nextframe->pts == DVD_NOPTS_VALUE
    || newFrame->pts       == DVD_NOPTS_VALUE)
      continue;
    if(ptr->nextframe->pts > newFrame->pts)
      break;
  }
  /* insert after ptr */
  newFrame->nextframe = ptr->nextframe;
  ptr->nextframe = newFrame;

  /* update anchor if needed */
  if(newFrame->nextframe == ctx->m_display_queue)
    ctx->m_display_queue = newFrame;

  ctx->m_queue_depth++;
  //
  pthread_mutex_unlock(&ctx->m_queue_mutex);	
}
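Compared with Example #18, this version threads a stack-allocated sentinel node ("base") in front of the list, so the empty-queue and insert-at-head cases need no special handling. The same trick in isolation, with illustrative types:

#include <stdint.h>

struct node {
    struct node *next;
    int64_t pts;
};

// Insert n into a pts-ordered singly linked list; the sentinel head
// removes the special case of inserting before the current head.
static struct node *insert_sorted(struct node *head, struct node *n)
{
    struct node base = { head, 0 };
    struct node *p = &base;
    while (p->next && p->next->pts <= n->pts)
        p = p->next;
    n->next = p->next;
    p->next = n;
    return base.next; /* new head (may be n itself) */
}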
Example #21
void PrivateDecoderVDA::VDADecoderCallback(void *decompressionOutputRefCon,
                                           CFDictionaryRef frameInfo,
                                           OSStatus status,
                                           uint32_t infoFlags,
                                           CVImageBufferRef imageBuffer)
{
    CocoaAutoReleasePool pool;
    PrivateDecoderVDA *decoder = (PrivateDecoderVDA*)decompressionOutputRefCon;

    if (kVDADecodeInfo_FrameDropped & infoFlags)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Callback: Decoder dropped frame");
        return;
    }

    if (!imageBuffer)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC +
            "Callback: decoder returned empty buffer.");
        return;
    }

    INIT_ST;
    vda_st = status;
    CHECK_ST;

    OSType format_type = CVPixelBufferGetPixelFormatType(imageBuffer);
    if ((format_type != '2vuy') && (format_type != 'BGRA'))
    {
        LOG(VB_GENERAL, LOG_ERR, LOC +
            QString("Callback: image buffer format unknown (%1)")
                .arg(format_type));
        return;
    }

    int64_t pts = AV_NOPTS_VALUE;
    int8_t interlaced = 0;
    int8_t topfirst   = 0;
    int8_t repeatpic  = 0;
    CFNumberRef ptsref = (CFNumberRef)CFDictionaryGetValue(frameInfo,
                                                   CFSTR("FRAME_PTS"));
    CFNumberRef intref = (CFNumberRef)CFDictionaryGetValue(frameInfo,
                                                   CFSTR("FRAME_INTERLACED"));
    CFNumberRef topref = (CFNumberRef)CFDictionaryGetValue(frameInfo,
                                                   CFSTR("FRAME_TFF"));
    CFNumberRef repref = (CFNumberRef)CFDictionaryGetValue(frameInfo,
                                                   CFSTR("FRAME_REPEAT"));

    if (ptsref)
    {
        CFNumberGetValue(ptsref, kCFNumberSInt64Type, &pts);
        CFRelease(ptsref);
    }
    if (intref)
    {
        CFNumberGetValue(intref, kCFNumberSInt8Type, &interlaced);
        CFRelease(intref);
    }
    if (topref)
    {
        CFNumberGetValue(topref, kCFNumberSInt8Type, &topfirst);
        CFRelease(topref);
    }
    if (repref)
    {
        CFNumberGetValue(repref, kCFNumberSInt8Type, &repeatpic);
        CFRelease(repref);
    }

    int64_t time =  (pts != (int64_t)AV_NOPTS_VALUE) ? pts : 0;
    {
        QMutexLocker lock(&decoder->m_frame_lock);
        bool found = false;
        int i = 0;
        for (; i < decoder->m_decoded_frames.size(); i++)
        {
            int64_t queued_pts = decoder->m_decoded_frames[i].pts;
            if (queued_pts != (int64_t)AV_NOPTS_VALUE && time > queued_pts)
            {
                found = true;
                break;
            }
        }

        VDAFrame frame(CVPixelBufferRetain(imageBuffer), format_type,
                       pts, interlaced, topfirst, repeatpic);
        if (!found)
            i = decoder->m_decoded_frames.size();
        decoder->m_decoded_frames.insert(i, frame);
        decoder->m_frames_decoded++;
    }
}