/* Decode one packet with the stock software H.264 decoder, then, if a frame
 * came out, redirect its data/linesize pointers into the hardware
 * CVPixelBuffer that the hwaccel stored in pic->data[3].
 *
 * set_context()/restore_context() swap decoder state around the call so the
 * shared ff_h264_decoder runs with this wrapper's configuration.
 *
 * Returns whatever ff_h264_decoder.decode() returned (bytes consumed or a
 * negative AVERROR). */
static int vdadec_decode(AVCodecContext *avctx,
                         void *data, int *got_frame, AVPacket *avpkt)
{
    VDADecoderContext *ctx = avctx->priv_data;
    AVFrame *pic = data;
    int ret;

    set_context(avctx);
    ret = ff_h264_decoder.decode(avctx, data, got_frame, avpkt);
    restore_context(avctx);
    if (*got_frame) {
        AVBufferRef *buffer = pic->buf[0];
        VDABufferContext *context = av_buffer_get_opaque(buffer);
        CVPixelBufferRef cv_buffer = (CVPixelBufferRef)pic->data[3];

        /* Retain + lock the pixel buffer so its base addresses stay valid
         * while the caller reads the frame.
         * NOTE(review): no matching unlock/release appears in this function;
         * presumably the buffer's free callback (not visible here) unlocks
         * and releases via context->cv_buffer — confirm against the rest of
         * the file. */
        CVPixelBufferRetain(cv_buffer);
        CVPixelBufferLockBaseAddress(cv_buffer, 0);
        context->cv_buffer = cv_buffer;
        pic->format = ctx->pix_fmt;
        if (CVPixelBufferIsPlanar(cv_buffer)) {
            int i, count = CVPixelBufferGetPlaneCount(cv_buffer);
            /* pic->data/linesize have limited slots; guard the plane count. */
            av_assert0(count < 4);
            for (i = 0; i < count; i++) {
                pic->data[i] = CVPixelBufferGetBaseAddressOfPlane(cv_buffer, i);
                pic->linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(cv_buffer, i);
            }
        } else {
            /* Packed (chunked) buffer: single plane. */
            pic->data[0] = CVPixelBufferGetBaseAddress(cv_buffer);
            pic->linesize[0] = CVPixelBufferGetBytesPerRow(cv_buffer);
        }
    }
    /* Advertise the hwaccel pixel format on the codec context. */
    avctx->pix_fmt = ctx->pix_fmt;
    return ret;
}
/* GstMemory map vfunc: lock the wrapped CVPixelBuffer and return the base
 * address of this memory's plane (or of the whole buffer when non-planar).
 *
 * Returns the mapped address, or NULL when locking fails or Core Video
 * reports no base address.
 *
 * Fix: the non-planar error message had a stray G_GSIZE_FORMAT concatenated
 * after the format string with no matching argument, which appended garbage
 * conversion characters to the logged text. */
static gpointer
gst_apple_core_video_mem_map (GstMemory * gmem, gsize maxsize,
    GstMapFlags flags)
{
  GstAppleCoreVideoMemory *mem = (GstAppleCoreVideoMemory *) gmem;
  gpointer ret;

  /* Lock (possibly read-only, depending on flags) before touching addresses. */
  if (!gst_apple_core_video_pixel_buffer_lock (mem->gpixbuf, flags))
    return NULL;

  if (CVPixelBufferIsPlanar (mem->gpixbuf->buf)) {
    ret = CVPixelBufferGetBaseAddressOfPlane (mem->gpixbuf->buf, mem->plane);
    if (ret != NULL)
      GST_DEBUG ("%p: pixbuf %p plane %" G_GSIZE_FORMAT
          " flags %08x: mapped %p", mem, mem->gpixbuf->buf, mem->plane, flags,
          ret);
    else
      GST_ERROR ("%p: invalid plane base address (NULL) for pixbuf %p plane %"
          G_GSIZE_FORMAT, mem, mem->gpixbuf->buf, mem->plane);
  } else {
    ret = CVPixelBufferGetBaseAddress (mem->gpixbuf->buf);
    if (ret != NULL)
      GST_DEBUG ("%p: pixbuf %p flags %08x: mapped %p", mem,
          mem->gpixbuf->buf, flags, ret);
    else
      GST_ERROR ("%p: invalid base address (NULL) for pixbuf %p", mem,
          mem->gpixbuf->buf);
  }

  return ret;
}
/* Copy a VDA-decoded frame out of its CVPixelBuffer (frame->data[3]) into a
 * regular software AVFrame, replacing the hardware frame in place.
 *
 * Returns 0 on success or a negative AVERROR code.
 *
 * Fix: the pixel buffer was locked with CVPixelBufferLockBaseAddress but
 * never unlocked on any path, leaking the lock on every retrieved frame.
 * The unlock is now performed as soon as the copy is done (compare the
 * videotoolbox variant of this function, which unlocks). Also adds the
 * (const uint8_t **) cast av_image_copy expects. */
static int vda_retrieve_data(AVCodecContext *s, AVFrame *frame)
{
    InputStream *ist = s->opaque;
    VDAContext  *vda = ist->hwaccel_ctx;
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)frame->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    CVReturn err;
    uint8_t *data[4]  = { 0 };
    int linesize[4]   = { 0 };
    int planes, ret, i;

    av_frame_unref(vda->tmp_frame);

    /* Map the Core Video pixel format onto an FFmpeg pixel format. */
    switch (pixel_format) {
    case kCVPixelFormatType_420YpCbCr8Planar:
        vda->tmp_frame->format = AV_PIX_FMT_YUV420P;
        break;
    case kCVPixelFormatType_422YpCbCr8:
        vda->tmp_frame->format = AV_PIX_FMT_UYVY422;
        break;
    default:
        av_log(NULL, AV_LOG_ERROR,
               "Unsupported pixel format: %u\n", pixel_format);
        return AVERROR(ENOSYS);
    }

    vda->tmp_frame->width  = frame->width;
    vda->tmp_frame->height = frame->height;
    ret = av_frame_get_buffer(vda->tmp_frame, 32);
    if (ret < 0)
        return ret;

    /* Base addresses are only valid while the buffer is locked. */
    err = CVPixelBufferLockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (err != kCVReturnSuccess) {
        av_log(NULL, AV_LOG_ERROR, "Error locking the pixel buffer.\n");
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {
        planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        data[0]     = CVPixelBufferGetBaseAddress(pixbuf);
        linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    av_image_copy(vda->tmp_frame->data, vda->tmp_frame->linesize,
                  (const uint8_t **)data, linesize, vda->tmp_frame->format,
                  frame->width, frame->height);

    ret = av_frame_copy_props(vda->tmp_frame, frame);

    /* Balance the lock taken above before any return. */
    CVPixelBufferUnlockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);

    if (ret < 0)
        return ret;

    /* Replace the hardware frame with the software copy. */
    av_frame_unref(frame);
    av_frame_move_ref(frame, vda->tmp_frame);

    return 0;
}
/* Wrap a CVPixelBuffer in a GstBuffer without copying: each plane's base
 * address is exposed as a GstMemory, and a GstCoreVideoMeta keeps the
 * underlying Core Video buffer alive (and is responsible for freeing it).
 *
 * vinfo may be NULL; a GstVideoMeta is only attached when it is provided.
 *
 * Returns the new GstBuffer, or NULL on error.
 *
 * Fixes: the non-planar branch dereferenced vinfo->height BEFORE the
 * `if (vinfo)` NULL check below, so a NULL vinfo crashed; it now falls back
 * to the buffer's own height. Also removes the unused `video_meta` local
 * and uses an unsigned loop index to match n_planes. */
GstBuffer *
gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo)
{
  CVPixelBufferRef pixbuf = NULL;
  GstBuffer *buf;
  GstCoreVideoMeta *meta;
  guint n_planes;
  gsize offset[GST_VIDEO_MAX_PLANES];
  gint stride[GST_VIDEO_MAX_PLANES];

  if (CFGetTypeID (cvbuf) != CVPixelBufferGetTypeID ())
    /* TODO: Do we need to handle other buffer types? */
    goto error;

  pixbuf = (CVPixelBufferRef) cvbuf;

  /* Base addresses are only valid while the buffer is locked. */
  if (CVPixelBufferLockBaseAddress (pixbuf,
          kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
    goto error;
  }

  buf = gst_buffer_new ();

  /* add the corevideo meta to free the underlying corevideo buffer */
  meta = (GstCoreVideoMeta *) gst_buffer_add_meta (buf,
      gst_core_video_meta_get_info (), NULL);
  meta->cvbuf = CVBufferRetain (cvbuf);
  meta->pixbuf = pixbuf;

  /* set stride, offset and size */
  memset (&offset, 0, sizeof (offset));
  memset (&stride, 0, sizeof (stride));

  if (CVPixelBufferIsPlanar (pixbuf)) {
    guint i;
    gsize size, off = 0;

    n_planes = CVPixelBufferGetPlaneCount (pixbuf);
    for (i = 0; i < n_planes; ++i) {
      stride[i] = CVPixelBufferGetBytesPerRowOfPlane (pixbuf, i);
      size = stride[i] * CVPixelBufferGetHeightOfPlane (pixbuf, i);
      offset[i] = off;
      off += size;

      gst_buffer_append_memory (buf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              CVPixelBufferGetBaseAddressOfPlane (pixbuf, i), size, 0, size,
              NULL, NULL));
    }
  } else {
    gsize size;

    n_planes = 1;
    stride[0] = CVPixelBufferGetBytesPerRow (pixbuf);
    offset[0] = 0;
    /* Keep vinfo->height when a vinfo was supplied (previous behavior);
     * otherwise use the pixel buffer's own height instead of crashing. */
    size = stride[0] *
        (vinfo ? (gsize) vinfo->height : CVPixelBufferGetHeight (pixbuf));

    gst_buffer_append_memory (buf,
        gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
            CVPixelBufferGetBaseAddress (pixbuf), size, 0, size, NULL, NULL));
  }

  if (vinfo) {
    gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
        vinfo->finfo->format, vinfo->width, vinfo->height, n_planes, offset,
        stride);
  }

  return buf;

error:
  return NULL;
}
/* Wrap the planes of a CVPixelBuffer as GstMemory chunks appended to `buf`,
 * filling per-plane stride/offset and attaching a GstVideoMeta.
 *
 * When `map` is TRUE the pixel buffer is locked and its plane base
 * addresses are wrapped zero-copy; when FALSE only the meta is computed.
 * *has_padding is set TRUE when a Core Video row stride differs from the
 * stride GStreamer expects for `info`.
 *
 * Returns FALSE only when locking the pixel buffer fails.
 *
 * Fixes: the planar branch declared an inner `size` that shadowed the outer
 * UInt32 `size` (a single gsize is now used for both branches), and the
 * unused `video_meta` local is removed. */
static gboolean
gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
    CVPixelBufferRef pixel_buf, gboolean * has_padding, gboolean map)
{
  guint n_planes;
  gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
  gint stride[GST_VIDEO_MAX_PLANES] = { 0 };
  gsize size;

  if (map && CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess) {
    GST_ERROR ("Could not lock pixel buffer base address");
    return FALSE;
  }

  *has_padding = FALSE;

  if (CVPixelBufferIsPlanar (pixel_buf)) {
    guint i;
    gsize plane_offset = 0;

    n_planes = CVPixelBufferGetPlaneCount (pixel_buf);
    for (i = 0; i < n_planes; i++) {
      stride[i] = CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, i);
      if (stride[i] != GST_VIDEO_INFO_PLANE_STRIDE (info, i)) {
        *has_padding = TRUE;
      }
      size = stride[i] * CVPixelBufferGetHeightOfPlane (pixel_buf, i);
      offset[i] = plane_offset;
      plane_offset += size;

      if (map) {
        gst_buffer_append_memory (buf,
            gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
                CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i), size, 0,
                size, NULL, NULL));
      }
    }
  } else {
    n_planes = 1;
    stride[0] = CVPixelBufferGetBytesPerRow (pixel_buf);
    offset[0] = 0;
    size = stride[0] * CVPixelBufferGetHeight (pixel_buf);

    if (map) {
      gst_buffer_append_memory (buf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              CVPixelBufferGetBaseAddress (pixel_buf), size, 0, size, NULL,
              NULL));
    }
  }

  gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_INFO_FORMAT (info), info->width, info->height, n_planes,
      offset, stride);

  return TRUE;
}
int CVideoEncodeVt::CopyFrameToPixelBuffer(const AVFrame* pFrame, CVPixelBufferRef aPixelBuffer, const int* apStrides, const int* apRows) { if(NULL == aPixelBuffer) { return -1; } int iPlaneCnt = 0; int ret = CVPixelBufferLockBaseAddress(aPixelBuffer, 0); if(0 != ret) { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "CVPixelBufferPoolCreatePixelBuffer failed!"); return -1; } if(CVPixelBufferIsPlanar(aPixelBuffer)) { iPlaneCnt = (int)CVPixelBufferGetPlaneCount(aPixelBuffer); for(int i = 0; pFrame->data[i]; i++) { if(i == iPlaneCnt) { CVPixelBufferUnlockBaseAddress(aPixelBuffer, 0); return -1; } uint8_t* pSrc = pFrame->data[i]; uint8_t* pDst = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(aPixelBuffer, i); int iSrcStride = apStrides[i]; int iDstStride = (int)CVPixelBufferGetBytesPerRowOfPlane(aPixelBuffer, i); if(iSrcStride == iDstStride) { memcpy(pDst, pSrc, iSrcStride * apRows[i]); } else { int iCopyBytes = iDstStride < iSrcStride ? iDstStride : iSrcStride; for(int j = 0; j < apRows[i]; j++) { memcpy(pDst + j * iDstStride, pSrc + j * iSrcStride, iCopyBytes); } } } } else { CLog::GetInstance().Log(ENUM_LOG_LEVEL::enum_Log_Level5, "aPixelBuffer muse be yuv420p!"); CVPixelBufferUnlockBaseAddress(aPixelBuffer, 0); return -1; } CVPixelBufferUnlockBaseAddress(aPixelBuffer, 0); return 0; }
/* Wrap a CMSampleBuffer in a GstBuffer without copying.
 *
 * If the sample carries a CVPixelBuffer image, its base address is exposed
 * directly (the buffer is locked read-only first). Otherwise the sample's
 * CMBlockBuffer data pointer is used. A GstCoreMediaMeta retains the sample
 * and records the related Core Media objects.
 *
 * Returns the new GstBuffer, or NULL on error.
 */
GstBuffer *
gst_core_media_buffer_new (CMSampleBufferRef sample_buf)
{
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  CMBlockBufferRef block_buf;
  gchar *data = NULL;
  UInt32 size;
  OSStatus status;
  GstBuffer *buf;
  GstCoreMediaMeta *meta;

  image_buf = CMSampleBufferGetImageBuffer (sample_buf);
  pixel_buf = NULL;
  block_buf = CMSampleBufferGetDataBuffer (sample_buf);

  if (image_buf != NULL &&
      CFGetTypeID (image_buf) == CVPixelBufferGetTypeID ()) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    /* Base addresses are only valid while the buffer is locked. */
    if (CVPixelBufferLockBaseAddress (pixel_buf,
            kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
      goto error;
    }
    if (CVPixelBufferIsPlanar (pixel_buf)) {
      gint plane_count, plane_idx;

      /* NOTE(review): using plane 0's base address with the summed size of
       * all planes assumes the planes are laid out contiguously in memory —
       * Core Video does not guarantee this; confirm for the formats this
       * element negotiates. */
      data = CVPixelBufferGetBaseAddressOfPlane (pixel_buf, 0);

      size = 0;
      plane_count = CVPixelBufferGetPlaneCount (pixel_buf);
      for (plane_idx = 0; plane_idx != plane_count; plane_idx++) {
        size += CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, plane_idx) *
            CVPixelBufferGetHeightOfPlane (pixel_buf, plane_idx);
      }
    } else {
      data = CVPixelBufferGetBaseAddress (pixel_buf);
      size = CVPixelBufferGetBytesPerRow (pixel_buf) *
          CVPixelBufferGetHeight (pixel_buf);
    }
  } else if (block_buf != NULL) {
    /* Non-video (or non-pixel-buffer) samples: expose the raw block data. */
    status = CMBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data);
    if (status != noErr)
      goto error;
    size = CMBlockBufferGetDataLength (block_buf);
  } else {
    goto error;
  }

  buf = gst_buffer_new ();

  /* The meta keeps the sample (and therefore data/size) alive with the
   * buffer; it is responsible for releasing them later. */
  meta = (GstCoreMediaMeta *) gst_buffer_add_meta (buf,
      gst_core_media_meta_get_info (), NULL);
  CVBufferRetain ((CVBufferRef)sample_buf);
  meta->sample_buf = sample_buf;
  meta->image_buf = image_buf;
  meta->pixel_buf = pixel_buf;
  meta->block_buf = block_buf;

  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data, size, 0, size,
          NULL, NULL));

  return buf;

error:
  return NULL;
}
// Copy and return a decoded frame. nsresult AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage, nsAutoPtr<FrameRef> aFrameRef) { size_t width = CVPixelBufferGetWidth(aImage); size_t height = CVPixelBufferGetHeight(aImage); LOG(" got decoded frame data... %ux%u %s", width, height, CVPixelBufferIsPlanar(aImage) ? "planar" : "chunked"); #ifdef DEBUG size_t planes = CVPixelBufferGetPlaneCount(aImage); for (size_t i = 0; i < planes; ++i) { size_t stride = CVPixelBufferGetBytesPerRowOfPlane(aImage, i); LOG(" plane %u %ux%u rowbytes %u", (unsigned)i, CVPixelBufferGetWidthOfPlane(aImage, i), CVPixelBufferGetHeightOfPlane(aImage, i), (unsigned)stride); } MOZ_ASSERT(planes == 2); #endif // DEBUG VideoData::YCbCrBuffer buffer; // Lock the returned image data. CVReturn rv = CVPixelBufferLockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly); if (rv != kCVReturnSuccess) { NS_ERROR("error locking pixel data"); mCallback->Error(); return NS_ERROR_FAILURE; } // Y plane. buffer.mPlanes[0].mData = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 0)); buffer.mPlanes[0].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 0); buffer.mPlanes[0].mWidth = width; buffer.mPlanes[0].mHeight = height; buffer.mPlanes[0].mOffset = 0; buffer.mPlanes[0].mSkip = 0; // Cb plane. buffer.mPlanes[1].mData = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 1)); buffer.mPlanes[1].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 1); buffer.mPlanes[1].mWidth = (width+1) / 2; buffer.mPlanes[1].mHeight = (height+1) / 2; buffer.mPlanes[1].mOffset = 0; buffer.mPlanes[1].mSkip = 1; // Cr plane. buffer.mPlanes[2].mData = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 1)); buffer.mPlanes[2].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 1); buffer.mPlanes[2].mWidth = (width+1) / 2; buffer.mPlanes[2].mHeight = (height+1) / 2; buffer.mPlanes[2].mOffset = 1; buffer.mPlanes[2].mSkip = 1; // Bounds. 
VideoInfo info; info.mDisplay = nsIntSize(width, height); info.mHasVideo = true; gfx::IntRect visible = gfx::IntRect(0, 0, mConfig.display_width, mConfig.display_height); // Copy the image data into our own format. nsAutoPtr<VideoData> data; data = VideoData::Create(info, mImageContainer, nullptr, aFrameRef->byte_offset, aFrameRef->composition_timestamp, aFrameRef->duration, buffer, aFrameRef->is_sync_point, aFrameRef->decode_timestamp, visible); // Unlock the returned image data. CVPixelBufferUnlockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly); if (!data) { NS_ERROR("Couldn't create VideoData for frame"); mCallback->Error(); return NS_ERROR_FAILURE; } // Frames come out in DTS order but we need to output them // in composition order. mReorderQueue.Push(data.forget()); // Assume a frame with a PTS <= current DTS is ready. while (mReorderQueue.Length() > 0) { VideoData* readyData = mReorderQueue.Pop(); if (readyData->mTime <= aFrameRef->decode_timestamp) { LOG("returning queued frame with pts %lld", readyData->mTime); mCallback->Output(readyData); } else { LOG("requeued frame with pts %lld > %lld", readyData->mTime, aFrameRef->decode_timestamp); mReorderQueue.Push(readyData); break; } } LOG("%llu decoded frames queued", static_cast<unsigned long long>(mReorderQueue.Length())); return NS_OK; }
bool QTPixelBuffer::isPlanar() const { return CVPixelBufferIsPlanar(m_pixelBuffer); }
/* Feed one access unit to the VideoToolbox decompression session and, when a
 * frame is available (ctx->frame, produced by the session's output callback),
 * copy it into outBuffer as packed planar YUV.
 *
 * Handles three input preparations before decoding:
 *  - MPEG-4 Part 2: strip a leading VOSH and (re)init the decoder from it;
 *  - MPEG-1/2: parse sequence info and (re)init the decoder;
 *  - AVC annex-B / NALU-size streams: rewrite NAL units via VTB_RewriteNALs,
 *    caching the rewritten payload across a GF_BUFFER_TOO_SMALL round trip.
 *
 * Returns GF_OK, GF_BUFFER_TOO_SMALL (caller must retry with a larger
 * buffer), or an error code. */
static GF_Err VTBDec_ProcessData(GF_MediaDecoder *ifcg,
		char *inBuffer, u32 inBufferLength,
		u16 ES_ID, u32 *CTS,
		char *outBuffer, u32 *outBufferLength,
		u8 PaddingBits, u32 mmlevel)
{
	OSStatus status;
	CMSampleBufferRef sample = NULL;
	CMBlockBufferRef block_buffer = NULL;
	OSType type;
	char *in_data;
	u32 in_data_size;
	GF_Err e;
	VTBDec *ctx = (VTBDec *)ifcg->privateStack;

	if (ctx->skip_mpeg4_vosh) {
		GF_M4VDecSpecInfo dsi;
		dsi.width = dsi.height = 0;
		e = gf_m4v_get_config(inBuffer, inBufferLength, &dsi);
		//found a vosh - remove it from payload, init decoder if needed
		if ((e==GF_OK) && dsi.width && dsi.height) {
			if (!ctx->vtb_session) {
				ctx->vosh = inBuffer;
				ctx->vosh_size = dsi.next_object_start;
				e = VTBDec_InitDecoder(ctx, GF_FALSE);
				if (e) return e;

				//enfoce removal for all frames
				ctx->skip_mpeg4_vosh = GF_TRUE;

				/* Output buffer too small for the newly known frame size:
				 * tell the caller to resize and retry. */
				if (ctx->out_size != *outBufferLength) {
					*outBufferLength = ctx->out_size;
					return GF_BUFFER_TOO_SMALL;
				}
			}
			ctx->vosh_size = dsi.next_object_start;
		} else if (!ctx->vtb_session) {
			/* No VOSH yet and no session: nothing to decode. */
			*outBufferLength=0;
			return GF_OK;
		}
	}

	if (ctx->init_mpeg12) {
		GF_M4VDecSpecInfo dsi;
		dsi.width = dsi.height = 0;

		e = gf_mpegv12_get_config(inBuffer, inBufferLength, &dsi);
		if ((e==GF_OK) && dsi.width && dsi.height) {
			ctx->width = dsi.width;
			ctx->height = dsi.height;
			/* Pack pixel aspect ratio as num<<16 | den. */
			ctx->pixel_ar = dsi.par_num;
			ctx->pixel_ar <<= 16;
			ctx->pixel_ar |= dsi.par_den;

			e = VTBDec_InitDecoder(ctx, GF_FALSE);
			if (e) return e;

			if (ctx->out_size != *outBufferLength) {
				*outBufferLength = ctx->out_size;
				return GF_BUFFER_TOO_SMALL;
			}
		}
		if (!ctx->vtb_session) {
			*outBufferLength=0;
			return GF_OK;
		}
	}

	if (ctx->is_annex_b || (!ctx->vtb_session && ctx->nalu_size_length) ) {
		/* Reuse the rewritten payload cached during a previous
		 * GF_BUFFER_TOO_SMALL round trip, if any. */
		if (ctx->cached_annex_b) {
			in_data = ctx->cached_annex_b;
			in_data_size = ctx->cached_annex_b_size;
			ctx->cached_annex_b = NULL;
		} else {
			e = VTB_RewriteNALs(ctx, inBuffer, inBufferLength, &in_data, &in_data_size);
			if (e) return e;
		}

		if (ctx->out_size != *outBufferLength) {
			*outBufferLength = ctx->out_size;
			/* Keep the rewritten data so the retry does not redo the work. */
			ctx->cached_annex_b = in_data;
			ctx->cached_annex_b_size = in_data_size;
			return GF_BUFFER_TOO_SMALL;
		}
	} else if (ctx->vosh_size) {
		/* Skip the VOSH detected above. */
		in_data = inBuffer + ctx->vosh_size;
		in_data_size = inBufferLength - ctx->vosh_size;
		ctx->vosh_size = 0;
	} else {
		in_data = inBuffer;
		in_data_size = inBufferLength;
	}

	if (!ctx->vtb_session) {
		*outBufferLength=0;
		return GF_OK;
	}

	/* Wrap in_data without copying (kCFAllocatorNull: we keep ownership). */
	status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, in_data, in_data_size, kCFAllocatorNull, NULL, 0, in_data_size, 0, &block_buffer);

	if (status) {
		return GF_IO_ERR;
	}
	*outBufferLength=0;
	if (block_buffer == NULL) return GF_OK;

	status = CMSampleBufferCreate(kCFAllocatorDefault, block_buffer, TRUE, NULL, NULL, ctx->fmt_desc, 1, 0, NULL, 0, NULL, &sample);
	if (status || (sample==NULL)) {
		if (block_buffer) CFRelease(block_buffer);
		return GF_IO_ERR;
	}

	/* Decode synchronously; the session's callback sets ctx->frame and
	 * ctx->last_error. */
	ctx->last_error = GF_OK;
	status = VTDecompressionSessionDecodeFrame(ctx->vtb_session, sample, 0, NULL, 0);
	if (!status)
		status = VTDecompressionSessionWaitForAsynchronousFrames(ctx->vtb_session);

	CFRelease(block_buffer);
	CFRelease(sample);
	/* in_data was allocated by VTB_RewriteNALs in the annex-B path.
	 * NOTE(review): freed only when ctx->cached_annex_b is set — confirm the
	 * cache field is re-set elsewhere before this point, otherwise this
	 * condition looks inverted. */
	if (ctx->cached_annex_b) gf_free(in_data);

	if (ctx->last_error) return ctx->last_error;
	if (status) return GF_NON_COMPLIANT_BITSTREAM;

	if (!ctx->frame) {
		*outBufferLength=0;
		return ctx->last_error;
	}

	*outBufferLength = ctx->out_size;

	/* Base addresses are only valid while the buffer is locked. */
	status = CVPixelBufferLockBaseAddress(ctx->frame, kCVPixelBufferLock_ReadOnly);
	if (status != kCVReturnSuccess) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[VTB] Error locking frame data\n"));
		return GF_IO_ERR;
	}
	type = CVPixelBufferGetPixelFormatType(ctx->frame);

	if (CVPixelBufferIsPlanar(ctx->frame)) {
		u32 i, j, nb_planes = (u32) CVPixelBufferGetPlaneCount(ctx->frame);
		char *dst = outBuffer;
		/* NOTE(review): needs_stride is set below but never read. */
		Bool needs_stride=GF_FALSE;

		if ((type==kCVPixelFormatType_420YpCbCr8Planar)
			|| (type==kCVPixelFormatType_420YpCbCr8PlanarFullRange)
			|| (type==kCVPixelFormatType_422YpCbCr8_yuvs)
			|| (type==kCVPixelFormatType_444YpCbCr8)
			|| (type=='444v')
		) {
			u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 0);

			//TOCHECK - for now the 3 planes are consecutive in VideoToolbox
			if (stride==ctx->width) {
				/* No padding: the whole image is one contiguous copy. */
				char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 0);
				memcpy(dst, data, sizeof(char)*ctx->out_size);
			} else {
				/* Copy per plane, dropping the stride padding row by row. */
				for (i=0; i<nb_planes; i++) {
					char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, i);
					u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, i);
					u32 w, h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, i);
					w = ctx->width;
					if (i) {
						/* Chroma planes are half-width for 4:2:2 / 4:2:0. */
						switch (ctx->pix_fmt) {
						case GF_PIXEL_YUV444:
							break;
						case GF_PIXEL_YUV422:
						case GF_PIXEL_YV12:
							w /= 2;
							break;
						}
					}
					if (stride != w) {
						needs_stride=GF_TRUE;
						for (j=0; j<h; j++) {
							memcpy(dst, data, sizeof(char)*w);
							dst += w;
							data += stride;
						}
					} else {
						memcpy(dst, data, sizeof(char)*h*stride);
						dst += sizeof(char)*h*stride;
					}
				}
			}
		} else if ((type==kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) || (type==kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) {
			/* NV12-style biplanar: copy Y, then de-interleave CbCr into
			 * separate U and V planes. */
			char *dst_v;
			char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 0);
			u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 0);
			u32 i, h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, 0);

			if (stride==ctx->width) {
				memcpy(dst, data, sizeof(char)*h*stride);
				dst += sizeof(char)*h*stride;
			} else {
				for (i=0; i<h; i++) {
					memcpy(dst, data, sizeof(char)*ctx->width);
					dst += ctx->width;
					data += stride;
				}
				needs_stride=GF_TRUE;
			}

			data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 1);
			stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 1);
			h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, 1);
			/* NOTE(review): V-plane offset uses h*stride/2, not the
			 * destination plane size (width*h/2); these differ when the
			 * source is padded — verify against a padded buffer. */
			dst_v = dst+sizeof(char) * h*stride/2;

			for (i=0; i<ctx->width * h / 2; i++) {
				*dst = data[0];
				*dst_v = data[1];
				data += 2;
				dst_v++;
				dst++;
				/* NOTE(review): each iteration consumes 2 source bytes, so a
				 * source row (ctx->width bytes used) spans width/2
				 * iterations; this padding skip fires on i%width (and at
				 * i==0) — confirm the row-advance arithmetic for padded
				 * buffers. */
				if (!(i%ctx->width)) data += (stride - ctx->width);
			}
		}
	}
	CVPixelBufferUnlockBaseAddress(ctx->frame, kCVPixelBufferLock_ReadOnly);
	return GF_OK;
}
/* Allocate the hwdec state, pre-create one GL texture per possible plane,
 * and register the VideoToolbox hw context with the device list. */
static int create(struct gl_hwdec *hw)
{
    if (!check_hwdec(hw))
        return -1;
    struct priv *p = talloc_zero(hw, struct priv);
    hw->priv = p;
    hw->gl->GenTextures(MP_MAX_PLANES, p->gl_planes);
    p->hwctx = (struct mp_hwdec_ctx){
        .type = HWDEC_VIDEOTOOLBOX,
        .download_image = mp_vt_download_image,
        .ctx = &p->hwctx,
    };
    hwdec_devices_add(hw->devs, &p->hwctx);
    return 0;
}

/* Resolve the CVPixelBuffer's underlying format (hw_subfmt) to a GL format
 * description and rewrite the image params to that software format. */
static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
    struct priv *p = hw->priv;
    assert(params->imgfmt == hw->driver->imgfmt);
    if (!params->hw_subfmt) {
        MP_ERR(hw, "Unsupported CVPixelBuffer format.\n");
        return -1;
    }
    if (!gl_get_imgfmt_desc(hw->gl, params->hw_subfmt, &p->desc)) {
        MP_ERR(hw, "Unsupported texture format.\n");
        return -1;
    }
    params->imgfmt = params->hw_subfmt;
    params->hw_subfmt = 0;
    return 0;
}

/* Bind the frame's IOSurface planes to rectangle textures via
 * CGLTexImageIOSurface2D and describe them in out_frame.
 *
 * The CVPixelBuffer is retained in p->pbuf (releasing the previous one) so
 * the IOSurface stays valid while the textures are in use. */
static int map_frame(struct gl_hwdec *hw, struct mp_image *hw_image,
                     struct gl_hwdec_frame *out_frame)
{
    struct priv *p = hw->priv;
    GL *gl = hw->gl;

    CVPixelBufferRelease(p->pbuf);
    p->pbuf = (CVPixelBufferRef)hw_image->planes[3];
    CVPixelBufferRetain(p->pbuf);
    IOSurfaceRef surface = CVPixelBufferGetIOSurface(p->pbuf);
    if (!surface) {
        MP_ERR(hw, "CVPixelBuffer has no IOSurface\n");
        return -1;
    }

    /* Planar buffers must match the negotiated plane count; packed buffers
     * must have been negotiated as single-plane. */
    const bool planar = CVPixelBufferIsPlanar(p->pbuf);
    const int planes  = CVPixelBufferGetPlaneCount(p->pbuf);
    assert((planar && planes == p->desc.num_planes) || p->desc.num_planes == 1);

    /* IOSurface textures require the rectangle target. */
    GLenum gl_target = GL_TEXTURE_RECTANGLE;

    for (int i = 0; i < p->desc.num_planes; i++) {
        const struct gl_format *fmt = p->desc.planes[i];

        gl->BindTexture(gl_target, p->gl_planes[i]);

        CGLError err = CGLTexImageIOSurface2D(
            CGLGetCurrentContext(), gl_target,
            fmt->internal_format,
            IOSurfaceGetWidthOfPlane(surface, i),
            IOSurfaceGetHeightOfPlane(surface, i),
            fmt->format, fmt->type, surface, i);

        if (err != kCGLNoError)
            MP_ERR(hw, "error creating IOSurface texture for plane %d: %s (%x)\n",
                   i, CGLErrorString(err), gl->GetError());

        gl->BindTexture(gl_target, 0);

        out_frame->planes[i] = (struct gl_hwdec_plane){
            .gl_texture = p->gl_planes[i],
            .gl_target = gl_target,
            .tex_w = IOSurfaceGetWidthOfPlane(surface, i),
            .tex_h = IOSurfaceGetHeightOfPlane(surface, i),
        };
    }

    snprintf(out_frame->swizzle, sizeof(out_frame->swizzle), "%s",
             p->desc.swizzle);

    return 0;
}

/* Release the retained pixel buffer, the GL textures, and deregister the
 * hw context. */
static void destroy(struct gl_hwdec *hw)
{
    struct priv *p = hw->priv;
    GL *gl = hw->gl;

    CVPixelBufferRelease(p->pbuf);
    gl->DeleteTextures(MP_MAX_PLANES, p->gl_planes);

    hwdec_devices_remove(hw->devs, &p->hwctx);
}

/* Driver vtable: zero-copy VideoToolbox frames mapped as GL textures. */
const struct gl_hwdec_driver gl_hwdec_videotoolbox = {
    .name = "videotoolbox",
    .api = HWDEC_VIDEOTOOLBOX,
    .imgfmt = IMGFMT_VIDEOTOOLBOX,
    .create = create,
    .reinit = reinit,
    .map_frame = map_frame,
    .destroy = destroy,
};
/* Copy a VideoToolbox-decoded frame out of its CVPixelBuffer
 * (frame->data[3]) into a regular software AVFrame, replacing the hardware
 * frame in place.
 *
 * Returns 0 on success or a negative AVERROR code.
 */
static int videotoolbox_retrieve_data(AVCodecContext *s, AVFrame *frame)
{
    InputStream *ist = s->opaque;
    VTContext  *vt = ist->hwaccel_ctx;
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)frame->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    CVReturn err;
    uint8_t *data[4] = { 0 };
    int linesize[4] = { 0 };
    int planes, ret, i;
    char codec_str[32];

    av_frame_unref(vt->tmp_frame);

    /* Map the Core Video pixel format onto an FFmpeg pixel format. */
    switch (pixel_format) {
    case kCVPixelFormatType_420YpCbCr8Planar:
        vt->tmp_frame->format = AV_PIX_FMT_YUV420P;
        break;
    case kCVPixelFormatType_422YpCbCr8:
        vt->tmp_frame->format = AV_PIX_FMT_UYVY422;
        break;
    case kCVPixelFormatType_32BGRA:
        vt->tmp_frame->format = AV_PIX_FMT_BGRA;
        break;
#ifdef kCFCoreFoundationVersionNumber10_7
    /* NV12 output is only available on 10.7+ SDKs. */
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
        vt->tmp_frame->format = AV_PIX_FMT_NV12;
        break;
#endif
    default:
        av_get_codec_tag_string(codec_str, sizeof(codec_str), s->codec_tag);
        av_log(NULL, AV_LOG_ERROR,
               "%s: Unsupported pixel format: %s\n",
               codec_str, videotoolbox_pixfmt);
        return AVERROR(ENOSYS);
    }

    vt->tmp_frame->width  = frame->width;
    vt->tmp_frame->height = frame->height;
    ret = av_frame_get_buffer(vt->tmp_frame, 32);
    if (ret < 0)
        return ret;

    /* Base addresses are only valid while the buffer is locked. */
    err = CVPixelBufferLockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (err != kCVReturnSuccess) {
        av_log(NULL, AV_LOG_ERROR, "Error locking the pixel buffer.\n");
        return AVERROR_UNKNOWN;
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {
        planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (i = 0; i < planes; i++) {
            data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        data[0]     = CVPixelBufferGetBaseAddress(pixbuf);
        linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    av_image_copy(vt->tmp_frame->data, vt->tmp_frame->linesize,
                  (const uint8_t **)data, linesize, vt->tmp_frame->format,
                  frame->width, frame->height);

    ret = av_frame_copy_props(vt->tmp_frame, frame);
    /* Balance the lock taken above before any return. */
    CVPixelBufferUnlockBaseAddress(pixbuf, kCVPixelBufferLock_ReadOnly);
    if (ret < 0)
        return ret;

    /* Replace the hardware frame with the software copy. */
    av_frame_unref(frame);
    av_frame_move_ref(frame, vt->tmp_frame);

    return 0;
}