/* Copy an NV12 CVPixelBuffer-backed picture into an I420 destination.
 *
 * Reads the two NV12 planes (Y, interleaved CbCr) out of the source
 * picture's attached CVPixelBuffer and hands them to CopyFromNv12ToI420,
 * which performs the NV12 -> I420 conversion into destinationPicture.
 * Silently returns when the source has no pixel buffer or degenerate
 * dimensions. */
static void CVPX_I420(filter_t *p_filter, picture_t *sourcePicture,
                      picture_t *destinationPicture)
{
    VLC_UNUSED(p_filter);

    picture_sys_t *picsys = sourcePicture->p_sys;
    if (picsys == NULL)
        return;
    if (picsys->pixelBuffer == nil)
        return;

    unsigned width = CVPixelBufferGetWidthOfPlane(picsys->pixelBuffer, 0);
    unsigned height = CVPixelBufferGetHeightOfPlane(picsys->pixelBuffer, 0);
    if (width == 0 || height == 0)
        return;

    uint8_t *pp_plane[2];
    size_t pi_pitch[2];

    /* Fix: the lock can fail; reading plane base addresses from an
     * unlocked pixel buffer is invalid, so bail out on failure instead
     * of ignoring the return value. */
    if (CVPixelBufferLockBaseAddress(picsys->pixelBuffer,
                                     kCVPixelBufferLock_ReadOnly)
            != kCVReturnSuccess)
        return;

    for (int i = 0; i < 2; i++) {
        pp_plane[i] = CVPixelBufferGetBaseAddressOfPlane(picsys->pixelBuffer, i);
        pi_pitch[i] = CVPixelBufferGetBytesPerRowOfPlane(picsys->pixelBuffer, i);
    }

    CopyFromNv12ToI420(destinationPicture, pp_plane, pi_pitch, height);

    CVPixelBufferUnlockBaseAddress(picsys->pixelBuffer,
                                   kCVPixelBufferLock_ReadOnly);
}
void VdaMixer::adjust(VideoFormatData *data, const mp_image *mpi) { Q_ASSERT(data->imgfmt == IMGFMT_VDA); auto buffer = (CVPixelBufferRef)mpi->planes[3]; switch (CVPixelBufferGetPixelFormatType(buffer)) { case kCVPixelFormatType_422YpCbCr8: data->type = IMGFMT_UYVY; break; case kCVPixelFormatType_422YpCbCr8_yuvs: data->type = IMGFMT_YUYV; break; case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: data->type = IMGFMT_NV12; break; case kCVPixelFormatType_420YpCbCr8Planar: data->type = IMGFMT_420P; break; default: _Error("Not supported format."); data->type = IMGFMT_NONE; } auto desc = mp_imgfmt_get_desc(data->type); if (CVPixelBufferIsPlanar(buffer)) { data->planes = CVPixelBufferGetPlaneCount(buffer); Q_ASSERT(data->planes == desc.num_planes); for (int i=0; i<data->planes; ++i) { data->alignedByteSize[i].rwidth() = CVPixelBufferGetBytesPerRowOfPlane(buffer, i); data->alignedByteSize[i].rheight() = CVPixelBufferGetHeightOfPlane(buffer, i); data->bpp += desc.bpp[i] >> (desc.xs[i] + desc.ys[i]); } } else {
/* Wrap a CoreVideo pixel buffer into a GstBuffer without copying.
 *
 * Each plane of the CVPixelBuffer is appended as a zero-copy GstMemory;
 * a GstCoreVideoMeta keeps a retain on the underlying CoreVideo buffer
 * so the pixel data stays alive for the lifetime of the GstBuffer.
 * A GstVideoMeta with the real stride/offset layout is attached when
 * @vinfo is provided. Returns NULL when @cvbuf is not a CVPixelBuffer
 * or its base address cannot be locked. */
GstBuffer *
gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo)
{
  CVPixelBufferRef pixbuf = NULL;
  GstBuffer *buf;
  GstCoreVideoMeta *meta;
  guint n_planes;
  gsize offset[GST_VIDEO_MAX_PLANES];
  gint stride[GST_VIDEO_MAX_PLANES];

  if (CFGetTypeID (cvbuf) != CVPixelBufferGetTypeID ())
    /* TODO: Do we need to handle other buffer types? */
    goto error;

  pixbuf = (CVPixelBufferRef) cvbuf;

  if (CVPixelBufferLockBaseAddress (pixbuf,
          kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
    goto error;
  }

  buf = gst_buffer_new ();

  /* add the corevideo meta to free the underlying corevideo buffer */
  meta = (GstCoreVideoMeta *) gst_buffer_add_meta (buf,
      gst_core_video_meta_get_info (), NULL);
  meta->cvbuf = CVBufferRetain (cvbuf);
  meta->pixbuf = pixbuf;

  /* set stride, offset and size */
  memset (&offset, 0, sizeof (offset));
  memset (&stride, 0, sizeof (stride));

  if (CVPixelBufferIsPlanar (pixbuf)) {
    guint i;                    /* fix: unsigned index matches n_planes */
    gsize size, off;

    n_planes = CVPixelBufferGetPlaneCount (pixbuf);
    off = 0;
    for (i = 0; i < n_planes; ++i) {
      stride[i] = CVPixelBufferGetBytesPerRowOfPlane (pixbuf, i);
      size = stride[i] * CVPixelBufferGetHeightOfPlane (pixbuf, i);
      offset[i] = off;
      off += size;

      gst_buffer_append_memory (buf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              CVPixelBufferGetBaseAddressOfPlane (pixbuf, i), size, 0, size,
              NULL, NULL));
    }
  } else {
    gsize size;

    n_planes = 1;
    stride[0] = CVPixelBufferGetBytesPerRow (pixbuf);
    offset[0] = 0;
    /* Fix: the original computed stride[0] * vinfo->height here, i.e.
     * dereferenced vinfo BEFORE the NULL check below. Query the height
     * from the pixel buffer itself instead. */
    size = stride[0] * CVPixelBufferGetHeight (pixbuf);

    gst_buffer_append_memory (buf,
        gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
            CVPixelBufferGetBaseAddress (pixbuf), size, 0, size, NULL, NULL));
  }

  if (vinfo) {
    GstVideoMeta *video_meta;

    video_meta = gst_buffer_add_video_meta_full (buf,
        GST_VIDEO_FRAME_FLAG_NONE, vinfo->finfo->format, vinfo->width,
        vinfo->height, n_planes, offset, stride);
  }

  return buf;

error:
  return NULL;
}
/* Append the planes of @pixel_buf to @buf (zero-copy when @map is TRUE)
 * and attach a GstVideoMeta describing the actual stride/offset layout.
 *
 * @has_padding is set to TRUE when any plane's stride differs from the
 * canonical stride in @info (i.e. rows carry padding). Returns FALSE if
 * the buffer's base address cannot be locked. */
static gboolean
gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
    CVPixelBufferRef pixel_buf, gboolean * has_padding, gboolean map)
{
  guint n_planes;
  gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
  gint stride[GST_VIDEO_MAX_PLANES] = { 0 };
  GstVideoMeta *video_meta;
  /* Fix: the original declared an outer UInt32 size that was shadowed by
   * a gint size inside the planar branch; use one gsize for both paths so
   * plane sizes cannot truncate or overflow int. */
  gsize size;

  if (map && CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess) {
    GST_ERROR ("Could not lock pixel buffer base address");
    return FALSE;
  }

  *has_padding = FALSE;

  if (CVPixelBufferIsPlanar (pixel_buf)) {
    guint i;                    /* fix: unsigned index matches n_planes */
    gsize plane_offset = 0;

    n_planes = CVPixelBufferGetPlaneCount (pixel_buf);
    for (i = 0; i < n_planes; i++) {
      stride[i] = CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, i);
      if (stride[i] != GST_VIDEO_INFO_PLANE_STRIDE (info, i)) {
        *has_padding = TRUE;
      }

      size = stride[i] * CVPixelBufferGetHeightOfPlane (pixel_buf, i);
      offset[i] = plane_offset;
      plane_offset += size;

      if (map) {
        gst_buffer_append_memory (buf,
            gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
                CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i), size, 0,
                size, NULL, NULL));
      }
    }
  } else {
    n_planes = 1;
    stride[0] = CVPixelBufferGetBytesPerRow (pixel_buf);
    offset[0] = 0;
    size = stride[0] * CVPixelBufferGetHeight (pixel_buf);

    if (map) {
      gst_buffer_append_memory (buf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              CVPixelBufferGetBaseAddress (pixel_buf), size, 0, size, NULL,
              NULL));
    }
  }

  video_meta = gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_INFO_FORMAT (info), info->width, info->height, n_planes,
      offset, stride);

  return TRUE;
}
/* Wrap a CoreMedia sample buffer in a GstBuffer without copying.
 *
 * Picks the sample's image buffer (when it is a CVPixelBuffer) or its
 * raw block buffer, exposes the bytes as a single zero-copy GstMemory,
 * and attaches a GstCoreMediaMeta that retains the sample so the memory
 * stays valid. Returns NULL on lock/data-pointer failure or when the
 * sample carries neither kind of payload. */
GstBuffer *
gst_core_media_buffer_new (CMSampleBufferRef sample_buf)
{
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  CMBlockBufferRef block_buf;
  gchar *data = NULL;
  UInt32 size;
  OSStatus status;
  GstBuffer *buf;
  GstCoreMediaMeta *meta;

  image_buf = CMSampleBufferGetImageBuffer (sample_buf);
  pixel_buf = NULL;
  block_buf = CMSampleBufferGetDataBuffer (sample_buf);

  if (image_buf != NULL &&
      CFGetTypeID (image_buf) == CVPixelBufferGetTypeID ()) {
    pixel_buf = (CVPixelBufferRef) image_buf;

    if (CVPixelBufferLockBaseAddress (pixel_buf,
            kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
      goto error;
    }

    if (CVPixelBufferIsPlanar (pixel_buf)) {
      gint plane_count, plane_idx;

      /* All planes are exposed as one memory region starting at plane 0;
       * total size is the sum of rowbytes * height over every plane.
       * NOTE(review): this assumes the planes are contiguous with no gap
       * between them — confirm against CVPixelBufferGetDataSize for
       * buffers with inter-plane padding. */
      data = CVPixelBufferGetBaseAddressOfPlane (pixel_buf, 0);
      size = 0;
      plane_count = CVPixelBufferGetPlaneCount (pixel_buf);
      for (plane_idx = 0; plane_idx != plane_count; plane_idx++) {
        size += CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, plane_idx) *
            CVPixelBufferGetHeightOfPlane (pixel_buf, plane_idx);
      }
    } else {
      data = CVPixelBufferGetBaseAddress (pixel_buf);
      size = CVPixelBufferGetBytesPerRow (pixel_buf) *
          CVPixelBufferGetHeight (pixel_buf);
    }
  } else if (block_buf != NULL) {
    /* Non-image samples: expose the block buffer's bytes directly. */
    status = CMBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data);
    if (status != noErr)
      goto error;
    size = CMBlockBufferGetDataLength (block_buf);
  } else {
    goto error;
  }

  buf = gst_buffer_new ();

  /* The meta keeps a reference on the sample buffer (and caches the
   * related buffer pointers) so the wrapped memory outlives this call.
   * NOTE(review): CVBufferRetain is used on a CMSampleBufferRef here;
   * presumably equivalent to CFRetain for this CF type — verify. */
  meta = (GstCoreMediaMeta *) gst_buffer_add_meta (buf,
      gst_core_media_meta_get_info (), NULL);
  CVBufferRetain ((CVBufferRef) sample_buf);
  meta->sample_buf = sample_buf;
  meta->image_buf = image_buf;
  meta->pixel_buf = pixel_buf;
  meta->block_buf = block_buf;

  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data, size, 0, size,
          NULL, NULL));

  return buf;

error:
  return NULL;
}
// Copy and return a decoded frame. nsresult AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage, nsAutoPtr<FrameRef> aFrameRef) { size_t width = CVPixelBufferGetWidth(aImage); size_t height = CVPixelBufferGetHeight(aImage); LOG(" got decoded frame data... %ux%u %s", width, height, CVPixelBufferIsPlanar(aImage) ? "planar" : "chunked"); #ifdef DEBUG size_t planes = CVPixelBufferGetPlaneCount(aImage); for (size_t i = 0; i < planes; ++i) { size_t stride = CVPixelBufferGetBytesPerRowOfPlane(aImage, i); LOG(" plane %u %ux%u rowbytes %u", (unsigned)i, CVPixelBufferGetWidthOfPlane(aImage, i), CVPixelBufferGetHeightOfPlane(aImage, i), (unsigned)stride); } MOZ_ASSERT(planes == 2); #endif // DEBUG VideoData::YCbCrBuffer buffer; // Lock the returned image data. CVReturn rv = CVPixelBufferLockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly); if (rv != kCVReturnSuccess) { NS_ERROR("error locking pixel data"); mCallback->Error(); return NS_ERROR_FAILURE; } // Y plane. buffer.mPlanes[0].mData = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 0)); buffer.mPlanes[0].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 0); buffer.mPlanes[0].mWidth = width; buffer.mPlanes[0].mHeight = height; buffer.mPlanes[0].mOffset = 0; buffer.mPlanes[0].mSkip = 0; // Cb plane. buffer.mPlanes[1].mData = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 1)); buffer.mPlanes[1].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 1); buffer.mPlanes[1].mWidth = (width+1) / 2; buffer.mPlanes[1].mHeight = (height+1) / 2; buffer.mPlanes[1].mOffset = 0; buffer.mPlanes[1].mSkip = 1; // Cr plane. buffer.mPlanes[2].mData = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 1)); buffer.mPlanes[2].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 1); buffer.mPlanes[2].mWidth = (width+1) / 2; buffer.mPlanes[2].mHeight = (height+1) / 2; buffer.mPlanes[2].mOffset = 1; buffer.mPlanes[2].mSkip = 1; // Bounds. 
VideoInfo info; info.mDisplay = nsIntSize(width, height); info.mHasVideo = true; gfx::IntRect visible = gfx::IntRect(0, 0, mConfig.display_width, mConfig.display_height); // Copy the image data into our own format. nsAutoPtr<VideoData> data; data = VideoData::Create(info, mImageContainer, nullptr, aFrameRef->byte_offset, aFrameRef->composition_timestamp, aFrameRef->duration, buffer, aFrameRef->is_sync_point, aFrameRef->decode_timestamp, visible); // Unlock the returned image data. CVPixelBufferUnlockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly); if (!data) { NS_ERROR("Couldn't create VideoData for frame"); mCallback->Error(); return NS_ERROR_FAILURE; } // Frames come out in DTS order but we need to output them // in composition order. mReorderQueue.Push(data.forget()); // Assume a frame with a PTS <= current DTS is ready. while (mReorderQueue.Length() > 0) { VideoData* readyData = mReorderQueue.Pop(); if (readyData->mTime <= aFrameRef->decode_timestamp) { LOG("returning queued frame with pts %lld", readyData->mTime); mCallback->Output(readyData); } else { LOG("requeued frame with pts %lld > %lld", readyData->mTime, aFrameRef->decode_timestamp); mReorderQueue.Push(readyData); break; } } LOG("%llu decoded frames queued", static_cast<unsigned long long>(mReorderQueue.Length())); return NS_OK; }
// Returns the height in pixels of the given plane of the wrapped pixel buffer.
size_t QTPixelBuffer::heightOfPlane(size_t plane) const
{
    const size_t planeHeight = CVPixelBufferGetHeightOfPlane(m_pixelBuffer, plane);
    return planeHeight;
}
/* Decode one access unit through VideoToolbox and copy the decoded frame
 * into outBuffer.
 *
 * Handles three pre-decode rewrites: stripping an MPEG-4 VOSH from the
 * payload, (re)initializing on MPEG-1/2 sequence headers, and rewriting
 * Annex-B/NALU payloads. When the caller's output buffer is too small
 * the rewritten payload is cached and GF_BUFFER_TOO_SMALL is returned so
 * the same data can be resubmitted. After decoding, the CVPixelBuffer is
 * locked and copied out plane by plane, de-striding and (for NV12)
 * de-interleaving chroma as needed. */
static GF_Err VTBDec_ProcessData(GF_MediaDecoder *ifcg,
		char *inBuffer, u32 inBufferLength,
		u16 ES_ID, u32 *CTS,
		char *outBuffer, u32 *outBufferLength,
		u8 PaddingBits, u32 mmlevel)
{
	OSStatus status;
	CMSampleBufferRef sample = NULL;
	CMBlockBufferRef block_buffer = NULL;
	OSType type;
	char *in_data;
	u32 in_data_size;
	GF_Err e;
	VTBDec *ctx = (VTBDec *)ifcg->privateStack;

	if (ctx->skip_mpeg4_vosh) {
		GF_M4VDecSpecInfo dsi;
		dsi.width = dsi.height = 0;
		e = gf_m4v_get_config(inBuffer, inBufferLength, &dsi);
		//found a vosh - remove it from payload, init decoder if needed
		if ((e==GF_OK) && dsi.width && dsi.height) {
			if (!ctx->vtb_session) {
				ctx->vosh = inBuffer;
				ctx->vosh_size = dsi.next_object_start;
				e = VTBDec_InitDecoder(ctx, GF_FALSE);
				if (e) return e;

				//enforce removal for all frames
				ctx->skip_mpeg4_vosh = GF_TRUE;

				if (ctx->out_size != *outBufferLength) {
					*outBufferLength = ctx->out_size;
					return GF_BUFFER_TOO_SMALL;
				}
			}
			ctx->vosh_size = dsi.next_object_start;
		} else if (!ctx->vtb_session) {
			//no VOSH and no active session: nothing we can decode yet
			*outBufferLength=0;
			return GF_OK;
		}
	}

	if (ctx->init_mpeg12) {
		GF_M4VDecSpecInfo dsi;
		dsi.width = dsi.height = 0;
		//probe MPEG-1/2 sequence header for dimensions and aspect ratio
		e = gf_mpegv12_get_config(inBuffer, inBufferLength, &dsi);
		if ((e==GF_OK) && dsi.width && dsi.height) {
			ctx->width = dsi.width;
			ctx->height = dsi.height;
			//pack PAR as num<<16 | den
			ctx->pixel_ar = dsi.par_num;
			ctx->pixel_ar <<= 16;
			ctx->pixel_ar |= dsi.par_den;
			e = VTBDec_InitDecoder(ctx, GF_FALSE);
			if (e) return e;
			if (ctx->out_size != *outBufferLength) {
				*outBufferLength = ctx->out_size;
				return GF_BUFFER_TOO_SMALL;
			}
		}
		if (!ctx->vtb_session) {
			*outBufferLength=0;
			return GF_OK;
		}
	}

	if (ctx->is_annex_b || (!ctx->vtb_session && ctx->nalu_size_length) ) {
		//reuse the payload cached by a previous GF_BUFFER_TOO_SMALL round
		if (ctx->cached_annex_b) {
			in_data = ctx->cached_annex_b;
			in_data_size = ctx->cached_annex_b_size;
			ctx->cached_annex_b = NULL;
		} else {
			e = VTB_RewriteNALs(ctx, inBuffer, inBufferLength, &in_data, &in_data_size);
			if (e) return e;
		}
		if (ctx->out_size != *outBufferLength) {
			*outBufferLength = ctx->out_size;
			//keep the rewritten payload for the resubmission
			ctx->cached_annex_b = in_data;
			ctx->cached_annex_b_size = in_data_size;
			return GF_BUFFER_TOO_SMALL;
		}
	} else if (ctx->vosh_size) {
		//skip the VOSH at the start of the payload
		in_data = inBuffer + ctx->vosh_size;
		in_data_size = inBufferLength - ctx->vosh_size;
		ctx->vosh_size = 0;
	} else {
		in_data = inBuffer;
		in_data_size = inBufferLength;
	}

	if (!ctx->vtb_session) {
		*outBufferLength=0;
		return GF_OK;
	}

	//wrap in_data without copy: kCFAllocatorNull means the block buffer
	//does not own the memory
	status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, in_data, in_data_size, kCFAllocatorNull, NULL, 0, in_data_size, 0, &block_buffer);

	if (status) {
		return GF_IO_ERR;
	}
	*outBufferLength=0;
	if (block_buffer == NULL)
		return GF_OK;

	status = CMSampleBufferCreate(kCFAllocatorDefault, block_buffer, TRUE, NULL, NULL, ctx->fmt_desc, 1, 0, NULL, 0, NULL, &sample);

	if (status || (sample==NULL)) {
		if (block_buffer)
			CFRelease(block_buffer);
		return GF_IO_ERR;
	}
	ctx->last_error = GF_OK;
	//synchronous decode: wait for the output callback to run before copying
	status = VTDecompressionSessionDecodeFrame(ctx->vtb_session, sample, 0, NULL, 0);
	if (!status)
		status = VTDecompressionSessionWaitForAsynchronousFrames(ctx->vtb_session);

	CFRelease(block_buffer);
	CFRelease(sample);
	//in_data was allocated by VTB_RewriteNALs in the annex-b path
	if (ctx->cached_annex_b)
		gf_free(in_data);

	if (ctx->last_error) return ctx->last_error;
	if (status) return GF_NON_COMPLIANT_BITSTREAM;

	if (!ctx->frame) {
		*outBufferLength=0;
		return ctx->last_error;
	}

	*outBufferLength = ctx->out_size;

	status = CVPixelBufferLockBaseAddress(ctx->frame, kCVPixelBufferLock_ReadOnly);
	if (status != kCVReturnSuccess) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[VTB] Error locking frame data\n"));
		return GF_IO_ERR;
	}
	type = CVPixelBufferGetPixelFormatType(ctx->frame);

	if (CVPixelBufferIsPlanar(ctx->frame)) {
		u32 i, j, nb_planes = (u32) CVPixelBufferGetPlaneCount(ctx->frame);
		char *dst = outBuffer;
		Bool needs_stride=GF_FALSE;

		if ((type==kCVPixelFormatType_420YpCbCr8Planar) || (type==kCVPixelFormatType_420YpCbCr8PlanarFullRange) || (type==kCVPixelFormatType_422YpCbCr8_yuvs) || (type==kCVPixelFormatType_444YpCbCr8) || (type=='444v') ) {
			u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 0);
			//TOCHECK - for now the 3 planes are consecutive in VideoToolbox
			if (stride==ctx->width) {
				//no padding: all planes are copied in one shot
				char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 0);
				memcpy(dst, data, sizeof(char)*ctx->out_size);
			} else {
				//row padding present: copy plane by plane, row by row
				for (i=0; i<nb_planes; i++) {
					char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, i);
					u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, i);
					u32 w, h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, i);
					w = ctx->width;
					if (i) {
						//chroma planes are half width for 4:2:2 / 4:2:0
						switch (ctx->pix_fmt) {
						case GF_PIXEL_YUV444:
							break;
						case GF_PIXEL_YUV422:
						case GF_PIXEL_YV12:
							w /= 2;
							break;
						}
					}
					if (stride != w) {
						needs_stride=GF_TRUE;
						for (j=0; j<h; j++) {
							memcpy(dst, data, sizeof(char)*w);
							dst += w;
							data += stride;
						}
					} else {
						memcpy(dst, data, sizeof(char)*h*stride);
						dst += sizeof(char)*h*stride;
					}
				}
			}
		} else if ((type==kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) || (type==kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) {
			//NV12: copy the Y plane, then de-interleave CbCr into planar U/V
			char *dst_v;
			char *data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 0);
			u32 stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 0);
			u32 i, h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, 0);

			if (stride==ctx->width) {
				memcpy(dst, data, sizeof(char)*h*stride);
				dst += sizeof(char)*h*stride;
			} else {
				for (i=0; i<h; i++) {
					memcpy(dst, data, sizeof(char)*ctx->width);
					dst += ctx->width;
					data += stride;
				}
				needs_stride=GF_TRUE;
			}

			data = CVPixelBufferGetBaseAddressOfPlane(ctx->frame, 1);
			stride = (u32) CVPixelBufferGetBytesPerRowOfPlane(ctx->frame, 1);
			h = (u32) CVPixelBufferGetHeightOfPlane(ctx->frame, 1);
			//U plane is written at dst, V plane h*stride/2 bytes further on
			dst_v = dst+sizeof(char) * h*stride/2;

			for (i=0; i<ctx->width * h / 2; i++) {
				*dst = data[0];
				*dst_v = data[1];
				data += 2;
				dst_v++;
				dst++;

				//NOTE(review): this skips the row padding when i is a
				//multiple of ctx->width, which fires at i==0 and uses the
				//full luma width as the row length even though each CbCr
				//row holds width/2 pairs — looks like an off-by-one /
				//wrong-modulus for padded strides; confirm with a buffer
				//where stride != width before changing.
				if (!(i%ctx->width)) data += (stride - ctx->width);
			}
		}
	}
	CVPixelBufferUnlockBaseAddress(ctx->frame, kCVPixelBufferLock_ReadOnly);

	return GF_OK;
}