/* Copy one field (the even or odd rows, chosen by @field_index) of every
 * plane from @src into @dest, leaving the other field's rows untouched. */
static void
copy_field (GstInterlace * interlace, GstBuffer * dest, GstBuffer * src,
    int field_index)
{
  GstVideoInfo *info = &interlace->info;
  GstVideoFrame dest_frame, src_frame;
  gint plane, n_planes;

  if (!gst_video_frame_map (&dest_frame, info, dest, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (interlace, "failed to map dest");
    return;
  }

  if (!gst_video_frame_map (&src_frame, info, src, GST_MAP_READ)) {
    GST_ERROR_OBJECT (interlace, "failed to map src");
    gst_video_frame_unmap (&dest_frame);
    return;
  }

  n_planes = GST_VIDEO_FRAME_N_PLANES (&dest_frame);

  for (plane = 0; plane < n_planes; plane++) {
    guint8 *dp = GST_VIDEO_FRAME_PLANE_DATA (&dest_frame, plane);
    guint8 *sp = GST_VIDEO_FRAME_PLANE_DATA (&src_frame, plane);
    gint dstride = GST_VIDEO_FRAME_PLANE_STRIDE (&dest_frame, plane);
    gint sstride = GST_VIDEO_FRAME_PLANE_STRIDE (&src_frame, plane);
    gint height = GST_VIDEO_FRAME_COMP_HEIGHT (&dest_frame, plane);
    /* the smaller |stride| bounds how many bytes per row are safe to copy */
    gint row_bytes = MIN (ABS (sstride), ABS (dstride));
    gint row;

    /* advance both pointers to the first row of the requested field */
    dp += field_index * dstride;
    sp += field_index * sstride;

    /* copy every second row (one field) */
    for (row = field_index; row < height; row += 2) {
      memcpy (dp, sp, row_bytes);
      dp += dstride * 2;
      sp += sstride * 2;
    }
  }

  gst_video_frame_unmap (&dest_frame);
  gst_video_frame_unmap (&src_frame);
}
/* Encode one input buffer with VideoToolbox and push any resulting
 * compressed buffers downstream.
 *
 * Takes ownership of @buf (it is unreffed on every path).  Returns
 * GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED when downstream caps
 * negotiation fails, or GST_FLOW_ERROR when a pixel buffer could not be
 * created for the input. */
static GstFlowReturn
gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf)
{
  GstVTApi *vt = self->ctx->vt;
  CMTime ts, duration;
  GstCoreMediaMeta *meta;
  CVPixelBufferRef pbuf = NULL;
  VTStatus vt_status;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  self->cur_inbuf = buf;

  ts = CMTimeMake (GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)), 1000);
  duration =
      CMTimeMake (GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)), 1000);

  /* Reuse the Core Media pixel buffer if the input buffer already carries
   * one; otherwise wrap the raw video frame below. */
  meta = gst_buffer_get_core_media_meta (buf);
  if (meta != NULL) {
    pbuf = gst_core_media_buffer_get_pixel_buffer (buf);
  }

  if (pbuf == NULL) {
    GstVTEncFrame *frame;
    CVReturn cv_ret;

    frame = gst_vtenc_frame_new (buf, &self->video_info);
    if (!frame)
      goto cv_error;

    {
      const size_t num_planes = GST_VIDEO_FRAME_N_PLANES (&frame->videoframe);
      void *plane_base_addresses[GST_VIDEO_MAX_PLANES];
      size_t plane_widths[GST_VIDEO_MAX_PLANES];
      size_t plane_heights[GST_VIDEO_MAX_PLANES];
      size_t plane_bytes_per_row[GST_VIDEO_MAX_PLANES];
      OSType pixel_format_type;
      size_t i;

      for (i = 0; i < num_planes; i++) {
        plane_base_addresses[i] =
            GST_VIDEO_FRAME_PLANE_DATA (&frame->videoframe, i);
        plane_widths[i] = GST_VIDEO_FRAME_COMP_WIDTH (&frame->videoframe, i);
        plane_heights[i] = GST_VIDEO_FRAME_COMP_HEIGHT (&frame->videoframe, i);
        /* was assigned twice in a row; the duplicate statement is removed */
        plane_bytes_per_row[i] =
            GST_VIDEO_FRAME_COMP_STRIDE (&frame->videoframe, i);
      }

      switch (GST_VIDEO_INFO_FORMAT (&self->video_info)) {
        case GST_VIDEO_FORMAT_I420:
          pixel_format_type = kCVPixelFormatType_420YpCbCr8Planar;
          break;
        case GST_VIDEO_FORMAT_NV12:
          pixel_format_type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
          break;
        default:
          /* unsupported format: release the frame before bailing out
           * (this path previously leaked @frame) */
          gst_vtenc_frame_free (frame);
          goto cv_error;
      }

      cv_ret = CVPixelBufferCreateWithPlanarBytes (NULL,
          self->negotiated_width, self->negotiated_height,
          pixel_format_type,
          frame,
          GST_VIDEO_FRAME_SIZE (&frame->videoframe),
          num_planes,
          plane_base_addresses,
          plane_widths,
          plane_heights,
          plane_bytes_per_row,
          gst_pixel_buffer_release_cb, frame, NULL, &pbuf);
      if (cv_ret != kCVReturnSuccess) {
        gst_vtenc_frame_free (frame);
        goto cv_error;
      }
    }
  }

  GST_OBJECT_LOCK (self);

  /* A forced keyframe invalidates the caps we cached downstream */
  self->expect_keyframe = CFDictionaryContainsKey (self->options,
      *(vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  if (self->expect_keyframe)
    gst_vtenc_clear_cached_caps_downstream (self);

  vt_status = self->ctx->vt->VTCompressionSessionEncodeFrame (self->session,
      pbuf, ts, duration, self->options, NULL, NULL);

  if (vt_status != 0) {
    GST_WARNING_OBJECT (self,
        "VTCompressionSessionEncodeFrame returned %d", vt_status);
  }

  self->ctx->vt->VTCompressionSessionCompleteFrames (self->session,
      kCMTimeInvalid);

  GST_OBJECT_UNLOCK (self);

  CVPixelBufferRelease (pbuf);
  self->cur_inbuf = NULL;
  gst_buffer_unref (buf);

  /* Negotiate downstream caps from the first output sample before pushing */
  if (self->cur_outbufs->len > 0) {
    meta =
        gst_buffer_get_core_media_meta (g_ptr_array_index (self->cur_outbufs,
            0));
    if (!gst_vtenc_negotiate_downstream (self, meta->sample_buf))
      ret = GST_FLOW_NOT_NEGOTIATED;
  }

  for (i = 0; i != self->cur_outbufs->len; i++) {
    /* renamed from `buf` to avoid shadowing the input-buffer parameter */
    GstBuffer *outbuf = g_ptr_array_index (self->cur_outbufs, i);

    if (ret == GST_FLOW_OK) {
      ret = gst_pad_push (self->srcpad, outbuf);
    } else {
      gst_buffer_unref (outbuf);
    }
  }
  g_ptr_array_set_size (self->cur_outbufs, 0);

  return ret;

cv_error:
  {
    self->cur_inbuf = NULL;
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
/* Copy the decoded frame (sized per dec->decoded_info) into a freshly
 * allocated output buffer sized for the negotiated output state,
 * effectively cropping each plane row-by-row. */
static GstFlowReturn
gst_mpeg2dec_crop_buffer (GstMpeg2dec * dec, GstVideoCodecFrame * in_frame,
    GstVideoFrame * input_vframe)
{
  GstVideoCodecState *state;
  GstVideoInfo *out_info;
  GstVideoInfo *dec_info;
  GstVideoFrame out_frame;
  GstFlowReturn ret;
  guint plane, n_planes;

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  out_info = &state->info;
  dec_info = &dec->decoded_info;

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
      "Copying input buffer %ux%u (%" G_GSIZE_FORMAT ") to output buffer "
      "%ux%u (%" G_GSIZE_FORMAT ")", dec_info->width, dec_info->height,
      dec_info->size, out_info->width, out_info->height, out_info->size);

  ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec),
      in_frame);
  if (ret != GST_FLOW_OK)
    goto done;

  if (!gst_video_frame_map (&out_frame, out_info, in_frame->output_buffer,
          GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output frame");
    gst_video_codec_state_unref (state);
    return GST_FLOW_ERROR;
  }

  n_planes = GST_VIDEO_FRAME_N_PLANES (&out_frame);
  for (plane = 0; plane < n_planes; plane++) {
    guint8 *src_ptr = GST_VIDEO_FRAME_PLANE_DATA (input_vframe, plane);
    guint8 *dst_ptr = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, plane);
    gint src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (input_vframe, plane);
    gint dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&out_frame, plane);
    /* the smaller |stride| bounds the safe per-row copy size */
    guint row_bytes = MIN (ABS (src_stride), ABS (dst_stride));
    guint rows = GST_VIDEO_FRAME_COMP_HEIGHT (&out_frame, plane);
    guint row;

    GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy plane %u, w:%u h:%u ",
        plane, row_bytes, rows);

    for (row = 0; row < rows; row++) {
      memcpy (dst_ptr, src_ptr, row_bytes);
      dst_ptr += dst_stride;
      src_ptr += src_stride;
    }
  }

  gst_video_frame_unmap (&out_frame);

  /* propagate buffer flags (interlacing, field order, ...) */
  GST_BUFFER_FLAGS (in_frame->output_buffer) =
      GST_BUFFER_FLAGS (input_vframe->buffer);

done:
  gst_video_codec_state_unref (state);
  return ret;
}
/* Fill the output frame with the configured background, then composite
 * every sink pad's aggregated frame on top of it. */
static GstFlowReturn
gst_compositor_aggregate_frames (GstVideoAggregator * vagg, GstBuffer * outbuf)
{
  GstCompositor *self = GST_COMPOSITOR (vagg);
  GstVideoFrame out_frame;
  BlendFunction blend_fn;
  GList *walk;

  if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf, GST_MAP_WRITE))
    return GST_FLOW_ERROR;

  /* default to blending */
  blend_fn = self->blend;

  switch (self->background) {
    case COMPOSITOR_BACKGROUND_CHECKER:
      self->fill_checker (&out_frame);
      break;
    case COMPOSITOR_BACKGROUND_BLACK:
      self->fill_color (&out_frame, 16, 128, 128);
      break;
    case COMPOSITOR_BACKGROUND_WHITE:
      self->fill_color (&out_frame, 240, 128, 128);
      break;
    case COMPOSITOR_BACKGROUND_TRANSPARENT:{
      guint n_planes = GST_VIDEO_FRAME_N_PLANES (&out_frame);
      guint plane;

      /* zero every sample row of every plane (padding bytes beyond the
       * component width are left untouched) */
      for (plane = 0; plane < n_planes; plane++) {
        guint8 *row = GST_VIDEO_FRAME_PLANE_DATA (&out_frame, plane);
        gsize stride = GST_VIDEO_FRAME_PLANE_STRIDE (&out_frame, plane);
        gsize row_bytes = GST_VIDEO_FRAME_COMP_WIDTH (&out_frame, plane) *
            GST_VIDEO_FRAME_COMP_PSTRIDE (&out_frame, plane);
        guint height = GST_VIDEO_FRAME_COMP_HEIGHT (&out_frame, plane);
        guint y;

        for (y = 0; y < height; y++) {
          memset (row, 0, row_bytes);
          row += stride;
        }
      }
      /* use overlay to keep background transparent */
      blend_fn = self->overlay;
      break;
    }
  }

  GST_OBJECT_LOCK (vagg);
  for (walk = GST_ELEMENT (vagg)->sinkpads; walk != NULL; walk = walk->next) {
    GstVideoAggregatorPad *vpad = walk->data;
    GstCompositorPad *cpad = GST_COMPOSITOR_PAD (vpad);

    if (vpad->aggregated_frame != NULL) {
      blend_fn (vpad->aggregated_frame, cpad->xpos, cpad->ypos, cpad->alpha,
          &out_frame);
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  gst_video_frame_unmap (&out_frame);

  return GST_FLOW_OK;
}