/* gst_mpeg2dec_alloc_sized_buf:
 * @mpeg2dec: the decoder instance
 * @size: requested buffer size in bytes (used only on the cropping path)
 * @frame: the codec frame the buffer is allocated for
 * @buffer: (out): receives the allocated buffer
 *
 * Allocates the output buffer for @frame.  When no cropping is needed (or
 * downstream supports crop meta) the buffer comes from the decoder's
 * negotiated output pool; otherwise a plain 16-byte-aligned system-memory
 * buffer of @size bytes is allocated and attached to @frame as user data.
 *
 * Returns: %GST_FLOW_OK or the error from output-frame allocation.
 */
static GstFlowReturn
gst_mpeg2dec_alloc_sized_buf (GstMpeg2dec * mpeg2dec, guint size,
    GstVideoCodecFrame * frame, GstBuffer ** buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoCodecState *state;

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));

  if (!mpeg2dec->need_cropping || mpeg2dec->has_cropping) {
    /* need parsed input, but that might be slightly bogus,
     * so avoid giving up altogether and mark it as error */
    if (frame->output_buffer) {
      gst_buffer_replace (&frame->output_buffer, NULL);
      GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE,
          ("decoding error"), ("Input not correctly parsed"), ret);
    }
    ret =
        gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (mpeg2dec),
        frame);
    *buffer = frame->output_buffer;
  } else {
    /* 15 == (16 - 1): request 16-byte alignment for the raw allocation */
    GstAllocationParams params = { 0, 15, 0, 0 };

    /* FIX: was the mojibake "¶ms" (a mangled "&params"), which does not
     * compile; pass the address of the allocation params. */
    *buffer = gst_buffer_new_allocate (NULL, size, &params);
    gst_video_codec_frame_set_user_data (frame, *buffer,
        (GDestroyNotify) frame_user_data_destroy_notify);
  }

  gst_video_codec_state_unref (state);
  return ret;
}
/* Attach the raw @image to @frame as user data; it is released through
 * gst_vp9_enc_user_data_free when the frame is destroyed. */
static void
gst_vp9_enc_set_frame_user_data (GstVPXEnc * enc, GstVideoCodecFrame * frame,
    vpx_image_t * image)
{
  gst_video_codec_frame_set_user_data (frame, image,
      (GDestroyNotify) gst_vp9_enc_user_data_free);
}
/* do_parse:
 * @decoder: the VA-API decoder
 * @base_frame: the codec frame currently being assembled
 * @adapter: adapter holding the not-yet-parsed input bytes
 * @at_eos: %TRUE if no more data will arrive after @adapter's contents
 * @got_unit_size_ptr: (out): size of the unit consumed, 0 if none
 * @got_frame_ptr: (out): %TRUE when @base_frame is complete
 *
 * Parses one decoder unit out of @adapter via the subclass parse() vfunc
 * and appends it to the GstVaapiParserFrame attached to @base_frame
 * (creating and attaching that parser frame on first use).  A unit that
 * starts a new frame while the current frame already has units is kept
 * pending in the parser state and re-emitted on the next call.
 */
static GstVaapiDecoderStatus
do_parse (GstVaapiDecoder * decoder, GstVideoCodecFrame * base_frame,
    GstAdapter * adapter, gboolean at_eos, guint * got_unit_size_ptr,
    gboolean * got_frame_ptr)
{
  GstVaapiParserState *const ps = &decoder->parser_state;
  GstVaapiParserFrame *frame;
  GstVaapiDecoderUnit *unit;
  GstVaapiDecoderStatus status;

  *got_unit_size_ptr = 0;
  *got_frame_ptr = FALSE;

  /* Lazily create the parser frame and tie its lifetime to @base_frame */
  frame = gst_video_codec_frame_get_user_data (base_frame);
  if (!frame) {
    GstVideoCodecState *const codec_state = decoder->codec_state;
    frame = gst_vaapi_parser_frame_new (codec_state->info.width,
        codec_state->info.height);
    if (!frame)
      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    gst_video_codec_frame_set_user_data (base_frame,
        frame, (GDestroyNotify) gst_vaapi_mini_object_unref);
  }

  parser_state_prepare (ps, adapter);

  unit = &ps->next_unit;
  /* A unit parsed on the previous call but belonging to this frame is
   * replayed here without touching the subclass parser again. */
  if (ps->next_unit_pending) {
    ps->next_unit_pending = FALSE;
    goto got_unit;
  }
  gst_vaapi_decoder_unit_init (unit);

  ps->current_frame = base_frame;
  status = GST_VAAPI_DECODER_GET_CLASS (decoder)->parse (decoder,
      adapter, at_eos, unit);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
    if (at_eos && frame->units->len > 0 &&
        status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA) {
      /* XXX: assume the frame is complete at <EOS> */
      *got_frame_ptr = TRUE;
      return GST_VAAPI_DECODER_STATUS_SUCCESS;
    }
    return status;
  }

  /* New frame starts while the current one has units: finish the current
   * frame now and keep this unit pending for the next call. */
  if (GST_VAAPI_DECODER_UNIT_IS_FRAME_START (unit) && frame->units->len > 0) {
    ps->next_unit_pending = TRUE;
    *got_frame_ptr = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
  }

got_unit:
  gst_vaapi_parser_frame_append_unit (frame, unit);
  *got_unit_size_ptr = unit->size;
  *got_frame_ptr = GST_VAAPI_DECODER_UNIT_IS_FRAME_END (unit);
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Wrap @image in a zero-initialized GstVP8EncUserData record and attach it
 * to @frame; gst_vp8_enc_user_data_free releases both when the frame dies. */
static void
gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc, GstVideoCodecFrame * frame,
    vpx_image_t * image)
{
  GstVP8EncUserData *user_data = g_slice_new0 (GstVP8EncUserData);

  user_data->image = image;
  gst_video_codec_frame_set_user_data (frame, user_data,
      (GDestroyNotify) gst_vp8_enc_user_data_free);
}
/* Push @frame onto the decoder's output queue marked decode-only, so it is
 * consumed but never displayed (no surface proxy is attached). */
static void
drop_frame (GstVaapiDecoder * decoder, GstVideoCodecFrame * frame)
{
  GST_DEBUG ("drop frame %d", frame->system_frame_number);

  frame->pts = GST_CLOCK_TIME_NONE;
  /* no surface proxy */
  gst_video_codec_frame_set_user_data (frame, NULL, NULL);
  GST_VIDEO_CODEC_FRAME_FLAG_SET (frame,
      GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY);

  g_async_queue_push (decoder->frames, gst_video_codec_frame_ref (frame));
}
/* Submit the surface held by @proxy to @encoder wrapped in a fresh codec
 * frame; the frame keeps its own reference on the proxy.
 * Returns TRUE on successful submission. */
static gboolean
upload_frame (GstVaapiEncoder * encoder, GstVaapiSurfaceProxy * proxy)
{
  GstVideoCodecFrame *frame = g_slice_new0 (GstVideoCodecFrame);

  gst_video_codec_frame_set_user_data (frame,
      gst_vaapi_surface_proxy_ref (proxy),
      (GDestroyNotify) gst_vaapi_surface_proxy_unref);

  return gst_vaapi_encoder_put_frame (encoder, frame) ==
      GST_VAAPI_ENCODER_STATUS_SUCCESS;
}
/* Drain every already-encoded buffer from the encoder (zero timeout) and
 * detach the codec frame attached to each coded-buffer proxy before
 * releasing the proxy. */
static void
gst_vaapiencode_purge (GstVaapiEncode * encode)
{
  GstVaapiEncoderStatus status;

  for (;;) {
    GstVaapiCodedBufferProxy *codedbuf_proxy = NULL;
    GstVideoCodecFrame *out_frame;

    status = gst_vaapi_encoder_get_buffer_with_timeout (encode->encoder,
        &codedbuf_proxy, 0);
    if (status != GST_VAAPI_ENCODER_STATUS_SUCCESS)
      break;

    out_frame = gst_vaapi_coded_buffer_proxy_get_user_data (codedbuf_proxy);
    if (out_frame)
      gst_video_codec_frame_set_user_data (out_frame, NULL, NULL);
    gst_vaapi_coded_buffer_proxy_unref (codedbuf_proxy);
  }
}
/* gst_vdp_h264_dec_handle_frame:
 * @video_decoder: the base video decoder
 * @frame: codec frame whose input_buffer carries GstH264Meta
 *
 * Decodes one H.264 access unit through VDPAU: stores any SPS/PPS from the
 * meta, resolves per-slice PPS pointers, renders the slices and hands the
 * resulting GstH264Frame to DPB handling.  Frames before the first IDR are
 * finished without decoding.
 */
static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstVideoDecoder * video_decoder,
    GstVideoCodecFrame * frame)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);
  GstH264Meta *h264_meta;
  GstH264Frame *h264_frame;
  GList *tmp;
  GstFlowReturn ret;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  GstH264SliceHdr *first_slice;
  guint i;
  GstMapInfo map;

  GST_DEBUG ("handle_frame");

  h264_meta = gst_buffer_get_h264_meta (frame->input_buffer);
  if (G_UNLIKELY (h264_meta == NULL))
    goto no_h264_meta;

  if (G_UNLIKELY (h264_meta->num_slices == 0))
    goto no_slices;

  /* Handle PPS/SPS/SEI if present */
  /* NOTE(review): g_slice_dup overwrites any previously stored entry at the
   * same id without freeing it — presumably leaks on parameter-set updates;
   * verify against the rest of the element. */
  if (h264_meta->sps) {
    for (tmp = h264_meta->sps; tmp; tmp = tmp->next) {
      GstH264SPS *sps = (GstH264SPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing SPS %d", sps->id);
      h264_dec->sps[sps->id] = g_slice_dup (GstH264SPS, sps);
    }
  }
  if (h264_meta->pps) {
    for (tmp = h264_meta->pps; tmp; tmp = tmp->next) {
      GstH264PPS *pps = (GstH264PPS *) tmp->data;
      GST_LOG_OBJECT (h264_dec, "Storing PPS %d", pps->id);
      h264_dec->pps[pps->id] = g_slice_dup (GstH264PPS, pps);
      /* Adjust pps pointer */
      h264_dec->pps[pps->id]->sequence = h264_dec->sps[pps->sps_id];
    }
  }

  first_slice = &h264_meta->slices[0];

  /* Cannot start decoding before the first IDR has been seen */
  if (!h264_dec->got_idr && first_slice->slice_type != GST_H264_NAL_SLICE_IDR)
    goto no_idr;

  /* Handle slices */
  for (i = 0; i < h264_meta->num_slices; i++) {
    GstH264SliceHdr *slice = &h264_meta->slices[i];
    GST_LOG_OBJECT (h264_dec, "Handling slice #%d", i);
    slice->pps = h264_dec->pps[slice->pps_id];
  }

  if (first_slice->slice_type == GST_H264_NAL_SLICE_IDR) {
    ret = gst_vdp_h264_dec_idr (h264_dec, frame, first_slice);
    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else
      goto skip_frame;
  }

  /* Attached as user data: freed together with the codec frame */
  h264_frame = g_slice_new0 (GstH264Frame);
  gst_video_codec_frame_set_user_data (frame, h264_frame,
      (GDestroyNotify) gst_h264_frame_free);

  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame, first_slice);
  h264_frame->frame = frame;

  gst_vdp_h264_dec_fill_info (&info, h264_dec, h264_frame, first_slice);
  info.slice_count = h264_meta->num_slices;

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_fail;

  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_meta, &map);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, h264_meta->num_slices, bufs, frame);
  g_free (bufs);
  gst_buffer_unmap (frame->input_buffer, &map);

  if (ret != GST_FLOW_OK)
    goto render_fail;

  /* DPB handling */
  return gst_vdp_h264_dec_handle_dpb (h264_dec, h264_frame, first_slice);

  /* EARLY exit */
no_idr:
  {
    GST_DEBUG_OBJECT (video_decoder, "Didn't see a IDR yet, skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }
skip_frame:
  {
    GST_DEBUG_OBJECT (video_decoder, "Skipping frame");
    return gst_video_decoder_finish_frame (video_decoder, frame);
  }
  /* ERRORS */
no_h264_meta:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have GstH264Meta");
    return GST_FLOW_ERROR;
  }
no_slices:
  {
    GST_ERROR_OBJECT (video_decoder, "Input buffer doesn't have any slices");
    return GST_FLOW_ERROR;
  }
map_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to map input buffer for READ");
    return GST_FLOW_ERROR;
  }
render_fail:
  {
    GST_ERROR_OBJECT (video_decoder, "Failed to render : %s",
        gst_flow_get_name (ret));
    gst_video_decoder_drop_frame (video_decoder, frame);
    return ret;
  }
}
/* gst_amc_video_enc_handle_frame:
 * @encoder: the base video encoder (a GstAmcVideoEnc)
 * @frame: frame to encode; exactly one reference is consumed on every path
 *
 * Dequeues an input buffer from the Android MediaCodec (retrying on
 * timeout), copies the raw frame into it and queues it for encoding.
 * A BufferIdentification holding the scaled timestamp is attached to
 * @frame as user data so the output loop can match encoded output back
 * to the frame.
 *
 * Returns: the current downstream flow return on success, or
 * GST_FLOW_FLUSHING / GST_FLOW_ERROR / GST_FLOW_NOT_NEGOTIATED.
 */
static GstFlowReturn
gst_amc_video_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstAmcVideoEnc *self;
  gint idx;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  GstClockTime timestamp, duration, timestamp_offset = 0;
  BufferIdentification *id;
  /* FIX: the buffer_fill_error label used to dereference buf->size after
   * buf had been freed and set to NULL (guaranteed NULL dereference).
   * The size is captured here before the buffer is released. */
  gint failed_buf_size = 0;
  GError *err = NULL;

  self = GST_AMC_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Handling frame");

  if (!self->started) {
    GST_ERROR_OBJECT (self, "Codec not started yet");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (self->flushing)
    goto flushing;

  if (self->downstream_flow_ret != GST_FLOW_OK)
    goto downstream_error;

  timestamp = frame->pts;
  duration = frame->duration;

again:
  /* Make sure to release the base class stream lock, otherwise
   * _loop() can't call _finish_frame() and we might block forever
   * because no input buffers are released */
  GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
  GST_VIDEO_ENCODER_STREAM_LOCK (self);

  if (idx < 0) {
    if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
      g_clear_error (&err);
      goto flushing;
    }

    switch (idx) {
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
        goto again;             /* next try */
        break;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto again;
  }

  /* State may have changed while the stream lock was released above;
   * return the dequeued buffer untouched and bail out. */
  if (self->flushing) {
    memset (&buffer_info, 0, sizeof (buffer_info));
    gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
    goto flushing;
  }

  if (self->downstream_flow_ret != GST_FLOW_OK) {
    memset (&buffer_info, 0, sizeof (buffer_info));
    gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    goto downstream_error;
  }

  /* Now handle the frame */

  /* Copy the buffer content in chunks of size as requested
   * by the port */
  buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
  if (!buf)
    goto failed_to_get_input_buffer;

  memset (&buffer_info, 0, sizeof (buffer_info));
  buffer_info.offset = 0;
  buffer_info.size = MIN (self->color_format_info.frame_size, buf->size);
  gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
      buffer_info.size);

  if (!gst_amc_video_enc_fill_buffer (self, frame->input_buffer, buf,
          &buffer_info)) {
    memset (&buffer_info, 0, sizeof (buffer_info));
    gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    /* Remember the size for the error message before releasing the buffer */
    failed_buf_size = buf->size;
    gst_amc_buffer_free (buf);
    buf = NULL;
    goto buffer_fill_error;
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (timestamp != GST_CLOCK_TIME_NONE) {
    buffer_info.presentation_time_us =
        gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
    self->last_upstream_ts = timestamp + timestamp_offset;
  }
  if (duration != GST_CLOCK_TIME_NONE)
    self->last_upstream_ts += duration;

  id = buffer_identification_new (timestamp + timestamp_offset);
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
    buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
  gst_video_codec_frame_set_user_data (frame, id,
      (GDestroyNotify) buffer_identification_free);

  GST_DEBUG_OBJECT (self,
      "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x",
      idx, buffer_info.size, buffer_info.presentation_time_us,
      buffer_info.flags);
  if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto queue_error;
  }

  self->drained = FALSE;

  gst_video_codec_frame_unref (frame);

  return self->downstream_flow_ret;

downstream_error:
  {
    GST_ERROR_OBJECT (self, "Downstream returned %s",
        gst_flow_get_name (self->downstream_flow_ret));
    gst_video_codec_frame_unref (frame);
    return self->downstream_flow_ret;
  }
failed_to_get_input_buffer:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
buffer_fill_error:
  {
    /* FIX: report the size saved above; buf is NULL at this point */
    GST_ELEMENT_ERROR (self, RESOURCE, WRITE, (NULL),
        ("Failed to write input into the amc buffer(write %dB to a %dB buffer)",
            self->color_format_info.frame_size, failed_buf_size));
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
queue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_FLUSHING;
  }
}
/* gst_vaapiencode_handle_frame:
 * @venc: the base video encoder
 * @frame: frame to encode; one reference is consumed on every path
 *
 * Converts the input buffer into a VA-API backed buffer, attaches its
 * surface proxy to @frame as user data (with an extra ref owned by the
 * frame) and submits the frame to the encoder.  The stream lock is
 * released around gst_vaapi_encoder_put_frame() so the output loop can
 * make progress.
 */
static GstFlowReturn
gst_vaapiencode_handle_frame (GstVideoEncoder * venc,
    GstVideoCodecFrame * frame)
{
  GstVaapiEncode *const encode = GST_VAAPIENCODE_CAST (venc);
  GstVaapiEncoderStatus status;
  GstVaapiVideoMeta *meta;
  GstVaapiSurfaceProxy *proxy;
  GstFlowReturn ret;
  GstBuffer *buf;

  buf = NULL;
  ret = gst_vaapi_plugin_base_get_input_buffer (GST_VAAPI_PLUGIN_BASE (encode),
      frame->input_buffer, &buf);
  if (ret != GST_FLOW_OK)
    goto error_buffer_invalid;

  /* The replace takes its own ref on buf, so buf stays valid after this
   * unref for the meta lookup below. */
  gst_buffer_replace (&frame->input_buffer, buf);
  gst_buffer_unref (buf);

  meta = gst_buffer_get_vaapi_video_meta (buf);
  if (!meta)
    goto error_buffer_no_meta;

  proxy = gst_vaapi_video_meta_get_surface_proxy (meta);
  if (!proxy)
    goto error_buffer_no_surface_proxy;

  gst_video_codec_frame_set_user_data (frame,
      gst_vaapi_surface_proxy_ref (proxy),
      (GDestroyNotify) gst_vaapi_surface_proxy_unref);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);
  status = gst_vaapi_encoder_put_frame (encode->encoder, frame);
  GST_VIDEO_ENCODER_STREAM_LOCK (encode);
  if (status < GST_VAAPI_ENCODER_STATUS_SUCCESS)
    goto error_encode_frame;

  gst_video_codec_frame_unref (frame);
  return GST_FLOW_OK;

  /* ERRORS */
error_buffer_invalid:
  {
    if (buf)
      gst_buffer_unref (buf);
    gst_video_codec_frame_unref (frame);
    return ret;
  }
error_buffer_no_meta:
  {
    GST_ERROR ("failed to get GstVaapiVideoMeta information");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
error_buffer_no_surface_proxy:
  {
    GST_ERROR ("failed to get VA surface proxy");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
error_encode_frame:
  {
    GST_ERROR ("failed to encode frame %d (status %d)",
        frame->system_frame_number, status);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
}
/* gst_vaapiencode_push_frame:
 * @encode: the encoder element
 * @timeout: how long to wait for an encoded buffer (passed through to
 *   gst_vaapi_encoder_get_buffer_with_timeout)
 *
 * Pulls one encoded buffer from the encoder, copies it into system
 * memory via the subclass alloc_buffer vfunc and pushes the finished
 * frame downstream.
 *
 * Returns: the flow return from finish_frame, or
 * GST_VAAPI_ENCODE_FLOW_TIMEOUT when no buffer was ready in @timeout.
 */
static GstFlowReturn
gst_vaapiencode_push_frame (GstVaapiEncode * encode, gint64 timeout)
{
  GstVideoEncoder *const venc = GST_VIDEO_ENCODER_CAST (encode);
  GstVaapiEncodeClass *const klass = GST_VAAPIENCODE_GET_CLASS (encode);
  GstVideoCodecFrame *out_frame;
  GstVaapiCodedBufferProxy *codedbuf_proxy = NULL;
  GstVaapiEncoderStatus status;
  GstBuffer *out_buffer;
  GstFlowReturn ret;

  status = gst_vaapi_encoder_get_buffer_with_timeout (encode->encoder,
      &codedbuf_proxy, timeout);
  if (status == GST_VAAPI_ENCODER_STATUS_NO_BUFFER)
    return GST_VAAPI_ENCODE_FLOW_TIMEOUT;
  if (status != GST_VAAPI_ENCODER_STATUS_SUCCESS)
    goto error_get_buffer;

  /* Take back ownership of the frame attached by handle_frame and
   * detach it from the coded-buffer proxy. */
  out_frame = gst_vaapi_coded_buffer_proxy_get_user_data (codedbuf_proxy);
  if (!out_frame)
    goto error_get_buffer;
  gst_video_codec_frame_ref (out_frame);
  gst_video_codec_frame_set_user_data (out_frame, NULL, NULL);

  /* Update output state */
  GST_VIDEO_ENCODER_STREAM_LOCK (encode);
  if (!ensure_output_state (encode))
    goto error_output_state;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);

  /* Allocate and copy buffer into system memory */
  out_buffer = NULL;
  ret = klass->alloc_buffer (encode,
      GST_VAAPI_CODED_BUFFER_PROXY_BUFFER (codedbuf_proxy), &out_buffer);
  gst_vaapi_coded_buffer_proxy_replace (&codedbuf_proxy, NULL);
  if (ret != GST_FLOW_OK)
    goto error_allocate_buffer;

  /* out_buffer remains valid after this unref only because
   * output_buffer now holds a reference to it (taken by the replace). */
  gst_buffer_replace (&out_frame->output_buffer, out_buffer);
  gst_buffer_unref (out_buffer);

  GST_TRACE_OBJECT (encode, "output:%" GST_TIME_FORMAT ", size:%zu",
      GST_TIME_ARGS (out_frame->pts), gst_buffer_get_size (out_buffer));

  return gst_video_encoder_finish_frame (venc, out_frame);

  /* ERRORS */
error_get_buffer:
  {
    GST_ERROR ("failed to get encoded buffer (status %d)", status);
    if (codedbuf_proxy)
      gst_vaapi_coded_buffer_proxy_unref (codedbuf_proxy);
    return GST_FLOW_ERROR;
  }
error_allocate_buffer:
  {
    GST_ERROR ("failed to allocate encoded buffer in system memory");
    if (out_buffer)
      gst_buffer_unref (out_buffer);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
error_output_state:
  {
    /* NOTE(review): status is SUCCESS when this label is reached, so the
     * "(status %d)" here is uninformative — presumably a copy/paste from
     * error_get_buffer; confirm before changing the message. */
    GST_ERROR ("failed to negotiate output state (status %d)", status);
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}