static GstFlowReturn
gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstAmcVideoDec *self;
  gint idx;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  guint offset = 0;
  GstClockTime timestamp, duration, timestamp_offset = 0;
  GstMapInfo minfo;
  GError *err = NULL;

  memset (&minfo, 0, sizeof (minfo));

  self = GST_AMC_VIDEO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Handling frame");

  if (!self->started) {
    GST_ERROR_OBJECT (self, "Codec not started yet");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (self->flushing)
    goto flushing;

  if (self->downstream_flow_ret != GST_FLOW_OK)
    goto downstream_error;

  timestamp = frame->pts;
  duration = frame->duration;

  gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ);

  while (offset < minfo.size) {
    /* Make sure to release the base class stream lock, otherwise
     * _loop() can't call _finish_frame() and we might block forever
     * because no input buffers are released */
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    /* Wait at most 100ms here, some codecs don't fail dequeueing if
     * the codec is flushing, causing deadlocks during shutdown */
    idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
    GST_VIDEO_DECODER_STREAM_LOCK (self);

    if (idx < 0) {
      if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
        g_clear_error (&err);
        goto flushing;
      }

      switch (idx) {
        case INFO_TRY_AGAIN_LATER:
          GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
          continue;             /* next try */
          break;
        case G_MININT:
          GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
          goto dequeue_error;
        default:
          g_assert_not_reached ();
          break;
      }

      continue;
    }

    if (self->flushing) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
      goto flushing;
    }

    if (self->downstream_flow_ret != GST_FLOW_OK) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      goto downstream_error;
    }

    /* Now handle the frame */

    /* Copy the buffer content in chunks of size as requested
     * by the port */
    buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
    if (!buf)
      goto failed_to_get_input_buffer;

    memset (&buffer_info, 0, sizeof (buffer_info));
    buffer_info.offset = 0;
    buffer_info.size = MIN (minfo.size - offset, buf->size);
    gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
        buffer_info.size);

    orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);

    gst_amc_buffer_free (buf);
    buf = NULL;

    /* Interpolate timestamps if we're passing the buffer
     * in multiple chunks */
    if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
      timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
    }

    if (timestamp != GST_CLOCK_TIME_NONE) {
      buffer_info.presentation_time_us =
          gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
      self->last_upstream_ts = timestamp + timestamp_offset;
    }
    if (duration != GST_CLOCK_TIME_NONE)
      self->last_upstream_ts += duration;

    if (offset == 0) {
      BufferIdentification *id =
          buffer_identification_new (timestamp + timestamp_offset);

      if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
        buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
      gst_video_codec_frame_set_user_data (frame, id,
          (GDestroyNotify) buffer_identification_free);
    }

    offset += buffer_info.size;
    GST_DEBUG_OBJECT (self,
        "Queueing buffer %d: size %d time %" G_GINT64_FORMAT
        " flags 0x%08x", idx, buffer_info.size,
        buffer_info.presentation_time_us, buffer_info.flags);

    if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
            &err)) {
      if (self->flushing) {
        g_clear_error (&err);
        goto flushing;
      }
      goto queue_error;
    }

    self->drained = FALSE;
  }

  gst_buffer_unmap (frame->input_buffer, &minfo);
  gst_video_codec_frame_unref (frame);

  return self->downstream_flow_ret;

downstream_error:
  {
    GST_ERROR_OBJECT (self, "Downstream returned %s",
        gst_flow_get_name (self->downstream_flow_ret));
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return self->downstream_flow_ret;
  }
failed_to_get_input_buffer:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
queue_error:
  {
    GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_FLUSHING;
  }
}
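/* For reference: when one input frame is split across several codec buffers,
 * the loop above interpolates each chunk's timestamp as
 * gst_util_uint64_scale (offset, duration, size), i.e. offset * duration /
 * size computed without 64-bit overflow.  The following is a minimal
 * standalone sketch of that arithmetic with hypothetical example values; it
 * is not part of the element (the #include is redundant in the real source
 * file). */

#include <gst/gst.h>

static void
sketch_chunk_timestamps (void)
{
  GstClockTime timestamp = 1 * GST_SECOND;      /* frame PTS */
  GstClockTime duration = 40 * GST_MSECOND;     /* one frame at 25 fps */
  gsize size = 3000;                            /* mapped input buffer size */
  gsize chunk = 1000;                           /* per-codec-buffer capacity */
  gsize offset;

  for (offset = 0; offset < size; offset += chunk) {
    GstClockTime ts_offset =
        (offset != 0) ? gst_util_uint64_scale (offset, duration, size) : 0;
    gint64 pts_us =
        gst_util_uint64_scale (timestamp + ts_offset, 1, GST_USECOND);

    /* Prints 1000000, 1013333 and 1026666 us for the three chunks */
    g_print ("chunk at %" G_GSIZE_FORMAT ": %" G_GINT64_FORMAT " us\n",
        offset, pts_us);
  }
}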
static GstFlowReturn
gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
    GstVideoCodecFrame * frame)
{
  GstVP8Enc *encoder;
  GstVPXEnc *vpx_enc;
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVP8EncUserData *user_data = gst_video_codec_frame_get_user_data (frame);
  GList *l;
  gint inv_count;
  GstVideoInfo *info;

  GST_DEBUG_OBJECT (video_encoder, "pre_push");

  encoder = GST_VP8_ENC (video_encoder);
  vpx_enc = GST_VPX_ENC (encoder);

  info = &vpx_enc->input_state->info;

  g_assert (user_data != NULL);

  for (inv_count = 0, l = user_data->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    /* FIXME : All of this should have already been handled by base classes, no ? */
    if (l == user_data->invisible
        && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY);
    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
    GST_BUFFER_DURATION (buf) = 0;
    if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
      GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
    } else {
      GST_BUFFER_OFFSET_END (buf) =
          _to_granulepos (frame->presentation_frame_number + 1,
          inv_count, encoder->keyframe_distance);
      GST_BUFFER_OFFSET (buf) =
          gst_util_uint64_scale (frame->presentation_frame_number + 1,
          GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
          GST_VIDEO_INFO_FPS_N (info));
    }

    ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->output_buffer;

  /* FIXME : All of this should have already been handled by base classes, no ? */
  if (!user_data->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
    GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
  } else {
    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1, 0,
        encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
        GST_VIDEO_INFO_FPS_N (info));
  }

  GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

done:
  return ret;
}
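/* For reference: _to_granulepos() is defined elsewhere in gstvp8enc.c and is
 * not shown in this section.  As a hedged sketch only, a helper of this shape
 * packs the Ogg-style VP8 granulepos fields: the frame-end number in the top
 * 32 bits, a 2-bit invisible-frame count, and the distance since the last
 * key frame.  The exact bit layout below is an assumption; check the real
 * helper before relying on it. */

static guint64
_to_granulepos_sketch (guint64 frame_end_number, guint inv_count,
    guint keyframe_dist)
{
  guint64 granulepos;
  guint inv;

  /* Assumed encoding: 0x3 marks "no invisible frames pending" */
  inv = (inv_count == 0) ? 0x3 : inv_count - 1;

  granulepos = (frame_end_number << 32) | ((guint64) inv << 30)
      | ((guint64) keyframe_dist << 3);
  return granulepos;
}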