/* gst_v4l2_h264_enc_negotiate:
 * @encoder: the video encoder instance
 *
 * Negotiates the H264 profile and level with downstream: tries every
 * structure of the downstream allowed caps via negotiate_profile_and_level(),
 * and falls back to querying the driver's current profile/level controls
 * when the caps did not constrain them.  The selected profile/level strings
 * are written into the output state caps before chaining up to the parent
 * negotiate() implementation.
 *
 * Returns: %TRUE on successful negotiation, %FALSE otherwise.
 *
 * Fixes vs. previous revision:
 *  - @allowed_caps (transfer-full from gst_pad_get_allowed_caps()) was only
 *    released on the not_negotiated path; it is now also released on success.
 *  - @state (transfer-full from gst_video_encoder_get_output_state()) was
 *    never released; it is now unreffed once the structure is updated.
 *  - "profiled" typo fixed in the no_profile_level warning message.
 */
static gboolean
gst_v4l2_h264_enc_negotiate (GstVideoEncoder * encoder)
{
  GstV4l2H264Enc *self = GST_V4L2_H264_ENC (encoder);
  GstV4l2VideoEnc *venc = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Object *v4l2object = venc->v4l2output;
  GstCaps *allowed_caps;
  struct ProfileLevelCtx ctx = { self, NULL, NULL };
  GstVideoCodecState *state;
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "Negotiating H264 profile and level.");

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  if (allowed_caps) {
    if (gst_caps_is_empty (allowed_caps))
      goto not_negotiated;

    allowed_caps = gst_caps_make_writable (allowed_caps);

    /* negotiate_profile_and_level() will return TRUE on failure to keep
     * iterating, if gst_caps_foreach() returns TRUE it means there was no
     * compatible profile and level in any of the structure */
    if (gst_caps_foreach (allowed_caps, negotiate_profile_and_level, &ctx)) {
      goto no_profile_level;
    }
  }

  /* Caps did not pin a profile: ask the driver what it currently uses. */
  if (!ctx.profile) {
    struct v4l2_control control = { 0, };

    control.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;

    if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.profile = v4l2_profile_to_string (control.value);
  }

  /* Same fallback for the level. */
  if (!ctx.level) {
    struct v4l2_control control = { 0, };

    control.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;

    if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.level = v4l2_level_to_string (control.value);
  }

  GST_DEBUG_OBJECT (self, "Selected H264 profile %s at level %s",
      ctx.profile, ctx.level);

  state = gst_video_encoder_get_output_state (encoder);
  s = gst_caps_get_structure (state->caps, 0);
  /* G_TYPE_STRING values are copied into the structure, so it is safe to
   * release state and allowed_caps (which may own ctx.profile/ctx.level)
   * right after this call. */
  gst_structure_set (s, "profile", G_TYPE_STRING, ctx.profile,
      "level", G_TYPE_STRING, ctx.level, NULL);
  gst_video_codec_state_unref (state);

  if (allowed_caps)
    gst_caps_unref (allowed_caps);

  return GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder);

g_ctrl_failed:
  GST_WARNING_OBJECT (self, "Failed to get H264 profile and level: '%s'",
      g_strerror (errno));
  goto not_negotiated;

no_profile_level:
  GST_WARNING_OBJECT (self, "No compatible level and profile in caps: %"
      GST_PTR_FORMAT, allowed_caps);
  goto not_negotiated;

not_negotiated:
  if (allowed_caps)
    gst_caps_unref (allowed_caps);
  return FALSE;
}
/* gst_amc_video_enc_handle_output_frame:
 * @self: the amc video encoder
 * @buf: the MediaCodec output buffer holding the encoded bytes
 * @buffer_info: offset/size/flags/timestamp metadata for @buf
 * @frame: (nullable): the codec frame matched to this output, or %NULL
 *
 * Turns one MediaCodec output buffer into downstream data: codec-config
 * buffers become stream headers (H.264 byte-stream) or codec_data (all
 * other formats), and regular payload is copied into a freshly allocated
 * output buffer and pushed via finish_frame() (or a raw pad push when no
 * matching @frame exists).
 *
 * Returns: a #GstFlowReturn from the downstream push/finish, or
 * %GST_FLOW_NOT_NEGOTIATED if renegotiation after codec_data failed.
 */
static GstFlowReturn
gst_amc_video_enc_handle_output_frame (GstAmcVideoEnc * self,
    GstAmcBuffer * buf, const GstAmcBufferInfo * buffer_info,
    GstVideoCodecFrame * frame)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstVideoEncoder *encoder = GST_VIDEO_ENCODER_CAST (self);

  /* The BUFFER_FLAG_CODEC_CONFIG logic is borrowed from
   * gst-omx. see *_handle_output_frame in
   * gstomxvideoenc.c and gstomxh264enc.c */
  if ((buffer_info->flags & BUFFER_FLAG_CODEC_CONFIG)
      && buffer_info->size > 0) {
    GstStructure *s;
    GstVideoCodecState *state;

    state = gst_video_encoder_get_output_state (encoder);
    s = gst_caps_get_structure (state->caps, 0);
    if (!strcmp (gst_structure_get_name (s), "video/x-h264")) {
      gst_video_codec_state_unref (state);

      /* Annex-B start code 0x00000001 at the front means byte-stream
       * SPS/PPS: install them as stream headers.  Note: unlike the
       * codec_data branch below, this branch deliberately does NOT
       * return — control falls through so the config bytes are also
       * pushed downstream as a regular buffer. */
      if (buffer_info->size > 4 &&
          GST_READ_UINT32_BE (buf->data + buffer_info->offset) == 0x00000001) {
        GList *l = NULL;
        GstBuffer *hdrs;

        GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format");

        hdrs = gst_buffer_new_and_alloc (buffer_info->size);
        gst_buffer_fill (hdrs, 0, buf->data + buffer_info->offset,
            buffer_info->size);

        /* gst_video_encoder_set_headers() takes ownership of the list. */
        l = g_list_append (l, hdrs);
        gst_video_encoder_set_headers (encoder, l);
      }
    } else {
      /* Non-H.264: the config blob becomes caps codec_data and forces a
       * renegotiation; the buffer itself is not pushed downstream. */
      GstBuffer *codec_data;

      GST_DEBUG_OBJECT (self, "Handling codec data");
      codec_data = gst_buffer_new_and_alloc (buffer_info->size);
      gst_buffer_fill (codec_data, 0, buf->data + buffer_info->offset,
          buffer_info->size);
      state->codec_data = codec_data;
      gst_video_codec_state_unref (state);

      if (!gst_video_encoder_negotiate (encoder)) {
        /* NOTE(review): frame may be NULL here (see the else-if below);
         * gst_video_codec_frame_unref(NULL) will trip a g_return_if_fail
         * warning — confirm codec-config output never carries a frame. */
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_NOT_NEGOTIATED;
      }

      /* NOTE(review): if @frame is non-NULL on this path it is never
       * released nor finished — looks like a possible frame leak; verify
       * against the caller's ownership contract. */
      return GST_FLOW_OK;
    }
  }

  if (buffer_info->size > 0) {
    GstBuffer *out_buf;
    GstPad *srcpad;

    srcpad = GST_VIDEO_ENCODER_SRC_PAD (encoder);
    /* NOTE(review): allocate_output_buffer() may return NULL on allocation
     * failure; gst_buffer_fill() would then crash — confirm upstream
     * guarantees or add a check. */
    out_buf = gst_video_encoder_allocate_output_buffer (encoder,
        buffer_info->size);
    gst_buffer_fill (out_buf, 0, buf->data + buffer_info->offset,
        buffer_info->size);

    /* MediaCodec timestamps are in microseconds; scale to GstClockTime. */
    GST_BUFFER_PTS (out_buf) =
        gst_util_uint64_scale (buffer_info->presentation_time_us, GST_USECOND,
        1);

    if (frame) {
      /* finish_frame() takes ownership of both frame and output_buffer. */
      frame->output_buffer = out_buf;
      flow_ret = gst_video_encoder_finish_frame (encoder, frame);
    } else {
      /* This sometimes happens at EOS or if the input is not properly framed,
       * let's handle it gracefully by allocating a new buffer for the current
       * caps and filling it */
      GST_ERROR_OBJECT (self, "No corresponding frame found");
      flow_ret = gst_pad_push (srcpad, out_buf);
    }
  } else if (frame) {
    /* Empty output but a pending frame: finish it so it is released. */
    flow_ret = gst_video_encoder_finish_frame (encoder, frame);
  }

  return flow_ret;
}