/* Set input and output caps on the m2m transform device. Returns TRUE on
 * success; FALSE when either format could not be set on the device. */
static gboolean
gst_v4l2_transform_set_caps (GstBaseTransform * trans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstV4l2Transform *self = GST_V4L2_TRANSFORM (trans);

  /* Nothing to do when both sides are unchanged. */
  if (self->incaps && self->outcaps && gst_caps_is_equal (incaps, self->incaps)
      && gst_caps_is_equal (outcaps, self->outcaps)) {
    GST_DEBUG_OBJECT (trans, "Caps did not changed");
    return TRUE;
  }

  /* TODO Add renegotiation support */
  g_return_val_if_fail (!GST_V4L2_IS_ACTIVE (self->v4l2output), FALSE);
  g_return_val_if_fail (!GST_V4L2_IS_ACTIVE (self->v4l2capture), FALSE);

  gst_caps_replace (&self->incaps, incaps);
  gst_caps_replace (&self->outcaps, outcaps);

  if (!gst_v4l2_object_set_format (self->v4l2output, incaps)) {
    GST_ERROR_OBJECT (self, "failed to set input caps: %" GST_PTR_FORMAT,
        incaps);
    return FALSE;
  }

  if (!gst_v4l2_object_set_format (self->v4l2capture, outcaps)) {
    /* Undo the partially applied configuration on the output side. */
    gst_v4l2_object_stop (self->v4l2output);
    GST_ERROR_OBJECT (self, "failed to set output caps: %" GST_PTR_FORMAT,
        outcaps);
    return FALSE;
  }

  /* FIXME implement fallback if crop not supported */
  if (!gst_v4l2_object_set_crop (self->v4l2output))
    return FALSE;
  if (!gst_v4l2_object_set_crop (self->v4l2capture))
    return FALSE;

  return TRUE;
}
/* Apply new caps to the capture device, stopping streaming first if the
 * caps actually changed. Returns TRUE on success. */
static gboolean
gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstV4l2Src *v4l2src = GST_V4L2SRC (src);
  GstV4l2Object *obj = v4l2src->v4l2object;

  /* Identical caps: keep the current device format untouched. */
  if (gst_v4l2_object_caps_equal (obj, caps))
    return TRUE;

  /* Stop capturing and deallocate buffers before reconfiguring. */
  if (!gst_v4l2_object_stop (obj))
    return FALSE;

  /* Give applications a chance to tweak the device right before S_FMT. */
  g_signal_emit (v4l2src, gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT], 0,
      v4l2src->v4l2object->video_fd, caps);

  /* error already posted on failure */
  return gst_v4l2_object_set_format (obj, caps);
}
/* Store the new input state and push its caps to the decoder's output
 * (sink-side) V4L2 queue. Compatible caps are accepted without touching
 * the device. */
static gboolean
gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  gboolean ok;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state) {
    if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
      GST_DEBUG_OBJECT (self, "Compatible caps");
      return TRUE;
    }

    /* Drop the previous state before reconfiguring. */
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
    /* FIXME we probably need to do more work if pools are active */
  }

  ok = gst_v4l2_object_set_format (self->v4l2output, state->caps);
  if (ok)
    self->input_state = gst_video_codec_state_ref (state);

  return ok;
}
/* Configure the output device for the given caps and sync the overlay and
 * crop properties. Returns FALSE when the device is closed, streaming
 * cannot be stopped, or the format is rejected. */
static gboolean
gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
  GstV4l2Object *obj = v4l2sink->v4l2object;

  LOG_CAPS (v4l2sink, caps);

  if (!GST_V4L2_IS_OPEN (obj)) {
    GST_DEBUG_OBJECT (v4l2sink, "device is not open");
    return FALSE;
  }

  /* make sure the caps changed before doing anything */
  if (gst_v4l2_object_caps_equal (obj, caps))
    return TRUE;

  if (!gst_v4l2_object_stop (obj)) {
    GST_DEBUG_OBJECT (v4l2sink, "failed to stop streaming");
    return FALSE;
  }

  if (!gst_v4l2_object_set_format (obj, caps)) {
    /* error already posted */
    GST_DEBUG_OBJECT (v4l2sink, "can't set format");
    return FALSE;
  }

  gst_v4l2sink_sync_overlay_fields (v4l2sink);
  gst_v4l2sink_sync_crop_fields (v4l2sink);

#ifdef HAVE_XVIDEO
  gst_v4l2_video_overlay_prepare_window_handle (obj, TRUE);
#endif

  GST_INFO_OBJECT (v4l2sink, "outputting buffers via mmap()");

  v4l2sink->video_width = GST_V4L2_WIDTH (obj);
  v4l2sink->video_height = GST_V4L2_HEIGHT (obj);

  /* TODO: videosink width/height should be scaled according to
   * pixel-aspect-ratio */
  GST_VIDEO_SINK_WIDTH (v4l2sink) = v4l2sink->video_width;
  GST_VIDEO_SINK_HEIGHT (v4l2sink) = v4l2sink->video_height;

  return TRUE;
}
/* Emit the pre-set-format signal, then apply the caps to the device.
 * Returns TRUE on success; the error is posted by the callee on failure. */
static gboolean
gst_v4l2src_set_format (GstV4l2Src * v4l2src, GstCaps * caps)
{
  GstV4l2Object *obj = v4l2src->v4l2object;

  /* Let applications adjust device controls right before S_FMT. */
  g_signal_emit (v4l2src, gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT], 0,
      obj->video_fd, caps);

  /* error already posted on failure */
  return gst_v4l2_object_set_format (obj, caps);
}
/* gst_v4l2sink_set_output():
 *   Set the output format (pixelformat, width, height) on the device and
 *   attempt to configure the requested frame rate via VIDIOC_S_PARM.
 *
 *   Returns FALSE only when setting the format fails; all frame rate
 *   problems are reported as warnings and the function still returns TRUE
 *   (the device simply keeps its current rate).
 */
gboolean
gst_v4l2sink_set_output (GstV4l2Sink * v4l2sink, guint32 pixelformat,
    guint32 width, guint32 height, guint fps_n, guint fps_d)
{
  gint fd = v4l2sink->v4l2object->video_fd;
  struct v4l2_streamparm stream;

  /* MPEG goes through untouched; no format/framerate configuration needed. */
  if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G'))
    return TRUE;

  if (!gst_v4l2_object_set_format (v4l2sink->v4l2object, pixelformat, width,
          height)) {
    /* error already reported */
    return FALSE;
  }

  /* Is there a reason we require the caller to always specify a framerate? */
  GST_LOG_OBJECT (v4l2sink, "Desired framerate: %u/%u", fps_n, fps_d);

  memset (&stream, 0x00, sizeof (struct v4l2_streamparm));
  stream.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  if (v4l2_ioctl (fd, VIDIOC_G_PARM, &stream) < 0) {
    GST_ELEMENT_WARNING (v4l2sink, RESOURCE, SETTINGS,
        (_("Could not get parameters on device '%s'"),
            v4l2sink->v4l2object->videodev), GST_ERROR_SYSTEM);
    goto done;
  }

  /* Note: V4L2 provides the frame interval, we have the frame rate, so the
   * numerator/denominator are crossed in this comparison on purpose. */
  if (fractions_are_equal (stream.parm.output.timeperframe.numerator,
          stream.parm.output.timeperframe.denominator, fps_d, fps_n)) {
    GST_LOG_OBJECT (v4l2sink, "Desired framerate already set");
    v4l2sink->fps_n = fps_n;
    v4l2sink->fps_d = fps_d;
    goto done;
  }

  /* We want to change the frame rate, so check whether we can. Some cheap USB
   * cameras don't have the capability */
  if ((stream.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
    GST_DEBUG_OBJECT (v4l2sink, "Not setting framerate (not supported)");
    goto done;
  }

  GST_LOG_OBJECT (v4l2sink, "Setting framerate to %u/%u", fps_n, fps_d);

  /* Note: V4L2 wants the frame interval (seconds per frame), we have the
   * frame rate, so the fraction must be inverted: interval = fps_d/fps_n.
   * FIX: the previous code wrote fps_n/fps_d, i.e. the rate itself, which
   * asked the driver for a wildly wrong interval (e.g. 30s per frame for
   * 30/1 fps). The capture-side equivalent already does this correctly. */
  stream.parm.output.timeperframe.numerator = fps_d;
  stream.parm.output.timeperframe.denominator = fps_n;

  /* some cheap USB cam's won't accept any change */
  if (v4l2_ioctl (fd, VIDIOC_S_PARM, &stream) < 0) {
    GST_ELEMENT_WARNING (v4l2sink, RESOURCE, SETTINGS,
        (_("Video input device did not accept new frame rate setting.")),
        GST_ERROR_SYSTEM);
    goto done;
  }

  v4l2sink->fps_n = fps_n;
  v4l2sink->fps_d = fps_d;

  /* if we have a framerate pre-calculate duration */
  if (fps_n > 0 && fps_d > 0) {
    v4l2sink->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
  } else {
    v4l2sink->duration = GST_CLOCK_TIME_NONE;
  }

  GST_INFO_OBJECT (v4l2sink,
      "Set framerate to %u/%u and duration to %" GST_TIME_FORMAT, fps_n,
      fps_d, GST_TIME_ARGS (v4l2sink->duration));

done:
  return TRUE;
}
/******************************************************
 * gst_v4l2src_set_capture():
 *   set capture parameters
 * return value: TRUE on success, FALSE on error
 *
 * Sets pixelformat/width/height via S_FMT, then tries to configure the
 * requested frame rate via VIDIOC_S_PARM. Only a format failure returns
 * FALSE; all frame rate problems are downgraded to warnings/debug and the
 * function still returns TRUE (the device keeps its current rate).
 ******************************************************/
gboolean
gst_v4l2src_set_capture (GstV4l2Src * v4l2src, guint32 pixelformat,
    guint32 width, guint32 height, guint fps_n, guint fps_d)
{
  gint fd = v4l2src->v4l2object->video_fd;
  struct v4l2_streamparm stream;

  if (!gst_v4l2_object_set_format (v4l2src->v4l2object, pixelformat, width,
          height)) {
    /* error already reported */
    return FALSE;
  }

  /* Is there a reason we require the caller to always specify a framerate? */
  GST_LOG_OBJECT (v4l2src, "Desired framerate: %u/%u", fps_n, fps_d);

  memset (&stream, 0x00, sizeof (struct v4l2_streamparm));
  stream.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (v4l2_ioctl (fd, VIDIOC_G_PARM, &stream) < 0) {
    GST_ELEMENT_WARNING (v4l2src, RESOURCE, SETTINGS,
        (_("Could not get parameters on device '%s'"),
            v4l2src->v4l2object->videodev), GST_ERROR_SYSTEM);
    goto done;
  }

  /* Note: V4L2 provides the frame interval, we have the frame rate, hence
   * the crossed fps_d/fps_n arguments in the comparison below. */
  if (fractions_are_equal (stream.parm.capture.timeperframe.numerator,
          stream.parm.capture.timeperframe.denominator, fps_d, fps_n)) {
    GST_LOG_OBJECT (v4l2src, "Desired framerate already set");
    v4l2src->fps_n = fps_n;
    v4l2src->fps_d = fps_d;
    goto done;
  }

  /* We want to change the frame rate, so check whether we can. Some cheap USB
   * cameras don't have the capability */
  if ((stream.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
    GST_DEBUG_OBJECT (v4l2src, "Not setting framerate (not supported)");
    goto done;
  }

  GST_LOG_OBJECT (v4l2src, "Setting framerate to %u/%u", fps_n, fps_d);

  /* Note: V4L2 wants the frame interval, we have the frame rate, so the
   * fraction is inverted here: interval = fps_d/fps_n. */
  stream.parm.capture.timeperframe.numerator = fps_d;
  stream.parm.capture.timeperframe.denominator = fps_n;

  /* some cheap USB cam's won't accept any change */
  if (v4l2_ioctl (fd, VIDIOC_S_PARM, &stream) < 0) {
    GST_ELEMENT_WARNING (v4l2src, RESOURCE, SETTINGS,
        (_("Video input device did not accept new frame rate setting.")),
        GST_ERROR_SYSTEM);
    goto done;
  }

  v4l2src->fps_n = fps_n;
  v4l2src->fps_d = fps_d;

  GST_INFO_OBJECT (v4l2src, "Set framerate to %u/%u", fps_n, fps_d);

done:
  return TRUE;
}
/* Queue one encoded frame into the decoder. On the first frame this also:
 *  - pushes the negotiated input format to the device if needed,
 *  - sends the header (codec_data or the first buffer itself) so the
 *    driver can initialize, then acquires the decoded format from the
 *    capture side and negotiates downstream,
 *  - starts the output (decoding) task on the source pad.
 * Errors are funneled through the labels at the bottom; every failure path
 * ends in `drop`, which drops the frame and returns the flow result. */
static GstFlowReturn
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  /* Lazily push the input format to the device on the first frame. */
  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
    if (!self->input_state)
      goto not_negotiated;
    if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps))
      goto not_negotiated;
  }

  /* Capture side not yet active: send the header and negotiate output. */
  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
    GstVideoInfo info;
    GstVideoCodecState *output_state;
    GstBuffer *codec_data;

    GST_DEBUG_OBJECT (self, "Sending header");

    codec_data = self->input_state->codec_data;

    /* We are running in byte-stream mode, so we don't know the headers, but
     * we need to send something, otherwise the decoder will refuse to
     * initialize. Fall back to consuming the first input buffer as header;
     * ownership of frame->input_buffer is transferred to codec_data. */
    if (codec_data) {
      gst_buffer_ref (codec_data);
    } else {
      codec_data = frame->input_buffer;
      frame->input_buffer = NULL;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, 2, 2);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    /* Release the stream lock while blocking in the driver. */
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &codec_data);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    gst_buffer_unref (codec_data);

    if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
      goto not_negotiated;

    output_state = gst_video_decoder_set_output_state (decoder,
        info.finfo->format, info.width, info.height, self->input_state);

    /* Copy the rest of the information, there might be more in the future */
    output_state->info.interlace_mode = info.interlace_mode;
    gst_video_codec_state_unref (output_state);

    if (!gst_video_decoder_negotiate (decoder)) {
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        goto flushing;
      else
        goto not_negotiated;
    }

    /* Ensure our internal pool is activated */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
            TRUE))
      goto activate_failed;
  }

  if (g_atomic_int_get (&self->processing) == FALSE) {
    /* It's possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    GST_DEBUG_OBJECT (self, "Starting decoding thread");

    /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
    g_atomic_int_set (&self->processing, TRUE);
    if (!gst_pad_start_task (decoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_dec_loop, self,
            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
      goto start_task_failed;
  }

  /* input_buffer may be NULL if it was already consumed as header above. */
  if (frame->input_buffer) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
            pool), &frame->input_buffer);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    if (ret == GST_FLOW_FLUSHING) {
      /* If the task stopped meanwhile, report its flow result instead. */
      if (g_atomic_int_get (&self->processing) == FALSE)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }

    /* No need to keep input around */
    gst_buffer_replace (&frame->input_buffer, NULL);
  }

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto drop;
  }
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }
start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start decoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }
}
/* Queue one encoded frame into the decoder (newer variant with GstV4l2Error
 * reporting and caps-based output negotiation). On the first frame it also:
 *  - pushes the input format to the device if not yet active,
 *  - sends the header (codec_data or the first buffer) to the driver,
 *  - acquires the decoded format, intersects it with downstream caps to
 *    pick an output pixel format, and negotiates,
 *  - starts the decoding task on the source pad.
 * On success the (potentially large) input buffer is replaced with an empty
 * buffer carrying only flags/timestamps/meta to release the memory early.
 * All failures funnel through `drop`. */
static GstFlowReturn
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  /* TRUE once frame->input_buffer has been consumed (as header). */
  gboolean processed = FALSE;
  GstBuffer *tmp;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  /* Lazily push the input format to the device on the first frame. */
  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
    if (!self->input_state)
      goto not_negotiated;
    if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps,
            &error))
      goto not_negotiated;
  }

  /* Capture side not yet active: send header and negotiate the output. */
  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
    GstVideoInfo info;
    GstVideoCodecState *output_state;
    GstBuffer *codec_data;
    GstCaps *acquired_caps, *available_caps, *caps, *filter;
    GstStructure *st;

    GST_DEBUG_OBJECT (self, "Sending header");

    codec_data = self->input_state->codec_data;

    /* We are running in byte-stream mode, so we don't know the headers, but
     * we need to send something, otherwise the decoder will refuse to
     * initialize. Fall back to the first input buffer as header. */
    if (codec_data) {
      gst_buffer_ref (codec_data);
    } else {
      codec_data = gst_buffer_ref (frame->input_buffer);
      processed = TRUE;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, 2, 2);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    /* Release the stream lock while blocking in the driver. */
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &codec_data);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    gst_buffer_unref (codec_data);

    /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
     * in the compose rectangle. gst_v4l2_object_acquire_format() checks both
     * and returns the visible size as with/height and the coded size as
     * padding. */
    if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
      goto not_negotiated;

    /* Create caps from the acquired format, remove the format field */
    acquired_caps = gst_video_info_to_caps (&info);
    st = gst_caps_get_structure (acquired_caps, 0);
    gst_structure_remove_field (st, "format");

    /* Probe currently available pixel formats */
    available_caps = gst_v4l2_object_probe_caps (self->v4l2capture, NULL);
    available_caps = gst_caps_make_writable (available_caps);

    /* Replace coded size with visible size, we want to negotiate visible size
     * with downstream, not coded size. */
    gst_caps_map_in_place (available_caps, gst_v4l2_video_remove_padding, self);

    filter = gst_caps_intersect_full (available_caps, acquired_caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (acquired_caps);
    gst_caps_unref (available_caps);
    caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
    gst_caps_unref (filter);

    GST_DEBUG_OBJECT (self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
    if (gst_caps_is_empty (caps)) {
      gst_caps_unref (caps);
      goto not_negotiated;
    }

    /* Fixate pixel format */
    caps = gst_caps_fixate (caps);

    GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);

    /* Try to set negotiated format, on success replace acquired format */
    if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
      gst_video_info_from_caps (&info, caps);
    else
      /* Non-fatal: keep the acquired format; discard the pending error. */
      gst_v4l2_clear_error (&error);

    gst_caps_unref (caps);

    output_state = gst_video_decoder_set_output_state (decoder,
        info.finfo->format, info.width, info.height, self->input_state);

    /* Copy the rest of the information, there might be more in the future */
    output_state->info.interlace_mode = info.interlace_mode;
    gst_video_codec_state_unref (output_state);

    if (!gst_video_decoder_negotiate (decoder)) {
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        goto flushing;
      else
        goto not_negotiated;
    }

    /* Ensure our internal pool is activated */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
            TRUE))
      goto activate_failed;
  }

  if (g_atomic_int_get (&self->processing) == FALSE) {
    /* It's possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    GST_DEBUG_OBJECT (self, "Starting decoding thread");

    /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
    g_atomic_int_set (&self->processing, TRUE);
    if (!gst_pad_start_task (decoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_dec_loop, self,
            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
      goto start_task_failed;
  }

  /* Skip queuing when the buffer was already consumed as header above. */
  if (!processed) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
            pool), &frame->input_buffer);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    if (ret == GST_FLOW_FLUSHING) {
      /* If the task stopped meanwhile, report its flow result instead. */
      if (g_atomic_int_get (&self->processing) == FALSE)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }
  }

  /* No need to keep input around; keep only flags/timestamps/meta so the
   * frame bookkeeping stays intact while the payload memory is released. */
  tmp = frame->input_buffer;
  frame->input_buffer = gst_buffer_new ();
  gst_buffer_copy_into (frame->input_buffer, tmp,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
      GST_BUFFER_COPY_META, 0, 0);
  gst_buffer_unref (tmp);

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_v4l2_error (self, &error);
    goto drop;
  }
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }
start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start decoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }
}