/* CAPS query handler: answers with the caps probed from the V4L2 device for
 * the queried pad (falling back to the pad template caps), intersected with
 * the query filter and combined with whatever the peer of the opposite pad
 * supports. All other queries are delegated to the parent class. */
static gboolean
gst_v4l2_transform_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  GstV4l2Transform *self = GST_V4L2_TRANSFORM (trans);
  gboolean ret = TRUE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *caps = NULL, *result = NULL;
      GstPad *pad, *otherpad;

      gst_query_parse_caps (query, &filter);

      /* Pick the queried pad and its opposite; prefer the caps probed from
       * the device when they are available. */
      if (direction == GST_PAD_SRC) {
        pad = GST_BASE_TRANSFORM_SRC_PAD (trans);
        otherpad = GST_BASE_TRANSFORM_SINK_PAD (trans);
        if (self->probed_srccaps)
          caps = gst_caps_ref (self->probed_srccaps);
      } else {
        pad = GST_BASE_TRANSFORM_SINK_PAD (trans);
        otherpad = GST_BASE_TRANSFORM_SRC_PAD (trans);
        if (self->probed_sinkcaps)
          caps = gst_caps_ref (self->probed_sinkcaps);
      }

      /* Nothing probed yet: fall back to the static template caps */
      if (!caps)
        caps = gst_pad_get_pad_template_caps (pad);

      /* Apply the query filter, keeping the filter's preference order */
      if (filter) {
        GstCaps *tmp = caps;
        caps = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      /* Ask the opposite peer what it can handle, then append our own caps
       * last so they stay available even if the peer restricts the result.
       * Note: gst_caps_append() takes ownership of the "caps" reference. */
      result = gst_pad_peer_query_caps (otherpad, caps);
      result = gst_caps_make_writable (result);
      gst_caps_append (result, caps);

      GST_DEBUG_OBJECT (self, "Returning %s caps %" GST_PTR_FORMAT,
          GST_PAD_NAME (pad), result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }
    default:
      ret = GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
          query);
      break;
  }

  return ret;
}
/* ACCEPT_CAPS handler: a caps set is acceptable when it intersects the
 * configured filter caps AND the peer of the opposite pad accepts it too. */
static gboolean
gst_capsfilter_accept_caps (GstBaseTransform * base, GstPadDirection direction,
    GstCaps * caps)
{
  GstCapsFilter *capsfilter = GST_CAPSFILTER (base);
  GstCaps *fcaps;
  gboolean result;

  /* Grab a reference to the configured caps under the object lock so a
   * concurrent property change cannot free them while we compare. */
  GST_OBJECT_LOCK (capsfilter);
  fcaps = gst_caps_ref (capsfilter->filter_caps);
  GST_OBJECT_UNLOCK (capsfilter);

  result = gst_caps_can_intersect (caps, fcaps);
  GST_DEBUG_OBJECT (capsfilter, "can intersect: %d", result);

  if (result) {
    /* if we can intersect, see if the other end also accepts */
    GstPad *otherpad = (direction == GST_PAD_SRC) ?
        GST_BASE_TRANSFORM_SINK_PAD (base) : GST_BASE_TRANSFORM_SRC_PAD (base);

    result = gst_pad_peer_query_accept_caps (otherpad, caps);
    GST_DEBUG_OBJECT (capsfilter, "peer accept: %d", result);
  }

  gst_caps_unref (fcaps);

  return result;
}
/* Instance init: build the frei0r property cache from the class description
 * and mark both pads as having fixed caps. */
static void
gst_frei0r_filter_init (GstFrei0rFilter * self, GstFrei0rFilterClass * klass)
{
  self->property_cache =
      gst_frei0r_property_cache_init (klass->properties, klass->n_properties);

  /* Both pads carry exactly one fixed format */
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (self));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (self));
}
/* Check if downstream forces variable framerate (0/1) and if
 * it is the case, use variable framerate ourself
 * Otherwise compute the framerate from the 2 buffers that we
 * have already received and make use of it as wanted framerate */
static void
gst_video_rate_check_variable_rate (GstVideoRate * videorate,
    GstBuffer * buffer)
{
  GstStructure *st;
  gint fps_d, fps_n;
  GstCaps *srcpadcaps, *tmpcaps, *downstream_caps;
  GstPad *pad = NULL;

  /* NOTE(review): assumes current caps are set on the srcpad by the time two
   * buffers arrived; gst_caps_copy() below would crash otherwise — confirm. */
  srcpadcaps =
      gst_pad_get_current_caps (GST_BASE_TRANSFORM_SRC_PAD (videorate));

  /* Guess a framerate from the PTS delta of the two buffers we have seen */
  gst_video_guess_framerate (GST_BUFFER_PTS (buffer) -
      GST_BUFFER_PTS (videorate->prevbuf), &fps_n, &fps_d);

  tmpcaps = gst_caps_copy (srcpadcaps);
  st = gst_caps_get_structure (tmpcaps, 0);
  gst_structure_set (st, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
  gst_caps_unref (srcpadcaps);

  pad = gst_pad_get_peer (GST_BASE_TRANSFORM_SRC_PAD (videorate));
  /* BUG FIX: only query the peer when there is one. The original code passed
   * a possibly-NULL pad to gst_pad_query_caps() and checked for NULL only
   * afterwards. With no peer we keep the computed framerate. */
  if (pad) {
    GstCaps *downstream_caps = gst_pad_query_caps (pad, NULL);

    if (!gst_caps_can_intersect (tmpcaps, downstream_caps)) {
      /* Downstream cannot take the guessed fixed rate: switch to variable */
      videorate->force_variable_rate = TRUE;
      gst_caps_unref (downstream_caps);
      GST_DEBUG_OBJECT (videorate, "Downstream forces variable framerate"
          " respecting it");

      goto done;
    }
    gst_caps_unref (downstream_caps);
  }

  videorate->to_rate_numerator = fps_n;
  videorate->to_rate_denominator = fps_d;

  GST_INFO_OBJECT (videorate, "Computed framerate to %d/%d",
      videorate->to_rate_numerator, videorate->to_rate_denominator);

  /* Push the guessed rate downstream as our new src caps */
  videorate->updating_caps = TRUE;
  gst_base_transform_update_src_caps (GST_BASE_TRANSFORM (videorate), tmpcaps);

done:
  gst_caps_unref (tmpcaps);
  if (pad)
    gst_object_unref (pad);
}
/* Instance init: clear the effect tables and mark both pads as fixed-caps. */
static void
gst_shagadelictv_init (GstShagadelicTV * filter, GstShagadelicTVClass * klass)
{
  /* Tables are allocated lazily once the frame size is known */
  filter->ripple = NULL;
  filter->spiral = NULL;

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
}
/* Instance init: start with the default plane count, pointing at the last
 * plane, and mark both pads as fixed-caps. */
static void
gst_quarktv_init (GstQuarkTV * filter, GstQuarkTVClass * klass)
{
  filter->planes = PLANES;
  filter->current_plane = filter->planes - 1;

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
}
/* Instance init: reset the dice map state and mark both pads as fixed-caps. */
static void
gst_dicetv_init (GstDiceTV * filter, GstDiceTVClass * klass)
{
  /* The map is (re)built when caps with actual dimensions arrive */
  filter->dicemap = NULL;
  filter->g_cube_bits = DEFAULT_CUBE_BITS;
  filter->g_cube_size = 0;
  filter->g_map_height = 0;
  filter->g_map_width = 0;

  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
}
/* ACCEPT_CAPS handler: identity accepts whatever the peer of the opposite
 * pad accepts (pure proxy). */
static gboolean
gst_identity_accept_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps)
{
  GstPad *proxy_pad;

  /* Proxy accept-caps */
  proxy_pad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SINK_PAD (base) : GST_BASE_TRANSFORM_SRC_PAD (base);

  return gst_pad_peer_query_accept_caps (proxy_pad, caps);
}
/* Finalize the chromaprint context, fetch the fingerprint string and push it
 * downstream as a tag event. Needs more than 3 seconds of audio to produce a
 * meaningful fingerprint. */
static void
gst_chromaprint_create_fingerprint (GstChromaprint * chromaprint)
{
  GstTagList *tag_list;

  if (chromaprint->duration <= 3)
    return;

  GST_DEBUG_OBJECT (chromaprint,
      "Generating fingerprint based on %d seconds of audio",
      chromaprint->duration);

  /* Close the analysis and retrieve the encoded fingerprint */
  chromaprint_finish (chromaprint->context);
  chromaprint_get_fingerprint (chromaprint->context,
      &chromaprint->fingerprint);
  chromaprint->record = FALSE;

  tag_list = gst_tag_list_new (GST_TAG_CHROMAPRINT_FINGERPRINT,
      chromaprint->fingerprint, NULL);

  /* gst_event_new_tag() takes ownership of the tag list */
  gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (chromaprint),
      gst_event_new_tag (tag_list));
}
/* Probe whether the peer of the given pad direction can negotiate
 * memory:DMABuf caps: add the dmabuf caps-feature to a copy of the filter
 * caps and see if the peer's reply still carries that feature. */
static gboolean
pad_can_dmabuf (GstMsdkVPP * thiz, GstPadDirection direction, GstCaps * filter)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM (thiz);
  GstCaps *query_caps, *peer_caps;
  GstPad *pad;
  gboolean supported = FALSE;

  pad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SRC_PAD (trans) : GST_BASE_TRANSFORM_SINK_PAD (trans);

  /* make a copy of filter caps since we need to alter the structure
   * by adding dmabuf-capsfeatures */
  query_caps = gst_caps_copy (filter);
  gst_caps_set_features (query_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_DMABUF));

  peer_caps = gst_pad_peer_query_caps (pad, query_caps);

  /* A usable reply must be non-NULL, not ANY/EMPTY, and not just our own
   * caps echoed back. */
  if (peer_caps && !gst_caps_is_any (peer_caps) &&
      !gst_caps_is_empty (peer_caps) && peer_caps != query_caps) {
    if (_gst_caps_has_feature (peer_caps, GST_CAPS_FEATURE_MEMORY_DMABUF))
      supported = TRUE;
  }

  if (query_caps)
    gst_caps_unref (query_caps);
  if (peer_caps)
    gst_caps_unref (peer_caps);

  return supported;
}
/* Query handler: for LATENCY queries (when no fixed average period is set),
 * forward the query to the peer of the opposite pad and add our own latency
 * of one input frame duration on top. Everything else goes to the parent
 * class. */
static gboolean
gst_video_rate_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  gboolean res = FALSE;
  GstPad *otherpad;

  otherpad = (direction == GST_PAD_SRC) ?
      GST_BASE_TRANSFORM_SINK_PAD (trans) : GST_BASE_TRANSFORM_SRC_PAD (trans);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min, max;
      gboolean live;
      guint64 latency;
      guint64 avg_period;
      GstPad *peer;

      /* average_period_set is written from the property thread, so read it
       * under the object lock */
      GST_OBJECT_LOCK (videorate);
      avg_period = videorate->average_period_set;
      GST_OBJECT_UNLOCK (videorate);

      if (avg_period == 0 && (peer = gst_pad_get_peer (otherpad))) {
        if ((res = gst_pad_query (peer, query))) {
          gst_query_parse_latency (query, &live, &min, &max);

          GST_DEBUG_OBJECT (videorate, "Peer latency: min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          if (videorate->from_rate_numerator != 0) {
            /* add latency. We don't really know since we hold on to the frames
             * until we get a next frame, which can be anything. We assume
             * however that this will take from_rate time. */
            latency = gst_util_uint64_scale (GST_SECOND,
                videorate->from_rate_denominator,
                videorate->from_rate_numerator);
          } else {
            /* no input framerate, we don't know */
            latency = 0;
          }

          GST_DEBUG_OBJECT (videorate, "Our latency: %"
              GST_TIME_FORMAT, GST_TIME_ARGS (latency));

          min += latency;
          if (max != -1)
            max += latency;

          GST_DEBUG_OBJECT (videorate, "Calculated total latency : min %"
              GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
              GST_TIME_ARGS (min), GST_TIME_ARGS (max));

          gst_query_set_latency (query, live, min, max);
        }
        gst_object_unref (peer);
        break;
      }
      /* Deliberate fallthrough to the default handler when a fixed average
       * period is configured or the other pad has no peer to query. */
    }
    default:
      res = GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
          query);
      break;
  }

  return res;
}
/* flush the oldest buffer: push (a writable copy of) the previously stored
 * buffer downstream, stamped with the interpolated output timestamp.
 * "duplicate" marks the buffer as a GAP repeat of the previous frame.
 * Returns the flow result of the pad push, or GST_FLOW_OK if no buffer has
 * been received yet. */
static GstFlowReturn
gst_video_rate_flush_prev (GstVideoRate * videorate, gboolean duplicate)
{
  GstFlowReturn res;
  GstBuffer *outbuf;
  GstClockTime push_ts;

  if (!videorate->prevbuf)
    goto eos_before_buffers;

  /* make sure we can write to the metadata */
  outbuf = gst_buffer_make_writable (gst_buffer_ref (videorate->prevbuf));

  GST_BUFFER_OFFSET (outbuf) = videorate->out;
  GST_BUFFER_OFFSET_END (outbuf) = videorate->out + 1;

  /* Propagate a pending discont exactly once */
  if (videorate->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    videorate->discont = FALSE;
  } else
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);

  /* Duplicated frames carry the GAP flag so downstream can identify them */
  if (duplicate)
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  else
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);

  /* this is the timestamp we put on the buffer */
  push_ts = videorate->next_ts;

  videorate->out++;
  videorate->out_frame_count++;
  if (videorate->to_rate_numerator) {
    /* interpolate next expected timestamp in the segment: base + start +
     * base_ts + out_frame_count frames at the output rate */
    videorate->next_ts =
        videorate->segment.base + videorate->segment.start +
        videorate->base_ts +
        gst_util_uint64_scale (videorate->out_frame_count,
        videorate->to_rate_denominator * GST_SECOND,
        videorate->to_rate_numerator);
    GST_BUFFER_DURATION (outbuf) = videorate->next_ts - push_ts;
  }

  /* We do not need to update time in VFR (variable frame rate) mode */
  if (!videorate->drop_only) {
    /* adapt for looping, bring back to time in current segment. */
    GST_BUFFER_TIMESTAMP (outbuf) = push_ts - videorate->segment.base;
  }

  GST_LOG_OBJECT (videorate,
      "old is best, dup, pushing buffer outgoing ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (push_ts));

  res = gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (videorate), outbuf);

  return res;

  /* WARNINGS */
eos_before_buffers:
  {
    GST_INFO_OBJECT (videorate, "got EOS before any buffer was received");
    return GST_FLOW_OK;
  }
}
/* In-place transform: pass the buffer through unchanged while applying the
 * configured debugging behaviours, in order: imperfect-ts/offset checks,
 * error-after countdown, random drops, hexdump, last-message update,
 * datarate re-timestamping, handoff signal, optional clock sync, optional
 * sleep, and single-segment retimestamping. Returns the flow result, or
 * GST_BASE_TRANSFORM_FLOW_DROPPED when the buffer was randomly dropped. */
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime rundts = GST_CLOCK_TIME_NONE;
  GstClockTime runpts = GST_CLOCK_TIME_NONE;
  GstClockTime ts, duration, runtimestamp;
  gsize size;

  size = gst_buffer_get_size (buf);

  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  /* error-after property: fail deliberately once the countdown hits zero */
  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0)
      goto error_after;
  }

  /* drop-probability property: randomly discard buffers */
  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability)
      goto dropped;
  }

  /* dump property: hexdump the buffer contents to stdout */
  if (identity->dump) {
    GstMapInfo info;

    gst_buffer_map (buf, &info, GST_MAP_READ);
    gst_util_dump_mem (info.data, info.size);
    gst_buffer_unmap (buf, &info);
  }

  if (!identity->silent) {
    gst_identity_update_last_message_for_buffer (identity, "chain", buf, size);
  }

  /* datarate property: synthesize timestamps from the byte offset */
  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = time;
    GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  /* Compute running-time DTS/PTS for sync and single-segment modes */
  if (trans->segment.format == GST_FORMAT_TIME) {
    rundts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_DTS (buf));
    runpts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_PTS (buf));
  }

  /* Prefer DTS, fall back to PTS, then 0, for the sync reference time */
  if (GST_CLOCK_TIME_IS_VALID (rundts))
    runtimestamp = rundts;
  else if (GST_CLOCK_TIME_IS_VALID (runpts))
    runtimestamp = runpts;
  else
    runtimestamp = 0;
  ret = gst_identity_do_sync (identity, runtimestamp);

  identity->offset += size;

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  /* single-segment property: rewrite timestamps into running time */
  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_DTS (buf) = rundts;
    GST_BUFFER_PTS (buf) = runpts;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;

  /* ERRORS */
error_after:
  {
    GST_ELEMENT_ERROR (identity, CORE, FAILED,
        (_("Failed after iterations as requested.")), (NULL));
    return GST_FLOW_ERROR;
  }
dropped:
  {
    if (!identity->silent) {
      gst_identity_update_last_message_for_buffer (identity, "dropping", buf,
          size);
    }

    /* Replace the dropped buffer with a GAP event so downstream timing
     * stays continuous */
    ts = GST_BUFFER_TIMESTAMP (buf);
    if (GST_CLOCK_TIME_IS_VALID (ts)) {
      duration = GST_BUFFER_DURATION (buf);
      gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (identity),
          gst_event_new_gap (ts, duration));
    }

    /* return DROPPED to basetransform. */
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
/* Walk downstream from dasfsrc (skipping any GstBaseTransform elements such
 * as a capsfilter) until a goo audio component is found, then configure that
 * peer component and move it to the OMX Idle and Executing states. */
static void
gst_dasf_change_peer_omx_state (GstDasfSrc * self)
{
  GST_INFO ("");
  GstPad *peer;
  GstElement *next_element;
  GooComponent *component;

  peer = gst_pad_get_peer (GST_BASE_SRC_PAD (self));

  if (G_UNLIKELY (peer == NULL)) {
    GST_INFO ("No next pad");
    return;
  }

  next_element = GST_ELEMENT (gst_pad_get_parent (peer));

  if (G_UNLIKELY (next_element == NULL)) {
    GST_INFO ("Cannot find a next element");
    /* BUG FIX: the original unreffed the NULL next_element here and leaked
     * the peer pad reference. */
    gst_object_unref (peer);
    return;
  }

  /** expecting a capsfilter between dasfsrc and goo audio component **/
  while (GST_IS_BASE_TRANSFORM (next_element)) {
    GST_DEBUG_OBJECT (self, "next element name: %s",
        gst_element_get_name (next_element));

    gst_object_unref (peer);
    peer = gst_pad_get_peer (GST_BASE_TRANSFORM_SRC_PAD (next_element));
    gst_object_unref (next_element);
    /* NOTE(review): peer may be NULL if the transform has no downstream
     * peer; the original code did not handle that case either — confirm. */
    next_element = GST_ELEMENT (gst_pad_get_parent (peer));

    GST_DEBUG_OBJECT (self, "one after element name: %s",
        gst_element_get_name (next_element));
  }

  /** capsfilter might be found
   * element next to the caps filter should be goo **/
  component =
      GOO_COMPONENT (g_object_get_data (G_OBJECT (next_element), "goo"));

  if (G_UNLIKELY (component == NULL)) {
    GST_INFO ("Previous element does not have a Goo component");
    gst_object_unref (peer);
    gst_object_unref (next_element);
    return;
  }

  if (!GOO_IS_TI_AUDIO_COMPONENT (component)) {
    GST_WARNING ("The component in previous element is not TI Audio");
    gst_object_unref (peer);
    gst_object_unref (next_element);
    return;
  }

  self->peer_element = g_object_ref (GST_GOO_AUDIO_FILTER (next_element));

  /* Work with a queue on the output buffers */
  g_object_set (GST_GOO_AUDIO_FILTER (next_element)->component,
      "outport-queue", TRUE, NULL);

  /** This fixates the caps on the next goo element to configure the
   * output omx port **/
  gst_goo_audio_filter_check_fixed_src_caps (GST_GOO_AUDIO_FILTER
      (next_element));

  g_object_set (GST_GOO_AUDIO_FILTER (next_element)->inport,
      "buffercount", 1, NULL);
  g_object_set (GST_GOO_AUDIO_FILTER (next_element)->outport,
      "buffercount", 1, NULL);

  GST_INFO ("Setting peer omx component to idle");
  goo_component_set_state_idle (GST_GOO_AUDIO_FILTER (next_element)->
      component);
  GST_INFO ("Setting peer omx component to executing");
  goo_component_set_state_executing (GST_GOO_AUDIO_FILTER (next_element)->
      component);

  /* BUG FIX: log the refcounts *before* dropping our references; the
   * original read ->ref_count on both objects after unreffing them
   * (use-after-free). */
  GST_DEBUG_OBJECT (self, "peer refcount = %d", G_OBJECT (peer)->ref_count);
  GST_DEBUG_OBJECT (self, "next element refcount = %d",
      G_OBJECT (next_element)->ref_count);

  gst_object_unref (peer);
  gst_object_unref (next_element);

  return;
}
/* Locate the downstream goo TI audio component (skipping any
 * GstBaseTransform elements such as a capsfilter), enable DASF mode on it,
 * set its data path, and propagate base_src's num-buffers to the encoder.
 * Does nothing if a component was already found. */
static void
gst_dasf_enable (GstDasfSrc * self)
{
  GST_INFO ("");
  GstPad *peer;
  GstElement *next_element;
  GooComponent *component;
  GstBaseSrc *base_src;

  if (self->component != NULL) {
    return;
  }

  peer = gst_pad_get_peer (GST_BASE_SRC_PAD (self));

  if (G_UNLIKELY (peer == NULL)) {
    GST_INFO ("No next pad");
    return;
  }

  next_element = GST_ELEMENT (gst_pad_get_parent (peer));

  if (G_UNLIKELY (next_element == NULL)) {
    GST_INFO ("Cannot find a next element");
    goto done;
  }

  /** expecting a capsfilter between dasfsrc and goo audio component **/
  while (GST_IS_BASE_TRANSFORM (next_element)) {
    GST_DEBUG_OBJECT (self, "next element name: %s",
        gst_element_get_name (next_element));

    gst_object_unref (peer);
    peer = gst_pad_get_peer (GST_BASE_TRANSFORM_SRC_PAD (next_element));
    gst_object_unref (next_element);
    /* NOTE(review): peer may be NULL if the transform has no downstream
     * peer; the original code did not handle that case either — confirm. */
    next_element = GST_ELEMENT (gst_pad_get_parent (peer));

    GST_DEBUG_OBJECT (self, "one after element name: %s",
        gst_element_get_name (next_element));
  }

  /** capsfilter might be found
   * element next to the caps filter should be goo **/
  component =
      GOO_COMPONENT (g_object_get_data (G_OBJECT (next_element), "goo"));

  if (G_UNLIKELY (component == NULL)) {
    GST_INFO ("Previous element does not have a Goo component");
    goto done;
  }

  if (!GOO_IS_TI_AUDIO_COMPONENT (component)) {
    GST_WARNING ("The component in previous element is not TI Audio");
    goto done;
  }

  self->component = GOO_TI_AUDIO_COMPONENT (component);
  goo_ti_audio_component_set_dasf_mode (self->component, TRUE);
  GST_DEBUG_OBJECT (self, "set data path");
  goo_ti_audio_component_set_data_path (self->component, 0);

  /** getting num-buffers from base src **/
  base_src = GST_BASE_SRC (self);
  goo_ti_audio_encoder_set_number_buffers (GOO_TI_AUDIO_ENCODER (component),
      base_src->num_buffers);

done:
  /* BUG FIX: log the refcounts *before* dropping our references (the
   * original read ->ref_count after unref — use-after-free), and do not
   * unref next_element when it is NULL. */
  GST_DEBUG_OBJECT (self, "peer refcount = %d", G_OBJECT (peer)->ref_count);
  gst_object_unref (peer);
  if (next_element) {
    GST_DEBUG_OBJECT (self, "next element refcount = %d",
        G_OBJECT (next_element)->ref_count);
    gst_object_unref (next_element);
  }

  return;
}
/* transform_caps implementation: going upstream (SRC direction) return the
 * allowed sinkpad caps; for non-fixed sink caps return the full set of
 * allowed srcpad caps; for fixed sink caps derive the expected src caps,
 * taking the peer's preferred format/size and the user-set format, size and
 * framerate into account. Returns a new caps reference, or NULL on error. */
static GstCaps *
gst_mfxpostproc_transform_caps_impl (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstMfxPostproc *const vpp = GST_MFXPOSTPROC (trans);
  GstVideoInfo vi, peer_vi;
  GstVideoFormat out_format;
  GstCaps *out_caps, *peer_caps;
  GstMfxCapsFeature feature;
  const gchar *feature_str;
  guint width, height;

  /* Generate the sink pad caps, that could be fixated afterwards */
  if (direction == GST_PAD_SRC) {
    if (!ensure_allowed_sinkpad_caps (vpp))
      return NULL;
    return gst_caps_ref (vpp->allowed_sinkpad_caps);
  }

  /* Generate complete set of src pad caps if non-fixated sink pad
   * caps are provided */
  if (!gst_caps_is_fixed (caps)) {
    if (!ensure_allowed_srcpad_caps (vpp))
      return NULL;
    return gst_caps_ref (vpp->allowed_srcpad_caps);
  }

  /* Generate the expected src pad caps, from the current fixated
   * sink pad caps */
  if (!gst_video_info_from_caps (&vi, caps))
    return NULL;

  if (vpp->deinterlace_mode)
    GST_VIDEO_INFO_INTERLACE_MODE (&vi) = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  /* Update size from user-specified parameters */
  find_best_size (vpp, &vi, &width, &height);

  /* Update format from user-specified parameters */
  peer_caps = gst_pad_peer_query_caps (GST_BASE_TRANSFORM_SRC_PAD (trans),
      vpp->allowed_srcpad_caps);

  if (gst_caps_is_any (peer_caps) || gst_caps_is_empty (peer_caps))
    return peer_caps;
  if (!gst_caps_is_fixed (peer_caps))
    peer_caps = gst_caps_fixate (peer_caps);

  gst_video_info_from_caps (&peer_vi, peer_caps);
  /* BUG FIX: the original assigned GST_VIDEO_INFO_FPS_N (a framerate
   * numerator) to out_format; the peer's negotiated pixel format is what is
   * wanted here. */
  out_format = GST_VIDEO_INFO_FORMAT (&peer_vi);

  /* Update width and height from the caps */
  if (GST_VIDEO_INFO_HEIGHT (&peer_vi) != 1
      && GST_VIDEO_INFO_WIDTH (&peer_vi) != 1)
    find_best_size (vpp, &peer_vi, &width, &height);

  /* An explicitly configured format property overrides the peer's choice */
  if (vpp->format != DEFAULT_FORMAT)
    out_format = vpp->format;

  /* Apply the user-set output framerate and derive the field duration */
  if (vpp->fps_n) {
    GST_VIDEO_INFO_FPS_N (&vi) = vpp->fps_n;
    GST_VIDEO_INFO_FPS_D (&vi) = vpp->fps_d;
    vpp->field_duration = gst_util_uint64_scale (GST_SECOND,
        vpp->fps_d, vpp->fps_n);
    if (DEFAULT_FRC_ALG == vpp->alg)
      vpp->alg = GST_MFX_FRC_PRESERVE_TIMESTAMP;
  }

  if (peer_caps)
    gst_caps_unref (peer_caps);

  feature =
      gst_mfx_find_preferred_caps_feature (GST_BASE_TRANSFORM_SRC_PAD (trans),
      &out_format);
  gst_video_info_change_format (&vi, out_format, width, height);

  out_caps = gst_video_info_to_caps (&vi);
  if (!out_caps)
    return NULL;

  if (feature) {
    feature_str = gst_mfx_caps_feature_to_string (feature);
    if (feature_str)
      gst_caps_set_features (out_caps, 0,
          gst_caps_features_new (feature_str, NULL));
  }

  if (vpp->format != out_format)
    vpp->format = out_format;

  return out_caps;
}
/* In-place I420 filter: score the difference between this frame and the
 * previous one, keep a sliding window of recent scores, and when the current
 * score stands out from the window's min/max-derived threshold, push a
 * GstForceKeyUnit custom event downstream to mark a scene change. */
static GstFlowReturn
gst_scene_change_filter_ip_I420 (GstVideoFilter2 * videofilter2,
    GstBuffer * buf, int start, int end)
{
  GstSceneChange *scenechange;
  double score_min;
  double score_max;
  double threshold;
  double score;
  gboolean change;
  int i;
  int width;
  int height;

  g_return_val_if_fail (GST_IS_SCENE_CHANGE (videofilter2), GST_FLOW_ERROR);
  scenechange = GST_SCENE_CHANGE (videofilter2);

  width = GST_VIDEO_FILTER2_WIDTH (videofilter2);
  height = GST_VIDEO_FILTER2_HEIGHT (videofilter2);

  /* First frame: nothing to compare against yet, just remember it */
  if (!scenechange->oldbuf) {
    scenechange->n_diffs = 0;
    memset (scenechange->diffs, 0, sizeof (double) * SC_N_DIFFS);
    scenechange->oldbuf = gst_buffer_ref (buf);
    return GST_FLOW_OK;
  }

  score = get_frame_score (GST_BUFFER_DATA (scenechange->oldbuf),
      GST_BUFFER_DATA (buf), width, height);

  /* Shift the score window left and append the newest score */
  memmove (scenechange->diffs, scenechange->diffs + 1,
      sizeof (double) * (SC_N_DIFFS - 1));
  scenechange->diffs[SC_N_DIFFS - 1] = score;
  scenechange->n_diffs++;

  gst_buffer_unref (scenechange->oldbuf);
  scenechange->oldbuf = gst_buffer_ref (buf);

  /* Min/max over the window, excluding the newest score (index
   * SC_N_DIFFS - 1) so the threshold reflects recent history only */
  score_min = scenechange->diffs[0];
  score_max = scenechange->diffs[0];
  for (i = 1; i < SC_N_DIFFS - 1; i++) {
    score_min = MIN (score_min, scenechange->diffs[i]);
    score_max = MAX (score_max, scenechange->diffs[i]);
  }

  threshold = 1.8 * score_max - 0.8 * score_min;

  /* Heuristic decision: small scores never trigger, scores far above the
   * history threshold (or absolutely large) do */
  if (scenechange->n_diffs > 2) {
    if (score < 5) {
      change = FALSE;
    } else if (score / threshold < 1.0) {
      change = FALSE;
    } else if (score / threshold > 2.5) {
      change = TRUE;
    } else if (score > 50) {
      change = TRUE;
    } else {
      change = FALSE;
    }
  } else {
    change = FALSE;
  }

#ifdef TESTING
  if (change != is_shot_change (scenechange->n_diffs)) {
    g_print ("%d %g %g %g %d\n", scenechange->n_diffs,
        score / threshold, score, threshold, change);
  }
#endif

  if (change) {
    GstEvent *event;

    GST_DEBUG_OBJECT (scenechange, "%d %g %g %g %d", scenechange->n_diffs,
        score / threshold, score, threshold, change);

    event =
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
        gst_structure_new ("GstForceKeyUnit", NULL));

    gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (scenechange), event);
  }

  return GST_FLOW_OK;
}
/* Per-frame calibration driver. While in CAPTURING mode: detect the
 * configured calibration pattern in the frame, refine/collect the detected
 * points (rate-limited by the delay property), and once enough frames are
 * gathered run the calibration, store the serialized settings and notify
 * both pipeline directions with a "calibrated" custom event. In all modes a
 * status text is drawn onto the frame (which is modified in place). */
void
camera_calibrate_run (GstCameraCalibrate * calib, IplImage * img)
{
  cv::Mat view = cv::cvarrToMat (img);

  /* For camera only take new samples after delay time */
  if (calib->mode == CAPTURING) {
    /* get_input */
    cv::Size imageSize = view.size ();

    /* find_pattern
     * FIXME find ways to reduce CPU usage
     * don't do it on all frames ? will it help ? corner display will be affected.
     * in a separate frame?
     * in a separate element that gets composited back into the main stream
     * (video is tee-d into it and can then be decimated, scaled, etc..)
     */
    std::vector < cv::Point2f > pointBuf;
    bool found;

    int chessBoardFlags =
        cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_NORMALIZE_IMAGE;
    if (!calib->useFisheye) {
      /* fast check erroneously fails with high distortions like fisheye */
      chessBoardFlags |= cv::CALIB_CB_FAST_CHECK;
    }

    /* Find feature points on the input format */
    switch (calib->calibrationPattern) {
      case GST_CAMERA_CALIBRATION_PATTERN_CHESSBOARD:
        found = cv::findChessboardCorners (view, calib->boardSize, pointBuf,
            chessBoardFlags);
        break;
      case GST_CAMERA_CALIBRATION_PATTERN_CIRCLES_GRID:
        found = cv::findCirclesGrid (view, calib->boardSize, pointBuf);
        break;
      case GST_CAMERA_CALIBRATION_PATTERN_ASYMMETRIC_CIRCLES_GRID:
        found = cv::findCirclesGrid (view, calib->boardSize, pointBuf,
            cv::CALIB_CB_ASYMMETRIC_GRID);
        break;
      default:
        found = FALSE;
        break;
    }

    bool blinkOutput = FALSE;
    if (found) {
      /* improve the found corners' coordinate accuracy for chessboard */
      if (calib->calibrationPattern == GST_CAMERA_CALIBRATION_PATTERN_CHESSBOARD
          && calib->cornerSubPix) {
        /* FIXME findChessboardCorners and alike do a cv::COLOR_BGR2GRAY (and a
         * histogram balance) the color convert should be done once (if needed)
         * and shared
         * FIXME keep viewGray around to avoid reallocating it each time... */
        cv::Mat viewGray;
        cv::cvtColor (view, viewGray, cv::COLOR_BGR2GRAY);
        cv::cornerSubPix (viewGray, pointBuf, cv::Size (11, 11),
            cv::Size (-1, -1),
            cv::TermCriteria (cv::TermCriteria::EPS + cv::TermCriteria::COUNT,
                30, 0.1));
      }

      /* take new samples after delay time */
      if ((calib->mode == CAPTURING) &&
          ((clock () - calib->prevTimestamp) >
              calib->delay * 1e-3 * CLOCKS_PER_SEC)) {
        calib->imagePoints.push_back (pointBuf);
        calib->prevTimestamp = clock ();
        blinkOutput = true;
      }

      /* draw the corners */
      if (calib->showCorners) {
        cv::drawChessboardCorners (view, calib->boardSize, cv::Mat (pointBuf),
            found);
      }
    }

    /* if got enough frames then stop calibration and show result */
    if (calib->mode == CAPTURING &&
        calib->imagePoints.size () >= (size_t) calib->nrFrames) {
      if (camera_calibrate_calibrate (calib, imageSize, calib->cameraMatrix,
              calib->distCoeffs, calib->imagePoints)) {
        calib->mode = CALIBRATED;

        GstPad *sink_pad = GST_BASE_TRANSFORM_SINK_PAD (calib);
        GstPad *src_pad = GST_BASE_TRANSFORM_SRC_PAD (calib);
        GstEvent *sink_event;
        GstEvent *src_event;

        /* set settings property */
        g_free (calib->settings);
        calib->settings =
            camera_serialize_undistort_settings (calib->cameraMatrix,
            calib->distCoeffs);

        /* create calibrated event and send upstream and downstream */
        sink_event = gst_camera_event_new_calibrated (calib->settings);
        GST_LOG_OBJECT (sink_pad, "Sending upstream event %s.",
            GST_EVENT_TYPE_NAME (sink_event));
        if (!gst_pad_push_event (sink_pad, sink_event)) {
          GST_WARNING_OBJECT (sink_pad,
              "Sending upstream event %p (%s) failed.",
              sink_event, GST_EVENT_TYPE_NAME (sink_event));
        }

        src_event = gst_camera_event_new_calibrated (calib->settings);
        GST_LOG_OBJECT (src_pad, "Sending downstream event %s.",
            GST_EVENT_TYPE_NAME (src_event));
        if (!gst_pad_push_event (src_pad, src_event)) {
          GST_WARNING_OBJECT (src_pad,
              "Sending downstream event %p (%s) failed.",
              src_event, GST_EVENT_TYPE_NAME (src_event));
        }
      } else {
        /* failed to calibrate, go back to detection mode */
        calib->mode = DETECTION;
      }
    }

    /* Blink (invert) the frame on which a sample was just captured */
    if (calib->mode == CAPTURING && blinkOutput) {
      bitwise_not (view, view);
    }
  }

  /* output text */
  /* FIXME ll additional rendering (text, corners, ...) should be done with
   * cairo or another gst framework.
   * this will relax the conditions on the input format (RBG only at the moment).
   * the calibration itself accepts more formats... */
  std::string msg = (calib->mode == CAPTURING) ? "100/100" :
      (calib->mode == CALIBRATED) ? "Calibrated" : "Waiting...";
  int baseLine = 0;

  cv::Size textSize = cv::getTextSize (msg, 1, 1, 1, &baseLine);
  cv::Point textOrigin (view.cols - 2 * textSize.width - 10,
      view.rows - 2 * baseLine - 10);

  if (calib->mode == CAPTURING) {
    msg = cv::format ("%d/%d", (int) calib->imagePoints.size (),
        calib->nrFrames);
  }

  const cv::Scalar RED (0, 0, 255);
  const cv::Scalar GREEN (0, 255, 0);

  cv::putText (view, msg, textOrigin, 1, 1,
      calib->mode == CALIBRATED ? GREEN : RED);
}
/* EOS handler: finish the current track's ReplayGain analysis, update the
 * per-track/per-album bookkeeping flags, post the resulting tags downstream
 * and decrement the remaining album track count. */
static void
gst_rg_analysis_handle_eos (GstRgAnalysis * filter)
{
  gboolean album_processing = (filter->num_tracks > 0);
  gboolean album_finished = (filter->num_tracks == 1);
  gboolean album_skipping = album_processing && filter->skip;

  filter->has_track_gain = FALSE;
  filter->has_track_peak = FALSE;

  if (album_finished) {
    /* Album done: reset all skip/ignore state for the next album */
    filter->ignore_tags = FALSE;
    filter->skip = FALSE;
    filter->has_album_gain = FALSE;
    filter->has_album_peak = FALSE;
  } else if (!album_skipping) {
    filter->skip = FALSE;
  }

  /* We might have just fully processed a track because it has
   * incomplete tags.  If we do album processing and allow skipping
   * (not forced), prevent switching to skipping if a later track with
   * full tags comes along: */
  if (!filter->forced && album_processing && !album_finished)
    filter->ignore_tags = TRUE;

  if (!filter->skip) {
    GstTagList *tag_list = NULL;
    gboolean track_success;
    gboolean album_success = FALSE;

    track_success = gst_rg_analysis_track_result (filter, &tag_list);

    if (album_finished)
      album_success = gst_rg_analysis_album_result (filter, &tag_list);
    else if (!album_processing)
      rg_analysis_reset_album (filter->ctx);

    if (track_success || album_success) {
      GST_LOG_OBJECT (filter, "posting tag list with results");
      gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
          GST_TAG_REFERENCE_LEVEL, filter->reference_level, NULL);
      /* This steals our reference to the list: */
      /* NOTE(review): gst_event_new_tag() consumes the *extra* reference
       * added by gst_tag_list_ref() here, so the reference returned by
       * gst_rg_analysis_track_result() appears to be leaked — confirm
       * whether an unref of tag_list is missing after the push. */
      gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (GST_BASE_TRANSFORM
              (filter)), gst_event_new_tag (gst_tag_list_ref (tag_list)));
    }
  }

  if (album_processing) {
    filter->num_tracks--;

    if (!album_finished) {
      GST_DEBUG_OBJECT (filter, "album not finished yet (num-tracks is now %u)",
          filter->num_tracks);
    } else {
      GST_DEBUG_OBJECT (filter, "album finished (num-tracks is now 0)");
    }
  }

  if (album_processing)
    g_object_notify (G_OBJECT (filter), "num-tracks");
}
/* propose_allocation: forward the allocation query unchanged to the peer of
 * the source pad, so upstream sees the downstream allocation options. */
static gboolean gst_imx_blitter_video_transform_propose_allocation(GstBaseTransform *transform, G_GNUC_UNUSED GstQuery *decide_query, GstQuery *query)
{
	GstPad *srcpad = GST_BASE_TRANSFORM_SRC_PAD(transform);
	return gst_pad_peer_query(srcpad, query);
}
/* transform: run the MSDK VPP on inbuf producing outbuf. Loops while the
 * session reports MFX_ERR_MORE_SURFACE (frame-rate conversion / deinterlace
 * producing several output frames per input), pushing each finished buffer
 * itself and allocating a fresh one. Returns DROPPED when MSDK needs more
 * input before it can produce output. */
static GstFlowReturn
gst_msdkvpp_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstMsdkVPP *thiz = GST_MSDKVPP (trans);
  GstClockTime timestamp;
  GstFlowReturn ret = GST_FLOW_OK;
  mfxSession session;
  mfxSyncPoint sync_point = NULL;
  mfxStatus status;
  MsdkSurface *in_surface = NULL;
  MsdkSurface *out_surface = NULL;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  in_surface = get_msdk_surface_from_input_buffer (thiz, inbuf);
  if (!in_surface)
    return GST_FLOW_ERROR;

  if (gst_msdk_is_msdk_buffer (outbuf)) {
    out_surface = g_slice_new0 (MsdkSurface);
    out_surface->surface = gst_msdk_get_surface_from_buffer (outbuf);
  } else {
    GST_ERROR ("Failed to get msdk outsurface!");
    return GST_FLOW_ERROR;
  }

  session = gst_msdk_context_get_session (thiz->context);

  /* outer loop is for handling FrameRate Control and deinterlace use cases */
  do {
    for (;;) {
      status =
          MFXVideoVPP_RunFrameVPPAsync (session, in_surface->surface,
          out_surface->surface, NULL, &sync_point);
      if (status != MFX_WRN_DEVICE_BUSY)
        break;
      /* If device is busy, wait 1ms and retry, as per MSDK's recommendation */
      g_usleep (1000);
    };

    if (status != MFX_ERR_NONE && status != MFX_ERR_MORE_DATA
        && status != MFX_ERR_MORE_SURFACE)
      goto vpp_error;

    /* No output generated */
    if (status == MFX_ERR_MORE_DATA)
      goto error_more_data;

    /* Wait for the async operation to complete before touching the output */
    if (sync_point)
      MFXVideoCORE_SyncOperation (session, sync_point, 10000);

    /* More than one output buffers are generated */
    if (status == MFX_ERR_MORE_SURFACE) {
      GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      GST_BUFFER_DURATION (outbuf) = thiz->buffer_duration;
      timestamp += thiz->buffer_duration;
      /* Push this output ourselves and continue the loop with a new buffer;
       * gst_pad_push() takes ownership of outbuf. */
      ret = gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (trans), outbuf);
      if (ret != GST_FLOW_OK)
        goto error_push_buffer;
      outbuf = create_output_buffer (thiz);
    } else {
      GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      GST_BUFFER_DURATION (outbuf) = thiz->buffer_duration;
    }
  } while (status == MFX_ERR_MORE_SURFACE);

  free_msdk_surface (in_surface);
  return ret;

vpp_error:
  GST_ERROR_OBJECT (thiz, "MSDK Failed to do VPP");
  free_msdk_surface (in_surface);
  free_msdk_surface (out_surface);
  return GST_FLOW_ERROR;

error_more_data:
  GST_WARNING_OBJECT (thiz,
      "MSDK Requries additional input for processing, "
      "Retruning FLOW_DROPPED since no output buffer was generated");
  free_msdk_surface (in_surface);
  return GST_BASE_TRANSFORM_FLOW_DROPPED;

error_push_buffer:
  {
    free_msdk_surface (in_surface);
    free_msdk_surface (out_surface);
    GST_DEBUG_OBJECT (thiz, "failed to push output buffer: %s",
        gst_flow_get_name (ret));
    return ret;
  }
}
/* In-place video filter: score the difference between this frame and the
 * previous one, keep a sliding window of recent scores, and when the current
 * score stands out from the window's min/max-derived threshold, push a
 * downstream force-key-unit event to mark a scene change. */
static GstFlowReturn
gst_scene_change_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstSceneChange *scenechange = GST_SCENE_CHANGE (filter);
  GstVideoFrame oldframe;
  double score_min;
  double score_max;
  double threshold;
  double score;
  gboolean change;
  gboolean ret;
  int i;

  GST_DEBUG_OBJECT (scenechange, "transform_frame_ip");

  /* First frame: nothing to compare against yet, just remember it */
  if (!scenechange->oldbuf) {
    scenechange->n_diffs = 0;
    memset (scenechange->diffs, 0, sizeof (double) * SC_N_DIFFS);
    scenechange->oldbuf = gst_buffer_ref (frame->buffer);
    memcpy (&scenechange->oldinfo, &frame->info, sizeof (GstVideoInfo));
    return GST_FLOW_OK;
  }

  ret = gst_video_frame_map (&oldframe, &scenechange->oldinfo,
      scenechange->oldbuf, GST_MAP_READ);
  if (!ret) {
    GST_ERROR_OBJECT (scenechange, "failed to map old video frame");
    return GST_FLOW_ERROR;
  }

  score = get_frame_score (&oldframe, frame);

  gst_video_frame_unmap (&oldframe);

  gst_buffer_unref (scenechange->oldbuf);
  scenechange->oldbuf = gst_buffer_ref (frame->buffer);
  memcpy (&scenechange->oldinfo, &frame->info, sizeof (GstVideoInfo));

  /* Shift the score window left and append the newest score */
  memmove (scenechange->diffs, scenechange->diffs + 1,
      sizeof (double) * (SC_N_DIFFS - 1));
  scenechange->diffs[SC_N_DIFFS - 1] = score;
  scenechange->n_diffs++;

  /* Min/max over the window, excluding the newest score (index
   * SC_N_DIFFS - 1) so the threshold reflects recent history only */
  score_min = scenechange->diffs[0];
  score_max = scenechange->diffs[0];
  for (i = 1; i < SC_N_DIFFS - 1; i++) {
    score_min = MIN (score_min, scenechange->diffs[i]);
    score_max = MAX (score_max, scenechange->diffs[i]);
  }

  threshold = 1.8 * score_max - 0.8 * score_min;

  /* Heuristic decision: small scores never trigger, scores far above the
   * history threshold (or absolutely large) do */
  if (scenechange->n_diffs > 2) {
    if (score < 5) {
      change = FALSE;
    } else if (score / threshold < 1.0) {
      change = FALSE;
    } else if (score / threshold > 2.5) {
      change = TRUE;
    } else if (score > 50) {
      change = TRUE;
    } else {
      change = FALSE;
    }
  } else {
    change = FALSE;
  }

#ifdef TESTING
  if (change != is_shot_change (scenechange->n_diffs)) {
    g_print ("%d %g %g %g %d\n", scenechange->n_diffs,
        score / threshold, score, threshold, change);
  }
#endif

  if (change) {
    GstEvent *event;

    GST_INFO_OBJECT (scenechange, "%d %g %g %g %d", scenechange->n_diffs,
        score / threshold, score, threshold, change);

    event =
        gst_video_event_new_downstream_force_key_unit (GST_BUFFER_PTS
        (frame->buffer), GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE,
        scenechange->count++);

    gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (scenechange), event);
  }

  return GST_FLOW_OK;
}