/* Push decoded frames downstream in PTS order.
 *
 * @vtdec: the decoder instance
 * @drain: wait for all pending asynchronous frames, then push/consume
 *         everything left in the reorder queue
 * @flush: discard (release) queued frames instead of pushing them
 *
 * Returns: GST_FLOW_OK, a negotiation error, or the flow return from
 * pushing a frame downstream.
 */
static GstFlowReturn
gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
    gboolean flush)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (vtdec);

  /* negotiate now so that we know whether we need to use the GL upload meta or
   * not */
  if (gst_pad_check_reconfigure (decoder->srcpad)) {
    if (!gst_video_decoder_negotiate (decoder)) {
      /* negotiation failed: re-arm reconfigure so it is retried, and map
       * a flushing pad to FLUSHING rather than NOT_NEGOTIATED */
      gst_pad_mark_reconfigure (decoder->srcpad);
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      return ret;
    }
  }

  /* draining requires all in-flight decodes to complete first */
  if (drain)
    VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session);

  /* push a buffer if there are enough frames to guarantee that we push in PTS
   * order */
  while ((g_async_queue_length (vtdec->reorder_queue) >=
          vtdec->reorder_queue_length) || drain || flush) {
    frame = (GstVideoCodecFrame *) g_async_queue_try_pop (vtdec->reorder_queue);

    /* we need to check this in case reorder_queue_length=0 (jpeg for
     * example) or we're draining/flushing */
    if (frame) {
      /* flushing or SKIP-flagged frames are released without output;
       * DROP-flagged frames are dropped (QoS accounted); everything else
       * is finished (pushed downstream) */
      if (flush || frame->flags & VTDEC_FRAME_FLAG_SKIP)
        gst_video_decoder_release_frame (decoder, frame);
      else if (frame->flags & VTDEC_FRAME_FLAG_DROP)
        gst_video_decoder_drop_frame (decoder, frame);
      else
        ret = gst_video_decoder_finish_frame (decoder, frame);
    }

    /* stop when the queue is empty (drain/flush complete) or when a push
     * returned a non-OK flow */
    if (!frame || ret != GST_FLOW_OK)
      break;
  }

  return ret;
}
/* Handle new input caps: record the negotiated video parameters, tear down
 * and recreate the compression session, and schedule src renegotiation.
 *
 * Returns: TRUE on success, FALSE if the caps cannot be parsed into a
 * GstVideoInfo.
 */
static gboolean
gst_vtenc_sink_setcaps (GstVTEnc * self, GstCaps * caps)
{
  GstStructure *structure;
  VTCompressionSessionRef session;

  GST_OBJECT_LOCK (self);

  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &self->negotiated_width);
  gst_structure_get_int (structure, "height", &self->negotiated_height);
  gst_structure_get_fraction (structure, "framerate",
      &self->negotiated_fps_n, &self->negotiated_fps_d);

  if (!gst_video_info_from_caps (&self->video_info, caps)) {
    /* FIX: the original returned FALSE while still holding the object
     * lock, permanently deadlocking the element on invalid caps */
    GST_OBJECT_UNLOCK (self);
    return FALSE;
  }

  gst_vtenc_destroy_session (self, &self->session);

  /* create the session unlocked: session setup/callbacks may need to take
   * the object lock themselves */
  GST_OBJECT_UNLOCK (self);
  session = gst_vtenc_create_session (self);
  GST_OBJECT_LOCK (self);

  self->session = session;

  /* reset per-session options */
  if (self->options != NULL)
    CFRelease (self->options);
  self->options = CFDictionaryCreateMutable (NULL, 0,
      &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);

  /* renegotiate when upstream caps change */
  gst_pad_mark_reconfigure (self->srcpad);

  GST_OBJECT_UNLOCK (self);

  return TRUE;
}
/* the first caps we receive on any of the sinkpads will define the caps for all
 * the other sinkpads because we can only mix streams with the same caps. */
static gboolean
gst_audiomixer_setcaps (GstAudioMixer * audiomixer, GstPad * pad,
    GstCaps * orig_caps)
{
  GstAggregator *agg = GST_AGGREGATOR (audiomixer);
  GstAudioAggregator *aagg = GST_AUDIO_AGGREGATOR (audiomixer);
  GstCaps *caps;
  GstAudioInfo info;
  GstStructure *s;
  gint channels = 0;

  /* work on a private copy so we can tweak fields without touching the
   * caller's caps */
  caps = gst_caps_copy (orig_caps);

  s = gst_caps_get_structure (caps, 0);

  /* for <= 2 channels the channel-mask is redundant (implicit layouts),
   * so drop it to ease comparison/intersection */
  if (gst_structure_get_int (s, "channels", &channels))
    if (channels <= 2)
      gst_structure_remove_field (s, "channel-mask");

  if (!gst_audio_info_from_caps (&info, caps))
    goto invalid_format;

  if (channels == 1) {
    GstCaps *filter;
    GstCaps *downstream_caps;

    /* mono input: see what downstream can actually take (optionally
     * constrained by the element's filter-caps) and fixate to that */
    if (audiomixer->filter_caps)
      filter = gst_caps_intersect_full (caps, audiomixer->filter_caps,
          GST_CAPS_INTERSECT_FIRST);
    else
      filter = gst_caps_ref (caps);

    downstream_caps = gst_pad_peer_query_caps (agg->srcpad, filter);
    gst_caps_unref (filter);

    if (downstream_caps) {
      gst_caps_unref (caps);
      caps = downstream_caps;

      /* empty intersection means no common format with downstream */
      if (gst_caps_is_empty (caps)) {
        gst_caps_unref (caps);
        return FALSE;
      }

      caps = gst_caps_fixate (caps);
    }
  }

  GST_OBJECT_LOCK (audiomixer);
  /* don't allow reconfiguration for now; there's still a race between the
   * different upstream threads doing query_caps + accept_caps + sending
   * (possibly different) CAPS events, but there's not much we can do about
   * that, upstream needs to deal with it.
   */
  if (aagg->current_caps != NULL) {
    if (gst_audio_info_is_equal (&info, &aagg->info)) {
      /* same audio format as already configured: accept and forward the
       * original caps to the pad */
      GST_OBJECT_UNLOCK (audiomixer);
      gst_caps_unref (caps);
      gst_audio_aggregator_set_sink_caps (aagg,
          GST_AUDIO_AGGREGATOR_PAD (pad), orig_caps);
      return TRUE;
    } else {
      /* conflicting format: reject and ask upstream to reconfigure */
      GST_DEBUG_OBJECT (pad, "got input caps %" GST_PTR_FORMAT ", but "
          "current caps are %" GST_PTR_FORMAT, caps, aagg->current_caps);
      GST_OBJECT_UNLOCK (audiomixer);
      gst_pad_push_event (pad, gst_event_new_reconfigure ());
      gst_caps_unref (caps);
      return FALSE;
    }
  } else {
    /* first caps seen: they define the mixing format for all pads */
    gst_caps_replace (&aagg->current_caps, caps);
    aagg->info = info;
    gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (agg));
  }
  GST_OBJECT_UNLOCK (audiomixer);

  gst_audio_aggregator_set_sink_caps (aagg, GST_AUDIO_AGGREGATOR_PAD (pad),
      orig_caps);
  GST_INFO_OBJECT (pad, "handle caps change to %" GST_PTR_FORMAT, caps);
  gst_caps_unref (caps);

  return TRUE;

  /* ERRORS */
invalid_format:
  {
    gst_caps_unref (caps);
    GST_WARNING_OBJECT (audiomixer, "invalid format set as caps");
    return FALSE;
  }
}
/* Handle new input caps: build a GstVideoInfo from the caps fields, create a
 * CoreMedia format description (from codec_data for H.264), and (re)create
 * the decompression session.
 *
 * Returns: TRUE on success or when waiting for more complete caps, FALSE if
 * session creation fails.
 */
static gboolean
gst_vtdec_sink_setcaps (GstVTDec * self, GstCaps * caps)
{
  GstStructure *structure;
  CMFormatDescriptionRef fmt_desc = NULL;
  GstVideoFormat format = GST_VIDEO_FORMAT_NV12;  /* output is always NV12 */
  gint width, height;
  gint fps_n, fps_d;
  gint par_n, par_d;

  structure = gst_caps_get_structure (caps, 0);

  /* width/height are mandatory; without them we wait for better caps */
  if (!gst_structure_get_int (structure, "width", &width))
    goto incomplete_caps;
  if (!gst_structure_get_int (structure, "height", &height))
    goto incomplete_caps;

  gst_video_info_init (&self->vinfo);
  gst_video_info_set_format (&self->vinfo, format, width, height);

  if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
    if (fps_n == 0) {
      /* variable framerate */
      self->vinfo.flags |= GST_VIDEO_FLAG_VARIABLE_FPS;
      /* see if we have a max-framerate */
      gst_structure_get_fraction (structure, "max-framerate", &fps_n, &fps_d);
    }
    self->vinfo.fps_n = fps_n;
    self->vinfo.fps_d = fps_d;
  } else {
    /* unspecified is variable framerate */
    self->vinfo.fps_n = 0;
    self->vinfo.fps_d = 1;
  }

  /* pixel-aspect-ratio defaults to 1/1 when absent */
  if (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
          &par_n, &par_d)) {
    self->vinfo.par_n = par_n;
    self->vinfo.par_d = par_d;
  } else {
    self->vinfo.par_n = 1;
    self->vinfo.par_d = 1;
  }

  if (self->details->format_id == kVTFormatH264) {
    const GValue *codec_data_value;

    /* H.264 needs avcC codec_data to build the format description; if it
     * is not here yet we simply wait for a later setcaps */
    codec_data_value = gst_structure_get_value (structure, "codec_data");
    if (codec_data_value != NULL) {
      fmt_desc = gst_vtdec_create_format_description_from_codec_data (self,
          gst_value_get_buffer (codec_data_value));
    } else {
      GST_DEBUG_OBJECT (self, "no codec_data in caps, awaiting future setcaps");
    }
  } else {
    fmt_desc = gst_vtdec_create_format_description (self);
  }

  if (fmt_desc != NULL) {
    /* format changed: tear down the old session/description and build a
     * fresh session for the new format */
    gst_vtdec_destroy_session (self, &self->session);
    self->ctx->cm->FigFormatDescriptionRelease (self->fmt_desc);

    self->fmt_desc = fmt_desc;
    self->session = gst_vtdec_create_session (self, fmt_desc);
    if (self->session == NULL)
      goto session_create_error;
  }

  /* renegotiate when upstream caps change */
  gst_pad_mark_reconfigure (self->srcpad);

  return TRUE;

  /* ERRORS */
incomplete_caps:
  {
    /* not an error: accept the caps and wait for a more complete set */
    return TRUE;
  }
session_create_error:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        ("failed to create session"), (NULL));
    return FALSE;
  }
}
/* GObject set_property implementation for udpsrc.
 *
 * Keeps the cached "udp://host:port" URI string in sync whenever the port or
 * address changes, and swaps caps under the object lock.
 */
static void
gst_udpsrc_set_property (GObject * object, guint prop_id, const GValue * value,
    GParamSpec * pspec)
{
  GstUDPSrc *udpsrc = GST_UDPSRC (object);

  switch (prop_id) {
    case PROP_BUFFER_SIZE:
      udpsrc->buffer_size = g_value_get_int (value);
      break;
    case PROP_PORT:
      udpsrc->port = g_value_get_int (value);
      /* keep the cached URI consistent with the new port */
      g_free (udpsrc->uri);
      udpsrc->uri =
          g_strdup_printf ("udp://%s:%u", udpsrc->address, udpsrc->port);
      break;
    case PROP_MULTICAST_GROUP:
    case PROP_ADDRESS:
    {
      const gchar *group;

      /* NULL resets the address to the default multicast group */
      g_free (udpsrc->address);
      if ((group = g_value_get_string (value)))
        udpsrc->address = g_strdup (group);
      else
        udpsrc->address = g_strdup (UDP_DEFAULT_MULTICAST_GROUP);

      g_free (udpsrc->uri);
      udpsrc->uri =
          g_strdup_printf ("udp://%s:%u", udpsrc->address, udpsrc->port);
      break;
    }
    case PROP_MULTICAST_IFACE:
      g_free (udpsrc->multi_iface);
      if (g_value_get_string (value) == NULL)
        udpsrc->multi_iface = g_strdup (UDP_DEFAULT_MULTICAST_IFACE);
      else
        udpsrc->multi_iface = g_value_dup_string (value);
      break;
    case PROP_URI:
      gst_udpsrc_set_uri (udpsrc, g_value_get_string (value), NULL);
      break;
    case PROP_CAPS:
    {
      const GstCaps *new_caps_val = gst_value_get_caps (value);
      GstCaps *new_caps;
      GstCaps *old_caps;

      /* NULL means "any caps" */
      if (new_caps_val == NULL) {
        new_caps = gst_caps_new_any ();
      } else {
        new_caps = gst_caps_copy (new_caps_val);
      }

      /* swap under the lock, unref the old caps outside of it */
      GST_OBJECT_LOCK (udpsrc);
      old_caps = udpsrc->caps;
      udpsrc->caps = new_caps;
      GST_OBJECT_UNLOCK (udpsrc);
      if (old_caps)
        gst_caps_unref (old_caps);

      gst_pad_mark_reconfigure (GST_BASE_SRC_PAD (udpsrc));
      break;
    }
    case PROP_SOCKET:
      /* close the previously provided socket if we own it and it is not
       * the one currently in use */
      if (udpsrc->socket != NULL && udpsrc->socket != udpsrc->used_socket &&
          udpsrc->close_socket) {
        GError *err = NULL;

        if (!g_socket_close (udpsrc->socket, &err)) {
          GST_ERROR ("failed to close socket %p: %s", udpsrc->socket,
              err->message);
          g_clear_error (&err);
        }
      }
      if (udpsrc->socket)
        g_object_unref (udpsrc->socket);
      udpsrc->socket = g_value_dup_object (value);
      GST_DEBUG ("setting socket to %p", udpsrc->socket);
      break;
    case PROP_TIMEOUT:
      udpsrc->timeout = g_value_get_uint64 (value);
      break;
    case PROP_SKIP_FIRST_BYTES:
      udpsrc->skip_first_bytes = g_value_get_int (value);
      break;
    case PROP_CLOSE_SOCKET:
      udpsrc->close_socket = g_value_get_boolean (value);
      break;
    case PROP_AUTO_MULTICAST:
      udpsrc->auto_multicast = g_value_get_boolean (value);
      break;
    case PROP_REUSE:
      udpsrc->reuse = g_value_get_boolean (value);
      break;
    default:
      /* FIX: warn on unknown property ids instead of silently ignoring
       * them, per the standard GObject set_property convention */
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Chain function: ask the application (via the "draw" signal) for an overlay
 * composition for each buffer, then either merge it into an upstream
 * composition meta, attach it as a new meta, or blend it directly into the
 * video frame, depending on the negotiated mode.
 *
 * Returns: the flow return from pushing downstream, or an error.
 */
static GstFlowReturn
gst_overlay_composition_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstOverlayComposition *self = GST_OVERLAY_COMPOSITION (parent);
  GstVideoOverlayComposition *compo = NULL;
  GstVideoOverlayCompositionMeta *upstream_compo_meta;

  if (gst_pad_check_reconfigure (self->srcpad)) {
    if (!gst_overlay_composition_negotiate (self, NULL)) {
      /* negotiation failed: re-arm reconfigure, drop the buffer, and
       * report FLUSHING vs NOT_NEGOTIATED depending on pad state */
      gst_pad_mark_reconfigure (self->srcpad);
      gst_buffer_unref (buffer);
      GST_OBJECT_LOCK (self->srcpad);
      if (GST_PAD_IS_FLUSHING (self->srcpad)) {
        GST_OBJECT_UNLOCK (self->srcpad);
        return GST_FLOW_FLUSHING;
      }
      GST_OBJECT_UNLOCK (self->srcpad);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  /* (re)build the sample handed to the application: current buffer, caps
   * and segment */
  if (!self->sample) {
    self->sample = gst_sample_new (buffer, self->caps, &self->segment, NULL);
  } else {
    self->sample = gst_sample_make_writable (self->sample);
    gst_sample_set_buffer (self->sample, buffer);
    gst_sample_set_caps (self->sample, self->caps);
    gst_sample_set_segment (self->sample, &self->segment);
  }

  /* let the application draw; it returns a composition (or NULL) */
  g_signal_emit (self, overlay_composition_signals[SIGNAL_DRAW], 0,
      self->sample, &compo);

  /* Don't store the buffer in the sample any longer, otherwise it will not
   * be writable below as we have one reference in the sample and one in
   * this function.
   *
   * If the sample is not writable itself then the application kept an
   * reference itself.
   */
  if (gst_sample_is_writable (self->sample)) {
    gst_sample_set_buffer (self->sample, NULL);
  }

  if (!compo) {
    GST_DEBUG_OBJECT (self->sinkpad,
        "Application did not provide an overlay composition");
    return gst_pad_push (self->srcpad, buffer);
  }

  /* If upstream attached a meta, we can safely add our own things
   * in it. Upstream must've checked that downstream supports it */
  upstream_compo_meta = gst_buffer_get_video_overlay_composition_meta (buffer);
  if (upstream_compo_meta) {
    /* append our rectangles to a copy of the upstream composition and
     * swap it into the existing meta */
    GstVideoOverlayComposition *merged_compo =
        gst_video_overlay_composition_copy (upstream_compo_meta->overlay);
    guint i, n;

    GST_DEBUG_OBJECT (self->sinkpad,
        "Appending to upstream overlay composition");

    n = gst_video_overlay_composition_n_rectangles (compo);
    for (i = 0; i < n; i++) {
      GstVideoOverlayRectangle *rect =
          gst_video_overlay_composition_get_rectangle (compo, i);
      gst_video_overlay_composition_add_rectangle (merged_compo, rect);
    }

    gst_video_overlay_composition_unref (compo);
    gst_video_overlay_composition_unref (upstream_compo_meta->overlay);
    upstream_compo_meta->overlay = merged_compo;
  } else if (self->attach_compo_to_buffer) {
    /* downstream handles the meta itself: just attach it */
    GST_DEBUG_OBJECT (self->sinkpad, "Attaching as meta");

    buffer = gst_buffer_make_writable (buffer);
    gst_buffer_add_video_overlay_composition_meta (buffer, compo);
    gst_video_overlay_composition_unref (compo);
  } else {
    /* downstream can't handle the meta: blend in software */
    GstVideoFrame frame;

    buffer = gst_buffer_make_writable (buffer);
    if (!gst_video_frame_map (&frame, &self->info, buffer, GST_MAP_READWRITE)) {
      gst_video_overlay_composition_unref (compo);
      goto map_failed;
    }

    gst_video_overlay_composition_blend (compo, &frame);

    gst_video_frame_unmap (&frame);
    gst_video_overlay_composition_unref (compo);
  }

  return gst_pad_push (self->srcpad, buffer);

map_failed:
  {
    GST_ERROR_OBJECT (self->sinkpad, "Failed to map buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
/* Based on gstbasetextoverlay.c */
/* Negotiate src caps and decide how overlays will be delivered: merged into
 * an upstream meta, attached as a new meta, or blended into the frame.
 * Also picks up the downstream window size from the allocation query when
 * available.
 *
 * Returns: TRUE on successful negotiation, FALSE otherwise (reconfigure is
 * re-armed on failure).
 */
static gboolean
gst_overlay_composition_negotiate (GstOverlayComposition * self, GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (self, "performing negotiation");

  /* Clear any pending reconfigure to avoid negotiating twice */
  gst_pad_check_reconfigure (self->srcpad);

  self->window_width = self->window_height = 0;

  /* we hold a ref on caps in both branches; released at the end */
  if (!caps)
    caps = gst_pad_get_current_caps (self->sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions */
  width = self->info.width;
  height = self->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accept overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (self->srcpad, overlay_caps);
    caps_has_meta = !gst_caps_is_empty (peercaps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (self, "caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediately, it's needed by GstBaseTransform to get a reply
     * from allocation query */
    ret = gst_pad_set_caps (self->srcpad, overlay_caps);

    /* First check if the allocation meta has composition */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (self->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (self, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * will make it retry */
      if (GST_PAD_IS_FLUSHING (self->srcpad))
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG_OBJECT (self, "sink alloc has overlay meta %d", alloc_has_meta);

    if (alloc_has_meta) {
      const GstStructure *params;

      /* FIX: the original source was encoding-corrupted here ("¶ms"
       * instead of "&params"), which is not valid C */
      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        /* downstream can tell us the window size it renders into */
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG_OBJECT (self, "received window size: %dx%d", width,
              height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  self->window_width = width;
  self->window_height = height;

  /* For backward compatibility, we will prefer blitting if downstream
   * allocation does not support the meta. In other case we will prefer
   * attaching, and will fail the negotiation in the unlikely case we are
   * forced to blit, but format isn't supported.
   */
  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !can_blend_caps (caps);
    }
  } else {
    ret = can_blend_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps were already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (self->srcpad, caps);
  }

  self->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (self, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (self->srcpad);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_CAPS_CHANGED], 0,
      caps, self->window_width, self->window_height, NULL);

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (self->srcpad);
    return FALSE;
  }
}
/* Chain function: renegotiate src caps on reconfigure, rewrite the RTP
 * header fields of the incoming buffer, and push it on the src pad.
 *
 * Returns: GST_FLOW_OK (or downstream's return), or an error on
 * negotiation/mapping failure.
 */
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean drop;
  gboolean changed = FALSE;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);
    gboolean negotiated = gst_rtp_mux_setcaps (pad, rtp_mux, current_caps);

    /* FIX: the original only unreffed current_caps on the success path,
     * leaking it whenever setcaps failed; also guard against a NULL
     * return from gst_pad_get_current_caps() */
    if (current_caps)
      gst_caps_unref (current_caps);

    if (!negotiated) {
      gst_pad_mark_reconfigure (rtp_mux->srcpad);
      if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_unref (buffer);
      goto out;
    }
  }

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);
  if (!padpriv) {
    /* pad was released concurrently */
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
    return GST_FLOW_ERROR;
  }

  /* rewrite seqnum/ssrc/etc.; a FALSE return means the buffer must be
   * dropped */
  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (!drop) {
    /* track the active pad so sticky events can be resent on switch */
    if (pad != rtp_mux->last_pad) {
      changed = TRUE;
      g_clear_object (&rtp_mux->last_pad);
      rtp_mux->last_pad = g_object_ref (pad);
    }

    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
        GST_BUFFER_PTS_IS_VALID (buffer))
      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
          GST_BUFFER_DURATION (buffer);
    else
      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (drop) {
    gst_buffer_unref (buffer);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push (rtp_mux->srcpad, buffer);
  }

out:
  return ret;
}
/* Buffer-list variant of the chain function: renegotiate on reconfigure,
 * process every buffer in the list, and push the list downstream.
 *
 * Returns: GST_FLOW_OK (or downstream's return), or an error on
 * negotiation failure / unlinked pad.
 */
static GstFlowReturn
gst_rtp_mux_chain_list (GstPad * pad, GstObject * parent,
    GstBufferList * bufferlist)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean changed = FALSE;
  struct BufferListData bd;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);
    gboolean negotiated = gst_rtp_mux_setcaps (pad, rtp_mux, current_caps);

    /* FIX: the original only unreffed current_caps on the success path,
     * leaking it whenever setcaps failed; also guard against a NULL
     * return from gst_pad_get_current_caps() */
    if (current_caps)
      gst_caps_unref (current_caps);

    if (!negotiated) {
      gst_pad_mark_reconfigure (rtp_mux->srcpad);
      if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_list_unref (bufferlist);
      goto out;
    }
  }

  GST_OBJECT_LOCK (rtp_mux);

  padpriv = gst_pad_get_element_private (pad);
  if (!padpriv) {
    /* pad was released concurrently */
    GST_OBJECT_UNLOCK (rtp_mux);
    ret = GST_FLOW_NOT_LINKED;
    gst_buffer_list_unref (bufferlist);
    goto out;
  }

  bd.rtp_mux = rtp_mux;
  bd.padpriv = padpriv;
  bd.drop = FALSE;

  /* rewrite the RTP headers of every buffer in the list */
  bufferlist = gst_buffer_list_make_writable (bufferlist);
  gst_buffer_list_foreach (bufferlist, process_list_item, &bd);

  if (!bd.drop && pad != rtp_mux->last_pad) {
    /* track the active pad so sticky events can be resent on switch */
    changed = TRUE;
    g_clear_object (&rtp_mux->last_pad);
    rtp_mux->last_pad = g_object_ref (pad);
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (bd.drop) {
    gst_buffer_list_unref (bufferlist);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push_list (rtp_mux->srcpad, bufferlist);
  }

out:
  return ret;
}