/* Pushes a VIDEO_CODEC tag downstream once, right before the first frame
 * leaves the parser. Runs in the pre_push hook so pending serialized
 * events have already been forwarded. Always returns GST_FLOW_OK except
 * when the src pad has no caps yet (flushing). */
static GstFlowReturn
gst_png_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);

  if (!pngparse->sent_codec_tag) {
    GstTagList *taglist;
    GstCaps *caps;

    /* codec tag */
    caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
    if (G_UNLIKELY (caps == NULL)) {
      /* FIX: gst_pad_get_current_caps() returns NULL when the pad has no
       * negotiated caps (e.g. while flushing); previously the NULL was
       * passed straight to gst_pb_utils_add_codec_description_to_tag_list()
       * and gst_caps_unref(), causing a critical warning/crash. */
      GST_INFO_OBJECT (parse, "src pad caps not set yet, src pad is flushing");
      return GST_FLOW_FLUSHING;
    }

    taglist = gst_tag_list_new_empty ();
    gst_pb_utils_add_codec_description_to_tag_list (taglist,
        GST_TAG_VIDEO_CODEC, caps);
    gst_caps_unref (caps);

    /* gst_event_new_tag() takes ownership of taglist */
    gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (pngparse),
        gst_event_new_tag (taglist));

    /* also signals the end of first-frame processing */
    pngparse->sent_codec_tag = TRUE;
  }

  return GST_FLOW_OK;
}
/* Sends the track's ReplayGain peak/gain values downstream as a tag event
 * so that the rgvolume element can apply them. Missing tag values default
 * to 0.0. Failure to deliver the event is logged but not fatal. */
static void
brasero_transcode_send_volume_event (BraseroTranscode *transcode)
{
	BraseroTranscodePrivate *priv;
	gdouble track_peak = 0.0;
	gdouble track_gain = 0.0;
	GstTagList *tag_list;
	BraseroTrack *track;
	GstEvent *event;
	GValue *value;

	priv = BRASERO_TRANSCODE_PRIVATE (transcode);

	brasero_job_get_current_track (BRASERO_JOB (transcode), &track);

	BRASERO_JOB_LOG (transcode, "Sending audio levels tags");

	/* only overwrite the 0.0 defaults when the lookups succeed */
	if (brasero_track_tag_lookup (track, BRASERO_TRACK_PEAK_VALUE, &value) == BRASERO_BURN_OK)
		track_peak = g_value_get_double (value);

	if (brasero_track_tag_lookup (track, BRASERO_TRACK_GAIN_VALUE, &value) == BRASERO_BURN_OK)
		track_gain = g_value_get_double (value);

	/* it's possible we fail */
	tag_list = gst_tag_list_new (GST_TAG_TRACK_GAIN, track_gain,
				     GST_TAG_TRACK_PEAK, track_peak,
				     NULL);

	/* NOTE: that event is going downstream */
	event = gst_event_new_tag (tag_list);
	if (!gst_element_send_event (priv->convert, event))
		BRASERO_JOB_LOG (transcode, "Couldn't send tags to rgvolume");

	BRASERO_JOB_LOG (transcode, "Set volume level %lf %lf", track_gain, track_peak);
}
/* Late (pre_push) hook: emits the codec tag once, after pending events
 * have been forwarded, and marks every frame for the usual clipping. */
static GstFlowReturn
gst_mpegv_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstTagList *taglist;

  if (G_UNLIKELY (mpvparse->send_codec_tag)) {
    const gint mpeg_version = (mpvparse->config_flags & FLAG_MPEG2) ? 2 : 1;
    gchar *codec_name;

    /* codec tag */
    codec_name = g_strdup_printf ("MPEG %d Video", mpeg_version);
    taglist = gst_tag_list_new (GST_TAG_VIDEO_CODEC, codec_name, NULL);
    g_free (codec_name);

    gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (mpvparse),
        gst_event_new_tag (taglist));

    mpvparse->send_codec_tag = FALSE;
  }

  /* usual clipping applies */
  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;

  return GST_FLOW_OK;
}
/* Pack-header hook: chains up to the parent implementation, then flushes
 * any pending per-stream tags to the audio stream pads. Always returns
 * TRUE. */
static gboolean
gst_mpeg_demux_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
{
  GstMPEGDemux *demux = GST_MPEG_DEMUX (mpeg_parse);
  guint8 *buf;

  parent_class->parse_packhead (mpeg_parse, buffer);

  /* NOTE(review): buf is fetched but never used below — looks like a
   * placeholder for future pack-header inspection; confirm before removing. */
  buf = GST_BUFFER_DATA (buffer);

  /* do something useful here */

  if (demux->pending_tags) {
    GstMPEGStream **streams;
    guint i, num;

    /* push a copy of each stream's tag list so the streams keep ownership
     * of their own lists */
    streams = demux->audio_stream;
    num = GST_MPEG_DEMUX_NUM_AUDIO_STREAMS;
    for (i = 0; i < num; ++i) {
      if (streams[i] != NULL && streams[i]->tags != NULL)
        gst_pad_push_event (streams[i]->pad,
            gst_event_new_tag (gst_tag_list_copy (streams[i]->tags)));
    }
    demux->pending_tags = FALSE;
  }

  return TRUE;
}
/* Builds a tag event from the decoder's current tag list, or returns NULL
 * when there are no tags. Clears the tags_changed flag; the event holds
 * its own reference on the list. */
GstEvent *
gst_kate_util_decoder_base_get_tag_event (GstKateDecoderBase * decoder)
{
  GstEvent *tag_event = NULL;

  if (decoder->tags != NULL) {
    decoder->tags_changed = FALSE;
    tag_event = gst_event_new_tag (gst_tag_list_ref (decoder->tags));
  }

  return tag_event;
}
// Delivers |tags| on |pad|, abstracting over the GStreamer API version:
// with 1.x the tag event is pushed directly on the pad (gst_event_new_tag
// takes ownership of |tags|); with 0.10 the element-level helper is used
// instead and |element| is required.
void notifyGstTagsOnPad(GstElement* element, GstPad* pad, GstTagList* tags)
{
#ifdef GST_API_VERSION_1
    UNUSED_PARAM(element);
    gst_pad_push_event(GST_PAD_CAST(pad), gst_event_new_tag(tags));
#else
    gst_element_found_tags_for_pad(element, pad, tags);
#endif
}
/**
 * gst_avi_subtitle_title_tag:
 * @sub: subtitle element
 * @title: the title of this subtitle stream
 *
 * Pushes a tag event carrying @title as GST_TAG_TITLE on the source pad
 * of @sub. The event takes ownership of the freshly created tag list.
 */
static void
gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title)
{
  GstTagList *tags;

  tags = gst_tag_list_new ();
  gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, title, NULL);
  gst_pad_push_event (sub->src, gst_event_new_tag (tags));
}
/* Ruby binding initializer: wraps the GstTagList extracted from the Ruby
 * object in a new tag event and stores it in self. Returns nil, per the
 * #initialize convention. */
static VALUE
tag_initialize(VALUE self, VALUE taglist)
{
    G_INITIALIZE(self, gst_event_new_tag(RVAL2GST_STRUCT(taglist)));
    return Qnil;
}
/* Renders the start-of-file tag (via the subclass vfunc) and pushes it
 * downstream, preceded by a BYTES segment starting at offset 0 so the tag
 * lands at the very beginning of the file, plus a tag event for
 * downstream elements. Returns the flow result of the buffer push,
 * GST_FLOW_OK when the subclass produced no tag, or GST_FLOW_ERROR when
 * the vfunc is missing. */
static GstFlowReturn
gst_tag_mux_render_start_tag (GstTagMux * mux)
{
  GstTagMuxClass *klass;
  GstBuffer *buffer;
  GstTagList *taglist;
  GstEvent *event;
  GstFlowReturn ret;
  GstSegment segment;

  /* NOTE(review): taglist is not unreffed here — presumably
   * gst_tag_mux_get_tags() returns a list owned by mux; confirm. */
  taglist = gst_tag_mux_get_tags (mux);

  klass = GST_TAG_MUX_CLASS (G_OBJECT_GET_CLASS (mux));

  if (klass->render_start_tag == NULL)
    goto no_vfunc;

  buffer = klass->render_start_tag (mux, taglist);

  /* Null buffer is ok, just means we're not outputting anything */
  if (buffer == NULL) {
    GST_INFO_OBJECT (mux, "No start tag generated");
    mux->priv->start_tag_size = 0;
    return GST_FLOW_OK;
  }

  mux->priv->start_tag_size = gst_buffer_get_size (buffer);
  GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes",
      mux->priv->start_tag_size);

  /* Send newsegment event from byte position 0, so the tag really gets
   * written to the start of the file, independent of the upstream segment */
  gst_segment_init (&segment, GST_FORMAT_BYTES);
  gst_pad_push_event (mux->priv->srcpad, gst_event_new_segment (&segment));

  /* Send an event about the new tags to downstream elements */
  /* gst_event_new_tag takes ownership of the list, so use a copy */
  event = gst_event_new_tag (gst_tag_list_ref (taglist));
  gst_pad_push_event (mux->priv->srcpad, event);

  GST_BUFFER_OFFSET (buffer) = 0;
  ret = gst_pad_push (mux->priv->srcpad, buffer);

  /* track where the audio data will start and the furthest point written */
  mux->priv->current_offset = mux->priv->start_tag_size;
  mux->priv->max_offset =
      MAX (mux->priv->max_offset, mux->priv->current_offset);

  return ret;

no_vfunc:
  {
    GST_ERROR_OBJECT (mux, "Subclass does not implement "
        "render_start_tag vfunc!");
    return GST_FLOW_ERROR;
  }
}
/* Builds the caps-dependent part of the a2dp sink: chooses and creates the
 * RTP payloader for SBC or MP3 media, links it to avdtpsink, forwards any
 * pending tag list (pulling the CRC and channel-mode hints out of it for
 * the sink), and configures the payloader MTU. Returns FALSE on any setup
 * failure. */
static gboolean
gst_a2dp_sink_init_dynamic_elements (GstA2dpSink * self, GstCaps * caps)
{
  GstStructure *structure;
  GstEvent *event;
  gboolean crc;
  gchar *mode = NULL;

  structure = gst_caps_get_structure (caps, 0);

  /* first, we need to create our rtp payloader */
  if (gst_structure_has_name (structure, "audio/x-sbc")) {
    GST_LOG_OBJECT (self, "sbc media received");
    if (!gst_a2dp_sink_init_rtp_sbc_element (self))
      return FALSE;
  } else if (gst_structure_has_name (structure, "audio/mpeg")) {
    GST_LOG_OBJECT (self, "mp3 media received");
    if (!gst_a2dp_sink_init_rtp_mpeg_element (self))
      return FALSE;
  } else {
    GST_ERROR_OBJECT (self, "Unexpected media type");
    return FALSE;
  }

  if (!gst_element_link (GST_ELEMENT (self->rtp), GST_ELEMENT (self->sink))) {
    GST_ERROR_OBJECT (self, "couldn't link rtpsbcpay " "to avdtpsink");
    return FALSE;
  }

  /* check if we should push the taglist FIXME should we push this?
   * we can send the tags directly if needed */
  if (self->taglist != NULL && gst_structure_has_name (structure, "audio/mpeg")) {
    /* gst_event_new_tag takes ownership of self->taglist */
    event = gst_event_new_tag (self->taglist);
    /* send directly the crc */
    if (gst_tag_list_get_boolean (self->taglist, "has-crc", &crc))
      gst_avdtp_sink_set_crc (self->sink, crc);
    if (gst_tag_list_get_string (self->taglist, "channel-mode", &mode))
      gst_avdtp_sink_set_channel_mode (self->sink, mode);
    gst_pad_send_event (self->ghostpad, event);
    self->taglist = NULL;
    g_free (mode);
  }

  g_object_set (self->rtp, "mtu",
      gst_avdtp_sink_get_link_mtu (self->sink), NULL);

  return TRUE;
}
/* Announces the codec in use by pushing a VIDEO_CODEC tag event on the
 * decoder's source pad; the event takes ownership of the list. */
static void
gst_vp9_dec_send_tags (GstVP9Dec * dec)
{
  GstTagList *taglist = gst_tag_list_new_empty ();

  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
      GST_TAG_VIDEO_CODEC, "VP9 video", NULL);

  gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (dec),
      gst_event_new_tag (taglist));
}
/* Pushes the sticky events every source pad must emit before data, in the
 * mandated order: stream-start, caps, segment, then tags. Each kind is
 * sent at most once per request, gated by atomic flags / pending fields
 * on the private struct. */
static inline void
_push_mandatory_events (GstAggregator * self)
{
  GstAggregatorPrivate *priv = self->priv;

  if (g_atomic_int_get (&self->priv->send_stream_start)) {
    gchar s_id[32];

    GST_INFO_OBJECT (self, "pushing stream start");
    /* stream-start (FIXME: create id based on input ids) */
    g_snprintf (s_id, sizeof (s_id), "agg-%08x", g_random_int ());
    if (!gst_pad_push_event (self->srcpad, gst_event_new_stream_start (s_id))) {
      GST_WARNING_OBJECT (self->srcpad, "Sending stream start event failed");
    }
    g_atomic_int_set (&self->priv->send_stream_start, FALSE);
  }

  if (self->priv->srccaps) {
    GST_INFO_OBJECT (self, "pushing caps: %" GST_PTR_FORMAT,
        self->priv->srccaps);
    if (!gst_pad_push_event (self->srcpad,
            gst_event_new_caps (self->priv->srccaps))) {
      GST_WARNING_OBJECT (self->srcpad, "Sending caps event failed");
    }
    /* caps are consumed once pushed; clear the pending field */
    gst_caps_unref (self->priv->srccaps);
    self->priv->srccaps = NULL;
  }

  if (g_atomic_int_get (&self->priv->send_segment)) {
    /* while a flushing seek is in progress the seek handler owns the
     * segment; don't push one here */
    if (!g_atomic_int_get (&self->priv->flush_seeking)) {
      GstEvent *segev = gst_event_new_segment (&self->segment);

      /* keep one seqnum for the whole segment/seek sequence: adopt the
       * event's seqnum the first time, reuse ours afterwards */
      if (!self->priv->seqnum)
        self->priv->seqnum = gst_event_get_seqnum (segev);
      else
        gst_event_set_seqnum (segev, self->priv->seqnum);

      GST_DEBUG_OBJECT (self, "pushing segment %" GST_PTR_FORMAT, segev);
      gst_pad_push_event (self->srcpad, segev);
      g_atomic_int_set (&self->priv->send_segment, FALSE);
    }
  }

  if (priv->tags && priv->tags_changed) {
    /* the event needs its own reference; priv->tags stays owned by priv */
    gst_pad_push_event (self->srcpad,
        gst_event_new_tag (gst_tag_list_ref (priv->tags)));
    priv->tags_changed = FALSE;
  }
}
/* Handles the third Vorbis header packet (setup/"type" packet): finishes
 * synthesis initialization, then flushes events and tags that were queued
 * while the decoder was not yet initialized. Returns GST_FLOW_ERROR (with
 * an element error posted) when libvorbis initialization fails. */
static GstFlowReturn
vorbis_handle_type_packet (GstVorbisDec * vd)
{
  GList *walk;
  gint res;

  /* this packet must arrive exactly once, before initialization */
  g_assert (vd->initialized == FALSE);

#ifdef USE_TREMOLO
  if (G_UNLIKELY ((res = vorbis_dsp_init (&vd->vd, &vd->vi))))
    goto synthesis_init_error;
#else
  if (G_UNLIKELY ((res = vorbis_synthesis_init (&vd->vd, &vd->vi))))
    goto synthesis_init_error;

  if (G_UNLIKELY ((res = vorbis_block_init (&vd->vd, &vd->vb))))
    goto block_init_error;
#endif

  vd->initialized = TRUE;

  /* replay events that arrived before we could push them */
  if (vd->pendingevents) {
    for (walk = vd->pendingevents; walk; walk = g_list_next (walk))
      gst_pad_push_event (vd->srcpad, GST_EVENT_CAST (walk->data));
    g_list_free (vd->pendingevents);
    vd->pendingevents = NULL;
  }

  if (vd->taglist) {
    /* The tags have already been sent on the bus as messages. */
    /* gst_event_new_tag takes ownership of the list */
    gst_pad_push_event (vd->srcpad, gst_event_new_tag (vd->taglist));
    vd->taglist = NULL;
  }
  return GST_FLOW_OK;

  /* ERRORS */
synthesis_init_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
        (NULL), ("couldn't initialize synthesis (%d)", res));
    return GST_FLOW_ERROR;
  }
block_init_error:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (vd), STREAM, DECODE,
        (NULL), ("couldn't initialize block (%d)", res));
    return GST_FLOW_ERROR;
  }
}
/* Takes ownership of @tags: posts a copy as a bus message, then wraps the
 * original list in a tag event (timestamp 0) and pushes it on the src
 * pad. Returns the result of the event push. */
static gboolean
gst_icydemux_send_tag_event (GstICYDemux * icydemux, GstTagList * tags)
{
  GstEvent *tag_event;

  gst_element_post_message (GST_ELEMENT (icydemux),
      gst_message_new_tag (GST_OBJECT (icydemux), gst_tag_list_copy (tags)));

  tag_event = gst_event_new_tag (tags);
  GST_EVENT_TIMESTAMP (tag_event) = 0;

  GST_DEBUG_OBJECT (icydemux, "Sending tag event on src pad");
  return gst_pad_push_event (icydemux->srcpad, tag_event);
}
/* Drains complete units from the adapter and pushes them downstream.
 * A unit is either the fixed packet size, or — when no packet size is
 * known — everything available rounded down to a multiple of 16. Emits a
 * pending segment and pending tags before the first buffer, descrambles
 * DNET payloads, and timestamps each buffer from the running byte offset.
 * Returns the first non-OK flow result, or GST_FLOW_OK. */
static GstFlowReturn
gst_real_audio_demux_parse_data (GstRealAudioDemux * demux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail, unit_size;

  avail = gst_adapter_available (demux->adapter);

  if (demux->packet_size > 0)
    unit_size = demux->packet_size;
  else
    unit_size = avail & 0xfffffff0;     /* round down to next multiple of 16 */

  GST_LOG_OBJECT (demux, "available = %u, unit_size = %u", avail, unit_size);

  while (ret == GST_FLOW_OK && unit_size > 0 && avail >= unit_size) {
    GstClockTime ts;
    GstBuffer *buf;

    buf = gst_adapter_take_buffer (demux->adapter, unit_size);
    avail -= unit_size;

    /* segment must precede any data */
    if (demux->need_newsegment) {
      gst_pad_push_event (demux->srcpad,
          gst_event_new_segment (&demux->segment));
      demux->need_newsegment = FALSE;
    }

    /* gst_event_new_tag takes ownership of the pending list */
    if (demux->pending_tags) {
      gst_pad_push_event (demux->srcpad,
          gst_event_new_tag (demux->pending_tags));
      demux->pending_tags = NULL;
    }

    if (demux->fourcc == GST_RM_AUD_DNET) {
      buf = gst_rm_utils_descramble_dnet_buffer (buf);
    }

    ts = gst_real_demux_get_timestamp_from_offset (demux, demux->offset);
    GST_BUFFER_TIMESTAMP (buf) = ts;

    demux->segment.position = ts;

    ret = gst_pad_push (demux->srcpad, buf);
  }

  return ret;
}
/* AVRCP metadata callback: records a duration tag (posting a
 * duration-changed message when present), pushes a copy of the tag list
 * downstream as an event, and posts the original list on the bus (the
 * tag message takes ownership of it). */
static void
avrcp_metadata_cb (GstAvrcpConnection * avrcp, GstTagList * taglist,
    gpointer user_data)
{
  GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (user_data);
  guint64 dur;

  if (gst_tag_list_get_uint64 (taglist, GST_TAG_DURATION, &dur)) {
    avdtpsrc->duration = dur;
    gst_element_post_message (GST_ELEMENT (avdtpsrc),
        gst_message_new_duration_changed (GST_OBJECT (avdtpsrc)));
  }

  gst_pad_push_event (GST_BASE_SRC_PAD (avdtpsrc),
      gst_event_new_tag (gst_tag_list_copy (taglist)));
  gst_element_post_message (GST_ELEMENT (avdtpsrc),
      gst_message_new_tag (GST_OBJECT (avdtpsrc), taglist));
}
/* Finalizes the Chromaprint context, stores the computed fingerprint on
 * the element, stops further recording, and announces the fingerprint
 * downstream as a tag event. Does nothing when 3 seconds or less of
 * audio were accumulated. */
static void
gst_chromaprint_create_fingerprint (GstChromaprint * chromaprint)
{
  GstTagList *taglist;

  if (chromaprint->duration <= 3)
    return;

  GST_DEBUG_OBJECT (chromaprint,
      "Generating fingerprint based on %d seconds of audio",
      chromaprint->duration);

  chromaprint_finish (chromaprint->context);
  chromaprint_get_fingerprint (chromaprint->context,
      &chromaprint->fingerprint);
  chromaprint->record = FALSE;

  taglist = gst_tag_list_new (GST_TAG_CHROMAPRINT_FINGERPRINT,
      chromaprint->fingerprint, NULL);
  gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (chromaprint),
      gst_event_new_tag (taglist));
}
/* Pad activation handler: switches the pad to push mode and injects a
 * ReplayGain tag event (gain/peak doubled, plus reference level) built
 * from the global g_activate_gain settings. Always returns TRUE. */
static gboolean on_activate(GstPad* pad, GstObject* parent)
{
    GstTagList* list;
    GValue value = G_VALUE_INIT;  /* equivalent to the old memset-to-zero */
    gboolean sent;

    gst_pad_activate_mode(pad, GST_PAD_MODE_PUSH, TRUE);

    list = gst_tag_list_new_empty();
    g_value_init(&value, G_TYPE_DOUBLE);

    fprintf(stderr,"bwub setting gain/peak %f %f %f\n",
        g_activate_gain.peak, g_activate_gain.gain, g_activate_gain.level);

    g_value_set_double(&value, g_activate_gain.gain * 2);
    gst_tag_list_add_value(list, GST_TAG_MERGE_REPLACE, GST_TAG_TRACK_GAIN, &value);
    gst_tag_list_add_value(list, GST_TAG_MERGE_REPLACE, GST_TAG_ALBUM_GAIN, &value);

    g_value_set_double(&value, g_activate_gain.peak * 2);
    gst_tag_list_add_value(list, GST_TAG_MERGE_REPLACE, GST_TAG_TRACK_PEAK, &value);
    gst_tag_list_add_value(list, GST_TAG_MERGE_REPLACE, GST_TAG_ALBUM_PEAK, &value);

    g_value_set_double(&value, g_activate_gain.level);
    gst_tag_list_add_value(list, GST_TAG_MERGE_REPLACE, GST_TAG_REFERENCE_LEVEL, &value);
    g_value_unset(&value);  /* FIX: release the GValue */

    /* FIX: the event push used to live inside assert(), so compiling with
     * NDEBUG would silently drop the tag event (side effect in assert).
     * Send unconditionally, then assert on the result. */
    sent = gst_pad_send_event(pad, gst_event_new_tag(list));
    assert(sent == TRUE);
    (void)sent;  /* avoid unused-variable warning under NDEBUG */

    return TRUE;
}
// GStreamer event filter that removes any image metadata. static gboolean remove_image(GstPad* pad, GstObject* parent, GstEvent* event) { GstTagList* tags = NULL; GstPad* sink = NULL; gboolean ret = false; switch (GST_EVENT_TYPE(event)) { case GST_EVENT_TAG: gst_event_parse_tag(event, &tags); gst_tag_list_remove_tag(tags, "image"); event = gst_event_new_tag(tags); break; case GST_EVENT_CAPS: sink = gst_element_get_static_pad(GST_ELEMENT(parent), "src"); ret = gst_pad_push_event(sink, event); gst_object_unref(sink); return ret; default: break; } return gst_pad_event_default(pad, parent, event); }
/* Chain function: accumulates incoming buffers in the adapter and slices
 * out multipart parts. For each part it parses the MIME headers, finds
 * the boundary, routes the payload to (or creates) the per-mime src pad,
 * and pushes it with the input timestamp (0 for a pad's first buffer,
 * preceded by a newsegment and a container-format tag). Loops until the
 * adapter runs dry or a part is incomplete. */
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  GstClockTime timestamp;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  /* a discontinuity invalidates whatever partial part we had buffered */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    /* a negative size from the parse helpers means "need more data" or a
     * hard error; both are handled after the nodata label */
    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
      if (created) {
        GstTagList *tags;

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        tags = gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart",
            NULL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));

        GST_BUFFER_TIMESTAMP (outbuf) = 0;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      }

      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
          GST_BUFFER_CAPS (outbuf));
      res = gst_pad_push (srcpad->pad, outbuf);
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  gst_object_unref (multipart);

  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_UNEXPECTED;

  return res;
}
/* Builds the caps-dependent part of the a2dp sink (bluez variant):
 * removes the placeholder fakesink, creates the RTP payloader matching
 * the media type, sets up avdtpsink, forwards pending tags (extracting
 * CRC and channel-mode hints for the sink), configures device caps and
 * payloader MTU, and replays any deferred newsegment event. Returns
 * FALSE on any setup failure. */
static gboolean gst_a2dp_sink_init_dynamic_elements(GstA2dpSink *self,
						GstCaps *caps)
{
	GstStructure *structure;
	GstEvent *event;
	GstPad *capsfilterpad;
	gboolean crc;
	gchar *mode = NULL;

	structure = gst_caps_get_structure(caps, 0);

	/* before everything we need to remove fakesink */
	gst_a2dp_sink_remove_fakesink(self);

	/* first, we need to create our rtp payloader */
	if (gst_structure_has_name(structure, "audio/x-sbc")) {
		GST_LOG_OBJECT(self, "sbc media received");
		if (!gst_a2dp_sink_init_rtp_sbc_element(self))
			return FALSE;
	} else if (gst_structure_has_name(structure, "audio/mpeg")) {
		GST_LOG_OBJECT(self, "mp3 media received");
		if (!gst_a2dp_sink_init_rtp_mpeg_element(self))
			return FALSE;
	} else {
		GST_ERROR_OBJECT(self, "Unexpected media type");
		return FALSE;
	}

	if (!gst_a2dp_sink_init_avdtp_sink(self))
		return FALSE;

	/* check if we should push the taglist FIXME should we push this?
	 * we can send the tags directly if needed */
	if (self->taglist != NULL &&
			gst_structure_has_name(structure, "audio/mpeg")) {

		/* gst_event_new_tag takes ownership of self->taglist */
		event = gst_event_new_tag(self->taglist);

		/* send directly the crc */
		if (gst_tag_list_get_boolean(self->taglist, "has-crc", &crc))
			gst_avdtp_sink_set_crc(self->sink, crc);

		if (gst_tag_list_get_string(self->taglist, "channel-mode",
				&mode))
			gst_avdtp_sink_set_channel_mode(self->sink, mode);

		capsfilterpad = gst_ghost_pad_get_target(self->ghostpad);
		gst_pad_send_event(capsfilterpad, event);
		/* FIX: gst_ghost_pad_get_target() returns a new reference
		 * which was previously leaked here */
		gst_object_unref(capsfilterpad);
		self->taglist = NULL;
		g_free(mode);
	}

	if (!gst_avdtp_sink_set_device_caps(self->sink, caps))
		return FALSE;

	g_object_set(G_OBJECT(self->rtp), "mtu",
		gst_avdtp_sink_get_link_mtu(self->sink), NULL);

	/* we forward our new segment here if we have one */
	if (self->newseg_event) {
		gst_pad_send_event(GST_BASE_RTP_PAYLOAD_SINKPAD(self->rtp),
					self->newseg_event);
		self->newseg_event = NULL;
	}

	return TRUE;
}
static GstFlowReturn gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstFlacTag *tag; GstFlowReturn ret; GstMapInfo map; gsize size; ret = GST_FLOW_OK; tag = GST_FLAC_TAG (parent); gst_adapter_push (tag->adapter, buffer); /* Initial state, we don't even know if we are dealing with a flac file */ if (tag->state == GST_FLAC_TAG_STATE_INIT) { GstBuffer *id_buffer; if (gst_adapter_available (tag->adapter) < sizeof (FLAC_MAGIC)) goto cleanup; id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE); GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier"); if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) { GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer"); ret = gst_pad_push (tag->srcpad, id_buffer); if (ret != GST_FLOW_OK) goto cleanup; tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS; } else { /* FIXME: does that work well with FLAC files containing ID3v2 tags ? */ gst_buffer_unref (id_buffer); GST_ELEMENT_ERROR (tag, STREAM, WRONG_TYPE, (NULL), (NULL)); ret = GST_FLOW_ERROR; } } /* The fLaC magic string has been skipped, try to detect the beginning * of a metadata block */ if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) { guint type; gboolean is_last; const guint8 *block_header; g_assert (tag->metadata_block_size == 0); g_assert (tag->metadata_last_block == FALSE); /* The header of a flac metadata block is 4 bytes long: * 1st bit: indicates whether this is the last metadata info block * 7 next bits: 4 if vorbis comment block * 24 next bits: size of the metadata to follow (big endian) */ if (gst_adapter_available (tag->adapter) < 4) goto cleanup; block_header = gst_adapter_map (tag->adapter, 4); is_last = ((block_header[0] & 0x80) == 0x80); type = block_header[0] & 0x7F; size = (block_header[1] << 16) | (block_header[2] << 8) | block_header[3]; gst_adapter_unmap (tag->adapter); /* The 4 bytes long header isn't included in the metadata size */ tag->metadata_block_size = size + 4; 
tag->metadata_last_block = is_last; GST_DEBUG_OBJECT (tag, "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, " "is vorbiscomment: %d, is last: %d", size, type, (type == 0x04), is_last); /* Metadata blocks of type 4 are vorbis comment blocks */ if (type == 0x04) { tag->state = GST_FLAC_TAG_STATE_VC_METADATA_BLOCK; } else { tag->state = GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK; } } /* Reads a metadata block */ if ((tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) || (tag->state == GST_FLAC_TAG_STATE_VC_METADATA_BLOCK)) { GstBuffer *metadata_buffer; if (gst_adapter_available (tag->adapter) < tag->metadata_block_size) goto cleanup; metadata_buffer = gst_adapter_take_buffer (tag->adapter, tag->metadata_block_size); /* clear the is-last flag, as the last metadata block will * be the vorbis comment block which we will build ourselves. */ gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE); map.data[0] &= (~0x80); gst_buffer_unmap (metadata_buffer, &map); if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) { GST_DEBUG_OBJECT (tag, "pushing metadata block buffer"); ret = gst_pad_push (tag->srcpad, metadata_buffer); if (ret != GST_FLOW_OK) goto cleanup; } else { tag->vorbiscomment = metadata_buffer; } tag->metadata_block_size = 0; tag->state = GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK; } /* This state is mainly used to be able to stop as soon as we read * a vorbiscomment block from the flac file if we are in an only output * tags mode */ if (tag->state == GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK) { /* Check if in the previous iteration we read a vorbis comment metadata * block, and stop now if the user only wants to read tags */ if (tag->vorbiscomment != NULL) { guint8 id_data[4]; /* We found some tags, try to parse them and notify the other elements * that we encountered some tags */ GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags"); gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4); tag->tags = gst_tag_list_from_vorbiscomment_buffer 
(tag->vorbiscomment, id_data, 4, NULL); if (tag->tags != NULL) { gst_pad_push_event (tag->srcpad, gst_event_new_tag (gst_tag_list_copy (tag->tags))); } gst_buffer_unref (tag->vorbiscomment); tag->vorbiscomment = NULL; } /* Skip to next state */ if (tag->metadata_last_block == FALSE) { tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS; } else { tag->state = GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT; } } /* Creates a vorbis comment block from the metadata which was set * on the gstreamer element, and add it to the flac stream */ if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) { GstBuffer *buffer; const GstTagList *user_tags; GstTagList *merged_tags; /* merge the tag lists */ user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tag)); if (user_tags != NULL) { merged_tags = gst_tag_list_merge (user_tags, tag->tags, gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tag))); } else { merged_tags = gst_tag_list_copy (tag->tags); } if (merged_tags == NULL) { /* If we get a NULL list of tags, we must generate a padding block * which is marked as the last metadata block, otherwise we'll * end up with a corrupted flac file. 
*/ GST_WARNING_OBJECT (tag, "No tags found"); buffer = gst_buffer_new_and_alloc (12); if (buffer == NULL) goto no_buffer; gst_buffer_map (buffer, &map, GST_MAP_WRITE); memset (map.data, 0, map.size); map.data[0] = 0x81; /* 0x80 = Last metadata block, * 0x01 = padding block */ gst_buffer_unmap (buffer, &map); } else { guchar header[4]; guint8 fbit[1]; memset (header, 0, sizeof (header)); header[0] = 0x84; /* 0x80 = Last metadata block, * 0x04 = vorbiscomment block */ buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header, sizeof (header), NULL); GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags); gst_tag_list_free (merged_tags); if (buffer == NULL) goto no_comment; size = gst_buffer_get_size (buffer); if ((size < 4) || ((size - 4) > 0xFFFFFF)) goto comment_too_long; fbit[0] = 1; /* Get rid of the framing bit at the end of the vorbiscomment buffer * if it exists since libFLAC seems to lose sync because of this * bit in gstflacdec */ if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) { buffer = gst_buffer_make_writable (buffer); gst_buffer_resize (buffer, 0, size - 1); } } /* The 4 byte metadata block header isn't accounted for in the total * size of the metadata block */ gst_buffer_map (buffer, &map, GST_MAP_WRITE); map.data[1] = (((map.size - 4) & 0xFF0000) >> 16); map.data[2] = (((map.size - 4) & 0x00FF00) >> 8); map.data[3] = ((map.size - 4) & 0x0000FF); gst_buffer_unmap (buffer, &map); GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment " "buffer", map.size); ret = gst_pad_push (tag->srcpad, buffer); if (ret != GST_FLOW_OK) { goto cleanup; } tag->state = GST_FLAC_TAG_STATE_AUDIO_DATA; }
/* Merges event tags with tags set via the GstTagSetter interface (using
 * the setter's merge mode), has the subclass render the merged list into
 * a tag buffer, pushes a BYTES newsegment from offset 0 plus a tag event
 * downstream, and returns the rendered buffer (offset 0). Returns NULL,
 * freeing the merged list, when the vfunc is missing or rendering fails. */
static GstBuffer *
gst_tag_lib_mux_render_tag (GstTagLibMux * mux)
{
  GstTagLibMuxClass *klass;
  GstTagMergeMode merge_mode;
  GstTagSetter *tagsetter;
  GstBuffer *buffer;
  const GstTagList *tagsetter_tags;
  GstTagList *taglist;
  GstEvent *event;

  tagsetter = GST_TAG_SETTER (mux);

  tagsetter_tags = gst_tag_setter_get_tag_list (tagsetter);
  merge_mode = gst_tag_setter_get_tag_merge_mode (tagsetter);

  GST_LOG_OBJECT (mux, "merging tags, merge mode = %d", merge_mode);
  GST_LOG_OBJECT (mux, "event tags: %" GST_PTR_FORMAT, mux->event_tags);
  GST_LOG_OBJECT (mux, "set tags: %" GST_PTR_FORMAT, tagsetter_tags);

  taglist = gst_tag_list_merge (tagsetter_tags, mux->event_tags, merge_mode);

  GST_LOG_OBJECT (mux, "final tags: %" GST_PTR_FORMAT, taglist);

  klass = GST_TAG_LIB_MUX_CLASS (G_OBJECT_GET_CLASS (mux));

  if (klass->render_tag == NULL)
    goto no_vfunc;

  buffer = klass->render_tag (mux, taglist);

  if (buffer == NULL)
    goto render_error;

  mux->tag_size = GST_BUFFER_SIZE (buffer);
  /* NOTE(review): G_GSIZE_FORMAT assumes tag_size is a gsize — confirm the
   * field's declared type matches, otherwise this is a format mismatch */
  GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes", mux->tag_size);

  /* Send newsegment event from byte position 0, so the tag really gets
   * written to the start of the file, independent of the upstream segment */
  gst_pad_push_event (mux->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));

  /* Send an event about the new tags to downstream elements */
  /* gst_event_new_tag takes ownership of the list, so no need to unref it */
  event = gst_event_new_tag (taglist);
  gst_pad_push_event (mux->srcpad, event);

  GST_BUFFER_OFFSET (buffer) = 0;

  return buffer;

no_vfunc:
  {
    GST_ERROR_OBJECT (mux, "Subclass does not implement render_tag vfunc!");
    gst_tag_list_free (taglist);
    return NULL;
  }

render_error:
  {
    GST_ERROR_OBJECT (mux, "Failed to render tag");
    gst_tag_list_free (taglist);
    return NULL;
  }
}
/* Initializes the Musepack demuxer for the stream: sets up the reader,
 * probes the stream info, emits stream-start, negotiates raw audio caps,
 * records bytes-per-frame/rate and the segment duration, and pushes the
 * codec/encoder/bitrate/ReplayGain tags downstream. Returns FALSE (with
 * an element error posted) when the stream cannot be opened or caps
 * cannot be negotiated. */
static gboolean
gst_musepack_stream_init (GstMusepackDec * musepackdec)
{
  mpc_streaminfo i;
  GstTagList *tags;
  GstCaps *caps;
  gchar *stream_id;

  /* set up reading */
  gst_musepack_init_reader (musepackdec->r, musepackdec);

  musepackdec->d = mpc_demux_init (musepackdec->r);
  if (!musepackdec->d) {
    GST_ELEMENT_ERROR (musepackdec, STREAM, WRONG_TYPE, (NULL), (NULL));
    return FALSE;
  }

  mpc_demux_get_info (musepackdec->d, &i);

  stream_id = gst_pad_create_stream_id (musepackdec->srcpad,
      GST_ELEMENT_CAST (musepackdec), NULL);
  gst_pad_push_event (musepackdec->srcpad,
      gst_event_new_stream_start (stream_id));
  g_free (stream_id);

  /* capsnego */
  caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, GST_MPC_FORMAT,
      "layout", G_TYPE_STRING, "interleaved",
      "channels", G_TYPE_INT, i.channels,
      "rate", G_TYPE_INT, i.sample_freq, NULL);
  gst_pad_use_fixed_caps (musepackdec->srcpad);
  if (!gst_pad_set_caps (musepackdec->srcpad, caps)) {
    GST_ELEMENT_ERROR (musepackdec, CORE, NEGOTIATION, (NULL), (NULL));
    /* FIX: caps were leaked on this error path */
    gst_caps_unref (caps);
    return FALSE;
  }
  /* FIX: gst_pad_set_caps() takes its own reference; our ref was leaked */
  gst_caps_unref (caps);

  g_atomic_int_set (&musepackdec->bps, 4 * i.channels);
  g_atomic_int_set (&musepackdec->rate, i.sample_freq);

  musepackdec->segment.position = 0;
  musepackdec->segment.duration = mpc_streaminfo_get_length_samples (&i);

  /* send basic tags */
  tags = gst_tag_list_new_empty ();
  gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
      GST_TAG_AUDIO_CODEC, "Musepack", NULL);

  if (i.encoder[0] != '\0' && i.encoder_version > 0) {
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_ENCODER, i.encoder,
        GST_TAG_ENCODER_VERSION, i.encoder_version, NULL);
  }

  if (i.bitrate > 0) {
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_BITRATE, i.bitrate, NULL);
  } else if (i.average_bitrate > 0.0) {
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_BITRATE, (guint) i.average_bitrate, NULL);
  }

  /* ReplayGain values are stored in centi-dB */
  if (i.gain_title != 0 || i.gain_album != 0) {
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_TRACK_GAIN, (gdouble) i.gain_title / 100.0,
        GST_TAG_ALBUM_GAIN, (gdouble) i.gain_album / 100.0, NULL);
  }

  /* peaks are 16-bit sample values; skip the 32767 "unset/clipped" marker */
  if (i.peak_title != 0 && i.peak_title != 32767 &&
      i.peak_album != 0 && i.peak_album != 32767) {
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_TRACK_PEAK, (gdouble) i.peak_title / 32767.0,
        GST_TAG_ALBUM_PEAK, (gdouble) i.peak_album / 32767.0, NULL);
  }

  GST_LOG_OBJECT (musepackdec, "Posting tags: %" GST_PTR_FORMAT, tags);
  /* gst_event_new_tag takes ownership of tags */
  gst_pad_push_event (musepackdec->srcpad, gst_event_new_tag (tags));

  return TRUE;
}
/* GstBaseSrc::fill vfunc: produces one buffer of audio by running the LV2
 * plugin instance.  Handles one-shot tag pushing, seek discontinuities,
 * partial buffers at the configured stop position (EOS), timestamping in
 * both forward and reverse playback, and connecting output/CV ports.
 *
 * offset/length of (guint64)-1 / (guint)-1 mean "use internal defaults". */
static GstFlowReturn
gst_lv2_source_fill (GstBaseSrc * base, guint64 offset,
    guint length, GstBuffer * buffer)
{
  GstLV2Source *lv2 = (GstLV2Source *) base;
  GstLV2SourceClass *klass = (GstLV2SourceClass *) GST_BASE_SRC_GET_CLASS (lv2);
  GstLV2Class *lv2_class = &klass->lv2;
  GstLV2Group *lv2_group;
  GstLV2Port *lv2_port;
  GstClockTime next_time;
  gint64 next_sample, next_byte;
  guint bytes, samples;
  GstElementClass *eclass;
  GstMapInfo map;
  gint samplerate, bpf;
  guint j, k, l;
  gfloat *out = NULL, *cv = NULL, *mem;
  gfloat val;

  /* example for tagging generated data: push a description tag once, via
   * the parent class' send_event so it travels the usual event path */
  if (!lv2->tags_pushed) {
    GstTagList *taglist;

    taglist = gst_tag_list_new (GST_TAG_DESCRIPTION, "lv2 wave", NULL);

    eclass = GST_ELEMENT_CLASS (parent_class);
    if (eclass->send_event)
      eclass->send_event (GST_ELEMENT (base), gst_event_new_tag (taglist));
    else
      gst_tag_list_unref (taglist);     /* nobody consumed it; avoid leak */
    lv2->tags_pushed = TRUE;
  }

  if (lv2->eos_reached) {
    GST_INFO_OBJECT (lv2, "eos");
    return GST_FLOW_EOS;
  }

  samplerate = GST_AUDIO_INFO_RATE (&lv2->info);
  bpf = GST_AUDIO_INFO_BPF (&lv2->info);

  /* if no length was given, use our default length in samples otherwise convert
   * the length in bytes to samples. */
  if (length == -1)
    samples = lv2->samples_per_buffer;
  else
    samples = length / bpf;

  /* if no offset was given, use our next logical byte */
  if (offset == -1)
    offset = lv2->next_byte;

  /* now see if we are at the byteoffset we think we are */
  if (offset != lv2->next_byte) {
    GST_DEBUG_OBJECT (lv2, "seek to new offset %" G_GUINT64_FORMAT, offset);
    /* we have a discont in the expected sample offset, do a 'seek' */
    lv2->next_sample = offset / bpf;
    lv2->next_time =
        gst_util_uint64_scale_int (lv2->next_sample, GST_SECOND, samplerate);
    lv2->next_byte = offset;
  }

  /* check for eos: does the configured stop position fall inside this
   * buffer?  If so, generate only up to the stop sample. */
  if (lv2->check_seek_stop &&
      (lv2->sample_stop > lv2->next_sample) &&
      (lv2->sample_stop < lv2->next_sample + samples)
      ) {
    /* calculate only partial buffer */
    lv2->generate_samples_per_buffer = lv2->sample_stop - lv2->next_sample;
    next_sample = lv2->sample_stop;
    lv2->eos_reached = TRUE;

    GST_INFO_OBJECT (lv2, "eos reached");
  } else {
    /* calculate full buffer */
    lv2->generate_samples_per_buffer = samples;
    next_sample = lv2->next_sample + (lv2->reverse ? (-samples) : samples);
  }

  bytes = lv2->generate_samples_per_buffer * bpf;

  next_byte = lv2->next_byte + (lv2->reverse ? (-bytes) : bytes);
  next_time = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);

  GST_LOG_OBJECT (lv2, "samplerate %d", samplerate);
  GST_LOG_OBJECT (lv2, "next_sample %" G_GINT64_FORMAT ", ts %" GST_TIME_FORMAT,
      next_sample, GST_TIME_ARGS (next_time));

  gst_buffer_set_size (buffer, bytes);

  /* offsets are in samples; timestamps/durations depend on direction */
  GST_BUFFER_OFFSET (buffer) = lv2->next_sample;
  GST_BUFFER_OFFSET_END (buffer) = next_sample;
  if (!lv2->reverse) {
    GST_BUFFER_TIMESTAMP (buffer) = lv2->timestamp_offset + lv2->next_time;
    GST_BUFFER_DURATION (buffer) = next_time - lv2->next_time;
  } else {
    GST_BUFFER_TIMESTAMP (buffer) = lv2->timestamp_offset + next_time;
    GST_BUFFER_DURATION (buffer) = lv2->next_time - next_time;
  }

  /* apply GstController-bound property values for this timestamp */
  gst_object_sync_values (GST_OBJECT (lv2), GST_BUFFER_TIMESTAMP (buffer));

  lv2->next_time = next_time;
  lv2->next_sample = next_sample;
  lv2->next_byte = next_byte;

  /* NOTE(review): this logs 'samples' while the buffer may actually hold
   * generate_samples_per_buffer (smaller on the EOS partial buffer) —
   * log-only discrepancy, the sibling LADSPA variant logs the latter */
  GST_LOG_OBJECT (lv2, "generating %u samples at ts %" GST_TIME_FORMAT,
      samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);

  /* multi channel outputs: render each port into a planar scratch buffer,
   * interleaved into the output buffer after lilv_instance_run() below */
  lv2_group = &lv2_class->out_group;
  if (lv2_group->ports->len > 1) {
    out = g_new0 (gfloat, samples * lv2_group->ports->len);
    for (j = 0; j < lv2_group->ports->len; ++j) {
      lv2_port = &g_array_index (lv2_group->ports, GstLV2Port, j);
      lilv_instance_connect_port (lv2->lv2.instance, lv2_port->index,
          out + (j * samples));
      GST_LOG_OBJECT (lv2, "connected port %d/%d", j, lv2_group->ports->len);
    }
  } else {
    /* single output port can render directly into the mapped buffer */
    lv2_port = &g_array_index (lv2_group->ports, GstLV2Port, 0);
    lilv_instance_connect_port (lv2->lv2.instance, lv2_port->index,
        (gfloat *) map.data);
    GST_LOG_OBJECT (lv2, "connected port 0");
  }

  /* cv ports: fill each control-voltage input with its (constant) control
   * value for the whole buffer */
  cv = g_new (gfloat, samples * lv2_class->num_cv_in);
  for (j = k = 0; j < lv2_class->control_in_ports->len; j++) {
    lv2_port = &g_array_index (lv2_class->control_in_ports, GstLV2Port, j);
    if (lv2_port->type != GST_LV2_PORT_CV)
      continue;

    mem = cv + (k * samples);
    val = lv2->lv2.ports.control.in[j];
    /* FIXME: use gst_control_binding_get_value_array */
    for (l = 0; l < samples; l++)
      mem[l] = val;
    lilv_instance_connect_port (lv2->lv2.instance, lv2_port->index, mem);
    k++;
  }

  lilv_instance_run (lv2->lv2.instance, samples);

  if (lv2_group->ports->len > 1) {
    gst_lv2_source_interleave_data (lv2_group->ports->len,
        (gfloat *) map.data, samples, out);
    g_free (out);
  }

  g_free (cv);

  gst_buffer_unmap (buffer, &map);

  return GST_FLOW_OK;
}
/* One-time stream setup: feeds the adapter's accumulated data to libgme,
 * posts metadata tags, configures fading/duration, pushes a time segment
 * and starts the streaming task.
 *
 * Returns: TRUE on success, FALSE on any failure (element error posted).
 */
static gboolean
gme_setup (GstGmeDec * gme)
{
  gme_info_t *info = NULL;
  gme_err_t gme_err = NULL;
  GstTagList *taglist;
  guint64 total_duration;
  guint64 fade_time;
  GstBuffer *buffer;
  GstSegment seg;
  GstMapInfo map;

  if (!gst_adapter_available (gme->adapter) || !gme_negotiate (gme)) {
    return FALSE;
  }

  buffer =
      gst_adapter_take_buffer (gme->adapter,
      gst_adapter_available (gme->adapter));

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  gme_err = gme_open_data (map.data, map.size, &gme->player, 32000);
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  if (gme_err || !gme->player) {
    if (gme->player) {
      gme_delete (gme->player);
      gme->player = NULL;
    }

    GST_ELEMENT_ERROR (gme, STREAM, DEMUX, (NULL), ("%s", gme_err));
    return FALSE;
  }

  /* FIX: the result of gme_track_info() was previously ignored; on failure
   * 'info' is not valid and dereferencing it below would crash */
  gme_err = gme_track_info (gme->player, &info, 0);
  if (gme_err || info == NULL) {
    GST_ELEMENT_ERROR (gme, STREAM, DEMUX, (NULL),
        ("%s", gme_err ? gme_err : "could not get track info"));
    gme_delete (gme->player);
    gme->player = NULL;
    return FALSE;
  }

  taglist = gst_tag_list_new_empty ();

  if (info->song && *info->song)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
        info->song, NULL);
  if (info->author && *info->author)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ARTIST,
        info->author, NULL);
  /* Prefer the name of the official soundtrack over the name of the game (since this is
   * how track numbers are derived) */
  if (info->game && *info->game)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ALBUM,
        info->game, NULL);
  if (info->comment && *info->comment)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COMMENT,
        info->comment, NULL);
  if (info->dumper && *info->dumper)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_CONTACT,
        info->dumper, NULL);
  if (info->copyright && *info->copyright)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COPYRIGHT,
        info->copyright, NULL);
  if (info->system && *info->system)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
        info->system, NULL);

  /* play_length is in ms; looping tracks get an extra 8 s fade-out tail */
  gme->total_duration = total_duration =
      gst_util_uint64_scale_int (info->play_length +
      (info->loop_length > 0 ? 8000 : 0), GST_MSECOND, 1);
  fade_time = info->loop_length > 0 ? info->play_length : 0;

  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
      GST_TAG_DURATION, total_duration, NULL);

  /* the tag event takes ownership of 'taglist' */
  gst_pad_push_event (gme->srcpad, gst_event_new_tag (taglist));

  /* FIX: use gme_free_info() rather than g_free(); the info struct is
   * allocated and owned by libgme, plain g_free() leaks its string fields */
  gme_free_info (info);

#ifdef HAVE_LIBGME_ACCURACY
  /* TODO: Is it worth it to make this optional? */
  gme_enable_accuracy (gme->player, 1);
#endif

  gme_start_track (gme->player, 0);
  if (fade_time)
    gme_set_fade (gme->player, fade_time);

  gst_segment_init (&seg, GST_FORMAT_TIME);
  gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));

  gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play,
      gme->srcpad, NULL);

  gme->initialized = TRUE;
  gme->seeking = FALSE;
  gme->seekpoint = 0;
  return gme->initialized;
}
static void gst_rg_analysis_handle_eos (GstRgAnalysis * filter) { gboolean album_processing = (filter->num_tracks > 0); gboolean album_finished = (filter->num_tracks == 1); gboolean album_skipping = album_processing && filter->skip; filter->has_track_gain = FALSE; filter->has_track_peak = FALSE; if (album_finished) { filter->ignore_tags = FALSE; filter->skip = FALSE; filter->has_album_gain = FALSE; filter->has_album_peak = FALSE; } else if (!album_skipping) { filter->skip = FALSE; } /* We might have just fully processed a track because it has * incomplete tags. If we do album processing and allow skipping * (not forced), prevent switching to skipping if a later track with * full tags comes along: */ if (!filter->forced && album_processing && !album_finished) filter->ignore_tags = TRUE; if (!filter->skip) { GstTagList *tag_list = NULL; gboolean track_success; gboolean album_success = FALSE; track_success = gst_rg_analysis_track_result (filter, &tag_list); if (album_finished) album_success = gst_rg_analysis_album_result (filter, &tag_list); else if (!album_processing) rg_analysis_reset_album (filter->ctx); if (track_success || album_success) { GST_LOG_OBJECT (filter, "posting tag list with results"); gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND, GST_TAG_REFERENCE_LEVEL, filter->reference_level, NULL); /* This steals our reference to the list: */ gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (GST_BASE_TRANSFORM (filter)), gst_event_new_tag (gst_tag_list_ref (tag_list))); } } if (album_processing) { filter->num_tracks--; if (!album_finished) { GST_DEBUG_OBJECT (filter, "album not finished yet (num-tracks is now %u)", filter->num_tracks); } else { GST_DEBUG_OBJECT (filter, "album finished (num-tracks is now 0)"); } } if (album_processing) g_object_notify (G_OBJECT (filter), "num-tracks"); }
/* GstBaseSrc::fill vfunc: produces one buffer of audio from the LADSPA
 * plugin.  Handles one-shot tag pushing, seek discontinuities, partial
 * buffers at the configured stop position (EOS) and timestamping in both
 * forward and reverse playback.
 *
 * offset/length of (guint64)-1 / (guint)-1 mean "use internal defaults". */
static GstFlowReturn
gst_ladspa_source_type_fill (GstBaseSrc * base, guint64 offset,
    guint length, GstBuffer * buffer)
{
  GstLADSPASource *ladspa = GST_LADSPA_SOURCE (base);
  GstClockTime next_time;
  gint64 next_sample, next_byte;
  gint bytes, samples;
  GstElementClass *eclass;
  GstMapInfo map;
  gint samplerate, bpf;

  /* example for tagging generated data: push a description tag once, via
   * the parent class' send_event so it travels the usual event path */
  if (!ladspa->tags_pushed) {
    GstTagList *taglist;

    taglist = gst_tag_list_new (GST_TAG_DESCRIPTION, "ladspa wave", NULL);

    eclass = GST_ELEMENT_CLASS (gst_ladspa_source_type_parent_class);
    if (eclass->send_event)
      eclass->send_event (GST_ELEMENT (base), gst_event_new_tag (taglist));
    else
      gst_tag_list_unref (taglist);     /* nobody consumed it; avoid leak */
    ladspa->tags_pushed = TRUE;
  }

  if (ladspa->eos_reached) {
    GST_INFO_OBJECT (ladspa, "eos");
    return GST_FLOW_EOS;
  }

  samplerate = GST_AUDIO_INFO_RATE (&ladspa->info);
  bpf = GST_AUDIO_INFO_BPF (&ladspa->info);

  /* if no length was given, use our default length in samples otherwise convert
   * the length in bytes to samples. */
  if (length == -1)
    samples = ladspa->samples_per_buffer;
  else
    samples = length / bpf;

  /* if no offset was given, use our next logical byte */
  if (offset == -1)
    offset = ladspa->next_byte;

  /* now see if we are at the byteoffset we think we are */
  if (offset != ladspa->next_byte) {
    GST_DEBUG_OBJECT (ladspa, "seek to new offset %" G_GUINT64_FORMAT, offset);
    /* we have a discont in the expected sample offset, do a 'seek' */
    ladspa->next_sample = offset / bpf;
    ladspa->next_time =
        gst_util_uint64_scale_int (ladspa->next_sample, GST_SECOND,
        samplerate);
    ladspa->next_byte = offset;
  }

  /* check for eos: does the configured stop position fall inside this
   * buffer?  If so, generate only up to the stop sample. */
  if (ladspa->check_seek_stop &&
      (ladspa->sample_stop > ladspa->next_sample) &&
      (ladspa->sample_stop < ladspa->next_sample + samples)
      ) {
    /* calculate only partial buffer */
    ladspa->generate_samples_per_buffer =
        ladspa->sample_stop - ladspa->next_sample;
    next_sample = ladspa->sample_stop;
    ladspa->eos_reached = TRUE;
  } else {
    /* calculate full buffer */
    ladspa->generate_samples_per_buffer = samples;
    next_sample =
        ladspa->next_sample + (ladspa->reverse ? (-samples) : samples);
  }

  bytes = ladspa->generate_samples_per_buffer * bpf;

  next_byte = ladspa->next_byte + (ladspa->reverse ? (-bytes) : bytes);
  next_time = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);

  GST_LOG_OBJECT (ladspa, "samplerate %d", samplerate);
  GST_LOG_OBJECT (ladspa,
      "next_sample %" G_GINT64_FORMAT ", ts %" GST_TIME_FORMAT, next_sample,
      GST_TIME_ARGS (next_time));

  gst_buffer_set_size (buffer, bytes);

  /* offsets are in samples; timestamps/durations depend on direction */
  GST_BUFFER_OFFSET (buffer) = ladspa->next_sample;
  GST_BUFFER_OFFSET_END (buffer) = next_sample;
  if (!ladspa->reverse) {
    GST_BUFFER_TIMESTAMP (buffer) =
        ladspa->timestamp_offset + ladspa->next_time;
    GST_BUFFER_DURATION (buffer) = next_time - ladspa->next_time;
  } else {
    GST_BUFFER_TIMESTAMP (buffer) = ladspa->timestamp_offset + next_time;
    GST_BUFFER_DURATION (buffer) = ladspa->next_time - next_time;
  }

  /* apply GstController-bound property values for this timestamp */
  gst_object_sync_values (GST_OBJECT (ladspa), GST_BUFFER_TIMESTAMP (buffer));

  ladspa->next_time = next_time;
  ladspa->next_sample = next_sample;
  ladspa->next_byte = next_byte;

  GST_LOG_OBJECT (ladspa, "generating %u samples at ts %" GST_TIME_FORMAT,
      ladspa->generate_samples_per_buffer,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);

  /* run the LADSPA plugin in-place on the mapped output buffer */
  gst_ladspa_transform (&ladspa->ladspa, map.data,
      ladspa->generate_samples_per_buffer, NULL);

  gst_buffer_unmap (buffer, &map);

  return GST_FLOW_OK;
}
/* Chain function: accumulates incoming data in an adapter and splits it
 * into parts at multipart boundaries.  Each part is pushed on a per-MIME
 * source pad (created on demand, with segment + container-format tag
 * events pushed first).  DISCONT input flushes the adapter and marks all
 * pads discont. */
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (parent);
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GSList *l;

    /* discontinuity: invalidate buffered data and flag all source pads */
    for (l = multipart->srcpads; l != NULL; l = l->next) {
      GstMultipartPad *srcpad = l->data;

      srcpad->discont = TRUE;
    }
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    /* parse the part header first (negative return = need more data
     * or hard error; handled at 'nodata' below) */
    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    /* locate the next boundary; datalen = payload bytes before it */
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      GstClockTime ts;

      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);

      /* remember the upstream pts of the data we are about to take */
      ts = gst_adapter_prev_pts (adapter, NULL);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      if (created) {
        GstTagList *tags;
        GstSegment segment;

        gst_segment_init (&segment, GST_FORMAT_TIME);

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));

        tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
      }

      outbuf = gst_buffer_make_writable (outbuf);
      /* only timestamp the first buffer for a given upstream pts; later
       * parts from the same input buffer get no timestamp */
      if (srcpad->last_ts == GST_CLOCK_TIME_NONE || srcpad->last_ts != ts) {
        GST_BUFFER_TIMESTAMP (outbuf) = ts;
        srcpad->last_ts = ts;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
      }

      if (srcpad->discont) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
        srcpad->discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
      }

      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      res = gst_pad_push (srcpad->pad, outbuf);
      /* combine per-pad flow returns into an element-wide result */
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  /* 'size' carries the last parser result; map hard errors / EOS markers
   * to flow returns, otherwise report the last push result */
  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_EOS;

  return res;
}
/* GstNonstreamAudioDecoder "load from custom" vfunc: configures a new UADE
 * state from the element's properties, loads the file at 'location',
 * selects the initial subsong, pushes metadata tags and reports the
 * initial position/output mode back to the base class.
 *
 * Returns TRUE on success; FALSE if no location is set or any UADE call
 * fails (errors are logged).
 *
 * NOTE(review): initial_num_loops is never written here — presumably the
 * base class pre-initializes it; confirm against the caller. */
static gboolean gst_uade_raw_dec_load_from_custom(GstNonstreamAudioDecoder *dec, guint initial_subsong, G_GNUC_UNUSED GstNonstreamAudioSubsongMode initial_subsong_mode, GstClockTime *initial_position, GstNonstreamAudioOutputMode *initial_output_mode, gint *initial_num_loops)
{
	GstUadeRawDec *uade_raw_dec;
	int ret;
	GstTagList *tags;
	struct uade_config *config;
	gchar *tmpstr;

	uade_raw_dec = GST_UADE_RAW_DEC(dec);

	/* a state must not already exist; loading twice is a programming error */
	g_assert(uade_raw_dec->state == NULL);

	if (uade_raw_dec->location == NULL)
	{
		GST_ERROR_OBJECT(uade_raw_dec, "no location set -> nothing to play");
		return FALSE;
	}

	GST_TRACE_OBJECT(uade_raw_dec, "attempting to load music file \"%s\"", uade_raw_dec->location);

	/* build a uade_config from the element properties */
	config = uade_new_config();
	uade_config_set_option(config, UC_ONE_SUBSONG, NULL);
	uade_config_set_option(config, UC_NO_EP_END, NULL);
	uade_config_set_option(config, UC_UADECORE_FILE, uade_raw_dec->uadecore_file);
	uade_config_set_option(config, UC_BASE_DIR, uade_raw_dec->base_directory);

	switch (uade_raw_dec->filter_type)
	{
		case GST_UADE_FILTER_TYPE_A500:
			uade_config_set_option(config, UC_FILTER_TYPE, "a500");
			break;
		case GST_UADE_FILTER_TYPE_A1200:
			uade_config_set_option(config, UC_FILTER_TYPE, "a1200");
			break;
		default:
			break;
	}

	switch (uade_raw_dec->headphone_mode)
	{
		case GST_UADE_HEADPHONE_MODE_NONE:
			uade_config_set_option(config, UC_NO_HEADPHONES, NULL);
			break;
		case GST_UADE_HEADPHONE_MODE_1:
			uade_config_set_option(config, UC_HEADPHONES, NULL);
			break;
		case GST_UADE_HEADPHONE_MODE_2:
			uade_config_set_option(config, UC_HEADPHONES2, NULL);
			break;
		default:
			break;
	}

	if (!(uade_raw_dec->use_filter))
		uade_config_set_option(config, UC_NO_FILTER, NULL);

	/* this must be called AFTER the filter type is set */
	tmpstr = g_strdup_printf("%f", uade_raw_dec->gain);
	uade_config_set_option(config, UC_GAIN, tmpstr);
	g_free(tmpstr);

	if (!(uade_raw_dec->use_postprocessing))
		uade_config_set_option(config, UC_NO_POSTPROCESSING, NULL);

	tmpstr = g_strdup_printf("%f", uade_raw_dec->panning);
	uade_config_set_option(config, UC_PANNING_VALUE, tmpstr);
	g_free(tmpstr);

	/* uade_new_state() copies the config; the config struct itself is
	 * malloc'd by libuade, hence plain free() */
	uade_raw_dec->state = uade_new_state(config);
	free(config);

	/* Set output format */
	gst_nonstream_audio_decoder_set_output_format_simple(
		GST_NONSTREAM_AUDIO_DECODER(uade_raw_dec),
		uade_get_sampling_rate(uade_raw_dec->state),
		GST_AUDIO_FORMAT_S16,
		2
	);

	/* -1: let UADE pick the default subsong for now */
	ret = uade_play(uade_raw_dec->location, -1, uade_raw_dec->state);
	if (!ret)
	{
		GST_ERROR_OBJECT(uade_raw_dec, "uade_play failed");
		return FALSE;
	}

	GST_TRACE_OBJECT(uade_raw_dec, "loading successful, retrieving song information");

	uade_raw_dec->playback_started = TRUE;

	uade_raw_dec->info = uade_get_song_info(uade_raw_dec->state);
	if (uade_raw_dec->info == NULL)
	{
		GST_ERROR_OBJECT(uade_raw_dec, "uade_get_song_info failed");
		return FALSE;
	}

	GST_INFO_OBJECT(uade_raw_dec, "min subsong: %d max subsong: %d", uade_raw_dec->info->subsongs.min, uade_raw_dec->info->subsongs.max);

	/* initial_subsong is 0-based for the caller; UADE subsong numbers
	 * start at subsongs.min, so offset and clamp into the valid range */
	uade_raw_dec->current_subsong = CLAMP(((int)initial_subsong) + uade_raw_dec->info->subsongs.min, uade_raw_dec->info->subsongs.min, uade_raw_dec->info->subsongs.max);
	if (uade_seek(UADE_SEEK_SUBSONG_RELATIVE, 0, uade_raw_dec->current_subsong, uade_raw_dec->state) != 0)
	{
		GST_ERROR_OBJECT(uade_raw_dec, "seeking to initial subsong failed");
		return FALSE;
	}

	*initial_position = 0;
	/* NOTE(review): "NONSTREM" looks misspelled, but presumably matches
	 * the gstnonstreamaudio header this builds against — do not "fix"
	 * without checking that header */
	*initial_output_mode = GST_NONSTREM_AUDIO_OUTPUT_MODE_STEADY;

	tags = gst_tag_list_new_empty();
	if (uade_raw_dec->info->modulename[0] != 0)
		gst_tag_list_add(tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, uade_raw_dec->info->modulename, NULL);
	if (uade_raw_dec->info->formatname[0] != 0)
		gst_tag_list_add(tags, GST_TAG_MERGE_APPEND, GST_TAG_CONTAINER_FORMAT, uade_raw_dec->info->formatname, NULL);
	if (uade_raw_dec->info->playername[0] != 0)
		gst_tag_list_add(tags, GST_TAG_MERGE_APPEND, GST_TAG_APPLICATION_NAME, uade_raw_dec->info->playername, NULL);
	/* the tag event takes ownership of 'tags' */
	gst_pad_push_event(GST_NONSTREAM_AUDIO_DECODER_SRC_PAD(uade_raw_dec), gst_event_new_tag(tags));

	return TRUE;
}