static GstFlowReturn
gst_asteriskh263_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstAsteriskh263 *asteriskh263;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  asteriskh263 = GST_ASTERISK_H263 (parent);

  {
    gint payload_len;
    guint8 *payload;
    gboolean M;
    guint32 timestamp;
    guint32 samples;
    guint16 asterisk_len;
    GstRTPBuffer rtp = { NULL };
    GstMapInfo map;

    if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp))
      goto bad_packet;

    payload_len = gst_rtp_buffer_get_payload_len (&rtp);
    payload = gst_rtp_buffer_get_payload (&rtp);
    M = gst_rtp_buffer_get_marker (&rtp);
    timestamp = gst_rtp_buffer_get_timestamp (&rtp);

    outbuf = gst_buffer_new_and_alloc (payload_len +
        GST_ASTERISKH263_HEADER_LEN);

    /* build the asterisk header */
    asterisk_len = payload_len;
    if (M)
      asterisk_len |= 0x8000;
    if (!asteriskh263->lastts)
      asteriskh263->lastts = timestamp;
    samples = timestamp - asteriskh263->lastts;
    asteriskh263->lastts = timestamp;

    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
    GST_ASTERISKH263_HEADER_TIMESTAMP (map.data) = g_htonl (samples);
    GST_ASTERISKH263_HEADER_LENGTH (map.data) = g_htons (asterisk_len);

    /* copy the data into place */
    memcpy (map.data + GST_ASTERISKH263_HEADER_LEN, payload, payload_len);

    gst_buffer_unmap (outbuf, &map);
    /* unmap the input only after the payload has been copied: the payload
     * pointer is only valid while the RTP buffer stays mapped */
    gst_rtp_buffer_unmap (&rtp);

    GST_BUFFER_PTS (outbuf) = timestamp;

    if (!gst_pad_has_current_caps (asteriskh263->srcpad)) {
      GstCaps *caps;

      caps = gst_pad_get_pad_template_caps (asteriskh263->srcpad);
      gst_pad_set_caps (asteriskh263->srcpad, caps);
      gst_caps_unref (caps);
    }

    ret = gst_pad_push (asteriskh263->srcpad, outbuf);

    gst_buffer_unref (buf);
  }

  return ret;

bad_packet:
  {
    GST_DEBUG ("Packet does not validate");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
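/* For reference, a sketch of how the GST_ASTERISKH263_HEADER_* accessors
 * used above could be defined. These definitions are inferred from the
 * usage in the chain function (a 6-byte header: a 32-bit timestamp followed
 * by a 16-bit length, both stored in network byte order via
 * g_htonl()/g_htons()); the real definitions live in the plugin sources and
 * may differ. */
#define GST_ASTERISKH263_HEADER_LEN 6
#define GST_ASTERISKH263_HEADER_TIMESTAMP(data) \
    (*(guint32 *) (data))                  /* bytes 0-3: timestamp */
#define GST_ASTERISKH263_HEADER_LENGTH(data) \
    (*(guint16 *) ((guint8 *) (data) + 4)) /* bytes 4-5: length, high bit = M */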
static GstFlowReturn
new_sample_callback (GstAppSink * sink, gpointer user_data)
{
  GstBuffer *buffer;
  GstSample *sample;
  Encoder *encoder = (Encoder *) user_data;

  *(encoder->output->heartbeat) = gst_clock_get_time (encoder->system_clock);
  sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
  buffer = gst_sample_get_buffer (sample);

  if (sem_wait (encoder->output->semaphore) == -1) {
    GST_ERROR ("new_sample_callback sem_wait failure: %s",
        g_strerror (errno));
    gst_sample_unref (sample);
    return GST_FLOW_OK;
  }

  (*(encoder->output->total_count)) += gst_buffer_get_size (buffer);

  /* update head_addr, free enough memory for the current buffer. */
  while (cache_free (encoder) <= gst_buffer_get_size (buffer) + 12) {
    /* timestamp + gop size = 12 */
    move_head (encoder);
  }

  if (encoder->has_tssegment && encoder->has_m3u8_output) {
    if ((encoder->duration_accumulation >= encoder->segment_duration) ||
        ((encoder->segment_duration - encoder->duration_accumulation) <
            500000000)) {
      encoder->last_segment_duration = encoder->duration_accumulation;
      encoder->last_running_time = GST_BUFFER_PTS (buffer);
      encoder->duration_accumulation = 0;
    }
    encoder->duration_accumulation += GST_BUFFER_DURATION (buffer);
  }

  /*
   * random access point found:
   * 1. video encoder and an IDR frame was found;
   * 2. audio-only encoder and current pts >= last_running_time;
   * 3. tssegment outputs every buffer with a random access point.
   */
  if ((encoder->has_video &&
          !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) ||
      (encoder->has_audio_only &&
          (GST_BUFFER_PTS (buffer) >= encoder->last_running_time)) ||
      (encoder->has_tssegment &&
          (GST_BUFFER_PTS (buffer) >= encoder->last_running_time))) {
    if (encoder->has_m3u8_output == FALSE) {
      /* no m3u8 output */
      move_last_rap (encoder, buffer);
    } else if (GST_BUFFER_PTS (buffer) + 90000000 >=
        encoder->last_running_time) {
      move_last_rap (encoder, buffer);
      send_msg (encoder);
    } else if (encoder->is_first_key) {
      /* move_last_rap if it's the first key frame, even with m3u8 output */
      move_last_rap (encoder, buffer);
      send_msg (encoder);
      encoder->is_first_key = FALSE;
    }
  }

  /* udp streaming? */
  if (encoder->udpstreaming) {
    udp_streaming (encoder, buffer);
  }

  /*
   * copy buffer to cache,
   * update tail_addr.
   */
  copy_buffer (encoder, buffer);
  sem_post (encoder->output->semaphore);
  gst_sample_unref (sample);

  return GST_FLOW_OK;
}
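/* A minimal sketch (not part of the original code) of how a callback like
 * new_sample_callback() is typically attached to an appsink. The helper
 * name and Encoder pointer are placeholders; GstAppSinkCallbacks and
 * gst_app_sink_set_callbacks() are standard GstAppSink API. */
static void
attach_encoder_sink (GstElement * appsink, Encoder * encoder)
{
  GstAppSinkCallbacks callbacks = { NULL };

  callbacks.new_sample = new_sample_callback;
  gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &callbacks,
      encoder, NULL);
  /* "emit-signals" stays FALSE by default, so the callback is the only
   * consumer and no "new-sample" signal marshalling overhead is paid. */
}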
static void
need_data_callback (GstAppSrc * src, guint length, gpointer user_data)
{
  EncoderStream *stream = (EncoderStream *) user_data;
  gint current_position;
  GstBuffer *buffer;
  GstPad *pad;
  GstEvent *event;
  Encoder *encoder;

  current_position = (stream->current_position + 1) % SOURCE_RING_SIZE;
  for (;;) {
    if (stream->state != NULL) {
      stream->state->last_heartbeat =
          gst_clock_get_time (stream->system_clock);
    }

    /* ensure the next buffer isn't the current buffer */
    if ((current_position == stream->source->current_position) ||
        stream->source->current_position == -1) {
      if ((current_position == stream->source->current_position) &&
          stream->source->eos) {
        GstFlowReturn ret;

        ret = gst_app_src_end_of_stream (src);
        GST_INFO ("EOS of source %s, tell encoder %s, return %s",
            stream->source->name, stream->name, gst_flow_get_name (ret));
        break;
      }
      GST_DEBUG ("waiting for %s source to be ready", stream->name);
      g_usleep (50000);         /* waiting 50ms */
      continue;
    }

    /* first buffer, set caps. */
    if (stream->current_position == -1) {
      GstCaps *caps;
      gchar *caps_str;

      caps = gst_sample_get_caps (stream->source->ring[current_position]);
      gst_app_src_set_caps (src, caps);
      /* gst_caps_to_string () returns a newly allocated string: free it */
      caps_str = gst_caps_to_string (caps);
      GST_INFO ("set stream %s caps: %s", stream->name, caps_str);
      g_free (caps_str);
    }

    buffer = gst_sample_get_buffer (stream->source->ring[current_position]);
    GST_DEBUG ("%s encoder position %d; timestamp %" GST_TIME_FORMAT
        " source position %d",
        stream->name, stream->current_position,
        GST_TIME_ARGS (GST_BUFFER_PTS (buffer)),
        stream->source->current_position);

    encoder = stream->encoder;
    /* segment_duration != 0? with m3u8playlist conf */
    if (stream->is_segment_reference) {
      if (encoder->duration_accumulation >= encoder->segment_duration) {
        GstClockTime running_time;

        encoder->last_segment_duration = encoder->duration_accumulation;
        running_time = GST_BUFFER_PTS (buffer);

        /* force key unit? */
        if (encoder->has_video) {
          pad = gst_element_get_static_pad ((GstElement *) src, "src");
          event = gst_video_event_new_downstream_force_key_unit (running_time,
              running_time, running_time, TRUE, encoder->force_key_count);
          gst_pad_push_event (pad, event);
        } else {
          encoder->last_running_time = running_time;
        }

        encoder->force_key_count++;
        encoder->duration_accumulation = 0;
      }
      encoder->duration_accumulation += GST_BUFFER_DURATION (buffer);
    }

    /* push buffer */
    if (gst_app_src_push_buffer (src, gst_buffer_ref (buffer)) != GST_FLOW_OK) {
      GST_ERROR ("%s, gst_app_src_push_buffer failure.", stream->name);
    }

    if (stream->state != NULL) {
      stream->state->current_timestamp = GST_BUFFER_PTS (buffer);
    }
    break;
  }
  stream->current_position = current_position;
}
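/* The wait loop above encodes a single-producer / single-consumer ring
 * invariant. A condensed sketch of the readiness test with hypothetical
 * names (the real fields live in this project's EncoderStream/Source
 * structures): the reader may take slot `next` only when the writer has
 * produced at least one sample and is not still filling that slot. */
static gboolean
ring_slot_ready (gint next, gint writer_position, gboolean eos)
{
  if (writer_position == -1)
    return FALSE;               /* nothing produced yet */
  if (next == writer_position)
    return eos;                 /* only readable once the source hit EOS */
  return TRUE;
}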
static GstFlowReturn
gst_multi_file_sink_write_buffer (GstMultiFileSink * multifilesink,
    GstBuffer * buffer)
{
  GstMapInfo map;
  size_t ret;                   /* fwrite () returns a size_t item count */
  gboolean first_file = TRUE;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  switch (multifilesink->next_file) {
    case GST_MULTI_FILE_SINK_NEXT_BUFFER:
      if (multifilesink->files != NULL)
        first_file = FALSE;
      if (!gst_multi_file_sink_open_next_file (multifilesink))
        goto stdio_write_error;
      if (first_file == FALSE)
        gst_multi_file_sink_write_stream_headers (multifilesink);
      GST_DEBUG_OBJECT (multifilesink,
          "Writing buffer data (%" G_GSIZE_FORMAT " bytes) to new file",
          map.size);
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1) {
        gst_multi_file_sink_close_file (multifilesink, NULL);
        goto stdio_write_error;
      }
      gst_multi_file_sink_close_file (multifilesink, buffer);
      break;
    case GST_MULTI_FILE_SINK_NEXT_DISCONT:
      if (GST_BUFFER_IS_DISCONT (buffer)) {
        if (multifilesink->file)
          gst_multi_file_sink_close_file (multifilesink, buffer);
      }
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
      }
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;
      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_FRAME:
      if (multifilesink->next_segment == GST_CLOCK_TIME_NONE) {
        if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
          multifilesink->next_segment =
              GST_BUFFER_TIMESTAMP (buffer) + 10 * GST_SECOND;
        }
      }
      if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
          GST_BUFFER_TIMESTAMP (buffer) >= multifilesink->next_segment &&
          !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        if (multifilesink->file) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
        multifilesink->next_segment += 10 * GST_SECOND;
      }
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;
      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT:
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
        /* we don't need to write stream headers here, they will be
         * inserted in the stream by upstream elements if key unit events
         * have all_headers=true set */
      }
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;
      break;
    case GST_MULTI_FILE_SINK_NEXT_MAX_SIZE:{
      guint64 new_size;

      new_size = multifilesink->cur_file_size + map.size;
      if (new_size > multifilesink->max_file_size) {
        GST_INFO_OBJECT (multifilesink,
            "current size: %" G_GUINT64_FORMAT ", new_size: %"
            G_GUINT64_FORMAT ", max. size %" G_GUINT64_FORMAT,
            multifilesink->cur_file_size, new_size,
            multifilesink->max_file_size);
        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;
      multifilesink->cur_file_size += map.size;
      break;
    }
    case GST_MULTI_FILE_SINK_NEXT_MAX_DURATION:{
      GstClockTime new_duration = 0;

      if (GST_BUFFER_PTS_IS_VALID (buffer) &&
          GST_CLOCK_TIME_IS_VALID (multifilesink->file_pts)) {
        /* The new duration will extend to this new buffer pts ... */
        new_duration = GST_BUFFER_PTS (buffer) - multifilesink->file_pts;
        /* ... and its duration (if it has one) */
        if (GST_BUFFER_DURATION_IS_VALID (buffer))
          new_duration += GST_BUFFER_DURATION (buffer);
      }
      if (new_duration > multifilesink->max_file_duration) {
        GST_INFO_OBJECT (multifilesink,
            "new_duration: %" G_GUINT64_FORMAT ", max. duration %"
            G_GUINT64_FORMAT, new_duration,
            multifilesink->max_file_duration);
        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
        multifilesink->file_pts = GST_BUFFER_PTS (buffer);
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }
      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;
      break;
    }
    default:
      g_assert_not_reached ();
  }

  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_OK;

/* ERRORS */
stdio_write_error:
  switch (errno) {
    case ENOSPC:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
      break;
    default:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
  }
  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_ERROR;
}
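/* A minimal usage sketch showing how the NEXT_MAX_SIZE branch above is
 * exercised from application code. "location", "next-file" and
 * "max-file-size" are documented multifilesink properties; the pipeline
 * string itself is only an example. */
static GstElement *
make_chunked_recorder (void)
{
  GError *error = NULL;
  GstElement *pipeline = gst_parse_launch ("videotestsrc is-live=true ! "
      "x264enc ! multifilesink location=chunk-%05d.h264 "
      "next-file=max-size max-file-size=1048576", &error);

  if (error != NULL) {
    g_printerr ("parse error: %s\n", error->message);
    g_clear_error (&error);
    return NULL;
  }
  return pipeline;
}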
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime rundts = GST_CLOCK_TIME_NONE;
  GstClockTime runpts = GST_CLOCK_TIME_NONE;
  GstClockTime ts, duration, runtimestamp;
  gsize size;

  size = gst_buffer_get_size (buf);

  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0)
      goto error_after;
  }

  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability)
      goto dropped;
  }

  if (GST_BUFFER_FLAG_IS_SET (buf, identity->drop_buffer_flags))
    goto dropped;

  if (identity->dump) {
    GstMapInfo info;

    if (gst_buffer_map (buf, &info, GST_MAP_READ)) {
      gst_util_dump_mem (info.data, info.size);
      gst_buffer_unmap (buf, &info);
    }
  }

  if (!identity->silent) {
    gst_identity_update_last_message_for_buffer (identity, "chain", buf,
        size);
  }

  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = time;
    GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME) {
    rundts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_DTS (buf));
    runpts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_PTS (buf));
  }

  if (GST_CLOCK_TIME_IS_VALID (rundts))
    runtimestamp = rundts;
  else if (GST_CLOCK_TIME_IS_VALID (runpts))
    runtimestamp = runpts;
  else
    runtimestamp = 0;
  ret = gst_identity_do_sync (identity, runtimestamp);

  identity->offset += size;

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_DTS (buf) = rundts;
    GST_BUFFER_PTS (buf) = runpts;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;

  /* ERRORS */
error_after:
  {
    GST_ELEMENT_ERROR (identity, CORE, FAILED,
        (_("Failed after iterations as requested.")), (NULL));
    return GST_FLOW_ERROR;
  }
dropped:
  {
    if (!identity->silent) {
      gst_identity_update_last_message_for_buffer (identity, "dropping",
          buf, size);
    }
    ts = GST_BUFFER_TIMESTAMP (buf);
    if (GST_CLOCK_TIME_IS_VALID (ts)) {
      duration = GST_BUFFER_DURATION (buf);
      gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (identity),
          gst_event_new_gap (ts, duration));
    }
    /* return DROPPED to basetransform. */
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
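/* A sketch (not from the original element) of the application-side consumer
 * of the SIGNAL_HANDOFF emission above. "handoff" and "signal-handoffs" are
 * the documented identity signal and property; the callback and helper
 * names are ours. */
static void
on_identity_handoff (GstElement * identity, GstBuffer * buf,
    gpointer user_data)
{
  GST_LOG ("handoff: buffer pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
}

static void
watch_identity (GstElement * identity)
{
  /* handoffs are only emitted when the property is enabled */
  g_object_set (identity, "signal-handoffs", TRUE, NULL);
  g_signal_connect (identity, "handoff",
      G_CALLBACK (on_identity_handoff), NULL);
}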
static GstFlowReturn
gst_ffmpegauddec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf)
{
  GstFFMpegAudDec *ffmpegdec;
  GstFFMpegAudDecClass *oclass;
  guint8 *data, *bdata;
  GstMapInfo map;
  gint size, bsize, len, have_data;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean do_padding;

  ffmpegdec = (GstFFMpegAudDec *) decoder;

  if (G_UNLIKELY (!ffmpegdec->opened))
    goto not_negotiated;

  if (inbuf == NULL) {
    gst_ffmpegauddec_drain (ffmpegdec);
    return GST_FLOW_OK;
  }
  inbuf = gst_buffer_ref (inbuf);

  oclass = (GstFFMpegAudDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));

  GST_LOG_OBJECT (ffmpegdec,
      "Received new data of size %" G_GSIZE_FORMAT ", offset:%"
      G_GUINT64_FORMAT ", ts:%" GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT,
      gst_buffer_get_size (inbuf), GST_BUFFER_OFFSET (inbuf),
      GST_TIME_ARGS (GST_BUFFER_PTS (inbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)));

  /* workarounds, functions write to buffers:
   * libavcodec/svq1.c:svq1_decode_frame writes to the given buffer.
   * libavcodec/svq3.c:svq3_decode_slice_header too.
   * ffmpeg devs know about it and will fix it (they said). */
  if (oclass->in_plugin->id == AV_CODEC_ID_SVQ1 ||
      oclass->in_plugin->id == AV_CODEC_ID_SVQ3) {
    inbuf = gst_buffer_make_writable (inbuf);
  }

  gst_buffer_map (inbuf, &map, GST_MAP_READ);

  bdata = map.data;
  bsize = map.size;

  if (bsize > 0 && (!GST_MEMORY_IS_ZERO_PADDED (map.memory)
          || (map.maxsize - map.size) < FF_INPUT_BUFFER_PADDING_SIZE)) {
    /* add padding */
    if (ffmpegdec->padded_size < bsize + FF_INPUT_BUFFER_PADDING_SIZE) {
      ffmpegdec->padded_size = bsize + FF_INPUT_BUFFER_PADDING_SIZE;
      ffmpegdec->padded =
          g_realloc (ffmpegdec->padded, ffmpegdec->padded_size);
      GST_LOG_OBJECT (ffmpegdec, "resized padding buffer to %d",
          ffmpegdec->padded_size);
    }
    GST_CAT_TRACE_OBJECT (CAT_PERFORMANCE, ffmpegdec,
        "Copy input to add padding");
    memcpy (ffmpegdec->padded, bdata, bsize);
    memset (ffmpegdec->padded + bsize, 0, FF_INPUT_BUFFER_PADDING_SIZE);

    bdata = ffmpegdec->padded;
    do_padding = TRUE;
  } else {
    do_padding = FALSE;
  }

  do {
    guint8 tmp_padding[FF_INPUT_BUFFER_PADDING_SIZE];

    data = bdata;
    size = bsize;

    if (do_padding) {
      /* add temporary padding */
      GST_CAT_TRACE_OBJECT (CAT_PERFORMANCE, ffmpegdec,
          "Add temporary input padding");
      memcpy (tmp_padding, data + size, FF_INPUT_BUFFER_PADDING_SIZE);
      memset (data + size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
    }

    /* decode a frame of audio now */
    len = gst_ffmpegauddec_frame (ffmpegdec, data, size, &have_data, &ret);

    if (do_padding) {
      memcpy (data + size, tmp_padding, FF_INPUT_BUFFER_PADDING_SIZE);
    }

    if (ret != GST_FLOW_OK) {
      GST_LOG_OBJECT (ffmpegdec, "breaking because of flow ret %s",
          gst_flow_get_name (ret));
      /* bad flow return, make sure we discard all data and exit */
      bsize = 0;
      break;
    }

    if (len == 0 && have_data == 0) {
      /* nothing was decoded, this could be because no data was available or
       * because we were skipping frames.
       * If we have no context we must exit and wait for more data, we keep
       * the data we tried. */
      GST_LOG_OBJECT (ffmpegdec, "Decoding didn't return any data, breaking");
      break;
    } else if (len < 0) {
      /* a decoding error happened, we must break and try again with next
       * data. */
      GST_LOG_OBJECT (ffmpegdec, "Decoding error, breaking");
      bsize = 0;
      break;
    }

    /* prepare for the next round, for codecs with a context we did this
     * already when using the parser. */
    bsize -= len;
    bdata += len;

    do_padding = TRUE;

    GST_LOG_OBJECT (ffmpegdec, "Before (while bsize>0). bsize:%d , bdata:%p",
        bsize, bdata);
  } while (bsize > 0);

  gst_buffer_unmap (inbuf, &map);
  gst_buffer_unref (inbuf);

  if (ffmpegdec->outbuf)
    ret = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (ffmpegdec),
        ffmpegdec->outbuf, 1);
  ffmpegdec->outbuf = NULL;

  if (bsize > 0) {
    GST_DEBUG_OBJECT (ffmpegdec, "Dropping %d bytes of data", bsize);
  }

  return ret;

  /* ERRORS */
not_negotiated:
  {
    oclass = (GstFFMpegAudDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
    GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL),
        ("avdec_%s: input format was not set before data start",
            oclass->in_plugin->name));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
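/* The padding dance above exists because libavcodec requires
 * FF_INPUT_BUFFER_PADDING_SIZE zeroed bytes past the end of the input so
 * its bitstream reader may overread safely. A condensed sketch of the same
 * idea with a hypothetical helper (only g_malloc/memcpy/memset are used): */
static guint8 *
ensure_av_padding (const guint8 * data, gsize size)
{
  guint8 *padded = g_malloc (size + FF_INPUT_BUFFER_PADDING_SIZE);

  memcpy (padded, data, size);  /* payload */
  memset (padded + size, 0, FF_INPUT_BUFFER_PADDING_SIZE);      /* zero tail */
  return padded;                /* caller frees with g_free () */
}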
static gboolean
gst_rtp_ulpfec_dec_handle_packet_loss (GstRtpUlpFecDec * self, guint16 seqnum,
    GstClockTime timestamp, GstClockTime duration)
{
  gint caps_pt = self->have_caps_pt ? self->caps_pt : -1;
  gboolean ret = TRUE;
  GstBufferList *buflist =
      rtp_storage_get_packets_for_recovery (self->storage, self->fec_pt,
      self->caps_ssrc, seqnum);

  if (buflist) {
    GstBuffer *recovered_buffer = NULL;
    guint16 recovered_seq = 0;
    guint8 recovered_pt = 0;

    gst_rtp_ulpfec_dec_start (self, buflist, self->fec_pt, seqnum);

    while (NULL != (recovered_buffer =
            gst_rtp_ulpfec_dec_recover (self, self->caps_ssrc, caps_pt,
                &recovered_pt, &recovered_seq))) {
      if (seqnum == recovered_seq) {
        GstBuffer *sent_buffer;
        GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

        recovered_buffer = gst_buffer_make_writable (recovered_buffer);
        GST_BUFFER_PTS (recovered_buffer) = timestamp;
        /* GST_BUFFER_DURATION (recovered_buffer) = duration;
         * JB does not set the duration, so we will not either */

        if (!self->lost_packet_from_storage)
          rtp_storage_put_recovered_packet (self->storage, recovered_buffer,
              recovered_pt, self->caps_ssrc, recovered_seq);

        GST_DEBUG_OBJECT (self,
            "Pushing recovered packet ssrc=0x%08x seq=%u %" GST_PTR_FORMAT,
            self->caps_ssrc, seqnum, recovered_buffer);

        sent_buffer = gst_buffer_copy_deep (recovered_buffer);

        gst_rtp_buffer_map (sent_buffer, GST_MAP_WRITE, &rtp);
        gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++);
        gst_rtp_buffer_unmap (&rtp);

        ret = FALSE;
        self->unset_discont_flag = TRUE;
        self->chain_return_val = gst_pad_push (self->srcpad, sent_buffer);
        break;
      }

      rtp_storage_put_recovered_packet (self->storage, recovered_buffer,
          recovered_pt, self->caps_ssrc, recovered_seq);
    }

    gst_rtp_ulpfec_dec_stop (self);
    gst_buffer_list_unref (buflist);
  }

  GST_DEBUG_OBJECT (self, "Packet lost ssrc=0x%08x seq=%u",
      self->caps_ssrc, seqnum);

  return ret;
}
/* Called with the object lock for both the element and pad held,
 * as well as the aagg lock */
static gboolean
gst_audio_aggregator_fill_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf)
{
  GstClockTime start_time, end_time;
  gboolean discont = FALSE;
  guint64 start_offset, end_offset;
  gint rate, bpf;
  GstAggregator *agg = GST_AGGREGATOR (aagg);
  GstAggregatorPad *aggpad = GST_AGGREGATOR_PAD (pad);

  g_assert (pad->priv->buffer == NULL);

  rate = GST_AUDIO_INFO_RATE (&pad->info);
  bpf = GST_AUDIO_INFO_BPF (&pad->info);

  pad->priv->position = 0;
  pad->priv->size = gst_buffer_get_size (inbuf) / bpf;

  if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
    if (pad->priv->output_offset == -1)
      pad->priv->output_offset = aagg->priv->offset;
    if (pad->priv->next_offset == -1)
      pad->priv->next_offset = pad->priv->size;
    else
      pad->priv->next_offset += pad->priv->size;
    goto done;
  }

  start_time = GST_BUFFER_PTS (inbuf);
  end_time = start_time +
      gst_util_uint64_scale_ceil (pad->priv->size, GST_SECOND, rate);

  /* Clipping should've ensured this */
  g_assert (start_time >= aggpad->segment.start);

  start_offset = gst_util_uint64_scale (start_time - aggpad->segment.start,
      rate, GST_SECOND);
  end_offset = start_offset + pad->priv->size;

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || pad->priv->new_segment || pad->priv->next_offset == -1) {
    discont = TRUE;
    pad->priv->new_segment = FALSE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= pad->priv->next_offset)
      diff = pad->priv->next_offset - start_offset;
    else
      diff = start_offset - pad->priv->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (aagg->priv->alignment_threshold, rate,
        GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (aagg->priv->discont_wait > 0) {
        if (pad->priv->discont_time == GST_CLOCK_TIME_NONE) {
          pad->priv->discont_time = start_time;
        } else if (start_time - pad->priv->discont_time >=
            aagg->priv->discont_wait) {
          discont = TRUE;
          pad->priv->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (pad->priv->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      pad->priv->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (pad->priv->next_offset != -1)
      GST_DEBUG_OBJECT (pad, "Have discont. Expected %" G_GUINT64_FORMAT
          ", got %" G_GUINT64_FORMAT, pad->priv->next_offset, start_offset);
    pad->priv->output_offset = -1;
    pad->priv->next_offset = end_offset;
  } else {
    pad->priv->next_offset += pad->priv->size;
  }

  if (pad->priv->output_offset == -1) {
    GstClockTime start_running_time;
    GstClockTime end_running_time;
    guint64 start_output_offset;
    guint64 end_output_offset;

    start_running_time =
        gst_segment_to_running_time (&aggpad->segment, GST_FORMAT_TIME,
        start_time);
    end_running_time =
        gst_segment_to_running_time (&aggpad->segment, GST_FORMAT_TIME,
        end_time);

    /* Convert to position in the output segment */
    start_output_offset =
        gst_segment_position_from_running_time (&agg->segment,
        GST_FORMAT_TIME, start_running_time);
    if (start_output_offset != -1)
      start_output_offset =
          gst_util_uint64_scale (start_output_offset - agg->segment.start,
          rate, GST_SECOND);

    end_output_offset =
        gst_segment_position_from_running_time (&agg->segment,
        GST_FORMAT_TIME, end_running_time);
    if (end_output_offset != -1)
      end_output_offset =
          gst_util_uint64_scale (end_output_offset - agg->segment.start,
          rate, GST_SECOND);

    if (start_output_offset == -1 && end_output_offset == -1) {
      /* Outside output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad, "Buffer outside output segment");
      return FALSE;
    }

    /* Calculate end_output_offset if it was outside the output segment */
    if (end_output_offset == -1)
      end_output_offset = start_output_offset + pad->priv->size;

    if (end_output_offset < aagg->priv->offset) {
      /* Before output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad,
          "Buffer before segment or current position: %" G_GUINT64_FORMAT
          " < %" G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
      return FALSE;
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset) {
      guint diff;

      if (start_output_offset == -1 && end_output_offset < pad->priv->size) {
        diff = pad->priv->size - end_output_offset + aagg->priv->offset;
      } else if (start_output_offset == -1) {
        start_output_offset = end_output_offset - pad->priv->size;

        if (start_output_offset < aagg->priv->offset)
          diff = aagg->priv->offset - start_output_offset;
        else
          diff = 0;
      } else {
        diff = aagg->priv->offset - start_output_offset;
      }

      pad->priv->position += diff;
      if (pad->priv->position >= pad->priv->size) {
        /* Empty buffer, drop */
        gst_buffer_unref (inbuf);
        pad->priv->buffer = NULL;
        pad->priv->position = 0;
        pad->priv->size = 0;
        pad->priv->output_offset = -1;
        GST_DEBUG_OBJECT (pad,
            "Buffer before segment or current position: %" G_GUINT64_FORMAT
            " < %" G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
        return FALSE;
      }
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset)
      pad->priv->output_offset = aagg->priv->offset;
    else
      pad->priv->output_offset = start_output_offset;

    GST_DEBUG_OBJECT (pad,
        "Buffer resynced: Pad offset %" G_GUINT64_FORMAT
        ", current audio aggregator offset %" G_GINT64_FORMAT,
        pad->priv->output_offset, aagg->priv->offset);
  }

done:
  GST_LOG_OBJECT (pad,
      "Queued new buffer at offset %" G_GUINT64_FORMAT,
      pad->priv->output_offset);
  pad->priv->buffer = inbuf;

  return TRUE;
}
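/* fill_buffer() above converts between sample offsets and clock time in
 * several places; those conversions all reduce to the two helpers sketched
 * here (helper names are ours, gst_util_uint64_scale () is the real
 * overflow-safe GStreamer scaling API): */
static guint64
time_to_samples (GstClockTime t, gint rate)
{
  /* samples = t * rate / GST_SECOND, computed without 64-bit overflow */
  return gst_util_uint64_scale (t, rate, GST_SECOND);
}

static GstClockTime
samples_to_time (guint64 samples, gint rate)
{
  /* time = samples * GST_SECOND / rate */
  return gst_util_uint64_scale (samples, GST_SECOND, rate);
}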
static GstFlowReturn
gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf)
{
  GstAmcAudioDec *self;
  gint idx;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  guint offset = 0;
  GstClockTime timestamp, duration, timestamp_offset = 0;
  GstMapInfo minfo;
  GError *err = NULL;

  memset (&minfo, 0, sizeof (minfo));

  self = GST_AMC_AUDIO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Handling frame");

  /* Make sure to keep a reference to the input here,
   * it can be unreffed from the other thread if
   * finish_frame() is called */
  if (inbuf)
    inbuf = gst_buffer_ref (inbuf);

  if (!self->started) {
    GST_ERROR_OBJECT (self, "Codec not started yet");
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (self->flushing)
    goto flushing;

  if (self->downstream_flow_ret != GST_FLOW_OK)
    goto downstream_error;

  if (!inbuf)
    return gst_amc_audio_dec_drain (self);

  timestamp = GST_BUFFER_PTS (inbuf);
  duration = GST_BUFFER_DURATION (inbuf);

  gst_buffer_map (inbuf, &minfo, GST_MAP_READ);

  while (offset < minfo.size) {
    /* Make sure to release the base class stream lock, otherwise
     * _loop() can't call _finish_frame() and we might block forever
     * because no input buffers are released */
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    /* Wait at most 100ms here, some codecs don't fail dequeueing if
     * the codec is flushing, causing deadlocks during shutdown */
    idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
    GST_AUDIO_DECODER_STREAM_LOCK (self);

    if (idx < 0) {
      if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
        g_clear_error (&err);
        goto flushing;
      }

      switch (idx) {
        case INFO_TRY_AGAIN_LATER:
          GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
          continue;             /* next try */
          break;
        case G_MININT:
          GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
          goto dequeue_error;
        default:
          g_assert_not_reached ();
          break;
      }

      continue;
    }

    if (self->flushing) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
      goto flushing;
    }

    if (self->downstream_flow_ret != GST_FLOW_OK) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      goto downstream_error;
    }

    /* Now handle the frame */

    /* Copy the buffer content in chunks of size as requested
     * by the port */
    buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
    if (!buf)
      goto failed_to_get_input_buffer;

    memset (&buffer_info, 0, sizeof (buffer_info));
    buffer_info.offset = 0;
    buffer_info.size = MIN (minfo.size - offset, buf->size);
    gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
        buffer_info.size);

    orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);

    gst_amc_buffer_free (buf);
    buf = NULL;

    /* Interpolate timestamps if we're passing the buffer
     * in multiple chunks */
    if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
      timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
    }

    if (timestamp != GST_CLOCK_TIME_NONE) {
      buffer_info.presentation_time_us =
          gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
      self->last_upstream_ts = timestamp + timestamp_offset;
    }
    if (duration != GST_CLOCK_TIME_NONE)
      self->last_upstream_ts += duration;

    if (offset == 0) {
      if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT))
        buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
    }

    offset += buffer_info.size;
    GST_DEBUG_OBJECT (self,
        "Queueing buffer %d: size %d time %" G_GINT64_FORMAT
        " flags 0x%08x", idx, buffer_info.size,
        buffer_info.presentation_time_us, buffer_info.flags);
    if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
            &err)) {
      if (self->flushing) {
        g_clear_error (&err);
        goto flushing;
      }
      goto queue_error;
    }
    self->drained = FALSE;
  }

  gst_buffer_unmap (inbuf, &minfo);
  gst_buffer_unref (inbuf);

  return self->downstream_flow_ret;

downstream_error:
  {
    GST_ERROR_OBJECT (self, "Downstream returned %s",
        gst_flow_get_name (self->downstream_flow_ret));
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return self->downstream_flow_ret;
  }
failed_to_get_input_buffer:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
queue_error:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_FLUSHING;
  }
}
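/* When one input buffer is fed to the codec in several chunks, the code
 * above interpolates a per-chunk timestamp linearly across the buffer
 * duration. The same rule isolated into a sketch (function name is ours): */
static GstClockTime
interpolate_chunk_ts (GstClockTime ts, GstClockTime dur, gsize chunk_offset,
    gsize total_size)
{
  if (ts == GST_CLOCK_TIME_NONE || dur == GST_CLOCK_TIME_NONE
      || chunk_offset == 0)
    return ts;
  /* we are chunk_offset/total_size of the way through the buffer */
  return ts + gst_util_uint64_scale (chunk_offset, dur, total_size);
}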
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload,
    GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  gfloat packets_per_packline;
  guint pgroups_per_packet;
  guint packlines_per_list, buffers_per_list;
  guint lines_delay;            /* after how many packed lines we push out a buffer list */
  guint last_line;              /* last pack line number we pushed out a buffer list */
  guint line, offset;
  guint8 *p0, *yp, *up, *vp;
  guint ystride, uvstride;
  guint xinc, yinc;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field, fields;
  GstVideoFormat format;
  GstVideoFrame frame;
  gint interlaced;
  gboolean use_buffer_lists;
  GstBufferList *list = NULL;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  if (!gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ)) {
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo);

  yinc = rtpvrawpay->yinc;
  xinc = rtpvrawpay->xinc;

  /* after how many packed lines we push out a buffer list */
  lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame);

  /* calculate how many buffers we expect to store in a single buffer list */
  pgroups_per_packet = (mtu - (12 + 14)) / pgroup;
  packets_per_packline = width / (xinc * pgroups_per_packet * 1.0);
  packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame);
  buffers_per_list = packlines_per_list * packets_per_packline;
  buffers_per_list = GST_ROUND_UP_8 (buffers_per_list);

  use_buffer_lists = buffers_per_list > 1 &&
      (rtpvrawpay->chunks_per_frame < (height / yinc));

  fields = 1 + interlaced;

  /* start with line 0, offset 0 */
  for (field = 0; field < fields; field++) {
    line = field;
    offset = 0;
    last_line = 0;

    if (use_buffer_lists)
      list = gst_buffer_list_new_sized (buffers_per_list);

    /* write all lines */
    while (line < height) {
      guint left, pack_line;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line, complete = FALSE;
      guint length, cont, pixels;

      /* get the max allowed payload length size, we try to fill the
       * complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      if (field == 0) {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
      } else {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u",
          left, mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .             Two (partial) lines of video data                 .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* make sure we can fit at least *one* header and pixel */
      if (!(left > (6 + pgroup))) {
        gst_rtp_buffer_unmap (&rtp);
        gst_buffer_unref (out);
        goto too_small;
      }

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how many bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip
           * to the next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * xinc;
          length = (pixels * pgroup) / xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to next line we do this here to make the check below easier */
          line += yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin,
            offs, cont);

        switch (format) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            offs /= xinc;
            memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            datap = p0 + (lin * ystride) + (offs * 4);

            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            ydp = yp + (lin * ystride) + offs;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
        complete = TRUE;
      }
      gst_rtp_buffer_unmap (&rtp);

      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      gst_rtp_copy_video_meta (rtpvrawpay, out, buffer);

      /* Now either push out the buffer directly ... */
      if (!use_buffer_lists) {
        ret = gst_rtp_base_payload_push (payload, out);
        continue;
      }

      /* ... or add the buffer to the buffer list ... */
      gst_buffer_list_add (list, out);

      /* ... and check if we need to push out the list */
      pack_line = (line - field) / fields;
      if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) {
        GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack "
            "line %u", gst_buffer_list_length (list), pack_line);
        ret = gst_rtp_base_payload_push_list (payload, list);
        list = NULL;
        if (!complete)
          list = gst_buffer_list_new_sized (buffers_per_list);
        last_line = pack_line;
      }
    }
  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT, (NULL),
        ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
too_small:
  {
    GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT, (NULL),
        ("not enough space to send at least one pixel"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
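/* A depayloader reads back the same 6-byte payload headers the second pass
 * above writes. A sketch of the inverse parse, matching the bit layout in
 * the RFC 4175 diagram (struct and function are illustrative, not
 * GStreamer API): */
typedef struct
{
  guint length;                 /* bytes of pixel data following the headers */
  guint field;                  /* F bit: field number for interlaced video */
  guint line;                   /* scan line number */
  guint cont;                   /* C bit: another header follows this one */
  guint offset;                 /* first pixel of this segment in the line */
} RawRtpLineHeader;

static void
parse_line_header (const guint8 * h, RawRtpLineHeader * out)
{
  out->length = (h[0] << 8) | h[1];
  out->field = (h[2] >> 7) & 0x01;
  out->line = ((h[2] & 0x7f) << 8) | h[3];
  out->cont = (h[4] >> 7) & 0x01;
  out->offset = ((h[4] & 0x7f) << 8) | h[5];
}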
static GstFlowReturn
gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
{
  /* Get all pads that have data for us and store them in a
   * new list.
   *
   * Calculate the current output offset/timestamp and
   * offset_end/timestamp_end. Allocate a silence buffer
   * for this and store it.
   *
   * For all pads:
   * 1) Once per input buffer (cached)
   *   1) Check discont (flag and timestamp with tolerance)
   *   2) If discont or new, resync. That means:
   *     1) Drop all start data of the buffer that comes before
   *        the current position/offset.
   *     2) Calculate the offset (output segment!) that the first
   *        frame of the input buffer corresponds to. Base this on
   *        the running time.
   *
   * 2) If the current pad's offset/offset_end overlaps with the output
   *    offset/offset_end, mix it at the appropriate position in the output
   *    buffer and advance the pad's position. Remember if this pad needs
   *    a new buffer to advance behind the output offset_end.
   *
   * 3) If we had no pad with a buffer, go EOS.
   *
   * 4) If we had at least one pad that did not advance behind output
   *    offset_end, let collected be called again for the current
   *    output offset/offset_end.
   */
  GstElement *element;
  GstAudioAggregator *aagg;
  GList *iter;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;
  gint64 next_offset;
  gint64 next_timestamp;
  gint rate, bpf;
  gboolean dropped = FALSE;
  gboolean is_eos = TRUE;
  gboolean is_done = TRUE;
  guint blocksize;

  element = GST_ELEMENT (agg);
  aagg = GST_AUDIO_AGGREGATOR (agg);

  /* Sync pad properties to the stream time */
  gst_aggregator_iterate_sinkpads (agg,
      (GstAggregatorPadForeachFunc) sync_pad_values, NULL);

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);

  /* Update position from the segment start/stop if needed */
  if (agg->segment.position == -1) {
    if (agg->segment.rate > 0.0)
      agg->segment.position = agg->segment.start;
    else
      agg->segment.position = agg->segment.stop;
  }

  if (G_UNLIKELY (aagg->info.finfo->format == GST_AUDIO_FORMAT_UNKNOWN)) {
    if (timeout) {
      GST_DEBUG_OBJECT (aagg,
          "Got timeout before receiving any caps, don't output anything");

      /* Advance position */
      if (agg->segment.rate > 0.0)
        agg->segment.position += aagg->priv->output_buffer_duration;
      else if (agg->segment.position > aagg->priv->output_buffer_duration)
        agg->segment.position -= aagg->priv->output_buffer_duration;
      else
        agg->segment.position = 0;

      GST_OBJECT_UNLOCK (agg);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_AGGREGATOR_FLOW_NEED_DATA;
    } else {
      GST_OBJECT_UNLOCK (agg);
      goto not_negotiated;
    }
  }

  rate = GST_AUDIO_INFO_RATE (&aagg->info);
  bpf = GST_AUDIO_INFO_BPF (&aagg->info);

  if (aagg->priv->offset == -1) {
    aagg->priv->offset =
        gst_util_uint64_scale (agg->segment.position - agg->segment.start,
        rate, GST_SECOND);
    GST_DEBUG_OBJECT (aagg, "Starting at offset %" G_GINT64_FORMAT,
        aagg->priv->offset);
  }

  blocksize = gst_util_uint64_scale (aagg->priv->output_buffer_duration,
      rate, GST_SECOND);
  blocksize = MAX (1, blocksize);

  /* for the next timestamp, use the sample counter, which will
   * never accumulate rounding errors */

  /* FIXME: Reverse mixing does not work at all yet */
  if (agg->segment.rate > 0.0) {
    next_offset = aagg->priv->offset + blocksize;
  } else {
    next_offset = aagg->priv->offset - blocksize;
  }

  next_timestamp = agg->segment.start +
      gst_util_uint64_scale (next_offset, GST_SECOND, rate);

  if (aagg->priv->current_buffer == NULL) {
    GST_OBJECT_UNLOCK (agg);
    aagg->priv->current_buffer =
        GST_AUDIO_AGGREGATOR_GET_CLASS (aagg)->create_output_buffer (aagg,
        blocksize);
    /* Be careful: some things could have changed while the
     * object lock was released */
    GST_OBJECT_LOCK (agg);
    GST_BUFFER_FLAG_SET (aagg->priv->current_buffer, GST_BUFFER_FLAG_GAP);
  }
  outbuf = aagg->priv->current_buffer;

  GST_LOG_OBJECT (agg,
      "Starting to mix %u samples for offset %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, blocksize, aagg->priv->offset,
      GST_TIME_ARGS (agg->segment.position));

  for (iter = element->sinkpads; iter; iter = iter->next) {
    GstBuffer *inbuf;
    GstAudioAggregatorPad *pad = (GstAudioAggregatorPad *) iter->data;
    GstAggregatorPad *aggpad = (GstAggregatorPad *) iter->data;
    gboolean drop_buf = FALSE;
    gboolean pad_eos = gst_aggregator_pad_is_eos (aggpad);

    if (!pad_eos)
      is_eos = FALSE;

    inbuf = gst_aggregator_pad_get_buffer (aggpad);

    GST_OBJECT_LOCK (pad);
    if (!inbuf) {
      if (timeout) {
        if (pad->priv->output_offset < next_offset) {
          gint64 diff = next_offset - pad->priv->output_offset;

          GST_DEBUG_OBJECT (pad, "Timeout, missing %" G_GINT64_FORMAT
              " frames (%" GST_TIME_FORMAT ")", diff,
              GST_TIME_ARGS (gst_util_uint64_scale (diff, GST_SECOND,
                      GST_AUDIO_INFO_RATE (&aagg->info))));
        }
      } else if (!pad_eos) {
        is_done = FALSE;
      }
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (!pad->priv->buffer || pad->priv->buffer == inbuf);

    /* New buffer? */
    if (!pad->priv->buffer) {
      /* Takes ownership of buffer */
      if (!gst_audio_aggregator_fill_buffer (aagg, pad, inbuf)) {
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    } else {
      gst_buffer_unref (inbuf);
    }

    if (!pad->priv->buffer && !dropped && pad_eos) {
      GST_DEBUG_OBJECT (aggpad, "Pad is in EOS state");
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (pad->priv->buffer);

    /* This pad is lagging behind, we need to update the offset
     * and maybe drop the current buffer */
    if (pad->priv->output_offset < aagg->priv->offset) {
      gint64 diff = aagg->priv->offset - pad->priv->output_offset;
      gint64 odiff = diff;

      if (pad->priv->position + diff > pad->priv->size)
        diff = pad->priv->size - pad->priv->position;
      pad->priv->position += diff;
      pad->priv->output_offset += diff;

      if (pad->priv->position == pad->priv->size) {
        GST_DEBUG_OBJECT (pad, "Buffer was late by %" GST_TIME_FORMAT
            ", dropping %" GST_PTR_FORMAT,
            GST_TIME_ARGS (gst_util_uint64_scale (odiff, GST_SECOND,
                    GST_AUDIO_INFO_RATE (&aagg->info))), pad->priv->buffer);
        /* Buffer done, drop it */
        gst_buffer_replace (&pad->priv->buffer, NULL);
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    }

    if (pad->priv->output_offset >= aagg->priv->offset
        && pad->priv->output_offset < aagg->priv->offset + blocksize
        && pad->priv->buffer) {
      GST_LOG_OBJECT (aggpad, "Mixing buffer for current offset");
      drop_buf = !gst_audio_aggregator_mix_buffer (aagg, pad,
          pad->priv->buffer, outbuf);
      if (pad->priv->output_offset >= next_offset) {
        GST_LOG_OBJECT (pad,
            "Pad is at or after current offset: %" G_GUINT64_FORMAT " >= %"
            G_GINT64_FORMAT, pad->priv->output_offset, next_offset);
      } else {
        is_done = FALSE;
      }
    }

    GST_OBJECT_UNLOCK (pad);

    if (drop_buf)
      gst_aggregator_pad_drop_buffer (aggpad);
  }
  GST_OBJECT_UNLOCK (agg);

  if (dropped) {
    /* We dropped a buffer, retry */
    GST_LOG_OBJECT (aagg, "A pad dropped a buffer, wait for the next one");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_AGGREGATOR_FLOW_NEED_DATA;
  }

  if (!is_done && !is_eos) {
    /* Get more buffers */
    GST_LOG_OBJECT (aagg,
        "We're not done yet for the current offset, waiting for more data");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_AGGREGATOR_FLOW_NEED_DATA;
  }

  if (is_eos) {
    gint64 max_offset = 0;

    GST_DEBUG_OBJECT (aagg, "We're EOS");

    GST_OBJECT_LOCK (agg);
    for (iter = GST_ELEMENT (agg)->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      max_offset = MAX ((gint64) max_offset,
          (gint64) pad->priv->output_offset);
    }
    GST_OBJECT_UNLOCK (agg);

    /* This means EOS or nothing mixed in at all */
    if (aagg->priv->offset == max_offset) {
      gst_buffer_replace (&aagg->priv->current_buffer, NULL);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_EOS;
    }

    if (max_offset <= next_offset) {
      GST_DEBUG_OBJECT (aagg,
          "Last buffer is incomplete: %" G_GUINT64_FORMAT " <= %"
          G_GINT64_FORMAT, max_offset, next_offset);
      next_offset = max_offset;
      next_timestamp = agg->segment.start +
          gst_util_uint64_scale (next_offset, GST_SECOND, rate);

      if (next_offset > aagg->priv->offset)
        gst_buffer_resize (outbuf, 0,
            (next_offset - aagg->priv->offset) * bpf);
    }
  }

  /* set timestamps on the output buffer */
  GST_OBJECT_LOCK (agg);
  if (agg->segment.rate > 0.0) {
    GST_BUFFER_PTS (outbuf) = agg->segment.position;
    GST_BUFFER_OFFSET (outbuf) = aagg->priv->offset;
    GST_BUFFER_OFFSET_END (outbuf) = next_offset;
    GST_BUFFER_DURATION (outbuf) = next_timestamp - agg->segment.position;
  } else {
    GST_BUFFER_PTS (outbuf) = next_timestamp;
    GST_BUFFER_OFFSET (outbuf) = next_offset;
    GST_BUFFER_OFFSET_END (outbuf) = aagg->priv->offset;
    GST_BUFFER_DURATION (outbuf) = agg->segment.position - next_timestamp;
  }
  GST_OBJECT_UNLOCK (agg);

  /* send it out */
  GST_LOG_OBJECT (aagg,
      "pushing outbuf %p, timestamp %" GST_TIME_FORMAT " offset %"
      G_GINT64_FORMAT, outbuf, GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
      GST_BUFFER_OFFSET (outbuf));

  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  ret = gst_aggregator_finish_buffer (agg, aagg->priv->current_buffer);
  aagg->priv->current_buffer = NULL;

  GST_LOG_OBJECT (aagg, "pushed outbuf, result = %s", gst_flow_get_name (ret));

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);
  aagg->priv->offset = next_offset;
  agg->segment.position = next_timestamp;

  /* If there was a timeout and a gap in the data of one of the streams,
   * then it's a very good time for a resync with the timestamps. */
  if (timeout) {
    for (iter = element->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      GST_OBJECT_LOCK (pad);
      if (pad->priv->output_offset < aagg->priv->offset)
        pad->priv->output_offset = -1;
      GST_OBJECT_UNLOCK (pad);
    }
  }
  GST_OBJECT_UNLOCK (agg);
  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    GST_ELEMENT_ERROR (aagg, STREAM, FORMAT, (NULL),
        ("Unknown data received, not negotiated"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
/*!
 * \brief CvVideoWriter_GStreamer::writeFrame
 * \param image
 * \return
 * Pushes the given frame on the pipeline.
 * The timestamp for the buffer is generated from the framerate set in open
 * and ensures a smooth video
 */
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::writeFrame");

    GstBuffer *buffer;
    GstClockTime duration, timestamp;
    GstFlowReturn ret;
    int size;

    __BEGIN__;

    handleMessage(pipeline);

    if (input_pix_fmt == GST_VIDEO_FORMAT_BGR) {
        if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
        }
    }
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
    else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) {
        if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
        }
    }
#endif
    else {
        assert(false);
    }

    size = image->imageSize;
    duration = ((double)1/framerate) * GST_SECOND;
    timestamp = num_frames * duration;

    //gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy
#if GST_VERSION_MAJOR == 0
    buffer = gst_buffer_new_and_alloc (size);
    memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size);
    GST_BUFFER_DURATION(buffer) = duration;
    GST_BUFFER_TIMESTAMP(buffer) = timestamp;
#else
    buffer = gst_buffer_new_allocate (NULL, size, NULL);
    GstMapInfo info;
    // map for writing: the buffer is filled below, so GST_MAP_WRITE is the
    // correct flag (the original GST_MAP_READ only worked by accident on
    // writable system memory)
    gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_WRITE);
    memcpy(info.data, (guint8*)image->imageData, size);
    gst_buffer_unmap(buffer, &info);
    GST_BUFFER_DURATION(buffer) = duration;
    GST_BUFFER_PTS(buffer) = timestamp;
    GST_BUFFER_DTS(buffer) = timestamp;
#endif
    //set the current number in the frame
    GST_BUFFER_OFFSET(buffer) = num_frames;

    ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
    if (ret != GST_FLOW_OK) {
        /* something wrong, stop pushing */
        assert(false);
    }
    //gst_debug_bin_to_dot_file (GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
    ++num_frames;

    __END__;

    return true;
}
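/* The writer above derives each PTS from a floating-point framerate, which
 * drifts over long recordings. A sketch of an exact integer alternative,
 * assuming the framerate is known as a fps_n/fps_d fraction
 * (gst_util_uint64_scale () is real GStreamer API): */
static GstClockTime
frame_pts (guint64 frame_index, gint fps_n, gint fps_d)
{
  /* pts = frame_index * fps_d / fps_n seconds, with no double rounding */
  return gst_util_uint64_scale (frame_index,
      (guint64) fps_d * GST_SECOND, fps_n);
}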
/**
 * rtp_jitter_buffer_insert:
 * @jbuf: an #RTPJitterBuffer
 * @buf: a buffer
 * @time: a running_time when this buffer was received in nanoseconds
 * @clock_rate: the clock-rate of the payload of @buf
 * @tail: TRUE when the tail element changed.
 * @percent: the buffering percent after insertion
 *
 * Inserts @buf into the packet queue of @jbuf. The sequence number of the
 * packet will be used to sort the packets. This function takes ownership
 * of @buf when the function returns %TRUE.
 * @buf should have writable metadata when calling this function.
 *
 * Returns: %FALSE if a packet with the same number already existed.
 */
gboolean
rtp_jitter_buffer_insert (RTPJitterBuffer * jbuf, GstBuffer * buf,
    GstClockTime time, guint32 clock_rate, gboolean * tail, gint * percent)
{
  GList *list;
  guint32 rtptime;
  guint16 seqnum;
  GstRTPBuffer rtp = { NULL };

  g_return_val_if_fail (jbuf != NULL, FALSE);
  g_return_val_if_fail (buf != NULL, FALSE);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  seqnum = gst_rtp_buffer_get_seq (&rtp);

  /* loop the list to skip strictly smaller seqnum buffers */
  for (list = jbuf->packets->head; list; list = g_list_next (list)) {
    guint16 qseq;
    gint gap;
    GstRTPBuffer rtpb = { NULL };

    gst_rtp_buffer_map (GST_BUFFER_CAST (list->data), GST_MAP_READ, &rtpb);
    qseq = gst_rtp_buffer_get_seq (&rtpb);
    gst_rtp_buffer_unmap (&rtpb);

    /* compare the new seqnum to the one in the buffer */
    gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);

    /* we hit a packet with the same seqnum, notify a duplicate */
    if (G_UNLIKELY (gap == 0))
      goto duplicate;

    /* seqnum > qseq, we can stop looking */
    if (G_LIKELY (gap < 0))
      break;
  }

  rtptime = gst_rtp_buffer_get_timestamp (&rtp);

  /* rtp time jumps are checked for during skew calculation, but bypassed
   * in other modes, so mind those here and reset the jitterbuffer if needed.
   * Only reset if valid input time, which is likely for UDP input
   * where we expect this might happen due to async thread effects
   * (in seek and state change cycles), but not so much for TCP input */
  if (GST_CLOCK_TIME_IS_VALID (time) &&
      jbuf->mode != RTP_JITTER_BUFFER_MODE_SLAVE &&
      jbuf->base_time != -1 && jbuf->last_rtptime != -1) {
    GstClockTime ext_rtptime = jbuf->ext_rtptime;

    ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
    if (ext_rtptime > jbuf->last_rtptime + 3 * clock_rate ||
        ext_rtptime + 3 * clock_rate < jbuf->last_rtptime) {
      /* reset even if we don't have valid incoming time;
       * still better than producing possibly very bogus output timestamp */
      GST_WARNING ("rtp delta too big, reset skew");
      rtp_jitter_buffer_reset_skew (jbuf);
    }
  }

  switch (jbuf->mode) {
    case RTP_JITTER_BUFFER_MODE_NONE:
    case RTP_JITTER_BUFFER_MODE_BUFFER:
      /* send 0 as the first timestamp and -1 for the other ones. This will
       * interpolate them from the RTP timestamps with a 0 origin. In
       * buffering mode we will adjust the outgoing timestamps according to
       * the amount of time we spent buffering. */
      if (jbuf->base_time == -1)
        time = 0;
      else
        time = -1;
      break;
    case RTP_JITTER_BUFFER_MODE_SLAVE:
    default:
      break;
  }

  /* do skew calculation by measuring the difference between rtptime and the
   * receive time, this function will retimestamp @buf with the skew
   * corrected running time. */
  time = calculate_skew (jbuf, rtptime, time, clock_rate);
  GST_BUFFER_PTS (buf) = time;
  GST_BUFFER_DTS (buf) = time;

  /* It's more likely that the packet was inserted at the front of the
   * buffer */
  if (G_LIKELY (list))
    g_queue_insert_before (jbuf->packets, list, buf);
  else
    g_queue_push_tail (jbuf->packets, buf);

  /* buffering mode, update buffer stats */
  if (jbuf->mode == RTP_JITTER_BUFFER_MODE_BUFFER)
    update_buffer_level (jbuf, percent);
  else if (percent)
    *percent = -1;

  /* tail was changed when we did not find a previous packet, we set the
   * return flag when requested. */
  if (G_LIKELY (tail))
    *tail = (list == NULL);

  gst_rtp_buffer_unmap (&rtp);

  return TRUE;

  /* ERRORS */
duplicate:
  {
    gst_rtp_buffer_unmap (&rtp);
    GST_WARNING ("duplicate packet %d found", (gint) seqnum);
    return FALSE;
  }
}
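/* The jump detection above relies on gst_rtp_buffer_ext_timestamp (), real
 * GStreamer API that unwraps the wrapping 32-bit RTP timestamp into a
 * monotonically increasing 64-bit value. Minimal usage sketch (the wrapper
 * is ours; the state variable must start at -1 and be carried across
 * calls): */
static guint64
unwrap_rtptime (guint64 * ext_state, guint32 rtptime)
{
  return gst_rtp_buffer_ext_timestamp (ext_state, rtptime);
}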
static GstFlowReturn
gst_video_test_src_fill (GstPushSrc * psrc, GstBuffer * buffer)
{
  GstVideoTestSrc *src;
  GstClockTime next_time;
  GstVideoFrame frame;
  gconstpointer pal;
  gsize palsize;

  src = GST_VIDEO_TEST_SRC (psrc);

  if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&src->info) ==
          GST_VIDEO_FORMAT_UNKNOWN))
    goto not_negotiated;

  /* 0 framerate and we are at the second frame, eos */
  if (G_UNLIKELY (src->info.fps_n == 0 && src->n_frames == 1))
    goto eos;

  GST_LOG_OBJECT (src, "creating buffer from pool for frame %d",
      (gint) src->n_frames);

  if (!gst_video_frame_map (&frame, &src->info, buffer, GST_MAP_WRITE))
    goto invalid_frame;

  src->make_image (src, &frame);

  if ((pal = gst_video_format_get_palette (GST_VIDEO_FRAME_FORMAT (&frame),
              &palsize))) {
    memcpy (GST_VIDEO_FRAME_PLANE_DATA (&frame, 1), pal, palsize);
  }

  gst_video_frame_unmap (&frame);

  GST_BUFFER_DTS (buffer) =
      src->accum_rtime + src->timestamp_offset + src->running_time;
  GST_BUFFER_PTS (buffer) = GST_BUFFER_DTS (buffer);

  GST_DEBUG_OBJECT (src, "Timestamp: %" GST_TIME_FORMAT " = accumulated %"
      GST_TIME_FORMAT " + offset: %" GST_TIME_FORMAT " + running time: %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (buffer)),
      GST_TIME_ARGS (src->accum_rtime), GST_TIME_ARGS (src->timestamp_offset),
      GST_TIME_ARGS (src->running_time));

  GST_BUFFER_OFFSET (buffer) = src->accum_frames + src->n_frames;
  src->n_frames++;
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET (buffer) + 1;

  if (src->info.fps_n) {
    next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
        src->info.fps_d, src->info.fps_n);
    GST_BUFFER_DURATION (buffer) = next_time - src->running_time;
  } else {
    next_time = src->timestamp_offset;
    /* NONE means forever */
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  src->running_time = next_time;

  return GST_FLOW_OK;

not_negotiated:
  {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before get function"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
eos:
  {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->n_frames);
    return GST_FLOW_EOS;
  }
invalid_frame:
  {
    GST_DEBUG_OBJECT (src, "invalid frame");
    return GST_FLOW_OK;
  }
}
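/* A quick way to exercise the fill function above from an application.
 * "pattern" and "num-buffers" are documented videotestsrc properties; the
 * pipeline string itself is only an example. */
static GstElement *
make_test_pipeline (void)
{
  return gst_parse_launch ("videotestsrc num-buffers=100 pattern=smpte ! "
      "video/x-raw,framerate=30/1 ! autovideosink", NULL);
}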
static BOOL tsmf_gstreamer_decodeEx(ITSMFDecoder* decoder, const BYTE *data, UINT32 data_size, UINT32 extensions, UINT64 start_time, UINT64 end_time, UINT64 duration) { GstBuffer *gst_buf; TSMFGstreamerDecoder* mdecoder = (TSMFGstreamerDecoder *) decoder; UINT64 sample_time = tsmf_gstreamer_timestamp_ms_to_gst(start_time); BOOL useTimestamps = TRUE; if (!mdecoder) { WLog_ERR(TAG, "Decoder not initialized!"); return FALSE; } /* * This function is always called from a stream-specific thread. * It should be alright to block here if necessary. * We don't expect to block here often, since the pipeline should * have more than enough buffering. */ DEBUG_TSMF("%s. Start:(%"PRIu64") End:(%"PRIu64") Duration:(%"PRIu64") Last Start:(%"PRIu64")", get_type(mdecoder), start_time, end_time, duration, mdecoder->last_sample_start_time); if (mdecoder->shutdown) { WLog_ERR(TAG, "decodeEx called on shutdown decoder"); return TRUE; } if (mdecoder->gst_caps == NULL) { WLog_ERR(TAG, "tsmf_gstreamer_set_format not called or invalid format."); return FALSE; } if (!mdecoder->pipe) tsmf_gstreamer_pipeline_build(mdecoder); if (!mdecoder->src) { WLog_ERR(TAG, "failed to construct pipeline correctly. Unable to push buffer to source element."); return FALSE; } gst_buf = tsmf_get_buffer_from_data(data, data_size); if (gst_buf == NULL) { WLog_ERR(TAG, "tsmf_get_buffer_from_data(%p, %"PRIu32") failed.", (void*) data, data_size); return FALSE; } /* Relative timestamping will sometimes be set to 0, * so we ignore these timestamps just to be safe (bit 8) */ if (extensions & 0x00000080) { DEBUG_TSMF("Ignoring the timestamps - relative - bit 8"); useTimestamps = FALSE; } /* If no timestamps exist then we don't want to look at the timestamp values (bit 7) */ if (extensions & 0x00000040) { DEBUG_TSMF("Ignoring the timestamps - none - bit 7"); useTimestamps = FALSE; } /* If performing a seek */ if (mdecoder->seeking) { mdecoder->seeking = FALSE; tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PAUSED); mdecoder->pipeline_start_time_valid = 0; } if (mdecoder->pipeline_start_time_valid) { DEBUG_TSMF("%s start time %"PRIu64"", get_type(mdecoder), start_time); /* The seek condition is based on the start time only. WMV1 and WMV2 files * in particular have bad end time and duration values, and there seem to be * no real side effects of just using the start time instead. */ UINT64 minTime = mdecoder->last_sample_start_time - (UINT64) SEEK_TOLERANCE; UINT64 maxTime = mdecoder->last_sample_start_time + (UINT64) SEEK_TOLERANCE; /* Make sure the minTime stops at 0, should we be at the beginning of the stream */ if (mdecoder->last_sample_start_time < (UINT64) SEEK_TOLERANCE) minTime = 0; /* If the start_time is valid and differs from the previous start time by more than the seek tolerance, then we have a seek condition */ if (((start_time > maxTime) || (start_time < minTime)) && useTimestamps) { DEBUG_TSMF("tsmf_gstreamer_decodeEx: start_time=[%"PRIu64"] > last_sample_start_time=[%"PRIu64"] OR ", start_time, mdecoder->last_sample_start_time); DEBUG_TSMF("tsmf_gstreamer_decodeEx: start_time=[%"PRIu64"] < last_sample_start_time=[%"PRIu64"] with", start_time, mdecoder->last_sample_start_time); DEBUG_TSMF("tsmf_gstreamer_decodeEx: a tolerance of more than [%lu] from the last sample", SEEK_TOLERANCE); DEBUG_TSMF("tsmf_gstreamer_decodeEx: minTime=[%"PRIu64"] maxTime=[%"PRIu64"]", minTime, maxTime); mdecoder->seeking = TRUE; /* since we can't make the gstreamer pipeline jump to the new start time after a seek - we just maintain * an 
offset between realtime and gstreamer time */ mdecoder->seek_offset = start_time; } } else { DEBUG_TSMF("%s start time %"PRIu64"", get_type(mdecoder), start_time); /* Always set base/start time to 0. Will use seek offset to translate real buffer times * back to 0. This allows the video to be started from anywhere and the ability to handle seeks * without rebuilding the pipeline, etc. since that is costly */ gst_element_set_base_time(mdecoder->pipe, tsmf_gstreamer_timestamp_ms_to_gst(0)); gst_element_set_start_time(mdecoder->pipe, tsmf_gstreamer_timestamp_ms_to_gst(0)); mdecoder->pipeline_start_time_valid = 1; /* Set the seek offset if buffer has valid timestamps. */ if (useTimestamps) mdecoder->seek_offset = start_time; if (!gst_element_seek(mdecoder->pipe, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) { WLog_ERR(TAG, "seek failed"); } } #if GST_VERSION_MAJOR > 0 if (useTimestamps) GST_BUFFER_PTS(gst_buf) = sample_time - tsmf_gstreamer_timestamp_ms_to_gst(mdecoder->seek_offset); else GST_BUFFER_PTS(gst_buf) = GST_CLOCK_TIME_NONE; #else if (useTimestamps) GST_BUFFER_TIMESTAMP(gst_buf) = sample_time - tsmf_gstreamer_timestamp_ms_to_gst(mdecoder->seek_offset); else GST_BUFFER_TIMESTAMP(gst_buf) = GST_CLOCK_TIME_NONE; #endif GST_BUFFER_DURATION(gst_buf) = GST_CLOCK_TIME_NONE; GST_BUFFER_OFFSET(gst_buf) = GST_BUFFER_OFFSET_NONE; #if GST_VERSION_MAJOR > 0 #else gst_buffer_set_caps(gst_buf, mdecoder->gst_caps); #endif gst_app_src_push_buffer(GST_APP_SRC(mdecoder->src), gst_buf); /* Should only update the last timestamps if the current ones are valid */ if (useTimestamps) { mdecoder->last_sample_start_time = start_time; mdecoder->last_sample_end_time = end_time; } if (mdecoder->pipe && (GST_STATE(mdecoder->pipe) != GST_STATE_PLAYING)) { DEBUG_TSMF("%s: state=%s", get_type(mdecoder), gst_element_state_get_name(GST_STATE(mdecoder->pipe))); DEBUG_TSMF("%s Paused: %"PRIi32" Shutdown: %i Ready: %"PRIi32"", get_type(mdecoder), mdecoder->paused, mdecoder->shutdown, mdecoder->ready); if (!mdecoder->paused && !mdecoder->shutdown && mdecoder->ready) tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING); } return TRUE; }
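/*
 * Added illustration (hypothetical helper, not from the original source):
 * the PTS translation used above. The pipeline base/start time is pinned to
 * 0 and every buffer timestamp is shifted back by the stream time at which
 * playback (or the last detected seek) started, so the pipeline never has to
 * be rebuilt for a seek. Both arguments are assumed to already be converted
 * to GstClockTime units via tsmf_gstreamer_timestamp_ms_to_gst, defined
 * elsewhere in the same file.
 */
static GstClockTime
translate_sample_pts (GstClockTime sample_time, GstClockTime seek_offset)
{
  /* clamp instead of underflowing if a buffer predates the seek point */
  if (sample_time < seek_offset)
    return 0;
  return sample_time - seek_offset;
}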
static GstFlowReturn gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstRTPMux *rtp_mux; GstFlowReturn ret; GstRTPMuxPadPrivate *padpriv; gboolean drop; gboolean changed = FALSE; GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT; rtp_mux = GST_RTP_MUX (parent); if (gst_pad_check_reconfigure (rtp_mux->srcpad)) { GstCaps *current_caps = gst_pad_get_current_caps (pad); if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) { gst_pad_mark_reconfigure (rtp_mux->srcpad); /* don't leak the caps on the failure path */ if (current_caps) gst_caps_unref (current_caps); if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad)) ret = GST_FLOW_FLUSHING; else ret = GST_FLOW_NOT_NEGOTIATED; gst_buffer_unref (buffer); goto out; } gst_caps_unref (current_caps); } GST_OBJECT_LOCK (rtp_mux); padpriv = gst_pad_get_element_private (pad); if (!padpriv) { GST_OBJECT_UNLOCK (rtp_mux); gst_buffer_unref (buffer); return GST_FLOW_NOT_LINKED; } buffer = gst_buffer_make_writable (buffer); if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) { GST_OBJECT_UNLOCK (rtp_mux); gst_buffer_unref (buffer); GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer"); return GST_FLOW_ERROR; } drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer); gst_rtp_buffer_unmap (&rtpbuffer); if (!drop) { if (pad != rtp_mux->last_pad) { changed = TRUE; g_clear_object (&rtp_mux->last_pad); rtp_mux->last_pad = g_object_ref (pad); } if (GST_BUFFER_DURATION_IS_VALID (buffer) && GST_BUFFER_PTS_IS_VALID (buffer)) rtp_mux->last_stop = GST_BUFFER_PTS (buffer) + GST_BUFFER_DURATION (buffer); else rtp_mux->last_stop = GST_CLOCK_TIME_NONE; } GST_OBJECT_UNLOCK (rtp_mux); if (changed) gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux); if (drop) { gst_buffer_unref (buffer); ret = GST_FLOW_OK; } else { ret = gst_pad_push (rtp_mux->srcpad, buffer); } out: return ret; }
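/*
 * Added illustration (not from the original source): the make-writable plus
 * READWRITE-map pattern used above, reduced to a minimal helper that rewrites
 * one RTP header field in place.
 */
#include <gst/rtp/gstrtpbuffer.h>

static GstBuffer *
rewrite_rtp_seqnum (GstBuffer * buf, guint16 seqnum)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

  /* take ownership of a writable copy (a no-op if already writable) */
  buf = gst_buffer_make_writable (buf);
  if (!gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp))
    return buf;                 /* not a valid RTP packet, leave it alone */
  gst_rtp_buffer_set_seq (&rtp, seqnum);
  gst_rtp_buffer_unmap (&rtp);
  return buf;
}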
static GstFlowReturn gst_rtp_g723_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf) { GstFlowReturn ret = GST_FLOW_OK; GstMapInfo map; guint8 HDR; GstRTPG723Pay *pay; GstClockTime packet_dur, timestamp; guint payload_len, packet_len; pay = GST_RTP_G723_PAY (payload); gst_buffer_map (buf, &map, GST_MAP_READ); timestamp = GST_BUFFER_PTS (buf); if (GST_BUFFER_IS_DISCONT (buf)) { /* flush everything on discont */ gst_adapter_clear (pay->adapter); pay->timestamp = GST_CLOCK_TIME_NONE; pay->duration = 0; pay->discont = TRUE; } /* should be one of these sizes */ if (map.size != 4 && map.size != 20 && map.size != 24) goto invalid_size; /* check size by looking at the header bits */ HDR = map.data[0] & 0x3; if (size_tab[HDR] != map.size) goto wrong_size; /* calculate packet size and duration */ payload_len = gst_adapter_available (pay->adapter) + map.size; packet_dur = pay->duration + G723_FRAME_DURATION; packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0); if (gst_rtp_base_payload_is_filled (payload, packet_len, packet_dur)) { /* size or duration would overflow the packet, flush the queued data */ ret = gst_rtp_g723_pay_flush (pay); } /* update timestamp; we keep the timestamp of the first packet in the adapter, * but we can also derive it from a later packet by subtracting the duration * already queued. */ if (timestamp != GST_CLOCK_TIME_NONE && pay->timestamp == GST_CLOCK_TIME_NONE) { if (timestamp > pay->duration) pay->timestamp = timestamp - pay->duration; else pay->timestamp = 0; } gst_buffer_unmap (buf, &map); /* add packet to the queue */ gst_adapter_push (pay->adapter, buf); pay->duration = packet_dur; /* check if we can flush now */ if (pay->duration >= payload->min_ptime) { ret = gst_rtp_g723_pay_flush (pay); } return ret; /* WARNINGS */ invalid_size: { GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE, ("Invalid input buffer size"), ("Input size should be 4, 20 or 24, got %" G_GSIZE_FORMAT, map.size)); gst_buffer_unmap (buf, &map); gst_buffer_unref (buf); return GST_FLOW_OK; } wrong_size: { GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE, ("Wrong input buffer size"), ("Expected input buffer size %u but got %" G_GSIZE_FORMAT, size_tab[HDR], map.size)); gst_buffer_unmap (buf, &map); gst_buffer_unref (buf); return GST_FLOW_OK; } }
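/*
 * Added illustration (not from the original source): the header-driven size
 * check above, spelled out. The two low bits of the first octet of a G.723.1
 * frame select the frame type, which fixes its length: 24 bytes (6.3 kbit/s),
 * 20 bytes (5.3 kbit/s) or 4 bytes (SID). size_tab in the original file is
 * assumed to encode the same mapping; the table below is illustrative only.
 */
#include <glib.h>

static const guint g723_frame_sizes[4] = { 24, 20, 4, 0 /* reserved */ };

static gboolean
g723_frame_size_valid (const guint8 * data, gsize size)
{
  guint expected = g723_frame_sizes[data[0] & 0x3];

  return expected != 0 && size == expected;
}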
/* Decode */ static picture_t *DecodeBlock( decoder_t *p_dec, block_t **pp_block ) { block_t *p_block; picture_t *p_pic = NULL; decoder_sys_t *p_sys = p_dec->p_sys; GstMessage *p_msg; GstBuffer *p_buf; if( !pp_block ) return NULL; p_block = *pp_block; if( !p_block ) goto check_messages; if( unlikely( p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY | BLOCK_FLAG_CORRUPTED ) ) ) { if( p_block->i_flags & BLOCK_FLAG_DISCONTINUITY ) Flush( p_dec ); if( p_block->i_flags & BLOCK_FLAG_CORRUPTED ) { block_Release( p_block ); goto done; } } if( likely( p_block->i_buffer ) ) { p_buf = gst_buffer_new_wrapped_full( GST_MEMORY_FLAG_READONLY, p_block->p_start, p_block->i_size, p_block->p_buffer - p_block->p_start, p_block->i_buffer, p_block, ( GDestroyNotify )block_Release ); if( unlikely( p_buf == NULL ) ) { msg_Err( p_dec, "failed to create input gstbuffer" ); p_dec->b_error = true; block_Release( p_block ); goto done; } if( p_block->i_dts > VLC_TS_INVALID ) GST_BUFFER_DTS( p_buf ) = gst_util_uint64_scale( p_block->i_dts, GST_SECOND, GST_MSECOND ); if( p_block->i_pts <= VLC_TS_INVALID ) GST_BUFFER_PTS( p_buf ) = GST_BUFFER_DTS( p_buf ); else GST_BUFFER_PTS( p_buf ) = gst_util_uint64_scale( p_block->i_pts, GST_SECOND, GST_MSECOND ); if( p_block->i_length > VLC_TS_INVALID ) GST_BUFFER_DURATION( p_buf ) = gst_util_uint64_scale( p_block->i_length, GST_SECOND, GST_MSECOND ); if( p_dec->fmt_in.video.i_frame_rate && p_dec->fmt_in.video.i_frame_rate_base ) GST_BUFFER_DURATION( p_buf ) = gst_util_uint64_scale( GST_SECOND, p_dec->fmt_in.video.i_frame_rate_base, p_dec->fmt_in.video.i_frame_rate ); /* Give the input buffer to GStreamer Bin. * * libvlc libvlc * \ (i/p) (o/p) ^ * \ / * ___v____GSTREAMER BIN_____/____ * | | * | appsrc-->decode-->vlcsink | * |_______________________________| * * * * * * * * * * * * * * * * * * * * * */ if( unlikely( gst_app_src_push_buffer( GST_APP_SRC_CAST( p_sys->p_decode_src ), p_buf ) != GST_FLOW_OK ) ) { /* block will be released internally, * when gst_buffer_unref() is called */ p_dec->b_error = true; msg_Err( p_dec, "failed to push buffer" ); goto done; } } else block_Release( p_block ); check_messages: /* Poll for any messages, errors */ p_msg = gst_bus_pop_filtered( p_sys->p_bus, GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_WARNING | GST_MESSAGE_INFO ); if( p_msg ) { switch( GST_MESSAGE_TYPE( p_msg ) ){ case GST_MESSAGE_EOS: /* for debugging purpose */ msg_Warn( p_dec, "got unexpected eos" ); break; /* First buffer received */ case GST_MESSAGE_ASYNC_DONE: /* for debugging purpose */ p_sys->b_prerolled = true; msg_Dbg( p_dec, "Pipeline is prerolled" ); break; default: p_dec->b_error = default_msg_handler( p_dec, p_msg ); if( p_dec->b_error ) { gst_message_unref( p_msg ); goto done; } break; } gst_message_unref( p_msg ); } /* Look for any output buffers in the queue */ if( gst_atomic_queue_peek( p_sys->p_que ) ) { GstBuffer *p_buf = GST_BUFFER_CAST( gst_atomic_queue_pop( p_sys->p_que )); GstMemory *p_mem; if(( p_mem = gst_buffer_peek_memory( p_buf, 0 )) && GST_IS_VLC_PICTURE_PLANE_ALLOCATOR( p_mem->allocator )) { p_pic = picture_Hold(( (GstVlcPicturePlane*) p_mem )->p_pic ); } else { GstVideoFrame frame; /* Get a new picture */ p_pic = decoder_NewPicture( p_dec ); if( !p_pic ) goto done; if( unlikely( !gst_video_frame_map( &frame, &p_sys->vinfo, p_buf, GST_MAP_READ ) ) ) { msg_Err( p_dec, "failed to map gst video frame" ); gst_buffer_unref( p_buf ); p_dec->b_error = true; goto done; } gst_CopyPicture( p_pic, &frame ); gst_video_frame_unmap( &frame ); } 
if( likely( GST_BUFFER_PTS_IS_VALID( p_buf ) ) ) p_pic->date = gst_util_uint64_scale( GST_BUFFER_PTS( p_buf ), GST_MSECOND, GST_SECOND ); else msg_Warn( p_dec, "Gst Buffer has no timestamp" ); gst_buffer_unref( p_buf ); } done: *pp_block = NULL; return p_pic; }
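/*
 * Added illustration (not from the original source): the unit conversions
 * used throughout DecodeBlock. VLC timestamps are in microseconds and
 * GStreamer's in nanoseconds, so the scale factor in both directions is
 * GST_SECOND / GST_MSECOND == 1000.
 */
#include <gst/gst.h>

static GstClockTime
vlc_us_to_gst_ns (guint64 vlc_ts)
{
  return gst_util_uint64_scale (vlc_ts, GST_SECOND, GST_MSECOND);
}

static guint64
gst_ns_to_vlc_us (GstClockTime gst_ts)
{
  return gst_util_uint64_scale (gst_ts, GST_MSECOND, GST_SECOND);
}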
static GstFlowReturn gst_webvtt_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstWebvttEnc *webvttenc = GST_WEBVTT_ENC (parent); GstClockTime ts, dur = GST_SECOND; GstBuffer *new_buffer; GstMapInfo map_info; GstFlowReturn ret; GString *s; gsize buf_size; if (!webvttenc->pushed_header) { const char *header = "WEBVTT\n\n"; new_buffer = gst_buffer_new_wrapped (g_strdup (header), strlen (header)); GST_BUFFER_PTS (new_buffer) = GST_CLOCK_TIME_NONE; GST_BUFFER_DURATION (new_buffer) = GST_CLOCK_TIME_NONE; ret = gst_pad_push (webvttenc->srcpad, new_buffer); if (ret != GST_FLOW_OK) goto out; webvttenc->pushed_header = TRUE; } gst_object_sync_values (GST_OBJECT (webvttenc), GST_BUFFER_PTS (buf)); ts = GST_BUFFER_PTS (buf) + webvttenc->timestamp; if (GST_BUFFER_DURATION_IS_VALID (buf)) dur = GST_BUFFER_DURATION (buf) + webvttenc->duration; else if (webvttenc->duration > 0) dur = webvttenc->duration; else dur = GST_SECOND; buf_size = gst_buffer_get_size (buf); s = g_string_sized_new (50 + buf_size + 1 + 1); /* start_time --> end_time */ gst_webvtt_enc_append_timestamp_to_string (ts, s); g_string_append_printf (s, " --> "); gst_webvtt_enc_append_timestamp_to_string (ts + dur, s); g_string_append_c (s, '\n'); /* text */ if (gst_buffer_map (buf, &map_info, GST_MAP_READ)) { g_string_append_len (s, (const gchar *) map_info.data, map_info.size); gst_buffer_unmap (buf, &map_info); } g_string_append_c (s, '\n'); buf_size = s->len; new_buffer = gst_buffer_new_wrapped (g_string_free (s, FALSE), buf_size); GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buf); GST_BUFFER_DURATION (new_buffer) = GST_BUFFER_DURATION (buf); ret = gst_pad_push (webvttenc->srcpad, new_buffer); out: gst_buffer_unref (buf); return ret; }
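/*
 * Added illustration (hypothetical helper, not from the original source):
 * the timestamp rendering that gst_webvtt_enc_append_timestamp_to_string is
 * assumed to perform. WebVTT cue timings use HH:MM:SS.mmm, and a cue is the
 * timing line "start --> end" followed by the text and a blank line, as
 * built in the function above.
 */
#include <glib.h>
#include <gst/gst.h>

static void
append_webvtt_timestamp (GString * s, GstClockTime ts)
{
  guint h = (guint) (ts / (3600 * GST_SECOND));
  guint m = (guint) ((ts / (60 * GST_SECOND)) % 60);
  guint sec = (guint) ((ts / GST_SECOND) % 60);
  guint ms = (guint) ((ts / GST_MSECOND) % 1000);

  g_string_append_printf (s, "%02u:%02u:%02u.%03u", h, m, sec, ms);
}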
static gboolean gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean caching) { GstFragment *download; const gchar *next_fragment_uri; GstClockTime duration; GstClockTime timestamp; GstBuffer *buf; gboolean discont; if (!gst_m3u8_client_get_next_fragment (demux->client, &discont, &next_fragment_uri, &duration, &timestamp)) { GST_INFO_OBJECT (demux, "This playlist doesn't contain more fragments"); demux->end_of_playlist = TRUE; gst_task_start (demux->stream_task); return FALSE; } GST_INFO_OBJECT (demux, "Fetching next fragment %s", next_fragment_uri); download = gst_uri_downloader_fetch_uri (demux->downloader, next_fragment_uri); if (download == NULL) goto error; buf = gst_fragment_get_buffer (download); GST_BUFFER_DURATION (buf) = duration; GST_BUFFER_PTS (buf) = timestamp; /* We actually need to do this every time we switch bitrate */ if (G_UNLIKELY (demux->do_typefind)) { GstCaps *caps = gst_fragment_get_caps (download); if (!demux->input_caps || !gst_caps_is_equal (caps, demux->input_caps)) { gst_caps_replace (&demux->input_caps, caps); /* gst_pad_set_caps (demux->srcpad, demux->input_caps); */ GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT, demux->input_caps); demux->do_typefind = FALSE; } gst_caps_unref (caps); } else { gst_fragment_set_caps (download, demux->input_caps); } if (discont) { GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous"); GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT); } g_queue_push_tail (demux->queue, download); if (!caching) { GST_TASK_SIGNAL (demux->updates_task); gst_task_start (demux->stream_task); } return TRUE; error: { gst_hls_demux_stop (demux); return FALSE; } }
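/*
 * Added illustration (not from the original source): the fragment stamping
 * above condensed into one helper. Each downloaded fragment buffer carries
 * the playlist-derived timestamp and duration, and the first buffer after a
 * playlist discontinuity is flagged DISCONT so downstream elements resync.
 */
#include <gst/gst.h>

static void
stamp_fragment_buffer (GstBuffer * buf, GstClockTime pts, GstClockTime dur,
    gboolean discont)
{
  GST_BUFFER_PTS (buf) = pts;
  GST_BUFFER_DURATION (buf) = dur;
  if (discont)
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
}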
static GstFlowReturn gst_cc_extractor_handle_meta (GstCCExtractor * filter, GstBuffer * buf, GstVideoCaptionMeta * meta) { GstBuffer *outbuf = NULL; GstEvent *event; gchar *captionid; GstFlowReturn flow; GST_DEBUG_OBJECT (filter, "Handling meta"); /* Check if the meta type matches the configured one */ if (filter->captionpad != NULL && meta->caption_type != filter->caption_type) { GST_ERROR_OBJECT (filter, "GstVideoCaptionMeta type changed, Not handled currently"); flow = GST_FLOW_NOT_NEGOTIATED; goto out; } if (filter->captionpad == NULL) { GstCaps *caption_caps = NULL; GstEvent *stream_event; GST_DEBUG_OBJECT (filter, "Creating new caption pad"); switch (meta->caption_type) { case GST_VIDEO_CAPTION_TYPE_CEA608_RAW: caption_caps = gst_caps_from_string ("closedcaption/x-cea-608,format=(string)raw"); break; case GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW: caption_caps = gst_caps_from_string ("closedcaption/x-cea-608,format=(string)cc_data"); break; case GST_VIDEO_CAPTION_TYPE_CEA708_RAW: caption_caps = gst_caps_from_string ("closedcaption/x-cea-708,format=(string)cc_data"); break; case GST_VIDEO_CAPTION_TYPE_CEA708_CDP: caption_caps = gst_caps_from_string ("closedcaption/x-cea-708,format=(string)cdp"); break; default: break; } if (caption_caps == NULL) { GST_ERROR_OBJECT (filter, "Unknown/invalid caption type"); return GST_FLOW_NOT_NEGOTIATED; } /* Create the caption pad and set the caps */ filter->captionpad = gst_pad_new_from_static_template (&captiontemplate, "caption"); gst_pad_set_iterate_internal_links_function (filter->sinkpad, GST_DEBUG_FUNCPTR (gst_cc_extractor_iterate_internal_links)); gst_pad_set_active (filter->captionpad, TRUE); gst_element_add_pad (GST_ELEMENT (filter), filter->captionpad); gst_flow_combiner_add_pad (filter->combiner, filter->captionpad); captionid = gst_pad_create_stream_id (filter->captionpad, (GstElement *) filter, "caption"); stream_event = gst_event_new_stream_start (captionid); g_free (captionid); /* FIXME : Create a proper stream-id */ if ((event = gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_STREAM_START, 0))) { guint group_id; if (gst_event_parse_group_id (event, &group_id)) gst_event_set_group_id (stream_event, group_id); gst_event_unref (event); } gst_pad_push_event (filter->captionpad, stream_event); gst_pad_set_caps (filter->captionpad, caption_caps); gst_caps_unref (caption_caps); /* Carry over sticky events */ if ((event = gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_SEGMENT, 0))) gst_pad_push_event (filter->captionpad, event); if ((event = gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_TAG, 0))) gst_pad_push_event (filter->captionpad, event); filter->caption_type = meta->caption_type; } GST_DEBUG_OBJECT (filter, "Creating new buffer of size %" G_GSIZE_FORMAT " bytes", meta->size); /* Extract caption data into new buffer with identical buffer timestamps */ outbuf = gst_buffer_new_allocate (NULL, meta->size, NULL); gst_buffer_fill (outbuf, 0, meta->data, meta->size); GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf); GST_BUFFER_DTS (outbuf) = GST_BUFFER_DTS (buf); GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf); /* We don't really care about the flow return */ flow = gst_pad_push (filter->captionpad, outbuf); out: /* Set flow return on pad and return combined value */ return gst_flow_combiner_update_pad_flow (filter->combiner, filter->captionpad, flow); }
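/*
 * Added illustration (not from the original source): the caption-type to
 * caps mapping from the switch above, rewritten as a lookup table. The enum
 * names and caps strings are taken verbatim from the function; a table keeps
 * the mapping in one place if more CEA variants are ever added.
 */
#include <gst/video/video.h>

static const struct
{
  GstVideoCaptionType type;
  const gchar *caps;
} caption_caps_map[] = {
  {GST_VIDEO_CAPTION_TYPE_CEA608_RAW,
      "closedcaption/x-cea-608,format=(string)raw"},
  {GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW,
      "closedcaption/x-cea-608,format=(string)cc_data"},
  {GST_VIDEO_CAPTION_TYPE_CEA708_RAW,
      "closedcaption/x-cea-708,format=(string)cc_data"},
  {GST_VIDEO_CAPTION_TYPE_CEA708_CDP,
      "closedcaption/x-cea-708,format=(string)cdp"},
};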
static void gst_hls_demux_stream_loop (GstHLSDemux * demux) { GstFragment *fragment; GstBuffer *buf; GstFlowReturn ret; GstCaps *bufcaps, *srccaps = NULL; /* Loop for the source pad task. The task is started when we have * received the main playlist from the source element. It first tries to * cache the initial fragments and then waits until more data is available * in the queue. This task is woken up when we push a new fragment to the * queue or when we reach the end of the playlist */ if (G_UNLIKELY (demux->need_cache)) { if (!gst_hls_demux_cache_fragments (demux)) goto cache_error; /* we can now start the updates thread (only if in PLAYING) */ if (GST_STATE (demux) == GST_STATE_PLAYING) gst_task_start (demux->updates_task); GST_INFO_OBJECT (demux, "First fragments cached successfully"); } if (g_queue_is_empty (demux->queue)) { if (demux->end_of_playlist) goto end_of_playlist; goto pause_task; } fragment = g_queue_pop_head (demux->queue); buf = gst_fragment_get_buffer (fragment); /* Figure out if we need to create/switch pads */ if (G_LIKELY (demux->srcpad)) srccaps = gst_pad_get_current_caps (demux->srcpad); bufcaps = gst_fragment_get_caps (fragment); if (G_UNLIKELY (!srccaps || !gst_caps_is_equal_fixed (bufcaps, srccaps) || demux->need_segment)) { switch_pads (demux, bufcaps); demux->need_segment = TRUE; } gst_caps_unref (bufcaps); if (G_LIKELY (srccaps)) gst_caps_unref (srccaps); g_object_unref (fragment); if (demux->need_segment) { GstSegment segment; GstClockTime start = GST_BUFFER_PTS (buf); start += demux->position_shift; /* And send a newsegment */ GST_DEBUG_OBJECT (demux, "Sending new-segment. segment start:%" GST_TIME_FORMAT, GST_TIME_ARGS (start)); gst_segment_init (&segment, GST_FORMAT_TIME); segment.start = start; segment.time = start; gst_pad_push_event (demux->srcpad, gst_event_new_segment (&segment)); demux->need_segment = FALSE; demux->position_shift = 0; } ret = gst_pad_push (demux->srcpad, buf); if (ret != GST_FLOW_OK) goto error_pushing; return; end_of_playlist: { GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS"); gst_pad_push_event (demux->srcpad, gst_event_new_eos ()); gst_hls_demux_stop (demux); return; } cache_error: { gst_task_pause (demux->stream_task); if (!demux->cancelled) { GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND, ("Could not cache the first fragments"), (NULL)); gst_hls_demux_stop (demux); } return; } error_pushing: { /* FIXME: handle error */ GST_DEBUG_OBJECT (demux, "Error pushing buffer: %s... stopping task", gst_flow_get_name (ret)); gst_hls_demux_stop (demux); return; } pause_task: { gst_task_pause (demux->stream_task); return; } }
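/*
 * Added illustration (not from the original source): the segment update done
 * above, isolated. After a pad switch the demuxer opens a new GST_FORMAT_TIME
 * segment whose start and time both equal the next buffer's PTS (plus any
 * pending position shift left over from a seek).
 */
#include <gst/gst.h>

static void
push_time_segment (GstPad * srcpad, GstClockTime start)
{
  GstSegment segment;

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = start;
  segment.time = start;
  gst_pad_push_event (srcpad, gst_event_new_segment (&segment));
}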
static GstFlowReturn gst_multi_file_sink_render (GstBaseSink * bsink, GstBuffer * buffer) { GstMultiFileSink *sink = GST_MULTI_FILE_SINK (bsink); GstFlowReturn flow = GST_FLOW_OK; gboolean key_unit, header; header = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER); key_unit = !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); if (sink->aggregate_gops) { GstBuffer *gop_buffer = NULL; guint avail; avail = gst_adapter_available (sink->gop_adapter); GST_LOG_OBJECT (sink, "aggregate GOP: received %s%s unit buffer: " "%" GST_PTR_FORMAT, (key_unit) ? "key" : "delta", (header) ? " header" : "", buffer); /* If it's a header buffer, it might potentially be for the next GOP */ if (header) { GST_LOG_OBJECT (sink, "Accumulating buffer to potential next GOP"); sink->potential_next_gop = g_list_append (sink->potential_next_gop, gst_buffer_ref (buffer)); } else { if (key_unit && avail > 0) { GstClockTime pts, dts; GST_LOG_OBJECT (sink, "Grabbing pending completed GOP"); pts = gst_adapter_prev_pts_at_offset (sink->gop_adapter, 0, NULL); dts = gst_adapter_prev_dts_at_offset (sink->gop_adapter, 0, NULL); gop_buffer = gst_adapter_take_buffer (sink->gop_adapter, avail); GST_BUFFER_PTS (gop_buffer) = pts; GST_BUFFER_DTS (gop_buffer) = dts; } /* just accumulate the buffer */ if (sink->potential_next_gop) { GList *tmp; GST_LOG_OBJECT (sink, "Carrying over pending next GOP data into adapter"); /* If we have pending data, put that first in the adapter */ for (tmp = sink->potential_next_gop; tmp; tmp = tmp->next) { GstBuffer *tmpb = (GstBuffer *) tmp->data; gst_adapter_push (sink->gop_adapter, tmpb); } g_list_free (sink->potential_next_gop); sink->potential_next_gop = NULL; } GST_LOG_OBJECT (sink, "storing buffer in adapter"); gst_adapter_push (sink->gop_adapter, gst_buffer_ref (buffer)); if (gop_buffer != NULL) { GST_DEBUG_OBJECT (sink, "writing out pending GOP, %u bytes", avail); GST_DEBUG_OBJECT (sink, "gop buffer pts:%" GST_TIME_FORMAT " dts:%" GST_TIME_FORMAT " duration:%" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (gop_buffer)), GST_TIME_ARGS (GST_BUFFER_DTS (gop_buffer)), GST_TIME_ARGS (GST_BUFFER_DURATION (gop_buffer))); flow = gst_multi_file_sink_write_buffer (sink, gop_buffer); gst_buffer_unref (gop_buffer); } } } else { flow = gst_multi_file_sink_write_buffer (sink, buffer); } return flow; }
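/*
 * Added illustration (not from the original source): the GOP boundary test
 * used by the aggregation path above. A buffer opens a new GOP when it is a
 * key unit; header buffers are kept aside because they may belong to the
 * next GOP rather than the one currently being accumulated.
 */
#include <gst/gst.h>

static gboolean
buffer_starts_new_gop (GstBuffer * buf)
{
  gboolean header = GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_HEADER);
  gboolean key_unit =
      !GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);

  return key_unit && !header;
}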
static BOOL tsmf_gstreamer_decodeEx(ITSMFDecoder* decoder, const BYTE *data, UINT32 data_size, UINT32 extensions, UINT64 start_time, UINT64 end_time, UINT64 duration) { GstBuffer *gst_buf; TSMFGstreamerDecoder* mdecoder = (TSMFGstreamerDecoder *) decoder; UINT64 sample_time = tsmf_gstreamer_timestamp_ms_to_gst(start_time); UINT64 sample_duration = tsmf_gstreamer_timestamp_ms_to_gst(duration); if (!mdecoder) { WLog_ERR(TAG, "Decoder not initialized!"); return FALSE; } /* * This function is always called from a stream-specific thread. * It should be alright to block here if necessary. * We don't expect to block here often, since the pipeline should * have more than enough buffering. */ DEBUG_TSMF("%s. Start:(%llu) End:(%llu) Duration:(%llu) Last End:(%llu)", get_type(mdecoder), start_time, end_time, duration, mdecoder->last_sample_end_time); if (mdecoder->gst_caps == NULL) { WLog_ERR(TAG, "tsmf_gstreamer_set_format not called or invalid format."); return FALSE; } if (!mdecoder->src) { WLog_ERR(TAG, "failed to construct pipeline correctly. Unable to push buffer to source element."); return FALSE; } gst_buf = tsmf_get_buffer_from_data(data, data_size); if (gst_buf == NULL) { WLog_ERR(TAG, "tsmf_get_buffer_from_data(%p, %d) failed.", data, data_size); return FALSE; } if (mdecoder->pipeline_start_time_valid) { long long diff = start_time; diff -= mdecoder->last_sample_end_time; if (diff < 0) diff *= -1; /* The pipe is initialized, but there is a discontinuity. * Seek to the start position... */ if (diff > 50) { DEBUG_TSMF("%s seeking to %lld", get_type(mdecoder), start_time); if (!gst_element_seek(mdecoder->pipe, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, sample_time, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) { WLog_ERR(TAG, "seek failed"); } mdecoder->pipeline_start_time_valid = 0; } } else { DEBUG_TSMF("%s start time %llu", get_type(mdecoder), sample_time); mdecoder->pipeline_start_time_valid = 1; } #if GST_VERSION_MAJOR > 0 GST_BUFFER_PTS(gst_buf) = sample_time; #else GST_BUFFER_TIMESTAMP(gst_buf) = sample_time; #endif GST_BUFFER_DURATION(gst_buf) = sample_duration; gst_app_src_push_buffer(GST_APP_SRC(mdecoder->src), gst_buf); if (mdecoder->ack_cb) mdecoder->ack_cb(mdecoder->stream, TRUE); mdecoder->last_sample_end_time = end_time; if (GST_STATE(mdecoder->pipe) != GST_STATE_PLAYING) { DEBUG_TSMF("%s: state=%s", get_type(mdecoder), gst_element_state_get_name(GST_STATE(mdecoder->pipe))); if (!mdecoder->paused && !mdecoder->shutdown && mdecoder->ready) tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING); } return TRUE; }
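/*
 * Added illustration (not from the original source): the discontinuity test
 * above, using an unsigned-safe absolute difference. A gap of more than 50
 * timestamp units between this sample's start and the previous sample's end
 * triggers the flushing, accurate seek. guint64 stands in for FreeRDP's
 * UINT64 here so the sketch stays self-contained.
 */
#include <glib.h>

static gboolean
sample_is_discontinuous (guint64 start_time, guint64 last_end_time,
    guint64 tolerance)
{
  guint64 diff = (start_time > last_end_time)
      ? start_time - last_end_time : last_end_time - start_time;

  return diff > tolerance;
}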