/* Depayloads one MPEG-audio RTP packet (RFC 2250): strips the 4-byte
 * payload header and returns the remaining MPEG data as a new buffer.
 * Returns NULL (after emitting a warning) when the payload holds no data
 * beyond the header. */
static GstBuffer *
gst_rtp_mpa_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpMPADepay *self = GST_RTP_MPA_DEPAY (depayload);
  GstRTPBuffer rtp = { NULL };
  GstBuffer *outbuf;
  gint plen;

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  plen = gst_rtp_buffer_get_payload_len (&rtp);
  if (plen <= 4) {
    /* nothing beyond the fixed header: warn and drop the packet */
    GST_ELEMENT_WARNING (self, STREAM, DECODE,
        ("Empty Payload."), (NULL));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }

  /* The payload starts with a 4-byte header:
   *
   *  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |             MBZ               |          Frag_offset          |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   *
   * Take everything after it as a sub-buffer. */
  outbuf = gst_rtp_buffer_get_payload_subbuffer (&rtp, 4, -1);

  /* marker bit marks the start of a talkspurt */
  if (gst_rtp_buffer_get_marker (&rtp))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);

  GST_DEBUG_OBJECT (self,
      "gst_rtp_mpa_depay_chain: pushing buffer of size %" G_GSIZE_FORMAT "",
      gst_buffer_get_size (outbuf));

  gst_rtp_buffer_unmap (&rtp);

  /* FIXME, we can push half mpeg frames when they are split over multiple
   * RTP packets */
  return outbuf;
}
/* Pushes one decoded frame downstream.  For regular (non decode-only)
 * frames this renegotiates the src pad if the surface or display
 * resolution changed, allocates the output buffer from the surface
 * proxy (dmabuf or vaapi-meta path), copies the surface flags onto the
 * buffer, and finally hands the frame to the GstVideoDecoder base class.
 * Returns GST_FLOW_OK on success or an error flow return. */
static GstFlowReturn
gst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * out_frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiSurfaceProxy *proxy;
  GstVaapiSurface *surface;
  GstFlowReturn ret;
  const GstVaapiRectangle *crop_rect;
  GstVaapiVideoMeta *meta;
  GstBufferPoolAcquireParams *params = NULL;
  GstVaapiVideoBufferPoolAcquireParams vaapi_params = { {0,}, };
  guint flags, out_flags = 0;
  gboolean alloc_renegotiate, caps_renegotiate;

  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {
    /* the surface proxy was attached to the frame when decoding finished */
    proxy = gst_video_codec_frame_get_user_data (out_frame);
    surface = GST_VAAPI_SURFACE_PROXY_SURFACE (proxy);
    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);

    /* In theory we are not supposed to check for a surface resolution
     * change here since it should be advertised beforehand from libgstvaapi.
     * But there are issues, especially for some vp9 streams where the
     * upstream element sets un-cropped values in set_format(), which makes
     * everything a mess.  So better do the explicit check here irrespective
     * of what notification we get from upstream or libgstvaapi.  Also, even
     * if we received a notification from libgstvaapi, the frame being pushed
     * at this point might not have the notified resolution yet if there are
     * queued frames in the decoded picture buffer. */
    alloc_renegotiate = is_surface_resolution_changed (decode, surface);
    caps_renegotiate = is_display_resolution_changed (decode, crop_rect);
    if (gst_pad_needs_reconfigure (GST_VIDEO_DECODER_SRC_PAD (vdec))
        || alloc_renegotiate || caps_renegotiate || decode->do_renego) {
      g_atomic_int_set (&decode->do_renego, FALSE);
      if (!gst_vaapidecode_negotiate (decode))
        return GST_FLOW_ERROR;
    }

    /* keep the decoder alive as long as the proxy is; released via
     * gst_vaapidecode_release when the proxy is destroyed */
    gst_vaapi_surface_proxy_set_destroy_notify (proxy,
        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));

    if (is_src_allocator_dmabuf (decode)) {
      /* dmabuf path: the pool needs the proxy at acquire time */
      vaapi_params.proxy = gst_vaapi_surface_proxy_ref (proxy);
      params = (GstBufferPoolAcquireParams *) & vaapi_params;
    }
    ret = gst_video_decoder_allocate_output_frame_with_params (vdec, out_frame,
        params);
    if (params)
      gst_vaapi_surface_proxy_unref (vaapi_params.proxy);
    if (ret != GST_FLOW_OK)
      goto error_create_buffer;

    /* if not dmabuf is negotiated set the vaapi video meta in the
     * proxy */
    if (!params) {
      meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);
      if (!meta)
        goto error_get_meta;
      gst_vaapi_video_meta_set_surface_proxy (meta, proxy);
    }

    /* translate surface-proxy flags into buffer flags */
    flags = gst_vaapi_surface_proxy_get_flags (proxy);
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)
      out_flags |= GST_BUFFER_FLAG_CORRUPTED;
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {
      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)
        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;
    }
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);

    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);
    }
#if (USE_GLX || USE_EGL)
    if (decode->has_texture_upload_meta)
      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);
#endif
  }

  /* reverse playback: only keyframes are pushed, others are dropped */
  if (decode->in_segment.rate < 0.0
      && !GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (out_frame)) {
    GST_TRACE_OBJECT (decode, "drop frame in reverse playback");
    gst_video_decoder_release_frame (GST_VIDEO_DECODER (decode), out_frame);
    return GST_FLOW_OK;
  }

  ret = gst_video_decoder_finish_frame (vdec, out_frame);
  if (ret != GST_FLOW_OK)
    goto error_commit_buffer;
  return GST_FLOW_OK;

  /* ERRORS */
error_create_buffer:
  {
    const GstVaapiID surface_id =
        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));

    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to create sink buffer"),
        ("video sink failed to create video buffer for proxy'ed "
            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));
    gst_video_decoder_drop_frame (vdec, out_frame);
    return GST_FLOW_ERROR;
  }
error_get_meta:
  {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to get vaapi video meta attached to video buffer"),
        ("Failed to get vaapi video meta attached to video buffer"));
    gst_video_decoder_drop_frame (vdec, out_frame);
    return GST_FLOW_ERROR;
  }
error_commit_buffer:
  {
    GST_INFO_OBJECT (decode, "downstream element rejected the frame (%s [%d])",
        gst_flow_get_name (ret), ret);
    return ret;
  }
}
/* GstPushSrc::create implementation: waits for a captured audio packet,
 * wraps its samples in a zero-copy GstBuffer (keeping a ref on the
 * DeckLink packet and input via AudioPacket), then applies
 * jitter/discontinuity timestamp smoothing modelled on audiobasesrc. */
static GstFlowReturn
gst_decklink_audio_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkAudioSrc *self = GST_DECKLINK_AUDIO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  glong sample_count;
  gsize data_size;
  CapturePacket *p;
  AudioPacket *ap;
  GstClockTime timestamp, duration;
  GstClockTime start_time, end_time;
  guint64 start_offset, end_offset;
  gboolean discont = FALSE;

  /* block until the capture callback queued a packet, or we are flushing */
  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_packets) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }
  p = (CapturePacket *) g_queue_pop_head (&self->current_packets);
  g_mutex_unlock (&self->lock);

  /* NOTE(review): self->flushing is read here outside the lock —
   * presumably set under the lock by the unlock/flush path; verify. */
  if (self->flushing) {
    if (p)
      capture_packet_free (p);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  p->packet->GetBytes ((gpointer *) & data);
  sample_count = p->packet->GetSampleFrameCount ();
  data_size = self->info.bpf * sample_count;

  /* wrap the DeckLink-owned sample data without copying; audio_packet_free
   * drops the refs taken below when the GstBuffer is destroyed */
  ap = (AudioPacket *) g_malloc0 (sizeof (AudioPacket));
  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, ap,
      (GDestroyNotify) audio_packet_free);
  ap->packet = p->packet;
  p->packet->AddRef ();
  ap->input = self->input->input;
  ap->input->AddRef ();

  timestamp = p->capture_time;

  // Jitter and discontinuity handling, based on audiobasesrc
  start_time = timestamp;

  // Convert to the sample numbers
  start_offset =
      gst_util_uint64_scale (start_time, self->info.rate, GST_SECOND);
  end_offset = start_offset + sample_count;
  end_time =
      gst_util_uint64_scale_int (end_offset, GST_SECOND, self->info.rate);
  duration = end_time - start_time;

  /* next_offset == -1 means we have not produced anything yet */
  if (self->next_offset == (guint64) - 1) {
    discont = TRUE;
  } else {
    guint64 diff, max_sample_diff;

    // Check discont
    if (start_offset <= self->next_offset)
      diff = self->next_offset - start_offset;
    else
      diff = start_offset - self->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (self->alignment_threshold, self->info.rate,
        GST_SECOND);

    // Discont!
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      /* with discont_wait set, only resync after the drift persisted
       * for at least discont_wait of stream time */
      if (self->discont_wait > 0) {
        if (self->discont_time == GST_CLOCK_TIME_NONE) {
          self->discont_time = start_time;
        } else if (start_time - self->discont_time >= self->discont_wait) {
          discont = TRUE;
          self->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (self->discont_time != GST_CLOCK_TIME_NONE)) {
      // we have had a discont, but are now back on track!
      self->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    // Have discont, need resync and use the capture timestamps
    if (self->next_offset != (guint64) - 1)
      GST_INFO_OBJECT (self, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT, self->next_offset,
          start_offset);
    GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_DISCONT);
    self->next_offset = end_offset;
  } else {
    // No discont, just keep counting
    /* derive a perfectly contiguous timestamp/duration from the running
     * sample count instead of the (jittery) capture time */
    self->discont_time = GST_CLOCK_TIME_NONE;
    timestamp =
        gst_util_uint64_scale (self->next_offset, GST_SECOND, self->info.rate);
    self->next_offset += sample_count;
    duration =
        gst_util_uint64_scale (self->next_offset, GST_SECOND,
        self->info.rate) - timestamp;
  }

  GST_BUFFER_TIMESTAMP (*buffer) = timestamp;
  GST_BUFFER_DURATION (*buffer) = duration;

  GST_DEBUG_OBJECT (self, "Outputting buffer %p with timestamp %"
      GST_TIME_FORMAT " and duration %" GST_TIME_FORMAT, *buffer,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_packet_free (p);

  return flow_ret;
}
/* Pushes (a writable copy of) the oldest stored buffer downstream,
 * stamping it with the interpolated output timestamp/duration and the
 * pending DISCONT/GAP flags.  @duplicate marks the push as a repeated
 * frame (GAP).  Returns the flow result of the push, or GST_FLOW_OK if
 * there is nothing buffered yet. */
static GstFlowReturn
gst_video_rate_flush_prev (GstVideoRate * videorate, gboolean duplicate)
{
  GstBuffer *out;
  GstClockTime out_ts;

  if (videorate->prevbuf == NULL) {
    /* EOS (or flush) without ever seeing a buffer: nothing to push */
    GST_INFO_OBJECT (videorate, "got EOS before any buffer was received");
    return GST_FLOW_OK;
  }

  /* take our own writable copy so we may touch the metadata */
  out = gst_buffer_make_writable (gst_buffer_ref (videorate->prevbuf));
  GST_BUFFER_OFFSET (out) = videorate->out;
  GST_BUFFER_OFFSET_END (out) = videorate->out + 1;

  /* propagate (and consume) a pending discont; otherwise make sure the
   * stale flag from the stored buffer is cleared */
  if (videorate->discont) {
    videorate->discont = FALSE;
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
  } else {
    GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DISCONT);
  }

  if (duplicate)
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_GAP);
  else
    GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_GAP);

  /* this is the timestamp we put on the buffer */
  out_ts = videorate->next_ts;

  videorate->out++;
  videorate->out_frame_count++;
  if (videorate->to_rate_numerator) {
    /* interpolate next expected timestamp in the segment */
    videorate->next_ts =
        videorate->segment.base + videorate->segment.start +
        videorate->base_ts +
        gst_util_uint64_scale (videorate->out_frame_count,
        videorate->to_rate_denominator * GST_SECOND,
        videorate->to_rate_numerator);
    GST_BUFFER_DURATION (out) = videorate->next_ts - out_ts;
  }

  /* We do not need to update time in VFR (variable frame rate) mode */
  if (!videorate->drop_only) {
    /* adapt for looping, bring back to time in current segment. */
    GST_BUFFER_TIMESTAMP (out) = out_ts - videorate->segment.base;
  }

  GST_LOG_OBJECT (videorate,
      "old is best, dup, pushing buffer outgoing ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (out_ts));

  return gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (videorate), out);
}
/* GstBaseTransform::transform implementation that runs the wrapped
 * buzzmachine's mono-to-stereo work function over the input buffer.
 * Handles sub-tick sequencing (parameter smoothing), scales the input
 * to the machine's native [-32768,32768] range, processes the audio in
 * MAX_BUFFER_LENGTH (256 sample) segments, and marks the output buffer
 * GAP when the machine produced only silence.
 * Fix: unmap the input buffer when mapping the output buffer fails
 * (previously leaked the input mapping on that error path). */
static GstFlowReturn
gst_bml_transform_transform_mono_to_stereo (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstMapInfo infoi, infoo;
  GstBMLTransform *bml_transform = GST_BML_TRANSFORM (base);
  GstBMLTransformClass *klass = GST_BML_TRANSFORM_GET_CLASS (bml_transform);
  GstBML *bml = GST_BML (bml_transform);
  GstBMLClass *bml_class = GST_BML_CLASS (klass);
  BMLData *datai, *datao, *seg_datai, *seg_datao;
  gpointer bm = bml->bm;
  guint todo, seg_size, samples_per_buffer;
  gboolean has_data;
  guint mode = 3;               /*WM_READWRITE */

  bml->running_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (inbuf));

  /* a discont restarts the sub-tick sequence */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DISCONT)) {
    bml->subtick_count = (!bml->reverse) ? bml->subticks_per_tick : 1;
  }

  /* advance the machine one tick when a full set of subticks elapsed */
  if (bml->subtick_count >= bml->subticks_per_tick) {
    bml (gstbml_reset_triggers (bml, bml_class));
    bml (gstbml_sync_values (bml, bml_class, GST_BUFFER_TIMESTAMP (outbuf)));
    bml (tick (bm));
    bml->subtick_count = 1;
  } else {
    bml->subtick_count++;
  }

  /* don't process data in passthrough-mode */
  if (gst_base_transform_is_passthrough (base)) {
    /* we would actually need to convert mono to stereo here
     * but this is not even called */
    GST_WARNING_OBJECT (bml_transform, "m2s in passthrough mode");
  }

  if (!gst_buffer_map (inbuf, &infoi, GST_MAP_READ)) {
    GST_WARNING_OBJECT (base, "unable to map input buffer for read");
    return GST_FLOW_ERROR;
  }
  datai = (BMLData *) infoi.data;
  samples_per_buffer = infoi.size / sizeof (BMLData);
  if (!gst_buffer_map (outbuf, &infoo, GST_MAP_READ | GST_MAP_WRITE)) {
    GST_WARNING_OBJECT (base, "unable to map output buffer for read & write");
    /* bugfix: don't leak the input mapping on this error path */
    gst_buffer_unmap (inbuf, &infoi);
    return GST_FLOW_ERROR;
  }
  datao = (BMLData *) infoo.data;

  /* some buzzmachines expect a cleared buffer */
  memset (datao, 0, samples_per_buffer * 2 * sizeof (BMLData));

  /* if buffer has only silence process with different mode */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP)) {
    mode = 2;                   /* WM_WRITE */
  } else {
    /* scale from normalized floats to the machine's native range */
    gfloat fc = 32768.0;
    orc_scalarmultiply_f32_ns (datai, datai, fc, samples_per_buffer);
  }

  GST_DEBUG_OBJECT (bml_transform, " calling work_m2s(%d,%d)",
      samples_per_buffer, mode);

  /* process in segments of at most 256 samples
   * (256 is MachineInterface.h::MAX_BUFFER_LENGTH) */
  todo = samples_per_buffer;
  seg_datai = datai;
  seg_datao = datao;
  has_data = FALSE;
  while (todo) {
    seg_size = (todo > 256) ? 256 : todo;
    /* output is interleaved stereo, hence twice the stride */
    has_data |= bml (work_m2s (bm, seg_datai, seg_datao, (int) seg_size,
            mode));
    seg_datai = &seg_datai[seg_size];
    seg_datao = &seg_datao[seg_size * 2];
    todo -= seg_size;
  }
  if (gstbml_fix_data ((GstElement *) bml_transform, &infoo, has_data)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  gst_buffer_unmap (inbuf, &infoi);
  gst_buffer_unmap (outbuf, &infoo);
  return GST_FLOW_OK;
}
/* Dequeues one filled buffer from the V4L2 device (VIDIOC_DQBUF),
 * matches it back to the GstBuffer stored in the pool at the dequeued
 * index, marks it outstanding, and stamps interlacing flags, size and
 * timestamp from the v4l2_buffer.  On success *buffer holds the
 * dequeued buffer and GST_FLOW_OK is returned; errors map to the flow
 * returns of the labelled error paths below. */
static GstFlowReturn
gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
{
  GstFlowReturn res;
  GstBuffer *outbuf;
  struct v4l2_buffer vbuffer;
  GstV4l2Object *obj = pool->obj;
  GstClockTime timestamp;

  if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
    /* select works for input devices when data is available. According to the
     * specs we can also poll to find out when a frame has been displayed but
     * that just seems to lock up here */
    if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
      goto poll_error;
  }

  memset (&vbuffer, 0x00, sizeof (vbuffer));
  vbuffer.type = obj->type;
  vbuffer.memory = V4L2_MEMORY_MMAP;

  GST_LOG_OBJECT (pool, "doing DQBUF");
  if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &vbuffer) < 0)
    goto error;

  /* get our GstBuffer with that index from the pool, if the buffer was
   * outstanding we have a serious problem. */
  outbuf = pool->buffers[vbuffer.index];
  if (outbuf == NULL)
    goto no_buffer;

  /* mark the buffer outstanding */
  pool->buffers[vbuffer.index] = NULL;
  pool->num_queued--;

  timestamp = GST_TIMEVAL_TO_TIME (vbuffer.timestamp);

  GST_LOG_OBJECT (pool,
      "dequeued buffer %p seq:%d (ix=%d), used %d, flags %08x, ts %"
      GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf, vbuffer.sequence,
      vbuffer.index, vbuffer.bytesused, vbuffer.flags,
      GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);

  /* set top/bottom field first if v4l2_buffer has the information */
  if (vbuffer.field == V4L2_FIELD_INTERLACED_TB) {
    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
  }
  if (vbuffer.field == V4L2_FIELD_INTERLACED_BT) {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* the number of bytes used can change at every frame, esp. with jpeg;
   * for capture use the actual payload size, otherwise the full length */
  if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
    gst_buffer_resize (outbuf, 0, vbuffer.bytesused);
  else
    gst_buffer_resize (outbuf, 0, vbuffer.length);

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  *buffer = outbuf;

  return GST_FLOW_OK;

  /* ERRORS */
poll_error:
  {
    GST_DEBUG_OBJECT (pool, "poll error %s", gst_flow_get_name (res));
    return res;
  }
error:
  {
    GST_WARNING_OBJECT (pool,
        "problem dequeuing frame %d (ix=%d), pool-ct=%d, buf.flags=%d",
        vbuffer.sequence, vbuffer.index, GST_MINI_OBJECT_REFCOUNT (pool),
        vbuffer.flags);

    /* map errno from the failed VIDIOC_DQBUF onto a useful diagnostic */
    switch (errno) {
      case EAGAIN:
        GST_WARNING_OBJECT (pool,
            "Non-blocking I/O has been selected using O_NONBLOCK and"
            " no buffer was in the outgoing queue. device %s", obj->videodev);
        break;
      case EINVAL:
        GST_ERROR_OBJECT (pool,
            "The buffer type is not supported, or the index is out of bounds, "
            "or no buffers have been allocated yet, or the userptr "
            "or length are invalid. device %s", obj->videodev);
        break;
      case ENOMEM:
        GST_ERROR_OBJECT (pool,
            "insufficient memory to enqueue a user pointer buffer");
        break;
      case EIO:
        GST_INFO_OBJECT (pool,
            "VIDIOC_DQBUF failed due to an internal error."
            " Can also indicate temporary problems like signal loss."
            " Note the driver might dequeue an (empty) buffer despite"
            " returning an error, or even stop capturing."
            " device %s", obj->videodev);
        /* have we de-queued a buffer ? */
        if (!(vbuffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
          GST_DEBUG_OBJECT (pool, "reenqueing buffer");
          /* FIXME ... should we do something here? */
        }
        break;
      case EINTR:
        GST_WARNING_OBJECT (pool, "could not sync on a buffer on device %s",
            obj->videodev);
        break;
      default:
        GST_WARNING_OBJECT (pool,
            "Grabbing frame got interrupted on %s unexpectedly. %d: %s.",
            obj->videodev, errno, g_strerror (errno));
        break;
    }
    return GST_FLOW_ERROR;
  }
no_buffer:
  {
    GST_ERROR_OBJECT (pool, "No free buffer found in the pool at index %d.",
        vbuffer.index);
    return GST_FLOW_ERROR;
  }
}
/* Sinkpad task function driving the whole element as a state machine.
 * Each invocation performs exactly one step and returns: (1) learn the
 * upstream MIDI size, (2) pull the MIDI data into memory, (3) parse the
 * song and send the initial newsegment/tags, (4) handle a pending
 * segment change or seek, then (5) render and push one audio buffer.
 * On EOS or error the task pauses itself. */
static void
gst_timidity_loop (GstPad * sinkpad)
{
  GstTimidity *timidity = GST_TIMIDITY (GST_PAD_PARENT (sinkpad));
  GstBuffer *out;
  GstFlowReturn ret;

  /* step 1: determine how much MIDI data upstream has for us */
  if (timidity->mididata_size == 0) {
    if (!gst_timidity_get_upstream_size (timidity, &timidity->mididata_size)) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to get song length"));
      goto paused;
    }

    if (timidity->mididata)
      g_free (timidity->mididata);

    timidity->mididata = g_malloc (timidity->mididata_size);
    timidity->mididata_offset = 0;
    return;
  }

  /* step 2: pull the song into the mididata buffer, one range at a time */
  if (timidity->mididata_offset < timidity->mididata_size) {
    GstBuffer *buffer;
    gint64 size;

    GST_DEBUG_OBJECT (timidity, "loading song");

    ret =
        gst_pad_pull_range (timidity->sinkpad, timidity->mididata_offset,
        -1, &buffer);
    if (ret != GST_FLOW_OK) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to load song"));
      goto paused;
    }

    /* clamp to the remaining space in the preallocated buffer */
    size = timidity->mididata_size - timidity->mididata_offset;
    if (GST_BUFFER_SIZE (buffer) < size)
      size = GST_BUFFER_SIZE (buffer);

    memmove (timidity->mididata + timidity->mididata_offset,
        GST_BUFFER_DATA (buffer), size);
    gst_buffer_unref (buffer);

    timidity->mididata_offset += size;
    GST_DEBUG_OBJECT (timidity, "Song loaded");
    return;
  }

  /* step 3: parse the loaded MIDI data and announce segment + tags */
  if (!timidity->song) {
    MidIStream *stream;
    GstTagList *tags = NULL;
    gchar *text;

    GST_DEBUG_OBJECT (timidity, "Parsing song");

    stream =
        mid_istream_open_mem (timidity->mididata, timidity->mididata_size, 0);

    timidity->song = mid_song_load (stream, timidity->song_options);
    mid_istream_close (stream);

    if (!timidity->song) {
      GST_ELEMENT_ERROR (timidity, STREAM, DECODE, (NULL),
          ("Unable to parse midi"));
      goto paused;
    }

    mid_song_start (timidity->song);
    timidity->o_len = (GST_MSECOND *
        (GstClockTime) mid_song_get_total_time (timidity->song))
        / timidity->time_per_frame;
    gst_segment_set_newsegment (timidity->o_segment, FALSE, 1.0,
        GST_FORMAT_DEFAULT, 0, GST_CLOCK_TIME_NONE, 0);

    gst_pad_push_event (timidity->srcpad,
        gst_timidity_get_new_segment_event (timidity, GST_FORMAT_TIME, FALSE));

    /* extract tags */
    text = mid_song_get_meta (timidity->song, MID_SONG_TEXT);
    if (text) {
      tags = gst_tag_list_new ();
      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, text, NULL);
      /* NOTE(review): text is not freed here (g_free call was commented
       * out) — presumably the string is owned by libtimidity; confirm. */
      //g_free (text);
    }

    text = mid_song_get_meta (timidity->song, MID_SONG_COPYRIGHT);
    if (text) {
      if (tags == NULL)
        tags = gst_tag_list_new ();
      gst_tag_list_add (tags, GST_TAG_MERGE_APPEND,
          GST_TAG_COPYRIGHT, text, NULL);
      /* NOTE(review): same ownership assumption as above */
      //g_free (text);
    }

    if (tags) {
      gst_element_found_tags (GST_ELEMENT (timidity), tags);
    }

    GST_DEBUG_OBJECT (timidity, "Parsing song done");
    return;
  }

  /* step 4a: a segment change is pending — send the updated newsegment */
  if (timidity->o_segment_changed) {
    GstSegment *segment = gst_timidity_get_segment (timidity, GST_FORMAT_TIME,
        !timidity->o_new_segment);

    GST_LOG_OBJECT (timidity,
        "sending newsegment from %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
        ", pos=%" GST_TIME_FORMAT,
        GST_TIME_ARGS ((guint64) segment->start),
        GST_TIME_ARGS ((guint64) segment->stop),
        GST_TIME_ARGS ((guint64) segment->time));

    if (timidity->o_segment->flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT (timidity),
          gst_message_new_segment_start (GST_OBJECT (timidity),
              segment->format, segment->start));
    }

    gst_segment_free (segment);
    timidity->o_segment_changed = FALSE;
    return;
  }

  /* step 4b: a seek is pending — reposition the libtimidity song */
  if (timidity->o_seek) {
    /* perform a seek internally */
    timidity->o_segment->last_stop = timidity->o_segment->time;
    mid_song_seek (timidity->song,
        (timidity->o_segment->last_stop * timidity->time_per_frame) /
        GST_MSECOND);
  }

  /* step 5: render one buffer of audio and push it */
  out = gst_timidity_get_buffer (timidity);
  if (!out) {
    GST_LOG_OBJECT (timidity, "Song ended, generating eos");
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    timidity->o_seek = FALSE;
    goto paused;
  }

  /* first buffer after a seek is a discont */
  if (timidity->o_seek) {
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
    timidity->o_seek = FALSE;
  }

  gst_buffer_set_caps (out, timidity->out_caps);
  ret = gst_pad_push (timidity->srcpad, out);
  if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED)
    goto error;

  return;

paused:
  {
    GST_DEBUG_OBJECT (timidity, "pausing task");
    gst_pad_pause_task (timidity->sinkpad);
    return;
  }
error:
  {
    GST_ELEMENT_ERROR (timidity, STREAM, FAILED,
        ("Internal data stream error"),
        ("Streaming stopped, reason %s", gst_flow_get_name (ret)));
    gst_pad_push_event (timidity->srcpad, gst_event_new_eos ());
    goto paused;
  }
}
/* Drains all remaining pictures through the VDPAU video mixer: for each
 * queued picture an output buffer is allocated, the picture is rendered
 * into it (honouring pixel-aspect-ratio and force-aspect-ratio), the
 * timestamp/duration and relevant flags are copied over, and the result
 * is pushed on the src pad.
 * Fixes: repaired the mis-encoded "&current_pic" argument (was garbled
 * to a currency-sign character) and initialize ret so that draining an
 * empty queue returns GST_FLOW_OK instead of an indeterminate value. */
static GstFlowReturn
gst_vdp_vpp_drain (GstVdpVideoPostProcess * vpp)
{
  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  /* bugfix: was uninitialized; returned garbage when nothing is queued */
  GstFlowReturn ret = GST_FLOW_OK;

  while (gst_vdp_vpp_get_next_picture (vpp, &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GError *err;
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }
    , dest_r = { 0, };
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    err = NULL;
    ret =
        gst_vdp_output_src_pad_alloc_buffer ((GstVdpOutputSrcPad *)
        vpp->srcpad, &outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    /* source rectangle: full frame, horizontally centered after
     * pixel-aspect-ratio correction */
    src_r.w = vpp->width;
    src_r.h = vpp->height;
    if (vpp->got_par) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    /* destination rectangle from the negotiated output caps */
    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK)
      goto render_error;

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    /* propagate flags from the source picture onto the output buffer */
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    err = NULL;
    ret =
        gst_vdp_output_src_pad_push ((GstVdpOutputSrcPad *) vpp->srcpad,
        outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, STREAM, FAILED, ("Invalid output caps"), (NULL));
    ret = GST_FLOW_ERROR;
    break;

  render_error:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
        ("Could not postprocess frame"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    ret = GST_FLOW_ERROR;
    break;

  output_pad_error:
    if (ret == GST_FLOW_ERROR && err != NULL)
      gst_vdp_vpp_post_error (vpp, err);
    break;
  }

  return ret;
}
/* Sink pad chain function: applies QoS (drops buffers known to be late),
 * uploads raw YUV input into a VDPAU video buffer when needed, lazily
 * creates the mixer, queues the buffer and drains rendered pictures.
 * Takes ownership of @buffer on all paths. */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstClockTime qostime;
  GstFlowReturn ret = GST_FLOW_OK;
  GError *err;

  GST_DEBUG ("chain");

  /* can only do QoS if the segment is in TIME */
  if (vpp->segment.format != GST_FORMAT_TIME)
    goto no_qos;

  /* QOS is done on the running time of the buffer, get it now */
  qostime = gst_segment_to_running_time (&vpp->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buffer));

  if (qostime != -1) {
    gboolean need_skip;
    GstClockTime earliest_time;

    /* lock for getting the QoS parameters that are set (in a different thread)
     * with the QOS events */
    GST_OBJECT_LOCK (vpp);
    earliest_time = vpp->earliest_time;
    /* check for QoS, don't perform conversion for buffers
     * that are known to be late. */
    need_skip = GST_CLOCK_TIME_IS_VALID (earliest_time) &&
        qostime != -1 && qostime <= earliest_time;

    GST_OBJECT_UNLOCK (vpp);

    if (need_skip) {
      GST_DEBUG_OBJECT (vpp, "skipping transform: qostime %"
          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,
          GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));

      /* mark discont for next buffer */
      vpp->discont = TRUE;
      gst_buffer_unref (buffer);
      return GST_FLOW_OK;
    }
  }

no_qos:

  /* carry over a discont that we generated by skipping late buffers */
  if (vpp->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    vpp->discont = FALSE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  /* non-native input: upload the raw YUV data into a VDPAU video buffer
   * and continue with that buffer instead (metadata is copied over) */
  if (!vpp->native_input) {
    GstVdpVideoBuffer *video_buf;

    err = NULL;
    video_buf =
        (GstVdpVideoBuffer *) gst_vdp_buffer_pool_get_buffer (vpp->vpool,
        &err);
    if (G_UNLIKELY (!video_buf))
      goto video_buf_error;

    if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,
            vpp->width, vpp->height)) {
      gst_buffer_unref (GST_BUFFER (video_buf));
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Couldn't upload YUV data to vdpau"), (NULL));
      ret = GST_FLOW_ERROR;
      goto error;
    }

    gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

    gst_buffer_unref (buffer);
    buffer = GST_BUFFER (video_buf);
  }

  /* lazily create the mixer on the first buffer */
  if (G_UNLIKELY (vpp->mixer == VDP_INVALID_HANDLE)) {
    ret = gst_vdp_vpp_create_mixer (vpp);
    if (ret != GST_FLOW_OK)
      goto error;
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  ret = gst_vdp_vpp_drain (vpp);

done:
  gst_object_unref (vpp);

  return ret;

error:
  gst_buffer_unref (buffer);
  goto done;

video_buf_error:
  gst_buffer_unref (GST_BUFFER (buffer));
  gst_vdp_vpp_post_error (vpp, err);
  ret = GST_FLOW_ERROR;
  goto done;
}
/* GstPushSrc::create implementation: polls the control socket until a
 * shared-memory block is announced, then wraps that block in a zero-copy
 * read-only GstBuffer whose free function (via GstShmBuffer) releases the
 * block back to the pipe.  Returns WRONG_STATE when unlocked/flushing and
 * ERROR on socket failure. */
static GstFlowReturn
gst_shm_src_create (GstPushSrc * psrc, GstBuffer ** outbuf)
{
  GstShmSrc *self = GST_SHM_SRC (psrc);
  gchar *buf = NULL;
  int rv = 0;
  struct GstShmBuffer *gsb;

  /* loop until sp_client_recv actually produced a data block */
  do {
    if (gst_poll_wait (self->poll, GST_CLOCK_TIME_NONE) < 0) {
      /* EBUSY means gst_poll_set_flushing was called: we are unlocking */
      if (errno == EBUSY)
        return GST_FLOW_WRONG_STATE;
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
          ("Poll failed on fd: %s", strerror (errno)));
      return GST_FLOW_ERROR;
    }

    if (self->unlocked)
      return GST_FLOW_WRONG_STATE;

    if (gst_poll_fd_has_closed (self->poll, &self->pollfd)) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
          ("Control socket has closed"));
      return GST_FLOW_ERROR;
    }

    if (gst_poll_fd_has_error (self->poll, &self->pollfd)) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
          ("Control socket has error"));
      return GST_FLOW_ERROR;
    }

    if (gst_poll_fd_can_read (self->poll, &self->pollfd)) {
      buf = NULL;
      GST_LOG_OBJECT (self, "Reading from pipe");
      /* lock: self->pipe is shared with the state-change/unlock paths */
      GST_OBJECT_LOCK (self);
      rv = sp_client_recv (self->pipe->pipe, &buf);
      GST_OBJECT_UNLOCK (self);
      if (rv < 0) {
        GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
            ("Error reading control data: %d", rv));
        return GST_FLOW_ERROR;
      }
    }
  } while (buf == NULL);

  GST_LOG_OBJECT (self, "Got buffer %p of size %d", buf, rv);

  /* keep the pipe alive for as long as the buffer references its memory;
   * free_buffer drops both when the buffer is destroyed */
  gsb = g_slice_new0 (struct GstShmBuffer);
  gsb->buf = buf;
  gsb->pipe = self->pipe;
  gst_shm_pipe_inc (self->pipe);

  /* zero-copy: buffer data points straight into the shared memory area */
  *outbuf = gst_buffer_new ();
  GST_BUFFER_FLAG_SET (*outbuf, GST_BUFFER_FLAG_READONLY);
  GST_BUFFER_DATA (*outbuf) = (guint8 *) buf;
  GST_BUFFER_SIZE (*outbuf) = rv;
  GST_BUFFER_MALLOCDATA (*outbuf) = (guint8 *) gsb;
  GST_BUFFER_FREE_FUNC (*outbuf) = free_buffer;

  return GST_FLOW_OK;
}
/* Depayloads one AMR (NB or WB) RTP packet in octet-aligned mode
 * (RFC 4867): strips the CMR byte (and optional interleaving byte),
 * walks the table-of-contents FT entries to validate sizes, then packs
 * the TOC bytes followed by the frame data into a new buffer for the
 * decoder.  The marker bit maps to a DISCONT flag on the output.
 * NOTE(review): this chunk is truncated — the goto targets (too_small,
 * wrong_interleaving, wrong_framesize, wrong_length_1, wrong_length_2),
 * the final "return outbuf;" and the function's closing brace are not
 * visible here; they presumably follow in the full file — verify. */
static GstBuffer *
gst_rtp_amr_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpAMRDepay *rtpamrdepay;
  const gint *frame_size;
  GstBuffer *outbuf = NULL;
  gint payload_len;

  rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);

  /* setup frame size pointer */
  if (rtpamrdepay->mode == GST_RTP_AMR_DP_MODE_NB)
    frame_size = nb_frame_size;
  else
    frame_size = wb_frame_size;

  /* when we get here, 1 channel, 8000/16000 Hz, octet aligned, no CRC,
   * no robust sorting, no interleaving data is to be depayloaded */
  {
    guint8 *payload, *p, *dp;
    guint8 CMR;
    gint i, num_packets, num_nonempty_packets;
    gint amr_len;
    gint ILL, ILP;

    payload_len = gst_rtp_buffer_get_payload_len (buf);

    /* need at least 2 bytes for the header */
    if (payload_len < 2)
      goto too_small;

    payload = gst_rtp_buffer_get_payload (buf);

    /* depay CMR. The CMR is used by the sender to request
     * a new encoding mode.
     *
     *  0 1 2 3 4 5 6 7
     * +-+-+-+-+-+-+-+-+
     * | CMR   |R|R|R|R|
     * +-+-+-+-+-+-+-+-+
     */
    CMR = (payload[0] & 0xf0) >> 4;

    /* strip CMR header now, pack FT and the data for the decoder */
    payload_len -= 1;
    payload += 1;

    GST_DEBUG_OBJECT (rtpamrdepay, "payload len %d", payload_len);

    /* optional interleaving byte: ILL/ILP must be consistent */
    if (rtpamrdepay->interleaving) {
      ILL = (payload[0] & 0xf0) >> 4;
      ILP = (payload[0] & 0x0f);

      payload_len -= 1;
      payload += 1;

      if (ILP > ILL)
        goto wrong_interleaving;
    }

    /*
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6
     * +-+-+-+-+-+-+-+-+..
     * |F|  FT   |Q|P|P| more FT..
     * +-+-+-+-+-+-+-+-+..
     */
    /* count number of packets by counting the FTs. Also
     * count number of amr data bytes and number of non-empty
     * packets (this is also the number of CRCs if present). */
    amr_len = 0;
    num_nonempty_packets = 0;
    num_packets = 0;
    for (i = 0; i < payload_len; i++) {
      gint fr_size;
      guint8 FT;

      FT = (payload[i] & 0x78) >> 3;

      fr_size = frame_size[FT];
      GST_DEBUG_OBJECT (rtpamrdepay, "frame size %d", fr_size);
      if (fr_size == -1)
        goto wrong_framesize;

      if (fr_size > 0) {
        amr_len += fr_size;
        num_nonempty_packets++;
      }
      num_packets++;

      /* F bit cleared marks the last TOC entry */
      if ((payload[i] & 0x80) == 0)
        break;
    }

    if (rtpamrdepay->crc) {
      /* data len + CRC len + header bytes should be smaller than payload_len */
      if (num_packets + num_nonempty_packets + amr_len > payload_len)
        goto wrong_length_1;
    } else {
      /* data len + header bytes should be smaller than payload_len */
      if (num_packets + amr_len > payload_len)
        goto wrong_length_2;
    }

    outbuf = gst_buffer_new_and_alloc (payload_len);

    /* point to destination */
    p = GST_BUFFER_DATA (outbuf);
    /* point to first data packet */
    dp = payload + num_packets;
    if (rtpamrdepay->crc) {
      /* skip CRC if present */
      dp += num_nonempty_packets;
    }

    /* interleave TOC byte + frame data per packet into the output */
    for (i = 0; i < num_packets; i++) {
      gint fr_size;

      /* copy FT, clear F bit */
      *p++ = payload[i] & 0x7f;

      fr_size = frame_size[(payload[i] & 0x78) >> 3];
      if (fr_size > 0) {
        /* copy data packet, FIXME, calc CRC here. */
        memcpy (p, dp, fr_size);

        p += fr_size;
        dp += fr_size;
      }
    }
    /* we can set the duration because each packet is 20 milliseconds */
    GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;

    if (gst_rtp_buffer_get_marker (buf)) {
      /* marker bit marks a discont buffer after a talkspurt. */
      GST_DEBUG_OBJECT (depayload, "marker bit was set");
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    }

    GST_DEBUG_OBJECT (depayload, "pushing buffer of size %d",
        GST_BUFFER_SIZE (outbuf));
  }
static void gst_decklink_src_task (void *priv) { GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv); GstBuffer *buffer; GstBuffer *audio_buffer; IDeckLinkVideoInputFrame *video_frame; IDeckLinkAudioInputPacket *audio_frame; void *data; int n_samples; GstFlowReturn ret; const GstDecklinkMode *mode; GST_DEBUG_OBJECT (decklinksrc, "task"); g_mutex_lock (decklinksrc->mutex); while (decklinksrc->video_frame == NULL && !decklinksrc->stop) { g_cond_wait (decklinksrc->cond, decklinksrc->mutex); } video_frame = decklinksrc->video_frame; audio_frame = decklinksrc->audio_frame; decklinksrc->video_frame = NULL; decklinksrc->audio_frame = NULL; g_mutex_unlock (decklinksrc->mutex); if (decklinksrc->stop) { GST_DEBUG ("stopping task"); return; } /* warning on dropped frames */ if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) { GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ, ("Dropped %d frame(s), for a total of %d frame(s)", decklinksrc->dropped_frames - decklinksrc->dropped_frames_old, decklinksrc->dropped_frames), (NULL)); decklinksrc->dropped_frames_old = decklinksrc->dropped_frames; } mode = gst_decklink_get_mode (decklinksrc->mode); video_frame->GetBytes (&data); if (decklinksrc->copy_data) { buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2); memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2); video_frame->Release (); } else { buffer = gst_buffer_new (); GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2; GST_BUFFER_DATA (buffer) = (guint8 *) data; GST_BUFFER_FREE_FUNC (buffer) = video_frame_free; GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame; } GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND, mode->fps_d, mode->fps_n); GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND, mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer); GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num; 
GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num; if (decklinksrc->frame_num == 0) { GstEvent *event; gboolean ret; GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); ret = gst_pad_push_event (decklinksrc->videosrcpad, event); if (!ret) { GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret); return; } } if (decklinksrc->video_caps == NULL) { decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode); } gst_buffer_set_caps (buffer, decklinksrc->video_caps); ret = gst_pad_push (decklinksrc->videosrcpad, buffer); if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); } if (gst_pad_is_linked (decklinksrc->audiosrcpad)) { n_samples = audio_frame->GetSampleFrameCount (); audio_frame->GetBytes (&data); audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2); memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2); GST_BUFFER_TIMESTAMP (audio_buffer) = gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND, 1, 48000); GST_BUFFER_DURATION (audio_buffer) = gst_util_uint64_scale_int ((decklinksrc->num_audio_samples + n_samples) * GST_SECOND, 1, 48000) - GST_BUFFER_TIMESTAMP (audio_buffer); decklinksrc->num_audio_samples += n_samples; if (decklinksrc->audio_caps == NULL) { decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int", "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, "signed", G_TYPE_BOOLEAN, TRUE, "depth", G_TYPE_INT, 16, "width", G_TYPE_INT, 16, "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, NULL); } gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps); ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer); if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); } } audio_frame->Release (); }
/* Chain function for audiorate (GStreamer 0.10 style).
 *
 * Produces a perfect audio stream: fills gaps with silence buffers and
 * drops/truncates overlapping input so that the output sample offsets are
 * strictly contiguous.
 *
 * pad: the sink pad; buf: the input buffer (ownership transferred to us).
 * Returns the flow result of pushing downstream.
 *
 * FIXES vs. previous revision:
 *  - not_negotiated error path leaked the input buffer and the element
 *    reference taken by gst_pad_get_parent().
 *  - "%lld" was used to print guint64 values, which is not portable;
 *    use G_GUINT64_FORMAT as recommended by GLib.
 */
static GstFlowReturn
gst_audio_rate_chain (GstPad * pad, GstBuffer * buf)
{
  GstAudioRate *audiorate;
  GstClockTime in_time, in_duration, in_stop, run_time;
  guint64 in_offset, in_offset_end, in_samples;
  guint in_size;
  GstFlowReturn ret = GST_FLOW_OK;

  /* takes a ref on the element; released at 'beach' or in the error path */
  audiorate = GST_AUDIO_RATE (gst_pad_get_parent (pad));

  /* need to be negotiated now */
  if (audiorate->bytes_per_sample == 0)
    goto not_negotiated;

  /* we have a new pending segment */
  if (audiorate->next_offset == -1) {
    gint64 pos;

    /* update the TIME segment */
    gst_audio_rate_convert_segments (audiorate);

    /* first buffer, we are negotiated and we have a segment, calculate the
     * current expected offsets based on the segment.start, which is the first
     * media time of the segment and should match the media time of the first
     * buffer in that segment, which is the offset expressed in DEFAULT units.
     */
    /* convert first timestamp of segment to sample position */
    pos = gst_util_uint64_scale_int (audiorate->src_segment.start,
        audiorate->rate, GST_SECOND);

    GST_DEBUG_OBJECT (audiorate, "resync to offset %" G_GINT64_FORMAT, pos);

    audiorate->next_offset = pos;
    audiorate->next_ts = gst_util_uint64_scale_int (audiorate->next_offset,
        GST_SECOND, audiorate->rate);
  }

  audiorate->in++;

  in_time = GST_BUFFER_TIMESTAMP (buf);
  if (in_time == GST_CLOCK_TIME_NONE) {
    GST_DEBUG_OBJECT (audiorate, "no timestamp, using expected next time");
    in_time = audiorate->next_ts;
  }

  in_size = GST_BUFFER_SIZE (buf);
  in_samples = in_size / audiorate->bytes_per_sample;

  /* get duration from the size because we can and it's more accurate */
  in_duration =
      gst_util_uint64_scale_int (in_samples, GST_SECOND, audiorate->rate);
  /* end time of this buffer (kept for clarity / future clipping) */
  in_stop = in_time + in_duration;

  /* Figure out the total accumulated segment time. */
  run_time = in_time + audiorate->src_segment.accum;

  /* calculate the buffer offset */
  in_offset = gst_util_uint64_scale_int (run_time, audiorate->rate,
      GST_SECOND);
  in_offset_end = in_offset + in_samples;

  GST_LOG_OBJECT (audiorate,
      "in_time:%" GST_TIME_FORMAT ", run_time:%" GST_TIME_FORMAT
      ", in_duration:%" GST_TIME_FORMAT
      ", in_size:%u, in_offset:%" G_GUINT64_FORMAT
      ", in_offset_end:%" G_GUINT64_FORMAT
      ", ->next_offset:%" G_GUINT64_FORMAT,
      GST_TIME_ARGS (in_time),
      GST_TIME_ARGS (run_time),
      GST_TIME_ARGS (in_duration),
      in_size, in_offset, in_offset_end, audiorate->next_offset);

  /* do we need to insert samples */
  if (in_offset > audiorate->next_offset) {
    GstBuffer *fill;
    gint fillsize;
    guint64 fillsamples;

    /* We don't want to allocate a single unreasonably huge buffer - it might
       be hundreds of megabytes. So, limit each output buffer to one second of
       audio */
    fillsamples = in_offset - audiorate->next_offset;

    while (fillsamples > 0) {
      guint64 cursamples = MIN (fillsamples, audiorate->rate);

      fillsamples -= cursamples;
      fillsize = cursamples * audiorate->bytes_per_sample;

      fill = gst_buffer_new_and_alloc (fillsize);
      /* FIXME, 0 might not be the silence byte for the negotiated format. */
      memset (GST_BUFFER_DATA (fill), 0, fillsize);

      GST_DEBUG_OBJECT (audiorate, "inserting %" G_GUINT64_FORMAT " samples",
          cursamples);

      GST_BUFFER_OFFSET (fill) = audiorate->next_offset;
      audiorate->next_offset += cursamples;
      GST_BUFFER_OFFSET_END (fill) = audiorate->next_offset;

      /* Use next timestamp, then calculate following timestamp based on
       * offset to get duration. Necessary complexity to get 'perfect'
       * streams */
      GST_BUFFER_TIMESTAMP (fill) = audiorate->next_ts;
      audiorate->next_ts = gst_util_uint64_scale_int (audiorate->next_offset,
          GST_SECOND, audiorate->rate);
      GST_BUFFER_DURATION (fill) = audiorate->next_ts -
          GST_BUFFER_TIMESTAMP (fill);

      /* we created this buffer to fill a gap */
      GST_BUFFER_FLAG_SET (fill, GST_BUFFER_FLAG_GAP);
      /* set discont if it's pending, this is mostly done for the first buffer
       * and after a flushing seek */
      if (audiorate->discont) {
        GST_BUFFER_FLAG_SET (fill, GST_BUFFER_FLAG_DISCONT);
        audiorate->discont = FALSE;
      }
      gst_buffer_set_caps (fill, GST_PAD_CAPS (audiorate->srcpad));

      ret = gst_pad_push (audiorate->srcpad, fill);
      if (ret != GST_FLOW_OK)
        goto beach;
      audiorate->out++;
      audiorate->add += cursamples;

      if (!audiorate->silent)
        g_object_notify (G_OBJECT (audiorate), "add");
    }
  } else if (in_offset < audiorate->next_offset) {
    /* need to remove samples */
    if (in_offset_end <= audiorate->next_offset) {
      guint64 drop = in_size / audiorate->bytes_per_sample;

      audiorate->drop += drop;

      GST_DEBUG_OBJECT (audiorate, "dropping %" G_GUINT64_FORMAT " samples",
          drop);

      /* we can drop the buffer completely */
      gst_buffer_unref (buf);

      if (!audiorate->silent)
        g_object_notify (G_OBJECT (audiorate), "drop");

      goto beach;
    } else {
      guint64 truncsamples;
      guint truncsize, leftsize;
      GstBuffer *trunc;

      /* truncate buffer: drop the leading samples that overlap what we
       * already pushed and keep the rest */
      truncsamples = audiorate->next_offset - in_offset;
      truncsize = truncsamples * audiorate->bytes_per_sample;
      leftsize = in_size - truncsize;

      trunc = gst_buffer_create_sub (buf, truncsize, leftsize);

      gst_buffer_unref (buf);
      buf = trunc;

      gst_buffer_set_caps (buf, GST_PAD_CAPS (audiorate->srcpad));

      audiorate->drop += truncsamples;
    }
  }

  /* Now calculate parameters for whichever buffer (either the original
   * or truncated one) we're pushing. */
  GST_BUFFER_OFFSET (buf) = audiorate->next_offset;
  GST_BUFFER_OFFSET_END (buf) = in_offset_end;

  GST_BUFFER_TIMESTAMP (buf) = audiorate->next_ts;
  audiorate->next_ts = gst_util_uint64_scale_int (in_offset_end,
      GST_SECOND, audiorate->rate);
  GST_BUFFER_DURATION (buf) = audiorate->next_ts -
      GST_BUFFER_TIMESTAMP (buf);

  if (audiorate->discont) {
    /* we need to output a discont buffer, do so now */
    GST_DEBUG_OBJECT (audiorate, "marking DISCONT on output buffer");
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    audiorate->discont = FALSE;
  } else if (GST_BUFFER_IS_DISCONT (buf)) {
    /* else we make everything continuous so we can safely remove the DISCONT
     * flag from the buffer if there was one */
    GST_DEBUG_OBJECT (audiorate, "removing DISCONT from buffer");
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  /* set last_stop on segment */
  gst_segment_set_last_stop (&audiorate->src_segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf));

  ret = gst_pad_push (audiorate->srcpad, buf);
  audiorate->out++;

  audiorate->next_offset = in_offset_end;

beach:
  gst_object_unref (audiorate);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (audiorate, STREAM, FORMAT, (NULL),
        ("pipeline error, format was not negotiated"));
    /* FIX: we own the input buffer and the ref from gst_pad_get_parent();
     * previously both were leaked on this path */
    gst_buffer_unref (buf);
    gst_object_unref (audiorate);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
/* Depayload one RTP packet in Apple's QuickTime (X-QT) payload format.
 *
 * depayload: the base depayloader; buf: the incoming RTP buffer.
 * Returns a buffer to push downstream, or NULL (e.g. while accumulating a
 * fragmented sample in the adapter, or on a recoverable parse error).
 *
 * The packet carries an optional payload description (Q bit) and optional
 * sample-specific info (L bit), both encoded as padded TLV lists, followed
 * by sample data whose layout depends on the 2-bit PCK field:
 * 1 = whole samples, 2 = length-prefixed samples, 3 = fragmented sample
 * (reassembled in the adapter until the marker bit).
 *
 * NOTE(review): payload_len is a signed gint and the TLV loops trust the
 * length fields after a single bound check each; malformed lengths are
 * routed to wrong_length, which only warns and returns NULL. */
static GstBuffer *
gst_rtp_xqt_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpXQTDepay *rtpxqtdepay;
  GstBuffer *outbuf = NULL;
  gboolean m;
  GstRTPBuffer rtp = { NULL };

  rtpxqtdepay = GST_RTP_XQT_DEPAY (depayload);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  if (GST_BUFFER_IS_DISCONT (buf)) {
    /* discont, clear adapter and try to find a new packet start */
    gst_adapter_clear (rtpxqtdepay->adapter);
    rtpxqtdepay->need_resync = TRUE;
    GST_DEBUG_OBJECT (rtpxqtdepay, "we need resync");
  }

  m = gst_rtp_buffer_get_marker (&rtp);
  GST_LOG_OBJECT (rtpxqtdepay, "marker: %d", m);

  {
    gint payload_len;
    guint avail;
    guint8 *payload;
    guint8 ver, pck;
    gboolean s, q, l, d;

    payload_len = gst_rtp_buffer_get_payload_len (&rtp);
    payload = gst_rtp_buffer_get_payload (&rtp);

    /*                      1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * | VER   |PCK|S|Q|L| RES         |D|     QuickTime Payload ID      |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    if (payload_len <= 4)
      goto wrong_length;

    ver = (payload[0] & 0xf0) >> 4;
    if (ver > 1)
      goto wrong_version;

    pck = (payload[0] & 0x0c) >> 2;
    if (pck == 0)
      goto pck_reserved;

    s = (payload[0] & 0x02) != 0;       /* contains sync sample */
    q = (payload[0] & 0x01) != 0;       /* has payload description */
    l = (payload[1] & 0x80) != 0;       /* has packet specific information description */
    d = (payload[2] & 0x80) != 0;       /* don't cache info for payload id */
    /* id used for caching info */
    rtpxqtdepay->current_id = ((payload[2] & 0x7f) << 8) | payload[3];

    GST_LOG_OBJECT (rtpxqtdepay,
        "VER: %d, PCK: %d, S: %d, Q: %d, L: %d, D: %d, ID: %d", ver, pck, s,
        q, l, d, rtpxqtdepay->current_id);

    if (rtpxqtdepay->need_resync) {
      /* we need to find the boundary of a new packet after a DISCONT */
      if (pck != 3 || q) {
        /* non-fragmented packet or payload description present, packet starts
         * here. */
        rtpxqtdepay->need_resync = FALSE;
      } else {
        /* fragmented packet without description */
        if (m) {
          /* marker bit set, next packet is start of new one */
          rtpxqtdepay->need_resync = FALSE;
        }
        goto need_resync;
      }
    }

    /* skip the 4-byte common header parsed above */
    payload += 4;
    payload_len -= 4;

    if (q) {
      gboolean k, f, a, z;
      guint pdlen, pdpadded;
      gint padding;
      /* media_type only used for printing */
      guint32 G_GNUC_UNUSED media_type;
      guint32 timescale;

      /*                      1                   2                   3
       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * |K|F|A|Z| RES                   | QuickTime Payload Desc Length |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * . QuickTime Payload Desc Data ...                               .
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       */
      if (payload_len <= 4)
        goto wrong_length;

      k = (payload[0] & 0x80) != 0;     /* keyframe */
      f = (payload[0] & 0x40) != 0;     /* sparse */
      a = (payload[0] & 0x20) != 0;     /* start of payload */
      z = (payload[0] & 0x10) != 0;     /* end of payload */
      pdlen = (payload[2] << 8) | payload[3];

      if (pdlen < 12)
        goto wrong_length;

      /* calc padding: description is padded to a 32-bit boundary */
      pdpadded = pdlen + 3;
      pdpadded -= pdpadded % 4;
      if (payload_len < pdpadded)
        goto wrong_length;

      padding = pdpadded - pdlen;
      GST_LOG_OBJECT (rtpxqtdepay,
          "K: %d, F: %d, A: %d, Z: %d, len: %d, padding %d", k, f, a, z,
          pdlen, padding);

      payload += 4;
      payload_len -= 4;

      /*                      1                   2                   3
       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * | QuickTime Media Type                                          |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * | Timescale                                                     |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * . QuickTime TLVs ...                                            .
       */
      media_type =
          (payload[0] << 24) | (payload[1] << 16) | (payload[2] << 8) |
          payload[3];
      timescale =
          (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |
          payload[7];

      GST_LOG_OBJECT (rtpxqtdepay, "media_type: %c%c%c%c, timescale %u",
          payload[0], payload[1], payload[2], payload[3], timescale);

      payload += 8;
      payload_len -= 8;
      pdlen -= 12;

      /* parse TLV (type-length-value triplets */
      while (pdlen > 3) {
        guint16 tlv_len, tlv_type;

        /*                      1                   2                   3
         *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | QuickTime TLV Length          | QuickTime TLV Type            |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * . QuickTime TLV Value ...                                       .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         */
        tlv_len = (payload[0] << 8) | payload[1];
        tlv_type = (payload[2] << 8) | payload[3];
        pdlen -= 4;

        if (tlv_len > pdlen)
          goto wrong_length;

        GST_LOG_OBJECT (rtpxqtdepay, "TLV '%c%c', len %d", payload[2],
            payload[3], tlv_len);

        payload += 4;
        payload_len -= 4;

        switch (tlv_type) {
          case TLV_sd:
            /* Session description */
            if (!gst_rtp_quicktime_parse_sd (rtpxqtdepay, payload, tlv_len))
              goto unknown_format;
            rtpxqtdepay->have_sd = TRUE;
            break;
          case TLV_qt:
          case TLV_ti:
          case TLV_ly:
          case TLV_vo:
          case TLV_mx:
          case TLV_tr:
          case TLV_tw:
          case TLV_th:
          case TLV_la:
          case TLV_rt:
          case TLV_gm:
          case TLV_oc:
          case TLV_cr:
          case TLV_du:
          case TLV_po:
          default:
            /* other TLV types are ignored */
            break;
        }

        pdlen -= tlv_len;
        payload += tlv_len;
        payload_len -= tlv_len;
      }

      payload += padding;
      payload_len -= padding;
    }

    if (l) {
      guint ssilen, ssipadded;
      gint padding;

      /*                      1                   2                   3
       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * | RES                           | Sample-Specific Info Length   |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * . QuickTime TLVs ...
       */
      if (payload_len <= 4)
        goto wrong_length;

      ssilen = (payload[2] << 8) | payload[3];
      if (ssilen < 4)
        goto wrong_length;

      /* calc padding: info block is padded to a 32-bit boundary */
      ssipadded = ssilen + 3;
      ssipadded -= ssipadded % 4;
      if (payload_len < ssipadded)
        goto wrong_length;

      padding = ssipadded - ssilen;
      GST_LOG_OBJECT (rtpxqtdepay, "len: %d, padding %d", ssilen, padding);

      payload += 4;
      payload_len -= 4;
      ssilen -= 4;

      /* parse TLV (type-length-value triplets */
      while (ssilen > 3) {
        guint16 tlv_len, tlv_type;

        /*                      1                   2                   3
         *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | QuickTime TLV Length          | QuickTime TLV Type            |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * . QuickTime TLV Value ...                                       .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         */
        tlv_len = (payload[0] << 8) | payload[1];
        tlv_type = (payload[2] << 8) | payload[3];
        ssilen -= 4;

        if (tlv_len > ssilen)
          goto wrong_length;

        GST_LOG_OBJECT (rtpxqtdepay, "TLV '%c%c', len %d", payload[2],
            payload[3], tlv_len);

        payload += 4;
        payload_len -= 4;

        switch (tlv_type) {
          case TLV_sd:
          case TLV_qt:
          case TLV_ti:
          case TLV_ly:
          case TLV_vo:
          case TLV_mx:
          case TLV_tr:
          case TLV_tw:
          case TLV_th:
          case TLV_la:
          case TLV_rt:
          case TLV_gm:
          case TLV_oc:
          case TLV_cr:
          case TLV_du:
          case TLV_po:
          default:
            /* sample-specific TLVs are currently all ignored */
            break;
        }

        ssilen -= tlv_len;
        payload += tlv_len;
        payload_len -= tlv_len;
      }

      payload += padding;
      payload_len -= padding;
    }

    rtpxqtdepay->previous_id = rtpxqtdepay->current_id;

    switch (pck) {
      case 1:
      {
        /* multiple samples per packet. */
        outbuf = gst_buffer_new_and_alloc (payload_len);
        gst_buffer_fill (outbuf, 0, payload, payload_len);

        goto done;
      }
      case 2:
      {
        guint slen;

        /* multiple samples per packet.
         *                      1                   2                   3
         *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * |S| Reserved                  | Sample Length                 |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | Sample Timestamp                                              |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * . Sample Data ...                                               .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * |S| Reserved                  | Sample Length                 |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | Sample Timestamp                                              |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * . Sample Data ...                                               .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * . ......                                                        .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         */
        while (payload_len > 8) {
          s = (payload[0] & 0x80) != 0; /* contains sync sample */
          slen = (payload[2] << 8) | payload[3];
          /* timestamp =
           *    (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |
           *    payload[7];
           */

          payload += 8;
          payload_len -= 8;

          /* clamp a sample length that overruns the remaining payload */
          if (slen > payload_len)
            slen = payload_len;

          outbuf = gst_buffer_new_and_alloc (slen);
          gst_buffer_fill (outbuf, 0, payload, slen);
          if (!s)
            GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

          /* each sample is pushed individually; outbuf ownership goes to
           * the base class here */
          gst_rtp_base_depayload_push (depayload, outbuf);

          /* aligned on 32 bit boundary */
          slen = GST_ROUND_UP_4 (slen);

          payload += slen;
          payload_len -= slen;
        }
        break;
      }
      case 3:
      {
        /* one sample per packet, use adapter to combine based on marker bit. */
        outbuf = gst_buffer_new_and_alloc (payload_len);
        gst_buffer_fill (outbuf, 0, payload, payload_len);

        gst_adapter_push (rtpxqtdepay->adapter, outbuf);
        outbuf = NULL;

        /* keep accumulating fragments until the marker bit closes the
         * sample */
        if (!m)
          goto done;

        avail = gst_adapter_available (rtpxqtdepay->adapter);
        outbuf = gst_adapter_take_buffer (rtpxqtdepay->adapter, avail);

        GST_DEBUG_OBJECT (rtpxqtdepay,
            "gst_rtp_xqt_depay_chain: pushing buffer of size %u", avail);

        goto done;
      }
    }
  }

done:
  gst_rtp_buffer_unmap (&rtp);
  return outbuf;

  /* ERRORS: all parse errors warn and return NULL without erroring out */
need_resync:
  {
    GST_DEBUG_OBJECT (rtpxqtdepay, "waiting for marker");
    goto done;
  }
wrong_version:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Unknown payload version."), (NULL));
    goto done;
  }
pck_reserved:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE, ("PCK reserved 0."),
        (NULL));
    goto done;
  }
wrong_length:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Wrong payload length."), (NULL));
    goto done;
  }
unknown_format:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Unknown payload format."), (NULL));
    goto done;
  }
}
/***********************************************************************************
 * chain
 *
 * Sink-pad chain function: feeds one compressed buffer to the libav/ffmpeg
 * video decoder and, when a frame completes, copies the decoded planes into
 * a newly allocated source-pad buffer and pushes it downstream.
 *
 * pad: the sink pad; buf: the compressed input buffer (unreffed at _exit).
 * Returns GST_FLOW_OK, GST_FLOW_WRONG_STATE while flushing, or
 * GST_FLOW_ERROR on configuration/allocation failure.
 *
 * NOTE(review): a negative avcodec_decode_video2() result is swallowed
 * (result stays GST_FLOW_OK) — this looks deliberate (skip bad packets),
 * but confirm. In the HLS path the packet borrows buf's data without a
 * copy; that is safe only because buf is unreffed after decoding.
 ***********************************************************************************/
static GstFlowReturn
videodecoder_chain(GstPad *pad, GstBuffer *buf)
{
    VideoDecoder *decoder = VIDEODECODER(GST_PAD_PARENT(pad));
    BaseDecoder *base = BASEDECODER(decoder);
    GstFlowReturn result = GST_FLOW_OK;
    int num_dec = NO_DATA_USED;

    if (base->is_flushing) // Reject buffers in flushing state.
    {
        result = GST_FLOW_WRONG_STATE;
        goto _exit;
    }

    // Lazily configure the decoder from the sink caps on the first buffer.
    if (!base->is_initialized && !videodecoder_configure(decoder, GST_PAD_CAPS(pad)))
    {
        result = GST_FLOW_ERROR;
        goto _exit;
    }

    if (!base->is_hls)
    {
        // Non-HLS: copy the input into a freshly allocated AVPacket.
        if (av_new_packet(&decoder->packet, GST_BUFFER_SIZE(buf)) == 0)
        {
            memcpy(decoder->packet.data, GST_BUFFER_DATA(buf), GST_BUFFER_SIZE(buf));

            // reordered_opaque carries the timestamp through frame reordering.
            if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))
                base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);
            else
                base->context->reordered_opaque = AV_NOPTS_VALUE;

            num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);
            av_free_packet(&decoder->packet);
        }
        else
        {
            result = GST_FLOW_ERROR;
            goto _exit;
        }
    }
    else
    {
        // HLS: wrap the input buffer's data directly (no copy, no free).
        av_init_packet(&decoder->packet);
        decoder->packet.data = GST_BUFFER_DATA(buf);
        decoder->packet.size = GST_BUFFER_SIZE(buf);

        if (GST_BUFFER_TIMESTAMP_IS_VALID(buf))
            base->context->reordered_opaque = GST_BUFFER_TIMESTAMP(buf);
        else
            base->context->reordered_opaque = AV_NOPTS_VALUE;

        num_dec = avcodec_decode_video2(base->context, base->frame, &decoder->frame_finished, &decoder->packet);
    }

    if (num_dec < 0)
    {
        // Decode error: log in debug builds and skip this packet
        // (result intentionally stays GST_FLOW_OK).
        // basedecoder_flush(base);
#ifdef DEBUG_OUTPUT
        g_print ("videodecoder_chain error: %s\n",
                 avelement_error_to_string(AVELEMENT(decoder), num_dec));
#endif
        goto _exit;
    }

    if (decoder->frame_finished > 0)
    {
        if (!videodecoder_configure_sourcepad(decoder))
            result = GST_FLOW_ERROR;
        else
        {
            GstBuffer *outbuf = NULL;
            result = gst_pad_alloc_buffer_and_set_caps(base->srcpad,
                                                       base->context->frame_number,
                                                       decoder->frame_size,
                                                       GST_PAD_CAPS(base->srcpad), &outbuf);
            if (result != GST_FLOW_OK)
            {
                // WRONG_STATE just means we're shutting down; anything else
                // is reported as a decode error.
                if (result != GST_FLOW_WRONG_STATE)
                {
                    gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR,
                                             GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
                                             g_strdup("Decoded video buffer allocation failed"), NULL,
                                             ("videodecoder.c"), ("videodecoder_chain"), 0);
                }
            }
            else
            {
                if (base->frame->reordered_opaque != AV_NOPTS_VALUE)
                {
                    GST_BUFFER_TIMESTAMP(outbuf) = base->frame->reordered_opaque;
                    GST_BUFFER_DURATION(outbuf) = GST_BUFFER_DURATION(buf); // Duration for video usually same
                }
                GST_BUFFER_SIZE(outbuf) = decoder->frame_size;

                // Copy image by parts from different arrays.
                // Planar layout: Y at 0, U at u_offset, V at v_offset.
                memcpy(GST_BUFFER_DATA(outbuf), base->frame->data[0], decoder->u_offset);
                memcpy(GST_BUFFER_DATA(outbuf) + decoder->u_offset, base->frame->data[1], decoder->uv_blocksize);
                memcpy(GST_BUFFER_DATA(outbuf) + decoder->v_offset, base->frame->data[2], decoder->uv_blocksize);

                GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET_NONE;

                if (decoder->discont || GST_BUFFER_IS_DISCONT(buf))
                {
#ifdef DEBUG_OUTPUT
                    g_print("Video discont: frame size=%dx%d\n", base->context->width, base->context->height);
#endif
                    GST_BUFFER_FLAG_SET(outbuf, GST_BUFFER_FLAG_DISCONT);
                    decoder->discont = FALSE;
                }

#ifdef VERBOSE_DEBUG
                g_print("videodecoder: pushing buffer ts=%.4f sec", (double)GST_BUFFER_TIMESTAMP(outbuf)/GST_SECOND);
#endif
                result = gst_pad_push(base->srcpad, outbuf);
#ifdef VERBOSE_DEBUG
                g_print(" done, res=%s\n", gst_flow_get_name(result));
#endif
            }
        }
    }

_exit:
// INLINE - gst_buffer_unref()
    gst_buffer_unref(buf);
    return result;
}
/* Chain function for the Jasper JPEG2000 decoder (GStreamer 0.10 style).
 *
 * pad: the sink pad; buf: the compressed input buffer (we own it and
 * unref it at 'done'). Optionally strips a container prefix and prepends
 * codec_data before handing the bytes to gst_jasper_dec_get_picture().
 * Returns the flow result; GST_FLOW_NOT_NEGOTIATED if caps weren't set.
 */
static GstFlowReturn
gst_jasper_dec_chain (GstPad * pad, GstBuffer * buf)
{
  GstJasperDec *dec;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime ts;
  GstBuffer *outbuf = NULL;
  guint8 *data;
  guint size;
  gboolean decode;

  dec = GST_JASPER_DEC (GST_PAD_PARENT (pad));

  /* fmt < 0 means set_caps never configured an output format */
  if (dec->fmt < 0)
    goto not_negotiated;

  ts = GST_BUFFER_TIMESTAMP (buf);

  GST_LOG_OBJECT (dec, "buffer with ts: %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
    dec->discont = TRUE;

  /* QoS decision: may tell us to skip decoding this frame */
  decode = gst_jasper_dec_do_qos (dec, ts);

  /* FIXME: do clipping */

  if (G_UNLIKELY (!decode)) {
    /* frame skipped for QoS; the next pushed frame must carry DISCONT */
    dec->discont = TRUE;
    goto done;
  }

  /* strip possible prefix */
  if (dec->strip) {
    GstBuffer *tmp;

    tmp = gst_buffer_create_sub (buf, dec->strip,
        GST_BUFFER_SIZE (buf) - dec->strip);
    gst_buffer_copy_metadata (tmp, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = tmp;
  }
  /* prepend possible codec_data */
  if (dec->codec_data) {
    GstBuffer *tmp;

    /* both inputs are ref'ed because append consumes its arguments */
    tmp = gst_buffer_append (gst_buffer_ref (dec->codec_data),
        gst_buffer_ref (buf));
    gst_buffer_copy_metadata (tmp, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = tmp;
  }

  /* now really feed the data to decoder */
  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);
  ret = gst_jasper_dec_get_picture (dec, data, size, &outbuf);

  if (outbuf) {
    gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);
    if (dec->discont) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
      dec->discont = FALSE;
    }

    if (ret == GST_FLOW_OK)
      ret = gst_pad_push (dec->srcpad, outbuf);
    else
      gst_buffer_unref (outbuf);
  }

done:
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}
/* Finish one decoded frame and push it downstream.
 *
 * Wraps the VA surface proxy stored in the codec frame's user data into the
 * output buffer: attaches the vaapi video meta, translates proxy flags to
 * buffer flags, adds crop meta and (GLX/EGL) texture-upload meta, then
 * hands the frame to gst_video_decoder_finish_frame().
 *
 * vdec: the decoder; out_frame: the frame to complete (one ref consumed on
 * every path — success, drop, or error).
 * Returns GST_FLOW_OK or an error/flushing flow result.
 */
static GstFlowReturn
gst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * out_frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiSurfaceProxy *proxy;
  GstFlowReturn ret;
  const GstVaapiRectangle *crop_rect;
  GstVaapiVideoMeta *meta;
  guint flags, out_flags = 0;

  /* decode-only frames (e.g. skipped frames) carry no surface and are
   * finished without producing output */
  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {
    proxy = gst_video_codec_frame_get_user_data (out_frame);

    /* reconfigure if un-cropped surface resolution changed */
    if (is_surface_resolution_changed (vdec,
            GST_VAAPI_SURFACE_PROXY_SURFACE (proxy)))
      gst_vaapidecode_negotiate (decode);

    /* the destroy notify releases the decoder ref taken here when the
     * proxy is finalized */
    gst_vaapi_surface_proxy_set_destroy_notify (proxy,
        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));

    ret = gst_video_decoder_allocate_output_frame (vdec, out_frame);
    if (ret != GST_FLOW_OK)
      goto error_create_buffer;

    meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);
    if (!meta)
      goto error_get_meta;
    gst_vaapi_video_meta_set_surface_proxy (meta, proxy);

    /* map proxy flags onto GstBuffer/GstVideo flags */
    flags = gst_vaapi_surface_proxy_get_flags (proxy);
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)
      out_flags |= GST_BUFFER_FLAG_CORRUPTED;
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {
      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)
        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;
    }
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);

#if GST_CHECK_VERSION(1,5,0)
    /* First-in-bundle flag only appeared in 1.5 dev */
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);
    }
#endif

    /* advertise the visible region via crop meta when the proxy has one */
    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);
    if (crop_rect) {
      GstVideoCropMeta *const crop_meta =
          gst_buffer_add_video_crop_meta (out_frame->output_buffer);
      if (crop_meta) {
        crop_meta->x = crop_rect->x;
        crop_meta->y = crop_rect->y;
        crop_meta->width = crop_rect->width;
        crop_meta->height = crop_rect->height;
      }
    }
#if (USE_GLX || USE_EGL)
    if (decode->has_texture_upload_meta)
      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);
#endif
  }

  ret = gst_video_decoder_finish_frame (vdec, out_frame);
  if (ret != GST_FLOW_OK)
    goto error_commit_buffer;

  gst_video_codec_frame_unref (out_frame);
  return GST_FLOW_OK;

  /* ERRORS: every path drops/unrefs out_frame exactly once */
error_create_buffer:
  {
    const GstVaapiID surface_id =
        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));

    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to create sink buffer"),
        ("video sink failed to create video buffer for proxy'ed "
            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_get_meta:
  {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to get vaapi video meta attached to video buffer"),
        ("Failed to get vaapi video meta attached to video buffer"));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_commit_buffer:
  {
    /* not an element error: downstream may legitimately be flushing */
    GST_INFO_OBJECT (decode, "downstream element rejected the frame (%s [%d])",
        gst_flow_get_name (ret), ret);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
}
/*
 * Read a new buffer from src->reqoffset, takes care of events
 * and seeking and such.
 *
 * Fills one blocksize-sized buffer from the RTMP connection, looping on
 * short reads, trims it to the bytes actually read, and stamps it with the
 * media timestamp reported by librtmp.
 *
 * Returns GST_FLOW_OK with *buffer set, GST_FLOW_EOS when the very first
 * read of a cycle returns 0, or GST_FLOW_ERROR on allocation/read failure.
 *
 * NOTE(review): the read_failed/eos paths unref `buf` while it is still
 * mapped (gst_buffer_unmap is skipped) — verify this is acceptable here.
 * Also `cur_offset += size` advances by the full blocksize even when the
 * buffer was truncated to `bsize` bytes — confirm intended.
 */
static GstFlowReturn
gst_rtmp_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstRTMPSrc *src;
  GstBuffer *buf;
  GstMapInfo map;
  guint8 *data;
  guint todo;                  /* bytes still to read */
  gsize bsize;                 /* bytes actually read so far */
  int read;
  int size;

  src = GST_RTMP_SRC (pushsrc);

  g_return_val_if_fail (src->rtmp != NULL, GST_FLOW_ERROR);

  size = GST_BASE_SRC_CAST (pushsrc)->blocksize;

  GST_DEBUG ("reading from %" G_GUINT64_FORMAT
      ", size %u", src->cur_offset, size);

  buf = gst_buffer_new_allocate (NULL, size, NULL);
  if (G_UNLIKELY (buf == NULL)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", size);
    return GST_FLOW_ERROR;
  }

  bsize = todo = size;
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;
  /* reset the read counter; bsize now accumulates bytes received */
  read = bsize = 0;

  while (todo > 0) {
    read = RTMP_Read (src->rtmp, (char *) data, todo);

    /* 0 on the very first read means the stream is over */
    if (G_UNLIKELY (read == 0 && todo == size)) {
      goto eos;
    } else if (G_UNLIKELY (read == 0)) {
      /* stream ended mid-buffer: ship what we have */
      todo = 0;
      break;
    }

    if (G_UNLIKELY (read < 0))
      goto read_failed;

    if (read < todo) {
      data += read;
      todo -= read;
      bsize += read;
    } else {
      bsize += todo;
      todo = 0;
    }
    GST_LOG (" got size %d", read);
  }
  gst_buffer_unmap (buf, &map);
  /* shrink the buffer to the bytes actually read */
  gst_buffer_resize (buf, 0, bsize);

  if (src->discont) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    src->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = src->last_timestamp;
  GST_BUFFER_OFFSET (buf) = src->cur_offset;

  src->cur_offset += size;

  /* track the highest media timestamp librtmp has seen so far */
  if (src->last_timestamp == GST_CLOCK_TIME_NONE)
    src->last_timestamp = src->rtmp->m_mediaStamp * GST_MSECOND;
  else
    src->last_timestamp = MAX (src->last_timestamp,
        src->rtmp->m_mediaStamp * GST_MSECOND);

  GST_LOG_OBJECT (src, "Created buffer of size %u at %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, size, GST_BUFFER_OFFSET (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  /* we're done, return the buffer */
  *buffer = buf;

  return GST_FLOW_OK;

read_failed:
  {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("Failed to read data"));
    return GST_FLOW_ERROR;
  }
eos:
  {
    gst_buffer_unref (buf);
    GST_DEBUG_OBJECT (src, "Reading data gave EOS");
    return GST_FLOW_EOS;
  }
}
/* push packets from the queue to the downstream demuxer
 *
 * Pad task for an RDT session: blocks on the jitterbuffer until a packet
 * is available (or EOS/flushing), pops it and pushes it on the session's
 * recv_rtp_src pad.  All jitterbuffer state is protected by the JBUF_*
 * lock macros; the pad push itself happens outside the lock.
 */
static void
gst_rdt_manager_loop (GstPad * pad)
{
  GstRDTManager *rdtmanager;
  GstRDTManagerSession *session;
  GstBuffer *buffer;
  GstFlowReturn result;

  rdtmanager = GST_RDT_MANAGER (GST_PAD_PARENT (pad));

  session = gst_pad_get_element_private (pad);

  /* takes the jbuf lock; jumps to 'flushing' if we are already flushing */
  JBUF_LOCK_CHECK (session, flushing);
  GST_DEBUG_OBJECT (rdtmanager, "Peeking item");
  while (TRUE) {
    /* always wait if we are blocked */
    if (!session->blocked) {
      /* if we have a packet, we can exit the loop and grab it */
      if (rdt_jitter_buffer_num_packets (session->jbuf) > 0)
        break;
      /* no packets but we are EOS, do eos logic */
      if (session->eos)
        goto do_eos;
    }
    /* underrun, wait for packets or flushing now */
    session->waiting = TRUE;
    JBUF_WAIT_CHECK (session, flushing);
    session->waiting = FALSE;
  }

  buffer = rdt_jitter_buffer_pop (session->jbuf);

  GST_DEBUG_OBJECT (rdtmanager, "Got item %p", buffer);

  /* propagate a pending discontinuity on the first popped buffer */
  if (session->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    session->discont = FALSE;
  }

  /* drop the lock before pushing to avoid deadlocks with downstream */
  JBUF_UNLOCK (session);

  result = gst_pad_push (session->recv_rtp_src, buffer);
  if (result != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
flushing:
  {
    /* entered via JBUF_LOCK_CHECK/JBUF_WAIT_CHECK: lock is held here */
    GST_DEBUG_OBJECT (rdtmanager, "we are flushing");
    gst_pad_pause_task (session->recv_rtp_src);
    JBUF_UNLOCK (session);
    return;
  }
do_eos:
  {
    /* store result, we are flushing now */
    GST_DEBUG_OBJECT (rdtmanager, "We are EOS, pushing EOS downstream");
    session->srcresult = GST_FLOW_EOS;
    gst_pad_pause_task (session->recv_rtp_src);
    gst_pad_push_event (session->recv_rtp_src, gst_event_new_eos ());
    JBUF_UNLOCK (session);
    return;
  }
pause:
  {
    GST_DEBUG_OBJECT (rdtmanager, "pausing task, reason %s",
        gst_flow_get_name (result));
    JBUF_LOCK (session);
    /* store result */
    session->srcresult = result;
    /* we don't post errors or anything because upstream will do that for us
     * when we pass the return value upstream. */
    gst_pad_pause_task (session->recv_rtp_src);
    JBUF_UNLOCK (session);
    return;
  }
}
/* Depayload one RTP packet carrying VP9.
 *
 * Parses the VP9 payload descriptor (I/P/L/F/B/E/V bits and their optional
 * extensions), strips it, and accumulates the remaining payload in the
 * adapter.  When the RTP marker bit signals the last packet of a frame, the
 * accumulated data is returned as one buffer; otherwise NULL is returned.
 * Also updates/announces src caps from the scalability structure's
 * width/height when a keyframe completes.
 *
 * Returns: (transfer full): a complete VP9 frame, or NULL. */
static GstBuffer *
gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize = 1;            /* descriptor size; grows as optional fields are found */
  guint size;
  /* NOTE(review): spatial_layer is never updated from the L-header below,
   * so key_frame_first_layer effectively only tests !p_bit */
  gint spatial_layer = 0;
  gboolean i_bit, p_bit, l_bit, f_bit, b_bit, e_bit, v_bit;

  /* on discont, any partially accumulated frame is unusable */
  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  size = gst_rtp_buffer_get_payload_len (rtp);

  /* Mandatory with at least one header and one vp9 byte */
  if (G_UNLIKELY (size < hdrsize + 1))
    goto too_small;

  /* first descriptor byte: I|P|L|F|B|E|V flag bits */
  data = gst_rtp_buffer_get_payload (rtp);
  i_bit = (data[0] & 0x80) != 0;
  p_bit = (data[0] & 0x40) != 0;
  l_bit = (data[0] & 0x20) != 0;
  f_bit = (data[0] & 0x10) != 0;
  b_bit = (data[0] & 0x08) != 0;
  e_bit = (data[0] & 0x04) != 0; /* parsed but currently unused */
  v_bit = (data[0] & 0x02) != 0;

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP9 layer frame, otherwise bail */
    if (!b_bit)
      goto done;

    self->started = TRUE;
  }

  GST_TRACE_OBJECT (self, "IPLFBEV : %d%d%d%d%d%d%d", i_bit, p_bit, l_bit,
      f_bit, b_bit, e_bit, v_bit);

  /* Check I optional header Picture ID */
  if (i_bit) {
    hdrsize++;
    if (G_UNLIKELY (size < hdrsize + 1))
      goto too_small;
    /* Check M for 15 bits PictureID */
    if ((data[1] & 0x80) != 0) {
      hdrsize++;
      if (G_UNLIKELY (size < hdrsize + 1))
        goto too_small;
    }
  }

  /* flexible-mode not implemented */
  g_assert (!f_bit);

  /* Check L optional header layer indices */
  if (l_bit) {
    hdrsize++;
    /* Check TL0PICIDX temporal layer zero index (non-flexible mode) */
    if (!f_bit)
      hdrsize++;
  }

  /* Check V optional Scalability Structure */
  if (v_bit) {
    guint n_s, y_bit, g_bit;
    guint8 *ss = &data[hdrsize];
    guint sssize = 1;

    if (G_UNLIKELY (size < hdrsize + sssize + 1))
      goto too_small;

    n_s = (ss[0] & 0xe0) >> 5;  /* number of spatial layers minus one */
    y_bit = (ss[0] & 0x10) != 0; /* resolutions present */
    g_bit = (ss[0] & 0x08) != 0; /* picture group description present */

    GST_TRACE_OBJECT (self, "SS header: N_S=%u, Y=%u, G=%u", n_s, y_bit,
        g_bit);

    /* each of the N_S+1 layers carries 2 bytes width + 2 bytes height */
    sssize += y_bit ? (n_s + 1) * 4 : 0;
    if (G_UNLIKELY (size < hdrsize + sssize + 1))
      goto too_small;

    if (y_bit) {
      guint i;
      for (i = 0; i <= n_s; i++) {
        /* For now, simply use the last layer specified for width and height */
        self->ss_width = ss[1 + i * 4] * 256 + ss[2 + i * 4];
        self->ss_height = ss[3 + i * 4] * 256 + ss[4 + i * 4];
        GST_TRACE_OBJECT (self, "N_S[%d]: WIDTH=%u, HEIGHT=%u", i,
            self->ss_width, self->ss_height);
      }
    }

    /* picture group: N_G entries, each with T/U/R bits plus R P_DIFF bytes */
    if (g_bit) {
      guint i, j;
      guint n_g = ss[sssize];
      sssize++;
      if (G_UNLIKELY (size < hdrsize + sssize + 1))
        goto too_small;
      for (i = 0; i < n_g; i++) {
        guint t = (ss[sssize] & 0xe0) >> 5;
        guint u = (ss[sssize] & 0x10) >> 4;
        guint r = (ss[sssize] & 0x0c) >> 2;
        GST_TRACE_OBJECT (self, "N_G[%u]: 0x%02x -> T=%u, U=%u, R=%u", i,
            ss[sssize], t, u, r);
        for (j = 0; j < r; j++)
          GST_TRACE_OBJECT (self, " R[%u]: P_DIFF=%u", j, ss[sssize + 1 + j]);
        sssize += 1 + r;
        if (G_UNLIKELY (size < hdrsize + sssize + 1))
          goto too_small;
      }
    }
    hdrsize += sssize;
  }

  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  /* there must be actual VP9 payload after the descriptor */
  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  {
    GstMapInfo map;
    gst_buffer_map (payload, &map, GST_MAP_READ);
    GST_MEMDUMP_OBJECT (self, "vp9 payload", map.data, 16);
    gst_buffer_unmap (payload, &map);
  }
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    gboolean key_frame_first_layer = !p_bit && spatial_layer == 0;

    /* minimum plausible VP9 frame size */
    if (gst_adapter_available (self->adapter) < 10)
      goto too_small;
    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    /* Filter away all metas that are not sensible to copy */
    gst_rtp_drop_meta (GST_ELEMENT_CAST (self), out,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    if (!key_frame_first_layer) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      /* don't emit delta frames before the first keyframe; request one */
      if (!self->caps_sent) {
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      /* (re)negotiate caps when the announced resolution changed */
      if (self->last_width != self->ss_width
          || self->last_height != self->ss_height) {
        GstCaps *srccaps;

        /* Width and height are optional in the RTP header. Consider to parse
         * the frame header in addition if missing from RTP header */
        if (self->ss_width != 0 && self->ss_height != 0) {
          srccaps = gst_caps_new_simple ("video/x-vp9",
              "framerate", GST_TYPE_FRACTION, 0, 1,
              "width", G_TYPE_INT, self->ss_width,
              "height", G_TYPE_INT, self->ss_height, NULL);
        } else {
          srccaps = gst_caps_new_simple ("video/x-vp9",
              "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
        }
        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);
        self->caps_sent = TRUE;
        self->last_width = self->ss_width;
        self->last_height = self->ss_height;
        self->ss_width = 0;
        self->ss_height = 0;
      }
    }
    return out;
  }

done:
  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;
  goto done;
}
/* Finish a decoded frame: timestamp its source buffer, set interlacing and
 * discontinuity flags, clip against the configured segment and push the
 * buffer downstream (or hand it to the subclass' shape_output vfunc).
 *
 * @base_video_decoder: the decoder instance
 * @frame: (transfer full): the decoded frame; its ref is released here
 *
 * Returns: the flow return from pushing, or GST_FLOW_OK when the buffer was
 * dropped because it fell outside the segment. */
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstClockTime presentation_timestamp;
  GstClockTime presentation_duration;
  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame,
      &presentation_timestamp, &presentation_duration);

  src_buffer = frame->src_buffer;

  /* decoded output is always independently decodable */
  GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
    /* provide fallback definitions when building against headers that
     * don't declare the interlacing buffer flags */
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif

    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    /* n_fields: 3 = repeat first field, 1 = single field */
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }

  if (base_video_decoder->discont) {
    /* BUGFIX: the first buffer after a discontinuity must be flagged
     * DISCONT for downstream; the previous code cleared the flag instead
     * (compare the jitterbuffer loop / resampler in this file, which SET
     * the flag in the same situation) */
    GST_BUFFER_FLAG_SET (src_buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (src_buffer) = presentation_timestamp;
  GST_BUFFER_DURATION (src_buffer) = presentation_duration;
  GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (presentation_timestamp));

  gst_base_video_decoder_set_src_caps (base_video_decoder);

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
    } else {
      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
      gst_video_frame_unref (frame);
      return GST_FLOW_OK;
    }
  }

  /* keep a ref on the buffer past the frame unref below */
  gst_buffer_ref (src_buffer);
  gst_video_frame_unref (frame);

  if (base_video_decoder_class->shape_output)
    return base_video_decoder_class->shape_output (base_video_decoder,
        src_buffer);

  return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      src_buffer);
}
/* Collect one chunk of @size bytes from every sink pad and interleave them
 * into a single multi-channel output buffer.
 *
 * Pads without data contribute silence (the output is pre-zeroed); if every
 * contributing buffer carried the GAP flag, the output is flagged GAP too.
 * Also converts and pushes a pending segment event and maintains the
 * running offset/timestamp.
 *
 * Returns: GST_FLOW_OK on success, GST_FLOW_EOS when no data is available,
 * GST_FLOW_NOT_NEGOTIATED / GST_FLOW_ERROR on setup problems. */
static GstFlowReturn
gst_interleave_collected (GstCollectPads * pads, GstInterleave * self)
{
  guint size;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GSList *collected;
  guint nsamples;
  guint ncollected = 0;
  gboolean empty = TRUE;
  gint width = self->width / 8; /* bytes per sample */
  GstMapInfo write_info;
  GstClockTime timestamp = -1;

  /* FIXME: send caps and tags after stream-start */
#if 0
  if (self->send_stream_start) {
    gchar s_id[32];

    /* stream-start (FIXME: create id based on input ids) */
    g_snprintf (s_id, sizeof (s_id), "interleave-%08x", g_random_int ());
    gst_pad_push_event (self->src, gst_event_new_stream_start (s_id));
    self->send_stream_start = FALSE;
  }
#endif

  size = gst_collect_pads_available (pads);
  if (size == 0)
    goto eos;

  g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->width > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->channels > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->rate > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (size % width == 0, GST_FLOW_ERROR);

  GST_DEBUG_OBJECT (self, "Starting to collect %u bytes from %d channels",
      size, self->channels);

  nsamples = size / width;

  outbuf = gst_buffer_new_allocate (NULL, size * self->channels, NULL);

  if (outbuf == NULL || gst_buffer_get_size (outbuf) < size * self->channels) {
    /* BUGFIX: don't unref a NULL buffer on allocation failure */
    if (outbuf)
      gst_buffer_unref (outbuf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  /* pre-zero so missing/gap channels come out as silence */
  gst_buffer_map (outbuf, &write_info, GST_MAP_WRITE);
  memset (write_info.data, 0, size * self->channels);

  for (collected = pads->data; collected != NULL; collected = collected->next) {
    GstCollectData *cdata;
    GstBuffer *inbuf;
    guint8 *outdata;
    GstMapInfo input_info;

    cdata = (GstCollectData *) collected->data;

    inbuf = gst_collect_pads_take_buffer (pads, cdata, size);
    if (inbuf == NULL) {
      GST_DEBUG_OBJECT (cdata->pad, "No buffer available");
      goto next;
    }
    ncollected++;

    if (timestamp == -1)
      timestamp = GST_BUFFER_TIMESTAMP (inbuf);

    /* BUGFIX: test the GAP flag *before* mapping; the old code mapped the
     * buffer first and then jumped to next, unreffing it while still
     * mapped and leaking the mapping */
    if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))
      goto next;

    empty = FALSE;

    gst_buffer_map (inbuf, &input_info, GST_MAP_READ);
    /* interleave: this pad's samples start at its channel offset */
    outdata =
        write_info.data +
        width * GST_INTERLEAVE_PAD_CAST (cdata->pad)->channel;
    self->func (outdata, input_info.data, self->channels, nsamples);
    gst_buffer_unmap (inbuf, &input_info);

  next:
    if (inbuf)
      gst_buffer_unref (inbuf);
  }

  if (ncollected == 0) {
    gst_buffer_unmap (outbuf, &write_info);
    goto eos;
  }

  GST_OBJECT_LOCK (self);
  if (self->pending_segment) {
    GstEvent *event;
    GstSegment segment;

    event = self->pending_segment;
    self->pending_segment = NULL;
    GST_OBJECT_UNLOCK (self);

    /* convert the input segment to time now */
    gst_event_copy_segment (event, &segment);

    if (segment.format != GST_FORMAT_TIME) {
      gst_event_unref (event);

      /* not time, convert */
      switch (segment.format) {
        case GST_FORMAT_BYTES:
          segment.start *= width;
          if (segment.stop != -1)
            segment.stop *= width;
          if (segment.position != -1)
            segment.position *= width;
          /* fallthrough for the samples case */
        case GST_FORMAT_DEFAULT:
          segment.start =
              gst_util_uint64_scale_int (segment.start, GST_SECOND,
              self->rate);
          if (segment.stop != -1)
            segment.stop =
                gst_util_uint64_scale_int (segment.stop, GST_SECOND,
                self->rate);
          if (segment.position != -1)
            segment.position =
                gst_util_uint64_scale_int (segment.position, GST_SECOND,
                self->rate);
          break;
        default:
          GST_WARNING ("can't convert segment values");
          segment.start = 0;
          segment.stop = -1;
          segment.position = 0;
          break;
      }
      event = gst_event_new_segment (&segment);
    }
    gst_pad_push_event (self->src, event);

    GST_OBJECT_LOCK (self);
  }
  GST_OBJECT_UNLOCK (self);

  /* resync our offset/timestamp bookkeeping to the input timestamp */
  if (timestamp != -1) {
    self->offset = gst_util_uint64_scale_int (timestamp, self->rate,
        GST_SECOND);
    self->timestamp = timestamp;
  }

  GST_BUFFER_TIMESTAMP (outbuf) = self->timestamp;
  GST_BUFFER_OFFSET (outbuf) = self->offset;

  self->offset += nsamples;
  self->timestamp = gst_util_uint64_scale_int (self->offset,
      GST_SECOND, self->rate);

  /* duration is "next timestamp - this timestamp" for a contiguous stream */
  GST_BUFFER_DURATION (outbuf) =
      self->timestamp - GST_BUFFER_TIMESTAMP (outbuf);

  if (empty)
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

  gst_buffer_unmap (outbuf, &write_info);

  GST_LOG_OBJECT (self, "pushing outbuf, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
  ret = gst_pad_push (self->src, outbuf);

  return ret;

eos:
  {
    GST_DEBUG_OBJECT (self, "no data available, must be EOS");
    if (outbuf)
      gst_buffer_unref (outbuf);
    gst_pad_push_event (self->src, gst_event_new_eos ());
    return GST_FLOW_EOS;
  }
}
/* In-place transform for interleaved stereo audio through a Buzz machine.
 *
 * Advances the machine's tick/subtick state, scales float samples into the
 * Buzz value range, then runs the machine's work_m2s() over the buffer in
 * segments of at most 256 frames, finally (un)setting the GAP flag
 * depending on whether the machine produced data.
 *
 * NOTE(review): the bml(...) wrapper presumably dispatches to the loaded
 * machine library — confirm against its macro definition. */
static GstFlowReturn
gst_bml_transform_transform_ip_stereo (GstBaseTransform * base,
    GstBuffer * outbuf)
{
  GstMapInfo info;
  GstBMLTransform *bml_transform = GST_BML_TRANSFORM (base);
  GstBMLTransformClass *klass = GST_BML_TRANSFORM_GET_CLASS (bml_transform);
  GstBML *bml = GST_BML (bml_transform);
  GstBMLClass *bml_class = GST_BML_CLASS (klass);
  BMLData *data, *seg_data;
  gpointer bm = bml->bm;
  guint todo, seg_size, samples_per_buffer;
  gboolean has_data;
  guint mode = 3;               /*WM_READWRITE */

  bml->running_time = gst_segment_to_stream_time (&base->segment,
      GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (outbuf));

  /* on discont, force a tick on the next buffer (or restart when reversed) */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DISCONT)) {
    bml->subtick_count = (!bml->reverse) ? bml->subticks_per_tick : 1;
  }

  /* TODO(ensonic): sync on subticks ? */
  if (bml->subtick_count >= bml->subticks_per_tick) {
    bml (gstbml_reset_triggers (bml, bml_class));
    bml (gstbml_sync_values (bml, bml_class, GST_BUFFER_TIMESTAMP (outbuf)));
    bml (tick (bm));
    bml->subtick_count = 1;
  } else {
    bml->subtick_count++;
  }

  /* don't process data in passthrough-mode */
  if (gst_base_transform_is_passthrough (base))
    return GST_FLOW_OK;

  if (!gst_buffer_map (outbuf, &info, GST_MAP_READ | GST_MAP_WRITE)) {
    GST_WARNING_OBJECT (base, "unable to map buffer for read & write");
    return GST_FLOW_ERROR;
  }
  data = (BMLData *) info.data;
  /* interleaved stereo: two BMLData values per frame */
  samples_per_buffer = info.size / (sizeof (BMLData) * 2);

  /* if buffer has only silence process with different mode */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP)) {
    mode = 2;                   /* WM_WRITE */
  } else {
    /* scale from normalized floats to the 16-bit-ish Buzz range */
    gfloat fc = 32768.0;
    orc_scalarmultiply_f32_ns (data, data, fc, samples_per_buffer * 2);
  }

  GST_DEBUG_OBJECT (bml_transform, " calling work_m2s(%d,%d)",
      samples_per_buffer, mode);

  todo = samples_per_buffer;
  seg_data = data;
  has_data = FALSE;
  while (todo) {
    // 256 is MachineInterface.h::MAX_BUFFER_LENGTH
    seg_size = (todo > 256) ? 256 : todo;
    // first seg_data can be NULL, its ignored
    has_data |= bml (work_m2s (bm, seg_data, seg_data, (int) seg_size, mode));
    /* advance by seg_size stereo frames (2 values each) */
    seg_data = &seg_data[seg_size * 2];
    todo -= seg_size;
  }
  /* gstbml_fix_data scales back / sanitizes and reports silence */
  if (gstbml_fix_data ((GstElement *) bml_transform, &info, has_data)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  gst_buffer_unmap (outbuf, &info);
  return GST_FLOW_OK;
}
/* Drain available output from the resampler into @outbuf (GStreamer 0.10
 * buffer API) and fill in timestamp, duration, offsets and the DISCONT
 * flag.  The output size is clamped to the buffer size to protect against
 * memory corruption; suspicious size mismatches are only warned about. */
static GstFlowReturn
legacyresample_do_output (GstLegacyresample * legacyresample,
    GstBuffer * outbuf)
{
  int outsize;
  int outsamples;
  ResampleState *r;

  r = legacyresample->resample;

  outsize = resample_get_output_size (r);
  GST_LOG_OBJECT (legacyresample, "legacyresample can give me %d bytes",
      outsize);

  /* protect against mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    GST_WARNING_OBJECT (legacyresample,
        "overriding legacyresample's outsize %d with outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    outsize = GST_BUFFER_SIZE (outbuf);
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (legacyresample,
        "legacyresample's outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }

  /* actually copy the resampled data; returns the bytes written */
  outsize = resample_get_output_data (r, GST_BUFFER_DATA (outbuf), outsize);
  outsamples = outsize / r->sample_size;
  GST_LOG_OBJECT (legacyresample, "resample gave me %d bytes or %d samples",
      outsize, outsamples);

  GST_BUFFER_OFFSET (outbuf) = legacyresample->offset;
  GST_BUFFER_TIMESTAMP (outbuf) = legacyresample->next_ts;

  if (legacyresample->ts_offset != -1) {
    legacyresample->offset += outsamples;
    legacyresample->ts_offset += outsamples;
    legacyresample->next_ts =
        gst_util_uint64_scale_int (legacyresample->ts_offset, GST_SECOND,
        legacyresample->o_rate);
    GST_BUFFER_OFFSET_END (outbuf) = legacyresample->offset;

    /* we calculate DURATION as the difference between "next" timestamp
     * and current timestamp so we ensure a contiguous stream, instead of
     * having rounding errors. */
    GST_BUFFER_DURATION (outbuf) = legacyresample->next_ts -
        GST_BUFFER_TIMESTAMP (outbuf);
  } else {
    /* no valid offset know, we can still sortof calculate the duration though */
    GST_BUFFER_DURATION (outbuf) =
        gst_util_uint64_scale_int (outsamples, GST_SECOND,
        legacyresample->o_rate);
  }

  /* check for possible mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    /* this is an error that when it happens, would need fixing in the
     * resample library; we told it we wanted only GST_BUFFER_SIZE (outbuf),
     * and it gave us more ! */
    GST_WARNING_OBJECT (legacyresample,
        "legacyresample, you memory corrupting bastard. "
        "you gave me outsize %d while my buffer was size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    return GST_FLOW_ERROR;
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (legacyresample,
        "legacyresample's written outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }
  /* shrink the buffer to what was actually produced */
  GST_BUFFER_SIZE (outbuf) = outsize;

  if (G_UNLIKELY (legacyresample->need_discont)) {
    GST_DEBUG_OBJECT (legacyresample,
        "marking this buffer with the DISCONT flag");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    legacyresample->need_discont = FALSE;
  }

  GST_LOG_OBJECT (legacyresample,
      "transformed to buffer of %d bytes, ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT ", offset %" G_GINT64_FORMAT
      ", offset_end %" G_GINT64_FORMAT,
      outsize, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf));

  return GST_FLOW_OK;
}
/* app-sink "new-sample" style callback: pull a sample from @appsink,
 * rebase its buffer timestamps to recording time (running time minus the
 * stored base time and accumulated paused time) and forward it to the
 * recording branch's appsrc (@user_data).
 *
 * Buffers arriving while the endpoint is not in the START state are
 * dropped.  The pushed buffer's ref is transferred to the appsrc. */
static GstFlowReturn
recv_sample (GstAppSink * appsink, gpointer user_data)
{
  KmsRecorderEndpoint *self =
      KMS_RECORDER_ENDPOINT (GST_OBJECT_PARENT (appsink));
  GstAppSrc *appsrc = GST_APP_SRC (user_data);
  KmsUriEndpointState state;
  GstFlowReturn ret;
  GstSample *sample;
  GstSegment *segment;
  GstBuffer *buffer;
  BaseTimeType *base_time;
  GstClockTime offset;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_OK;

  buffer = gst_sample_get_buffer (sample);
  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  segment = gst_sample_get_segment (sample);

  g_object_get (G_OBJECT (self), "state", &state, NULL);
  if (state != KMS_URI_ENDPOINT_STATE_START) {
    GST_WARNING ("Dropping buffer received in invalid state %" GST_PTR_FORMAT,
        buffer);
    // TODO: Add a flag to discard buffers until keyframe
    ret = GST_FLOW_OK;
    goto end;
  }

  /* take our own ref (the sample owns the original) and make the buffer
   * writable so we can rewrite its timestamps */
  gst_buffer_ref (buffer);
  buffer = gst_buffer_make_writable (buffer);

  /* convert PTS/DTS to running time first */
  if (GST_BUFFER_PTS_IS_VALID (buffer))
    buffer->pts = gst_segment_to_running_time (segment, GST_FORMAT_TIME,
        buffer->pts);
  if (GST_BUFFER_DTS_IS_VALID (buffer))
    buffer->dts = gst_segment_to_running_time (segment, GST_FORMAT_TIME,
        buffer->dts);

  BASE_TIME_LOCK (self);

  base_time = g_object_get_data (G_OBJECT (self), BASE_TIME_DATA);

  /* first buffer: remember its (running-time) PTS/DTS as the base time */
  if (base_time == NULL) {
    base_time = g_slice_new0 (BaseTimeType);
    base_time->pts = buffer->pts;
    base_time->dts = buffer->dts;
    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
        base_time->pts);
    g_object_set_data_full (G_OBJECT (self), BASE_TIME_DATA, base_time,
        release_base_time_type);
  }

  /* base time may still be NONE if the first buffer had no valid PTS */
  if (!GST_CLOCK_TIME_IS_VALID (base_time->pts)
      && GST_BUFFER_PTS_IS_VALID (buffer)) {
    base_time->pts = buffer->pts;
    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
        base_time->pts);
    base_time->dts = buffer->dts;
  }

  /* shift PTS so recording starts at 0, excluding paused periods;
   * clamp at 0 instead of underflowing */
  if (GST_CLOCK_TIME_IS_VALID (base_time->pts)) {
    if (GST_BUFFER_PTS_IS_VALID (buffer)) {
      offset = base_time->pts + self->priv->paused_time;
      if (buffer->pts > offset) {
        buffer->pts -= offset;
      } else {
        buffer->pts = 0;
      }
    }
  }

  /* same adjustment for DTS */
  if (GST_CLOCK_TIME_IS_VALID (base_time->dts)) {
    if (GST_BUFFER_DTS_IS_VALID (buffer)) {
      offset = base_time->dts + self->priv->paused_time;
      if (buffer->dts > offset) {
        buffer->dts -= offset;
      } else {
        buffer->dts = 0;
      }
    }
  }

  /* GST_OBJECT_PARENT (appsink) is the same object as self, so this pairs
   * with the BASE_TIME_LOCK (self) above */
  BASE_TIME_UNLOCK (GST_OBJECT_PARENT (appsink));

  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER))
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);

  /* transfers our buffer ref to the appsrc */
  ret = gst_app_src_push_buffer (appsrc, buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR ("Could not send buffer to appsrc %s. Cause: %s",
        GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
  }

end:
  if (sample != NULL) {
    gst_sample_unref (sample);
  }

  return ret;
}
/* from the given two data buffers, create two streamheader buffers and * some caps that match it, and store them in the given pointers * returns one ref to each of the buffers and the caps */ static void gst_multifdsink_create_streamheader (const gchar * data1, const gchar * data2, GstBuffer ** hbuf1, GstBuffer ** hbuf2, GstCaps ** caps) { GstBuffer *buf; GValue array = { 0 }; GValue value = { 0 }; GstStructure *structure; guint size1 = strlen (data1); guint size2 = strlen (data2); fail_if (hbuf1 == NULL); fail_if (hbuf2 == NULL); fail_if (caps == NULL); /* create caps with streamheader, set the caps, and push the IN_CAPS * buffers */ *hbuf1 = gst_buffer_new_and_alloc (size1); GST_BUFFER_FLAG_SET (*hbuf1, GST_BUFFER_FLAG_IN_CAPS); memcpy (GST_BUFFER_DATA (*hbuf1), data1, size1); *hbuf2 = gst_buffer_new_and_alloc (size2); GST_BUFFER_FLAG_SET (*hbuf2, GST_BUFFER_FLAG_IN_CAPS); memcpy (GST_BUFFER_DATA (*hbuf2), data2, size2); /* we want to keep them around for the tests */ gst_buffer_ref (*hbuf1); gst_buffer_ref (*hbuf2); g_value_init (&array, GST_TYPE_ARRAY); g_value_init (&value, GST_TYPE_BUFFER); /* we take a copy, set it on the array (which refs it), then unref our copy */ buf = gst_buffer_copy (*hbuf1); gst_value_set_buffer (&value, buf); ASSERT_BUFFER_REFCOUNT (buf, "copied buffer", 2); gst_buffer_unref (buf); gst_value_array_append_value (&array, &value); g_value_unset (&value); g_value_init (&value, GST_TYPE_BUFFER); buf = gst_buffer_copy (*hbuf2); gst_value_set_buffer (&value, buf); ASSERT_BUFFER_REFCOUNT (buf, "copied buffer", 2); gst_buffer_unref (buf); gst_value_array_append_value (&array, &value); g_value_unset (&value); *caps = gst_caps_from_string ("application/x-gst-check"); structure = gst_caps_get_structure (*caps, 0); gst_structure_set_value (structure, "streamheader", &array); g_value_unset (&array); ASSERT_CAPS_REFCOUNT (*caps, "streamheader caps", 1); /* set our streamheadery caps on the buffers */ gst_buffer_set_caps (*hbuf1, *caps); 
gst_buffer_set_caps (*hbuf2, *caps); ASSERT_CAPS_REFCOUNT (*caps, "streamheader caps", 3); GST_DEBUG ("created streamheader caps %p %" GST_PTR_FORMAT, *caps, *caps); }
/* Finish an encoded frame: number it, set the delta-unit flag, compute
 * decode timestamps, set caps on first output, optionally emit a
 * GstForceKeyUnit downstream event, and push the frame's src_buffer
 * (or hand the frame to the subclass' shape_output vfunc).
 *
 * The frame is removed from the codec's frame list and freed before
 * returning. */
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret;
  GstBaseVideoEncoderClass *base_video_encoder_class;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  /* assign a monotonically increasing frame number */
  frame->system_frame_number =
      GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number;
  GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number++;

  /* keyframes reset the distance-from-sync counter and are not delta units */
  if (frame->is_sync_point) {
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  /* derive a decode timestamp from the previous frame number and the
   * configured framerate */
  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d,
        GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  /* OFFSET carries the decode timestamp here */
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  /* negotiate src caps once, on the first finished frame */
  if (!base_video_encoder->set_output_caps) {
    if (base_video_encoder_class->get_caps) {
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          base_video_encoder_class->get_caps (base_video_encoder);
    } else {
      /* fallback when the subclass provides no caps */
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps =
          gst_caps_new_simple ("video/unknown", NULL);
    }
    gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    base_video_encoder->set_output_caps = TRUE;
  }

  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
      GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);

  if (frame->force_keyframe) {
    GstClockTime stream_time;
    GstClockTime running_time;
    GstStructure *s;

    running_time =
        gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);
    stream_time =
        gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
        (base_video_encoder)->segment, GST_FORMAT_TIME,
        frame->presentation_timestamp);

    /* FIXME this should send the event that we got on the sink pad
       instead of creating a new one */
    s = gst_structure_new ("GstForceKeyUnit",
        "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
        "stream-time", G_TYPE_UINT64, stream_time,
        "running-time", G_TYPE_UINT64, running_time, NULL);

    gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s));
  }

  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }

  gst_base_video_codec_free_frame (frame);

  return ret;
}
/* keep 100 bytes and burst 80 bytes to clients.
 *
 * Exercises multifdsink's burst-with-keyframe sync method: pushes 10
 * 16-byte buffers (keyframes at 0, 4 and 8, the rest delta units), attaches
 * three pipe-backed clients with different burst limits, and verifies via
 * the read side of each pipe exactly which buffers every client receives. */
void test_burst_client_bytes_with_keyframe()
{
  GstElement *sink;
  GstBuffer *buffer;
  GstCaps *caps;
  int pfd1[2];
  int pfd2[2];
  int pfd3[2];
  gchar data[16];
  gint i;
  guint buffers_queued;

  std_log(LOG_FILENAME_LINE, "Test Started test_burst_client_bytes_with_keyframe");

  sink = setup_multifdsink ();
  /* make sure we keep at least 100 bytes at all times */
  g_object_set (sink, "bytes-min", 100, NULL);
  g_object_set (sink, "sync-method", 5, NULL);  /* 5 = burst-with-keyframe */
  g_object_set (sink, "burst-unit", 3, NULL);   /* 3 = bytes */
  g_object_set (sink, "burst-value", (guint64) 80, NULL);

  fail_if (pipe (pfd1) == -1);
  fail_if (pipe (pfd2) == -1);
  fail_if (pipe (pfd3) == -1);

  ASSERT_SET_STATE (sink, GST_STATE_PLAYING, GST_STATE_CHANGE_ASYNC);

  caps = gst_caps_from_string ("application/x-gst-check");
  GST_DEBUG ("Created test caps %p %" GST_PTR_FORMAT, caps, caps);

  /* push buffers in, 9 * 16 bytes = 144 bytes */
  for (i = 0; i < 9; i++) {
    gchar *data;                /* NOTE: shadows the outer data[16] array */

    buffer = gst_buffer_new_and_alloc (16);
    gst_buffer_set_caps (buffer, caps);

    /* mark most buffers as delta */
    if (i != 0 && i != 4 && i != 8)
      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

    /* copy some id */
    data = (gchar *) GST_BUFFER_DATA (buffer);
    g_snprintf (data, 16, "deadbee%08x", i);
    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  }

  /* check that at least 7 buffers (112 bytes) are in the queue */
  g_object_get (sink, "buffers-queued", &buffers_queued, NULL);
  fail_if (buffers_queued != 7);

  /* now add the clients */
  g_signal_emit_by_name (sink, "add", pfd1[1]);
  /* add_full args: fd, sync-method, burst min unit/value, max unit/value */
  g_signal_emit_by_name (sink, "add_full", pfd2[1], 5,
      3, (guint64) 50, 3, (guint64) 90);
  g_signal_emit_by_name (sink, "add_full", pfd3[1], 5,
      3, (guint64) 50, 3, (guint64) 50);

  /* push last buffer to make client fds ready for reading */
  for (i = 9; i < 10; i++) {
    gchar *data;                /* NOTE: shadows the outer data[16] array */

    buffer = gst_buffer_new_and_alloc (16);
    gst_buffer_set_caps (buffer, caps);
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

    /* copy some id */
    data = (gchar *) GST_BUFFER_DATA (buffer);
    g_snprintf (data, 16, "deadbee%08x", i);
    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  }

  /* now we should only read the last 6 buffers (min 5 * 16 = 80 bytes),
   * keyframe at buffer 4 */
  GST_DEBUG ("Reading from client 1");
  fail_if (read (pfd1[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000004", 16) == 0);
  fail_if (read (pfd1[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000005", 16) == 0);
  fail_if (read (pfd1[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000006", 16) == 0);
  fail_if (read (pfd1[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000007", 16) == 0);
  fail_if (read (pfd1[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000008", 16) == 0);
  fail_if (read (pfd1[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000009", 16) == 0);

  /* second client only bursts 50 bytes = 4 buffers, there is
   * no keyframe above min and below max, so send min */
  GST_DEBUG ("Reading from client 2");
  fail_if (read (pfd2[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000006", 16) == 0);
  fail_if (read (pfd2[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000007", 16) == 0);
  fail_if (read (pfd2[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000008", 16) == 0);
  fail_if (read (pfd2[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000009", 16) == 0);

  /* third client only bursts 50 bytes = 4 buffers, we can't send
   * more than 50 bytes so we only get 3 buffers (48 bytes). */
  GST_DEBUG ("Reading from client 3");
  fail_if (read (pfd3[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000007", 16) == 0);
  fail_if (read (pfd3[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000008", 16) == 0);
  fail_if (read (pfd3[0], data, 16) < 16);
  fail_unless (strncmp (data, "deadbee00000009", 16) == 0);

  GST_DEBUG ("cleaning up multifdsink");
  ASSERT_SET_STATE (sink, GST_STATE_NULL, GST_STATE_CHANGE_SUCCESS);
  cleanup_multifdsink (sink);

  ASSERT_CAPS_REFCOUNT (caps, "caps", 1);
  gst_caps_unref (caps);

  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits
 * pages to output pad.
 *
 * Header pages (and any PAGE_PENDING pages seen between them) are collected
 * per-stream until the first data page of the chain arrives; at that point the
 * collected headers are advertised as "streamheader" caps on the source pad,
 * pushed downstream, and subsequent pages flow through directly.  Data pages
 * without a granulepos are buffered per-stream until a granulepos is seen so
 * they can be timestamped.
 *
 * Returns: GST_FLOW_OK, a downstream flow return, or GST_FLOW_ERROR after
 * pushing EOS when an unrecoverable stream inconsistency is found.
 */
static GstFlowReturn
gst_ogg_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstOggParse *ogg;
  GstFlowReturn result = GST_FLOW_OK;
  gint ret = -1;
  guint32 serialno;
  GstBuffer *pagebuffer;
  GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer);

  ogg = GST_OGG_PARSE (parent);

  GST_LOG_OBJECT (ogg, "Chain function received buffer of size %"
      G_GSIZE_FORMAT, gst_buffer_get_size (buffer));

  /* hands ownership of 'buffer' to the ogg sync layer */
  gst_ogg_parse_submit_buffer (ogg, buffer);

  while (ret != 0 && result == GST_FLOW_OK) {
    ogg_page page;

    /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can
     * track how many bytes the ogg layer discarded (in the case of sync errors,
     * etc.); this allows us to accurately track the current stream offset */
    ret = ogg_sync_pageseek (&ogg->sync, &page);
    if (ret == 0) {
      /* need more data, that's fine... */
      break;
    } else if (ret < 0) {
      /* discontinuity; track how many bytes we skipped (-ret) */
      ogg->offset -= ret;
    } else {
      gint64 granule = ogg_page_granulepos (&page);
#ifndef GST_DISABLE_GST_DEBUG
      int bos = ogg_page_bos (&page);
#endif
      guint64 startoffset = ogg->offset;
      GstOggStream *stream;
      gboolean keyframe;

      serialno = ogg_page_serialno (&page);
      stream = gst_ogg_parse_find_stream (ogg, serialno);

      GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT,
          GST_TIME_ARGS (buffertimestamp));

      if (stream) {
        buffertimestamp = gst_ogg_stream_get_end_time_for_granulepos (stream,
            granule);
        if (ogg->video_stream) {
          /* only pages of the video stream can be non-keyframes; all other
           * streams' pages are treated as keyframes (no DELTA_UNIT flag) */
          if (stream == ogg->video_stream) {
            keyframe = gst_ogg_stream_granulepos_is_key_frame (stream, granule);
          } else {
            keyframe = FALSE;
          }
        } else {
          keyframe = TRUE;
        }
      } else {
        buffertimestamp = GST_CLOCK_TIME_NONE;
        keyframe = TRUE;
      }
      /* new buffer holding exactly this page; we own one reference */
      pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset,
          buffertimestamp);

      /* We read out 'ret' bytes, so we set the next offset appropriately */
      ogg->offset += ret;

      GST_LOG_OBJECT (ogg,
          "processing ogg page (serial %08x, pageno %ld, "
          "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %"
          G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ") keyframe=%d",
          serialno, ogg_page_pageno (&page),
          granule, bos, startoffset, ogg->offset, keyframe);

      if (ogg_page_bos (&page)) {
        /* If we've seen this serialno before, this is technically an error,
         * we log this case but accept it - this one replaces the previous
         * stream with this serialno. We can do this since we're streaming, and
         * not supporting seeking...
         */
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (stream != NULL) {
          GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %08x "
              "at offset %" G_GINT64_FORMAT, serialno, ogg->offset);
        }

        if (ogg->last_page_not_bos) {
          /* a BOS page after non-BOS pages starts a new chain; drop all
           * streams belonging to the previous chain */
          GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new "
              "chain starting with serial %u", serialno);
          gst_ogg_parse_delete_all_streams (ogg);
        }

        stream = gst_ogg_parse_new_stream (ogg, &page);

        ogg->last_page_not_bos = FALSE;

        /* the headers list takes its own reference to the page buffer */
        gst_buffer_ref (pagebuffer);
        stream->headers = g_list_append (stream->headers, pagebuffer);

        if (!ogg->in_headers) {
          GST_LOG_OBJECT (ogg,
              "Found start of new chain at offset %" G_GUINT64_FORMAT,
              startoffset);
          ogg->in_headers = 1;
        }

        /* For now, we just keep the header buffer in the stream->headers list;
         * it actually gets output once we've collected the entire set
         */
      } else {
        /* Non-BOS page. Either: we're outside headers, and this isn't a
         * header (normal data), outside headers and this is (error!), inside
         * headers, this is (append header), or inside headers and this isn't
         * (we've found the end of headers; flush the lot!)
         *
         * Before that, we flag that the last page seen (this one) was not a
         * BOS page; that way we know that when we next see a BOS page it's a
         * new chain, and we can flush all existing streams.
         */
        page_type type;
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (!stream) {
          /* NOTE(review): the failure paths below leak the current
           * 'pagebuffer' reference (and, further down, the partially built
           * GValue array) — pre-existing behavior, left untouched here. */
          GST_LOG_OBJECT (ogg,
              "Non-BOS page unexpectedly found at %" G_GINT64_FORMAT,
              ogg->offset);
          goto failure;
        }

        ogg->last_page_not_bos = TRUE;

        type = gst_ogg_parse_is_header (ogg, stream, &page);

        if (type == PAGE_PENDING && ogg->in_headers) {
          /* can't yet tell whether this is a header; park it per-stream */
          gst_buffer_ref (pagebuffer);

          stream->unknown_pages = g_list_append (stream->unknown_pages,
              pagebuffer);
        } else if (type == PAGE_HEADER) {
          if (!ogg->in_headers) {
            GST_LOG_OBJECT (ogg, "Header page unexpectedly found outside "
                "headers at offset %" G_GINT64_FORMAT, ogg->offset);
            goto failure;
          } else {
            /* Append the header to the buffer list, after any unknown previous
             * pages.
             *
             * g_list_concat() splices the unknown_pages nodes directly into
             * the headers list (no copy), so the list pointer must simply be
             * cleared afterwards.  Freeing it here would free nodes now owned
             * by stream->headers and corrupt that list. */
            stream->headers = g_list_concat (stream->headers,
                stream->unknown_pages);
            stream->unknown_pages = NULL;
            gst_buffer_ref (pagebuffer);
            stream->headers = g_list_append (stream->headers, pagebuffer);
          }
        } else {                /* PAGE_DATA, or PAGE_PENDING but outside headers */
          if (ogg->in_headers) {
            /* First non-header page... set caps, flush headers.
             *
             * First up, we build a single GValue list of all the pagebuffers
             * we're using for the headers, in order.
             * Then we set this on the caps structure. Then we can start pushing
             * buffers for the headers, and finally we send this non-header
             * page.
             */
            GstCaps *caps;
            GstStructure *structure;
            GValue array = { 0 };
            gint count = 0;
            gboolean found_pending_headers = FALSE;
            GSList *l;

            g_value_init (&array, GST_TYPE_ARRAY);

            /* first header (BOS page) of every stream goes first... */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;

              if (g_list_length (stream->headers) == 0) {
                GST_LOG_OBJECT (ogg, "No primary header found for stream %08x",
                    stream->serialno);
                goto failure;
              }

              gst_ogg_parse_append_header (&array,
                  GST_BUFFER (stream->headers->data));
              count++;
            }

            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *j;

              /* already appended the first header, now do headers 2-N */
              for (j = stream->headers->next; j != NULL; j = j->next) {
                gst_ogg_parse_append_header (&array, GST_BUFFER (j->data));
                count++;
              }
            }

            caps = gst_pad_query_caps (ogg->srcpad, NULL);
            caps = gst_caps_make_writable (caps);

            structure = gst_caps_get_structure (caps, 0);
            /* take_value: the array's contents are owned by the caps now */
            gst_structure_take_value (structure, "streamheader", &array);

            gst_pad_set_caps (ogg->srcpad, caps);

            if (ogg->caps)
              gst_caps_unref (ogg->caps);
            ogg->caps = caps;

            GST_LOG_OBJECT (ogg, "Set \"streamheader\" caps with %d buffers "
                "(one per page)", count);

            /* Now, we do the same thing, but push buffers... */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GstBuffer *buf = GST_BUFFER (stream->headers->data);

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;
            }
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *j;

              /* pushed the first one for each stream already, now do 2-N */
              for (j = stream->headers->next; j != NULL; j = j->next) {
                GstBuffer *buf = GST_BUFFER (j->data);

                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
            }

            ogg->in_headers = 0;

            /* And finally the pending data pages */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *k;

              if (stream->unknown_pages == NULL)
                continue;

              if (found_pending_headers) {
                GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at "
                    "approximate offset %" G_GINT64_FORMAT, ogg->offset);
              }
              found_pending_headers = TRUE;

              GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers",
                  g_list_length (stream->unknown_pages));

              for (k = stream->unknown_pages; k != NULL; k = k->next) {
                GstBuffer *buf = GST_BUFFER (k->data);

                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
              /* gst_pad_push() consumed each buffer's pushed reference; drop
               * the list's own references and the list itself */
              g_list_foreach (stream->unknown_pages,
                  (GFunc) gst_mini_object_unref, NULL);
              g_list_free (stream->unknown_pages);
              stream->unknown_pages = NULL;
            }
          }

          if (granule == -1) {
            /* no granulepos yet: hold the page until we can timestamp it */
            stream->stored_buffers = g_list_append (stream->stored_buffers,
                pagebuffer);
          } else {
            /* flush any held pages with this page's end timestamp, marking
             * everything up to (not including) the keyframe as delta units */
            while (stream->stored_buffers) {
              GstBuffer *buf = stream->stored_buffers->data;

              buf = gst_buffer_make_writable (buf);

              GST_BUFFER_TIMESTAMP (buf) = buffertimestamp;
              if (!keyframe) {
                GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
              } else {
                keyframe = FALSE;
              }

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;

              stream->stored_buffers =
                  g_list_delete_link (stream->stored_buffers,
                  stream->stored_buffers);
            }

            pagebuffer = gst_buffer_make_writable (pagebuffer);
            if (!keyframe) {
              GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT);
            } else {
              keyframe = FALSE;
            }

            result = gst_pad_push (ogg->srcpad, pagebuffer);
            if (result != GST_FLOW_OK)
              return result;
          }
        }
      }
    }
  }

  return result;

failure:
  gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ());
  return GST_FLOW_ERROR;
}
/**
 * gst_buffer_create_sub:
 * @buffer: a #GstBuffer.
 * @offset: the offset into @buffer at which the new sub-buffer
 *          begins.
 * @size: the size of the new #GstBuffer sub-buffer, in bytes.
 *
 * Creates a sub-buffer from @buffer at @offset and @size.
 * This sub-buffer uses the actual memory space of the parent buffer.
 * This function will copy the offset and timestamp fields when the
 * offset is 0. If not, they will be set to #GST_CLOCK_TIME_NONE and
 * #GST_BUFFER_OFFSET_NONE.
 * If @offset equals 0 and @size equals the total size of @buffer, the
 * duration and offset end fields are also copied. If not they will be set
 * to #GST_CLOCK_TIME_NONE and #GST_BUFFER_OFFSET_NONE.
 *
 * MT safe.
 * Returns: the new #GstBuffer.
 * Returns NULL if the arguments were invalid.
 */
GstBuffer *
gst_buffer_create_sub (GstBuffer * buffer, guint offset, guint size)
{
  GstSubBuffer *subbuffer;
  GstBuffer *parent;
  gboolean complete;

  g_return_val_if_fail (buffer != NULL, NULL);
  g_return_val_if_fail (buffer->mini_object.refcount > 0, NULL);
  /* overflow-safe bounds check: 'offset + size' could wrap around in guint
   * arithmetic and incorrectly pass validation, so compare in two steps */
  g_return_val_if_fail (buffer->size >= offset, NULL);
  g_return_val_if_fail (buffer->size - offset >= size, NULL);

  /* find real parent: chain sub-buffers of sub-buffers directly to the
   * top-level buffer so reference counting stays flat */
  if (GST_IS_SUBBUFFER (buffer)) {
    parent = GST_SUBBUFFER_CAST (buffer)->parent;
  } else {
    parent = buffer;
  }
  /* the sub-buffer keeps its parent alive; released when the sub-buffer
   * is finalized */
  gst_buffer_ref (parent);

  /* create the new buffer */
  subbuffer = (GstSubBuffer *) gst_mini_object_new (_gst_subbuffer_type);
  subbuffer->parent = parent;
  GST_CAT_LOG (GST_CAT_BUFFER, "new subbuffer %p (parent %p)", subbuffer,
      parent);

  /* set the right values in the child: data points into the parent's
   * memory, no copy is made */
  GST_BUFFER_DATA (GST_BUFFER_CAST (subbuffer)) = buffer->data + offset;
  GST_BUFFER_SIZE (GST_BUFFER_CAST (subbuffer)) = size;

  if ((offset == 0) && (size == GST_BUFFER_SIZE (buffer))) {
    /* copy all the flags except IN_CAPS */
    GST_BUFFER_FLAG_SET (subbuffer, GST_BUFFER_FLAGS (buffer));
    GST_BUFFER_FLAG_UNSET (subbuffer, GST_BUFFER_FLAG_IN_CAPS);
  } else {
    /* copy only PREROLL & GAP flags */
    GST_BUFFER_FLAG_SET (subbuffer, (GST_BUFFER_FLAGS (buffer) &
            (GST_BUFFER_FLAG_PREROLL | GST_BUFFER_FLAG_GAP)));
  }

  /* we can copy the timestamp and offset if the new buffer starts at
   * offset 0 */
  if (offset == 0) {
    GST_BUFFER_TIMESTAMP (subbuffer) = GST_BUFFER_TIMESTAMP (buffer);
    GST_BUFFER_OFFSET (subbuffer) = GST_BUFFER_OFFSET (buffer);
    complete = (buffer->size == size);
  } else {
    GST_BUFFER_TIMESTAMP (subbuffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET (subbuffer) = GST_BUFFER_OFFSET_NONE;
    complete = FALSE;
  }

  if (complete) {
    GstCaps *caps;

    /* if we copied the complete buffer we can copy the duration,
     * offset_end and caps as well */
    GST_BUFFER_DURATION (subbuffer) = GST_BUFFER_DURATION (buffer);
    GST_BUFFER_OFFSET_END (subbuffer) = GST_BUFFER_OFFSET_END (buffer);
    if ((caps = GST_BUFFER_CAPS (buffer)))
      gst_caps_ref (caps);
    GST_BUFFER_CAPS (subbuffer) = caps;
  } else {
    GST_BUFFER_DURATION (subbuffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (subbuffer) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_CAPS (subbuffer) = NULL;
  }
  return GST_BUFFER_CAST (subbuffer);
}