/* Finalize hook for a bufferclass buffer: instead of destroying the buffer,
 * try to "resuscitate" it back into its pool for reuse.
 *
 * While the pool is running, the buffer is marked READONLY, pushed back on
 * the pool's avail_buffers queue and re-referenced (taking a ref during
 * finalize cancels the destruction).  Once the pool is shutting down, the
 * buffer drops the pool reference it was holding and the parent class
 * finalize runs, actually freeing the buffer.
 */
static void
gst_bcbuffer_finalize (GstBufferClassBuffer * buffer)
{
  GstBufferClassBufferPool *pool = buffer->pool;
  gboolean resuscitated;

  GST_LOG_OBJECT (pool->elem, "finalizing buffer %p %d", buffer, buffer->index);

  GST_BCBUFFERPOOL_LOCK (pool);
  if (pool->running) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_READONLY);
    /* NOTE(review): the buffer is queued before the revival ref below is
     * taken; this presumably relies on the pool lock keeping consumers from
     * using the popped buffer until we unlock — confirm. */
    g_async_queue_push (pool->avail_buffers, buffer);
    resuscitated = TRUE;
  } else {
    GST_LOG_OBJECT (pool->elem, "the pool is shutting down");
    resuscitated = FALSE;
  }
  if (resuscitated) {
    GST_LOG_OBJECT (pool->elem, "reviving buffer %p, %d", buffer,
        buffer->index);
    /* re-ref while the refcount is 0: cancels the ongoing finalize */
    gst_buffer_ref (GST_BUFFER (buffer));
  }
  GST_BCBUFFERPOOL_UNLOCK (pool);

  if (!resuscitated) {
    GST_LOG_OBJECT (pool->elem, "buffer %p not recovered, unmapping", buffer);
    /* drop the pool reference held by this buffer */
    gst_mini_object_unref (GST_MINI_OBJECT (pool));
    // munmap ((void *) GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));

    /* chain up so the buffer memory is actually released */
    GST_MINI_OBJECT_CLASS (buffer_parent_class)->
        finalize (GST_MINI_OBJECT (buffer));
  }
}
/* Prepend the cached SPARK codec header to @buffer, if one is pending.
 *
 * Returns: (transfer full) the buffer to push downstream — either the
 * original @buffer (no header pending) or a newly merged header+payload
 * buffer carrying @buffer's timestamp.
 *
 * Fixes over the previous revision:
 *  - gst_buffer_merge() does not take ownership of its arguments, so the
 *    incoming @buffer must be unreffed once it has been replaced (it was
 *    leaked before);
 *  - video_header is cleared after being unreffed so a later call cannot
 *    run GST_IS_BUFFER() on freed memory (it was left dangling before).
 */
static GstBuffer*
gst_goo_decspark_codec_data_processing (GstGooVideoFilter *filter,
    GstBuffer *buffer)
{
  GstGooDecSpark *self = GST_GOO_DECSPARK (filter);

  if (GST_IS_BUFFER (GST_GOO_VIDEODEC (self)->video_header)) {
    GstBuffer *new_buf;

    GST_DEBUG_OBJECT (self, "Adding SPARK header info to buffer");
    new_buf = gst_buffer_merge (
        GST_BUFFER (GST_GOO_VIDEODEC (self)->video_header),
        GST_BUFFER (buffer));

    /* gst_buffer_merge() will end up putting video_header's timestamp on
     * the new buffer, but actually we want buf's timestamp:
     */
    GST_BUFFER_TIMESTAMP (new_buf) = GST_BUFFER_TIMESTAMP (buffer);

    /* merge copied the payload; release the original input buffer */
    gst_buffer_unref (buffer);
    buffer = new_buf;

    /* the header is consumed exactly once: drop it and clear the pointer */
    gst_buffer_unref (GST_GOO_VIDEODEC (self)->video_header);
    GST_GOO_VIDEODEC (self)->video_header = NULL;
  }

  return buffer;
}
/* Element loop function: read one MPEG chunk from the packetizer and either
 * accumulate it into the current RTP packet (enc->packet) or, on non-buffer
 * data (events), flush the pending packet downstream and forward the event.
 *
 * Start codes drive packet boundaries: sequence/GOP/picture starts open a
 * new packet (once payload data has been seen), slice codes are added via
 * gst_rfc2250_enc_add_slice(), and everything else is appended verbatim.
 */
static void
gst_rfc2250_enc_loop (GstElement * element)
{
  GstRFC2250Enc *enc = GST_RFC2250_ENC (element);
  GstData *data;
  guint id;
  gboolean mpeg2;  /* NOTE(review): read but never used below — confirm intent */

  data = gst_mpeg_packetize_read (enc->packetize);

  id = GST_MPEG_PACKETIZE_ID (enc->packetize);
  mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (enc->packetize);

  if (GST_IS_BUFFER (data)) {
    GstBuffer *buffer = GST_BUFFER (data);

    GST_DEBUG ("rfc2250enc: have chunk 0x%02X", id);

    switch (id) {
      case SEQUENCE_START_CODE:
        /* a sequence header always starts a fresh packet */
        gst_rfc2250_enc_new_buffer (enc);
        enc->flags |= ENC_HAVE_SEQ;
        break;
      case GOP_START_CODE:
        /* only split if payload data has already been collected */
        if (enc->flags & ENC_HAVE_DATA) {
          gst_rfc2250_enc_new_buffer (enc);
        }
        enc->flags |= ENC_HAVE_GOP;
        break;
      case PICTURE_START_CODE:
        if (enc->flags & ENC_HAVE_DATA) {
          gst_rfc2250_enc_new_buffer (enc);
        }
        enc->flags |= ENC_HAVE_PIC;
        break;
      case EXT_START_CODE:
      case USER_START_CODE:
      case SEQUENCE_ERROR_START_CODE:
      case SEQUENCE_END_START_CODE:
        /* appended to the current packet below, no boundary action */
        break;
      default:
        /* do this here because of the long range */
        if (id >= SLICE_MIN_START_CODE && id <= SLICE_MAX_START_CODE) {
          enc->flags |= ENC_HAVE_DATA;
          /* slice handling takes ownership; prevent the append below */
          gst_rfc2250_enc_add_slice (enc, buffer);
          buffer = NULL;
          break;
        }
        break;
    }
    if (buffer) {
      /* NOTE(review): gst_buffer_merge() returns a NEW buffer and does not
       * modify enc->packet in place; the return value is discarded here,
       * which looks like a leak/no-op — verify against the packetizer API
       * of this GStreamer version. */
      gst_buffer_merge (enc->packet, buffer);
      enc->remaining -= GST_BUFFER_SIZE (buffer);
      gst_buffer_unref (buffer);
    }
  } else {
    /* non-buffer data: flush any pending packet, then forward the event */
    if (enc->packet) {
      gst_pad_push (enc->srcpad, GST_DATA (enc->packet));
      enc->packet = NULL;
      enc->flags = 0;
      enc->remaining = enc->MTU;
    }
    gst_pad_event_default (enc->sinkpad, GST_EVENT (data));
  }
}
/* Return a new reference to the first buffer held by the adapter.
 * The adapter keeps its own reference; the caller must unref the result.
 * Assumes the adapter's buffer list is non-empty. */
static GstBuffer *
gst_adapter_get_buffer (GstAdapter * adapter)
{
  GstBuffer *head;

  head = GST_BUFFER (adapter->buflist->data);
  return gst_buffer_ref (head);
}
/* Allocate and initialize one V4L2 buffer for @pool.
 *
 * Queries the driver for the buffer at @index (VIDIOC_QUERYBUF), mmaps the
 * driver memory into the process, and wraps it as a GstBuffer with @caps.
 * The new buffer holds a reference on @pool.
 *
 * Returns the buffer, or NULL on failure with errno preserved from the
 * failing ioctl/mmap call.
 */
static GstV4l2Buffer *
gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
{
  GstV4l2Buffer *ret;
  guint8 *data;

  ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);

  GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index,
      ret, pool);

  /* the buffer keeps the pool alive for as long as it exists */
  ret->pool =
      (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));

  ret->vbuffer.index = index;
  ret->vbuffer.type = pool->type;
  ret->vbuffer.memory = V4L2_MEMORY_MMAP;

  if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
    goto querybuf_failed;

  GST_LOG_OBJECT (pool->v4l2elem, "  index:     %u", ret->vbuffer.index);
  GST_LOG_OBJECT (pool->v4l2elem, "  type:      %d", ret->vbuffer.type);
  GST_LOG_OBJECT (pool->v4l2elem, "  bytesused: %u", ret->vbuffer.bytesused);
  GST_LOG_OBJECT (pool->v4l2elem, "  flags:     %08x", ret->vbuffer.flags);
  GST_LOG_OBJECT (pool->v4l2elem, "  field:     %d", ret->vbuffer.field);
  GST_LOG_OBJECT (pool->v4l2elem, "  memory:    %d", ret->vbuffer.memory);
  if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
    GST_LOG_OBJECT (pool->v4l2elem, "  MMAP offset:  %u",
        ret->vbuffer.m.offset);
  GST_LOG_OBJECT (pool->v4l2elem, "  length:    %u", ret->vbuffer.length);
  GST_LOG_OBJECT (pool->v4l2elem, "  input:     %u", ret->vbuffer.input);

  /* map the driver buffer into our address space; unmapping is presumably
   * done by the buffer's finalize — TODO confirm against the finalize impl */
  data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
      PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
      ret->vbuffer.m.offset);

  if (data == MAP_FAILED)
    goto mmap_failed;

  GST_BUFFER_DATA (ret) = data;
  GST_BUFFER_SIZE (ret) = ret->vbuffer.length;

  /* buffers returned by the pool must not be written to by downstream */
  GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);

  gst_buffer_set_caps (GST_BUFFER (ret), caps);

  return ret;

  /* ERRORS */
querybuf_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
    /* unref also releases the pool reference taken above */
    gst_buffer_unref (GST_BUFFER (ret));
    errno = errnosave;
    return NULL;
  }
mmap_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
    gst_buffer_unref (GST_BUFFER (ret));
    errno = errnosave;
    return NULL;
  }
}
/* Chain function: apply a 5- or 9-point median filter to an incoming
 * planar YUV frame and push the filtered copy downstream.
 *
 * When the element is inactive the input is forwarded untouched.  When
 * lum_only is set, only the luma plane is filtered and the chroma planes
 * are copied verbatim.
 */
static void
gst_median_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *inbuf = GST_BUFFER (_data);
  GstMedian *median;
  guchar *src;
  guchar *dst;
  gulong size;
  GstBuffer *outbuf;
  /* GstMeta *meta; */
  int lumsize, chromsize;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (inbuf != NULL);

  median = GST_MEDIAN (GST_OBJECT_PARENT (pad));

  /* pass-through when filtering is disabled */
  if (!median->active) {
    gst_pad_push (median->srcpad, GST_DATA (inbuf));
    return;
  }

  src = GST_BUFFER_DATA (inbuf);
  size = GST_BUFFER_SIZE (inbuf);

  GST_DEBUG ("median: have buffer of %d", GST_BUFFER_SIZE (inbuf));

  outbuf = gst_buffer_new ();
  dst = g_malloc (GST_BUFFER_SIZE (inbuf));
  GST_BUFFER_DATA (outbuf) = dst;
  GST_BUFFER_SIZE (outbuf) = GST_BUFFER_SIZE (inbuf);

  /* planar layout: full-size luma plane, two quarter-size chroma planes */
  lumsize = median->width * median->height;
  chromsize = lumsize / 4;

  if (median->filtersize == 5) {
    median_5 (src, dst, median->width, median->height);
    if (!median->lum_only) {
      median_5 (src + lumsize, dst + lumsize,
          median->width / 2, median->height / 2);
      median_5 (src + lumsize + chromsize, dst + lumsize + chromsize,
          median->width / 2, median->height / 2);
    }
  } else {
    median_9 (src, dst, median->width, median->height);
    if (!median->lum_only) {
      median_9 (src + lumsize, dst + lumsize,
          median->width / 2, median->height / 2);
      median_9 (src + lumsize + chromsize, dst + lumsize + chromsize,
          median->width / 2, median->height / 2);
    }
  }

  /* luma-only mode: chroma planes are passed through unfiltered */
  if (median->lum_only)
    memcpy (dst + lumsize, src + lumsize, chromsize * 2);

  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);

  gst_buffer_unref (inbuf);

  gst_pad_push (median->srcpad, GST_DATA (outbuf));
}
/* Pad probe on the subtitle-overlay source pad.
 *
 * For buffers: during descriptor generation, records the time span of each
 * subtitled frame into the media descriptor writer; otherwise, compares
 * rendered frames against the expected subtitle spans in
 * glob_subtitled_frames and validates the "subtitle-rendered" checklist
 * item.  For events: tracks SEGMENT events to maintain the stream-time
 * reference and validates "first-segment".
 *
 * Always returns TRUE so the probed data keeps flowing.
 */
static gboolean
probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,
    gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);

  global_last_probe = g_get_monotonic_time ();
  if (GST_IS_BUFFER (object)) {
    GstClockTime buf_start, buf_end;
    GstBuffer *next_sub, *buf = GST_BUFFER (object);

    /* work in stream time so spans are comparable across segments */
    buf_start =
        gst_segment_to_stream_time (&glob_suboverlay_src_probe->last_segment,
        glob_suboverlay_src_probe->last_segment.format, GST_BUFFER_PTS (buf));
    buf_end = buf_start + GST_BUFFER_DURATION (buf);

    if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION) {
      if (glob_pipeline_restarted == TRUE) {
        gboolean has_subs;

        if (glob_duration > 0 && buf_end > glob_duration) {
          /* Done according to the duration previously found by the
           * discoverer */
          next_test (test);
        }

        has_subs = frame_contains_subtitles (buf);
        if (GST_CLOCK_TIME_IS_VALID (glob_last_subtitled_frame)) {
          if (has_subs == FALSE) {
            /* a subtitled run just ended: record its span in the writer */
            GstBuffer *nbuf = gst_buffer_new ();

            GST_BUFFER_PTS (nbuf) = glob_last_subtitled_frame;
            GST_BUFFER_DURATION (nbuf) = buf_end - glob_last_subtitled_frame;
            media_descriptor_writer_add_frame (glob_writer, pad, nbuf);
            glob_last_subtitled_frame = GST_CLOCK_TIME_NONE;
            gst_buffer_unref (nbuf);
          }
        } else if (has_subs) {
          /* a subtitled run starts here */
          glob_last_subtitled_frame = buf_start;
        }
      }
      goto done;
    }

    /* We played enough... next test */
    if (GST_CLOCK_TIME_IS_VALID (glob_first_subtitle_ts) &&
        buf_start >=
        glob_first_subtitle_ts + glob_playback_duration * GST_SECOND) {
      next_test (test);
    }

    switch (glob_in_progress) {
      case TEST_NONE:
      {
        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment",
              FALSE, "Got a buffer before the first segment");
        }
        next_test (test);
      }
        /* NOTE(review): no break here — falls through to default, which is
         * harmless (default only breaks) but looks unintentional; confirm. */
      default:
        break;
    }

    if (glob_subtitled_frames != NULL) {
      GstClockTime sub_start, sub_end;

      /* head of the sorted list is the next subtitle span we expect */
      next_sub = GST_BUFFER (glob_subtitled_frames->data);

      sub_start = GST_BUFFER_PTS (next_sub);
      sub_end = GST_BUFFER_DURATION_IS_VALID (next_sub) ?
          GST_BUFFER_DURATION (next_sub) + sub_start : -1;

      if (buf_start >= sub_start && buf_end < sub_end) {
        /* buffer lies inside the expected subtitle span: it must render */
        if (frame_contains_subtitles (buf) == TRUE) {
          glob_sub_render_found = TRUE;
          insanity_test_validate_checklist_item (test, "subtitle-rendered",
              TRUE, NULL);
        } else {
          gchar *msg = g_strdup_printf ("Subtitle start %" GST_TIME_FORMAT
              " end %" GST_TIME_FORMAT " received buffer with no sub start %"
              GST_TIME_FORMAT " end %" GST_TIME_FORMAT,
              GST_TIME_ARGS (sub_start), GST_TIME_ARGS (sub_end),
              GST_TIME_ARGS (buf_start), GST_TIME_ARGS (buf_end));

          insanity_test_validate_checklist_item (test, "subtitle-rendered",
              FALSE, msg);
          glob_wrong_rendered_buf = TRUE;

          g_free (msg);
        }
      } else if (buf_end > sub_end) {
        /* We got a buffer that is after the subtitle we were waiting for
         * remove that buffer as not waiting for it anymore */
        gst_buffer_unref (next_sub);

        glob_subtitled_frames = g_list_remove (glob_subtitled_frames,
            next_sub);
      }
    }
  } else if (GST_IS_EVENT (object)) {
    GstEvent *event = GST_EVENT (object);

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_SEGMENT:
      {
        gst_event_copy_segment (event,
            &glob_suboverlay_src_probe->last_segment);

        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", TRUE,
              NULL);
          glob_suboverlay_src_probe->waiting_first_segment = FALSE;
        }

        if (glob_suboverlay_src_probe->waiting_segment == FALSE)
          /* Cache the segment as it will be our reference but don't look
           * further */
          goto done;

        /* NOTE(review): waiting_first_segment was already cleared above, so
         * this inner block is dead code — confirm before removing. */
        if (glob_suboverlay_src_probe->waiting_first_segment == TRUE) {
          /* Make sure that a new segment has been received for each stream */
          glob_suboverlay_src_probe->waiting_first_segment = FALSE;
          glob_suboverlay_src_probe->waiting_segment = FALSE;
        }

        glob_suboverlay_src_probe->waiting_segment = FALSE;
        break;
      }
      default:
        break;
    }
  }

done:
  return TRUE;
}
/* Push one black frame of @input_format through x264enc configured for
 * @profile, send EOS, and verify that:
 *  - exactly one encoded buffer comes out,
 *  - the output caps carry the expected @profile / @profile_id,
 *  - the encoded stream starts with the NAL sequence SPS (7), PPS (8),
 *    IDR slice (5), ignoring AU delimiters (9) and SEI (6),
 *  - NAL length prefixes (avc format, 4-byte big-endian) exactly cover
 *    the buffer.
 */
static void
test_video_profile (const gchar * profile, gint profile_id,
    const gchar * input_format)
{
  GstElement *x264enc;
  GstBuffer *inbuffer, *outbuffer;
  int i, num_buffers;

  x264enc = setup_x264enc (profile, "avc", input_format);
  fail_unless (gst_element_set_state (x264enc,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  /* corresponds to I420 buffer for the size mentioned in the caps */
  if (!strcmp (input_format, "I420"))
    inbuffer = gst_buffer_new_and_alloc (384 * 288 * 3 / 2);
  else if (!strcmp (input_format, "Y42B"))
    inbuffer = gst_buffer_new_and_alloc (384 * 288 * 2);
  else if (!strcmp (input_format, "Y444"))
    inbuffer = gst_buffer_new_and_alloc (384 * 288 * 3);
  else
    g_assert_not_reached ();

  /* makes valgrind's memcheck happier */
  gst_buffer_memset (inbuffer, 0, 0, -1);
  GST_BUFFER_TIMESTAMP (inbuffer) = 0;
  ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
  fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

  /* send eos to have all flushed if needed */
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()) == TRUE);

  num_buffers = g_list_length (buffers);
  fail_unless (num_buffers == 1);

  /* check output caps */
  {
    GstCaps *outcaps;

    outcaps = gst_pad_get_current_caps (mysinkpad);
    check_caps (outcaps, profile, profile_id);
    gst_caps_unref (outcaps);
  }

  /* clean up buffers */
  for (i = 0; i < num_buffers; ++i) {
    outbuffer = GST_BUFFER (buffers->data);
    fail_if (outbuffer == NULL);

    switch (i) {
      case 0:
      {
        gint nsize, npos, j, type, next_type;
        GstMapInfo map;
        const guint8 *data;
        gsize size;

        gst_buffer_map (outbuffer, &map, GST_MAP_READ);
        data = map.data;
        size = map.size;

        npos = 0;
        j = 0;
        /* need SPS first */
        next_type = 7;

        /* loop through NALs: each is a 4-byte BE length followed by
         * that many payload bytes; the low 5 bits of the first payload
         * byte are the NAL unit type */
        while (npos < size) {
          fail_unless (size - npos >= 4);
          nsize = GST_READ_UINT32_BE (data + npos);
          fail_unless (nsize > 0);
          fail_unless (npos + 4 + nsize <= size);
          type = data[npos + 4] & 0x1F;
          /* check the first NALs, disregard AU (9), SEI (6) */
          if (type != 9 && type != 6) {
            fail_unless (type == next_type);
            switch (type) {
              case 7:
                /* SPS */
                next_type = 8;
                break;
              case 8:
                /* PPS */
                next_type = 5;
                break;
              default:
                break;
            }
            j++;
          }

          npos += nsize + 4;
        }
        gst_buffer_unmap (outbuffer, &map);
        /* should have reached the exact end */
        fail_unless (npos == size);
        break;
      }
      default:
        break;
    }

    buffers = g_list_remove (buffers, outbuffer);

    ASSERT_BUFFER_REFCOUNT (outbuffer, "outbuffer", 1);
    gst_buffer_unref (outbuffer);
    outbuffer = NULL;
  }

  cleanup_x264enc (x264enc);
  g_list_free (buffers);
  buffers = NULL;
}
/* Chain function for the VDPAU video post-processor.
 *
 * Queues the incoming buffer, then drains as many complete pictures as the
 * post-processor can produce: each is mixed (with past/future reference
 * surfaces for deinterlacing) into a freshly allocated output buffer,
 * scaled/centered according to caps and force-aspect-ratio, stamped with
 * timestamp/duration/flags from the source picture, and pushed downstream.
 *
 * Fix over the previous revision: the call to gst_vdp_vpp_get_next_picture()
 * contained the mojibake token `¤t_pic` — an HTML-entity corruption of
 * `&current_pic` — which is restored here.  No other logic changed.
 */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstFlowReturn ret = GST_FLOW_OK;

  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  while (gst_vdp_vpp_get_next_picture (vpp, &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }
    , dest_r = { 0, };
    gint par_n, par_d;
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    ret = gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
        &outbuf);
    if (ret != GST_FLOW_OK)
      break;

    /* source geometry from the input caps */
    structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
    if (!gst_structure_get_int (structure, "width", &src_r.w) ||
        !gst_structure_get_int (structure, "height", &src_r.h))
      goto invalid_caps;

    /* compensate for non-square pixels by adjusting the source rect */
    if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
            &par_d)) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    /* destination geometry from the output buffer caps */
    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status = device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE,
        NULL, current_pic.structure, video_surfaces_past_count,
        video_surfaces_past, current_pic.buf->surface,
        video_surfaces_future_count, video_surfaces_future, NULL,
        outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not post process frame"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    /* propagate timing and flags from the source picture */
    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
    if (ret != GST_FLOW_OK)
      break;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    ret = GST_FLOW_ERROR;
    break;
  }

done:
  gst_object_unref (vpp);

  return ret;
}
/* Decode the picture currently accumulated in the adapter.
 *
 * Manages the MPEG reference picture window: for non-B frames the previous
 * backward reference is pushed downstream and becomes the new forward
 * reference (with its old forward reference released).  A new video surface
 * is allocated, reference surfaces are attached, the bitstream is handed to
 * the VDPAU decoder, and the result is either pushed immediately (B frames)
 * or retained as the new backward reference.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR on allocation or decode
 * failure (the adapter is cleared on allocation failure).
 */
static GstFlowReturn
gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
    GstClockTime timestamp, gint64 size)
{
  VdpPictureInfoMPEG1Or2 *info;
  GstBuffer *buffer;
  GstBuffer *outbuf;
  VdpVideoSurface surface;
  GstVdpDevice *device;
  VdpBitstreamBuffer vbit[1];
  VdpStatus status;

  info = &mpeg_dec->vdp_info;

  if (info->picture_coding_type != B_FRAME) {
    /* the old backward reference can now be displayed: push it downstream
     * (push consumes a ref, so take one first — b_buffer stays owned here
     * as the upcoming forward reference) */
    if (info->backward_reference != VDP_INVALID_HANDLE) {
      gst_buffer_ref (mpeg_dec->b_buffer);
      gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
          GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
    }

    /* drop the previous forward reference; slide the window forward */
    if (info->forward_reference != VDP_INVALID_HANDLE) {
      gst_buffer_unref (mpeg_dec->f_buffer);
      info->forward_reference = VDP_INVALID_HANDLE;
    }

    info->forward_reference = info->backward_reference;
    mpeg_dec->f_buffer = mpeg_dec->b_buffer;

    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) {
    gst_adapter_clear (mpeg_dec->adapter);
    return GST_FLOW_ERROR;
  }

  device = GST_VDP_VIDEO_BUFFER (outbuf)->device;

  /* P and B frames predict from the forward reference ... */
  if (info->forward_reference != VDP_INVALID_HANDLE &&
      info->picture_coding_type != I_FRAME)
    gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
        GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));

  /* ... and B frames additionally from the backward reference */
  if (info->backward_reference != VDP_INVALID_HANDLE &&
      info->picture_coding_type == B_FRAME)
    gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
        GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
  GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
  GST_BUFFER_SIZE (outbuf) = size;

  /* only I frames are independently decodable */
  if (info->picture_coding_type == I_FRAME)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  if (info->top_field_first)
    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
  else
    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);

  /* hand the accumulated picture bitstream to the VDPAU decoder */
  buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
      gst_adapter_available (mpeg_dec->adapter));

  surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = GST_BUFFER_DATA (buffer);
  vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);

  status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
      (VdpPictureInfo *) info, 1, vbit);
  gst_buffer_unref (buffer);
  info->slice_count = 0;

  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
        ("Could not decode"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));

    gst_buffer_unref (GST_BUFFER (outbuf));

    return GST_FLOW_ERROR;
  }

  if (info->picture_coding_type == B_FRAME) {
    /* B frames are never referenced: push immediately */
    gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
        GST_VDP_VIDEO_BUFFER (outbuf));
  } else {
    /* I/P frames become the new backward reference; ownership of outbuf
     * transfers to mpeg_dec->b_buffer and it is pushed on the next call */
    info->backward_reference = surface;
    mpeg_dec->b_buffer = GST_BUFFER (outbuf);
  }

  return GST_FLOW_OK;
}
/* Chain function: accumulate raw audio in the adapter and render one GL
 * visualisation frame per spf samples.
 *
 * Per frame: interpolate a timestamp from the adapter position, apply QoS
 * (skip frames already known to be late), render the libvisual plugin into
 * an FBO, flip it top-down, and push the GL buffer downstream.  The loop
 * continues while enough samples remain and pushes succeed.
 */
static GstFlowReturn
gst_visual_gl_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGLBuffer *outbuf = NULL;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  /* adapter takes ownership of the input buffer */
  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least VISUAL_SAMPLES samples */
    if (avail < VISUAL_SAMPLES * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment,
          GST_FORMAT_TIME, timestamp);
      qostime += visual->duration;

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* render libvisual plugin to our target */
    gst_gl_display_use_fbo_v2 (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        visual->midtexture, (GLCB_V2) render_frame, (gpointer *) visual);

    /* gst video is top-down whereas opengl plan is bottom up */
    gst_gl_display_use_fbo (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        outbuf->texture, (GLCB) bottom_up_to_top_down,
        visual->width, visual->height, visual->midtexture,
        0, visual->width, 0, visual->height,
        GST_GL_DISPLAY_PROJECTION_ORTHO2D, (gpointer *) visual);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    /* push consumes our ref; clear the pointer so beach won't double-free */
    ret = gst_pad_push (visual->srcpad, GST_BUFFER (outbuf));
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:
  /* release a preallocated-but-unpushed output buffer, if any */
  if (outbuf != NULL)
    gst_gl_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
/* Chain function bridging GStreamer buffers into an OpenMAX (goo) component.
 *
 * Handles three input-port situations: tunneled (forward the buffer
 * downstream after optional header insertion), EOS (abort), and normal
 * (accumulate in the adapter and feed fixed-size chunks to OMX input
 * buffers, translating GStreamer timestamps into normalized OMX ones).
 * On the first buffer the component is configured and moved
 * Loaded -> Idle -> Executing.
 */
static GstFlowReturn
gst_goo_filter_chain (GstPad* pad, GstBuffer* buffer)
{
  GST_LOG ("");

  GstGooFilter* self = GST_GOO_FILTER (gst_pad_get_parent (pad));
  GstGooFilterPrivate* priv = GST_GOO_FILTER_GET_PRIVATE (self);
  GstGooAdapter* adapter = self->adapter;
  GstFlowReturn ret = GST_FLOW_OK;

  /* NOTE(review): function-scope `static` is shared across ALL instances of
   * this element and across threads — a second simultaneous instance would
   * corrupt the timestamp base.  Confirm whether this should live in priv. */
  static OMX_S64 omx_normalize_timestamp;

  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);

  /* the first buffer's timestamp becomes the OMX timestamp origin */
  if (priv->incount == 0)
  {
    omx_normalize_timestamp = (gint64)timestamp / CONVERSION;
  }

  if (goo_port_is_tunneled (self->inport))
  {
    /* shall we send a ghost buffer here ? */
    GST_INFO ("port is tunneled");

    ret = GST_FLOW_OK;
    GST_DEBUG_OBJECT (self, "Buffer timestamp: time %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
    GST_DEBUG_OBJECT (self, "Buffer duration: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
    GST_DEBUG_OBJECT (self, "Pushing buffer to next element. Size =%d",
        GST_BUFFER_SIZE (buffer));

    /** FIXME GStreamer should not insert the header.  OMX component
     * should take care of it.  Remove this function upon resolution of
     * DR OMAPS00140835 and OMAPS00140836 **/
    priv->outcount++;
    buffer = gst_goo_filter_insert_header (self, buffer, priv->outcount);

    gst_buffer_set_caps (buffer, GST_PAD_CAPS (self->srcpad));

    /* NOTE(review): gst_pad_push() takes ownership of `buffer`, yet the
     * `done` label below unrefs `buffer` again — looks like a double
     * unref on this path; verify against insert_header's ownership. */
    gst_pad_push (self->srcpad, buffer);
    goto done;
  }

  if (goo_port_is_eos (self->inport))
  {
    GST_INFO ("port is eos");
    ret = GST_FLOW_UNEXPECTED;
    goto fail;
  }

  /** @todo GstGooAdapter! */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
  {
    gst_goo_adapter_clear (adapter);
  }

  if (priv->incount == 0 &&
      goo_component_get_state (self->component) == OMX_StateLoaded)
  {
    /** Some filters require header processing,
        appended to the first buffer **/
    buffer = gst_goo_filter_codec_data_processing (self, GST_BUFFER (buffer));
    /** Todo: Use the gst_caps_fixatecaps_func to make this cleaner **/
    if (!gst_goo_filter_check_fixed_src_caps (self))
      return GST_FLOW_NOT_NEGOTIATED;
    /** Remove gst_goo_filter_check_fixed_src_caps function when fixed **/
    g_object_set (self->inport, "buffercount", priv->num_input_buffers, NULL);
    g_object_set (self->outport, "buffercount", priv->num_output_buffers, NULL);
    GST_INFO ("going to idle");
    goo_component_set_state_idle (self->component);
    GST_INFO ("going to executing");
    goo_component_set_state_executing (self->component);
  }

  /** Function to perform post buffering processing **/
  buffer = gst_goo_filter_extra_buffer_processing (self, GST_BUFFER (buffer));

  gst_goo_adapter_push (adapter, buffer);

  if (self->component->cur_state != OMX_StateExecuting)
  {
    goto done;
  }

  /* chunk size: fixed port buffer size in stream mode, else whole buffer */
  int omxbufsiz;

  if (priv->process_mode == STREAMMODE)
    omxbufsiz = GOO_PORT_GET_DEFINITION (self->inport)->nBufferSize;
  else
    omxbufsiz = GST_BUFFER_SIZE (buffer);

  /* drain the adapter one OMX buffer at a time */
  while (gst_goo_adapter_available (adapter) >= omxbufsiz &&
      ret == GST_FLOW_OK && omxbufsiz != 0)
  {
    GST_DEBUG ("Adapter available =%d  omxbufsiz = %d",
        gst_goo_adapter_available (adapter), omxbufsiz);

    OMX_BUFFERHEADERTYPE* omx_buffer;
    omx_buffer = goo_port_grab_buffer (self->inport);
    GST_DEBUG ("memcpy to buffer %d bytes", omxbufsiz);
    gst_goo_adapter_peek (adapter, omxbufsiz, omx_buffer);
    omx_buffer->nFilledLen = omxbufsiz;
    gst_goo_adapter_flush (adapter, omxbufsiz);

    /* transfer timestamp to openmax */
    {
      GST_DEBUG_OBJECT (self, "checking timestamp: time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (timestamp));

      if (GST_CLOCK_TIME_IS_VALID (timestamp))
      {
        gint64 buffer_ts = (gint64)timestamp;
        omx_buffer->nTimeStamp = (OMX_S64)buffer_ts / CONVERSION;
        /* rebase onto the first buffer's timestamp */
        omx_buffer->nTimeStamp =
            omx_buffer->nTimeStamp - omx_normalize_timestamp;
      }
      else
        GST_WARNING_OBJECT (self, "Invalid timestamp!");
    }

    priv->incount++;
    goo_component_release_buffer (self->component, omx_buffer);
    ret = GST_FLOW_OK;
  }

  if (goo_port_is_tunneled (self->outport))
  {
    /** @todo send a ghost buffer */
    GstBuffer *ghost_buffer = (GstBuffer*) gst_ghost_buffer_new ();
    GST_BUFFER_TIMESTAMP (ghost_buffer) = timestamp;
    gst_pad_push (self->srcpad, ghost_buffer);
  }

  goto done;

fail:
  gst_goo_adapter_clear (adapter);

done:
  gst_object_unref (self);
  gst_buffer_unref (buffer);
  return ret;
}
/* SwfdecBuffer free callback: releases the GstBuffer stored as the
 * private pointer.  The SwfdecBuffer argument itself is not used. */
static void
gst_swfdec_buffer_free (SwfdecBuffer * buf, void *priv)
{
  GstBuffer *gstbuf = GST_BUFFER (priv);

  (void) buf;                   /* required by the callback signature */
  gst_buffer_unref (gstbuf);
}
/* Chain function: turn incoming 16-bit audio samples into chart video
 * frames, emitting one frame each time samples_between_frames samples
 * have accumulated (and the buffer has at least one sample per pixel
 * column to draw).
 *
 * Fix over the previous revision: when a frame was due
 * (samples_between_frames reached) but the buffer held fewer samples than
 * chart->width, the input buffer was never unreffed and leaked.  The input
 * is now released on every path.
 */
static void
gst_chart_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *bufin = GST_BUFFER (_data);
  GstChart *chart;
  GstBuffer *bufout;
  guint32 samples_in;
  guint32 sizeout;
  gint16 *datain;
  guchar *dataout;

  g_return_if_fail (bufin != NULL);
  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (GST_IS_CHART (GST_OBJECT_PARENT (pad)));
  chart = GST_CHART (GST_OBJECT_PARENT (pad));
  g_return_if_fail (chart != NULL);

  GST_DEBUG ("CHART: chainfunc called");

  samples_in = GST_BUFFER_SIZE (bufin) / sizeof (gint16);
  datain = (gint16 *) (GST_BUFFER_DATA (bufin));
  GST_DEBUG ("input buffer has %d samples", samples_in);

  /* keep the output clock monotonic against the input timestamps */
  if (chart->next_time <= GST_BUFFER_TIMESTAMP (bufin)) {
    chart->next_time = GST_BUFFER_TIMESTAMP (bufin);
    GST_DEBUG ("in: %" G_GINT64_FORMAT, GST_BUFFER_TIMESTAMP (bufin));
  }

  chart->samples_since_last_frame += samples_in;
  if (chart->samples_between_frames <= chart->samples_since_last_frame) {
    chart->samples_since_last_frame = 0;

    /* get data to draw into buffer: need at least one sample per column */
    if (samples_in >= chart->width) {
      /* make a new buffer for the output */
      bufout = gst_buffer_new ();
      sizeout = chart->bpp / 8 * chart->width * chart->height;
      dataout = g_malloc (sizeout);
      GST_BUFFER_SIZE (bufout) = sizeout;
      GST_BUFFER_DATA (bufout) = dataout;
      GST_DEBUG ("CHART: made new buffer: size %d, width %d, height %d",
          sizeout, chart->width, chart->height);

      /* take data and draw to new buffer */
      /* FIXME: call different routines for different properties */
      draw_chart_16bpp (dataout, chart->width, chart->height,
          (gint16 *) datain, samples_in);

      /* set timestamp */
      GST_BUFFER_TIMESTAMP (bufout) = chart->next_time;

      GST_DEBUG ("CHART: outputting buffer");
      /* output buffer */
      GST_BUFFER_FLAG_SET (bufout, GST_BUFFER_READONLY);
      gst_pad_push (chart->srcpad, GST_DATA (bufout));
    } else {
      /* too few samples for a full-width plot: drop this frame */
      GST_DEBUG ("CHART: skipping buffer");
    }
  } else {
    GST_DEBUG ("CHART: skipping buffer");
  }

  /* always release the input buffer (was leaked in the frame-due /
   * too-few-samples case before) */
  gst_buffer_unref (bufin);

  GST_DEBUG ("CHART: exiting chainfunc");
}
static GstBuffer * get_buffer (void) { return GST_BUFFER (g_async_queue_pop (pending_buffers)); }
/* Chain function: resample the incoming video frame stream to to_fps by
 * dropping or duplicating frames.
 *
 * Each input frame may produce zero or more output frames: one for every
 * output slot (pass counter) its timestamp covers at the target frame rate.
 * Output frames are sub-buffers carrying recomputed timestamp/duration.
 * DISCONT events reset the counters to the new stream time.
 */
static void
gst_videodrop_chain (GstPad * pad, GstData * data)
{
  GstVideodrop *videodrop = GST_VIDEODROP (gst_pad_get_parent (pad));
  GstBuffer *buf;

  if (GST_IS_EVENT (data)) {
    GstEvent *event = GST_EVENT (data);

    if (GST_EVENT_TYPE (event) == GST_EVENT_DISCONTINUOUS) {
      /* since we rely on timestamps of the source, we need to handle
       * changes in time carefully. */
      gint64 time;

      if (gst_event_discont_get_value (event, GST_FORMAT_TIME, &time)) {
        videodrop->total = videodrop->pass = 0;
        videodrop->time_adjust = time;
      } else {
        GST_ELEMENT_ERROR (videodrop, STREAM, TOO_LAZY, (NULL),
            ("Received discont, but no time information"));
        gst_event_unref (event);
        return;
      }
      /* FIXME: increase timestamp / speed */
    }

    gst_pad_event_default (pad, event);
    return;
  }

  buf = GST_BUFFER (data);
  videodrop->total++;
  GST_DEBUG ("Received buffer at %u:%02u:%02u:%09u, fps=%lf, pass=%"
      G_GUINT64_FORMAT " of " G_GUINT64_FORMAT ", speed=%lf",
      (guint) (GST_BUFFER_TIMESTAMP (buf) / (GST_SECOND * 60 * 60)),
      (guint) ((GST_BUFFER_TIMESTAMP (buf) / (GST_SECOND * 60)) % 60),
      (guint) ((GST_BUFFER_TIMESTAMP (buf) / GST_SECOND) % 60),
      (guint) (GST_BUFFER_TIMESTAMP (buf) % GST_SECOND),
      videodrop->to_fps, videodrop->total, videodrop->pass, videodrop->speed);

  /* emit one output frame for every target-rate slot this input frame's
   * (speed-adjusted) timestamp has reached; may run zero times (drop) or
   * several times (duplicate) */
  while (((GST_BUFFER_TIMESTAMP (buf) - videodrop->time_adjust) /
          videodrop->speed * videodrop->to_fps / GST_SECOND) >=
      videodrop->pass) {
    /* since we write to the struct (time/duration), we need a new struct,
     * but we don't want to copy around data - a subbuffer is the easiest
     * way to accomplish that... */
    GstBuffer *copy = gst_buffer_create_sub (buf, 0, GST_BUFFER_SIZE (buf));

    /* adjust timestamp/duration and push forward */
    GST_BUFFER_TIMESTAMP (copy) = (videodrop->time_adjust /
        videodrop->speed) + GST_SECOND * videodrop->pass / videodrop->to_fps;
    GST_BUFFER_DURATION (copy) = GST_SECOND / videodrop->to_fps;
    GST_DEBUG ("Sending out buffer from out %u:%02u:%02u:%09u",
        (guint) (GST_BUFFER_TIMESTAMP (copy) / (GST_SECOND * 60 * 60)),
        (guint) ((GST_BUFFER_TIMESTAMP (copy) / (GST_SECOND * 60)) % 60),
        (guint) ((GST_BUFFER_TIMESTAMP (copy) / GST_SECOND) % 60),
        (guint) (GST_BUFFER_TIMESTAMP (copy) % GST_SECOND));
    gst_pad_push (videodrop->srcpad, GST_DATA (copy));

    videodrop->pass++;
  }

  gst_buffer_unref (buf);
}
/* Release every pooled XV image buffer: each entry in m_pool holds one
 * GstBuffer reference that is dropped here.  NOTE(review): presumably the
 * caller clears m_pool afterwards so the now-dangling pointers are not
 * reused — confirm at the call site. */
foreach (QGstXvImageBuffer *xvBuffer, m_pool) {
    gst_buffer_unref(GST_BUFFER(xvBuffer));
}
static void buffer_unref (void *buffer, void *user_data) { gst_buffer_unref (GST_BUFFER (buffer)); }
/* Pipeline Callbacks */
/* Pad probe on the subtitle renderer sink.
 * For buffers: converts the buffer PTS to stream time against the cached
 * segment and stores a new metadata-only buffer (PTS + duration) in the
 * sorted glob_subtitled_frames list; may instead trigger media-descriptor
 * generation when durations are invalid and no parser is present.
 * For SEGMENT events: caches the segment and clears the waiting flags.
 * Always returns TRUE so the probed object continues flowing. */ static gboolean renderer_probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object, gpointer userdata) { InsanityTest *test = INSANITY_TEST (ptest); if (GST_IS_BUFFER (object)) { gint64 stime_ts; GstBuffer *buf = GST_BUFFER (object), *nbuf; /* Buffers seen while generating the descriptor are ignored. */ if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION) goto done; if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)) == FALSE && glob_parser == NULL) { gboolean generate_media_desc; insanity_test_get_boolean_argument (test, "create-media-descriptor", (gboolean *) & generate_media_desc); /* We generate the XML file if needed and allowed by user */ if (generate_media_desc) generate_xml_media_descriptor (test); else insanity_test_done (test); } else if (glob_parser == NULL) { /* Avoid using xml descriptor when not needed */ stime_ts = gst_segment_to_stream_time (&glob_renderer_sink_probe->last_segment, glob_renderer_sink_probe->last_segment.format, GST_BUFFER_PTS (buf)); if (GST_CLOCK_TIME_IS_VALID (glob_first_subtitle_ts) == FALSE) glob_first_subtitle_ts = stime_ts; /* Record timing only: a fresh empty buffer carries PTS/duration into
       * the sorted list without keeping the payload alive. */ nbuf = gst_buffer_new (); GST_BUFFER_PTS (nbuf) = stime_ts; GST_BUFFER_DURATION (nbuf) = GST_BUFFER_DURATION (buf); glob_subtitled_frames = g_list_insert_sorted (glob_subtitled_frames, nbuf, (GCompareFunc) sort_subtitle_bufs); } } else if (GST_IS_EVENT (object)) { GstEvent *event = GST_EVENT (object); switch (GST_EVENT_TYPE (event)) { case GST_EVENT_SEGMENT: { /* We do not care about event during subtitle generation */ if (glob_in_progress == TEST_SUBTTILE_DESCRIPTOR_GENERATION) goto done; gst_event_copy_segment (event, &glob_renderer_sink_probe->last_segment); if (glob_renderer_sink_probe->waiting_segment == FALSE) /* Cache the segment as it will be our reference but don't look
           * further */ goto done; if (glob_renderer_sink_probe->waiting_first_segment == TRUE) { /* Make sure that a new segment has been received for each stream */ glob_renderer_sink_probe->waiting_first_segment = FALSE; 
 /* NOTE(review): this assignment is redundant — waiting_segment is
  * unconditionally cleared again right below. */ glob_renderer_sink_probe->waiting_segment = FALSE; } glob_renderer_sink_probe->waiting_segment = FALSE; break; } default: break; } } done: return TRUE; }
/******************************************************
 * gst_v4l2src_grab_frame ():
 *   grab a frame for capturing
 *   return value: GST_FLOW_OK, GST_FLOW_WRONG_STATE or GST_FLOW_ERROR
 *
 *   Waits (via gst_poll) for the device to become readable, dequeues a
 *   buffer from the pool, and retries up to NUM_TRIALS times on transient
 *   errno values.  On success *buf holds either the pool buffer itself or
 *   a copy (when always_copy is set or the pool would run dry); in the copy
 *   case the pool buffer is unreffed, which requeues it on the device.
 ******************************************************/ GstFlowReturn gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer ** buf) { #define NUM_TRIALS 50
 GstV4l2Object *v4l2object; GstV4l2BufferPool *pool; gint32 trials = NUM_TRIALS; GstBuffer *pool_buffer; gboolean need_copy; gint ret; v4l2object = v4l2src->v4l2object; pool = v4l2src->pool; if (!pool) goto no_buffer_pool; GST_DEBUG_OBJECT (v4l2src, "grab frame"); for (;;) { if (v4l2object->can_poll_device) { ret = gst_poll_wait (v4l2object->poll, GST_CLOCK_TIME_NONE); if (G_UNLIKELY (ret < 0)) { /* EBUSY means the poll was flushed — we are shutting down. */ if (errno == EBUSY) goto stopped; if (errno == ENXIO) { /* Device cannot be polled; fall back to blocking DQBUF from now on. */ GST_DEBUG_OBJECT (v4l2src, "v4l2 device doesn't support polling. Disabling"); v4l2object->can_poll_device = FALSE; } else { if (errno != EAGAIN && errno != EINTR) goto select_error; } } } pool_buffer = GST_BUFFER (gst_v4l2_buffer_pool_dqbuf (pool)); if (pool_buffer) break; GST_WARNING_OBJECT (pool->v4l2elem, "trials=%d", trials); /* if the sync() got interrupted, we can retry */ switch (errno) { case EINVAL: case ENOMEM: /* fatal */ return GST_FLOW_ERROR; case EAGAIN: case EIO: case EINTR: default: /* try again, until too many trials */ break; } /* check nr. of attempts to capture */ if (--trials == -1) { goto too_many_trials; } } /* if we are handing out the last buffer in the pool, we need to make a
   * copy and bring the buffer back in the pool. 
 */ need_copy = v4l2src->always_copy || !gst_v4l2_buffer_pool_available_buffers (pool); if (G_UNLIKELY (need_copy)) { if (!v4l2src->always_copy) { GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, v4l2src, "running out of buffers, making a copy to reuse current one"); } *buf = gst_buffer_copy (pool_buffer); GST_BUFFER_FLAG_UNSET (*buf, GST_BUFFER_FLAG_READONLY); /* this will requeue */ gst_buffer_unref (pool_buffer); } else { *buf = pool_buffer; } /* we set the buffer metadata in gst_v4l2src_create() */ return GST_FLOW_OK; /* ERRORS */ no_buffer_pool: { GST_DEBUG ("no buffer pool"); return GST_FLOW_WRONG_STATE; } select_error: { GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, READ, (NULL), ("select error %d: %s (%d)", ret, g_strerror (errno), errno)); return GST_FLOW_ERROR; } stopped: { GST_DEBUG ("stop called"); return GST_FLOW_WRONG_STATE; } too_many_trials: { GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED, (_("Failed trying to get video frames from device '%s'."), v4l2object->videodev), (_("Failed after %d tries. device %s. system error: %s"), NUM_TRIALS, v4l2object->videodev, g_strerror (errno))); return GST_FLOW_ERROR; } }
/* Finalizes an encoded frame and pushes it downstream.
 * Assigns frame/decode numbers, maintains keyframe distance bookkeeping,
 * stamps timestamp/duration/offset on the source buffer, negotiates output
 * caps on first use, emits a GstForceKeyUnit custom event when a keyframe
 * was forced, then either hands the frame to the subclass' shape_output or
 * pushes the buffer directly.  The frame is freed in all cases; ownership of
 * frame->src_buffer transfers downstream. */ GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame) { GstFlowReturn ret; GstBaseVideoEncoderClass *base_video_encoder_class; base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); frame->system_frame_number = GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number; GST_BASE_VIDEO_CODEC (base_video_encoder)->system_frame_number++; /* Sync points reset the keyframe distance and clear the delta flag;
   * everything else is marked as a delta unit. */ if (frame->is_sync_point) { base_video_encoder->distance_from_sync = 0; GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); } else { GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); } frame->distance_from_sync = base_video_encoder->distance_from_sync; base_video_encoder->distance_from_sync++; frame->decode_frame_number = frame->system_frame_number - 1; if (frame->decode_frame_number < 0) { frame->decode_timestamp = 0; } else { /* decode_timestamp = decode_frame_number / fps, scaled without overflow. */ frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number, GST_SECOND * GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_d, GST_BASE_VIDEO_CODEC (base_video_encoder)->state.fps_n); } GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp; GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration; /* NOTE(review): OFFSET is (ab)used to carry the decode timestamp here. */ GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp; GST_BASE_VIDEO_CODEC (base_video_encoder)->frames = g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame); /* Lazily negotiate output caps the first time a frame is finished. */ if (!base_video_encoder->set_output_caps) { if (base_video_encoder_class->get_caps) { GST_BASE_VIDEO_CODEC (base_video_encoder)->caps = base_video_encoder_class->get_caps (base_video_encoder); } else { GST_BASE_VIDEO_CODEC (base_video_encoder)->caps = gst_caps_new_simple ("video/unknown", NULL); } gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), GST_BASE_VIDEO_CODEC (base_video_encoder)->caps); base_video_encoder->set_output_caps = TRUE; } gst_buffer_set_caps (GST_BUFFER (frame->src_buffer), GST_BASE_VIDEO_CODEC 
 (base_video_encoder)->caps); if (frame->force_keyframe) { GstClockTime stream_time; GstClockTime running_time; GstStructure *s; running_time = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment, GST_FORMAT_TIME, frame->presentation_timestamp); stream_time = gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment, GST_FORMAT_TIME, frame->presentation_timestamp); /* FIXME this should send the event that we got on the sink pad
       instead of creating a new one */ s = gst_structure_new ("GstForceKeyUnit", "timestamp", G_TYPE_UINT64, frame->presentation_timestamp, "stream-time", G_TYPE_UINT64, stream_time, "running-time", G_TYPE_UINT64, running_time, NULL); gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s)); } /* Subclass may reshape/queue output; otherwise push straight downstream. */ if (base_video_encoder_class->shape_output) { ret = base_video_encoder_class->shape_output (base_video_encoder, frame); } else { ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), frame->src_buffer); } gst_base_video_codec_free_frame (frame); return ret; }
/* Chain function: feeds one raw I420 frame to libfame and pushes each encoded
 * slice downstream as its own buffer.
 *
 * The input buffer holds the three planes contiguously: Y (w*h), then U and V
 * (w*h/4 each).  Each output slice buffer is a fresh allocation carrying a
 * copy of fameenc->buffer and inherits the input buffer's timestamp.
 *
 * Fix: the original assigned GST_BUFFER_TIMESTAMP (outbuf) twice — first to
 * fameenc->next_time, then immediately overwrote it with the input buffer's
 * timestamp.  The dead first store has been removed; next_time is still
 * accumulated since it is element state. */
static void
gst_fameenc_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  GstFameEnc *fameenc;
  guchar *data;
  gulong size;
  gint frame_size;
  gint length;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);
  g_return_if_fail (GST_IS_BUFFER (buf));

  fameenc = GST_FAMEENC (gst_pad_get_parent (pad));

  data = (guchar *) GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  GST_DEBUG ("gst_fameenc_chain: got buffer of %ld bytes in '%s'", size,
      GST_OBJECT_NAME (fameenc));

  /* the data contains the three planes side by side, with size w * h, w * h /4,
   * w * h / 4 */
  fameenc->fy.w = fameenc->fp.width;
  fameenc->fy.h = fameenc->fp.height;

  frame_size = fameenc->fp.width * fameenc->fp.height;
  fameenc->fy.p = 0;
  fameenc->fy.y = data;
  fameenc->fy.u = data + frame_size;
  fameenc->fy.v = fameenc->fy.u + (frame_size >> 2);

  fame_start_frame (fameenc->fc, &fameenc->fy, NULL);

  /* One output buffer per encoded slice. */
  while ((length = fame_encode_slice (fameenc->fc)) != 0) {
    GstBuffer *outbuf;

    outbuf = gst_buffer_new ();

    /* FIXME: safeguard, remove me when a better way is found */
    if (length > FAMEENC_BUFFER_SIZE)
      g_warning
          ("FAMEENC_BUFFER_SIZE is defined too low, encoded slice has size %d !\n",
          length);

    /* Derive the frame interval lazily from the configured frame rate. */
    if (!fameenc->time_interval) {
      fameenc->time_interval =
          GST_SECOND * fameenc->fp.frame_rate_den / fameenc->fp.frame_rate_num;
    }
    fameenc->next_time += fameenc->time_interval;

    GST_BUFFER_SIZE (outbuf) = length;
    GST_BUFFER_DATA (outbuf) = g_malloc (length);
    memcpy (GST_BUFFER_DATA (outbuf), fameenc->buffer, length);
    /* Slices of one frame all carry the input frame's timestamp. */
    GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);

    GST_DEBUG ("gst_fameenc_chain: pushing buffer of size %d",
        GST_BUFFER_SIZE (outbuf));

    gst_pad_push (fameenc->srcpad, GST_DATA (outbuf));
  }

  fame_end_frame (fameenc->fc, NULL);

  gst_buffer_unref (buf);
}
/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits
 * pages to output pad.
 *
 * Flow per page extracted by libogg:
 *  - BOS pages create (or replace, on duplicate serialno) a stream and are
 *    appended to that stream's header list.
 *  - While in_headers, header pages accumulate on stream->headers and pages of
 *    unknown type on stream->unknown_pages.
 *  - The first non-header page after the headers triggers: caps negotiation
 *    with a "streamheader" GstArray, a push of every collected header buffer,
 *    then the pending unknown pages.
 *  - Data pages with granulepos -1 are parked on stream->stored_buffers until
 *    a page with a valid granulepos lets us timestamp and flush them.
 * Returns the first non-OK flow result, or GST_FLOW_ERROR after pushing EOS
 * on structural failures. */ static GstFlowReturn gst_ogg_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstOggParse *ogg; GstFlowReturn result = GST_FLOW_OK; gint ret = -1; guint32 serialno; GstBuffer *pagebuffer; GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer); ogg = GST_OGG_PARSE (parent); GST_LOG_OBJECT (ogg, "Chain function received buffer of size %" G_GSIZE_FORMAT, gst_buffer_get_size (buffer)); gst_ogg_parse_submit_buffer (ogg, buffer); while (ret != 0 && result == GST_FLOW_OK) { ogg_page page; /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can
     * track how many bytes the ogg layer discarded (in the case of sync errors,
     * etc.); this allows us to accurately track the current stream offset */ ret = ogg_sync_pageseek (&ogg->sync, &page); if (ret == 0) { /* need more data, that's fine... */ break; } else if (ret < 0) { /* discontinuity; track how many bytes we skipped (-ret) */ ogg->offset -= ret; } else { gint64 granule = ogg_page_granulepos (&page); 
#ifndef GST_DISABLE_GST_DEBUG
 int bos = ogg_page_bos (&page); 
#endif
 guint64 startoffset = ogg->offset; GstOggStream *stream; gboolean keyframe; serialno = ogg_page_serialno (&page); stream = gst_ogg_parse_find_stream (ogg, serialno); GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT, GST_TIME_ARGS (buffertimestamp)); if (stream) { /* Timestamp pages with the end time of their granulepos; only the
         * video stream's granulepos decides keyframe-ness. */ buffertimestamp = gst_ogg_stream_get_end_time_for_granulepos (stream, granule); if (ogg->video_stream) { if (stream == ogg->video_stream) { keyframe = gst_ogg_stream_granulepos_is_key_frame (stream, granule); } else { keyframe = FALSE; } } else { keyframe = TRUE; } } else { buffertimestamp = GST_CLOCK_TIME_NONE; keyframe = TRUE; } pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset, buffertimestamp); /* We read out 'ret' bytes, so we set the next offset appropriately */ ogg->offset += ret; GST_LOG_OBJECT (ogg, 
 "processing ogg page (serial %08x, pageno %ld, " "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ") keyframe=%d", serialno, ogg_page_pageno (&page), granule, bos, startoffset, ogg->offset, keyframe); if (ogg_page_bos (&page)) { /* If we've seen this serialno before, this is technically an error,
         * we log this case but accept it - this one replaces the previous
         * stream with this serialno. We can do this since we're streaming, and
         * not supporting seeking... */ GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno); if (stream != NULL) { GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %08x " "at offset %" G_GINT64_FORMAT, serialno, ogg->offset); } if (ogg->last_page_not_bos) { GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new " "chain starting with serial %u", serialno); gst_ogg_parse_delete_all_streams (ogg); } stream = gst_ogg_parse_new_stream (ogg, &page); ogg->last_page_not_bos = FALSE; gst_buffer_ref (pagebuffer); stream->headers = g_list_append (stream->headers, pagebuffer); if (!ogg->in_headers) { GST_LOG_OBJECT (ogg, "Found start of new chain at offset %" G_GUINT64_FORMAT, startoffset); ogg->in_headers = 1; } /* For now, we just keep the header buffer in the stream->headers list;
         * it actually gets output once we've collected the entire set */ } else { /* Non-BOS page. Either: we're outside headers, and this isn't a
         * header (normal data), outside headers and this is (error!), inside
         * headers, this is (append header), or inside headers and this isn't
         * (we've found the end of headers; flush the lot!)
         *
         * Before that, we flag that the last page seen (this one) was not a
         * BOS page; that way we know that when we next see a BOS page it's a
         * new chain, and we can flush all existing streams. 
 */ page_type type; GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno); if (!stream) { GST_LOG_OBJECT (ogg, "Non-BOS page unexpectedly found at %" G_GINT64_FORMAT, ogg->offset); goto failure; } ogg->last_page_not_bos = TRUE; type = gst_ogg_parse_is_header (ogg, stream, &page); if (type == PAGE_PENDING && ogg->in_headers) { gst_buffer_ref (pagebuffer); stream->unknown_pages = g_list_append (stream->unknown_pages, pagebuffer); } else if (type == PAGE_HEADER) { if (!ogg->in_headers) { GST_LOG_OBJECT (ogg, "Header page unexpectedly found outside " "headers at offset %" G_GINT64_FORMAT, ogg->offset); goto failure; } else { /* Append the header to the buffer list, after any unknown previous
             * pages */ stream->headers = g_list_concat (stream->headers, stream->unknown_pages); g_list_free (stream->unknown_pages); gst_buffer_ref (pagebuffer); stream->headers = g_list_append (stream->headers, pagebuffer); } } else { /* PAGE_DATA, or PAGE_PENDING but outside headers */ if (ogg->in_headers) { /* First non-header page... set caps, flush headers.
             *
             * First up, we build a single GValue list of all the pagebuffers
             * we're using for the headers, in order.
             * Then we set this on the caps structure. Then we can start pushing
             * buffers for the headers, and finally we send this non-header
             * page. 
 */ GstCaps *caps; GstStructure *structure; GValue array = { 0 }; gint count = 0; gboolean found_pending_headers = FALSE; GSList *l; g_value_init (&array, GST_TYPE_ARRAY); for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; if (g_list_length (stream->headers) == 0) { GST_LOG_OBJECT (ogg, "No primary header found for stream %08x", stream->serialno); goto failure; } gst_ogg_parse_append_header (&array, GST_BUFFER (stream->headers->data)); count++; } for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GList *j; /* already appended the first header, now do headers 2-N */ for (j = stream->headers->next; j != NULL; j = j->next) { gst_ogg_parse_append_header (&array, GST_BUFFER (j->data)); count++; } } caps = gst_pad_query_caps (ogg->srcpad, NULL); caps = gst_caps_make_writable (caps); structure = gst_caps_get_structure (caps, 0); gst_structure_take_value (structure, "streamheader", &array); gst_pad_set_caps (ogg->srcpad, caps); if (ogg->caps) gst_caps_unref (ogg->caps); ogg->caps = caps; GST_LOG_OBJECT (ogg, "Set \"streamheader\" caps with %d buffers " "(one per page)", count); /* Now, we do the same thing, but push buffers... 
 */ for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GstBuffer *buf = GST_BUFFER (stream->headers->data); result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; } for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GList *j; /* pushed the first one for each stream already, now do 2-N */ for (j = stream->headers->next; j != NULL; j = j->next) { GstBuffer *buf = GST_BUFFER (j->data); result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; } } ogg->in_headers = 0; /* And finally the pending data pages */ for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GList *k; if (stream->unknown_pages == NULL) continue; if (found_pending_headers) { GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at " "approximate offset %" G_GINT64_FORMAT, ogg->offset); } found_pending_headers = TRUE; GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers", g_list_length (stream->unknown_pages) + 1); for (k = stream->unknown_pages; k != NULL; k = k->next) { GstBuffer *buf = GST_BUFFER (k->data); result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; } g_list_foreach (stream->unknown_pages, (GFunc) gst_mini_object_unref, NULL); g_list_free (stream->unknown_pages); stream->unknown_pages = NULL; } } if (granule == -1) { /* No granulepos yet: park the page until one arrives so it can be
             * timestamped and flagged correctly. */ stream->stored_buffers = g_list_append (stream->stored_buffers, pagebuffer); } else { while (stream->stored_buffers) { GstBuffer *buf = stream->stored_buffers->data; buf = gst_buffer_make_writable (buf); GST_BUFFER_TIMESTAMP (buf) = buffertimestamp; if (!keyframe) { GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT); } else { /* only the first flushed page keeps the keyframe marking */ keyframe = FALSE; } result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; stream->stored_buffers = g_list_delete_link (stream->stored_buffers, stream->stored_buffers); } pagebuffer 
 = gst_buffer_make_writable (pagebuffer); if (!keyframe) { GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT); } else { keyframe = FALSE; } result = gst_pad_push (ogg->srcpad, pagebuffer); if (result != GST_FLOW_OK) return result; } } } } } return result; failure: gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ()); return GST_FLOW_ERROR; }
/* Pipeline Callbacks */ static gboolean probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object, gpointer userdata) { InsanityTest *test = INSANITY_TEST (ptest); global_last_probe = g_get_monotonic_time (); DECODER_TEST_LOCK (); if (GST_IS_BUFFER (object)) { GstBuffer *buf; GstClockTime ts; buf = GST_BUFFER (object); ts = GST_BUFFER_PTS (buf); /* First check clipping */ if (glob_testing_parser == FALSE && GST_CLOCK_TIME_IS_VALID (ts) && glob_waiting_segment == FALSE) { GstClockTime ts_end, cstart, cstop; /* Check if buffer is completely outside the segment */ ts_end = ts; if (GST_BUFFER_DURATION_IS_VALID (buf)) ts_end += GST_BUFFER_DURATION (buf); /* Check if buffer is completely outside the segment */ ts_end = ts; if (!gst_segment_clip (&glob_last_segment, glob_last_segment.format, ts, ts_end, &cstart, &cstop)) { char *msg = g_strdup_printf ("Got timestamp %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT ", outside configured segment (%" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT "), method %s", GST_TIME_ARGS (ts), GST_TIME_ARGS (ts_end), GST_TIME_ARGS (glob_last_segment.start), GST_TIME_ARGS (glob_last_segment.stop), test_get_name (glob_in_progress)); insanity_test_validate_checklist_item (INSANITY_TEST (ptest), "segment-clipping", FALSE, msg); g_free (msg); glob_bad_segment_clipping = TRUE; } } switch (glob_in_progress) { case TEST_NONE: if (glob_waiting_first_segment == TRUE) insanity_test_validate_checklist_item (test, "first-segment", FALSE, "Got a buffer before the first segment"); /* Got the first buffer, starting testing dance */ next_test (test); break; case TEST_POSITION: test_position (test, buf); break; case TEST_FAST_FORWARD: case TEST_BACKWARD_PLAYBACK: case TEST_FAST_BACKWARD: { gint64 stime_ts; if (GST_CLOCK_TIME_IS_VALID (ts) == FALSE || glob_waiting_segment == TRUE) { break; } stime_ts = gst_segment_to_stream_time (&glob_last_segment, glob_last_segment.format, ts); if (GST_CLOCK_TIME_IS_VALID (glob_seek_first_buf_ts) == FALSE) { 
GstClockTime expected_ts = gst_segment_to_stream_time (&glob_last_segment, glob_last_segment.format, glob_seek_rate < 0 ? glob_seek_stop_ts : glob_seek_segment_seektime); GstClockTimeDiff diff = ABS (GST_CLOCK_DIFF (stime_ts, expected_ts)); if (diff > SEEK_THRESHOLD) { gchar *valmsg = g_strdup_printf ("Received buffer timestamp %" GST_TIME_FORMAT " Seeek wanted %" GST_TIME_FORMAT "", GST_TIME_ARGS (stime_ts), GST_TIME_ARGS (expected_ts)); validate_current_test (test, FALSE, valmsg); next_test (test); g_free (valmsg); } else glob_seek_first_buf_ts = stime_ts; } else { GstClockTimeDiff diff = GST_CLOCK_DIFF (stime_ts, glob_seek_first_buf_ts); if (diff < 0) diff = -diff; if (diff >= glob_playback_duration * GST_SECOND) { validate_current_test (test, TRUE, NULL); next_test (test); } } break; } default: break; } } else if (GST_IS_EVENT (object)) { GstEvent *event = GST_EVENT (object); guint seqnum = gst_event_get_seqnum (event); if (G_LIKELY (glob_seqnum_found == FALSE) && seqnum == glob_seqnum) glob_seqnum_found = TRUE; if (glob_seqnum_found == TRUE && seqnum != glob_seqnum) { gchar *message = g_strdup_printf ("Current seqnum %i != " "received %i", glob_seqnum, seqnum); insanity_test_validate_checklist_item (test, "seqnum-management", FALSE, message); glob_wrong_seqnum = TRUE; g_free (message); } switch (GST_EVENT_TYPE (event)) { case GST_EVENT_SEGMENT: { gst_event_copy_segment (event, &glob_last_segment); if (glob_waiting_segment == FALSE) /* Cache the segment as it will be our reference but don't look * further */ goto done; glob_last_segment_start_time = glob_last_segment.start; if (glob_waiting_first_segment == TRUE) { insanity_test_validate_checklist_item (test, "first-segment", TRUE, NULL); glob_waiting_first_segment = FALSE; } else if (glob_in_progress >= TEST_FAST_FORWARD && glob_in_progress <= TEST_FAST_BACKWARD) { GstClockTimeDiff diff; gboolean valid_stop = TRUE; GstClockTimeDiff wdiff, rdiff; rdiff = ABS (GST_CLOCK_DIFF (glob_last_segment.stop, 
glob_last_segment.start)) * ABS (glob_last_segment.rate * glob_last_segment.applied_rate); wdiff = ABS (GST_CLOCK_DIFF (glob_seek_stop_ts, glob_seek_segment_seektime)); diff = GST_CLOCK_DIFF (glob_last_segment.position, glob_seek_segment_seektime); if (diff < 0) diff = -diff; /* Now compare with the expected segment */ if ((glob_last_segment.rate * glob_last_segment.applied_rate) == glob_seek_rate && diff <= SEEK_THRESHOLD && valid_stop) { glob_seek_got_segment = TRUE; } else { GstClockTime stopdiff = ABS (GST_CLOCK_DIFF (rdiff, wdiff)); gchar *validate_msg = g_strdup_printf ("Wrong segment received, Rate %f expected " "%f, start time diff %" GST_TIME_FORMAT " stop diff %" GST_TIME_FORMAT, (glob_last_segment.rate * glob_last_segment.applied_rate), glob_seek_rate, GST_TIME_ARGS (diff), GST_TIME_ARGS (stopdiff)); validate_current_test (test, FALSE, validate_msg); next_test (test); g_free (validate_msg); } } glob_waiting_segment = FALSE; break; } default: break; } } done: DECODER_TEST_UNLOCK (); return TRUE; }
/* encode the CMML head tag and push the CMML headers */
/* Builds the three CMML header buffers (ident, preamble, head), advertises
 * them on the src caps as stream headers, then pushes them.  On allocation
 * or push failure, any buffers still queued in `headers` are released; the
 * caller observes the outcome through enc->flow_return. */ static void gst_cmml_enc_parse_tag_head (GstCmmlEnc * enc, GstCmmlTagHead * head) { GList *headers = NULL; GList *walk; guchar *head_string; GstCaps *caps; GstBuffer *ident_buf, *preamble_buf, *head_buf; GstBuffer *buffer; /* A head tag is only valid after the preamble has been seen. */ if (enc->preamble == NULL) goto flow_unexpected; GST_INFO_OBJECT (enc, "parsing head tag"); enc->flow_return = gst_cmml_enc_new_ident_header (enc, &ident_buf); if (enc->flow_return != GST_FLOW_OK) goto alloc_error; headers = g_list_append (headers, ident_buf); enc->flow_return = gst_cmml_enc_new_buffer (enc, enc->preamble, strlen ((gchar *) enc->preamble), &preamble_buf); if (enc->flow_return != GST_FLOW_OK) goto alloc_error; headers = g_list_append (headers, preamble_buf); head_string = gst_cmml_parser_tag_head_to_string (enc->parser, head); enc->flow_return = gst_cmml_enc_new_buffer (enc, head_string, strlen ((gchar *) head_string), &head_buf); g_free (head_string); if (enc->flow_return != GST_FLOW_OK) goto alloc_error; headers = g_list_append (headers, head_buf); /* Expose all three headers on the src caps ("streamheader"-style). */ caps = gst_pad_get_caps (enc->srcpad); caps = gst_cmml_enc_set_header_on_caps (enc, caps, ident_buf, preamble_buf, head_buf); /* Push each header with granulepos 0; the list entries are consumed
   * (deleted) one by one as they are handed to gst_cmml_enc_push. */ while (headers) { buffer = GST_BUFFER (headers->data); /* set granulepos 0 on headers */ GST_BUFFER_OFFSET_END (buffer) = 0; gst_buffer_set_caps (buffer, caps); enc->flow_return = gst_cmml_enc_push (enc, buffer); headers = g_list_delete_link (headers, headers); if (GST_FLOW_IS_FATAL (enc->flow_return)) goto push_error; } gst_caps_unref (caps); enc->sent_headers = TRUE; return; flow_unexpected: GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL), ("got head tag before preamble")); enc->flow_return = GST_FLOW_ERROR; return; push_error: gst_caps_unref (caps); /* fallthrough */ alloc_error: /* Release whatever header buffers were never handed downstream. */ for (walk = headers; walk; walk = walk->next) gst_buffer_unref (GST_BUFFER (walk->data)); g_list_free (headers); return; }
/* Drains the postprocessor: pulls every remaining picture from the mixer
 * queue, renders it into a freshly allocated output buffer (honouring
 * pixel-aspect-ratio and force-aspect-ratio settings), copies over
 * timestamp/duration and the DISCONT/PREROLL/GAP flags, and pushes it on
 * the src pad.  Returns the first non-OK flow result, or the result of the
 * last push.
 *
 * Fix: the first argument to gst_vdp_vpp_get_next_picture had been mangled
 * into the literal bytes "¤t_pic" (an HTML-entity corruption of
 * "&current_pic"); restored to take the address of current_pic. */
static GstFlowReturn
gst_vdp_vpp_drain (GstVdpVideoPostProcess * vpp)
{
  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  GstFlowReturn ret;

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GError *err;
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }
    , dest_r = {
    0,};
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    err = NULL;
    ret =
        gst_vdp_output_src_pad_alloc_buffer ((GstVdpOutputSrcPad *)
        vpp->srcpad, &outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    /* Source rectangle: full frame, narrowed horizontally to apply the
     * pixel aspect ratio when one was negotiated. */
    src_r.w = vpp->width;
    src_r.h = vpp->height;
    if (vpp->got_par) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK)
      goto render_error;

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    /* Interlaced content outputs one field per buffer -> field duration. */
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    /* Propagate the flags downstream code relies on. */
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    err = NULL;
    ret = gst_vdp_output_src_pad_push ((GstVdpOutputSrcPad *) vpp->srcpad,
        outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, STREAM, FAILED, ("Invalid output caps"), (NULL));
    ret = GST_FLOW_ERROR;
    break;

  render_error:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
        ("Could not postprocess frame"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    ret = GST_FLOW_ERROR;
    break;

  output_pad_error:
    if (ret == GST_FLOW_ERROR && err != NULL)
      gst_vdp_vpp_post_error (vpp, err);
    break;
  }

  return ret;
}
/* Updates the SSRC, payload type, seqnum and timestamp of the RTP buffer
 * before the buffer is pushed.
 *
 * `obj` is either a GstBuffer or a GstBufferList depending on `is_list`.
 * The RTP timestamp is derived, in order of preference, from the buffer
 * offset (perfect-rtptime mode), from the PTS converted to running time and
 * clock-rate units, or — when neither is valid — from the previously used
 * timestamp.  Emits "timestamp"/"seqnum" notifications the first time after
 * they were requested. */ static GstFlowReturn gst_rtp_base_payload_prepare_push (GstRTPBasePayload * payload, gpointer obj, gboolean is_list) { GstRTPBasePayloadPrivate *priv; HeaderData data; /* Cannot compute RTP timestamps without a clock-rate. */ if (payload->clock_rate == 0) goto no_rate; priv = payload->priv; /* update first, so that the property is set to the last
   * seqnum pushed */ payload->seqnum = priv->next_seqnum; /* fill in the fields we want to set on all headers */ data.payload = payload; data.seqnum = payload->seqnum; data.ssrc = payload->current_ssrc; data.pt = payload->pt; /* find the first buffer with a timestamp */ if (is_list) { data.dts = -1; data.pts = -1; data.offset = GST_BUFFER_OFFSET_NONE; gst_buffer_list_foreach (GST_BUFFER_LIST_CAST (obj), find_timestamp, &data); } else { data.dts = GST_BUFFER_DTS (GST_BUFFER_CAST (obj)); data.pts = GST_BUFFER_PTS (GST_BUFFER_CAST (obj)); data.offset = GST_BUFFER_OFFSET (GST_BUFFER_CAST (obj)); } /* convert to RTP time */ if (priv->perfect_rtptime && data.offset != GST_BUFFER_OFFSET_NONE && priv->base_offset != GST_BUFFER_OFFSET_NONE) { /* if we have an offset, use that for making an RTP timestamp */ data.rtptime = payload->ts_base + priv->base_rtime + data.offset - priv->base_offset; GST_LOG_OBJECT (payload, "Using offset %" G_GUINT64_FORMAT " for RTP timestamp", data.offset); } else if (GST_CLOCK_TIME_IS_VALID (data.pts)) { gint64 rtime; /* no offset, use the gstreamer pts */ rtime = gst_segment_to_running_time (&payload->segment, GST_FORMAT_TIME, data.pts); if (rtime == -1) { GST_LOG_OBJECT (payload, "Clipped pts, using base RTP timestamp"); rtime = 0; } else { GST_LOG_OBJECT (payload, "Using running_time %" GST_TIME_FORMAT " for RTP timestamp", GST_TIME_ARGS (rtime)); /* scale running time (ns) into clock-rate units */ rtime = gst_util_uint64_scale_int (rtime, payload->clock_rate, GST_SECOND); /* remember this pair so perfect-rtptime can extrapolate from it */ priv->base_offset = data.offset; priv->base_rtime = rtime; } /* add running_time in clock-rate units to the base timestamp */ 
 data.rtptime = payload->ts_base + rtime; } else { GST_LOG_OBJECT (payload, "Using previous RTP timestamp %" G_GUINT32_FORMAT, payload->timestamp); /* no timestamp to convert, take previous timestamp */ data.rtptime = payload->timestamp; } /* set ssrc, payload type, seq number, caps and rtptime */ if (is_list) { gst_buffer_list_foreach (GST_BUFFER_LIST_CAST (obj), set_headers, &data); } else { GstBuffer *buf = GST_BUFFER_CAST (obj); set_headers (&buf, 0, &data); } priv->next_seqnum = data.seqnum; payload->timestamp = data.rtptime; GST_LOG_OBJECT (payload, "Preparing to push packet with size %" G_GSIZE_FORMAT ", seq=%d, rtptime=%u, pts %" GST_TIME_FORMAT, (is_list) ? -1 : gst_buffer_get_size (GST_BUFFER (obj)), payload->seqnum, data.rtptime, GST_TIME_ARGS (data.pts)); /* One-shot notification after a timestamp/seqnum reset was requested. */ if (g_atomic_int_compare_and_exchange (&payload-> priv->notified_first_timestamp, 1, 0)) { g_object_notify (G_OBJECT (payload), "timestamp"); g_object_notify (G_OBJECT (payload), "seqnum"); } return GST_FLOW_OK; /* ERRORS */ no_rate: { GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL), ("subclass did not specify clock-rate")); return GST_FLOW_ERROR; } }
/* Sink-pad chain function: optionally drops late buffers (QoS), uploads
 * raw YUV input into a VDPAU video buffer when needed, lazily creates the
 * mixer, queues the buffer and drains the post-processor.
 *
 * Ownership: @buffer is consumed on every path (pushed on, replaced by
 * video_buf, or unreffed on skip/error). */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstClockTime qostime;
  GstFlowReturn ret = GST_FLOW_OK;
  GError *err;  /* only initialized/used on the !native_input path */

  GST_DEBUG ("chain");

  /* can only do QoS if the segment is in TIME */
  if (vpp->segment.format != GST_FORMAT_TIME)
    goto no_qos;

  /* QOS is done on the running time of the buffer, get it now */
  qostime = gst_segment_to_running_time (&vpp->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buffer));

  if (qostime != -1) {
    gboolean need_skip;
    GstClockTime earliest_time;

    /* lock for getting the QoS parameters that are set (in a different thread)
     * with the QOS events */
    GST_OBJECT_LOCK (vpp);
    earliest_time = vpp->earliest_time;
    /* check for QoS, don't perform conversion for buffers
     * that are known to be late. */
    need_skip = GST_CLOCK_TIME_IS_VALID (earliest_time) &&
        qostime != -1 && qostime <= earliest_time;
    GST_OBJECT_UNLOCK (vpp);

    if (need_skip) {
      GST_DEBUG_OBJECT (vpp, "skipping transform: qostime %"
          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,
          GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
      /* mark discont for next buffer */
      vpp->discont = TRUE;
      gst_buffer_unref (buffer);
      /* early return: the pad-parent ref is NOT released here —
       * NOTE(review): looks like a vpp object-ref leak on the skip path;
       * verify against the `done:` cleanup below */
      return GST_FLOW_OK;
    }
  }

no_qos:

  /* propagate a discont we generated ourselves (e.g. after a QoS skip) */
  if (vpp->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    vpp->discont = FALSE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  if (!vpp->native_input) {
    GstVdpVideoBuffer *video_buf;

    /* input is raw YUV: copy it into a pooled VDPAU video buffer */
    err = NULL;
    video_buf =
        (GstVdpVideoBuffer *) gst_vdp_buffer_pool_get_buffer (vpp->vpool, &err);
    if (G_UNLIKELY (!video_buf))
      goto video_buf_error;

    if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,
            vpp->width, vpp->height)) {
      gst_buffer_unref (GST_BUFFER (video_buf));
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Couldn't upload YUV data to vdpau"), (NULL));
      ret = GST_FLOW_ERROR;
      goto error;
    }

    /* carry flags and timestamps over to the uploaded copy */
    gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

    /* drop the raw input; from here on `buffer` is the VDPAU buffer */
    gst_buffer_unref (buffer);
    buffer = GST_BUFFER (video_buf);
  }

  /* lazily create the VDPAU mixer on the first buffer */
  if (G_UNLIKELY (vpp->mixer == VDP_INVALID_HANDLE)) {
    ret = gst_vdp_vpp_create_mixer (vpp);
    if (ret != GST_FLOW_OK)
      goto error;
  }

  /* queue the buffer (ownership passes to the vpp) and push out whatever
   * is ready */
  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  ret = gst_vdp_vpp_drain (vpp);

done:
  /* release the ref taken by gst_pad_get_parent() */
  gst_object_unref (vpp);

  return ret;

error:
  gst_buffer_unref (buffer);
  goto done;

video_buf_error:
  gst_buffer_unref (GST_BUFFER (buffer));
  /* post_error consumes/reports `err` from the failed pool request */
  gst_vdp_vpp_post_error (vpp, err);
  ret = GST_FLOW_ERROR;
  goto done;
}
/* Builds an audiotestsrc ! audioconvert ! capsfilter ! injector !
 * audiorate ! fakesink pipeline, randomly drops buffers (via a pad probe)
 * and/or injects extra ones, runs it to EOS, and then verifies that
 * audiorate's output is a perfect stream: every buffer fully annotated,
 * with gapless, back-to-back timestamps and offsets. */
static void
do_perfect_stream_test (guint rate, guint width, gdouble drop_probability,
    gdouble inject_probability)
{
  GstElement *pipeline, *source, *convert, *capsfilter, *injector;
  GstElement *audiorate, *sink;
  GstMessage *message;
  GstCaps *filter_caps;
  GstPad *injector_srcpad;
  GList *walk, *buffers = NULL;
  GstClockTime expected_ts = GST_CLOCK_TIME_NONE;
  guint64 expected_offset = GST_BUFFER_OFFSET_NONE;

  filter_caps = gst_caps_new_simple ("audio/x-raw-int",
      "rate", G_TYPE_INT, rate, "width", G_TYPE_INT, width, NULL);

  GST_INFO ("-------- drop=%.0f%% caps = %" GST_PTR_FORMAT " ---------- ",
      drop_probability * 100.0, filter_caps);

  /* sanity-check the test parameters themselves */
  g_assert (drop_probability >= 0.0 && drop_probability <= 1.0);
  g_assert (inject_probability >= 0.0 && inject_probability <= 1.0);
  g_assert (width > 0 && (width % 8) == 0);

  pipeline = gst_pipeline_new ("pipeline");
  fail_unless (pipeline != NULL);

  source = gst_element_factory_make ("audiotestsrc", "audiotestsrc");
  fail_unless (source != NULL);
  g_object_set (source, "num-buffers", 100, NULL);

  convert = gst_element_factory_make ("audioconvert", "audioconvert");
  fail_unless (convert != NULL);

  capsfilter = gst_element_factory_make ("capsfilter", "capsfilter");
  fail_unless (capsfilter != NULL);
  g_object_set (capsfilter, "caps", filter_caps, NULL);

  injector_inject_probability = inject_probability;

  injector = GST_ELEMENT (g_object_new (test_injector_get_type (), NULL));

  /* randomly drop buffers downstream of the injector */
  injector_srcpad = gst_element_get_pad (injector, "src");
  fail_unless (injector_srcpad != NULL);
  gst_pad_add_buffer_probe (injector_srcpad, G_CALLBACK (probe_cb),
      &drop_probability);
  gst_object_unref (injector_srcpad);

  audiorate = gst_element_factory_make ("audiorate", "audiorate");
  fail_unless (audiorate != NULL);

  /* collect everything audiorate produces via the handoff signal */
  sink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (sink != NULL);
  g_object_set (sink, "signal-handoffs", TRUE, NULL);
  g_signal_connect (sink, "handoff", G_CALLBACK (got_buf), &buffers);

  gst_bin_add_many (GST_BIN (pipeline), source, convert, capsfilter,
      injector, audiorate, sink, NULL);
  gst_element_link_many (source, convert, capsfilter, injector, audiorate,
      sink, NULL);

  fail_unless_equals_int (gst_element_set_state (pipeline,
          GST_STATE_PLAYING), GST_STATE_CHANGE_ASYNC);
  fail_unless_equals_int (gst_element_get_state (pipeline, NULL, NULL, -1),
      GST_STATE_CHANGE_SUCCESS);

  message = gst_bus_poll (GST_ELEMENT_BUS (pipeline),
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  fail_unless_equals_string (GST_MESSAGE_TYPE_NAME (message), "eos");

  /* verify each output buffer continues exactly where the previous ended */
  walk = buffers;
  while (walk != NULL) {
    GstBuffer *buffer = GST_BUFFER (walk->data);
    guint sample_count;

    fail_unless (GST_BUFFER_TIMESTAMP_IS_VALID (buffer));
    fail_unless (GST_BUFFER_DURATION_IS_VALID (buffer));
    fail_unless (GST_BUFFER_OFFSET_IS_VALID (buffer));
    fail_unless (GST_BUFFER_OFFSET_END_IS_VALID (buffer));

    GST_LOG ("buffer: ts=%" GST_TIME_FORMAT ", end_ts=%" GST_TIME_FORMAT
        " off=%" G_GINT64_FORMAT ", end_off=%" G_GINT64_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer) +
            GST_BUFFER_DURATION (buffer)),
        GST_BUFFER_OFFSET (buffer), GST_BUFFER_OFFSET_END (buffer));

    if (GST_CLOCK_TIME_IS_VALID (expected_ts)) {
      fail_unless_equals_uint64 (expected_ts, GST_BUFFER_TIMESTAMP (buffer));
    }
    if (expected_offset != GST_BUFFER_OFFSET_NONE) {
      fail_unless_equals_uint64 (expected_offset, GST_BUFFER_OFFSET (buffer));
    }

    /* check buffer size for sanity */
    fail_unless_equals_int (GST_BUFFER_SIZE (buffer) % (width / 8), 0);

    /* check there is actually as much data as there should be */
    sample_count = GST_BUFFER_OFFSET_END (buffer) - GST_BUFFER_OFFSET (buffer);
    fail_unless_equals_int (GST_BUFFER_SIZE (buffer),
        sample_count * (width / 8));

    expected_ts = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
    expected_offset = GST_BUFFER_OFFSET_END (buffer);

    walk = walk->next;
  }

  gst_message_unref (message);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  g_list_foreach (buffers, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (buffers);

  gst_caps_unref (filter_caps);
}
/**
 * gst_wrapper_camera_bin_src_vidsrc_probe:
 *
 * Buffer probe called before sending each buffer to image queue.
 *
 * Acts as a small state machine on self->video_rec_status:
 * - DONE: drop the buffer (recording not active);
 * - STARTING: open a new TIME segment at the buffer's timestamp, post a
 *   preview sample, switch to RUNNING and pass the buffer through;
 * - FINISHING: push EOS to the peer, mark DONE, notify the base class,
 *   and drop the buffer;
 * - RUNNING (default branch): pass the buffer through.
 *
 * Returns: GST_PAD_PROBE_OK to forward the buffer, GST_PAD_PROBE_DROP
 * otherwise.
 */
static GstPadProbeReturn
gst_wrapper_camera_bin_src_vidsrc_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer data)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (data);
  GstBaseCameraSrc *camerasrc = GST_BASE_CAMERA_SRC_CAST (self);
  /* drop by default; only the pass-through branches flip this to OK */
  GstPadProbeReturn ret = GST_PAD_PROBE_DROP;
  GstBuffer *buffer = GST_BUFFER (info->data);

  GST_LOG_OBJECT (self, "Video probe, mode %d, capture status %d",
      camerasrc->mode, self->video_rec_status);

  /* TODO do we want to lock for every buffer? */
  /*
   * Note that we can use gst_pad_push_event here because we are a buffer
   * probe.
   */
  /* TODO shouldn't access this directly */
  g_mutex_lock (&camerasrc->capturing_mutex);
  if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
    /* NOP */
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
    GstClockTime ts;
    GstSegment segment;
    GstCaps *caps;
    GstSample *sample;

    GST_DEBUG_OBJECT (self, "Starting video recording");
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;

    /* start the new segment at this buffer's timestamp (or 0 if it has
     * none) so downstream sees a consistent timeline */
    ts = GST_BUFFER_TIMESTAMP (buffer);
    if (!GST_CLOCK_TIME_IS_VALID (ts))
      ts = 0;
    gst_segment_init (&segment, GST_FORMAT_TIME);
    segment.start = ts;
    gst_pad_push_event (self->vidsrc, gst_event_new_segment (&segment));

    /* post preview */
    GST_DEBUG_OBJECT (self, "Posting preview for video");
    /* NOTE(review): gst_pad_get_current_caps() can return NULL if caps are
     * not yet negotiated; gst_caps_unref (NULL) would warn — verify callers
     * guarantee negotiated caps here */
    caps = gst_pad_get_current_caps (pad);
    sample = gst_sample_new (buffer, caps, NULL, NULL);
    gst_base_camera_src_post_preview (camerasrc, sample);
    gst_caps_unref (caps);
    gst_sample_unref (sample);

    ret = GST_PAD_PROBE_OK;
  } else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_FINISHING) {
    GstPad *peer;

    /* send eos */
    GST_DEBUG_OBJECT (self, "Finishing video recording, pushing eos");

    peer = gst_pad_get_peer (self->vidsrc);

    if (peer) {
      /* send to the peer as we don't want our pads with eos flag */
      gst_pad_send_event (peer, gst_event_new_eos ());
      gst_object_unref (peer);
    } else {
      GST_WARNING_OBJECT (camerasrc, "No peer pad for vidsrc");
    }
    self->video_rec_status = GST_VIDEO_RECORDING_STATUS_DONE;
    gst_base_camera_src_finish_capture (camerasrc);
  } else {
    /* RUNNING: normal recording, let the buffer through */
    ret = GST_PAD_PROBE_OK;
  }
  g_mutex_unlock (&camerasrc->capturing_mutex);
  return ret;
}