static gboolean
push_data (CustomData *data)
{
  static gboolean white = FALSE;
  static GstClockTime timestamp = 0;
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

  size = 385 * 288 * 2;

  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, white ? 0xff : 0x0, size);
  white = !white;

  /* set PTS and duration; the timestamp arithmetic below would otherwise
   * add GST_CLOCK_TIME_NONE */
  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
  timestamp += GST_BUFFER_DURATION (buffer);

  g_print ("uint64 timestamp: %" G_GUINT64_FORMAT "\n", timestamp);

  /* the "push-buffer" action signal does not take ownership, so drop
   * our reference afterwards */
  g_signal_emit_by_name (data->appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
  }

  return TRUE;
}
static void
gst_imx_ipu_blitter_init_dummy_black_buffer (GstImxIpuBlitter *ipu_blitter)
{
  GstVideoInfo video_info;

  gst_video_info_init (&video_info);
  gst_video_info_set_format (&video_info, GST_VIDEO_FORMAT_RGBx, 64, 64);

  ipu_blitter->dummy_black_buffer =
      gst_buffer_new_allocate (ipu_blitter->allocator,
      GST_VIDEO_INFO_SIZE (&video_info), NULL);
  gst_buffer_memset (ipu_blitter->dummy_black_buffer, 0, 0,
      GST_VIDEO_INFO_SIZE (&video_info));

  gst_buffer_add_video_meta_full (
      ipu_blitter->dummy_black_buffer,
      GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_INFO_FORMAT (&video_info),
      GST_VIDEO_INFO_WIDTH (&video_info),
      GST_VIDEO_INFO_HEIGHT (&video_info),
      GST_VIDEO_INFO_N_PLANES (&video_info),
      &(GST_VIDEO_INFO_PLANE_OFFSET (&video_info, 0)),
      &(GST_VIDEO_INFO_PLANE_STRIDE (&video_info, 0))
  );

  {
    GstImxPhysMemory *imx_phys_mem_mem = (GstImxPhysMemory *)
        gst_buffer_peek_memory (ipu_blitter->dummy_black_buffer, 0);
    GstImxPhysMemMeta *phys_mem_meta = (GstImxPhysMemMeta *)
        GST_IMX_PHYS_MEM_META_ADD (ipu_blitter->dummy_black_buffer);

    phys_mem_meta->phys_addr = imx_phys_mem_mem->phys_addr;
  }
}
static void
cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
  static gboolean white = FALSE;
  static GstClockTime timestamp = 0;
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

  size = 385 * 288 * 2;

  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, white ? 0xff : 0x0, size);
  white = !white;

  /* timestamp the buffer so downstream can pace playback */
  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
  timestamp += GST_BUFFER_DURATION (buffer);

  /* the "push-buffer" action signal does not take ownership */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
  }
}
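Both push_data() and cb_need_data() above assume wiring the snippets don't show: an appsrc configured with raw-video caps and a global GMainLoop named loop. The sketch below is a hypothetical version of that wiring, not taken from any particular project; the caps values (384x288 YUY2 at 2 fps) are an assumption chosen to roughly match the buffer math, and note the snippets allocate 385 * 288 * 2 bytes, slightly more than a 384-wide YUY2 frame strictly needs.

/* Hypothetical wiring for the need-data callbacks above. */
#include <gst/gst.h>

static GMainLoop *loop;         /* the global the callbacks quit on error */

static void
setup_appsrc (GstElement * appsrc)
{
  GstCaps *caps;

  /* appsrc pushes opaque bytes; caps tell downstream how to read them */
  caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "YUY2",
      "width", G_TYPE_INT, 384,
      "height", G_TYPE_INT, 288,
      "framerate", GST_TYPE_FRACTION, 2, 1, NULL);
  g_object_set (G_OBJECT (appsrc),
      "caps", caps,
      "format", GST_FORMAT_TIME,        /* PTS values are running time */
      NULL);
  gst_caps_unref (caps);

  /* emitted whenever appsrc's internal queue needs more data */
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);
}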
static void
check_filter_caps (const gchar * name, GstEvent * event, GstCaps * caps,
    gint size, gint num_buffers, const gchar * prop, va_list varargs)
{
  GstElement *filter;
  GstBuffer *inbuffer, *outbuffer;
  gint i;
  GstSegment segment;

  filter = setup_filter (name, prop, varargs);
  fail_unless (gst_element_set_state (filter,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  gst_check_setup_events (mysrcpad, filter, caps, GST_FORMAT_TIME);

  /* ensure segment (format) properly setup */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad,
          gst_event_new_segment (&segment)));

  if (event)
    fail_unless (gst_pad_push_event (mysrcpad, event));

  for (i = 0; i < num_buffers; ++i) {
    inbuffer = gst_buffer_new_and_alloc (size);
    /* makes valgrind's memcheck happier */
    gst_buffer_memset (inbuffer, 0, 0, size);
    GST_BUFFER_TIMESTAMP (inbuffer) = 0;
    ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
    fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);
  }

  fail_unless (g_list_length (buffers) == num_buffers);

  /* clean up buffers */
  for (i = 0; i < num_buffers; ++i) {
    outbuffer = GST_BUFFER (buffers->data);
    fail_if (outbuffer == NULL);

    switch (i) {
      case 0:
        fail_unless (gst_buffer_get_size (outbuffer) == size);
        /* no check on filter operation itself */
        break;
      default:
        break;
    }

    buffers = g_list_remove (buffers, outbuffer);

    ASSERT_BUFFER_REFCOUNT (outbuffer, "outbuffer", 1);
    gst_buffer_unref (outbuffer);
    outbuffer = NULL;
  }

  cleanup_filter (filter);
  g_list_free (buffers);
  buffers = NULL;
}
static GstBuffer *
gst_cd_foo_src_read_sector (GstAudioCdSrc * audiocdsrc, gint sector)
{
  GstBuffer *buf;

  buf = gst_buffer_new_and_alloc (CD_FRAMESIZE_RAW);
  gst_buffer_memset (buf, 0, 0, CD_FRAMESIZE_RAW);

  return buf;
}
static GstFlowReturn
gst_codec_src_create (GstPushSrc * src, GstBuffer ** p_buf)
{
  GstBuffer *buf;

  buf = gst_buffer_new_and_alloc (20);
  gst_buffer_memset (buf, 0, 0, 20);
  *p_buf = buf;

  return GST_FLOW_OK;
}
static GstBufferList *
create_buffer_list (guint * data_size)
{
  GstBufferList *list;
  GstBuffer *rtp_buffer;
  GstBuffer *data_buffer;

  list = gst_buffer_list_new ();

  /*** First group, i.e. first packet. ***/

  /* Create the RTP header buffer */
  rtp_buffer = gst_buffer_new_allocate (NULL, RTP_HEADER_SIZE, NULL);
  gst_buffer_memset (rtp_buffer, 0, 0, RTP_HEADER_SIZE);

  /* Create the buffer that holds the payload */
  data_buffer = gst_buffer_new_allocate (NULL, RTP_PAYLOAD_SIZE, NULL);
  gst_buffer_memset (data_buffer, 0, 0, RTP_PAYLOAD_SIZE);

  /* Create a new group to hold the rtp header and the payload */
  gst_buffer_list_add (list, gst_buffer_append (rtp_buffer, data_buffer));

  /*** Second group, i.e. second packet. ***/

  /* Create the RTP header buffer */
  rtp_buffer = gst_buffer_new_allocate (NULL, RTP_HEADER_SIZE, NULL);
  gst_buffer_memset (rtp_buffer, 0, 0, RTP_HEADER_SIZE);

  /* Create the buffer that holds the payload */
  data_buffer = gst_buffer_new_allocate (NULL, RTP_PAYLOAD_SIZE, NULL);
  gst_buffer_memset (data_buffer, 0, 0, RTP_PAYLOAD_SIZE);

  /* Create a new group to hold the rtp header and the payload */
  gst_buffer_list_add (list, gst_buffer_append (rtp_buffer, data_buffer));

  /* Calculate the size of the data */
  *data_size = 2 * RTP_HEADER_SIZE + 2 * RTP_PAYLOAD_SIZE;

  return list;
}
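A buffer list built this way is normally pushed downstream in a single call, which is the point of grouping header and payload per packet. The usage sketch below is hypothetical; srcpad and the fail_unless harness stand in for whatever the surrounding test provides.

/* Hypothetical usage of create_buffer_list(): push both packets at once. */
static void
push_packets (GstPad * srcpad)
{
  guint data_size;
  GstBufferList *list = create_buffer_list (&data_size);

  /* gst_pad_push_list() takes ownership of the list and, unlike two
   * separate gst_pad_push() calls, hands downstream (e.g. a network
   * sink) the packets as one batch */
  fail_unless (gst_pad_push_list (srcpad, list) == GST_FLOW_OK);
  fail_unless (data_size == 2 * (RTP_HEADER_SIZE + RTP_PAYLOAD_SIZE));
}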
/* push a random block of audio of the given size */
static void
push_data (gint size, GstFlowReturn expected_return)
{
  GstBuffer *buffer;
  GstFlowReturn res;

  buffer = gst_buffer_new_and_alloc (size);
  /* make valgrind happier */
  gst_buffer_memset (buffer, 0, 0, size);

  res = gst_pad_push (srcpad, buffer);
  fail_unless (res == expected_return,
      "pushing audio returned %d (%s) not %d (%s)", res,
      gst_flow_get_name (res), expected_return,
      gst_flow_get_name (expected_return));
}
/* Update the buffer used to draw black borders. When we have viewporter
 * support, this is a scaled-up 1x1 image; without it, we need a black
 * image the size of the rendering area. */
static void
gst_wl_window_update_borders (GstWlWindow * window)
{
  GstVideoFormat format;
  GstVideoInfo info;
  gint width, height;
  GstBuffer *buf;
  struct wl_buffer *wlbuf;
  GstWlBuffer *gwlbuf;
  GstAllocator *alloc;

  if (window->no_border_update)
    return;

  if (window->display->viewporter) {
    width = height = 1;
    window->no_border_update = TRUE;
  } else {
    width = window->render_rectangle.w;
    height = window->render_rectangle.h;
  }

  /* we want WL_SHM_FORMAT_XRGB8888 */
#if G_BYTE_ORDER == G_BIG_ENDIAN
  format = GST_VIDEO_FORMAT_xRGB;
#else
  format = GST_VIDEO_FORMAT_BGRx;
#endif

  /* draw the area_subsurface */
  gst_video_info_set_format (&info, format, width, height);

  alloc = gst_wl_shm_allocator_get ();

  buf = gst_buffer_new_allocate (alloc, info.size, NULL);
  gst_buffer_memset (buf, 0, 0, info.size);
  wlbuf =
      gst_wl_shm_memory_construct_wl_buffer (gst_buffer_peek_memory (buf, 0),
      window->display, &info);
  gwlbuf = gst_buffer_add_wl_buffer (buf, wlbuf, window->display);
  gst_wl_buffer_attach (gwlbuf, window->area_surface_wrapper);

  /* at this point, the GstWlBuffer keeps the buffer
   * alive and will free it on wl_buffer::release */
  gst_buffer_unref (buf);
  g_object_unref (alloc);
}
static gpointer
push_vbuffers (gpointer data)
{
  GstSegment segment;
  GstPad *pad = data;
  gint i;
  GstClockTime timestamp = 0;

  if (videodelay)
    g_usleep (2000);

  if (late_video)
    timestamp = 50 * GST_MSECOND;

  gst_pad_send_event (pad, gst_event_new_stream_start ("test"));
  gst_segment_init (&segment, GST_FORMAT_TIME);
  gst_pad_send_event (pad, gst_event_new_segment (&segment));

  for (i = 0; i < n_vbuffers; i++) {
    GstBuffer *buf = gst_buffer_new_and_alloc (1000);
    GstClockTime *rtime = g_new (GstClockTime, 1);

    gst_buffer_memset (buf, 0, i, 1);

    GST_BUFFER_TIMESTAMP (buf) = timestamp;
    timestamp += 25 * GST_MSECOND;
    GST_BUFFER_DURATION (buf) = timestamp - GST_BUFFER_TIMESTAMP (buf);

    *rtime = gst_segment_to_running_time (&segment, GST_FORMAT_TIME,
        timestamp);
    g_queue_push_tail (&v_timestamp_q, rtime);

    if (i == 4) {
      if (video_gaps)
        timestamp += 10 * GST_MSECOND;
      else if (video_overlaps)
        timestamp -= 10 * GST_MSECOND;
    }

    fail_unless (gst_pad_chain (pad, buf) == GST_FLOW_OK);
  }
  gst_pad_send_event (pad, gst_event_new_eos ());

  return NULL;
}
static void
live_switch_push (int rate, GstCaps * caps)
{
  GstBuffer *inbuffer;
  GstCaps *desired;
  GList *l;

  desired = gst_caps_copy (caps);
  gst_caps_set_simple (desired, "rate", G_TYPE_INT, rate, NULL);
  gst_pad_set_caps (mysrcpad, desired);

  inbuffer = gst_buffer_new_and_alloc (rate * 4);
  gst_buffer_memset (inbuffer, 0, 0, rate * 4);
  GST_BUFFER_DURATION (inbuffer) = GST_SECOND;
  GST_BUFFER_TIMESTAMP (inbuffer) = 0;
  GST_BUFFER_OFFSET (inbuffer) = 0;

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

  /* ... but it ends up being collected on the global buffer list */
  fail_unless_equals_int (g_list_length (buffers), 1);

  for (l = buffers; l; l = l->next) {
    GstBuffer *buffer = GST_BUFFER (l->data);

    gst_buffer_unref (buffer);
  }

  g_list_free (buffers);
  buffers = NULL;

  gst_caps_unref (desired);
}
/* called when we need to give data to appsrc */
static void
need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

  size = 385 * 288 * 2;
  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, ctx->white ? 0xff : 0x0, size);
  ctx->white = !ctx->white;

  /* get timestamp from clock */
  //ctx->timestamp = gst_clock_get_time (global_clock);
  g_print ("Time is: %" G_GUINT64_FORMAT "\n", ctx->timestamp);
  /* remove basetime */
  //ctx->timestamp -= gst_element_get_base_time (GST_ELEMENT (appsrc));

  GST_BUFFER_PTS (buffer) = ctx->timestamp;
  g_print ("PTS: %" G_GUINT64_FORMAT " BASETIME %" G_GUINT64_FORMAT
      " SUM %" G_GUINT64_FORMAT "\n", ctx->timestamp,
      gst_element_get_base_time (appsrc),
      ctx->timestamp + gst_element_get_base_time (appsrc));
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
  ctx->timestamp += GST_BUFFER_DURATION (buffer);

  /* the "push-buffer" action signal does not take ownership */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);
}
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
  GstRTPBasePayload *rtppay;
  GstAsfPacketInfo *packetinfo;
  guint8 flags;
  guint8 *data;
  guint32 packet_util_size;
  guint32 packet_offset;
  guint32 size_left;
  GstFlowReturn ret = GST_FLOW_OK;

  rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
  packetinfo = &rtpasfpay->packetinfo;

  if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
          rtpasfpay->asfinfo.packet_size)) {
    GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (packetinfo->packet_size == 0)
    packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

  GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
      ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
      packetinfo->padding);

  /* update padding field to 0 */
  if (packetinfo->padding > 0) {
    GstAsfPacketInfo info;
    /* find padding field offset */
    guint offset = packetinfo->err_cor_len + 2 +
        gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
        gst_asf_get_var_size_field_len (packetinfo->seq_field_type);

    buffer = gst_buffer_make_writable (buffer);
    switch (packetinfo->padd_field_type) {
      case ASF_FIELD_TYPE_DWORD:
        gst_buffer_memset (buffer, offset, 0, 4);
        break;
      case ASF_FIELD_TYPE_WORD:
        gst_buffer_memset (buffer, offset, 0, 2);
        break;
      case ASF_FIELD_TYPE_BYTE:
        gst_buffer_memset (buffer, offset, 0, 1);
        break;
      case ASF_FIELD_TYPE_NONE:
      default:
        break;
    }
    gst_asf_parse_packet (buffer, &info, FALSE, 0);
  }

  if (packetinfo->padding != 0)
    packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
  else
    packet_util_size = packetinfo->packet_size;
  packet_offset = 0;
  while (packet_util_size > 0) {
    /* Even if we don't completely fill an output buffer, we push it
     * when we add a fragment, because it seems impossible to determine
     * where an asf packet fragment ends inside a rtp packet payload.
     * This flag tells us to push the packet. */
    gboolean force_push = FALSE;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

    /* we have no output buffer pending, create one */
    if (rtpasfpay->current == NULL) {
      GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
      rtpasfpay->current =
          gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU
          (rtpasfpay), 0, 0);
      rtpasfpay->cur_off = 0;
      rtpasfpay->has_ts = FALSE;
      rtpasfpay->marker = FALSE;
    }
    gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
    data = gst_rtp_buffer_get_payload (&rtp);
    data += rtpasfpay->cur_off;
    size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

    GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
        G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
        gst_buffer_get_size (buffer));

    GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
    GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
        rtpasfpay->cur_off);
    GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
    GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
        rtpasfpay->has_ts ? "yes" : "no");
    if (rtpasfpay->has_ts) {
      GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
    }

    flags = 0;
    if (packetinfo->has_keyframe) {
      flags = flags | 0x80;
    }
    flags = flags | 0x20;       /* Relative timestamp is present */

    if (!rtpasfpay->has_ts) {
      /* this is the first asf packet, its send time is the
       * rtp packet timestamp */
      rtpasfpay->has_ts = TRUE;
      rtpasfpay->ts = packetinfo->send_time;
    }

    if (size_left >= packet_util_size + 8) {
      /* enough space for the rest of the packet */
      if (packet_offset == 0) {
        flags = flags | 0x40;
        GST_WRITE_UINT24_BE (data + 1, packet_util_size);
      } else {
        GST_WRITE_UINT24_BE (data + 1, packet_offset);
        force_push = TRUE;
      }
      data[0] = flags;
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

      /* updating status variables */
      rtpasfpay->cur_off += 8 + packet_util_size;
      size_left -= packet_util_size + 8;
      packet_offset += packet_util_size;
      packet_util_size = 0;
      rtpasfpay->marker = TRUE;
    } else {
      /* fragment packet */
      data[0] = flags;
      GST_WRITE_UINT24_BE (data + 1, packet_offset);
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

      /* updating status variables */
      rtpasfpay->cur_off += size_left;
      packet_offset += size_left - 8;
      packet_util_size -= size_left - 8;
      size_left = 0;
      force_push = TRUE;
    }

    /* there is not enough room for any more buffers */
    if (force_push || size_left <= 8) {
      gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
      gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
      gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
      gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
      gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
      gst_rtp_buffer_unmap (&rtp);

      /* trim remaining bytes not used */
      if (size_left != 0) {
        gst_buffer_set_size (rtpasfpay->current,
            gst_buffer_get_size (rtpasfpay->current) - size_left);
      }

      GST_BUFFER_TIMESTAMP (rtpasfpay->current) =
          GST_BUFFER_TIMESTAMP (buffer);

      rtppay->seqnum++;
      rtppay->timestamp = packetinfo->send_time;

      GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
      ret = gst_rtp_base_payload_push (rtppay, rtpasfpay->current);
      rtpasfpay->current = NULL;
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        return ret;
      }
    }
  }
  gst_buffer_unref (buffer);

  return ret;
}
static void
test_video_profile (const gchar * profile, gint profile_id,
    const gchar * input_format)
{
  GstElement *x264enc;
  GstBuffer *inbuffer, *outbuffer;
  int i, num_buffers;

  x264enc = setup_x264enc (profile, "avc", input_format);
  fail_unless (gst_element_set_state (x264enc,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  /* corresponds to I420 buffer for the size mentioned in the caps */
  if (!strcmp (input_format, "I420"))
    inbuffer = gst_buffer_new_and_alloc (384 * 288 * 3 / 2);
  else if (!strcmp (input_format, "Y42B"))
    inbuffer = gst_buffer_new_and_alloc (384 * 288 * 2);
  else if (!strcmp (input_format, "Y444"))
    inbuffer = gst_buffer_new_and_alloc (384 * 288 * 3);
  else
    g_assert_not_reached ();

  /* makes valgrind's memcheck happier */
  gst_buffer_memset (inbuffer, 0, 0, -1);
  GST_BUFFER_TIMESTAMP (inbuffer) = 0;
  ASSERT_BUFFER_REFCOUNT (inbuffer, "inbuffer", 1);
  fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

  /* send eos to have all flushed if needed */
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()) == TRUE);

  num_buffers = g_list_length (buffers);
  fail_unless (num_buffers == 1);

  /* check output caps */
  {
    GstCaps *outcaps;

    outcaps = gst_pad_get_current_caps (mysinkpad);
    check_caps (outcaps, profile, profile_id);
    gst_caps_unref (outcaps);
  }

  /* clean up buffers */
  for (i = 0; i < num_buffers; ++i) {
    outbuffer = GST_BUFFER (buffers->data);
    fail_if (outbuffer == NULL);

    switch (i) {
      case 0:
      {
        gint nsize, npos, j, type, next_type;
        GstMapInfo map;
        const guint8 *data;
        gsize size;

        gst_buffer_map (outbuffer, &map, GST_MAP_READ);
        data = map.data;
        size = map.size;

        npos = 0;
        j = 0;
        /* need SPS first */
        next_type = 7;

        /* loop through NALs */
        while (npos < size) {
          fail_unless (size - npos >= 4);
          nsize = GST_READ_UINT32_BE (data + npos);
          fail_unless (nsize > 0);
          fail_unless (npos + 4 + nsize <= size);
          type = data[npos + 4] & 0x1F;
          /* check the first NALs, disregard AU (9), SEI (6) */
          if (type != 9 && type != 6) {
            fail_unless (type == next_type);
            switch (type) {
              case 7:
                /* SPS */
                next_type = 8;
                break;
              case 8:
                /* PPS */
                next_type = 5;
                break;
              default:
                break;
            }
            j++;
          }
          npos += nsize + 4;
        }
        gst_buffer_unmap (outbuffer, &map);
        /* should have reached the exact end */
        fail_unless (npos == size);
        break;
      }
      default:
        break;
    }

    buffers = g_list_remove (buffers, outbuffer);

    ASSERT_BUFFER_REFCOUNT (outbuffer, "outbuffer", 1);
    gst_buffer_unref (outbuffer);
    outbuffer = NULL;
  }

  cleanup_x264enc (x264enc);
  g_list_free (buffers);
  buffers = NULL;
}
static gboolean
gst_rtp_dv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstRTPDVDepay *rtpdvdepay;
  GstCaps *srccaps;
  gint clock_rate;
  gboolean systemstream, ret;
  const gchar *encode, *media;

  rtpdvdepay = GST_RTP_DV_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;         /* default */
  depayload->clock_rate = clock_rate;

  /* we really need the encode property to figure out the frame size, it's
   * also required by the spec */
  if (!(encode = gst_structure_get_string (structure, "encode")))
    goto no_encode;

  /* figure out the size of one frame */
  if (!parse_encode (rtpdvdepay, encode))
    goto unknown_encode;

  /* check the media, this tells us that the stream has video or not */
  if (!(media = gst_structure_get_string (structure, "media")))
    goto no_media;

  systemstream = FALSE;

  if (!strcmp (media, "audio")) {
    /* we need a demuxer for audio only */
    systemstream = TRUE;
  } else if (!strcmp (media, "video")) {
    const gchar *audio;

    /* check the optional audio field, if it's present and set to bundled,
     * we are dealing with a system stream. */
    if ((audio = gst_structure_get_string (structure, "audio"))) {
      if (!strcmp (audio, "bundled"))
        systemstream = TRUE;
    }
  }

  /* allocate accumulator */
  rtpdvdepay->acc = gst_buffer_new_and_alloc (rtpdvdepay->frame_size);

  /* Initialize the new accumulator frame.
   * If the previous frame exists, copy that into the accumulator frame.
   * This way, missing packets in the stream won't show up badly. */
  gst_buffer_memset (rtpdvdepay->acc, 0, 0, rtpdvdepay->frame_size);

  srccaps = gst_caps_new_simple ("video/x-dv",
      "systemstream", G_TYPE_BOOLEAN, systemstream,
      "width", G_TYPE_INT, rtpdvdepay->width,
      "height", G_TYPE_INT, rtpdvdepay->height,
      "framerate", GST_TYPE_FRACTION, rtpdvdepay->rate_num,
      rtpdvdepay->rate_denom, NULL);
  ret = gst_pad_set_caps (depayload->srcpad, srccaps);
  gst_caps_unref (srccaps);

  return ret;

  /* ERRORS */
no_encode:
  {
    GST_ERROR_OBJECT (rtpdvdepay, "required encode property not found in caps");
    return FALSE;
  }
unknown_encode:
  {
    GST_ERROR_OBJECT (rtpdvdepay, "unknown encode property %s found", encode);
    return FALSE;
  }
no_media:
  {
    GST_ERROR_OBJECT (rtpdvdepay, "required media property not found in caps");
    return FALSE;
  }
}
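For reference, caps along the following lines would pass the checks above. This is a hypothetical example, not taken from the source; the encode string is one of the values RFC 3189 defines, and audio=bundled selects the systemstream path.

/* Hypothetical caps accepted by gst_rtp_dv_depay_setcaps() above:
 * encode and media are required, audio=bundled marks a system stream */
GstCaps *caps = gst_caps_new_simple ("application/x-rtp",
    "media", G_TYPE_STRING, "video",
    "encode", G_TYPE_STRING, "SD-VCR/625-50",
    "audio", G_TYPE_STRING, "bundled",
    "clock-rate", G_TYPE_INT, 90000, NULL);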
static gpointer
push_abuffers (gpointer data)
{
  GstSegment segment;
  GstPad *pad = data;
  gint i, j, k;
  GstClockTime timestamp = 0;
  GstAudioInfo info;
  GstCaps *caps;
  guint buf_size = 1000;

  if (audiodelay)
    g_usleep (2000);

  if (early_video)
    timestamp = 50 * GST_MSECOND;

  gst_pad_send_event (pad, gst_event_new_stream_start ("test"));

  gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S8, buf_size, channels,
      NULL);
  caps = gst_audio_info_to_caps (&info);
  gst_pad_send_event (pad, gst_event_new_caps (caps));
  gst_caps_unref (caps);

  gst_segment_init (&segment, GST_FORMAT_TIME);
  gst_pad_send_event (pad, gst_event_new_segment (&segment));

  for (i = 0; i < n_abuffers; i++) {
    GstBuffer *buf = gst_buffer_new_and_alloc (channels * buf_size);

    if (per_channel) {
      GstMapInfo map;
      guint8 *in_data;

      gst_buffer_map (buf, &map, GST_MAP_WRITE);
      in_data = map.data;

      for (j = 0; j < buf_size; j++) {
        for (k = 0; k < channels; k++) {
          in_data[j * channels + k] = fill_value_per_channel[k];
        }
      }

      gst_buffer_unmap (buf, &map);
    } else {
      gst_buffer_memset (buf, 0, fill_value, channels * buf_size);
    }

    GST_BUFFER_TIMESTAMP (buf) = timestamp;
    timestamp += 1 * GST_SECOND;
    if (audio_drift)
      timestamp += 50 * GST_MSECOND;
    else if (i == 4 && audio_nondiscont)
      timestamp += 30 * GST_MSECOND;
    GST_BUFFER_DURATION (buf) = timestamp - GST_BUFFER_TIMESTAMP (buf);

    fail_unless (gst_pad_chain (pad, buf) == GST_FLOW_OK);
  }
  gst_pad_send_event (pad, gst_event_new_eos ());

  return NULL;
}
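push_vbuffers() and push_abuffers() are written as GThreadFunc bodies, so the harness they imply runs them concurrently against the sink pads of the element under test and waits for both to finish. A minimal sketch, with hypothetical pad arguments:

/* Hypothetical harness: run both push threads concurrently, then join.
 * Each thread finishes by sending EOS on its pad. */
static void
run_push_threads (GstPad * video_sink_pad, GstPad * audio_sink_pad)
{
  GThread *vthread, *athread;

  vthread = g_thread_new ("push-video", push_vbuffers, video_sink_pad);
  athread = g_thread_new ("push-audio", push_abuffers, audio_sink_pad);

  g_thread_join (vthread);
  g_thread_join (athread);
}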