/* Instance initialiser: install the sink/src pad functions, create the
 * input/output adapters and allocate the first (empty) frame. */
static void
gst_base_video_parse_init (GstBaseVideoParse * base_video_parse,
    GstBaseVideoParseClass * klass)
{
  GstPad *sinkpad, *srcpad;

  GST_DEBUG ("gst_base_video_parse_init");

  sinkpad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse);
  gst_pad_set_chain_function (sinkpad, gst_base_video_parse_chain);
  gst_pad_set_query_function (sinkpad, gst_base_video_parse_sink_query);
  gst_pad_set_event_function (sinkpad, gst_base_video_parse_sink_event);

  srcpad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse);
  gst_pad_set_query_type_function (srcpad,
      gst_base_video_parse_get_query_types);
  gst_pad_set_query_function (srcpad, gst_base_video_parse_src_query);
  gst_pad_set_event_function (srcpad, gst_base_video_parse_src_event);

  /* adapters accumulate incoming and outgoing bytes */
  base_video_parse->input_adapter = gst_adapter_new ();
  base_video_parse->output_adapter = gst_adapter_new ();

  base_video_parse->reorder_depth = 1;

  /* start with a fresh frame ready to be filled */
  base_video_parse->current_frame =
      gst_base_video_parse_new_frame (base_video_parse);
}
/* Instance initialiser: install pad functions on both pads, create the
 * adapters, reset decoder state and allocate the initial frame. */
static void
gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
    GstBaseVideoDecoderClass * klass)
{
  GstPad *sinkpad, *srcpad;

  GST_DEBUG ("gst_base_video_decoder_init");

  sinkpad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_decoder);
  gst_pad_set_activatepush_function (sinkpad,
      gst_base_video_decoder_sink_activate_push);
  gst_pad_set_chain_function (sinkpad, gst_base_video_decoder_chain);
  gst_pad_set_event_function (sinkpad, gst_base_video_decoder_sink_event);
  gst_pad_set_setcaps_function (sinkpad, gst_base_video_decoder_sink_setcaps);
  gst_pad_set_query_function (sinkpad, gst_base_video_decoder_sink_query);

  srcpad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder);
  gst_pad_set_event_function (srcpad, gst_base_video_decoder_src_event);
  gst_pad_set_query_type_function (srcpad,
      gst_base_video_decoder_get_query_types);
  gst_pad_set_query_function (srcpad, gst_base_video_decoder_src_query);

  base_video_decoder->input_adapter = gst_adapter_new ();
  base_video_decoder->output_adapter = gst_adapter_new ();

  gst_segment_init (&base_video_decoder->state.segment, GST_FORMAT_TIME);

  /* reset state first, then allocate the initial frame */
  gst_base_video_decoder_reset (base_video_decoder);
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  base_video_decoder->sink_clipping = TRUE;
}
/* Instance initialiser: create the three adapters used while
 * reassembling the JPEG 2000 stream at different levels. */
static void
gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay)
{
  rtpj2kdepay->f_adapter = gst_adapter_new ();
  rtpj2kdepay->t_adapter = gst_adapter_new ();
  rtpj2kdepay->pu_adapter = gst_adapter_new ();
}
/* Instance initialiser (0.10 API): set the buffer-list default and
 * create the three reassembly adapters. */
static void
gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay,
    GstRtpJ2KDepayClass * klass)
{
  rtpj2kdepay->buffer_list = DEFAULT_BUFFER_LIST;

  rtpj2kdepay->f_adapter = gst_adapter_new ();
  rtpj2kdepay->t_adapter = gst_adapter_new ();
  rtpj2kdepay->pu_adapter = gst_adapter_new ();
}
// Construct the provider: zero all client / deinterleave bookkeeping,
// initialise the adapter mutex and create one adapter per channel.
AudioSourceProviderGStreamer::AudioSourceProviderGStreamer()
    : m_client(0)
    , m_deinterleaveSourcePads(0)
    , m_deinterleavePadAddedHandlerId(0)
    , m_deinterleaveNoMorePadsHandlerId(0)
    , m_deinterleavePadRemovedHandlerId(0)
{
    g_mutex_init(&m_adapterMutex);
    m_frontRightAdapter = gst_adapter_new();
    m_frontLeftAdapter = gst_adapter_new();
}
/* Initialise a parser state: zero the struct, then create the input and
 * output adapters.  Returns FALSE as soon as an allocation fails. */
static gboolean
parser_state_init (GstVaapiParserState * ps)
{
  memset (ps, 0, sizeof (*ps));

  ps->input_adapter = gst_adapter_new ();
  if (ps->input_adapter == NULL)
    return FALSE;

  ps->output_adapter = gst_adapter_new ();
  if (ps->output_adapter == NULL)
    return FALSE;

  return TRUE;
}
/* Instance initialiser: create sink and src pads from their templates,
 * install pad functions, and set up the private state. */
static void
gst_jpeg_parse_init (GstJpegParse * parse, GstJpegParseClass * g_class)
{
  GstPad *sink;

  parse->priv = G_TYPE_INSTANCE_GET_PRIVATE (parse, GST_TYPE_JPEG_PARSE,
      GstJpegParsePrivate);

  /* sink pad: chain, event and setcaps handling */
  sink = gst_pad_new_from_static_template (&gst_jpeg_parse_sink_pad_template,
      "sink");
  gst_pad_set_chain_function (sink, GST_DEBUG_FUNCPTR (gst_jpeg_parse_chain));
  gst_pad_set_event_function (sink,
      GST_DEBUG_FUNCPTR (gst_jpeg_parse_sink_event));
  gst_pad_set_setcaps_function (sink,
      GST_DEBUG_FUNCPTR (gst_jpeg_parse_sink_setcaps));
  gst_element_add_pad (GST_ELEMENT (parse), sink);

  /* src pad: only a getcaps function is needed */
  parse->priv->srcpad =
      gst_pad_new_from_static_template (&gst_jpeg_parse_src_pad_template,
      "src");
  gst_pad_set_getcaps_function (parse->priv->srcpad,
      GST_DEBUG_FUNCPTR (gst_jpeg_parse_src_getcaps));
  gst_element_add_pad (GST_ELEMENT (parse), parse->priv->srcpad);

  parse->priv->next_ts = GST_CLOCK_TIME_NONE;
  parse->priv->adapter = gst_adapter_new ();
}
/* Instance initialiser: set up sink/src pads, create the audio adapter
 * and initialise the goom engine with the default video size. */
static void
gst_goom_init (GstGoom * goom)
{
  /* sink pad: audio input */
  goom->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_chain_function (goom->sinkpad,
      GST_DEBUG_FUNCPTR (gst_goom_chain));
  gst_pad_set_event_function (goom->sinkpad,
      GST_DEBUG_FUNCPTR (gst_goom_sink_event));
  gst_pad_set_setcaps_function (goom->sinkpad,
      GST_DEBUG_FUNCPTR (gst_goom_sink_setcaps));
  gst_element_add_pad (GST_ELEMENT (goom), goom->sinkpad);

  /* src pad: video output */
  goom->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_set_setcaps_function (goom->srcpad,
      GST_DEBUG_FUNCPTR (gst_goom_src_setcaps));
  gst_pad_set_event_function (goom->srcpad,
      GST_DEBUG_FUNCPTR (gst_goom_src_event));
  gst_pad_set_query_function (goom->srcpad,
      GST_DEBUG_FUNCPTR (gst_goom_src_query));
  gst_element_add_pad (GST_ELEMENT (goom), goom->srcpad);

  goom->adapter = gst_adapter_new ();

  /* video defaults */
  goom->width = DEFAULT_WIDTH;
  goom->height = DEFAULT_HEIGHT;
  goom->fps_n = DEFAULT_FPS_N;  /* desired frame rate */
  goom->fps_d = DEFAULT_FPS_D;  /* desired frame rate */

  /* audio state is unknown until caps arrive */
  goom->channels = 0;
  goom->rate = 0;
  goom->duration = 0;

  goom->plugin = goom_init (goom->width, goom->height);
}
/* Start hook of the audio decoder: allocate the adapter, create and
 * initialise the FLAC stream decoder.  Returns FALSE on init failure. */
static gboolean
gst_flac_dec_start (GstAudioDecoder * audio_dec)
{
  GstFlacDec *dec = GST_FLAC_DEC (audio_dec);
  FLAC__StreamDecoderInitStatus init_status;

  dec->adapter = gst_adapter_new ();
  dec->decoder = FLAC__stream_decoder_new ();

  gst_audio_info_init (&dec->info);
  dec->depth = 0;

  /* no point calculating MD5 since it's never checked here */
  FLAC__stream_decoder_set_md5_checking (dec->decoder, false);

  GST_DEBUG_OBJECT (dec, "initializing decoder");
  init_status = FLAC__stream_decoder_init_stream (dec->decoder,
      gst_flac_dec_read_stream, NULL, NULL, NULL, NULL,
      gst_flac_dec_write_stream, gst_flac_dec_metadata_cb,
      gst_flac_dec_error_cb, dec);
  if (init_status != FLAC__STREAM_DECODER_INIT_STATUS_OK) {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), LIBRARY, INIT, (NULL), (NULL));
    return FALSE;
  }

  dec->got_headers = FALSE;

  return TRUE;
}
/* Create a new stream cache mini-object with the given high/low
 * watermarks and an opaque caller context. */
GstAiurStreamCache *
gst_aiur_stream_cache_new (guint64 threshold_max, guint64 threshold_pre,
    void *context)
{
  GstAiurStreamCache *cache;

  cache = (GstAiurStreamCache *)
      gst_mini_object_new (GST_TYPE_AIURSTREAMCACHE);

  cache->pad = NULL;
  cache->adapter = gst_adapter_new ();

  /* synchronisation between producer and consumer */
  cache->mutex = g_mutex_new ();
  cache->consume_cond = g_cond_new ();
  cache->produce_cond = g_cond_new ();

  cache->threshold_max = threshold_max;
  cache->threshold_pre = threshold_pre;

  /* position / bookkeeping state starts at zero */
  cache->start = 0;
  cache->offset = 0;
  cache->ignore_size = 0;

  cache->eos = FALSE;
  cache->seeking = FALSE;
  cache->closed = FALSE;

  cache->context = context;

  return cache;
}
/* Instance initialiser: create the pads, the GSM encoder state
 * (with WAV49 disabled) and the input adapter. */
static void
gst_gsmenc_init (GstGSMEnc * gsmenc)
{
  gint wav49 = 0;               /* turn off WAV49 handling */

  /* sink pad */
  gsmenc->sinkpad =
      gst_pad_new_from_static_template (&gsmenc_sink_template, "sink");
  gst_pad_set_chain_function (gsmenc->sinkpad, gst_gsmenc_chain);
  gst_pad_set_setcaps_function (gsmenc->sinkpad, gst_gsmenc_setcaps);
  gst_element_add_pad (GST_ELEMENT (gsmenc), gsmenc->sinkpad);

  /* src pad */
  gsmenc->srcpad =
      gst_pad_new_from_static_template (&gsmenc_src_template, "src");
  gst_element_add_pad (GST_ELEMENT (gsmenc), gsmenc->srcpad);

  gsmenc->state = gsm_create ();
  gsm_option (gsmenc->state, GSM_OPT_WAV49, &wav49);

  gsmenc->adapter = gst_adapter_new ();
  gsmenc->next_ts = 0;
}
/* Instance initialiser: wire up the pads and reset the audio/video
 * state to its defaults. */
static void
gst_monoscope_init (GstMonoscope * monoscope)
{
  monoscope->sinkpad =
      gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_chain_function (monoscope->sinkpad,
      GST_DEBUG_FUNCPTR (gst_monoscope_chain));
  gst_pad_set_event_function (monoscope->sinkpad,
      GST_DEBUG_FUNCPTR (gst_monoscope_sink_event));
  gst_element_add_pad (GST_ELEMENT (monoscope), monoscope->sinkpad);

  monoscope->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_set_event_function (monoscope->srcpad,
      GST_DEBUG_FUNCPTR (gst_monoscope_src_event));
  gst_element_add_pad (GST_ELEMENT (monoscope), monoscope->srcpad);

  monoscope->adapter = gst_adapter_new ();
  monoscope->next_ts = GST_CLOCK_TIME_NONE;
  monoscope->bps = sizeof (gint16);

  /* reset the initial video state */
  monoscope->width = 256;
  monoscope->height = 128;
  monoscope->fps_num = 25;      /* desired frame rate */
  monoscope->fps_denom = 1;
  monoscope->visstate = NULL;

  /* reset the initial audio state */
  monoscope->rate = GST_AUDIO_DEF_RATE;
}
/* Buffer *read bytes from *buf into *adapter (creating the adapter on
 * first use) and advance the caller's buffer pointers.
 *
 * If fewer than *read bytes are available in *bufsize, records where
 * the next read should continue via *next_start / *next_size and
 * returns META_PARSING_NEED_MORE_DATA; otherwise copies the data into
 * the adapter, consumes it from the caller's buffer and returns
 * META_PARSING_DONE.
 *
 * Fix: the result variable was declared `int`; it now uses the
 * function's own MetadataParsingReturn type for consistency. */
MetadataParsingReturn
metadataparse_util_hold_chunk (guint32 * read, guint8 ** buf,
    guint32 * bufsize, guint8 ** next_start, guint32 * next_size,
    GstAdapter ** adapter)
{
  MetadataParsingReturn ret;

  if (*read > *bufsize) {
    /* not enough data yet: tell the caller where to resume */
    *next_start = *buf;
    *next_size = *read;
    ret = META_PARSING_NEED_MORE_DATA;
  } else {
    GstBuffer *gst_buf;

    /* lazily create the adapter on first use */
    if (NULL == *adapter) {
      *adapter = gst_adapter_new ();
    }

    gst_buf = gst_buffer_new_and_alloc (*read);
    memcpy (GST_BUFFER_DATA (gst_buf), *buf, *read);
    gst_adapter_push (*adapter, gst_buf);

    /* consume the copied bytes from the caller's buffer */
    *next_start = *buf + *read;
    *buf += *read;
    *bufsize -= *read;
    *read = 0;
    ret = META_PARSING_DONE;
  }

  return ret;
}
static GstAdapter * create_and_fill_adapter (void) { GstAdapter *adapter; gint i, j; adapter = gst_adapter_new (); fail_unless (adapter != NULL); for (i = 0; i < 10000; i += 4) { GstBuffer *buf = gst_buffer_new_and_alloc (sizeof (guint32) * 4); guint8 *data; fail_unless (buf != NULL); data = GST_BUFFER_DATA (buf); for (j = 0; j < 4; j++) { GST_WRITE_UINT32_LE (data, i + j); data += sizeof (guint32); } gst_adapter_push (adapter, buf); } return adapter; }
/* Feed an adapter with fixed-size writes, then drain it with a
 * (possibly different) fixed read size until it runs dry. */
static void
run_test_take_buffer (struct TestParams *params)
{
  GstAdapter *adapter = gst_adapter_new ();
  GstBuffer *buf;
  gint writes = params->tot_size / params->write_size;
  int n;

  /* fill phase: push zero-filled buffers of write_size bytes */
  for (n = 0; n < writes; n++) {
    buf = gst_buffer_new_and_alloc (params->write_size);
    memset (GST_BUFFER_DATA (buf), 0, params->write_size);
    gst_adapter_push (adapter, buf);
  }

  /* drain phase: take read_size chunks until the adapter is empty */
  for (;;) {
    buf = gst_adapter_take_buffer (adapter, params->read_size);
    if (buf == NULL)
      break;
    gst_buffer_unref (buf);
  }

  g_object_unref (adapter);
}
/* Instance initialiser: set up fixed-caps src pad and sink pad with
 * their handlers, then create the adapter and state mutex. */
static void
gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass)
{
  /* src pad */
  mpeg_dec->src = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_use_fixed_caps (mpeg_dec->src);
  gst_pad_set_event_function (mpeg_dec->src,
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_event));
  gst_pad_set_query_function (mpeg_dec->src,
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_query));
  gst_pad_set_query_type_function (mpeg_dec->src,
      GST_DEBUG_FUNCPTR (gst_mpeg_dec_get_querytypes));
  gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->src);

  /* sink pad */
  mpeg_dec->sink = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_setcaps_function (mpeg_dec->sink,
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_set_caps));
  gst_pad_set_chain_function (mpeg_dec->sink,
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_chain));
  gst_pad_set_event_function (mpeg_dec->sink,
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_sink_event));
  gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->sink);

  mpeg_dec->adapter = gst_adapter_new ();
  mpeg_dec->mutex = g_mutex_new ();
}
GstInterSurface * gst_inter_surface_get (const char *name) { GList *g; GstInterSurface *surface; g_static_mutex_lock (&mutex); for (g = list; g; g = g_list_next (g)) { surface = (GstInterSurface *) g->data; if (strcmp (name, surface->name) == 0) { g_static_mutex_unlock (&mutex); return surface; } } surface = g_malloc0 (sizeof (GstInterSurface)); surface->name = g_strdup (name); surface->mutex = g_mutex_new (); surface->audio_adapter = gst_adapter_new (); list = g_list_append (list, surface); g_static_mutex_unlock (&mutex); return surface; }
/* Instance initialiser: create sink/src pads with their handlers and
 * reset the GL-related state to safe defaults. */
static void
gst_visual_gl_init (GstVisualGL * visual)
{
  /* sink pad: audio input */
  visual->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_setcaps_function (visual->sinkpad, gst_visual_gl_sink_setcaps);
  gst_pad_set_chain_function (visual->sinkpad, gst_visual_gl_chain);
  gst_pad_set_event_function (visual->sinkpad, gst_visual_gl_sink_event);
  gst_element_add_pad (GST_ELEMENT (visual), visual->sinkpad);

  /* src pad: video output */
  visual->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_set_setcaps_function (visual->srcpad, gst_visual_gl_src_setcaps);
  gst_pad_set_getcaps_function (visual->srcpad, gst_visual_gl_getcaps);
  gst_pad_set_event_function (visual->srcpad, gst_visual_gl_src_event);
  gst_pad_set_query_function (visual->srcpad, gst_visual_gl_src_query);
  gst_element_add_pad (GST_ELEMENT (visual), visual->srcpad);

  visual->adapter = gst_adapter_new ();

  /* GL resources are created later */
  visual->actor = NULL;
  visual->display = NULL;
  visual->fbo = 0;
  visual->depthbuffer = 0;
  visual->midtexture = 0;

  /* default GL state to restore around rendering */
  visual->is_enabled_gl_depth_test = GL_FALSE;
  visual->gl_depth_func = GL_LESS;
  visual->is_enabled_gl_blend = GL_FALSE;
  visual->gl_blend_src_alpha = GL_ONE;
}
/* Instance initialiser (0.10 API): create the pads from the class
 * templates, install their functions, and set up segment, adapter and
 * frame-size state. */
static void
gst_wildmidi_init (GstWildmidi * filter, GstWildmidiClass * g_class)
{
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (filter);

  /* sink pad: supports both pull- and push-mode activation */
  filter->sinkpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "sink"), "sink");
  gst_pad_set_activatepull_function (filter->sinkpad,
      gst_wildmidi_activatepull);
  gst_pad_set_activate_function (filter->sinkpad, gst_wildmidi_activate);
  gst_pad_set_event_function (filter->sinkpad, gst_wildmidi_sink_event);
  gst_pad_set_chain_function (filter->sinkpad, gst_wildmidi_chain);
  gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);

  /* src pad: fixed caps audio output */
  filter->srcpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "src"), "src");
  gst_pad_set_query_function (filter->srcpad, gst_wildmidi_src_query);
  gst_pad_set_event_function (filter->srcpad, gst_wildmidi_src_event);
  gst_pad_use_fixed_caps (filter->srcpad);
  gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);

  gst_segment_init (filter->o_segment, GST_FORMAT_DEFAULT);

  filter->adapter = gst_adapter_new ();
  filter->bytes_per_frame = WILDMIDI_BPS;
}
/* Instance initialiser: pick the default fragmentation mode and create
 * the adapter that accumulates incoming H.263+ data. */
static void
gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
{
  rtph263ppay->fragmentation_mode = DEFAULT_FRAGMENTATION_MODE;
  rtph263ppay->adapter = gst_adapter_new ();
}
/* Instance initialiser (1.0 API): create pads from the static
 * factories, install their functions, and set up segment, adapter and
 * property defaults. */
static void
gst_wildmidi_init (GstWildmidi * filter)
{
  /* sink pad: supports both scheduling modes */
  filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
  gst_pad_set_activatemode_function (filter->sinkpad,
      gst_wildmidi_activatemode);
  gst_pad_set_activate_function (filter->sinkpad, gst_wildmidi_activate);
  gst_pad_set_event_function (filter->sinkpad, gst_wildmidi_sink_event);
  gst_pad_set_chain_function (filter->sinkpad, gst_wildmidi_chain);
  gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);

  /* src pad: fixed caps audio output */
  filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
  gst_pad_set_query_function (filter->srcpad, gst_wildmidi_src_query);
  gst_pad_set_event_function (filter->srcpad, gst_wildmidi_src_event);
  gst_pad_use_fixed_caps (filter->srcpad);
  gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);

  gst_segment_init (filter->o_segment, GST_FORMAT_DEFAULT);

  filter->adapter = gst_adapter_new ();
  filter->bytes_per_frame = WILDMIDI_BPS;

  /* property defaults */
  filter->high_quality = DEFAULT_HIGH_QUALITY;
  filter->linear_volume = DEFAULT_LINEAR_VOLUME;
}
/**
 * @brief Initialize tensor_aggregator element: set up proxy-caps
 * sink/src pads, property defaults and the frame adapter, then reset.
 */
static void
gst_tensor_aggregator_init (GstTensorAggregator * self)
{
  /** setup sink pad */
  self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_event_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tensor_aggregator_sink_event));
  gst_pad_set_query_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tensor_aggregator_sink_query));
  gst_pad_set_chain_function (self->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tensor_aggregator_chain));
  GST_PAD_SET_PROXY_CAPS (self->sinkpad);
  gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);

  /** setup src pad */
  self->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_set_query_function (self->srcpad,
      GST_DEBUG_FUNCPTR (gst_tensor_aggregator_src_query));
  GST_PAD_SET_PROXY_CAPS (self->srcpad);
  gst_element_add_pad (GST_ELEMENT (self), self->srcpad);

  /** init properties */
  self->silent = DEFAULT_SILENT;
  self->frames_in = DEFAULT_FRAMES_IN;
  self->frames_out = DEFAULT_FRAMES_OUT;
  self->frames_flush = DEFAULT_FRAMES_FLUSH;
  self->frames_dim = DEFAULT_FRAMES_DIMENSION;
  self->concat = DEFAULT_CONCAT;

  self->adapter = gst_adapter_new ();
  gst_tensor_aggregator_reset (self);
}
/* Instance initialiser: create sink/src pads with their handlers and
 * set the encoder property defaults.
 *
 * Fix: the sink pad's chain function was previously installed AFTER
 * gst_element_add_pad(); pad functions must be set before the pad is
 * added to the element, since the pad may become active (and receive
 * data) as soon as it is added. */
static void gst_sbc_enc_init(GstSbcEnc *self, GstSbcEncClass *klass)
{
	self->sinkpad = gst_pad_new_from_static_template(
		&sbc_enc_sink_factory, "sink");
	gst_pad_set_setcaps_function(self->sinkpad,
			GST_DEBUG_FUNCPTR(sbc_enc_sink_setcaps));
	gst_pad_set_chain_function(self->sinkpad,
			GST_DEBUG_FUNCPTR(sbc_enc_chain));
	gst_element_add_pad(GST_ELEMENT(self), self->sinkpad);

	self->srcpad = gst_pad_new_from_static_template(
		&sbc_enc_src_factory, "src");
	gst_pad_set_getcaps_function(self->srcpad,
			GST_DEBUG_FUNCPTR(sbc_enc_src_getcaps));
	gst_pad_set_setcaps_function(self->srcpad,
			GST_DEBUG_FUNCPTR(sbc_enc_src_setcaps));
	gst_element_add_pad(GST_ELEMENT(self), self->srcpad);

	/* encoder property defaults */
	self->subbands = SBC_ENC_DEFAULT_SUB_BANDS;
	self->blocks = SBC_ENC_DEFAULT_BLOCKS;
	self->mode = SBC_ENC_DEFAULT_MODE;
	self->allocation = SBC_ENC_DEFAULT_ALLOCATION;
	self->rate = SBC_ENC_DEFAULT_RATE;
	self->channels = SBC_ENC_DEFAULT_CHANNELS;
	self->bitpool = SBC_ENC_BITPOOL_AUTO;

	/* computed once caps are negotiated */
	self->frame_length = 0;
	self->frame_duration = 0;

	self->adapter = gst_adapter_new();
}
/* Push-mode chain function: accumulate incoming buffers in an adapter
 * and push out one complete Wavpack chunk at a time.  Creates the
 * adapter lazily and the src pad from the first complete chunk.
 * Returns GST_FLOW_OK while waiting for more data. */
static GstFlowReturn
gst_wavpack_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstWavpackParse *wvparse = GST_WAVPACK_PARSE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  WavpackHeader wph;
  const guint8 *tmp_buf;

  /* lazily create the adapter on the first buffer */
  if (!wvparse->adapter) {
    wvparse->adapter = gst_adapter_new ();
  }

  /* on a discontinuity, drop any partial chunk and remember the flag */
  if (GST_BUFFER_IS_DISCONT (buf)) {
    gst_adapter_clear (wvparse->adapter);
    wvparse->discont = TRUE;
  }

  gst_adapter_push (wvparse->adapter, buf);

  /* wait until at least one full header is buffered */
  if (gst_adapter_available (wvparse->adapter) < sizeof (WavpackHeader))
    return ret;

  /* resync the adapter to a frame boundary; wait for more data until
   * that succeeds (presumably drops leading junk — see resync impl) */
  if (!gst_wavpack_parse_resync_adapter (wvparse->adapter))
    return ret;

  tmp_buf = gst_adapter_peek (wvparse->adapter, sizeof (WavpackHeader));
  gst_wavpack_read_header (&wph, (guint8 *) tmp_buf);

  /* NOTE(review): "+ 4 * 1 + 4" apparently accounts for header bytes
   * not covered by ckSize — confirm against the Wavpack format spec */
  while (gst_adapter_available (wvparse->adapter) >= wph.ckSize + 4 * 1 + 4) {
    GstBuffer *outbuf =
        gst_adapter_take_buffer (wvparse->adapter, wph.ckSize + 4 * 1 + 4);

    if (!outbuf)
      return GST_FLOW_ERROR;

    /* the src pad is created from the first complete chunk's header */
    if (wvparse->srcpad == NULL) {
      if (!gst_wavpack_parse_create_src_pad (wvparse, outbuf, &wph)) {
        GST_ERROR_OBJECT (wvparse, "Failed to create src pad");
        ret = GST_FLOW_ERROR;
        break;
      }
    }

    ret = gst_wavpack_parse_push_buffer (wvparse, outbuf, &wph);
    if (ret != GST_FLOW_OK)
      break;

    /* peek the next chunk's header if enough data remains; stop if the
     * adapter cannot be resynced to another frame boundary */
    if (gst_adapter_available (wvparse->adapter) >= sizeof (WavpackHeader)) {
      tmp_buf = gst_adapter_peek (wvparse->adapter, sizeof (WavpackHeader));
      if (!gst_wavpack_parse_resync_adapter (wvparse->adapter))
        break;
      gst_wavpack_read_header (&wph, (guint8 *) tmp_buf);
    }
  }

  return ret;
}
/* Instance initialiser: select the static MPA payload type and create
 * the adapter that accumulates incoming MPEG audio. */
static void
gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay)
{
  GST_RTP_BASE_PAYLOAD (rtpmpapay)->pt = GST_RTP_PAYLOAD_MPA;
  rtpmpapay->adapter = gst_adapter_new ();
}
/* One-time init of the single global surface: zero-allocated struct
 * plus its mutex and audio adapter. */
void
gst_inter_surface_init (void)
{
  surface = g_malloc0 (sizeof (GstInterSurface));

  surface->mutex = g_mutex_new ();
  surface->audio_adapter = gst_adapter_new ();
}
/* State-change handler: allocate the adapter when going READY->PAUSED
 * (before data flows) and release it again when going back down. */
static GstStateChangeReturn
gst_vcd_parse_change_state (GstElement * element, GstStateChange transition)
{
  GstVcdParse *vcd = GST_VCD_PARSE (element);
  GstStateChangeReturn res;

  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
    vcd->adapter = gst_adapter_new ();

  res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY
      || transition == GST_STATE_CHANGE_READY_TO_NULL) {
    if (vcd->adapter) {
      g_object_unref (vcd->adapter);
      vcd->adapter = NULL;
    }
  }

  return res;
}
/* Instance initialiser: create the fixed-caps src pad, the collect-pads
 * helper, the TS muxer core and all default muxing state. */
static void
mpegtsmux_init (MpegTsMux * mux, MpegTsMuxClass * g_class)
{
  mux->srcpad =
      gst_pad_new_from_static_template (&mpegtsmux_src_factory, "src");
  gst_pad_use_fixed_caps (mux->srcpad);
  gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);

  /* sink pads are request pads gathered through collectpads */
  mux->collect = gst_collect_pads_new ();
  gst_collect_pads_set_function (mux->collect,
      (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (mpegtsmux_collected), mux);

  /* core TS muxer writes packets back through new_packet_cb */
  mux->tsmux = tsmux_new ();
  tsmux_set_write_func (mux->tsmux, new_packet_cb, mux);

  mux->programs = g_new0 (TsMuxProgram *, MAX_PROG_NUMBER);

  mux->first = TRUE;
  mux->last_flow_ret = GST_FLOW_OK;
  mux->adapter = gst_adapter_new ();
  mux->m2ts_mode = FALSE;
  mux->pat_interval = TSMUX_DEFAULT_PAT_INTERVAL;
  mux->pmt_interval = TSMUX_DEFAULT_PMT_INTERVAL;
  mux->first_pcr = TRUE;
  mux->last_ts = 0;
  mux->is_delta = TRUE;

  mux->prog_map = NULL;
  mux->streamheader = NULL;
  mux->streamheader_sent = FALSE;
}
static GstAdapter * create_and_fill_adapter (void) { GstAdapter *adapter; gint i, j; adapter = gst_adapter_new (); fail_unless (adapter != NULL); for (i = 0; i < 10000; i += 4) { GstBuffer *buf; GstMapInfo info; guint8 *ptr; buf = gst_buffer_new_and_alloc (sizeof (guint32) * 4); fail_unless (buf != NULL); fail_unless (gst_buffer_map (buf, &info, GST_MAP_WRITE)); ptr = info.data; for (j = 0; j < 4; j++) { GST_WRITE_UINT32_LE (ptr, i + j); ptr += sizeof (guint32); } gst_buffer_unmap (buf, &info); gst_adapter_push (adapter, buf); } return adapter; }
/* Instance initialiser: configure the 90 kHz MPEG-TS clock rate and
 * payload type, and create the input adapter. */
static void
gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay)
{
  rtpmp2tpay->adapter = gst_adapter_new ();

  GST_RTP_BASE_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
  GST_RTP_BASE_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;
}