/* Streaming chain function: accumulate incoming buffers in an adapter,
 * re-sync to the "wvpk" frame marker and push out one complete WavPack
 * frame (header + payload) at a time.
 *
 * Fix: in the loop tail the next header was peeked *before* calling
 * gst_wavpack_parse_resync_adapter(). The resync may flush bytes from
 * the adapter, which both invalidates the previously peeked pointer and
 * moves the header to a new position — so the peek must happen after a
 * successful resync (and only if a full header is still available).
 */
static GstFlowReturn
gst_wavpack_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstWavpackParse *wvparse = GST_WAVPACK_PARSE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  WavpackHeader wph;
  const guint8 *tmp_buf;

  /* lazily create the adapter */
  if (!wvparse->adapter) {
    wvparse->adapter = gst_adapter_new ();
  }

  /* on a discontinuity, drop any partial frame we were collecting */
  if (GST_BUFFER_IS_DISCONT (buf)) {
    gst_adapter_clear (wvparse->adapter);
    wvparse->discont = TRUE;
  }

  gst_adapter_push (wvparse->adapter, buf);

  if (gst_adapter_available (wvparse->adapter) < sizeof (WavpackHeader))
    return ret;

  /* make sure the adapter starts at a "wvpk" marker */
  if (!gst_wavpack_parse_resync_adapter (wvparse->adapter))
    return ret;

  tmp_buf = gst_adapter_peek (wvparse->adapter, sizeof (WavpackHeader));
  gst_wavpack_read_header (&wph, (guint8 *) tmp_buf);

  /* ckSize counts from byte 8 of the frame, so a whole frame is
   * ckSize + 8 bytes */
  while (gst_adapter_available (wvparse->adapter) >= wph.ckSize + 4 * 1 + 4) {
    GstBuffer *outbuf =
        gst_adapter_take_buffer (wvparse->adapter, wph.ckSize + 4 * 1 + 4);

    if (!outbuf)
      return GST_FLOW_ERROR;

    if (wvparse->srcpad == NULL) {
      if (!gst_wavpack_parse_create_src_pad (wvparse, outbuf, &wph)) {
        GST_ERROR_OBJECT (wvparse, "Failed to create src pad");
        ret = GST_FLOW_ERROR;
        break;
      }
    }

    ret = gst_wavpack_parse_push_buffer (wvparse, outbuf, &wph);
    if (ret != GST_FLOW_OK)
      break;

    if (gst_adapter_available (wvparse->adapter) >= sizeof (WavpackHeader)) {
      /* resync first: it may flush bytes, which would invalidate any
       * pointer peeked beforehand */
      if (!gst_wavpack_parse_resync_adapter (wvparse->adapter))
        break;
      /* the resync may have flushed enough that no full header is left */
      if (gst_adapter_available (wvparse->adapter) < sizeof (WavpackHeader))
        break;
      tmp_buf = gst_adapter_peek (wvparse->adapter, sizeof (WavpackHeader));
      gst_wavpack_read_header (&wph, (guint8 *) tmp_buf);
    }
  }
  return ret;
}
/* Position the adapter so that its contents start with a "wvpk" frame
 * marker. Returns TRUE when the marker is now at the front. Bytes that
 * cannot possibly start a marker are flushed; the last 4 bytes are
 * always kept because they may be a marker split across buffers. */
static gboolean
gst_wavpack_parse_resync_adapter (GstAdapter * adapter)
{
  const guint8 *data;
  const guint8 *found;
  guint size = gst_adapter_available (adapter);

  if (size < 4)
    return FALSE;

  /* cheap check first: is the marker already at the front? */
  data = gst_adapter_peek (adapter, 4);
  if (memcmp (data, "wvpk", 4) == 0)
    return TRUE;

  if (size == 4)
    return FALSE;

  /* otherwise scan everything we have buffered for the marker */
  data = gst_adapter_peek (adapter, size);
  if (data != NULL) {
    found = gst_wavpack_parse_find_marker ((guint8 *) data, size);
    if (found != NULL) {
      gst_adapter_flush (adapter, found - data);
      return TRUE;
    }
  }

  /* no marker found: keep only the last 4 bytes, which could be the
   * beginning of a marker completed by the next incoming buffer */
  gst_adapter_flush (adapter, size - 4);
  return FALSE;
}
/* Locate the next "--<boundary>" marker in the adapter contents.
 *
 * Returns the byte offset of the marker from the start of the adapter
 * (or MULTIPART_NEED_MORE_DATA when it is not buffered yet) and stores
 * in @datalen the length of the part body that precedes it, with any
 * trailing CR/LF stripped. */
static gint
multipart_find_boundary (GstMultipartDemux * multipart, gint * datalen)
{
  /* Adaptor is positioned at the start of the data */
  const guint8 *data, *pos;
  const guint8 *dataend;
  gint len;

  if (multipart->content_length >= 0) {
    /* fast path, known content length :) */
    len = multipart->content_length;
    /* need the body plus at least the newline that follows it */
    if (gst_adapter_available (multipart->adapter) >= len + 2) {
      *datalen = len;
      data = gst_adapter_peek (multipart->adapter, len + 1);

      /* If data[len] contains \r then assume a newline is \r\n */
      if (data[len] == '\r')
        len += 2;
      else if (data[len] == '\n')
        len += 1;
      /* Don't check if boundary is actually there, but let the header parsing
       * bail out if it isn't */
      return len;
    } else {
      /* need more data */
      return MULTIPART_NEED_MORE_DATA;
    }
  }

  /* slow path: scan for "--<boundary>" byte by byte */
  len = gst_adapter_available (multipart->adapter);
  if (len == 0)
    return MULTIPART_NEED_MORE_DATA;
  data = gst_adapter_peek (multipart->adapter, len);
  dataend = data + len;

  /* resume where the previous (unsuccessful) scan stopped (scanpos) */
  for (pos = data + multipart->scanpos;
      pos <= dataend - multipart->boundary_len - 2; pos++) {
    if (*pos == '-' && pos[1] == '-' &&
        !strncmp ((gchar *) pos + 2,
            multipart->boundary, multipart->boundary_len)) {
      /* Found the boundary! Check if there was a newline before the boundary */
      len = pos - data;
      if (pos - 2 > data && pos[-2] == '\r')
        len -= 2;
      else if (pos - 1 > data && pos[-1] == '\n')
        len -= 1;
      *datalen = len;

      multipart->scanpos = 0;
      return pos - data;
    }
  }
  /* remember how far we scanned so the next call does not rescan */
  multipart->scanpos = pos - data;
  return MULTIPART_NEED_MORE_DATA;
}
/* Consume an 8-byte chunk header (little-endian fourcc followed by a
 * big-endian length) from the adapter and point chunk->data at as much
 * of the payload as is contiguously available. Returns FALSE when fewer
 * than 8 bytes are buffered. */
static gboolean
get_next_chunk (GstAdapter *adapter, Chunk *chunk)
{
  const guint8 *header;
  guint fast;

  header = gst_adapter_peek (adapter, 8);
  if (header == NULL)
    return FALSE;

  chunk->fourcc = GST_READ_UINT32_LE (header);
  chunk->length = GST_READ_UINT32_BE (header + 4);
  gst_adapter_flush (adapter, 8);

  /* expose only the contiguous head of the payload; more may follow in
   * later adapter segments */
  fast = gst_adapter_available_fast (adapter);
  chunk->data = gst_adapter_peek (adapter, fast);
  chunk->available = fast;
  return TRUE;
}
/* Take @nbytes from the front of @adapter into a freshly allocated
 * buffer; the caller owns the result and must g_free() it. Returns NULL
 * when the adapter does not hold @nbytes.
 *
 * Fixes: the locals were never declared and the function fell off the
 * end without returning the copied data — undefined behavior for a
 * non-void function whose value is used. Also guard against a short
 * adapter, where gst_adapter_peek() returns NULL. */
guint8 *
gst_adapter_take (GstAdapter * adapter, guint nbytes)
{
  const guint8 *cdata;
  guint8 *data;

  cdata = gst_adapter_peek (adapter, nbytes);
  if (cdata == NULL)
    return NULL;

  data = g_malloc (nbytes);
  memcpy (data, cdata, nbytes);
  gst_adapter_flush (adapter, nbytes);

  return data;
}
/* Sphinx audio-device read callback: copy one frame's worth of samples
 * from the sink's adapter into @buf and report the sample count.
 *
 * Fix: gst_adapter_peek() returns NULL when fewer than
 * REQUIRED_FRAME_BYTES are buffered, and memcpy from NULL is undefined
 * behavior — report "no data available" (0 samples) instead.
 * NOTE(review): @max is ignored; callers appear to always provide room
 * for REQUIRED_FRAME_SAMPLES — confirm against the call sites. */
int32 gst_sphinx_sink_ad_read(ad_rec_t *ad, int16 *buf, int32 max)
{
  GstSphinxSink *sphinxsink = GST_SINK (((GstSphinxSinkAd *)ad)->self);
  const guint8 *data;

  data = gst_adapter_peek (sphinxsink->adapter, REQUIRED_FRAME_BYTES);
  if (data == NULL)
    return 0;

  memcpy ((void *)buf, data, REQUIRED_FRAME_BYTES);
  return REQUIRED_FRAME_SAMPLES;
}
/* Expose @size bytes from the front of the parse adapter without
 * consuming them. Returns FALSE (and leaves *data untouched) when not
 * enough bytes are buffered yet. */
static gboolean
gst_aiffparse_peek_data (AIFFParse * aiff, guint32 size, const guint8 ** data)
{
  gboolean have_enough;

  have_enough = (gst_adapter_available (aiff->adapter) >= size);
  if (have_enough)
    *data = gst_adapter_peek (aiff->adapter, size);

  return have_enough;
}
/* Slice the adapter contents into fixed-size audio units and push each
 * one downstream, stamped with a timestamp derived from demux->offset. */
static GstFlowReturn
gst_real_audio_demux_parse_data (GstRealAudioDemux * demux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail, unit_size;

  avail = gst_adapter_available (demux->adapter);

  /* without a known packet size, emit the largest 16-byte-aligned slab */
  if (demux->packet_size > 0)
    unit_size = demux->packet_size;
  else
    unit_size = avail & 0xfffffff0;     /* round down to next multiple of 16 */

  GST_LOG_OBJECT (demux, "available = %u, unit_size = %u", avail, unit_size);

  while (ret == GST_FLOW_OK && unit_size > 0 && avail >= unit_size) {
    GstClockTime ts;
    const guint8 *data;
    GstBuffer *buf = NULL;

    buf = gst_buffer_new_and_alloc (unit_size);
    gst_buffer_set_caps (buf, GST_PAD_CAPS (demux->srcpad));

    /* copy one unit out of the adapter and consume it */
    data = gst_adapter_peek (demux->adapter, unit_size);
    memcpy (GST_BUFFER_DATA (buf), data, unit_size);
    gst_adapter_flush (demux->adapter, unit_size);

    avail -= unit_size;

    /* the pending newsegment must precede the first buffer */
    if (demux->need_newsegment) {
      gst_pad_push_event (demux->srcpad,
          gst_event_new_new_segment_full (FALSE, demux->segment.rate,
              demux->segment.applied_rate, GST_FORMAT_TIME,
              demux->segment.start, demux->segment.stop,
              demux->segment.time));
      demux->need_newsegment = FALSE;
    }

    if (demux->pending_tags) {
      gst_element_found_tags_for_pad (GST_ELEMENT (demux), demux->srcpad,
          demux->pending_tags);
      demux->pending_tags = NULL;
    }

    if (demux->fourcc == GST_RM_AUD_DNET) {
      buf = gst_rm_utils_descramble_dnet_buffer (buf);
    }

    /* NOTE(review): demux->offset is not advanced anywhere in this loop,
     * so every unit gets the same timestamp unless the offset is updated
     * elsewhere in the element — TODO confirm */
    ts = gst_real_demux_get_timestamp_from_offset (demux, demux->offset);
    GST_BUFFER_TIMESTAMP (buf) = ts;

    gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME, ts);

    ret = gst_pad_push (demux->srcpad, buf);
  }

  return ret;
}
/* PocketSphinx audio-device read callback: copy one frame's worth of
 * samples from the element's adapter into @buf.
 *
 * Fix: gst_adapter_peek() returns NULL when fewer than
 * REQUIRED_FRAME_BYTES are buffered, and memcpy from NULL is undefined
 * behavior — report "no data available" (0 samples) instead.
 * NOTE(review): @max is ignored; callers appear to always provide room
 * for REQUIRED_FRAME_SAMPLES — confirm against the call sites. */
int32 gst_pocketsphinx_ad_read(ad_rec_t *ad, int16 *buf, int32 max)
{
  GstPocketSphinx *sphinxsink = GST_POCKETSPHINX (((GstSphinxSinkAd *)ad)->self);
  const guint8 *data;

  data = gst_adapter_peek (sphinxsink->adapter, REQUIRED_FRAME_BYTES);
  if (data == NULL)
    return 0;

  memcpy ((void *)buf, data, REQUIRED_FRAME_BYTES);
  return REQUIRED_FRAME_SAMPLES;
}
/* Collect the bytes of a JPEG APP13 (PhotoShop/IPTC) chunk into the
 * shared IPTC adapter and, once the chunk is complete, trim the adapter
 * down to exactly the embedded IPTC record. */
static MetadataParsingReturn
metadataparse_jpeg_iptc (JpegParseData * jpeg_data, guint8 ** buf,
    guint32 * bufsize, guint8 ** next_start, guint32 * next_size)
{
  int ret;

  /* accumulate this chunk's bytes; returns DONE once it is complete */
  ret = metadataparse_util_hold_chunk (&jpeg_data->read, buf,
      bufsize, next_start, next_size, jpeg_data->iptc_adapter);

  if (ret == META_PARSING_DONE) {
    /* NOTE: this 'buf' intentionally shadows the parameter above */
    const guint8 *buf;
    guint32 size;
    unsigned int iptc_len;
    int res;

    jpeg_data->state = JPEG_PARSE_READING;

    size = gst_adapter_available (*jpeg_data->iptc_adapter);
    buf = gst_adapter_peek (*jpeg_data->iptc_adapter, size);

    /* FIXME: currently we are throwing away others PhotoShop data */
    res = iptc_jpeg_ps3_find_iptc (buf, size, &iptc_len);

    if (res < 0) {
      /* error */
      ret = META_PARSING_ERROR;
    } else if (res == 0) {
      /* no iptc data found */
      gst_adapter_clear (*jpeg_data->iptc_adapter);
    } else {
      /* drop the PhotoShop wrapper bytes before the IPTC record */
      gst_adapter_flush (*jpeg_data->iptc_adapter, res);
      size = gst_adapter_available (*jpeg_data->iptc_adapter);
      if (size > iptc_len) {
        GstBuffer *buf;

        /* keep exactly iptc_len bytes: take them out, clear the rest,
         * then push the kept buffer back */
        buf = gst_adapter_take_buffer (*jpeg_data->iptc_adapter, iptc_len);
        gst_adapter_clear (*jpeg_data->iptc_adapter);
        gst_adapter_push (*jpeg_data->iptc_adapter, buf);
      }
    }

    /* if there is a second Iptc chunk in the file it will be jumped */
    jpeg_data->iptc_adapter = NULL;
  }

  return ret;
}
/* SBC encoder chain function: buffer raw PCM in the adapter and emit
 * one encoded SBC frame per enc->codesize bytes of input. */
static GstFlowReturn sbc_enc_chain(GstPad *pad, GstBuffer *buffer)
{
	GstSbcEnc *enc = GST_SBC_ENC(gst_pad_get_parent(pad));
	GstAdapter *adapter = enc->adapter;
	GstFlowReturn res = GST_FLOW_OK;

	/* adapter takes ownership of buffer */
	gst_adapter_push(adapter, buffer);

	while (gst_adapter_available(adapter) >= enc->codesize &&
			res == GST_FLOW_OK) {
		GstBuffer *output;
		GstCaps *caps;
		const guint8 *data;
		gint consumed;

		caps = GST_PAD_CAPS(enc->srcpad);
		res = gst_pad_alloc_buffer_and_set_caps(enc->srcpad,
				GST_BUFFER_OFFSET_NONE,
				enc->frame_length, caps,
				&output);
		if (res != GST_FLOW_OK)
			goto done;

		data = gst_adapter_peek(adapter, enc->codesize);

		consumed = sbc_encode(&enc->sbc, (gpointer) data,
					enc->codesize,
					GST_BUFFER_DATA(output),
					GST_BUFFER_SIZE(output), NULL);
		if (consumed <= 0) {
			GST_DEBUG_OBJECT(enc, "comsumed < 0, codesize: %d",
					enc->codesize);
			break;
		}
		gst_adapter_flush(adapter, consumed);

		/* NOTE(review): 'buffer' was handed to the adapter above; it
		 * stays alive only while the adapter holds a ref, and its
		 * timestamp is reused for every frame produced from it —
		 * TODO confirm this is intended */
		GST_BUFFER_TIMESTAMP(output) = GST_BUFFER_TIMESTAMP(buffer);
		/* we have only 1 frame */
		GST_BUFFER_DURATION(output) = enc->frame_duration;

		res = gst_pad_push(enc->srcpad, output);
		if (res != GST_FLOW_OK)
			goto done;
	}

done:
	gst_object_unref(enc);
	return res;
}
/* Produce a mono scope snapshot of PARSER_SAMPLES samples, picked from
 * the adapter at an offset estimated from how far playback (the sink's
 * position) lags behind the newest buffered audio. Returns the internal
 * parser->scope array, or NULL when the channel count is unknown. */
gint16 *
scope_parser_poll_scope(ScopeParser *parser)
{
    GstBuffer *first_buffer, *last_buffer;
    GstFormat format = GST_FORMAT_TIME;
    guint64 first_stamp, last_stamp;
    gint64 sink_stamp = 0;
    gint offset, bytes_per_read, i, c;
    guint available;
    gint16 *data;
    gdouble factor;

    if(parser->channels == 0) {
        return NULL;
    }

    bytes_per_read = PARSER_SAMPLES * parser->channels * sizeof(gint16);

    /* not enough audio buffered yet: return the previous scope as-is */
    if(gst_adapter_available(parser->adapter) < bytes_per_read) {
        return parser->scope;
    }

    /* NOTE(review): this reaches into GstAdapter's private buflist field
     * to get the timestamps of the oldest and newest buffered buffers —
     * fragile against adapter-internal changes */
    first_buffer = (GstBuffer *)g_slist_nth_data(parser->adapter->buflist, 0);
    last_buffer = (GstBuffer *)(g_slist_last(parser->adapter->buflist)->data);

    first_stamp = GST_BUFFER_TIMESTAMP(first_buffer);
    last_stamp = GST_BUFFER_TIMESTAMP(last_buffer);

    gst_element_query_position(parser->fakesink, &format, &sink_stamp);

    available = gst_adapter_available(parser->adapter);
    data = (gint16*)gst_adapter_peek(parser->adapter, available);

    /* fraction of the buffered window that playback has not reached yet */
    factor = (gdouble)(last_stamp - sink_stamp) / (last_stamp - first_stamp);
    offset = available - (int)(factor * (double)available);

    if(offset < 0) {
        offset *= -1;
    }

    /* clamp so a full read window fits after the offset */
    offset = MIN(offset, available - bytes_per_read);

    /* downmix interleaved channels to mono into parser->scope */
    for(i = 0; i < PARSER_SAMPLES; i++, data += parser->channels) {
        parser->scope[i] = 0;
        for(c = 0; c < parser->channels; c++) {
            parser->scope[i] += data[offset / sizeof(gint16) + c];
        }
        parser->scope[i] /= parser->channels;
    }

    return parser->scope;
}
/* Feed the two-pass statistics file to the Theora encoder: read the
 * cache file in 512-byte slices through an adapter and hand them to
 * th_encode_ctl(TH_ENCCTL_2PASS_IN) until the encoder stops consuming.
 * Returns FALSE on read or encoder error. */
static gboolean
theora_enc_read_multipass_cache (GstTheoraEnc * enc)
{
  GstBuffer *cache_buf;
  const guint8 *cache_data;
  gsize bytes_read = 0;
  gint bytes_consumed = 0;
  GIOStatus stat = G_IO_STATUS_NORMAL;
  gboolean done = FALSE;

  while (!done) {
    /* refill the adapter from the cache file when it runs dry */
    if (gst_adapter_available (enc->multipass_cache_adapter) == 0) {
      cache_buf = gst_buffer_new_and_alloc (512);
      stat = g_io_channel_read_chars (enc->multipass_cache_fd,
          (gchar *) GST_BUFFER_DATA (cache_buf), GST_BUFFER_SIZE (cache_buf),
          &bytes_read, NULL);

      if (bytes_read <= 0) {
        gst_buffer_unref (cache_buf);
        break;
      } else {
        /* shrink the buffer to what was actually read */
        GST_BUFFER_SIZE (cache_buf) = bytes_read;
        gst_adapter_push (enc->multipass_cache_adapter, cache_buf);
      }
    }
    if (gst_adapter_available (enc->multipass_cache_adapter) == 0)
      break;

    bytes_read =
        MIN (gst_adapter_available (enc->multipass_cache_adapter), 512);

    cache_data = gst_adapter_peek (enc->multipass_cache_adapter, bytes_read);

    bytes_consumed =
        th_encode_ctl (enc->encoder, TH_ENCCTL_2PASS_IN, (guint8 *) cache_data,
        bytes_read);

    /* <= 0 means the encoder wants no more data (or failed; < 0 is
     * reported as an error below) */
    done = bytes_consumed <= 0;
    if (bytes_consumed > 0)
      gst_adapter_flush (enc->multipass_cache_adapter, bytes_consumed);
  }

  if (stat == G_IO_STATUS_ERROR || (stat == G_IO_STATUS_EOF && bytes_read == 0)
      || bytes_consumed < 0) {
    GST_ELEMENT_ERROR (enc, RESOURCE, READ, (NULL),
        ("Failed to read multipass cache file"));
    return FALSE;
  }
  return TRUE;
}
/* Parse the ICY metadata block accumulated in meta_adapter — a string of
 * the form "StreamTitle='...';StreamUrl='...';" — into a tag list and
 * emit it. The adapter is cleared afterwards. */
static void
gst_icydemux_parse_and_send_tags (GstICYDemux * icydemux)
{
  GstTagList *tags;
  const guint8 *data;
  int length, i;
  gchar *buffer;
  gchar **strings;

  length = gst_adapter_available (icydemux->meta_adapter);
  data = gst_adapter_peek (icydemux->meta_adapter, length);

  /* Now, copy this to a buffer where we can NULL-terminate it to make things
   * a bit easier, then do that parsing. */
  buffer = g_strndup ((const gchar *) data, length);

  tags = gst_tag_list_new ();

  /* entries are terminated by "';" — split on that */
  strings = g_strsplit (buffer, "';", 0);

  for (i = 0; strings[i]; i++) {
    if (!g_ascii_strncasecmp (strings[i], "StreamTitle=", 12)) {
      /* +13 rather than +12: also skip the opening single quote */
      char *title = gst_icydemux_unicodify (strings[i] + 13);

      if (title && *title) {
        gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
            title, NULL);
        g_free (title);
      }
    } else if (!g_ascii_strncasecmp (strings[i], "StreamUrl=", 10)) {
      /* +11 rather than +10: also skip the opening single quote */
      char *url = gst_icydemux_unicodify (strings[i] + 11);

      if (url && *url) {
        gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_HOMEPAGE,
            url, NULL);
        g_free (url);
      }
    }
  }

  g_strfreev (strings);
  g_free (buffer);
  gst_adapter_clear (icydemux->meta_adapter);

  if (!gst_tag_list_is_empty (tags))
    gst_icydemux_tag_found (icydemux, tags);
  else
    gst_tag_list_free (tags);
}
/* Make @size bytes of the current chunk's payload addressable through
 * chunk->data (a peek; nothing is consumed). size == 0 means "as much
 * as possible", bounded by both the adapter contents and the chunk's
 * remaining length. Returns FALSE when the peek fails. */
static gboolean
chunk_ensure (GstAdapter *adapter, Chunk *chunk, guint size)
{
  const guint8 *payload;

  if (size == 0)
    size = MIN (gst_adapter_available (adapter), chunk->length);
  g_return_val_if_fail (size <= chunk->length, FALSE);

  payload = gst_adapter_peek (adapter, size);
  chunk->data = payload;
  chunk->available = (payload != NULL) ? size : 0;

  return payload != NULL;
}
/* Consume @size bytes of the current chunk from the adapter, updating
 * the chunk's remaining length and re-peeking whatever stays
 * addressable. @size must not exceed chunk->available. */
static void
chunk_skip (GstAdapter *adapter, Chunk *chunk, guint size)
{
  g_return_if_fail (size <= chunk->available);

  chunk->length -= size;
  chunk->available -= size;
  gst_adapter_flush (adapter, size);

  if (chunk->available == 0) {
    chunk->data = NULL;
  } else {
    /* flushing invalidated the old peek; re-peek the remainder */
    chunk->data = gst_adapter_peek (adapter, chunk->available);
    g_assert (chunk->data);
  }

  GST_LOG ("skipped %u bytes, %u of %u still available\n", size,
      chunk->available, chunk->length);
}
/*
 * gst_aiffparse_peek_chunk_info:
 * @aiff AIFFparse object
 * @tag holder for tag
 * @size holder for tag size
 *
 * Peek the next chunk header without consuming it. The fourcc tag is
 * read little-endian (matching GST_FOURCC comparisons); the chunk size
 * is big-endian on disk.
 *
 * Returns: %TRUE when the chunk info (header) is available
 */
static gboolean
gst_aiffparse_peek_chunk_info (AIFFParse * aiff, guint32 * tag, guint32 * size)
{
  const guint8 *header;

  if (gst_adapter_available (aiff->adapter) < 8)
    return FALSE;

  header = gst_adapter_peek (aiff->adapter, 8);
  *tag = GST_READ_UINT32_LE (header);
  *size = GST_READ_UINT32_BE (header + 4);

  GST_DEBUG ("Next chunk size is %d bytes, type %" GST_FOURCC_FORMAT,
      *size, GST_FOURCC_ARGS (*tag));

  return TRUE;
}
/* GSM encoder chain function: buffer raw PCM in the adapter and emit
 * one 33-byte GSM frame per 320 bytes (160 16-bit samples) of input.
 *
 * Fix: the loop previously ignored the result of gst_pad_push() and
 * kept encoding and pushing after downstream signalled an error; it now
 * stops on the first non-OK flow return (which is also what the caller
 * receives). */
static GstFlowReturn
gst_gsmenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstGSMEnc *gsmenc;
  gsm_signal *data;
  GstFlowReturn ret = GST_FLOW_OK;

  gsmenc = GST_GSMENC (gst_pad_get_parent (pad));

  /* drop any partial frame on a discontinuity */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (gsmenc->adapter);
  }
  gst_adapter_push (gsmenc->adapter, buf);

  while (ret == GST_FLOW_OK && gst_adapter_available (gsmenc->adapter) >= 320) {
    GstBuffer *outbuf;

    outbuf = gst_buffer_new_and_alloc (33 * sizeof (gsm_byte));
    GST_BUFFER_TIMESTAMP (outbuf) = gsmenc->next_ts;
    GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
    gsmenc->next_ts += 20 * GST_MSECOND;

    /* encode 160 16-bit samples into 33 bytes */
    data = (gsm_signal *) gst_adapter_peek (gsmenc->adapter, 320);
    gsm_encode (gsmenc->state, data, (gsm_byte *) GST_BUFFER_DATA (outbuf));
    gst_adapter_flush (gsmenc->adapter, 320);

    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmenc->srcpad));

    GST_DEBUG_OBJECT (gsmenc, "Pushing buffer of size %d",
        GST_BUFFER_SIZE (outbuf));

    ret = gst_pad_push (gsmenc->srcpad, outbuf);
  }

  gst_object_unref (gsmenc);

  return ret;
}
/* Read the audio-data offset from the first 16 header bytes in the
 * adapter, according to the RealAudio container version (3 or 4). The
 * caller must have ensured at least 16 bytes are buffered. */
static gboolean
gst_real_audio_demux_get_data_offset_from_header (GstRealAudioDemux * demux)
{
  const guint8 *hdr;

  hdr = gst_adapter_peek (demux->adapter, 16);
  g_assert (hdr != NULL);

  switch (demux->ra_version) {
    case 3:
      /* 16-bit offset field, relative to the 8 bytes before it */
      demux->data_offset = GST_READ_UINT16_BE (hdr) + 8;
      break;
    case 4:
      /* 32-bit offset field at byte 12, relative to the 16 bytes before */
      demux->data_offset = GST_READ_UINT32_BE (hdr + 12) + 16;
      break;
    default:
      demux->data_offset = 0;
      g_return_val_if_reached (FALSE);
  }

  return TRUE;
}
/* Feed one frame of buffered audio to the continuous-listening
 * calibration loop; announce "calibration" when starting and "ready"
 * once cont_ad_calib_loop() reports completion (returns 0).
 *
 * Fix: removed the unused local variable `adbuf`. */
static void
gst_sphinx_sink_calibrate_chunk (GstSphinxSink *sphinxsink)
{
  int16 *samples;
  int result;

  if (!sphinxsink->ad.calibrate_started) {
    gst_sphinx_sink_send_message (sphinxsink, "calibration", NULL);
    sphinxsink->ad.calibrate_started = TRUE;
  }

  samples = (int16 *) gst_adapter_peek (sphinxsink->adapter,
      REQUIRED_FRAME_BYTES);
  result = cont_ad_calib_loop (sphinxsink->cont, samples,
      REQUIRED_FRAME_SAMPLES);

  if (result == 0) {
    sphinxsink->ad.calibrated = TRUE;
    sphinxsink->ad.listening = 0;
    g_message ("Sending ready");
    gst_sphinx_sink_send_message (sphinxsink, "ready", NULL);
  }
}
/* Map the EXIF chunk held in @adapter onto @taglist. Depending on
 * @mapping, the raw chunk is added as a whole (WHOLECHUNK) and/or each
 * EXIF entry is mapped to an individual tag (INDIVIDUALS) by walking
 * the parsed ExifData with libexif. */
void
metadataparse_exif_tag_list_add (GstTagList * taglist, GstTagMergeMode mode,
    GstAdapter * adapter, MetadataTagMapping mapping)
{
  const guint8 *chunk;
  guint32 chunk_size;
  ExifData *exif;
  MEUserData user_data = { taglist, mode, 2, -1, 'k', 'k' };

  if (adapter == NULL)
    return;
  chunk_size = gst_adapter_available (adapter);
  if (chunk_size == 0)
    return;

  /* add chunk tag */
  if (mapping & METADATA_TAG_MAP_WHOLECHUNK)
    metadataparse_util_tag_list_add_chunk (taglist, mode, GST_TAG_EXIF,
        adapter);

  if (!(mapping & METADATA_TAG_MAP_INDIVIDUALS))
    return;

  chunk = gst_adapter_peek (adapter, chunk_size);
  exif = exif_data_new_from_data (chunk, chunk_size);
  if (exif == NULL)
    return;

  exif_data_foreach_content (exif,
      metadataparse_exif_data_foreach_content_func, (void *) &user_data);
  exif_data_unref (exif);
}
/* Validate the 6-byte RealAudio stream marker (".ra\375" plus a 16-bit
 * version) at the front of the adapter and advance to the HEADER state.
 * Returns GST_FLOW_OK while waiting for data, GST_FLOW_ERROR on a bad
 * marker or unsupported version. */
static GstFlowReturn
gst_real_audio_demux_parse_marker (GstRealAudioDemux * demux)
{
  const guint8 *hdr;

  if (gst_adapter_available (demux->adapter) < 6) {
    GST_LOG_OBJECT (demux, "need at least 6 bytes, waiting for more data");
    return GST_FLOW_OK;
  }

  hdr = gst_adapter_peek (demux->adapter, 6);
  if (memcmp (hdr, ".ra\375", 4) != 0)
    goto wrong_format;

  demux->ra_version = GST_READ_UINT16_BE (hdr + 4);
  GST_DEBUG_OBJECT (demux, "ra_version = %u", demux->ra_version);

  /* only container versions 3 and 4 are handled */
  if (demux->ra_version != 3 && demux->ra_version != 4)
    goto unsupported_ra_version;

  gst_adapter_flush (demux->adapter, 6);
  demux->state = REAL_AUDIO_DEMUX_STATE_HEADER;
  return GST_FLOW_OK;

/* ERRORS */
wrong_format:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (demux), STREAM, WRONG_TYPE, (NULL), (NULL));
    return GST_FLOW_ERROR;
  }
unsupported_ra_version:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (demux), STREAM, DECODE,
        ("Cannot decode this RealAudio file, please file a bug"),
        ("ra_version = %u", demux->ra_version));
    return GST_FLOW_ERROR;
  }
}
/* Read @size bytes into *@buffer, either by pull-range (mode 0) or from
 * the push-mode adapter. @move selects consume-vs-peek semantics: when
 * FALSE the data is only inspected and the read position is not
 * advanced. */
static GstFlowReturn
gst_nuv_demux_read_bytes (GstNuvDemux * nuv, guint64 size, gboolean move,
    GstBuffer ** buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;

  if (size == 0) {
    *buffer = gst_buffer_new ();
    return ret;
  }

  if (nuv->mode == 0) {
    /* pull mode: read directly from upstream at nuv->offset */
    ret = gst_pad_pull_range (nuv->sinkpad, nuv->offset, size, buffer);
    if (ret == GST_FLOW_OK) {
      if (move) {
        nuv->offset += size;
      }
      /* got eos */
    } else if (ret == GST_FLOW_UNEXPECTED) {
      gst_nuv_demux_send_eos (nuv);
      return GST_FLOW_WRONG_STATE;
    }
  } else {
    /* push mode: serve the request from the adapter */
    if (gst_adapter_available (nuv->adapter) < size)
      return GST_FLOW_ERROR_NO_DATA;

    if (move) {
      *buffer = gst_adapter_take_buffer (nuv->adapter, size);
    } else {
      guint8 *data = NULL;

      /* NOTE(review): zero-copy peek — the returned buffer points into
       * adapter-owned memory and nothing is flushed here; it is only
       * valid until the adapter is flushed/cleared, and the buffer does
       * not own the data — confirm callers respect this lifetime */
      data = (guint8 *) gst_adapter_peek (nuv->adapter, size);
      *buffer = gst_buffer_new ();
      gst_buffer_set_data (*buffer, data, size);
    }
  }
  return ret;
}
/* Feed one frame of buffered audio to the continuous-listening
 * calibration loop; emit the CALIBRATION signal when starting and the
 * READY signal once cont_ad_calib_loop() reports completion (0). */
static void
gst_pocketsphinx_calibrate_chunk (GstPocketSphinx *sphinxsink)
{
  int16 *frame;
  int status;

  if (!sphinxsink->ad.calibrate_started) {
    g_signal_emit (sphinxsink,
        gst_pocketsphinx_signals[SIGNAL_CALIBRATION], 0, NULL);
    sphinxsink->ad.calibrate_started = TRUE;
  }

  frame = (int16 *) gst_adapter_peek (sphinxsink->adapter,
      REQUIRED_FRAME_BYTES);
  status = cont_ad_calib_loop (sphinxsink->cont, frame,
      REQUIRED_FRAME_SAMPLES);

  if (status == 0) {
    sphinxsink->ad.calibrated = TRUE;
    sphinxsink->ad.listening = 0;
    g_signal_emit (sphinxsink, gst_pocketsphinx_signals[SIGNAL_READY], 0,
        NULL);
  }
}
/* Re-fetch the current M3U8 playlist, validate it as UTF-8 and hand it
 * to the client for parsing. Returns FALSE when the download or the
 * validation fails.
 *
 * Fix: an empty download previously made gst_adapter_peek() return NULL
 * and the NULL pointer was passed to the UTF-8 validator — bail out
 * early instead.
 * NOTE(review): the @retry parameter is unused here — confirm whether
 * retry handling is expected at the call sites. */
static gboolean
gst_hls_demux_update_playlist (GstHLSDemux * demux, gboolean retry)
{
  const guint8 *data;
  gchar *playlist;
  guint avail;

  GST_INFO_OBJECT (demux, "Updating the playlist %s",
      demux->client->current->uri);
  if (!gst_hls_demux_fetch_location (demux, demux->client->current->uri))
    return FALSE;

  avail = gst_adapter_available (demux->download);
  if (avail == 0) {
    GST_WARNING_OBJECT (demux, "Couldn't not validate playlist encoding");
    return FALSE;
  }

  data = gst_adapter_peek (demux->download, avail);
  playlist = gst_hls_src_buf_to_utf8_playlist ((gchar *) data, avail);
  gst_adapter_clear (demux->download);
  if (playlist == NULL) {
    GST_WARNING_OBJECT (demux, "Couldn't not validate playlist encoding");
    return FALSE;
  }

  gst_m3u8_client_update (demux->client, playlist);
  return TRUE;
}
/* ffmpeg URLProtocol read callback backed by a GstAdapter shared with
 * the streaming thread. Blocks (on the pipe condition) until @size
 * bytes are buffered or EOS is reached, then copies out at most @size
 * bytes. Returns the number of bytes delivered. */
static int
gst_ffmpeg_pipe_read (URLContext * h, unsigned char *buf, int size)
{
  GstFFMpegPipe *ffpipe;
  const guint8 *data;
  guint available;

  ffpipe = (GstFFMpegPipe *) h->priv_data;

  GST_LOG ("requested size %d", size);

  GST_FFMPEG_PIPE_MUTEX_LOCK (ffpipe);

  GST_LOG ("requested size %d", size);

  /* wait until the feeding thread has buffered enough, or EOS;
   * ffpipe->needed tells the feeder how much we are waiting for */
  while ((available = gst_adapter_available (ffpipe->adapter)) < size
      && !ffpipe->eos) {
    GST_DEBUG ("Available:%d, requested:%d", available, size);
    ffpipe->needed = size;
    GST_FFMPEG_PIPE_SIGNAL (ffpipe);
    GST_FFMPEG_PIPE_WAIT (ffpipe);
  }

  /* on EOS we may have less than requested */
  size = MIN (available, size);

  if (size) {
    GST_LOG ("Getting %d bytes", size);
    data = gst_adapter_peek (ffpipe->adapter, size);
    memcpy (buf, data, size);
    gst_adapter_flush (ffpipe->adapter, size);
    GST_LOG ("%d bytes left in adapter",
        gst_adapter_available (ffpipe->adapter));
    ffpipe->needed = 0;
  }
  GST_FFMPEG_PIPE_MUTEX_UNLOCK (ffpipe);

  return size;
}
/* GSM decoder chain function: buffer encoded frames in the adapter and
 * decode one frame (33 bytes, or alternating 33/32 bytes for WAV49)
 * into 160 16-bit samples per iteration, interpolating timestamps and
 * offsets when the input carries none. */
static GstFlowReturn
gst_gsmdec_chain (GstPad * pad, GstBuffer * buf)
{
  GstGSMDec *gsmdec;
  gsm_byte *data;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime timestamp;
  gint needed;

  gsmdec = GST_GSMDEC (gst_pad_get_parent (pad));

  /* read the input timestamp before the adapter takes ownership of buf */
  timestamp = GST_BUFFER_TIMESTAMP (buf);

  /* on a discontinuity, drop partial frames and reset interpolation */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (gsmdec->adapter);
    gsmdec->next_ts = GST_CLOCK_TIME_NONE;
    /* FIXME, do some good offset */
    gsmdec->next_of = 0;
  }
  gst_adapter_push (gsmdec->adapter, buf);

  needed = 33;
  /* do we have enough bytes to read a frame */
  while (gst_adapter_available (gsmdec->adapter) >= needed) {
    GstBuffer *outbuf;

    /* always the same amount of output samples */
    outbuf = gst_buffer_new_and_alloc (ENCODED_SAMPLES * sizeof (gsm_signal));

    /* If we are not given any timestamp, interpolate from last seen
     * timestamp (if any). */
    if (timestamp == GST_CLOCK_TIME_NONE)
      timestamp = gsmdec->next_ts;

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

    /* interpolate in the next run */
    if (timestamp != GST_CLOCK_TIME_NONE)
      gsmdec->next_ts = timestamp + gsmdec->duration;
    /* the input timestamp is only valid for the first output frame */
    timestamp = GST_CLOCK_TIME_NONE;

    GST_BUFFER_DURATION (outbuf) = gsmdec->duration;
    GST_BUFFER_OFFSET (outbuf) = gsmdec->next_of;
    if (gsmdec->next_of != -1)
      gsmdec->next_of += ENCODED_SAMPLES;
    GST_BUFFER_OFFSET_END (outbuf) = gsmdec->next_of;

    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmdec->srcpad));

    /* now encode frame into the output buffer */
    data = (gsm_byte *) gst_adapter_peek (gsmdec->adapter, needed);
    if (gsm_decode (gsmdec->state, data,
            (gsm_signal *) GST_BUFFER_DATA (outbuf)) < 0) {
      /* invalid frame */
      GST_WARNING_OBJECT (gsmdec, "tried to decode an invalid frame, skipping");
    }
    gst_adapter_flush (gsmdec->adapter, needed);

    /* WAV49 requires alternating 33 and 32 bytes of input */
    if (gsmdec->use_wav49)
      needed = (needed == 33 ? 32 : 33);

    GST_DEBUG_OBJECT (gsmdec, "Pushing buffer of size %d ts %" GST_TIME_FORMAT,
        GST_BUFFER_SIZE (outbuf),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));

    /* push */
    ret = gst_pad_push (gsmdec->srcpad, outbuf);
  }

  gst_object_unref (gsmdec);

  return ret;
}
/* SVG decoder chain function: accumulate input until a closing "</svg>"
 * tag is seen, decode the complete document to a video frame, stamp it
 * (fixed-framerate or passthrough timestamps) and push it downstream. */
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  /* NOTE(review): ret is declared gboolean but holds GstFlowReturn
   * values throughout — works because both are int-like, but the type
   * is wrong; TODO confirm and change to GstFlowReturn */
  gboolean ret = GST_FLOW_OK;

  /* first_timestamp is used slightly differently where a framerate is given
   * or not. If there is a frame rate, it will be used as a base. If there is
   * not, it will be used to keep track of the timestamp of the first buffer,
   * to be used as the timestamp of the output buffer. When a buffer is
   * output, first timestamp will resync to the next buffer's timestamp. */
  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
    else if (rsvg->fps_n != 0)
      rsvg->first_timestamp = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);
  /* "<svg></svg>" */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;

    /* scan backwards for the last "</svg>" in the buffered data */
    data = gst_adapter_peek (rsvg->adapter, size);
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

      data = gst_adapter_peek (rsvg->adapter, size);

      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;

      /* timestamp the output frame; see first_timestamp comment above */
      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;

        /* NOTE(review): 'buffer' was pushed into the adapter above and is
         * only valid while the adapter still holds it — TODO confirm */
        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
          GstClockTime end =
              GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
          end += GST_BUFFER_DURATION (buffer);
          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
        }

        if (rsvg->fps_n == 0) {
          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
        } else {
          GST_BUFFER_DURATION (outbuf) =
              gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,
              rsvg->fps_n * GST_SECOND);
        }
      } else if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,
            rsvg->fps_d, rsvg->fps_n * GST_SECOND);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,
            rsvg->fps_n * GST_SECOND);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
      }
      rsvg->frame_count++;

      /* send pending newsegment/events before the first frame */
      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      GST_LOG_OBJECT (rsvg, "image rendered okay");

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      break;
    }
  }

  /* NOTE(review): 'ret' is discarded here and GST_FLOW_OK is always
   * returned, so upstream never sees downstream flow errors — TODO
   * confirm whether this should be 'return ret;' */
  return GST_FLOW_OK;
}
/* chain function
 * this function does the actual processing: parse the 32-byte IVF file
 * header (START state), then repeatedly split out 12-byte-framed VP8
 * frames, timestamping them from the header's rate fraction (DATA state).
 */
static GstFlowReturn
gst_ivf_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));
  gboolean res;

  /* lazy creation of the adapter */
  if (G_UNLIKELY (ivf->adapter == NULL)) {
    ivf->adapter = gst_adapter_new ();
  }

  GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",
      GST_BUFFER_SIZE (buf));

  gst_adapter_push (ivf->adapter, buf); /* adapter takes ownership of buf */

  res = GST_FLOW_OK;

  switch (ivf->state) {
    case GST_IVF_PARSE_START:
      if (gst_adapter_available (ivf->adapter) >= 32) {
        GstCaps *caps;

        /* fixed 32-byte little-endian file header */
        const guint8 *data = gst_adapter_peek (ivf->adapter, 32);
        guint32 magic = GST_READ_UINT32_LE (data);
        guint16 version = GST_READ_UINT16_LE (data + 4);
        guint16 header_size = GST_READ_UINT16_LE (data + 6);
        guint32 fourcc = GST_READ_UINT32_LE (data + 8);
        guint16 width = GST_READ_UINT16_LE (data + 12);
        guint16 height = GST_READ_UINT16_LE (data + 14);
        guint32 rate_num = GST_READ_UINT32_LE (data + 16);
        guint32 rate_den = GST_READ_UINT32_LE (data + 20);
#ifndef GST_DISABLE_GST_DEBUG
        guint32 num_frames = GST_READ_UINT32_LE (data + 24);
#endif

        /* last 4 bytes unused */
        gst_adapter_flush (ivf->adapter, 32);

        if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
            version != 0 || header_size != 32 ||
            fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
          GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }

        /* create src pad caps */
        caps = gst_caps_new_simple ("video/x-vp8",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);

        GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);

        GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);

        gst_pad_set_caps (ivf->srcpad, caps);
        gst_caps_unref (caps);

        /* keep framerate in instance for convenience */
        ivf->rate_num = rate_num;
        ivf->rate_den = rate_den;

        gst_pad_push_event (ivf->srcpad, gst_event_new_new_segment (FALSE, 1.0,
                GST_FORMAT_TIME, 0, -1, 0));

        /* move along */
        ivf->state = GST_IVF_PARSE_DATA;
      } else {
        GST_LOG_OBJECT (ivf, "Header data not yet available.");
        break;
      }

      /* fall through */

    case GST_IVF_PARSE_DATA:
      /* NOTE(review): '> 12' means a frame header that arrives exactly at
       * a 12-byte boundary waits for more data; '>= 12' would also work
       * — TODO confirm intended */
      while (gst_adapter_available (ivf->adapter) > 12) {
        /* per-frame header: 32-bit size + 64-bit pts, little-endian */
        const guint8 *data = gst_adapter_peek (ivf->adapter, 12);
        guint32 frame_size = GST_READ_UINT32_LE (data);
        guint64 frame_pts = GST_READ_UINT64_LE (data + 4);

        GST_LOG_OBJECT (ivf,
            "Read frame header: size %u, pts %" G_GUINT64_FORMAT, frame_size,
            frame_pts);

        if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {
          GstBuffer *frame;

          gst_adapter_flush (ivf->adapter, 12);

          frame = gst_adapter_take_buffer (ivf->adapter, frame_size);
          gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));
          /* pts is in frame units; scale by the header's rate fraction */
          GST_BUFFER_TIMESTAMP (frame) =
              gst_util_uint64_scale_int (GST_SECOND * frame_pts, ivf->rate_den,
              ivf->rate_num);
          GST_BUFFER_DURATION (frame) =
              gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,
              ivf->rate_num);

          GST_DEBUG_OBJECT (ivf, "Pushing frame of size %u, ts %"
              GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %"
              G_GUINT64_FORMAT ", off_end %" G_GUINT64_FORMAT,
              GST_BUFFER_SIZE (frame),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (frame)),
              GST_TIME_ARGS (GST_BUFFER_DURATION (frame)),
              GST_BUFFER_OFFSET (frame), GST_BUFFER_OFFSET_END (frame));

          res = gst_pad_push (ivf->srcpad, frame);
          if (res != GST_FLOW_OK)
            break;
        } else {
          GST_LOG_OBJECT (ivf, "Frame data not yet available.");
          break;
        }
      }
      break;

    default:
      g_return_val_if_reached (GST_FLOW_ERROR);
  }

  return res;
}
/* Visualization handoff: accumulate PCM slices in an adapter, and for
 * every full slice deinterleave the channels, downmix to mono, run a
 * Hamming-windowed FFT and hand both waveform and normalized dB
 * spectrum to the registered vis callback. */
static void
bp_vis_pcm_handoff (GstElement *sink, GstBuffer *buffer, GstPad *pad,
    gpointer userdata)
{
    BansheePlayer *player = (BansheePlayer*)userdata;
    GstStructure *structure;
    gint channels, wanted_size;
    gfloat *data;
    BansheePlayerVisDataCallback vis_data_cb;

    g_return_if_fail (IS_BANSHEE_PLAYER (player));

    vis_data_cb = player->vis_data_cb;

    if (vis_data_cb == NULL) {
        return;
    }

    if (player->vis_thawing) {
        // Flush our buffers out.
        gst_adapter_clear (player->vis_buffer);
        memset (player->vis_fft_sample_buffer, 0,
            sizeof(gfloat) * SLICE_SIZE);
        player->vis_thawing = FALSE;
    }

    /* NOTE(review): gst_buffer_get_caps() returns a new caps ref that is
     * never unreffed here — looks like a refcount leak; confirm */
    structure = gst_caps_get_structure (gst_buffer_get_caps (buffer), 0);
    gst_structure_get_int (structure, "channels", &channels);

    wanted_size = channels * SLICE_SIZE * sizeof (gfloat);

    gst_adapter_push (player->vis_buffer, gst_buffer_copy (buffer));

    /* peek returns NULL once less than a full slice remains buffered */
    while ((data = (gfloat *)gst_adapter_peek (player->vis_buffer,
        wanted_size)) != NULL) {
        gfloat *deinterlaced = g_malloc (wanted_size);
        gfloat *specbuf = g_new (gfloat, SLICE_SIZE * 2);

        gint i, j;

        /* first half of specbuf is the previous slice (FFT overlap) */
        memcpy (specbuf, player->vis_fft_sample_buffer,
            SLICE_SIZE * sizeof(gfloat));

        for (i = 0; i < SLICE_SIZE; i++) {
            gfloat avg = 0.0f;

            /* deinterleave into per-channel planes and downmix to mono */
            for (j = 0; j < channels; j++) {
                gfloat sample = data[i * channels + j];

                deinterlaced[j * SLICE_SIZE + i] = sample;
                avg += sample;
            }

            avg /= channels;
            specbuf[i + SLICE_SIZE] = avg;
        }

        /* remember this slice's mono samples for the next overlap */
        memcpy (player->vis_fft_sample_buffer, &specbuf[SLICE_SIZE],
            SLICE_SIZE * sizeof(gfloat));

        gst_fft_f32_window (player->vis_fft, specbuf,
            GST_FFT_WINDOW_HAMMING);
        gst_fft_f32_fft (player->vis_fft, specbuf, player->vis_fft_buffer);

        for (i = 0; i < SLICE_SIZE; i++) {
            gfloat val;

            GstFFTF32Complex cplx = player->vis_fft_buffer[i];

            /* power -> dB -> normalize -60..0 dB into 0..1 */
            val = cplx.r * cplx.r + cplx.i * cplx.i;
            val /= SLICE_SIZE * SLICE_SIZE;
            val = 10.0f * log10f(val);

            val = (val + 60.0f) / 60.0f;
            if (val < 0.0f)
                val = 0.0f;

            specbuf[i] = val;
        }

        vis_data_cb (player, channels, SLICE_SIZE, deinterlaced, SLICE_SIZE,
            specbuf);

        g_free (deinterlaced);
        g_free (specbuf);

        gst_adapter_flush (player->vis_buffer, wanted_size);
    }
}