/**
 * gst_buffer_add_protection_meta:
 * @buffer: #GstBuffer holding an encrypted sample, to which protection
 *     metadata should be added.
 * @info: (transfer full): a #GstStructure holding cryptographic
 *     information relating to the sample contained in @buffer. This
 *     function takes ownership of @info.
 *
 * Attaches protection metadata to a #GstBuffer.
 *
 * Returns: (transfer none): a pointer to the added #GstProtectionMeta if successful; %NULL if
 * unsuccessful.
 *
 * Since: 1.6
 */
GstProtectionMeta *
gst_buffer_add_protection_meta (GstBuffer * buffer, GstStructure * info)
{
  GstProtectionMeta *meta;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);
  g_return_val_if_fail (info != NULL, NULL);

  meta =
      (GstProtectionMeta *) gst_buffer_add_meta (buffer,
      GST_PROTECTION_META_INFO, NULL);

  /* gst_buffer_add_meta() returns NULL when the buffer is not writable;
   * dereferencing it unchecked would crash, and since we own @info
   * (transfer full) we must free it on failure to avoid a leak. */
  if (meta == NULL) {
    gst_structure_free (info);
    return NULL;
  }

  meta->info = info;

  return meta;
}
/**
 * gst_buffer_set_qdata:
 * @buffer: a #GstBuffer
 * @quark: name quark of data structure to set or replace
 * @data: (transfer full) (allow-none): a #GstStructure to store with the
 *     buffer, name must match @quark. Can be NULL to remove an existing
 *     structure. This function takes ownership of the structure passed.
 *
 * Set metadata structure for name quark @quark to @data, or remove the
 * existing metadata structure by that name in case @data is NULL.
 *
 * Takes ownership of @data.
 *
 * Since: 0.10.36
 */
void
gst_buffer_set_qdata (GstBuffer * buffer, GQuark quark, GstStructure * data)
{
  GstBufferPrivate *priv;
  GList *l;

  g_return_if_fail (GST_IS_BUFFER (buffer));
  g_return_if_fail (gst_buffer_is_metadata_writable (buffer));
  /* the structure's registered name must agree with @quark */
  g_return_if_fail (data == NULL || quark == gst_structure_get_name_id (data));

  /* locking should not really be required, since the metadata_writable
   * check ensures that the caller is the only one holding a ref, so as
   * as a second ref is added everything turns read-only */
  priv = gst_buffer_ensure_priv (buffer);

  if (data) {
    /* tie the structure's lifetime to the buffer's refcount */
    gst_structure_set_parent_refcount (data, &buffer->mini_object.refcount);
  }

  /* replace (or remove) an existing entry with the same name quark */
  for (l = priv->qdata; l != NULL; l = l->next) {
    GstStructure *s = l->data;

    if (s->name == quark) {
      GST_CAT_LOG (GST_CAT_BUFFER, "Replacing qdata '%s' on buffer %p: "
          "%" GST_PTR_FORMAT " => %" GST_PTR_FORMAT,
          g_quark_to_string (quark), buffer, s, data);
      /* detach and free the old structure before swapping in the new one */
      gst_structure_set_parent_refcount (s, NULL);
      gst_structure_free (s);
      if (data == NULL)
        priv->qdata = g_list_delete_link (priv->qdata, l);
      else
        l->data = data;
      goto done;
    }
  }

  GST_CAT_LOG (GST_CAT_BUFFER,
      "Set qdata '%s' on buffer %p: %" GST_PTR_FORMAT,
      g_quark_to_string (quark), buffer, data);

  /* no previous entry for @quark: prepend a new one.
   * NOTE(review): a NULL @data with no existing entry is stored as a NULL
   * list element here — confirm lookups tolerate that. */
  priv->qdata = g_list_prepend (priv->qdata, data);

done:
  return;
}
/* Fakesink handoff callback: verifies that each float32 buffer is exactly
 * one second of 48 kHz stereo audio, that its caps (including the channel
 * positions selected by @user_data) match, and that the interleaved
 * samples alternate -1.0 / 1.0. */
static void
sink_handoff_float32 (GstElement * element, GstBuffer * buffer, GstPad * pad,
    gpointer user_data)
{
  gint i;
  gfloat *data;
  GstCaps *caps;
  /* test-scenario index: selects which channel-position set to expect */
  gint n = GPOINTER_TO_INT (user_data);

  fail_unless (GST_IS_BUFFER (buffer));
  /* 48000 frames x 2 channels x sizeof(gfloat) = one second of audio */
  fail_unless_equals_int (GST_BUFFER_SIZE (buffer),
      48000 * 2 * sizeof (gfloat));
  fail_unless_equals_int (GST_BUFFER_DURATION (buffer), GST_SECOND);

  caps =
      gst_caps_new_simple ("audio/x-raw-float", "width", G_TYPE_INT, 32,
      "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, "endianness",
      G_TYPE_INT, G_BYTE_ORDER, NULL);

  if (n == 0) {
    /* scenario 0: unpositioned channels */
    GstAudioChannelPosition pos[2] =
        { GST_AUDIO_CHANNEL_POSITION_NONE, GST_AUDIO_CHANNEL_POSITION_NONE };
    gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
  } else if (n == 1) {
    /* scenario 1: standard stereo front left/right */
    GstAudioChannelPosition pos[2] =
        { GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT
    };
    gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
  } else if (n == 2) {
    /* scenario 2: front-center / rear-center pair */
    GstAudioChannelPosition pos[2] =
        { GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
      GST_AUDIO_CHANNEL_POSITION_REAR_CENTER
    };
    gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
  }

  fail_unless (gst_caps_is_equal (caps, GST_BUFFER_CAPS (buffer)));
  gst_caps_unref (caps);

  /* interleaved samples: even index = -1.0, odd index = 1.0 */
  data = (gfloat *) GST_BUFFER_DATA (buffer);

  for (i = 0; i < 48000 * 2; i += 2) {
    fail_unless_equals_float (data[i], -1.0);
    fail_unless_equals_float (data[i + 1], 1.0);
  }

  /* global counter checked by the test after the pipeline has run */
  have_data++;
}
/* Retrieves the #GstVaapiVideoMeta attached to @buffer, or NULL when the
 * buffer carries none.  The returned meta's back-pointer is refreshed to
 * point at @buffer before returning. */
GstVaapiVideoMeta *
gst_buffer_get_vaapi_video_meta (GstBuffer * buffer)
{
  GstMeta *generic_meta;
  GstVaapiVideoMeta *video_meta = NULL;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  generic_meta = gst_buffer_get_meta (buffer, GST_VAAPI_VIDEO_META_API_TYPE);
  if (generic_meta != NULL) {
    video_meta = GST_VAAPI_VIDEO_META_HOLDER (generic_meta)->meta;
    if (video_meta != NULL)
      video_meta->buffer = buffer;
  }

  return video_meta;
}
/* Initializes @packet to point at the first RDT packet inside @buffer and
 * parses its header.  Returns TRUE when a valid packet header was read,
 * FALSE otherwise. */
gboolean
gst_rdt_buffer_get_first_packet (GstBuffer * buffer, GstRDTPacket * packet)
{
  g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE);
  g_return_val_if_fail (packet != NULL, FALSE);

  /* reset the cursor: start of buffer, type unknown, no mapping yet */
  packet->buffer = buffer;
  packet->offset = 0;
  packet->type = GST_RDT_TYPE_INVALID;
  memset (&packet->map, 0, sizeof (GstMapInfo));

  return read_packet_header (packet) ? TRUE : FALSE;
}
/* known-source-packet-received handler: validates the callback arguments
 * and bumps the per-stream / per-component reception counter. */
static void
_known_source_packet_received (FsStreamTransmitter *st, guint component_id,
    GstBuffer *buffer, gpointer user_data)
{
  guint stream_idx = GPOINTER_TO_UINT (user_data);

  ts_fail_unless (associate_on_source == TRUE,
      "Got known-source-packet-received when we shouldn't have");
  ts_fail_unless (component_id == 1 || component_id == 2,
      "Invalid component id %u", component_id);
  ts_fail_unless (GST_IS_BUFFER (buffer), "Invalid buffer received at %p",
      buffer);

  received_known[stream_idx - 1][component_id - 1]++;
}
// Called from the streaming thread with each new decoded frame: stores the
// buffer (replacing the previous one under m_bufferMutex) and requests a
// repaint — via the accelerated-compositing layer when available, otherwise
// through the regular player repaint path.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstBuffer* buffer)
{
    g_return_if_fail(GST_IS_BUFFER(buffer));

    g_mutex_lock(m_bufferMutex);
    // gst_buffer_replace takes its own ref and drops the old buffer's
    gst_buffer_replace(&m_buffer, buffer);
    g_mutex_unlock(m_bufferMutex);

#if USE(ACCELERATED_COMPOSITING) && USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (supportsAcceleratedRendering() && m_player->mediaPlayerClient()->mediaPlayerRenderingCanBeAccelerated(m_player) && client()) {
        client()->setPlatformLayerNeedsDisplay();
        return;
    }
#endif

    m_player->repaint();
}
/**
 * gst_buffer_add_net_control_message_meta:
 * @buffer: a #GstBuffer
 * @message: a @GSocketControlMessage to attach to @buffer
 *
 * Attaches @message as metadata in a #GstNetControlMessageMeta to @buffer.
 *
 * Returns: (transfer none): a #GstNetControlMessageMeta connected to @buffer,
 * or %NULL if the meta could not be added (e.g. @buffer not writable)
 */
GstNetControlMessageMeta *
gst_buffer_add_net_control_message_meta (GstBuffer * buffer,
    GSocketControlMessage * message)
{
  GstNetControlMessageMeta *meta;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);
  g_return_val_if_fail (G_IS_SOCKET_CONTROL_MESSAGE (message), NULL);

  meta =
      (GstNetControlMessageMeta *) gst_buffer_add_meta (buffer,
      GST_NET_CONTROL_MESSAGE_META_INFO, NULL);

  /* gst_buffer_add_meta() returns NULL for non-writable buffers; guard
   * before dereferencing (and before taking a ref we would then leak). */
  if (meta == NULL)
    return NULL;

  meta->message = g_object_ref (message);

  return meta;
}
/* Attaches a KmsBufferLatencyMeta to @buffer recording the timestamp @ts,
 * its validity flag and the media @type.  Returns the attached meta. */
KmsBufferLatencyMeta *
kms_buffer_add_buffer_latency_meta (GstBuffer * buffer, GstClockTime ts,
    gboolean valid, KmsMediaType type)
{
  KmsBufferLatencyMeta *latency_meta;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  latency_meta = (KmsBufferLatencyMeta *)
      gst_buffer_add_meta (buffer, KMS_BUFFER_LATENCY_META_INFO, NULL);

  latency_meta->ts = ts;
  latency_meta->valid = valid;
  latency_meta->type = type;

  return latency_meta;
}
/**
 * gst_buffer_add_video_region_of_interest_meta_id:
 * @buffer: a #GstBuffer
 * @roi_type: Type of the region of interest (e.g. "face")
 * @x: X position
 * @y: Y position
 * @w: width
 * @h: height
 *
 * Attaches a #GstVideoRegionOfInterestMeta to @buffer describing the
 * rectangle (@x, @y, @w, @h) of kind @roi_type.
 *
 * Returns: (transfer none): the #GstVideoRegionOfInterestMeta on @buffer.
 */
GstVideoRegionOfInterestMeta *
gst_buffer_add_video_region_of_interest_meta_id (GstBuffer * buffer,
    GQuark roi_type, guint x, guint y, guint w, guint h)
{
  GstVideoRegionOfInterestMeta *roi_meta;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  roi_meta = (GstVideoRegionOfInterestMeta *)
      gst_buffer_add_meta (buffer, GST_VIDEO_REGION_OF_INTEREST_META_INFO,
      NULL);

  roi_meta->roi_type = roi_type;
  roi_meta->x = x;
  roi_meta->y = y;
  roi_meta->w = w;
  roi_meta->h = h;

  return roi_meta;
}
/* known-source-packet-received handler: validates the arguments, counts
 * the packet for its component and logs the running total. */
static void
_known_source_packet_received (FsStreamTransmitter *st, guint component_id,
    GstBuffer *buffer, gpointer user_data)
{
  guint idx;

  ts_fail_unless (associate_on_source == TRUE,
      "Got known-source-packet-received when we shouldn't have");
  ts_fail_unless (component_id == 1 || component_id == 2,
      "Invalid component id %u", component_id);
  ts_fail_unless (GST_IS_BUFFER (buffer), "Invalid buffer received at %p",
      buffer);

  idx = component_id - 1;
  received_known[idx]++;

  GST_LOG ("Known source buffer %d component: %d size: %" G_GSIZE_FORMAT,
      received_known[idx], component_id, gst_buffer_get_size (buffer));
}
/* "have-data" pad probe on the rtpbin pad.  Returns TRUE to let @miniobj
 * pass, FALSE to drop it.  Data passes only once the substream has a codec
 * bin, a codec and expected caps; buffers with merely compatible caps get
 * their caps rewritten, and once a buffer matches exactly the probe removes
 * itself. */
static gboolean
_rtpbin_pad_have_data_callback (GstPad *pad, GstMiniObject *miniobj,
    gpointer user_data)
{
  FsRtpSubStream *self = FS_RTP_SUB_STREAM (user_data);
  gboolean ret = TRUE;
  gboolean remove = FALSE;

  FS_RTP_SESSION_LOCK (self->priv->session);

  if (!self->priv->codecbin || !self->codec || !self->priv->caps)
  {
    /* not ready to receive yet: drop everything */
    ret = FALSE;
  }
  else if (GST_IS_BUFFER (miniobj))
  {
    if (!gst_caps_is_equal_fixed (GST_BUFFER_CAPS (miniobj),
            self->priv->caps))
    {
      /* caps differ: accept the buffer (forcing our caps onto it) only if
       * they intersect with the expected caps, otherwise drop it */
      GstCaps *intersect = gst_caps_intersect (GST_BUFFER_CAPS (miniobj),
          self->priv->caps);

      if (gst_caps_is_empty (intersect))
        ret = FALSE;
      else
        gst_buffer_set_caps (GST_BUFFER (miniobj), self->priv->caps);

      gst_caps_unref (intersect);
    }
    else
    {
      /* exact caps match: stream is flowing correctly, probe can go away */
      remove = TRUE;
    }
  }

  if (remove && self->priv->blocking_id)
  {
    gst_pad_remove_data_probe (pad, self->priv->blocking_id);
    self->priv->blocking_id = 0;
  }

  FS_RTP_SESSION_UNLOCK (self->priv->session);

  return ret;
}
/** * gst_type_find_helper_for_buffer: * @obj: (allow-none): object doing the typefinding, or %NULL (used for logging) * @buf: (in) (transfer none): a #GstBuffer with data to typefind * @prob: (out) (allow-none): location to store the probability of the found * caps, or %NULL * * Tries to find what type of data is contained in the given #GstBuffer, the * assumption being that the buffer represents the beginning of the stream or * file. * * All available typefinders will be called on the data in order of rank. If * a typefinding function returns a probability of %GST_TYPE_FIND_MAXIMUM, * typefinding is stopped immediately and the found caps will be returned * right away. Otherwise, all available typefind functions will the tried, * and the caps with the highest probability will be returned, or %NULL if * the content of the buffer could not be identified. * * Free-function: gst_caps_unref * * Returns: (transfer full) (nullable): the #GstCaps corresponding to the data, * or %NULL if no type could be found. The caller should free the caps * returned with gst_caps_unref(). */ GstCaps * gst_type_find_helper_for_buffer (GstObject * obj, GstBuffer * buf, GstTypeFindProbability * prob) { GstCaps *result; GstMapInfo info; g_return_val_if_fail (buf != NULL, NULL); g_return_val_if_fail (GST_IS_BUFFER (buf), NULL); g_return_val_if_fail (GST_BUFFER_OFFSET (buf) == 0 || GST_BUFFER_OFFSET (buf) == GST_BUFFER_OFFSET_NONE, NULL); if (!gst_buffer_map (buf, &info, GST_MAP_READ)) return NULL; result = gst_type_find_helper_for_data (obj, info.data, info.size, prob); gst_buffer_unmap (buf, &info); return result; }
/* Pops objects off the appsink queue until a buffer is found, processing
 * any CAPS / SEGMENT events encountered along the way (updating last_caps
 * and last_segment).  Must be called with the appsink lock held and only
 * when a buffer is known to be queued — otherwise the loop would pop from
 * an empty queue. */
static GstBuffer *
dequeue_buffer (GstAppSink * appsink)
{
  GstAppSinkPrivate *priv = appsink->priv;
  GstBuffer *buffer;

  do {
    GstMiniObject *obj;

    obj = g_queue_pop_head (priv->queue);

    if (GST_IS_BUFFER (obj)) {
      buffer = GST_BUFFER_CAST (obj);
      GST_DEBUG_OBJECT (appsink, "dequeued buffer %p", buffer);
      priv->num_buffers--;
      break;
    } else if (GST_IS_EVENT (obj)) {
      GstEvent *event = GST_EVENT_CAST (obj);

      switch (GST_EVENT_TYPE (obj)) {
        case GST_EVENT_CAPS:
        {
          GstCaps *caps;

          gst_event_parse_caps (event, &caps);
          GST_DEBUG_OBJECT (appsink, "activating caps %" GST_PTR_FORMAT,
              caps);
          /* remember the most recent caps for samples pulled later */
          gst_caps_replace (&priv->last_caps, caps);
          break;
        }
        case GST_EVENT_SEGMENT:
          /* remember the most recent segment for samples pulled later */
          gst_event_copy_segment (event, &priv->last_segment);
          GST_DEBUG_OBJECT (appsink, "activated segment %" GST_SEGMENT_FORMAT,
              &priv->last_segment);
          break;
        default:
          break;
      }
      gst_mini_object_unref (obj);
    }
  } while (TRUE);

  return buffer;
}
/* Pad probe on the video sink: inspects QOS events to maintain the
 * frames_rendered / frames_dropped statistics.  The buffer-timestamp-based
 * accounting is disabled (#if 0) in favour of the QOS-event path.  Always
 * returns TRUE so data keeps flowing. */
static gboolean
on_video_sink_data_flow (GstPad * pad, GstMiniObject * mini_obj,
    gpointer user_data)
{
  GstFPSDisplaySink *self = GST_FPS_DISPLAY_SINK (user_data);

#if 0
  if (GST_IS_BUFFER (mini_obj)) {
    GstBuffer *buf = GST_BUFFER_CAST (mini_obj);

    if (GST_CLOCK_TIME_IS_VALID (self->next_ts)) {
      if (GST_BUFFER_TIMESTAMP (buf) <= self->next_ts) {
        self->frames_rendered++;
      } else {
        GST_WARNING_OBJECT (self, "dropping frame : ts %" GST_TIME_FORMAT
            " < expected_ts %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (self->next_ts));
        self->frames_dropped++;
      }
    } else {
      self->frames_rendered++;
    }
  } else
#endif
  if (GST_IS_EVENT (mini_obj)) {
    GstEvent *ev = GST_EVENT_CAST (mini_obj);

    if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) {
      GstClockTimeDiff diff;
      GstClockTime ts;

      gst_event_parse_qos (ev, NULL, &diff, &ts);
      /* next expected timestamp = QOS timestamp plus reported jitter */
      self->next_ts = ts + diff;
      /* NOTE(review): diff is an integer GstClockTimeDiff compared against
       * the double literal 0.0 — works via promotion, but looks
       * unintentional; confirm intent. */
      if (diff <= 0.0) {
        self->frames_rendered++;
      } else {
        self->frames_dropped++;
      }
    }
  }

  return TRUE;
}
EXPORT_C
#endif
/* Returns the length in bytes of the RTP payload of @buffer: the total
 * buffer size minus the RTP header length, minus any padding octets. */
guint
gst_rtp_buffer_get_payload_len (GstBuffer * buffer)
{
  guint len, size;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), 0);
  g_return_val_if_fail (GST_BUFFER_DATA (buffer) != NULL, 0);

  size = GST_BUFFER_SIZE (buffer);

  len = size - gst_rtp_buffer_get_header_len (buffer);

  /* when the padding bit is set, the last octet of the packet holds the
   * number of padding octets to ignore (RFC 3550 section 5.1) */
  if (GST_RTP_HEADER_PADDING (buffer))
    len -= GST_BUFFER_DATA (buffer)[size - 1];

  return len;
}
// Attaches a TcamStatisticsMeta holding @statistics to @buffer.
// Returns the attached meta, or nullptr when the meta could not be added
// (the meta takes over @statistics only on success).
TcamStatisticsMeta* gst_buffer_add_tcam_statistics_meta (GstBuffer* buffer,
                                                         GstStructure* statistics)
{
    g_return_val_if_fail(GST_IS_BUFFER(buffer), nullptr);
    g_return_val_if_fail(statistics, nullptr);

    auto meta = (TcamStatisticsMeta*) gst_buffer_add_meta(buffer,
                                                          TCAM_STATISTICS_META_INFO,
                                                          nullptr);
    if (meta)
    {
        meta->structure = statistics;
    }

    return meta;
}
/* libpng end-of-row callback: copies the freshly decoded row into the
 * output buffer at the row's byte offset and marks the flow as OK. */
static void
user_endrow_callback (png_structp png_ptr, png_bytep new_row,
    png_uint_32 row_num, int pass)
{
  GstPngDec *pngdec = NULL;

  /* the decoder instance was stored as libpng's I/O pointer */
  pngdec = GST_PNGDEC (png_ptr->io_ptr);

  /* FIXME: implement interlaced pictures */

  /* If buffer_out doesn't exist, it means buffer_alloc failed, which
   * will already have set the return code */
  if (GST_IS_BUFFER (pngdec->buffer_out)) {
    size_t offset = row_num * pngdec->rowbytes;

    GST_LOG ("got row %u, copying in buffer %p at offset %" G_GSIZE_FORMAT,
        (guint) row_num, pngdec->buffer_out, offset);
    memcpy (GST_BUFFER_DATA (pngdec->buffer_out) + offset, new_row,
        pngdec->rowbytes);
    pngdec->ret = GST_FLOW_OK;
  }
}
EXPORT_C
#endif
/* Computes the duration of @buf from the raw-audio caps on @pad
 * (width / channels / rate) as bytes * 8 * GST_SECOND / (rate * channels *
 * width).  Returns GST_CLOCK_TIME_NONE when the pad has no caps.
 * NOTE(review): the g_assert calls abort on a zero-sized buffer or missing
 * caps fields — confirm that is acceptable for all callers. */
GstClockTime
gst_audio_duration_from_pad_buffer (GstPad * pad, GstBuffer * buf)
{
  long bytes = 0;
  int width = 0;
  int channels = 0;
  int rate = 0;
  GstClockTime length;

  const GstCaps *caps = NULL;
  GstStructure *structure;

  g_assert (GST_IS_BUFFER (buf));

  /* get caps of pad */
  caps = GST_PAD_CAPS (pad);
  if (caps == NULL) {
    /* ERROR: could not get caps of pad */
    g_warning ("gstaudio: could not get caps of pad %s:%s\n",
        GST_DEBUG_PAD_NAME (pad));
    length = GST_CLOCK_TIME_NONE;
  } else {
    structure = gst_caps_get_structure (caps, 0);

    bytes = GST_BUFFER_SIZE (buf);
    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "channels", &channels);
    gst_structure_get_int (structure, "rate", &rate);

    g_assert (bytes != 0);
    g_assert (width != 0);
    g_assert (channels != 0);
    g_assert (rate != 0);

    length = (bytes * 8 * GST_SECOND) / (rate * channels * width);
  }
  return length;
}
/* Attaches a KmsSerializableMeta carrying @data to @buffer.  If the buffer
 * already carries one, the fields of @data are merged into the existing
 * meta's structure and @data is freed; otherwise a new meta takes
 * ownership of @data.  NOTE(review): @data is dereferenced without a NULL
 * check on the merge path — confirm callers never pass NULL. */
KmsSerializableMeta *
kms_buffer_add_serializable_meta (GstBuffer * buffer, GstStructure * data)
{
  KmsSerializableMeta *meta;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  meta = (KmsSerializableMeta *) gst_buffer_get_meta (buffer,
      KMS_SERIALIZABLE_META_API_TYPE);

  if (meta != NULL) {
    /* merge new fields into the existing structure; we own @data and must
     * free it after copying its fields */
    gst_structure_foreach (data, add_fields_to_structure, meta->data);
    gst_structure_free (data);
  } else {
    meta = (KmsSerializableMeta *) gst_buffer_add_meta (buffer,
        KMS_SERIALIZABLE_META_INFO, NULL);
    meta->data = data;
  }

  return meta;
}
// Main-thread timeout handler: takes the pending buffer out of priv->buffer
// (under bufferMutex), emits the repaint-requested signal with it, and wakes
// the streaming thread waiting on dataCondition.  Returns FALSE so the
// GSource fires only once.
static gboolean webkitVideoSinkTimeoutCallback(gpointer data)
{
    WebKitVideoSink* sink = reinterpret_cast<WebKitVideoSink*>(data);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(priv->bufferMutex);
    GstBuffer* buffer = priv->buffer;
    priv->buffer = 0;
    priv->timeoutId = 0;

    // Nothing to draw (no buffer, sink unlocked, or invalid buffer): just
    // wake the producer and bail out.
    if (!buffer || priv->unlocked || UNLIKELY(!GST_IS_BUFFER(buffer))) {
        g_cond_signal(priv->dataCondition);
        g_mutex_unlock(priv->bufferMutex);
        return FALSE;
    }

    g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, buffer);
    gst_buffer_unref(buffer);
    g_cond_signal(priv->dataCondition);
    g_mutex_unlock(priv->bufferMutex);
    return FALSE;
}
EXPORT_C #endif void gst_rtp_buffer_set_packet_len (GstBuffer * buffer, guint len) { guint oldlen; g_return_if_fail (GST_IS_BUFFER (buffer)); oldlen = GST_BUFFER_SIZE (buffer); if (oldlen < len) { guint8 *newdata; newdata = g_realloc (GST_BUFFER_MALLOCDATA (buffer), len); GST_BUFFER_MALLOCDATA (buffer) = newdata; GST_BUFFER_DATA (buffer) = newdata; } GST_BUFFER_SIZE (buffer) = len; /* remove any padding */ GST_RTP_HEADER_PADDING (buffer) = FALSE; }
/* appsink new-sample callback for the buffer-list test: pulls a sample,
 * checks the nth buffer carries the nth gint from values[], and advances
 * the counter pointed to by @p_counter. */
static GstFlowReturn
callback_function_sample (GstAppSink * appsink, gpointer p_counter)
{
  GstSample *sample;
  GstBuffer *buf;
  gint *p_int_counter = p_counter;

  sample = gst_app_sink_pull_sample (appsink);
  buf = gst_sample_get_buffer (sample);
  fail_unless (GST_IS_BUFFER (buf));

  /* buffer list has 3 buffers in two groups; each buffer carries one gint
   * from values[] in order — the three switch cases were byte-identical
   * except for the index, so collapse them into one bounds-checked path */
  if (*p_int_counter >= 0 && *p_int_counter < 3) {
    fail_unless_equals_int (gst_buffer_get_size (buf), sizeof (gint));
    gst_check_buffer_data (buf, &values[*p_int_counter], sizeof (gint));
  } else {
    g_warn_if_reached ();
  }

  gst_sample_unref (sample);

  *p_int_counter += 1;

  return GST_FLOW_OK;
}
/* Data probe cb to drop everything but count buffers and events.
 * The per-kind running count is stashed on the pad itself via
 * g_object_set_data() under the keys "buffer_count" / "event_count".
 * Always returns FALSE so every object is dropped. */
gboolean
probe_cb (GstPad * pad, GstMiniObject * obj, gpointer user_data)
{
  gint count = 0;
  /* const-correct: the value is always a string literal, and assigning a
   * literal to a non-const gchar* invites accidental writes (UB) */
  const gchar *count_type = NULL;

  GST_LOG_OBJECT (pad, "got data");

  if (GST_IS_BUFFER (obj)) {
    count_type = "buffer_count";
  } else if (GST_IS_EVENT (obj)) {
    count_type = "event_count";
  } else {
    g_assert_not_reached ();
  }

  /* increment and store count */
  count = GPOINTER_TO_INT (g_object_get_data (G_OBJECT (pad), count_type));
  count++;
  g_object_set_data (G_OBJECT (pad), count_type, GINT_TO_POINTER (count));

  /* drop everything */
  return FALSE;
}
/* Prepends the stored SPARK header (codec data) to @buffer so the decoder
 * receives the header together with the frame, keeping the frame's own
 * timestamp on the merged buffer.  NOTE(review): video_header is unreffed
 * here but the pointer is not cleared, and the incoming @buffer's ref is
 * not released after merging — confirm this path runs once per stream and
 * matches the caller's ownership convention. */
static GstBuffer*
gst_goo_decspark_codec_data_processing (GstGooVideoFilter *filter,
    GstBuffer *buffer)
{
  GstGooDecSpark *self = GST_GOO_DECSPARK (filter);

  if (GST_IS_BUFFER (GST_GOO_VIDEODEC (self)->video_header)) {
    GST_DEBUG_OBJECT (self, "Adding SPARK header info to buffer");
    GstBuffer *new_buf = gst_buffer_merge (GST_BUFFER (GST_GOO_VIDEODEC
            (self)->video_header), GST_BUFFER (buffer));

    /* gst_buffer_merge() will end up putting video_header's timestamp on
     * the new buffer, but actually we want buf's timestamp: */
    GST_BUFFER_TIMESTAMP (new_buf) = GST_BUFFER_TIMESTAMP (buffer);
    buffer = new_buf;

    gst_buffer_unref (GST_GOO_VIDEODEC (self)->video_header);
  }

  return buffer;
}
/**
 * gst_adapter_push:
 * @adapter: a #GstAdapter
 * @buf: a #GstBuffer to add to queue in the adapter
 *
 * Adds the data from @buf to the data stored inside @adapter and takes
 * ownership of the buffer.
 */
void
gst_adapter_push (GstAdapter * adapter, GstBuffer * buf)
{
  guint size;

  g_return_if_fail (GST_IS_ADAPTER (adapter));
  g_return_if_fail (GST_IS_BUFFER (buf));

  size = GST_BUFFER_SIZE (buf);
  adapter->size += size;

  /* Note: merging buffers at this point is premature. */
  if (G_UNLIKELY (adapter->buflist == NULL)) {
    GST_LOG_OBJECT (adapter, "pushing first %u bytes", size);
    adapter->buflist = adapter->buflist_end = g_slist_append (NULL, buf);
    /* the first buffer establishes the adapter's timestamp baseline */
    update_timestamp (adapter, buf);
  } else {
    /* Otherwise append to the end, and advance our end pointer */
    GST_LOG_OBJECT (adapter, "pushing %u bytes at end, size now %u", size,
        adapter->size);
    adapter->buflist_end = g_slist_append (adapter->buflist_end, buf);
    adapter->buflist_end = g_slist_next (adapter->buflist_end);
  }
}
/* Loop function for the RFC 2250 encoder: reads one MPEG chunk from the
 * packetizer and groups chunks into output packets.  Sequence / GOP /
 * picture start codes begin a new packet (tracking which headers it holds
 * in enc->flags); slice chunks are handed to the slice path which takes
 * ownership; all other chunks are merged into the current packet.
 * Non-buffer data (events) flushes any pending packet downstream before
 * default event handling. */
static void
gst_rfc2250_enc_loop (GstElement * element)
{
  GstRFC2250Enc *enc = GST_RFC2250_ENC (element);
  GstData *data;
  guint id;
  /* NOTE(review): mpeg2 is computed but unused in this function */
  gboolean mpeg2;

  data = gst_mpeg_packetize_read (enc->packetize);

  id = GST_MPEG_PACKETIZE_ID (enc->packetize);
  mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (enc->packetize);

  if (GST_IS_BUFFER (data)) {
    GstBuffer *buffer = GST_BUFFER (data);

    GST_DEBUG ("rfc2250enc: have chunk 0x%02X", id);

    switch (id) {
      case SEQUENCE_START_CODE:
        gst_rfc2250_enc_new_buffer (enc);
        enc->flags |= ENC_HAVE_SEQ;
        break;
      case GOP_START_CODE:
        if (enc->flags & ENC_HAVE_DATA) {
          gst_rfc2250_enc_new_buffer (enc);
        }
        enc->flags |= ENC_HAVE_GOP;
        break;
      case PICTURE_START_CODE:
        if (enc->flags & ENC_HAVE_DATA) {
          gst_rfc2250_enc_new_buffer (enc);
        }
        enc->flags |= ENC_HAVE_PIC;
        break;
      case EXT_START_CODE:
      case USER_START_CODE:
      case SEQUENCE_ERROR_START_CODE:
      case SEQUENCE_END_START_CODE:
        break;
      default:
        /* do this here because of the long range */
        if (id >= SLICE_MIN_START_CODE && id <= SLICE_MAX_START_CODE) {
          enc->flags |= ENC_HAVE_DATA;
          gst_rfc2250_enc_add_slice (enc, buffer);
          /* the slice path took ownership of the buffer */
          buffer = NULL;
          break;
        }
        break;
    }
    if (buffer) {
      /* chunk not consumed above: fold it into the current packet */
      gst_buffer_merge (enc->packet, buffer);
      enc->remaining -= GST_BUFFER_SIZE (buffer);
      gst_buffer_unref (buffer);
    }
  } else {
    /* event: push out any pending packet first, then handle the event */
    if (enc->packet) {
      gst_pad_push (enc->srcpad, GST_DATA (enc->packet));
      enc->packet = NULL;
      enc->flags = 0;
      enc->remaining = enc->MTU;
    }
    gst_pad_event_default (enc->sinkpad, GST_EVENT (data));
  }
}
/* Common implementation for pushing a buffer into the appsrc queue.  When
 * @steal_ref is TRUE ownership of @buffer transfers to the queue,
 * otherwise a ref is taken.  While the queue holds max_bytes or more this
 * either emits enough-data (once, on the first filled check) or blocks on
 * the condition variable; buffers are refused while flushing or after
 * EOS. */
static GstFlowReturn
gst_app_src_push_buffer_full (GstAppSrc * appsrc, GstBuffer * buffer,
    gboolean steal_ref)
{
  gboolean first = TRUE;
  GstAppSrcPrivate *priv;

  g_return_val_if_fail (GST_IS_APP_SRC (appsrc), GST_FLOW_ERROR);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);

  priv = appsrc->priv;

  g_mutex_lock (&priv->mutex);

  while (TRUE) {
    /* can't accept buffers when we are flushing or EOS */
    if (priv->flushing)
      goto flushing;

    if (priv->is_eos)
      goto eos;

    if (priv->max_bytes && priv->queued_bytes >= priv->max_bytes) {
      GST_DEBUG_OBJECT (appsrc,
          "queue filled (%" G_GUINT64_FORMAT " >= %" G_GUINT64_FORMAT ")",
          priv->queued_bytes, priv->max_bytes);

      if (first) {
        gboolean emit;

        emit = priv->emit_signals;
        /* only signal on the first push; the callback/signal runs without
         * the lock held */
        g_mutex_unlock (&priv->mutex);

        if (priv->callbacks.enough_data)
          priv->callbacks.enough_data (appsrc, priv->user_data);
        else if (emit)
          g_signal_emit (appsrc, gst_app_src_signals[SIGNAL_ENOUGH_DATA], 0,
              NULL);

        g_mutex_lock (&priv->mutex);
        /* continue to check for flushing/eos after releasing the lock */
        first = FALSE;
        continue;
      }

      if (priv->block) {
        GST_DEBUG_OBJECT (appsrc, "waiting for free space");
        /* we are filled, wait until a buffer gets popped or when we
         * flush. */
        g_cond_wait (&priv->cond, &priv->mutex);
      } else {
        /* no need to wait for free space, we just pump more data into the
         * queue hoping that the caller reacts to the enough-data signal and
         * stops pushing buffers. */
        break;
      }
    } else
      break;
  }

  GST_DEBUG_OBJECT (appsrc, "queueing buffer %p", buffer);
  if (!steal_ref)
    gst_buffer_ref (buffer);
  g_queue_push_tail (priv->queue, buffer);
  priv->queued_bytes += gst_buffer_get_size (buffer);
  /* wake up anyone waiting for data or for space */
  g_cond_broadcast (&priv->cond);
  g_mutex_unlock (&priv->mutex);

  return GST_FLOW_OK;

  /* ERRORS */
flushing:
  {
    GST_DEBUG_OBJECT (appsrc, "refuse buffer %p, we are flushing", buffer);
    if (steal_ref)
      gst_buffer_unref (buffer);
    g_mutex_unlock (&priv->mutex);
    return GST_FLOW_FLUSHING;
  }
eos:
  {
    GST_DEBUG_OBJECT (appsrc, "refuse buffer %p, we are EOS", buffer);
    if (steal_ref)
      gst_buffer_unref (buffer);
    g_mutex_unlock (&priv->mutex);
    return GST_FLOW_EOS;
  }
}
/* Drains one object from the cross-thread queue and handles it on this
 * (GL) thread: buffers are rendered, "allocate-eglimage" queries get an
 * EGLImage-backed buffer stored into their structure, and events (EOS)
 * trigger an internal flush.  The producer blocked in the push path is
 * woken via state->cond once the handled object is published in
 * popped_obj.  Returns FALSE so an idle source runs only once. */
static gboolean
handle_queued_objects (APP_STATE_T * state)
{
  GstMiniObject *object = NULL;

  g_mutex_lock (state->queue_lock);
  if (state->flushing) {
    /* flushing: nothing to handle, just wake any waiter */
    g_cond_broadcast (state->cond);
    goto beach;
  } else if (g_async_queue_length (state->queue) == 0) {
    goto beach;
  }

  if ((object = g_async_queue_try_pop (state->queue))) {
    if (GST_IS_BUFFER (object)) {
      GstBuffer *buffer = GST_BUFFER_CAST (object);
      update_image (state, buffer);
      render_scene (state);
      gst_buffer_unref (buffer);
      if (!SYNC_BUFFERS) {
        /* not synchronizing on buffers: don't report this one back */
        object = NULL;
      }
    } else if (GST_IS_QUERY (object)) {
      GstQuery *query = GST_QUERY_CAST (object);
      GstStructure *s = (GstStructure *) gst_query_get_structure (query);

      if (gst_structure_has_name (s, "eglglessink-allocate-eglimage")) {
        GstBuffer *buffer;
        GstVideoFormat format;
        gint width, height;
        GValue v = { 0, };

        if (!gst_structure_get_enum (s, "format", GST_TYPE_VIDEO_FORMAT,
                (gint *) & format) || !gst_structure_get_int (s, "width",
                &width) || !gst_structure_get_int (s, "height", &height)) {
          g_assert_not_reached ();
        }

        /* allocate the EGLImage-backed buffer and hand it back to the
         * querying thread through the query's structure */
        buffer =
            gst_egl_allocate_eglimage (state,
            GST_EGL_IMAGE_BUFFER_POOL (state->pool)->allocator, format,
            width, height);
        g_value_init (&v, G_TYPE_POINTER);
        g_value_set_pointer (&v, buffer);
        gst_structure_set_value (s, "buffer", &v);
        g_value_unset (&v);
      } else {
        g_assert_not_reached ();
      }
    } else if (GST_IS_EVENT (object)) {
      GstEvent *event = GST_EVENT_CAST (object);
      g_print ("\nevent %p %s\n", event,
          gst_event_type_get_name (GST_EVENT_TYPE (event)));

      switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_EOS:
          flush_internal (state);
          break;
        default:
          break;
      }
      gst_event_unref (event);
      object = NULL;
    }
  }

  if (object) {
    /* tell the producer which object was just handled and wake it */
    state->popped_obj = object;
    g_cond_broadcast (state->cond);
  }

beach:
  g_mutex_unlock (state->queue_lock);

  return FALSE;
}
EXPORT_C
#endif
/* Clips @buffer against @segment (TIME or DEFAULT format), trimming samples
 * that fall outside and adjusting timestamp, duration, size, data pointer
 * and offsets accordingly.  @rate is the sample rate and @frame_size the
 * bytes per frame.  Returns the (metadata-writable) clipped buffer, the
 * buffer unchanged when it has no timestamp, or NULL (unreffing @buffer)
 * when it lies completely outside the segment. */
GstBuffer *
gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate,
    gint frame_size)
{
  GstBuffer *ret;
  GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE;
  guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE;
  guint8 *data;
  guint size;

  /* track which metadata fields were valid on input: only those get
   * written back after clipping */
  gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end =
      TRUE;

  g_return_val_if_fail (segment->format == GST_FORMAT_TIME ||
      segment->format == GST_FORMAT_DEFAULT, buffer);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    /* No timestamp - assume the buffer is completely in the segment */
    return buffer;

  /* Get copies of the buffer metadata to change later.
   * Calculate the missing values for the calculations,
   * they won't be changed later though. */
  data = GST_BUFFER_DATA (buffer);
  size = GST_BUFFER_SIZE (buffer);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    duration = GST_BUFFER_DURATION (buffer);
  } else {
    change_duration = FALSE;
    duration = gst_util_uint64_scale (size / frame_size, GST_SECOND, rate);
  }

  if (GST_BUFFER_OFFSET_IS_VALID (buffer)) {
    offset = GST_BUFFER_OFFSET (buffer);
  } else {
    change_offset = FALSE;
    offset = 0;
  }

  if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) {
    offset_end = GST_BUFFER_OFFSET_END (buffer);
  } else {
    change_offset_end = FALSE;
    offset_end = offset + size / frame_size;
  }

  if (segment->format == GST_FORMAT_TIME) {
    /* Handle clipping for GST_FORMAT_TIME */
    gint64 start, stop, cstart, cstop, diff;

    start = timestamp;
    stop = timestamp + duration;

    if (gst_segment_clip (segment, GST_FORMAT_TIME, start, stop,
            &cstart, &cstop)) {
      /* trim the front: advance timestamp/offset and move the data
       * pointer forward by the clipped number of frames */
      diff = cstart - start;
      if (diff > 0) {
        timestamp = cstart;

        if (change_duration)
          duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset)
          offset += diff;
        data += diff * frame_size;
        size -= diff * frame_size;
      }

      /* trim the back: shrink duration/offset_end/size */
      diff = stop - cstop;
      if (diff > 0) {
        /* duration is always valid if stop is valid */
        duration -= diff;

        diff = gst_util_uint64_scale (diff, rate, GST_SECOND);
        if (change_offset_end)
          offset_end -= diff;
        size -= diff * frame_size;
      }
    } else {
      /* completely outside the segment */
      gst_buffer_unref (buffer);
      return NULL;
    }
  } else {
    /* Handle clipping for GST_FORMAT_DEFAULT */
    gint64 start, stop, cstart, cstop, diff;

    g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer);

    start = offset;
    stop = offset_end;

    if (gst_segment_clip (segment, GST_FORMAT_DEFAULT, start, stop,
            &cstart, &cstop)) {
      /* trim the front in frame units, recomputing the timestamp */
      diff = cstart - start;
      if (diff > 0) {
        offset = cstart;

        timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate);

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        data += diff * frame_size;
        size -= diff * frame_size;
      }

      /* trim the back in frame units */
      diff = stop - cstop;
      if (diff > 0) {
        offset_end = cstop;

        if (change_duration)
          duration -= gst_util_uint64_scale (diff, GST_SECOND, rate);

        size -= diff * frame_size;
      }
    } else {
      /* completely outside the segment */
      gst_buffer_unref (buffer);
      return NULL;
    }
  }

  /* Get a metadata writable buffer and apply all changes */
  ret = gst_buffer_make_metadata_writable (buffer);

  GST_BUFFER_TIMESTAMP (ret) = timestamp;
  GST_BUFFER_SIZE (ret) = size;
  GST_BUFFER_DATA (ret) = data;

  if (change_duration)
    GST_BUFFER_DURATION (ret) = duration;
  if (change_offset)
    GST_BUFFER_OFFSET (ret) = offset;
  if (change_offset_end)
    GST_BUFFER_OFFSET_END (ret) = offset_end;

  return ret;
}