/* Build a "player-clients" JSON message structure describing every
 * player client currently known to @manager that has a live connection.
 * Returns a newly allocated GstStructure owned by the caller. */
static GstStructure *
make_player_clients_list_msg (SnraManager * manager)
{
  GstStructure *msg;
  GValue clients = { 0, };
  GList *item;

  g_value_init (&clients, GST_TYPE_ARRAY);
  msg = gst_structure_new ("json", "msg-type", G_TYPE_STRING,
      "player-clients", NULL);

  for (item = manager->player_info; item != NULL; item = g_list_next (item)) {
    SnraPlayerInfo *info = (SnraPlayerInfo *) (item->data);
    GstStructure *entry;
    GValue entry_val = { 0, };

    /* only list clients that are actually connected */
    if (info->conn == NULL)
      continue;

    entry = gst_structure_new ("client",
        "client-id", G_TYPE_INT64, (gint64) info->id,
        "enabled", G_TYPE_BOOLEAN, info->enabled,
        "volume", G_TYPE_DOUBLE, info->volume,
        "host", G_TYPE_STRING, info->host, NULL);

    /* the GValue holds a copy, so the local structure is freed afterwards */
    g_value_init (&entry_val, GST_TYPE_STRUCTURE);
    gst_value_set_structure (&entry_val, entry);
    gst_value_array_append_value (&clients, &entry_val);
    g_value_unset (&entry_val);
    gst_structure_free (entry);
  }

  /* transfers ownership of the array into the message */
  gst_structure_take_value (msg, "player-clients", &clients);
  return msg;
}
/* Store the list of theora header buffers on @caps as a fixed
 * "streamheader" GstValueArray. Returns writable caps. */
static GstCaps *
theora_set_header_on_caps (GstCaps * caps, GList * buffers)
{
  GstStructure *structure;
  GValue headers = { 0 };
  GList *item;

  caps = gst_caps_make_writable (caps);
  structure = gst_caps_get_structure (caps, 0);

  /* collect every header buffer into a fixed-size array value */
  g_value_init (&headers, GST_TYPE_ARRAY);
  for (item = buffers; item != NULL; item = item->next) {
    GValue buf_val = { 0 };

    g_value_init (&buf_val, GST_TYPE_BUFFER);
    gst_value_set_buffer (&buf_val, item->data);
    gst_value_array_append_value (&headers, &buf_val);
    g_value_unset (&buf_val);
  }

  /* array ownership moves into the caps structure */
  gst_structure_take_value (structure, "streamheader", &headers);

  return caps;
}
// Fill @structure with the fixed width/height of @res and a GST_TYPE_LIST
// of all framerates @format supports at that resolution. Always returns true.
bool fill_structure_fixed_resolution (GstStructure* structure,
                                      const tcam::VideoFormatDescription& format,
                                      const tcam_resolution_description& res)
{
    GValue fps_list = G_VALUE_INIT;
    g_value_init(&fps_list, GST_TYPE_LIST);

    // translate each supported rate into a GstFraction list entry
    for (double rate : format.get_frame_rates(res))
    {
        int num = 0;
        int den = 0;
        gst_util_double_to_fraction(rate, &num, &den);

        GValue fraction = G_VALUE_INIT;
        g_value_init(&fraction, GST_TYPE_FRACTION);
        gst_value_set_fraction(&fraction, num, den);
        gst_value_list_append_value(&fps_list, &fraction);
        g_value_unset(&fraction);
    }

    gst_structure_set(structure,
                      "width", G_TYPE_INT, res.max_size.width,
                      "height", G_TYPE_INT, res.max_size.height,
                      NULL);

    // list ownership moves into the structure
    gst_structure_take_value(structure, "framerate", &fps_list);

    return true;
}
/* Preview metadata callback: converts droidmedia face-detection results
 * into a "regions-of-interest" element message posted on the bus.
 *
 * @user: the GstDroidCamSrcDev that registered this callback
 * @faces: array of detected faces; coordinates are in [-1000, 1000]
 * @num_faces: number of entries in @faces
 *
 * Fix: @num_faces is a size_t, so printing it with "%d" (and looping with a
 * signed gint) is undefined behaviour on platforms where size_t and int
 * differ; use G_GSIZE_FORMAT and a size_t loop counter instead.
 */
static void
gst_droidcamsrc_dev_preview_metadata_callback (void *user,
    const DroidMediaCameraFace * faces, size_t num_faces)
{
  GstDroidCamSrcDev *dev = (GstDroidCamSrcDev *) user;
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));
  GstStructure *s;
  gint width, height;
  GValue regions = G_VALUE_INIT;
  size_t i;

  GST_DEBUG_OBJECT (src, "dev preview metadata callback");

  GST_INFO_OBJECT (src, "camera detected %" G_GSIZE_FORMAT " faces",
      num_faces);

  /* snapshot the frame dimensions under the object lock */
  GST_OBJECT_LOCK (src);
  width = src->width;
  height = src->height;
  GST_OBJECT_UNLOCK (src);

  s = gst_structure_new ("regions-of-interest", "frame-width", G_TYPE_UINT,
      width, "frame-height", G_TYPE_UINT, height, "type", G_TYPE_UINT,
      GST_DROIDCAMSRC_ROI_FACE_AREA, NULL);

  g_value_init (&regions, GST_TYPE_LIST);

  for (i = 0; i < num_faces; i++) {
    GValue region = G_VALUE_INIT;
    int x, y, w, h, r, b;
    GstStructure *rs;

    g_value_init (&region, GST_TYPE_STRUCTURE);

    GST_DEBUG_OBJECT (src,
        "face %" G_GSIZE_FORMAT ": score=%d, left=%d, top=%d, right=%d, bottom=%d",
        i, faces[i].score, faces[i].left, faces[i].top, faces[i].right,
        faces[i].bottom);

    /* map the [-1000, 1000] face coordinates onto frame pixels */
    x = gst_util_uint64_scale (faces[i].left + 1000, width, 2000);
    y = gst_util_uint64_scale (faces[i].top + 1000, height, 2000);
    r = gst_util_uint64_scale (faces[i].right + 1000, width, 2000);
    b = gst_util_uint64_scale (faces[i].bottom + 1000, height, 2000);
    w = r - x;
    h = b - y;

    rs = gst_structure_new ("region-of-interest",
        "region-x", G_TYPE_UINT, x,
        "region-y", G_TYPE_UINT, y,
        "region-w", G_TYPE_UINT, w,
        "region-h", G_TYPE_UINT, h,
        "region-id", G_TYPE_INT, faces[i].id,
        "region-score", G_TYPE_INT, faces[i].score, NULL);

    /* the GValue copies the structure, so free the local one */
    gst_value_set_structure (&region, rs);
    gst_structure_free (rs);
    gst_value_list_append_value (&regions, &region);
    g_value_unset (&region);
  }

  /* transfers ownership of the region list into the message structure */
  gst_structure_take_value (s, "regions", &regions);
  gst_droidcamsrc_post_message (src, s);
}
/*
 * _gst_caps_set_buffer_array:
 * @caps: (transfer full): a #GstCaps
 * @field: name of the caps field to store the buffers under
 * @buf: first header buffer, followed by further buffers, NULL-terminated
 *
 * Stores the given NULL-terminated buffer list as a fixed GstValueArray
 * under @field on the first structure of @caps.
 *
 * Returns: (transfer full): writable caps with the field set, or NULL
 * if a precondition fails
 */
static GstCaps *
_gst_caps_set_buffer_array (GstCaps * caps, const gchar * field,
    GstBuffer * buf, ...)
{
  GstStructure *structure;
  va_list args;
  GstBuffer *next;
  GValue array = { 0 };

  g_return_val_if_fail (caps != NULL, NULL);
  g_return_val_if_fail (gst_caps_is_fixed (caps), NULL);
  g_return_val_if_fail (field != NULL, NULL);

  caps = gst_caps_make_writable (caps);
  structure = gst_caps_get_structure (caps, 0);

  g_value_init (&array, GST_TYPE_ARRAY);

  /* walk the varargs until the NULL terminator */
  va_start (args, buf);
  for (next = buf; next != NULL; next = va_arg (args, GstBuffer *)) {
    GValue value = { 0 };

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, next);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  }
  va_end (args);

  /* array ownership moves into the caps structure */
  gst_structure_take_value (structure, field, &array);

  return caps;
}
/* Flag the (up to three) stored theora stream headers as header buffers
 * and attach them to @caps as a "streamheader" array. */
static void
theora_parse_set_header_on_caps (GstTheoraParse * parse, GstCaps * caps)
{
  GstStructure *structure;
  GValue array = { 0 };
  gint idx;

  structure = gst_caps_get_structure (caps, 0);

  g_value_init (&array, GST_TYPE_ARRAY);

  for (idx = 0; idx < 3; idx++) {
    GstBuffer *header = parse->streamheader[idx];
    GValue value = { 0 };

    if (header == NULL)
      continue;

    /* make writable so the HEADER flag can be set; keep the new
     * buffer pointer in the streamheader slot */
    header = gst_buffer_make_writable (header);
    GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_HEADER);
    parse->streamheader[idx] = header;

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, header);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  }

  /* array ownership moves into the caps structure */
  gst_structure_take_value (structure, "streamheader", &array);
}
/* Create a "level" element message for the given timestamp/duration,
 * pre-populated with empty "rms", "peak" and "decay" GValueArrays that
 * the caller fills in afterwards. */
static GstMessage *
gst_level_message_new (GstLevel * level, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (level);
  GstStructure *s;
  GstClockTime endtime, running_time, stream_time;
  static const gchar *array_fields[] = { "rms", "peak", "decay" };
  guint i;

  running_time = gst_segment_to_running_time (&trans->segment,
      GST_FORMAT_TIME, timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment,
      GST_FORMAT_TIME, timestamp);
  /* endtime is for backwards compatibility */
  endtime = stream_time + duration;

  s = gst_structure_new ("level",
      "endtime", GST_TYPE_CLOCK_TIME, endtime,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration, NULL);

  /* attach one empty value array per level field; ownership of each
   * array (and its wrapping GValue) moves into the structure */
  for (i = 0; i < G_N_ELEMENTS (array_fields); i++) {
    GValue v = { 0, };

    g_value_init (&v, G_TYPE_VALUE_ARRAY);
    g_value_take_boxed (&v, g_value_array_new (0));
    gst_structure_take_value (s, array_fields[i], &v);
  }

  return gst_message_new_element (GST_OBJECT (level), s);
}
/* Add a "format" field to caps structure @s listing the raw video formats
 * this x265 build can accept, based on the build's maximum bit depth and
 * the requested chroma format (0 = unrestricted).
 * 10-bit formats use the LE/BE variant matching the host byte order.
 * If the chroma format is unsupported, @s is left without a "format" field. */
static void
gst_x265_enc_add_x265_chroma_format (GstStructure * s,
    int x265_chroma_format_local)
{
  GValue fmt = G_VALUE_INIT;

  if (x265_max_bit_depth >= 10) {
    GST_INFO ("This x265 build supports %d-bit depth", x265_max_bit_depth);
    if (x265_chroma_format_local == 0) {
      /* unrestricted: offer both 4:2:0 and 4:4:4, 8- and 10-bit */
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
      set_value (&fmt, 4, "I420", "Y444", "I420_10LE", "Y444_10LE");
#else
      set_value (&fmt, 4, "I420", "Y444", "I420_10BE", "Y444_10BE");
#endif
    } else if (x265_chroma_format_local == X265_CSP_I444) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
      set_value (&fmt, 2, "Y444", "Y444_10LE");
#else
      set_value (&fmt, 2, "Y444", "Y444_10BE");
#endif
    } else if (x265_chroma_format_local == X265_CSP_I420) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
      set_value (&fmt, 2, "I420", "I420_10LE");
#else
      set_value (&fmt, 2, "I420", "I420_10BE");
#endif
    } else {
      GST_ERROR ("Unsupported chroma format %d", x265_chroma_format_local);
    }
  } else if (x265_max_bit_depth == 8) {
    /* 8-bit-only build: no 10-bit variants offered */
    GST_INFO ("This x265 build supports 8-bit depth");
    if (x265_chroma_format_local == 0) {
      set_value (&fmt, 2, "I420", "Y444");
    } else if (x265_chroma_format_local == X265_CSP_I444) {
      set_value (&fmt, 1, "Y444");
    } else if (x265_chroma_format_local == X265_CSP_I420) {
      set_value (&fmt, 1, "I420");
    } else {
      GST_ERROR ("Unsupported chroma format %d", x265_chroma_format_local);
    }
  }

  /* fmt stays G_TYPE_INVALID when no branch filled it in */
  if (G_VALUE_TYPE (&fmt) != G_TYPE_INVALID)
    gst_structure_take_value (s, "format", &fmt);
}
/* Clamp the framerate in a caps structure to be a smaller range then
 * [1...max_rate], otherwise return false */
static gboolean
gst_video_max_rate_clamp_structure (GstStructure * s, gint maxrate,
    gint * min_num, gint * min_denom, gint * max_num, gint * max_denom)
{
  gboolean ret = FALSE;

  if (!gst_structure_has_field (s, "framerate")) {
    /* No framerate field implies any framerate, clamping would result in
     * [1..max_rate] so not a real subset */
    goto out;
  } else {
    const GValue *v;
    GValue intersection = { 0, };
    GValue clamp = { 0, };
    gint tmp_num, tmp_denom;

    /* NOTE(review): clamp range lower bound is 0/1 here although the
     * comment above says [1..max_rate] — presumably intentional to keep
     * very low rates; confirm against callers */
    g_value_init (&clamp, GST_TYPE_FRACTION_RANGE);
    gst_value_set_fraction_range_full (&clamp, 0, 1, maxrate, 1);

    v = gst_structure_get_value (s, "framerate");
    ret = gst_value_intersect (&intersection, v, &clamp);
    g_value_unset (&clamp);

    if (!ret)
      goto out;

    /* extremes of the clamped intersection fill all four out-params */
    gst_value_fraction_get_extremes (&intersection,
        min_num, min_denom, max_num, max_denom);

    /* then max_num/max_denom are overwritten with the extremes of the
     * ORIGINAL value, and capped at maxrate below */
    gst_value_fraction_get_extremes (v, &tmp_num, &tmp_denom,
        max_num, max_denom);

    if (gst_util_fraction_compare (*max_num, *max_denom, maxrate, 1) > 0) {
      *max_num = maxrate;
      *max_denom = 1;
    }

    /* intersection ownership moves into the structure */
    gst_structure_take_value (s, "framerate", &intersection);
  }

out:
  return ret;
}
/* Store @wavelengthno integers from @wavelengths on @structure as a
 * fixed GstValueArray under the "wavelength_ids" field. */
static void
add_wavelength_list_to_struct (GstStructure * structure, gint wavelengthno,
    gint * wavelengths)
{
  GValue id_array = { 0 };
  gint idx;

  g_value_init (&id_array, GST_TYPE_ARRAY);

  for (idx = 0; idx < wavelengthno; idx++) {
    GValue id = { 0 };

    g_value_init (&id, G_TYPE_INT);
    g_value_set_int (&id, wavelengths[idx]);
    gst_value_array_append_value (&id_array, &id);
    g_value_unset (&id);
  }

  /* array ownership moves into the structure */
  gst_structure_take_value (structure, "wavelength_ids", &id_array);
}
/* Attach the three vorbis stream header buffers (ident, comment, setup)
 * held in parse->streamheader to @caps as a "streamheader" array,
 * flagging each one as a header buffer. */
static void
vorbis_parse_set_header_on_caps (GstVorbisParse * parse, GstCaps * caps)
{
  GstBuffer *bufs[3];
  GstStructure *structure;
  GValue array = { 0 };
  guint i;

  /* exactly three headers must be present */
  g_assert (parse);
  g_assert (parse->streamheader);
  g_assert (parse->streamheader->next);
  g_assert (parse->streamheader->next->next);

  bufs[0] = parse->streamheader->data;
  g_assert (bufs[0]);
  bufs[1] = parse->streamheader->next->data;
  g_assert (bufs[1]);
  bufs[2] = parse->streamheader->next->next->data;
  g_assert (bufs[2]);

  structure = gst_caps_get_structure (caps, 0);

  /* flag and append each header buffer in order */
  g_value_init (&array, GST_TYPE_ARRAY);
  for (i = 0; i < G_N_ELEMENTS (bufs); i++) {
    GValue value = { 0 };

    GST_BUFFER_FLAG_SET (bufs[i], GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, bufs[i]);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  }

  /* array ownership moves into the caps structure */
  gst_structure_take_value (structure, "streamheader", &array);
}
/* Add a "format" field to caps structure @s listing the 8-bit raw video
 * formats supported for the given chroma format (0 = unrestricted).
 * Leaves @s untouched for unsupported chroma formats. */
static void
gst_x265_enc_add_x265_chroma_format (GstStructure * s,
    int x265_chroma_format_local)
{
  GValue fmt = G_VALUE_INIT;

  GST_INFO ("This x265 build supports 8-bit depth");

  switch (x265_chroma_format_local) {
    case 0:
      /* unrestricted: both 4:2:0 and 4:4:4 */
      set_value (&fmt, 2, "I420", "Y444");
      break;
    case X265_CSP_I444:
      set_value (&fmt, 1, "Y444");
      break;
    case X265_CSP_I420:
      set_value (&fmt, 1, "I420");
      break;
    default:
      GST_ERROR ("Unsupported chroma format %d", x265_chroma_format_local);
      break;
  }

  /* fmt stays G_TYPE_INVALID when no branch filled it in */
  if (G_VALUE_TYPE (&fmt) != G_TYPE_INVALID)
    gst_structure_take_value (s, "format", &fmt);
}
/* Allocate a GstBuffer whose data pointer aliases the pixel storage of a
 * freshly created PlanarYCbCrImage, so decoded video lands directly in the
 * image container. The created image is returned through @aImage; the
 * buffer through @aBuf. Always returns GST_FLOW_OK.
 * NOTE(review): aPad/aOffset/aCaps are unused here — presumably required
 * by the pad alloc callback signature; confirm against the caller. */
GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
    guint64 aOffset, guint aSize, GstCaps* aCaps, GstBuffer** aBuf,
    nsRefPtr<PlanarYCbCrImage>& aImage)
{
  /* allocate an image using the container */
  ImageContainer* container = mDecoder->GetImageContainer();
  ImageFormat format = PLANAR_YCBCR;
  PlanarYCbCrImage* img =
      reinterpret_cast<PlanarYCbCrImage*>(container->CreateImage(&format, 1).get());
  /* dont_AddRef: CreateImage().get() already left us a reference to adopt */
  nsRefPtr<PlanarYCbCrImage> image = dont_AddRef(img);

  /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
  GstBuffer* buf = gst_buffer_new();
  GST_BUFFER_SIZE(buf) = aSize;
  /* allocate the actual YUV buffer; the GstBuffer borrows this memory */
  GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);

  aImage = image;

#if GST_VERSION_MICRO >= 36
  /* create a GBoxed handle to hold the image */
  BufferData* data = new BufferData(image);

  /* store it in a GValue so we can put it in a GstStructure */
  GValue value = {0,};
  g_value_init(&value, buffer_data_get_type());
  g_value_take_boxed(&value, data);

  /* store the value in the structure; ownership of value moves in */
  GstStructure* structure = gst_structure_new("moz-reader-data", nullptr);
  gst_structure_take_value(structure, "image", &value);

  /* and attach the structure to the buffer so the image's lifetime is
   * tied to the buffer's */
  gst_buffer_set_qdata(buf, g_quark_from_string("moz-reader-data"), structure);
#endif

  *aBuf = buf;
  return GST_FLOW_OK;
}
/* Attach copies of the kate header buffers to @caps as a "streamheader"
 * array; each copy is flagged as a header buffer.
 * Returns writable caps, or NULL when caps or headers are missing. */
GstCaps *
gst_kate_util_set_header_on_caps (GstElement * element, GstCaps * caps,
    GList * headers)
{
  GstStructure *structure;
  GValue array = { 0 };
  GList *walk;

  GST_LOG_OBJECT (element, "caps: %" GST_PTR_FORMAT, caps);

  if (G_UNLIKELY (!caps))
    return NULL;
  if (G_UNLIKELY (!headers))
    return NULL;

  caps = gst_caps_make_writable (caps);
  structure = gst_caps_get_structure (caps, 0);

  g_value_init (&array, GST_TYPE_ARRAY);

  for (walk = headers; walk != NULL; walk = walk->next) {
    GstBuffer *original = walk->data;
    GstBuffer *copy;
    GValue value = { 0 };

    g_assert (original);

    /* a private copy is flagged as header; the GValue takes ownership */
    copy = gst_buffer_copy (original);
    GST_BUFFER_FLAG_SET (copy, GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_take_buffer (&value, copy);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  }

  /* array ownership moves into the caps structure */
  gst_structure_take_value (structure, "streamheader", &array);

  GST_LOG_OBJECT (element, "here are the newly set caps: %" GST_PTR_FORMAT,
      caps);

  return caps;
}
/* Compute the per-channel RMS level of the audio in @inbuf and wrap the
 * result in a "videoframe-audiolevel" element message carrying a "rms"
 * GValueArray plus running-time and duration fields.
 * Accumulates into and then resets self->CS[], and advances
 * self->total_frames, so this is stateful across calls.
 * Returns NULL (via g_return_val_if_fail) if the sample count is not a
 * multiple of the channel count. */
static GstMessage *
update_rms_from_buffer (GstVideoFrameAudioLevel * self, GstBuffer * inbuf)
{
  GstMapInfo map;
  guint8 *in_data;
  gsize in_size;
  gdouble CS;
  guint i;
  guint num_frames, frames;
  guint num_int_samples = 0;    /* number of interleaved samples
                                 * ie. total count for all channels combined */
  gint channels, rate, bps;
  GValue v = G_VALUE_INIT;
  GValue va = G_VALUE_INIT;
  GValueArray *a;
  GstStructure *s;
  GstMessage *msg;
  GstClockTime duration, running_time;

  channels = GST_AUDIO_INFO_CHANNELS (&self->ainfo);
  bps = GST_AUDIO_INFO_BPS (&self->ainfo);
  rate = GST_AUDIO_INFO_RATE (&self->ainfo);

  gst_buffer_map (inbuf, &map, GST_MAP_READ);
  in_data = map.data;
  in_size = map.size;

  num_int_samples = in_size / bps;

  GST_LOG_OBJECT (self, "analyzing %u sample frames at ts %" GST_TIME_FORMAT,
      num_int_samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  g_return_val_if_fail (num_int_samples % channels == 0, NULL);

  num_frames = num_int_samples / channels;
  frames = num_frames;
  duration = GST_FRAMES_TO_CLOCK_TIME (frames, rate);
  if (num_frames > 0) {
    /* accumulate cumulative squares per channel (interleaved layout:
     * channel i starts at byte offset bps*i) */
    for (i = 0; i < channels; ++i) {
      self->process (in_data + (bps * i), num_int_samples, channels, &CS);
      GST_LOG_OBJECT (self,
          "[%d]: cumulative squares %lf, over %d samples/%d channels",
          i, CS, num_int_samples, channels);
      self->CS[i] += CS;
    }
    in_data += num_frames * bps;
    self->total_frames += num_frames;
  }
  /* running time derived from total frames processed so far */
  running_time =
      self->first_time + gst_util_uint64_scale (self->total_frames,
      GST_SECOND, rate);

  a = g_value_array_new (channels);
  s = gst_structure_new ("videoframe-audiolevel",
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration, NULL);
  g_value_init (&v, G_TYPE_DOUBLE);
  g_value_init (&va, G_TYPE_VALUE_ARRAY);

  for (i = 0; i < channels; i++) {
    gdouble rms;
    if (frames == 0 || self->CS[i] == 0) {
      rms = 0;                  /* empty buffer */
    } else {
      rms = sqrt (self->CS[i] / frames);
    }
    /* reset the accumulator for the next buffer */
    self->CS[i] = 0.0;
    g_value_set_double (&v, rms);
    g_value_array_append (a, &v);
  }
  /* the GValue (and then the structure) take ownership of the array */
  g_value_take_boxed (&va, a);
  gst_structure_take_value (s, "rms", &va);
  msg = gst_message_new_element (GST_OBJECT (self), s);
  gst_buffer_unmap (inbuf, &map);
  return msg;
}
/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits
 * pages to output pad. */
static GstFlowReturn
gst_ogg_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstOggParse *ogg;
  GstFlowReturn result = GST_FLOW_OK;
  gint ret = -1;
  guint32 serialno;
  GstBuffer *pagebuffer;
  GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer);

  ogg = GST_OGG_PARSE (parent);

  GST_LOG_OBJECT (ogg, "Chain function received buffer of size %"
      G_GSIZE_FORMAT, gst_buffer_get_size (buffer));

  gst_ogg_parse_submit_buffer (ogg, buffer);

  /* pull complete pages out of the ogg sync layer until it needs more data
   * or a downstream push fails */
  while (ret != 0 && result == GST_FLOW_OK) {
    ogg_page page;

    /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can
     * track how many bytes the ogg layer discarded (in the case of sync errors,
     * etc.); this allows us to accurately track the current stream offset */
    ret = ogg_sync_pageseek (&ogg->sync, &page);
    if (ret == 0) {
      /* need more data, that's fine... */
      break;
    } else if (ret < 0) {
      /* discontinuity; track how many bytes we skipped (-ret) */
      ogg->offset -= ret;
    } else {
      gint64 granule = ogg_page_granulepos (&page);
#ifndef GST_DISABLE_GST_DEBUG
      int bos = ogg_page_bos (&page);
#endif
      guint64 startoffset = ogg->offset;
      GstOggStream *stream;
      gboolean keyframe;

      serialno = ogg_page_serialno (&page);
      stream = gst_ogg_parse_find_stream (ogg, serialno);

      GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT,
          GST_TIME_ARGS (buffertimestamp));

      if (stream) {
        /* timestamp from the granulepos; only the video stream's pages can
         * be keyframes when a video stream is present */
        buffertimestamp = gst_ogg_stream_get_end_time_for_granulepos (stream,
            granule);
        if (ogg->video_stream) {
          if (stream == ogg->video_stream) {
            keyframe = gst_ogg_stream_granulepos_is_key_frame (stream, granule);
          } else {
            keyframe = FALSE;
          }
        } else {
          keyframe = TRUE;
        }
      } else {
        buffertimestamp = GST_CLOCK_TIME_NONE;
        keyframe = TRUE;
      }
      pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset,
          buffertimestamp);

      /* We read out 'ret' bytes, so we set the next offset appropriately */
      ogg->offset += ret;

      GST_LOG_OBJECT (ogg,
          "processing ogg page (serial %08x, pageno %ld, "
          "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %"
          G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ") keyframe=%d",
          serialno, ogg_page_pageno (&page), granule, bos,
          startoffset, ogg->offset, keyframe);

      if (ogg_page_bos (&page)) {
        /* If we've seen this serialno before, this is technically an error,
         * we log this case but accept it - this one replaces the previous
         * stream with this serialno. We can do this since we're streaming, and
         * not supporting seeking... */
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (stream != NULL) {
          GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %08x "
              "at offset %" G_GINT64_FORMAT, serialno, ogg->offset);
        }

        if (ogg->last_page_not_bos) {
          GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new "
              "chain starting with serial %u", serialno);
          gst_ogg_parse_delete_all_streams (ogg);
        }

        stream = gst_ogg_parse_new_stream (ogg, &page);

        ogg->last_page_not_bos = FALSE;

        gst_buffer_ref (pagebuffer);
        stream->headers = g_list_append (stream->headers, pagebuffer);

        if (!ogg->in_headers) {
          GST_LOG_OBJECT (ogg,
              "Found start of new chain at offset %" G_GUINT64_FORMAT,
              startoffset);
          ogg->in_headers = 1;
        }

        /* For now, we just keep the header buffer in the stream->headers list;
         * it actually gets output once we've collected the entire set */
      } else {
        /* Non-BOS page. Either: we're outside headers, and this isn't a
         * header (normal data), outside headers and this is (error!), inside
         * headers, this is (append header), or inside headers and this isn't
         * (we've found the end of headers; flush the lot!)
         *
         * Before that, we flag that the last page seen (this one) was not a
         * BOS page; that way we know that when we next see a BOS page it's a
         * new chain, and we can flush all existing streams.
         */
        page_type type;
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (!stream) {
          GST_LOG_OBJECT (ogg,
              "Non-BOS page unexpectedly found at %" G_GINT64_FORMAT,
              ogg->offset);
          goto failure;
        }

        ogg->last_page_not_bos = TRUE;

        type = gst_ogg_parse_is_header (ogg, stream, &page);

        if (type == PAGE_PENDING && ogg->in_headers) {
          /* can't yet tell if this is a header; park it until we know */
          gst_buffer_ref (pagebuffer);
          stream->unknown_pages = g_list_append (stream->unknown_pages,
              pagebuffer);
        } else if (type == PAGE_HEADER) {
          if (!ogg->in_headers) {
            GST_LOG_OBJECT (ogg, "Header page unexpectedly found outside "
                "headers at offset %" G_GINT64_FORMAT, ogg->offset);
            goto failure;
          } else {
            /* Append the header to the buffer list, after any unknown previous
             * pages */
            stream->headers = g_list_concat (stream->headers,
                stream->unknown_pages);
            g_list_free (stream->unknown_pages);
            gst_buffer_ref (pagebuffer);
            stream->headers = g_list_append (stream->headers, pagebuffer);
          }
        } else {
          /* PAGE_DATA, or PAGE_PENDING but outside headers */
          if (ogg->in_headers) {
            /* First non-header page... set caps, flush headers.
             *
             * First up, we build a single GValue list of all the pagebuffers
             * we're using for the headers, in order.
             * Then we set this on the caps structure. Then we can start pushing
             * buffers for the headers, and finally we send this non-header
             * page.
             */
            GstCaps *caps;
            GstStructure *structure;
            GValue array = { 0 };
            gint count = 0;
            gboolean found_pending_headers = FALSE;
            GSList *l;

            g_value_init (&array, GST_TYPE_ARRAY);

            /* first header of every stream first... */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;

              if (g_list_length (stream->headers) == 0) {
                GST_LOG_OBJECT (ogg, "No primary header found for stream %08x",
                    stream->serialno);
                goto failure;
              }

              gst_ogg_parse_append_header (&array,
                  GST_BUFFER (stream->headers->data));
              count++;
            }

            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *j;

              /* already appended the first header, now do headers 2-N */
              for (j = stream->headers->next; j != NULL; j = j->next) {
                gst_ogg_parse_append_header (&array, GST_BUFFER (j->data));
                count++;
              }
            }

            caps = gst_pad_query_caps (ogg->srcpad, NULL);
            caps = gst_caps_make_writable (caps);

            structure = gst_caps_get_structure (caps, 0);
            /* array ownership moves into the caps structure */
            gst_structure_take_value (structure, "streamheader", &array);

            gst_pad_set_caps (ogg->srcpad, caps);

            if (ogg->caps)
              gst_caps_unref (ogg->caps);
            ogg->caps = caps;

            GST_LOG_OBJECT (ogg, "Set \"streamheader\" caps with %d buffers "
                "(one per page)", count);

            /* Now, we do the same thing, but push buffers... */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GstBuffer *buf = GST_BUFFER (stream->headers->data);

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;
            }
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *j;

              /* pushed the first one for each stream already, now do 2-N */
              for (j = stream->headers->next; j != NULL; j = j->next) {
                GstBuffer *buf = GST_BUFFER (j->data);

                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
            }

            ogg->in_headers = 0;

            /* And finally the pending data pages */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *k;

              if (stream->unknown_pages == NULL)
                continue;

              if (found_pending_headers) {
                GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at "
                    "approximate offset %" G_GINT64_FORMAT, ogg->offset);
              }
              found_pending_headers = TRUE;

              GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers",
                  g_list_length (stream->unknown_pages) + 1);

              for (k = stream->unknown_pages; k != NULL; k = k->next) {
                GstBuffer *buf = GST_BUFFER (k->data);

                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
              g_list_foreach (stream->unknown_pages,
                  (GFunc) gst_mini_object_unref, NULL);
              g_list_free (stream->unknown_pages);
              stream->unknown_pages = NULL;
            }
          }

          if (granule == -1) {
            /* no granulepos yet: hold the page until one arrives */
            stream->stored_buffers = g_list_append (stream->stored_buffers,
                pagebuffer);
          } else {
            /* flush any stored pages with this timestamp, then this page */
            while (stream->stored_buffers) {
              GstBuffer *buf = stream->stored_buffers->data;

              buf = gst_buffer_make_writable (buf);

              GST_BUFFER_TIMESTAMP (buf) = buffertimestamp;
              if (!keyframe) {
                GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
              } else {
                /* only the first flushed buffer keeps the keyframe status */
                keyframe = FALSE;
              }

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;

              stream->stored_buffers =
                  g_list_delete_link (stream->stored_buffers,
                  stream->stored_buffers);
            }

            pagebuffer = gst_buffer_make_writable (pagebuffer);
            if (!keyframe) {
              GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT);
            } else {
              keyframe = FALSE;
            }

            result = gst_pad_push (ogg->srcpad, pagebuffer);
            if (result != GST_FLOW_OK)
              return result;
          }
        }
      }
    }
  }

  return result;

failure:
  gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ());
  return GST_FLOW_ERROR;
}
/* Compute the sink caps this payloader can accept, constrained by what the
 * downstream peer allows. For each allowed structure, a profile-level-id
 * (6 hex chars) is decoded into SPS bytes to derive profile/level; the
 * resulting caps always admit constrained-baseline as a fallback. */
static GstCaps *
gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *template_caps;
  GstCaps *allowed_caps;
  GstCaps *caps, *icaps;
  gboolean append_unrestricted;
  guint i;

  allowed_caps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  if (allowed_caps == NULL)
    return NULL;

  template_caps =
      gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);

  if (gst_caps_is_any (allowed_caps)) {
    /* peer accepts anything: offer the full sink template */
    caps = gst_caps_ref (template_caps);
    goto done;
  }

  if (gst_caps_is_empty (allowed_caps)) {
    caps = gst_caps_ref (allowed_caps);
    goto done;
  }

  caps = gst_caps_new_empty ();

  append_unrestricted = FALSE;
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstStructure *s = gst_caps_get_structure (allowed_caps, i);
    GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
    const gchar *profile_level_id;

    profile_level_id = gst_structure_get_string (s, "profile-level-id");

    if (profile_level_id && strlen (profile_level_id) == 6) {
      const gchar *profile;
      const gchar *level;
      long int spsint;
      guint8 sps[3];

      /* profile-level-id is 3 bytes of hex: profile_idc,
       * profile-compatibility flags and level_idc */
      spsint = strtol (profile_level_id, NULL, 16);
      sps[0] = spsint >> 16;
      sps[1] = spsint >> 8;
      sps[2] = spsint;

      profile = gst_codec_utils_h264_get_profile (sps, 3);
      level = gst_codec_utils_h264_get_level (sps, 3);

      if (profile && level) {
        GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
            profile, level);

        if (!strcmp (profile, "constrained-baseline"))
          gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
        else {
          /* offer the decoded profile plus constrained-baseline */
          GValue val = { 0, };
          GValue profiles = { 0, };

          g_value_init (&profiles, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          g_value_set_static_string (&val, profile);
          gst_value_list_append_value (&profiles, &val);

          g_value_set_static_string (&val, "constrained-baseline");
          gst_value_list_append_value (&profiles, &val);

          /* list ownership moves into the structure */
          gst_structure_take_value (new_s, "profile", &profiles);
        }

        if (!strcmp (level, "1"))
          gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
        else {
          /* accept every level up to and including the decoded one */
          GValue levels = { 0, };
          GValue val = { 0, };
          int j;

          g_value_init (&levels, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
            g_value_set_static_string (&val, all_levels[j]);
            gst_value_list_prepend_value (&levels, &val);
            if (!strcmp (level, all_levels[j]))
              break;
          }
          gst_structure_take_value (new_s, "level", &levels);
        }
      } else {
        /* Invalid profile-level-id means baseline */
        gst_structure_set (new_s,
            "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      }
    } else {
      /* No profile-level-id means baseline or unrestricted */
      gst_structure_set (new_s,
          "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      append_unrestricted = TRUE;
    }

    caps = gst_caps_merge_structure (caps, new_s);
  }
  /* NOTE(review): this chunk ends here; the rest of the function (the
   * "done:" label, use of icaps/append_unrestricted) is outside this view */
// Build a GstCaps describing every given video format description.
// For range-type resolutions, a caps structure is emitted per standard
// resolution (with its discrete framerate list) plus one structure with
// full width/height/framerate ranges; fixed resolutions get a single
// structure via fill_structure_fixed_resolution().
GstCaps* convert_videoformatsdescription_to_caps (const std::vector<tcam::VideoFormatDescription>& descriptions)
{
    GstCaps* caps = gst_caps_new_empty();

    for (const auto& desc : descriptions)
    {
        if (desc.get_fourcc() == 0)
        {
            tcam_info("Format has empty fourcc. Ignoring");
            continue;
        }

        const char* caps_string = tcam_fourcc_to_gst_1_0_caps_string(desc.get_fourcc());

        if (caps_string == nullptr)
        {
            tcam_warning("Format has empty caps string. Ignoring %s",
                         tcam::fourcc_to_description(desc.get_fourcc()));
            continue;
        }

        // tcam_error("Found '%s' pixel format string", caps_string);

        std::vector<struct tcam_resolution_description> res = desc.get_resolutions();

        for (const auto& r : res)
        {
            int min_width = r.min_size.width;
            int min_height = r.min_size.height;
            int max_width = r.max_size.width;
            int max_height = r.max_size.height;

            if (r.type == TCAM_RESOLUTION_TYPE_RANGE)
            {
                std::vector<struct tcam_image_size> framesizes =
                    tcam::get_standard_resolutions(r.min_size, r.max_size);

                // check if min/max are already in the vector.
                // some devices return std resolutions as max
                if (r.min_size != framesizes.front())
                {
                    framesizes.insert(framesizes.begin(), r.min_size);
                }

                if (r.max_size != framesizes.back())
                {
                    framesizes.push_back(r.max_size);
                }

                // one caps structure per standard resolution, each with a
                // discrete framerate list
                for (const auto& reso : framesizes)
                {
                    GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                    std::vector<double> framerates = desc.get_framerates(reso);

                    if (framerates.empty())
                    {
                        // tcam_log(TCAM_LOG_WARNING, "No available framerates. Ignoring format.");
                        continue;
                    }

                    GValue fps_list = G_VALUE_INIT;
                    g_value_init(&fps_list, GST_TYPE_LIST);

                    for (const auto& f : framerates)
                    {
                        int frame_rate_numerator;
                        int frame_rate_denominator;
                        gst_util_double_to_fraction(f,
                                                    &frame_rate_numerator,
                                                    &frame_rate_denominator);

                        // skip degenerate fractions
                        if ((frame_rate_denominator == 0) || (frame_rate_numerator == 0))
                        {
                            continue;
                        }

                        GValue fraction = G_VALUE_INIT;
                        g_value_init(&fraction, GST_TYPE_FRACTION);
                        gst_value_set_fraction(&fraction, frame_rate_numerator, frame_rate_denominator);
                        gst_value_list_append_value(&fps_list, &fraction);
                        g_value_unset(&fraction);
                    }

                    gst_structure_set (structure,
                                       "width", G_TYPE_INT, reso.width,
                                       "height", G_TYPE_INT, reso.height,
                                       NULL);

                    // list ownership moves into the structure
                    gst_structure_take_value(structure, "framerate", &fps_list);
                    gst_caps_append_structure (caps, structure);
                }

                // finally also add the range to allow unusual settings like 1920x96@90fps
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                GValue w = G_VALUE_INIT;
                g_value_init(&w, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&w, min_width, max_width);

                GValue h = G_VALUE_INIT;
                g_value_init(&h, GST_TYPE_INT_RANGE);
                gst_value_set_int_range(&h, min_height, max_height);

                std::vector<double> fps = desc.get_frame_rates(r);

                if (fps.empty())
                {
                    // GST_ERROR("Could not find any framerates for format");
                    continue;
                }

                // framerate range spans the slowest to fastest supported rate
                int fps_min_num;
                int fps_min_den;
                int fps_max_num;
                int fps_max_den;
                gst_util_double_to_fraction(*std::min_element(fps.begin(), fps.end()),
                                            &fps_min_num, &fps_min_den);
                gst_util_double_to_fraction(*std::max_element(fps.begin(), fps.end()),
                                            &fps_max_num, &fps_max_den);

                GValue f = G_VALUE_INIT;
                g_value_init(&f, GST_TYPE_FRACTION_RANGE);
                gst_value_set_fraction_range_full(&f,
                                                  fps_min_num, fps_min_den,
                                                  fps_max_num, fps_max_den);

                gst_structure_set_value(structure, "width", &w);
                gst_structure_set_value(structure, "height", &h);
                gst_structure_set_value(structure, "framerate", &f);
                gst_caps_append_structure(caps, structure);
            }
            else
            {
                // fixed resolution: single structure with discrete framerates
                GstStructure* structure = gst_structure_from_string (caps_string, NULL);

                fill_structure_fixed_resolution(structure, desc, r);
                gst_caps_append_structure (caps, structure);
            }
        }
    }

    return caps;
}