/**
 * gst_rtsp_sdp_from_media:
 * @sdp: a #GstSDPMessage to append to
 * @info: connection info (server IP and IPv4/IPv6 flag) used for the c= lines
 * @media: a #GstRTSPMedia that must already be prepared
 *
 * Add @media specific info to @sdp. For each stream of @media one m= section
 * is appended, carrying the payload/rtpmap/fmtp/control attributes derived
 * from the stream caps. A global a=range attribute and, when a network time
 * provider is available, an x-gst-clock attribute are added as well.
 *
 * Returns: TRUE on success, FALSE if @media is not prepared (no range string).
 */
gboolean gst_rtsp_sdp_from_media (GstSDPMessage * sdp, GstSDPInfo * info,
    GstRTSPMedia * media)
{
  guint i, n_streams;
  gchar *rangestr;

  n_streams = gst_rtsp_media_n_streams (media);

  /* a=range is only available once the media is prepared; use it as the
   * readiness probe */
  rangestr = gst_rtsp_media_get_range_string (media, FALSE, GST_RTSP_RANGE_NPT);
  if (rangestr == NULL)
    goto not_prepared;

  gst_sdp_message_add_attribute (sdp, "range", rangestr);
  g_free (rangestr);

  /* one m= section per stream */
  for (i = 0; i < n_streams; i++) {
    GstRTSPStream *stream;
    GstSDPMedia *smedia;
    GstStructure *s;
    const gchar *caps_str, *caps_enc, *caps_params;
    gchar *tmp;
    gint caps_pt, caps_rate;
    guint n_fields, j;
    gboolean first;
    GString *fmtp;
    GstCaps *caps;

    stream = gst_rtsp_media_get_stream (media, i);
    caps = gst_rtsp_stream_get_caps (stream);

    if (caps == NULL) {
      g_warning ("ignoring stream %d without media type", i);
      continue;
    }

    s = gst_caps_get_structure (caps, 0);
    if (s == NULL) {
      gst_caps_unref (caps);
      g_warning ("ignoring stream %d without media type", i);
      continue;
    }

    gst_sdp_media_new (&smedia);

    /* get media type and payload for the m= line */
    caps_str = gst_structure_get_string (s, "media");
    gst_sdp_media_set_media (smedia, caps_str);

    gst_structure_get_int (s, "payload", &caps_pt);
    tmp = g_strdup_printf ("%d", caps_pt);
    gst_sdp_media_add_format (smedia, tmp);
    g_free (tmp);

    gst_sdp_media_set_port_info (smedia, 0, 1);
    gst_sdp_media_set_proto (smedia, "RTP/AVP");

    /* for the c= line */
    if (info->is_ipv6) {
      gst_sdp_media_add_connection (smedia, "IN", "IP6", "::", 16, 0);
    } else {
      gst_sdp_media_add_connection (smedia, "IN", "IP4", "0.0.0.0", 16, 0);
    }

    /* get clock-rate, media type and params for the rtpmap attribute */
    gst_structure_get_int (s, "clock-rate", &caps_rate);
    caps_enc = gst_structure_get_string (s, "encoding-name");
    caps_params = gst_structure_get_string (s, "encoding-params");

    if (caps_enc) {
      if (caps_params)
        tmp = g_strdup_printf ("%d %s/%d/%s", caps_pt, caps_enc, caps_rate,
            caps_params);
      else
        tmp = g_strdup_printf ("%d %s/%d", caps_pt, caps_enc, caps_rate);

      gst_sdp_media_add_attribute (smedia, "rtpmap", tmp);
      g_free (tmp);
    }

    /* the config uri */
    tmp = gst_rtsp_stream_get_control (stream);
    gst_sdp_media_add_attribute (smedia, "control", tmp);
    g_free (tmp);

    /* collect all other properties and add them to fmtp or attributes */
    fmtp = g_string_new ("");
    g_string_append_printf (fmtp, "%d ", caps_pt);
    first = TRUE;
    n_fields = gst_structure_n_fields (s);
    for (j = 0; j < n_fields; j++) {
      const gchar *fname, *fval;

      fname = gst_structure_nth_field_name (s, j);

      /* filter out standard properties already emitted above */
      if (!strcmp (fname, "media"))
        continue;
      if (!strcmp (fname, "payload"))
        continue;
      if (!strcmp (fname, "clock-rate"))
        continue;
      if (!strcmp (fname, "encoding-name"))
        continue;
      if (!strcmp (fname, "encoding-params"))
        continue;
      if (!strcmp (fname, "ssrc"))
        continue;
      if (!strcmp (fname, "clock-base"))
        continue;
      if (!strcmp (fname, "seqnum-base"))
        continue;

      if (g_str_has_prefix (fname, "a-")) {
        /* attribute: the "a-" prefix is stripped */
        if ((fval = gst_structure_get_string (s, fname)))
          gst_sdp_media_add_attribute (smedia, fname + 2, fval);
        continue;
      }
      if (g_str_has_prefix (fname, "x-")) {
        /* attribute: "x-" names are kept verbatim */
        if ((fval = gst_structure_get_string (s, fname)))
          gst_sdp_media_add_attribute (smedia, fname, fval);
        continue;
      }

      /* everything else goes into the fmtp attribute as key=value pairs */
      if ((fval = gst_structure_get_string (s, fname))) {
        g_string_append_printf (fmtp, "%s%s=%s", first ? "" : ";", fname, fval);
        first = FALSE;
      }
    }

    /* only emit a=fmtp when at least one parameter was collected */
    if (!first) {
      tmp = g_string_free (fmtp, FALSE);
      gst_sdp_media_add_attribute (smedia, "fmtp", tmp);
      g_free (tmp);
    } else {
      g_string_free (fmtp, TRUE);
    }

    update_sdp_from_tags (stream, smedia);

    gst_sdp_message_add_media (sdp, smedia);
    gst_sdp_media_free (smedia);
    gst_caps_unref (caps);
  }

  /* advertise the network clock, if any, so clients can slave to it */
  {
    GstNetTimeProvider *provider;

    if ((provider =
            gst_rtsp_media_get_time_provider (media, info->server_ip, 0))) {
      GstClock *clock;
      gchar *address, *str;
      gint port;

      g_object_get (provider, "clock", &clock, "address", &address, "port",
          &port, NULL);

      str = g_strdup_printf ("GstNetTimeProvider %s %s:%d %" G_GUINT64_FORMAT,
          g_type_name (G_TYPE_FROM_INSTANCE (clock)), address, port,
          gst_clock_get_time (clock));

      gst_sdp_message_add_attribute (sdp, "x-gst-clock", str);
      g_free (str);
      gst_object_unref (clock);
      g_free (address);
      gst_object_unref (provider);
    }
  }

  return TRUE;

  /* ERRORS */
not_prepared:
  {
    GST_ERROR ("media %p is not prepared", media);
    return FALSE;
  }
}
// Pull the next decoded frame from the GStreamer appsink and deliver it into
// the caller-supplied vx_image, scaling and/or color-converting through a CUDA
// staging buffer when the decoded format/size differs from the configured one.
// Returns FrameSource::OK on success, FrameSource::CLOSED on EOS/timeout/error.
// The 'timeout' parameter is currently unused.
FrameSource::FrameStatus GStreamerBaseFrameSourceImpl::fetch(vx_image image, vx_uint32 /*timeout*/)
{
    // bail out once the pipeline has been flagged as finished
    if (end)
    {
        close();
        return FrameSource::CLOSED;
    }

    handleGStreamerMessages();

    if (gst_app_sink_is_eos(GST_APP_SINK(sink)))
    {
        close();
        return FrameSource::CLOSED;
    }

    // give up if no frame arrived within the source timeout
    // (toc() presumably returns milliseconds — TODO confirm)
    if ((lastFrameTimestamp.toc()/1000.0) > Application::get().getSourceDefaultTimeout())
    {
        close();
        return FrameSource::CLOSED;
    }
    lastFrameTimestamp.tic();

#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstBuffer, GStreamerObjectDeleter> bufferHolder(
        gst_app_sink_pull_buffer(GST_APP_SINK(sink)));
    GstBuffer* buffer = bufferHolder.get();
#else
    std::unique_ptr<GstSample, GStreamerObjectDeleter> sample(gst_app_sink_pull_sample(GST_APP_SINK(sink)));

    if (!sample)
    {
        close();
        return FrameSource::CLOSED;
    }

    // buffer is owned by the sample; no separate unref needed
    GstBuffer* buffer = gst_sample_get_buffer(sample.get());
#endif

    gint width;
    gint height;

#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCapsHolder(gst_buffer_get_caps(buffer));
    GstCaps* bufferCaps = bufferCapsHolder.get();
#else
    GstCaps* bufferCaps = gst_sample_get_caps(sample.get());
#endif

    // bail out in no caps
    assert(gst_caps_get_size(bufferCaps) == 1);
    GstStructure* structure = gst_caps_get_structure(bufferCaps, 0);

    // bail out if width or height are 0
    if (!gst_structure_get_int(structure, "width", &width) ||
            !gst_structure_get_int(structure, "height", &height))
    {
        close();
        return FrameSource::CLOSED;
    }

    int depth = 3;
#if GST_VERSION_MAJOR > 0
    depth = 0;
    const gchar* name = gst_structure_get_name(structure);
    const gchar* format = gst_structure_get_string(structure, "format");

    if (!name || !format)
    {
        close();
        return FrameSource::CLOSED;
    }

    // we support 2 types of data:
    //     video/x-raw, format=RGB   -> 8bit, 3 channels
    //     video/x-raw, format=GRAY8 -> 8bit, 1 channel
    // NOTE(review): the original comment said "BGR" but the code matches
    // "RGB" — confirm which channel order the pipeline actually produces.
    if (strcasecmp(name, "video/x-raw") == 0)
    {
        if (strcasecmp(format, "RGB") == 0)
        {
            depth = 3;
        }
        else if(strcasecmp(format, "GRAY8") == 0)
        {
            depth = 1;
        }
    }
#endif

    // depth == 0 means the caps did not match any supported format
    if (depth == 0)
    {
        close();
        return FrameSource::CLOSED;
    }

    // describe the packed host-side layout of the decoded frame
    vx_imagepatch_addressing_t decodedImageAddr;
    decodedImageAddr.dim_x = width;
    decodedImageAddr.dim_y = height;
    decodedImageAddr.stride_x = depth;
    // GStreamer uses as stride width rounded up to the nearest multiple of 4
    decodedImageAddr.stride_y = ((width*depth+3)/4)*4;
    decodedImageAddr.scale_x = 1;
    decodedImageAddr.scale_y = 1;

    vx_image decodedImage = NULL;

    // maps channel count (array index) to the corresponding OpenVX format
    vx_df_image_e vx_type_map[5] = { VX_DF_IMAGE_VIRT, VX_DF_IMAGE_U8,
                                     VX_DF_IMAGE_VIRT, VX_DF_IMAGE_RGB,
                                     VX_DF_IMAGE_RGBX };

    // fetch image width and height
    vx_uint32 actual_width, actual_height;
    vx_df_image_e actual_format;
    NVXIO_SAFE_CALL( vxQueryImage(image, VX_IMAGE_ATTRIBUTE_WIDTH, (void *)&actual_width, sizeof(actual_width)) );
    NVXIO_SAFE_CALL( vxQueryImage(image, VX_IMAGE_ATTRIBUTE_HEIGHT, (void *)&actual_height, sizeof(actual_height)) );
    NVXIO_SAFE_CALL( vxQueryImage(image, VX_IMAGE_ATTRIBUTE_FORMAT, (void *)&actual_format, sizeof(actual_format)) );

    bool needScale = width != (int)configuration.frameWidth ||
                     height != (int)configuration.frameHeight;

    // config and actual image sizes must be the same!
    if ((actual_height != configuration.frameHeight) ||
            (actual_width != configuration.frameWidth) ||
            (actual_format != configuration.format))
    {
        close();

        NVXIO_THROW_EXCEPTION("Actual image [ " << actual_width << " x " << actual_height <<
                              " ] does not equal configuration one [ " << configuration.frameWidth
                              << " x " << configuration.frameHeight << " ]");
    }

    // we assume that decoded image will have no more than 3 channels per pixel;
    // the staging buffer is allocated once and reused across fetches
    if (!devMem)
    {
        NVXIO_ASSERT( cudaSuccess == cudaMallocPitch(&devMem, &devMemPitch, width * 3, height) );
    }

    // check if decoded image format has changed; drop the stale intermediate
    if (scaledImage)
    {
        vx_df_image_e scaled_format;
        NVXIO_SAFE_CALL( vxQueryImage(scaledImage, VX_IMAGE_ATTRIBUTE_FORMAT, (void *)&scaled_format, sizeof(scaled_format)) );

        if (scaled_format != vx_type_map[depth])
        {
            vxReleaseImage(&scaledImage);
            scaledImage = NULL;
        }
    }

    if (needScale && !scaledImage)
    {
        scaledImage = vxCreateImage(vxContext, configuration.frameWidth,
                                    configuration.frameHeight, vx_type_map[depth]);
        NVXIO_CHECK_REFERENCE( scaledImage );
    }

#if GST_VERSION_MAJOR == 0
    bool needConvert = configuration.format != VX_DF_IMAGE_RGB;
    void * decodedPtr = GST_BUFFER_DATA(buffer);
#else
    GstMapInfo info;

    gboolean success = gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ);
    if (!success)
    {
        printf("GStreamer: unable to map buffer\n");
        close();
        return FrameSource::CLOSED;
    }

    bool needConvert = configuration.format != vx_type_map[depth];
    void * decodedPtr = info.data;
#endif

    if (!needConvert && !needScale)
    {
        // fast path: wrap the mapped host pointer and copy directly into 'image'
        decodedImage = vxCreateImageFromHandle(vxContext, vx_type_map[depth], &decodedImageAddr,
                                               &decodedPtr, VX_IMPORT_TYPE_HOST);
        NVXIO_CHECK_REFERENCE( decodedImage );
        NVXIO_SAFE_CALL( nvxuCopyImage(vxContext, decodedImage, image) );
    }
    else
    {
        // 1. upload decoded image to CUDA buffer
        NVXIO_ASSERT( cudaSuccess == cudaMemcpy2D(devMem, devMemPitch,
                                                  decodedPtr, decodedImageAddr.stride_y,
                                                  decodedImageAddr.dim_x * depth,
                                                  decodedImageAddr.dim_y,
                                                  cudaMemcpyHostToDevice) );

        // 2. create vx_image wrapper for decoded buffer
        decodedImageAddr.stride_y = static_cast<vx_int32>(devMemPitch);
        decodedImage = vxCreateImageFromHandle(vxContext, vx_type_map[depth], &decodedImageAddr,
                                               &devMem, NVX_IMPORT_TYPE_CUDA);
        NVXIO_CHECK_REFERENCE( decodedImage );

        if (needScale)
        {
            // 3. scale image
            NVXIO_SAFE_CALL( vxuScaleImage(vxContext, decodedImage, scaledImage,
                                           VX_INTERPOLATION_TYPE_BILINEAR) );

            // 4. convert to dst image
            NVXIO_SAFE_CALL( vxuColorConvert(vxContext, scaledImage, image) );
        }
        else
        {
            // 3. convert to dst image
            NVXIO_SAFE_CALL( vxuColorConvert(vxContext, decodedImage, image) );
        }
    }

#if GST_VERSION_MAJOR != 0
    gst_buffer_unmap(buffer, &info);
#endif

    NVXIO_SAFE_CALL( vxReleaseImage(&decodedImage) );

    return FrameSource::OK;
}
/* Compare the output of the colorspace element with ffmpegcolorspace.
 *
 * Builds a pipeline of the form
 *   videotestsrc ! capsfilter1 ! colorspace ! capsfilter2 ! fakesink
 * or, when @comp is TRUE, a tee'd pipeline that feeds both colorspace and
 * ffmpegcolorspace into a "compare" element so their outputs are checked
 * against each other (SSIM, threshold 0.90).
 *
 * It then computes every fixed caps format acceptable to all elements at the
 * given @width x @height and runs the pipeline once for every (input, output)
 * caps combination. Uses the file-global 'loop' and the message_cb EOS
 * handler to drive each iteration.
 */
static void
colorspace_compare (gint width, gint height, gboolean comp)
{
  GstBus *bus;
  GstElement *pipeline, *src, *filter1, *filter2, *csp, *fcsp, *fakesink;
  GstElement *queue1, *queue2, *tee, *compare;
  GstCaps *caps, *tcaps, *rcaps, *fcaps;
  GstCaps *ccaps;
  GstPad *pad;

  gint i, j;

  /* create elements */
  pipeline = gst_pipeline_new ("pipeline");
  src = gst_element_factory_make ("videotestsrc", "videotestsrc");
  fail_unless (src != NULL);
  filter1 = gst_element_factory_make ("capsfilter", "capsfilter1");
  fail_unless (filter1 != NULL);
  csp = gst_element_factory_make ("colorspace", "colorspace");
  fail_unless (csp != NULL);
  filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
  fail_unless (filter2 != NULL);

  if (comp) {
    fcsp = gst_element_factory_make ("ffmpegcolorspace", "ffmpegcolorspace");
    fail_unless (fcsp != NULL);
    tee = gst_element_factory_make ("tee", "tee");
    fail_unless (tee != NULL);
    queue1 = gst_element_factory_make ("queue", "queue1");
    fail_unless (queue1 != NULL);
    queue2 = gst_element_factory_make ("queue", "queue2");
    fail_unless (queue2 != NULL);
    compare = gst_element_factory_make ("compare", "compare");
    fail_unless (compare != NULL);
  } else {
    fcsp = tee = queue1 = queue2 = compare = NULL;
  }

  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (fakesink != NULL);

  /* add and link; note gst_bin_add_many stops at the first NULL, so in the
   * non-comp case only the first five elements are added */
  gst_bin_add_many (GST_BIN (pipeline), src, filter1, filter2, csp, fakesink,
      tee, queue1, queue2, fcsp, compare, NULL);
  fail_unless (gst_element_link (src, filter1));
  if (comp) {
    fail_unless (gst_element_link (filter1, tee));
    fail_unless (gst_element_link (tee, queue1));
    fail_unless (gst_element_link (queue1, fcsp));
    fail_unless (gst_element_link_pads (fcsp, NULL, compare, "sink"));
    fail_unless (gst_element_link (tee, queue2));
    fail_unless (gst_element_link (queue2, csp));
    fail_unless (gst_element_link_pads (csp, NULL, compare, "check"));
    fail_unless (gst_element_link (compare, filter2));
  } else {
    fail_unless (gst_element_link (filter1, csp));
    fail_unless (gst_element_link (csp, filter2));
  }
  fail_unless (gst_element_link (filter2, fakesink));

  /* obtain possible caps combinations */
  if (comp) {
    pad = gst_element_get_static_pad (fcsp, "sink");
    fail_unless (pad != NULL);
    ccaps = gst_pad_get_pad_template_caps (pad);
    fail_unless (ccaps != NULL);
    fcaps = ccaps;
    gst_object_unref (pad);
  } else {
    fcaps = gst_caps_new_any ();
  }

  pad = gst_element_get_static_pad (csp, "sink");
  fail_unless (pad != NULL);
  ccaps = gst_pad_get_pad_template_caps (pad);
  fail_unless (ccaps != NULL);
  gst_object_unref (pad);

  /* handle videotestsrc limitations */
  pad = gst_element_get_static_pad (src, "src");
  fail_unless (pad != NULL);
  caps = (GstCaps *) gst_pad_get_pad_template_caps (pad);
  fail_unless (caps != NULL);
  gst_object_unref (pad);

  /* restrict to the sizes/framerate under test */
  rcaps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, 25, 1,
      "color-matrix", G_TYPE_STRING, "sdtv",
      "chroma-site", G_TYPE_STRING, "mpeg2", NULL);
  gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION, 25, 1,
          "depth", G_TYPE_INT, 32, NULL));
  /* FIXME also allow x-raw-gray if/when colorspace actually handles those */

  /* limit to supported compare types */
  if (comp) {
    gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 25, 1,
            "depth", G_TYPE_INT, 24, NULL));
  }

  /* intersect all constraints: ffmpegcolorspace ∩ colorspace ∩ src ∩ rcaps */
  tcaps = gst_caps_intersect (fcaps, ccaps);
  gst_caps_unref (fcaps);
  gst_caps_unref (ccaps);
  caps = gst_caps_intersect (tcaps, caps);
  gst_caps_unref (tcaps);
  tcaps = caps;
  caps = gst_caps_intersect (tcaps, rcaps);
  gst_caps_unref (tcaps);
  gst_caps_unref (rcaps);

  /* normalize to finally have a list of acceptable fixed formats */
  caps = gst_caps_simplify (caps);
  caps = gst_caps_normalize (caps);

  /* set up for running stuff */
  loop = g_main_loop_new (NULL, FALSE);
  bus = gst_element_get_bus (pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::eos", (GCallback) message_cb, NULL);
  gst_object_unref (bus);
  g_object_set (src, "num-buffers", 5, NULL);
  if (comp) {
    /* set lower bound for ssim comparison, and allow slightly different caps */
    g_object_set (compare, "method", 2, NULL);
    g_object_set (compare, "meta", 3, NULL);
    g_object_set (compare, "threshold", 0.90, NULL);
    g_object_set (compare, "upper", FALSE, NULL);
  }

  GST_INFO ("possible caps to check %d", gst_caps_get_size (caps));

  /* loop over all input and output combinations */
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    for (j = 0; j < gst_caps_get_size (caps); j++) {
      GstCaps *in_caps, *out_caps;
      GstStructure *s;
      const gchar *fourcc;

      in_caps = gst_caps_copy_nth (caps, i);
      out_caps = gst_caps_copy_nth (caps, j);

      /* FIXME remove if videotestsrc and video format handle these properly */
      s = gst_caps_get_structure (in_caps, 0);
      if ((fourcc = gst_structure_get_string (s, "format"))) {
        if (!strcmp (fourcc, "YUV9") ||
            !strcmp (fourcc, "YVU9") || !strcmp (fourcc, "v216")) {
          gst_caps_unref (in_caps);
          gst_caps_unref (out_caps);
          continue;
        }
      }

      GST_INFO ("checking conversion from %" GST_PTR_FORMAT " (%d)"
          " to %" GST_PTR_FORMAT " (%d)", in_caps, i, out_caps, j);

      g_object_set (filter1, "caps", in_caps, NULL);
      g_object_set (filter2, "caps", out_caps, NULL);

      fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING)
          != GST_STATE_CHANGE_FAILURE);

      /* message_cb quits the loop on EOS */
      g_main_loop_run (loop);

      fail_unless (gst_element_set_state (pipeline, GST_STATE_NULL)
          == GST_STATE_CHANGE_SUCCESS);

      gst_caps_unref (in_caps);
      gst_caps_unref (out_caps);
    }
  }

  gst_caps_unref (caps);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}
/* Negotiate output caps for an incoming raw L16 RTP stream.
 *
 * Derives sample rate and channel count from the RTP caps: static payload
 * types L16 stereo/mono imply 44100 Hz defaults, which explicit caps fields
 * ("clock-rate", "encoding-params"/"channels") may override. The channel
 * layout is taken from "channel-order" when recognised, otherwise a default
 * NONE layout is used. Returns FALSE when no clock-rate can be determined.
 */
static gboolean
gst_rtp_L16_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstRtpL16Depay *self;
  GstStructure *st;
  GstAudioInfo *audio_info;
  GstCaps *outcaps;
  const GstRTPChannelOrder *chan_order;
  const gchar *order_str;
  gboolean ok;
  gint pt = 96;
  gint rate = 0;
  gint nch = 0;

  self = GST_RTP_L16_DEPAY (depayload);
  st = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (st, "payload", &pt);

  /* static payload numbers carry a fixed rate/channel mapping */
  if (pt == GST_RTP_PAYLOAD_L16_STEREO) {
    nch = 2;
    rate = 44100;
  } else if (pt == GST_RTP_PAYLOAD_L16_MONO) {
    nch = 1;
    rate = 44100;
  }

  /* caps can overwrite defaults */
  rate = gst_rtp_L16_depay_parse_int (st, "clock-rate", rate);
  if (rate == 0) {
    GST_ERROR_OBJECT (depayload, "no clock-rate specified");
    return FALSE;
  }

  nch = gst_rtp_L16_depay_parse_int (st, "encoding-params", nch);
  if (nch == 0)
    nch = gst_rtp_L16_depay_parse_int (st, "channels", nch);
  if (nch == 0)
    nch = 1;                    /* channels defaults to 1 otherwise */

  depayload->clock_rate = rate;

  /* fill in the audio info: L16 is always big-endian signed 16-bit */
  audio_info = &self->info;
  gst_audio_info_init (audio_info);
  audio_info->finfo = gst_audio_format_get_info (GST_AUDIO_FORMAT_S16BE);
  audio_info->rate = rate;
  audio_info->channels = nch;
  audio_info->bpf = (audio_info->finfo->width / 8) * nch;

  /* add channel positions */
  order_str = gst_structure_get_string (st, "channel-order");
  chan_order = gst_rtp_channels_get_by_order (nch, order_str);
  self->order = chan_order;
  if (chan_order) {
    memcpy (audio_info->position, chan_order->pos,
        sizeof (GstAudioChannelPosition) * nch);
    gst_audio_channel_positions_to_valid_order (audio_info->position,
        audio_info->channels);
  } else {
    GST_ELEMENT_WARNING (self, STREAM, DECODE, (NULL),
        ("Unknown channel order '%s' for %d channels",
            GST_STR_NULL (order_str), nch));
    /* create default NONE layout */
    gst_rtp_channels_create_default (nch, audio_info->position);
  }

  outcaps = gst_audio_info_to_caps (audio_info);
  ok = gst_pad_set_caps (depayload->srcpad, outcaps);
  gst_caps_unref (outcaps);

  return ok;
}
/* Sink-pad event handler for the Kate encoder.
 *
 * NEWSEGMENT and CUSTOM_DOWNSTREAM events are forwarded downstream on the
 * source pad after flushing headers (and, for NEWSEGMENT, possibly emitting
 * delayed SPU packets / keepalives); TAG, EOS and everything else go through
 * gst_pad_event_default(). Returns the result of the event push.
 *
 * Fix: the argument to gst_event_parse_new_segment() had been mangled from
 * "&timestamp" into an HTML entity ("&times;tamp"), which does not compile.
 * Also renamed the inner EOS-case 'int ret' to 'enc_ret' so it no longer
 * shadows the function-level 'gboolean ret'.
 */
static gboolean
gst_kate_enc_sink_event (GstPad * pad, GstEvent * event)
{
  GstKateEnc *ke = GST_KATE_ENC (gst_pad_get_parent (pad));
  GstStructure *structure;
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
      GST_LOG_OBJECT (ke, "Got newsegment event");
      if (ke->initialized) {
        GST_LOG_OBJECT (ke, "ensuring all headers are in");
        if (gst_kate_enc_flush_headers (ke) != GST_FLOW_OK) {
          GST_WARNING_OBJECT (ke, "Failed to flush headers");
        } else {
          GstFormat format;
          gint64 timestamp;

          gst_event_parse_new_segment (event, NULL, NULL, &format, &timestamp,
              NULL, NULL);
          if (format != GST_FORMAT_TIME || !GST_CLOCK_TIME_IS_VALID (timestamp)) {
            GST_WARNING_OBJECT (ke,
                "No time in newsegment event %p, format %d, timestamp %"
                G_GINT64_FORMAT, event, (int) format, timestamp);
            /* to be safe, we'd need to generate a keepalive anyway, but we'd
               have to guess at the timestamp to use; a good guess would be the
               last known timestamp plus the keepalive time, but if we then get
               a packet with a timestamp less than this, it would fail to encode,
               which would be Bad. If we don't encode a keepalive, we run the
               risk of stalling the pipeline and hanging, which is Very Bad.
               Oh dear. We can't exit(-1), can we ? */
          } else {
            float t = timestamp / (double) GST_SECOND;

            /* flush a delayed SPU whose display window has elapsed */
            if (ke->delayed_spu
                && t - ke->delayed_start / (double) GST_SECOND >=
                ke->default_spu_duration) {
              if (G_UNLIKELY (gst_kate_enc_flush_waiting (ke,
                          timestamp) != GST_FLOW_OK)) {
                GST_WARNING_OBJECT (ke, "Failed to encode delayed packet");
                /* continue with new segment handling anyway */
              }
            }

            GST_LOG_OBJECT (ke, "ts %f, last %f (min %f)", t,
                ke->last_timestamp / (double) GST_SECOND,
                ke->keepalive_min_time);
            if (ke->keepalive_min_time > 0.0f
                && t - ke->last_timestamp / (double) GST_SECOND >=
                ke->keepalive_min_time) {
              /* we only generate a keepalive if there is no SPU waiting, as it
                 would mean out of sequence start times - and granulepos */
              if (!ke->delayed_spu) {
                gst_kate_enc_generate_keepalive (ke, timestamp);
              }
            }
          }
        }
      }
      ret = gst_pad_push_event (ke->srcpad, event);
      break;

    case GST_EVENT_CUSTOM_DOWNSTREAM:
      GST_LOG_OBJECT (ke, "Got custom downstream event");
      /* adapted from the dvdsubdec element */
      structure = event->structure;
      if (structure != NULL
          && gst_structure_has_name (structure, "application/x-gst-dvd")) {
        if (ke->initialized) {
          GST_LOG_OBJECT (ke, "ensuring all headers are in");
          if (gst_kate_enc_flush_headers (ke) != GST_FLOW_OK) {
            GST_WARNING_OBJECT (ke, "Failed to flush headers");
          } else {
            const gchar *event_name =
                gst_structure_get_string (structure, "event");
            if (event_name) {
              if (!strcmp (event_name, "dvd-spu-clut-change")) {
                gchar name[16];
                int idx;
                gboolean found;
                gint value;

                GST_INFO_OBJECT (ke, "New CLUT received");
                /* pick up the 16 palette entries (clut00 .. clut15) */
                for (idx = 0; idx < 16; ++idx) {
                  g_snprintf (name, sizeof (name), "clut%02d", idx);
                  found = gst_structure_get_int (structure, name, &value);
                  if (found) {
                    ke->spu_clut[idx] = value;
                  } else {
                    GST_WARNING_OBJECT (ke,
                        "DVD CLUT event did not contain %s field", name);
                  }
                }
              } else if (!strcmp (event_name, "dvd-lang-codes")) {
                /* we can't know which stream corresponds to us */
              }
            } else {
              GST_WARNING_OBJECT (ke, "custom downstream event with no name");
            }
          }
        }
      }
      ret = gst_pad_push_event (ke->srcpad, event);
      break;

    case GST_EVENT_TAG:
      GST_LOG_OBJECT (ke, "Got tag event");
      if (ke->tags) {
        GstTagList *list;

        gst_event_parse_tag (event, &list);
        gst_tag_list_insert (ke->tags, list,
            gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (ke)));
      } else {
        g_assert_not_reached ();
      }
      ret = gst_pad_event_default (pad, event);
      break;

    case GST_EVENT_EOS:
      GST_INFO_OBJECT (ke, "Got EOS event");
      if (ke->initialized) {
        GST_LOG_OBJECT (ke, "ensuring all headers are in");
        if (gst_kate_enc_flush_headers (ke) != GST_FLOW_OK) {
          GST_WARNING_OBJECT (ke, "Failed to flush headers");
        } else {
          kate_packet kp;
          int enc_ret;          /* libkate return code; was shadowing 'ret' */
          GstClockTime delayed_end =
              ke->delayed_start + ke->default_spu_duration * GST_SECOND;

          if (G_UNLIKELY (gst_kate_enc_flush_waiting (ke,
                      delayed_end) != GST_FLOW_OK)) {
            GST_WARNING_OBJECT (ke, "Failed to encode delayed packet");
            /* continue with EOS handling anyway */
          }

          enc_ret = kate_encode_finish (&ke->k, -1, &kp);
          if (enc_ret < 0) {
            GST_WARNING_OBJECT (ke, "Failed to encode EOS packet: %d", enc_ret);
          } else {
            kate_int64_t granpos = kate_encode_get_granule (&ke->k);
            GST_LOG_OBJECT (ke, "EOS packet encoded");
            if (gst_kate_enc_push_and_free_kate_packet (ke, &kp, granpos,
                    ke->latest_end_time, 0, FALSE)) {
              GST_WARNING_OBJECT (ke, "Failed to push EOS packet");
            }
          }
        }
      }
      ret = gst_pad_event_default (pad, event);
      break;

    default:
      GST_LOG_OBJECT (ke, "Got unhandled event");
      ret = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (ke);
  return ret;
}
/* Configure the mpg123 decoder output format from upstream caps and the
 * formats allowed downstream. Returns TRUE when a matching format was found
 * and recorded as the pending ("next") audio info, FALSE otherwise. */
static gboolean
gst_mpg123_audio_dec_set_format (GstAudioDecoder * dec, GstCaps * input_caps)
{
  /* Using the parsed information upstream, and the list of allowed caps
   * downstream, this code tries to find a suitable audio info. It is important
   * to keep in mind that the rate and number of channels should never deviate
   * from the one the bitstream has, otherwise mpg123 has to mix channels and/or
   * resample (and as its docs say, its internal resampler is very crude). The
   * sample format, however, can be chosen freely, because the MPEG specs do not
   * mandate any special format. Therefore, rate and number of channels are taken
   * from upstream (which parsed the MPEG frames, so the input_caps contain
   * exactly the rate and number of channels the bitstream actually has), while
   * the sample format is chosen by trying out all caps that are allowed by
   * downstream. This way, the output is adjusted to what the downstream prefers.
   *
   * Also, the new output audio info is not set immediately. Instead, it is
   * considered the "next audioinfo". The code waits for mpg123 to notice the new
   * format (= when mpg123_decode_frame() returns MPG123_AUDIO_DEC_NEW_FORMAT),
   * and then sets the next audioinfo. Otherwise, the next audioinfo is set too
   * soon, which may cause problems with mp3s containing several format headers.
   * One example would be an mp3 with the first 30 seconds using 44.1 kHz, then
   * the next 30 seconds using 32 kHz. Rare, but possible.
   *
   * STEPS:
   *
   * 1. get rate and channels from input_caps
   * 2. get allowed caps from src pad
   * 3. for each structure in allowed caps:
   * 3.1. take format
   * 3.2. if the combination of format with rate and channels is unsupported by
   *      mpg123, go to (3), or exit with error if there are no more structures
   *      to try
   * 3.3. create next audioinfo out of rate,channels,format, and exit
   */

  int rate, channels;
  GstMpg123AudioDec *mpg123_decoder;
  GstCaps *allowed_srccaps;
  guint structure_nr;
  gboolean match_found = FALSE;

  mpg123_decoder = GST_MPG123_AUDIO_DEC (dec);

  g_assert (mpg123_decoder->handle != NULL);

  /* invalidate any previously pending format until a new match is found */
  mpg123_decoder->has_next_audioinfo = FALSE;

  /* Get rate and channels from input_caps */
  {
    GstStructure *structure;
    gboolean err = FALSE;

    /* Only the first structure is used (multiple
     * input caps structures don't make sense) */
    structure = gst_caps_get_structure (input_caps, 0);

    if (!gst_structure_get_int (structure, "rate", &rate)) {
      err = TRUE;
      GST_ERROR_OBJECT (dec, "Input caps do not have a rate value");
    }
    if (!gst_structure_get_int (structure, "channels", &channels)) {
      err = TRUE;
      GST_ERROR_OBJECT (dec, "Input caps do not have a channel value");
    }

    if (err)
      return FALSE;
  }

  /* Get the caps that are allowed by downstream */
  {
    GstCaps *allowed_srccaps_unnorm =
        gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
    if (!allowed_srccaps_unnorm) {
      GST_ERROR_OBJECT (dec, "Allowed src caps are NULL");
      return FALSE;
    }
    /* normalize so each structure carries exactly one fixed format */
    allowed_srccaps = gst_caps_normalize (allowed_srccaps_unnorm);
  }

  /* Go through all allowed caps, pick the first one that matches */
  for (structure_nr = 0; structure_nr < gst_caps_get_size (allowed_srccaps);
      ++structure_nr) {
    GstStructure *structure;
    gchar const *format_str;
    GstAudioFormat format;
    int encoding;

    structure = gst_caps_get_structure (allowed_srccaps, structure_nr);

    format_str = gst_structure_get_string (structure, "format");
    if (format_str == NULL) {
      GST_DEBUG_OBJECT (dec, "Could not get format from src caps");
      continue;
    }

    format = gst_audio_format_from_string (format_str);
    if (format == GST_AUDIO_FORMAT_UNKNOWN) {
      GST_DEBUG_OBJECT (dec, "Unknown format %s", format_str);
      continue;
    }

    /* map the GStreamer sample format to the mpg123 encoding constant */
    switch (format) {
      case GST_AUDIO_FORMAT_S16:
        encoding = MPG123_ENC_SIGNED_16;
        break;
      case GST_AUDIO_FORMAT_S24:
        encoding = MPG123_ENC_SIGNED_24;
        break;
      case GST_AUDIO_FORMAT_S32:
        encoding = MPG123_ENC_SIGNED_32;
        break;
      case GST_AUDIO_FORMAT_U16:
        encoding = MPG123_ENC_UNSIGNED_16;
        break;
      case GST_AUDIO_FORMAT_U24:
        encoding = MPG123_ENC_UNSIGNED_24;
        break;
      case GST_AUDIO_FORMAT_U32:
        encoding = MPG123_ENC_UNSIGNED_32;
        break;
      case GST_AUDIO_FORMAT_F32:
        encoding = MPG123_ENC_FLOAT_32;
        break;
      default:
        GST_DEBUG_OBJECT (dec, "Format %s in srccaps is not supported",
            format_str);
        continue;
    }

    {
      int err;

      /* Cleanup old formats & set new one */
      mpg123_format_none (mpg123_decoder->handle);
      err = mpg123_format (mpg123_decoder->handle, rate, channels, encoding);
      if (err != MPG123_OK) {
        GST_DEBUG_OBJECT (dec,
            "mpg123 cannot use caps %" GST_PTR_FORMAT
            " because mpg123_format() failed: %s", structure,
            mpg123_strerror (mpg123_decoder->handle));
        continue;
      }
    }

    /* record the pending format; it is applied once mpg123 reports
     * MPG123_AUDIO_DEC_NEW_FORMAT during decoding (see comment above) */
    gst_audio_info_init (&(mpg123_decoder->next_audioinfo));
    gst_audio_info_set_format (&(mpg123_decoder->next_audioinfo), format,
        rate, channels, NULL);
    GST_LOG_OBJECT (dec, "The next audio format is: %s, %u Hz, %u channels",
        format_str, rate, channels);
    mpg123_decoder->has_next_audioinfo = TRUE;

    match_found = TRUE;

    break;
  }

  gst_caps_unref (allowed_srccaps);

  return match_found;
}
/**
 * gst_missing_plugin_message_get_description:
 * @msg: a missing-plugin #GstMessage of type #GST_MESSAGE_ELEMENT
 *
 * Returns a localised string describing the missing feature, for use in
 * error dialogs and the like. Should never return NULL unless @msg is not
 * a valid missing-plugin message.
 *
 * This function is mainly for applications that need a human-readable string
 * describing a missing plugin, given a previously collected missing-plugin
 * message
 *
 * Returns: a newly-allocated description string, or NULL on error. Free
 * string with g_free() when not needed any longer.
 */
gchar *
gst_missing_plugin_message_get_description (GstMessage * msg)
{
  const GstStructure *structure;
  const gchar *name;
  GstMissingType missing_type;
  gchar *ret = NULL;

  g_return_val_if_fail (gst_is_missing_plugin_message (msg), NULL);

  structure = gst_message_get_structure (msg);
  GST_LOG ("Parsing missing-plugin message: %" GST_PTR_FORMAT, structure);

  /* best case: the message itself already carries a description */
  name = gst_structure_get_string (structure, "name");
  if (name != NULL && *name != '\0')
    ret = g_strdup (name);

  if (ret == NULL) {
    /* fallback #1: derive a description from the message detail */
    missing_type = missing_structure_get_type (structure);
    if (missing_type == GST_MISSING_TYPE_URISOURCE ||
        missing_type == GST_MISSING_TYPE_URISINK ||
        missing_type == GST_MISSING_TYPE_ELEMENT) {
      gchar *detail = NULL;

      if (missing_structure_get_string_detail (structure, &detail)) {
        if (missing_type == GST_MISSING_TYPE_URISOURCE)
          ret = gst_pb_utils_get_source_description (detail);
        else if (missing_type == GST_MISSING_TYPE_URISINK)
          ret = gst_pb_utils_get_sink_description (detail);
        else
          ret = gst_pb_utils_get_element_description (detail);
        g_free (detail);
      }
    } else if (missing_type == GST_MISSING_TYPE_DECODER ||
        missing_type == GST_MISSING_TYPE_ENCODER) {
      GstCaps *caps = NULL;

      if (missing_structure_get_caps_detail (structure, &caps)) {
        if (missing_type == GST_MISSING_TYPE_DECODER)
          ret = gst_pb_utils_get_decoder_description (caps);
        else
          ret = gst_pb_utils_get_encoder_description (caps);
        gst_caps_unref (caps);
      }
    }

    /* fallback #2: generic per-type description */
    if (ret == NULL) {
      const gchar *desc;

      switch (missing_type) {
        case GST_MISSING_TYPE_URISOURCE:
          desc = _("Unknown source element");
          break;
        case GST_MISSING_TYPE_URISINK:
          desc = _("Unknown sink element");
          break;
        case GST_MISSING_TYPE_ELEMENT:
          desc = _("Unknown element");
          break;
        case GST_MISSING_TYPE_DECODER:
          desc = _("Unknown decoder element");
          break;
        case GST_MISSING_TYPE_ENCODER:
          desc = _("Unknown encoder element");
          break;
        default:
          /* we should really never get here, but we better still return
           * something if we do */
          desc = _("Plugin or element of unknown type");
          break;
      }
      ret = g_strdup (desc);
    }
  }

  GST_LOG ("returning '%s'", ret);
  return ret;
}
/*!
 * \brief CvCapture_GStreamer::retrieveFrame
 * \return IplImage pointer. [Transfer Full]
 * Retrieve the previously grabbed buffer and wrap it in an IplImage header.
 * The header is created lazily on first use from the buffer caps (width,
 * height, channel count) and reused for subsequent frames.
 */
IplImage * CvCapture_GStreamer::retrieveFrame(int)
{
    // nothing grabbed yet
    if(!buffer)
        return 0;

    //construct a frame header if we did not have any yet
    if(!frame)
    {
        gint height, width;

        //reuse the caps ptr
        if (buffer_caps)
            gst_caps_unref(buffer_caps);

#if GST_VERSION_MAJOR == 0
        buffer_caps = gst_buffer_get_caps(buffer);
#else
        buffer_caps = gst_sample_get_caps(sample);
#endif

        // bail out in no caps
        assert(gst_caps_get_size(buffer_caps) == 1);
        GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);

        // bail out if width or height are 0
        if(!gst_structure_get_int(structure, "width", &width) ||
                !gst_structure_get_int(structure, "height", &height))
        {
            return 0;
        }

        int depth = 3;
#if GST_VERSION_MAJOR > 0
        depth = 0;
        const gchar* name = gst_structure_get_name(structure);
        const gchar* format = gst_structure_get_string(structure, "format");

        if (!name || !format)
            return 0;

        // we support 3 types of data:
        //     video/x-raw, format=BGR   -> 8bit, 3 channels
        //     video/x-raw, format=GRAY8 -> 8bit, 1 channel
        //     video/x-bayer             -> 8bit, 1 channel
        // bayer data is never decoded, the user is responsible for that
        // everything is 8 bit, so we just test the caps for bit depth
        if (strcasecmp(name, "video/x-raw") == 0)
        {
            if (strcasecmp(format, "BGR") == 0)
            {
                depth = 3;
            }
            else if(strcasecmp(format, "GRAY8") == 0)
            {
                depth = 1;
            }
        }
        else if (strcasecmp(name, "video/x-bayer") == 0)
        {
            depth = 1;
        }
#endif
        // depth == 0 means the caps matched none of the supported formats
        if (depth > 0)
        {
            frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
        }
        else
        {
            return 0;
        }
    }

    // gstreamer expects us to handle the memory at this point
    // so we can just wrap the raw buffer and be done with it
#if GST_VERSION_MAJOR == 0
    frame->imageData = (char *)GST_BUFFER_DATA(buffer);
#else
    // the data ptr in GstMapInfo is only valid throughout the mapinfo object's
    // life. TODO: check if reusing the mapinfo object is ok.
    gboolean success = gst_buffer_map(buffer, info, (GstMapFlags)GST_MAP_READ);
    if (!success)
    {
        //something weird went wrong here. abort. abort.
        //fprintf(stderr,"GStreamer: unable to map buffer");
        return 0;
    }
    frame->imageData = (char*)info->data;
    // NOTE(review): the buffer is unmapped immediately below, so imageData is
    // formally dangling once this returns; it only works if the underlying
    // memory stays resident after unmap. Fixing this properly needs the unmap
    // moved to where the frame is released (outside this function's view).
    gst_buffer_unmap(buffer, info);
#endif

    return frame;
}
static void gst_hls_sink_handle_message (GstBin * bin, GstMessage * message) { GstHlsSink *sink = GST_HLS_SINK_CAST (bin); switch (message->type) { case GST_MESSAGE_ELEMENT: { GFile *file; const char *filename, *title; char *playlist_content; GstClockTime running_time, duration; gboolean discont = FALSE; GError *error = NULL; gchar *entry_location; const GstStructure *structure; structure = gst_message_get_structure (message); if (strcmp (gst_structure_get_name (structure), "GstMultiFileSink")) break; filename = gst_structure_get_string (structure, "filename"); gst_structure_get_clock_time (structure, "running-time", &running_time); duration = running_time - sink->last_running_time; sink->last_running_time = running_time; file = g_file_new_for_path (filename); title = "ciao"; GST_INFO_OBJECT (sink, "COUNT %d", sink->index); if (sink->playlist_root == NULL) entry_location = g_path_get_basename (filename); else { gchar *name = g_path_get_basename (filename); entry_location = g_build_filename (sink->playlist_root, name, NULL); g_free (name); } gst_m3u8_playlist_add_entry (sink->playlist, entry_location, file, title, duration, sink->index, discont); g_free (entry_location); playlist_content = gst_m3u8_playlist_render (sink->playlist); g_file_set_contents (sink->playlist_location, playlist_content, -1, &error); g_free (playlist_content); /* multifilesink is starting a new file. It means that upstream sent a key * unit and we can schedule the next key unit now. */ sink->waiting_fku = FALSE; schedule_next_key_unit (sink); /* multifilesink is an internal implementation detail. If applications * need a notification, we should probably do our own message */ GST_DEBUG_OBJECT (bin, "dropping message %" GST_PTR_FORMAT, message); gst_message_unref (message); message = NULL; break; } default: break; } if (message) GST_BIN_CLASS (parent_class)->handle_message (bin, message); }
static void handle_player_info (G_GNUC_UNUSED SoupSession * session, SoupMessage * msg, AurClient * client) { SoupBuffer *buffer; if (msg->status_code < 200 || msg->status_code >= 300) return; buffer = soup_message_body_flatten (msg->response_body); if (json_parser_load_from_data (client->json, buffer->data, buffer->length, NULL)) { const GValue *v1; GArray *player_info = NULL; gsize i; JsonNode *root = json_parser_get_root (client->json); GstStructure *s1 = aur_json_to_gst_structure (root); if (s1 == NULL) return; /* Invalid chunk */ v1 = gst_structure_get_value (s1, "player-clients"); if (!GST_VALUE_HOLDS_ARRAY (v1)) goto failed; player_info = g_array_sized_new (TRUE, TRUE, sizeof (AurPlayerInfo), gst_value_array_get_size (v1)); for (i = 0; i < gst_value_array_get_size (v1); i++) { AurPlayerInfo info; const GValue *v2 = gst_value_array_get_value (v1, i); const GstStructure *s2; gint64 client_id; if (!GST_VALUE_HOLDS_STRUCTURE (v2)) goto failed; s2 = gst_value_get_structure (v2); if (!aur_json_structure_get_int64 (s2, "client-id", &client_id)) goto failed; info.id = client_id; if (!aur_json_structure_get_boolean (s2, "enabled", &info.enabled)) goto failed; if (!aur_json_structure_get_double (s2, "volume", &info.volume)) goto failed; if (!(info.host = g_strdup (gst_structure_get_string (s2, "host")))) goto failed; g_array_append_val (player_info, info); } free_player_info (client->player_info); client->player_info = player_info; player_info = NULL; g_signal_emit (client, signals[SIGNAL_PLAYER_INFO_CHANGED], 0); failed: if (player_info) free_player_info (player_info); gst_structure_free (s1); } }
/* Configures the uniaudio decoder for new input caps: drains and closes any
 * existing codec handle, looks up a codec for the caps, creates a new handle,
 * and pushes every relevant caps field (rate, channels, bitrate, block align,
 * WMA version, stream type, sample depth, codec data) into the codec via
 * set_parameter(). Returns FALSE (closing the handle where one exists) on any
 * configuration failure.
 *
 * Fixes: the stream-format mapping compared "raw" twice — the first check
 * must be "adts" — and the dangling else made "adif" end up as STREAM_UNKNOW;
 * the bitrate debug message wrongly said "channel count"; the
 * UNIA_SET_PARAMETER_EX helper macro was never #undef'd. */
static gboolean gst_imx_audio_uniaudio_dec_set_format(GstAudioDecoder *dec, GstCaps *caps)
{
	UniACodecParameter parameter;
	UniACodecMemoryOps memory_ops;
	GstImxAudioUniaudioDec *imx_audio_uniaudio_dec = GST_IMX_AUDIO_UNIAUDIO_DEC(dec);

/* Pushes one parameter into the codec; on failure, logs, closes the handle
 * and returns FALSE from the enclosing function. */
#define UNIA_SET_PARAMETER(PARAM_ID, DESC) \
	do \
	{ \
		if (imx_audio_uniaudio_dec->codec->set_parameter(imx_audio_uniaudio_dec->handle, (PARAM_ID), &parameter) != ACODEC_SUCCESS) \
		{ \
			GST_ERROR_OBJECT(dec, "setting %s parameter failed: %s", (DESC), imx_audio_uniaudio_dec->codec->get_last_error(imx_audio_uniaudio_dec->handle)); \
			gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec); \
			return FALSE; \
		} \
	} \
	while (0)

/* Like UNIA_SET_PARAMETER, but passes an explicit value pointer instead of
 * the local "parameter" union. */
#define UNIA_SET_PARAMETER_EX(PARAM_ID, DESC, VALUE) \
	do \
	{ \
		if (imx_audio_uniaudio_dec->codec->set_parameter(imx_audio_uniaudio_dec->handle, (PARAM_ID), ((UniACodecParameter *)(VALUE))) != ACODEC_SUCCESS) \
		{ \
			GST_ERROR_OBJECT(dec, "setting %s parameter failed: %s", (DESC), imx_audio_uniaudio_dec->codec->get_last_error(imx_audio_uniaudio_dec->handle)); \
			gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec); \
			return FALSE; \
		} \
	} \
	while (0)

	if (imx_audio_uniaudio_dec->handle != NULL)
	{
		/* drain old decoder handle */
		gst_imx_audio_uniaudio_dec_handle_frame(dec, NULL);
		gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec);
	}

	if ((imx_audio_uniaudio_dec->codec = gst_imx_audio_uniaudio_codec_table_get_codec(caps)) == NULL)
	{
		GST_ERROR_OBJECT(dec, "found no suitable codec for caps %" GST_PTR_FORMAT, (gpointer)caps);
		return FALSE;
	}

	memory_ops.Calloc = gst_imx_audio_uniaudio_dec_calloc;
	memory_ops.Malloc = gst_imx_audio_uniaudio_dec_malloc;
	memory_ops.Free = gst_imx_audio_uniaudio_dec_free;
	memory_ops.ReAlloc = gst_imx_audio_uniaudio_dec_realloc;

	if ((imx_audio_uniaudio_dec->handle = imx_audio_uniaudio_dec->codec->create_codec(&memory_ops)) == NULL)
	{
		GST_ERROR_OBJECT(dec, "creating codec handle for caps %" GST_PTR_FORMAT " failed", (gpointer)caps);
		return FALSE;
	}

	/* Get configuration parameters from caps */
	{
		int samplerate, channels, bitrate, block_align, wmaversion;
		gchar const *stream_format, *sample_format;
		GValue const *value;
		gboolean framed, is_vorbis;
		GstBuffer *codec_data = NULL;
		GstStructure *structure = gst_caps_get_structure(caps, 0);

		imx_audio_uniaudio_dec->skip_header_counter = 0;

		/* Vorbis input is always treated as framed */
		is_vorbis = (g_strcmp0(gst_structure_get_name(structure), "audio/x-vorbis") == 0);
		parameter.framed = is_vorbis || (gst_structure_get_boolean(structure, "framed", &framed) && framed) || (gst_structure_get_boolean(structure, "parsed", &framed) && framed);
		GST_DEBUG_OBJECT(dec, "input is framed: %d", parameter.framed);
		UNIA_SET_PARAMETER(UNIA_FRAMED, "framed");

		if (gst_structure_get_int(structure, "rate", &samplerate))
		{
			GST_DEBUG_OBJECT(dec, "input caps sample rate: %d Hz", samplerate);
			parameter.samplerate = samplerate;
			UNIA_SET_PARAMETER(UNIA_SAMPLERATE, "sample rate");
		}

		if (gst_structure_get_int(structure, "channels", &channels))
		{
			CHAN_TABLE table;

			GST_DEBUG_OBJECT(dec, "input caps channel count: %d", channels);
			parameter.channels = channels;
			UNIA_SET_PARAMETER(UNIA_CHANNEL, "channel");

			memset(&table, 0, sizeof(table));
			table.size = CHANNEL_MAPS_SIZE;
			memcpy(&table.channel_table, uniaudio_channel_maps, sizeof(uniaudio_channel_maps));
			UNIA_SET_PARAMETER_EX(UNIA_CHAN_MAP_TABLE, "channel map", &table);
		}

		if (gst_structure_get_int(structure, "bitrate", &bitrate))
		{
			/* fixed: message previously said "channel count" */
			GST_DEBUG_OBJECT(dec, "input caps bitrate: %d", bitrate);
			parameter.bitrate = bitrate;
			UNIA_SET_PARAMETER(UNIA_BITRATE, "bitrate");
		}

		if (gst_structure_get_int(structure, "block_align", &block_align))
		{
			GST_DEBUG_OBJECT(dec, "block alignment: %d", block_align);
			parameter.blockalign = block_align;
			UNIA_SET_PARAMETER(UNIA_WMA_BlOCKALIGN, "blockalign");
		}

		if (gst_structure_get_int(structure, "wmaversion", &wmaversion))
		{
			GST_DEBUG_OBJECT(dec, "WMA version: %d", wmaversion);
			parameter.version = wmaversion;
			UNIA_SET_PARAMETER(UNIA_WMA_VERSION, "wmaversion");
		}

		if ((stream_format = gst_structure_get_string(structure, "stream-format")) != NULL)
		{
			GST_DEBUG_OBJECT(dec, "input caps stream format: %s", stream_format);
			/* fixed: "raw" was checked twice and the dangling else
			 * clobbered the ADIF case; use a proper else-if chain */
			if (g_strcmp0(stream_format, "adts") == 0)
				parameter.stream_type = STREAM_ADTS;
			else if (g_strcmp0(stream_format, "adif") == 0)
				parameter.stream_type = STREAM_ADIF;
			else if (g_strcmp0(stream_format, "raw") == 0)
				parameter.stream_type = STREAM_RAW;
			else
				parameter.stream_type = STREAM_UNKNOW;
			UNIA_SET_PARAMETER(UNIA_STREAM_TYPE, "stream type");
		}

		if ((sample_format = gst_structure_get_string(structure, "format")) != NULL)
		{
			GstAudioFormat fmt;
			GstAudioFormatInfo const * fmtinfo;

			GST_DEBUG_OBJECT(dec, "input caps stream sample format: %s", sample_format);
			if ((fmt = gst_audio_format_from_string(sample_format)) == GST_AUDIO_FORMAT_UNKNOWN)
			{
				GST_ERROR_OBJECT(dec, "format is unknown, cannot continue");
				return FALSE;
			}

			fmtinfo = gst_audio_format_get_info(fmt);
			g_assert(fmtinfo != NULL);

			parameter.depth = GST_AUDIO_FORMAT_INFO_DEPTH(fmtinfo);
			UNIA_SET_PARAMETER(UNIA_DEPTH, "depth");
		}

		/* Handle codec data, either directly from a codec_data caps,
		 * or assemble it from a list of buffers specified by the
		 * streamheader caps (typically used by Vorbis audio) */

		/* Cleanup old codec data first */
		if (imx_audio_uniaudio_dec->codec_data != NULL)
		{
			gst_buffer_unref(imx_audio_uniaudio_dec->codec_data);
			imx_audio_uniaudio_dec->codec_data = NULL;
		}

		/* Check if either codec_data or streamheader caps exist */
		if ((value = gst_structure_get_value(structure, "codec_data")) != NULL)
		{
			/* codec_data caps exist - simply make a copy of its buffer
			 * (this makes sure we own that buffer properly) */
			GstBuffer *caps_buffer;
			GST_DEBUG_OBJECT(dec, "reading codec_data value");
			caps_buffer = gst_value_get_buffer(value);
			g_assert(caps_buffer != NULL);
			codec_data = gst_buffer_copy(caps_buffer);
		}
		else if ((value = gst_structure_get_value(structure, "streamheader")) != NULL)
		{
			/* streamheader caps exist, which are a list of buffers;
			 * these buffers need to be concatenated and then given as
			 * one consecutive codec data buffer to the decoder */
			guint i, num_buffers = gst_value_array_get_size(value);
			GstAdapter *streamheader_adapter = gst_adapter_new();

			GST_DEBUG_OBJECT(dec, "reading streamheader value (%u headers)", num_buffers);

			imx_audio_uniaudio_dec->num_vorbis_headers = num_buffers;

			/* Use the GstAdapter to stitch these buffers together */
			for (i = 0; i < num_buffers; ++i)
			{
				GValue const *array_value = gst_value_array_get_value(value, i);
				GstBuffer *buf = gst_value_get_buffer(array_value);
				GST_DEBUG_OBJECT(dec, "add streamheader buffer #%u with %" G_GSIZE_FORMAT " byte", i, gst_buffer_get_size(buf));
				gst_adapter_push(streamheader_adapter, gst_buffer_copy(buf));
			}
			codec_data = gst_adapter_take_buffer(streamheader_adapter, gst_adapter_available(streamheader_adapter));
			g_object_unref(G_OBJECT(streamheader_adapter));
		}

		/* At this point, if either codec_data or streamheader caps were found,
		 * the codec_data pointer will refer to a valid non-empty buffer with
		 * codec data inside. This buffer is owned by this audio decoder object,
		 * and must be kept around for as long as the decoder needs to be ran,
		 * since the set_parameter call below does *not* copy the codec data
		 * bytes into some internal buffer. Instead, the uniaudio decoder plugin
		 * expects the caller to keep the buffer valid. */
		if ((codec_data != NULL) && (gst_buffer_get_size(codec_data) != 0))
		{
			GstMapInfo map;
			gst_buffer_map(codec_data, &map, GST_MAP_READ);
			parameter.codecData.size = map.size;
			parameter.codecData.buf = (char *)(map.data);
			UNIA_SET_PARAMETER(UNIA_CODEC_DATA, "codec data");
			gst_buffer_unmap(codec_data, &map);

			imx_audio_uniaudio_dec->codec_data = codec_data;

			GST_DEBUG_OBJECT(dec, "codec data: %" G_GUINT32_FORMAT " byte", (guint32)(parameter.codecData.size));
		}
	}

	GST_DEBUG_OBJECT(dec, "decoder configured");

	imx_audio_uniaudio_dec->has_audioinfo_set = FALSE;

#undef UNIA_SET_PARAMETER
#undef UNIA_SET_PARAMETER_EX

	return TRUE;
}
static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps) { gboolean res; GstCaps *src_caps; GstStructure *s; char *encoding_name; gint channels, rate; const char *sprop_stereo = NULL; char *sprop_maxcapturerate = NULL; src_caps = gst_pad_get_allowed_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload)); if (src_caps) { src_caps = gst_caps_make_writable (src_caps); src_caps = gst_caps_truncate (src_caps); s = gst_caps_get_structure (src_caps, 0); gst_structure_fixate_field_string (s, "encoding-name", "OPUS"); encoding_name = g_strdup (gst_structure_get_string (s, "encoding-name")); gst_caps_unref (src_caps); } else { encoding_name = g_strdup ("X-GST-OPUS-DRAFT-SPITTKA-00"); } s = gst_caps_get_structure (caps, 0); if (gst_structure_get_int (s, "channels", &channels)) { if (channels > 2) { GST_ERROR_OBJECT (payload, "More than 2 channels with channel-mapping-family=0 is invalid"); return FALSE; } else if (channels == 2) { sprop_stereo = "1"; } else { sprop_stereo = "0"; } } if (gst_structure_get_int (s, "rate", &rate)) { sprop_maxcapturerate = g_strdup_printf ("%d", rate); } gst_rtp_base_payload_set_options (payload, "audio", FALSE, encoding_name, 48000); g_free (encoding_name); if (sprop_maxcapturerate && sprop_stereo) { res = gst_rtp_base_payload_set_outcaps (payload, "sprop-maxcapturerate", G_TYPE_STRING, sprop_maxcapturerate, "sprop-stereo", G_TYPE_STRING, sprop_stereo, NULL); } else if (sprop_maxcapturerate) { res = gst_rtp_base_payload_set_outcaps (payload, "sprop-maxcapturerate", G_TYPE_STRING, sprop_maxcapturerate, NULL); } else if (sprop_stereo) { res = gst_rtp_base_payload_set_outcaps (payload, "sprop-stereo", G_TYPE_STRING, sprop_stereo, NULL); } else { res = gst_rtp_base_payload_set_outcaps (payload, NULL); } g_free (sprop_maxcapturerate); return res; }
/* GstBus callback for the playbin pipeline: translates bus messages into
 * RBPlayer signals (error, EOS, buffering, tags, application events,
 * redirects) and drives the stream-change state machine. Always returns
 * TRUE so the watch stays installed. */
static gboolean
bus_cb (GstBus *bus, GstMessage *message, RBPlayerGst *mp)
{
	const GstStructure *structure;
	g_return_val_if_fail (mp != NULL, FALSE);

	switch (GST_MESSAGE_TYPE (message)) {
	case GST_MESSAGE_ERROR: {
		char *debug = NULL;
		GError *error = NULL;
		GError *sig_error = NULL;
		int code;
		gboolean emit = TRUE;

		gst_message_parse_error (message, &error, &debug);

		/* If we've already got an error, ignore 'internal data flow error'
		 * type messages, as they're too generic to be helpful.
		 */
		if (mp->priv->emitted_error &&
		    error->domain == GST_STREAM_ERROR &&
		    error->code == GST_STREAM_ERROR_FAILED) {
			rb_debug ("Ignoring generic error \"%s\"", error->message);
			emit = FALSE;
		}

		code = rb_gst_error_get_error_code (error);

		if (emit) {
			/* sink errors are reported as output-device failures,
			 * everything else is passed through as a stream error */
			if (message_from_sink (mp->priv->audio_sink, message)) {
				rb_debug ("got error from sink: %s (%s)", error->message, debug);
				/* Translators: the parameter here is an error message */
				g_set_error (&sig_error, RB_PLAYER_ERROR, code,
					     _("Failed to open output device: %s"), error->message);
			} else {
				rb_debug ("got error from stream: %s (%s)", error->message, debug);
				g_set_error (&sig_error, RB_PLAYER_ERROR, code,
					     "%s", error->message);
			}
			state_change_finished (mp, sig_error);
			mp->priv->emitted_error = TRUE;
			if (mp->priv->playbin_stream_changing) {
				emit_playing_stream_and_tags (mp, TRUE);
			}
			_rb_player_emit_error (RB_PLAYER (mp), mp->priv->stream_data, sig_error);
		}

		/* close if not already closing */
		if (mp->priv->uri != NULL)
			rb_player_close (RB_PLAYER (mp), NULL, NULL);

		g_error_free (error);
		g_free (debug);
		break;
	}

	case GST_MESSAGE_EOS:
		/* with a gapless stream change pending, EOS means "switch now";
		 * otherwise report end-of-stream to the player */
		if (mp->priv->stream_change_pending) {
			rb_debug ("got EOS with stream change pending");
			start_state_change (mp, GST_STATE_READY, SET_NEXT_URI);
		} else {
			_rb_player_emit_eos (RB_PLAYER (mp), mp->priv->stream_data, FALSE);
		}
		break;

	case GST_MESSAGE_STATE_CHANGED:
		{
			GstState oldstate;
			GstState newstate;
			GstState pending;
			gst_message_parse_state_changed (message, &oldstate, &newstate, &pending);
			/* only the playbin's own final (no longer pending)
			 * state changes complete a state transition */
			if (GST_MESSAGE_SRC (message) == GST_OBJECT (mp->priv->playbin)) {
				if (pending == GST_STATE_VOID_PENDING) {
					rb_debug ("playbin reached state %s", gst_element_state_get_name (newstate));
					state_change_finished (mp, NULL);
				}
			}
			break;
		}

	case GST_MESSAGE_TAG: {
		GstTagList *tags;

		if (mp->priv->playbin_stream_changing) {
			rb_debug ("ignoring tags during playbin stream change");
			break;
		}

		gst_message_parse_tag (message, &tags);
		/* while a stream change is pending, queue the tags and replay
		 * them once the new stream is current */
		if (mp->priv->stream_change_pending) {
			mp->priv->stream_tags = g_list_append (mp->priv->stream_tags, tags);
		} else {
			gst_tag_list_foreach (tags, (GstTagForeachFunc) process_tag, mp);
			gst_tag_list_free (tags);
		}
		break;
	}

	case GST_MESSAGE_BUFFERING: {
		gint progress;

		structure = gst_message_get_structure (message);
		if (!gst_structure_get_int (structure, "buffer-percent", &progress)) {
			g_warning ("Could not get value from BUFFERING message");
			break;
		}
		/* pause while buffering, resume once the buffer is full */
		if (progress >= 100) {
			mp->priv->buffering = FALSE;
			if (mp->priv->playing) {
				rb_debug ("buffering done, setting pipeline back to PLAYING");
				gst_element_set_state (mp->priv->playbin, GST_STATE_PLAYING);
			} else {
				rb_debug ("buffering done, leaving pipeline PAUSED");
			}
		} else if (mp->priv->buffering == FALSE && mp->priv->playing) {
			rb_debug ("buffering - temporarily pausing playback");
			gst_element_set_state (mp->priv->playbin, GST_STATE_PAUSED);
			mp->priv->buffering = TRUE;
		}

		_rb_player_emit_buffering (RB_PLAYER (mp), mp->priv->stream_data, progress);
		break;
	}

	case GST_MESSAGE_APPLICATION:
		/* application messages are forwarded as named player events */
		structure = gst_message_get_structure (message);
		_rb_player_emit_event (RB_PLAYER (mp), mp->priv->stream_data,
				       gst_structure_get_name (structure), NULL);
		break;

	case GST_MESSAGE_STREAM_START:
		/* the new stream is actually playing now; finish the change */
		if (mp->priv->playbin_stream_changing) {
			rb_debug ("got STREAM_START message");
			mp->priv->playbin_stream_changing = FALSE;
			emit_playing_stream_and_tags (mp, TRUE);
		}
		break;

	case GST_MESSAGE_ELEMENT:
		structure = gst_message_get_structure (message);
		if (gst_is_missing_plugin_message (message)) {
			handle_missing_plugin_message (mp, message);
		} else if (gst_structure_has_name (structure, "redirect")) {
			const char *uri = gst_structure_get_string (structure, "new-location");
			_rb_player_emit_redirect (RB_PLAYER (mp), mp->priv->stream_data, uri);
		}
		break;

	default:
		break;
	}

	/* emit message signals too, so plugins can process messages */
	gst_bus_async_signal_func (bus, message, NULL);

	return TRUE;
}
/* Registers the USN support library and the codec's node library/type with
 * the DSP, then allocates and creates the DSP node for the current codec.
 * Returns the created node, or NULL on any failure (nothing to clean up for
 * the caller in that case). */
static void *create_node(GstDspBase *base)
{
	GstDspADec *self;
	struct td_codec *codec;
	int dsp_handle;
	struct dsp_node *node;

	const struct dsp_uuid usn_uuid = { 0x79A3C8B3, 0x95F2, 0x403F, 0x9A, 0x4B,
		{ 0xCF, 0x80, 0x57, 0x73, 0x05, 0x41 } };

	self = GST_DSP_ADEC(base);

	dsp_handle = base->dsp_handle;

	if (!gstdsp_register(dsp_handle, &usn_uuid, DSP_DCD_LIBRARYTYPE, "usn.dll64P")) {
		pr_err(self, "failed to register usn node library");
		return NULL;
	}

	codec = base->codec;
	if (!codec) {
		pr_err(self, "unknown algorithm");
		return NULL;
	}
	pr_info(base, "algo=%s", codec->filename);

	if (!gstdsp_register(dsp_handle, codec->uuid, DSP_DCD_LIBRARYTYPE, codec->filename)) {
		pr_err(self, "failed to register algo node library");
		return NULL;
	}

	if (!gstdsp_register(dsp_handle, codec->uuid, DSP_DCD_NODETYPE, codec->filename)) {
		pr_err(self, "failed to register algo node");
		return NULL;
	}

	{
		struct dsp_node_attr_in attrs = {
			.cb = sizeof(attrs),
			.priority = 10,
			.timeout = 10000,
		};
		void *arg_data;

		/* codec fills in the profile id and allocates its create args */
		codec->create_args(base, &attrs.profile_id, &arg_data);

		if (!arg_data)
			return NULL;

		if (!dsp_node_allocate(dsp_handle, base->proc, codec->uuid, arg_data, &attrs, &node)) {
			pr_err(self, "dsp node allocate failed");
			free(arg_data);
			return NULL;
		}
		free(arg_data);
	}

	if (!dsp_node_create(dsp_handle, node)) {
		pr_err(self, "dsp node create failed");
		dsp_node_free(dsp_handle, node);
		return NULL;
	}

	pr_info(self, "dsp node created");

	if (codec->send_params)
		codec->send_params(base, node);

	if (codec->setup_params)
		codec->setup_params(base);

	base->flush_buffer = codec->flush_buffer;

	return node;
}

/* Builds the output caps (16-bit signed native-endian raw audio) from the
 * input caps, copying channels/rate when present, and records the AAC
 * framing/stream-format flags on the decoder.
 *
 * Fixes: "stream-format" may be absent from the caps — strcmp() was called
 * on a possibly-NULL string; "framed" may also be absent, in which case
 * tmp was read uninitialized. */
static inline void configure_caps(GstDspADec *self, GstCaps *in, GstCaps *out)
{
	GstDspBase *base;
	GstStructure *out_struc, *in_struc;
	int channels;

	base = GST_DSP_BASE(self);

	in_struc = gst_caps_get_structure(in, 0);

	out_struc = gst_structure_new("audio/x-raw-int",
				      "endianness", G_TYPE_INT, G_BYTE_ORDER,
				      "signed", G_TYPE_BOOLEAN, TRUE,
				      "width", G_TYPE_INT, 16,
				      "depth", G_TYPE_INT, 16, NULL);

	if (gst_structure_get_int(in_struc, "channels", &channels))
		gst_structure_set(out_struc, "channels", G_TYPE_INT, channels, NULL);

	if (gst_structure_get_int(in_struc, "rate", &self->samplerate))
		gst_structure_set(out_struc, "rate", G_TYPE_INT, self->samplerate, NULL);

	if (base->alg == GSTDSP_AACDEC) {
		const char *fmt;
		gboolean tmp = FALSE; /* stays FALSE if "framed" is absent */
		gst_structure_get_boolean(in_struc, "framed", &tmp);
		self->packetized = tmp;
		fmt = gst_structure_get_string(in_struc, "stream-format");
		/* fmt is NULL when the field is missing; treat that as not raw */
		self->raw = fmt && strcmp(fmt, "raw") == 0;
	}

	base->output_buffer_size = 4 * 1024;
	gst_caps_append_structure(out, out_struc);
}
/* returns static descriptions and dynamic ones (such as video/x-raw), * or NULL if caps aren't known at all */ static gchar * format_info_get_desc (const FormatInfo * info, const GstCaps * caps) { const GstStructure *s; g_assert (info != NULL); if (info->desc != NULL) return g_strdup (_(info->desc)); s = gst_caps_get_structure (caps, 0); if (strcmp (info->type, "video/x-raw") == 0) { gchar *ret = NULL; const gchar *str = 0; GstVideoFormat format; const GstVideoFormatInfo *finfo; str = gst_structure_get_string (s, "format"); if (str == NULL) return g_strdup (_("Uncompressed video")); format = gst_video_format_from_string (str); if (format == GST_VIDEO_FORMAT_UNKNOWN) return g_strdup (_("Uncompressed video")); finfo = gst_video_format_get_info (format); if (GST_VIDEO_FORMAT_INFO_IS_GRAY (finfo)) { ret = g_strdup (_("Uncompressed gray")); } else if (GST_VIDEO_FORMAT_INFO_IS_YUV (finfo)) { const gchar *layout; const gchar *subs; gint w_sub, h_sub; w_sub = GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1); h_sub = GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 1); if (GST_VIDEO_FORMAT_INFO_N_PLANES (finfo) == 1) { layout = "planar"; } else { layout = "packed"; } if (w_sub == 1 && h_sub == 1) { subs = "4:4:4"; } else if (w_sub == 2 && h_sub == 1) { subs = "4:2:2"; } else if (w_sub == 2 && h_sub == 2) { subs = "4:2:0"; } else if (w_sub == 4 && h_sub == 1) { subs = "4:1:1"; } else { subs = ""; } ret = g_strdup_printf (_("Uncompressed %s YUV %s"), layout, subs); } else if (GST_VIDEO_FORMAT_INFO_IS_RGB (finfo)) { gboolean alpha, palette; gint bits; alpha = GST_VIDEO_FORMAT_INFO_HAS_ALPHA (finfo); palette = GST_VIDEO_FORMAT_INFO_HAS_PALETTE (finfo); bits = GST_VIDEO_FORMAT_INFO_BITS (finfo); ret = g_strdup_printf (_("Uncompressed %s%d-bit %s"), palette ? "palettized " : "", bits, alpha ? 
"RGBA" : "RGB"); } else { ret = g_strdup (_("Uncompressed video")); } return ret; } else if (strcmp (info->type, "video/x-h263") == 0) { const gchar *variant, *ret; variant = gst_structure_get_string (s, "variant"); if (variant == NULL) ret = "H.263"; else if (strcmp (variant, "itu") == 0) ret = "ITU H.26n"; /* why not ITU H.263? (tpm) */ else if (strcmp (variant, "lead") == 0) ret = "Lead H.263"; else if (strcmp (variant, "microsoft") == 0) ret = "Microsoft H.263"; else if (strcmp (variant, "vdolive") == 0) ret = "VDOLive"; else if (strcmp (variant, "vivo") == 0) ret = "Vivo H.263"; else if (strcmp (variant, "xirlink") == 0) ret = "Xirlink H.263"; else { GST_WARNING ("Unknown H263 variant '%s'", variant); ret = "H.263"; } return g_strdup (ret); } else if (strcmp (info->type, "video/x-h264") == 0) { const gchar *variant, *ret; variant = gst_structure_get_string (s, "variant"); if (variant == NULL) ret = "H.264"; else if (strcmp (variant, "itu") == 0) ret = "ITU H.264"; else if (strcmp (variant, "videosoft") == 0) ret = "Videosoft H.264"; else if (strcmp (variant, "lead") == 0) ret = "Lead H.264"; else { GST_WARNING ("Unknown H264 variant '%s'", variant); ret = "H.264"; } return g_strdup (ret); } else if (strcmp (info->type, "video/x-divx") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "divxversion", &ver) || ver <= 2) { GST_WARNING ("Unexpected DivX version in %" GST_PTR_FORMAT, caps); return g_strdup ("DivX MPEG-4"); } return g_strdup_printf (_("DivX MPEG-4 Version %d"), ver); } else if (strcmp (info->type, "video/x-msmpeg") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "msmpegversion", &ver) || ver < 40 || ver > 49) { GST_WARNING ("Unexpected msmpegversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Microsoft MPEG-4 4.x"); } return g_strdup_printf ("Microsoft MPEG-4 4.%d", ver % 10); } else if (strcmp (info->type, "video/x-truemotion") == 0) { gint ver = 0; gst_structure_get_int (s, "trueversion", &ver); switch (ver) { case 1: return 
g_strdup_printf ("Duck TrueMotion 1"); case 2: return g_strdup_printf ("TrueMotion 2.0"); default: GST_WARNING ("Unexpected trueversion in %" GST_PTR_FORMAT, caps); break; } return g_strdup_printf ("TrueMotion"); } else if (strcmp (info->type, "video/x-xan") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "wcversion", &ver) || ver < 1) { GST_WARNING ("Unexpected wcversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Xan Wing Commander"); } return g_strdup_printf ("Xan Wing Commander %u", ver); } else if (strcmp (info->type, "video/x-indeo") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "indeoversion", &ver) || ver < 2) { GST_WARNING ("Unexpected indeoversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Intel Indeo"); } return g_strdup_printf ("Intel Indeo %u", ver); } else if (strcmp (info->type, "audio/x-wma") == 0) { gint ver = 0; gst_structure_get_int (s, "wmaversion", &ver); switch (ver) { case 1: case 2: case 3: return g_strdup_printf ("Windows Media Audio %d", ver + 6); default: break; } GST_WARNING ("Unexpected wmaversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Windows Media Audio"); } else if (strcmp (info->type, "video/x-wmv") == 0) { gint ver = 0; gst_structure_get_int (s, "wmvversion", &ver); switch (ver) { case 1: case 2: case 3: return g_strdup_printf ("Windows Media Video %d", ver + 6); default: break; } GST_WARNING ("Unexpected wmvversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Windows Media Video"); } else if (strcmp (info->type, "audio/x-mace") == 0) { gint ver = 0; gst_structure_get_int (s, "maceversion", &ver); if (ver == 3 || ver == 6) { return g_strdup_printf ("MACE-%d", ver); } else { GST_WARNING ("Unexpected maceversion in %" GST_PTR_FORMAT, caps); return g_strdup ("MACE"); } } else if (strcmp (info->type, "video/x-svq") == 0) { gint ver = 0; gst_structure_get_int (s, "svqversion", &ver); if (ver == 1 || ver == 3) { return g_strdup_printf ("Sorensen Video %d", ver); } else { GST_WARNING ("Unexpected svqversion in 
%" GST_PTR_FORMAT, caps); return g_strdup ("Sorensen Video"); } } else if (strcmp (info->type, "video/x-asus") == 0) { gint ver = 0; gst_structure_get_int (s, "asusversion", &ver); if (ver == 1 || ver == 2) { return g_strdup_printf ("Asus Video %d", ver); } else { GST_WARNING ("Unexpected asusversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Asus Video"); } } else if (strcmp (info->type, "video/x-ati-vcr") == 0) { gint ver = 0; gst_structure_get_int (s, "vcrversion", &ver); if (ver == 1 || ver == 2) { return g_strdup_printf ("ATI VCR %d", ver); } else { GST_WARNING ("Unexpected acrversion in %" GST_PTR_FORMAT, caps); return g_strdup ("ATI VCR"); } } else if (strcmp (info->type, "audio/x-adpcm") == 0) { const GValue *layout_val; layout_val = gst_structure_get_value (s, "layout"); if (layout_val != NULL && G_VALUE_HOLDS_STRING (layout_val)) { const gchar *layout; if ((layout = g_value_get_string (layout_val))) { gchar *layout_upper, *ret; if (strcmp (layout, "swf") == 0) return g_strdup ("Shockwave ADPCM"); if (strcmp (layout, "microsoft") == 0) return g_strdup ("Microsoft ADPCM"); if (strcmp (layout, "quicktime") == 0) return g_strdup ("Quicktime ADPCM"); if (strcmp (layout, "westwood") == 0) return g_strdup ("Westwood ADPCM"); if (strcmp (layout, "yamaha") == 0) return g_strdup ("Yamaha ADPCM"); /* FIXME: other layouts: sbpro2, sbpro3, sbpro4, ct, g726, ea, * adx, xa, 4xm, smjpeg, dk4, dk3, dvi */ layout_upper = g_ascii_strup (layout, -1); ret = g_strdup_printf ("%s ADPCM", layout_upper); g_free (layout_upper); return ret; } } return g_strdup ("ADPCM"); } else if (strcmp (info->type, "audio/mpeg") == 0) { gint ver = 0, layer = 0; gst_structure_get_int (s, "mpegversion", &ver); switch (ver) { case 1: gst_structure_get_int (s, "layer", &layer); switch (layer) { case 1: case 2: case 3: return g_strdup_printf ("MPEG-1 Layer %d (MP%d)", layer, layer); default: break; } GST_WARNING ("Unexpected MPEG-1 layer in %" GST_PTR_FORMAT, caps); return g_strdup ("MPEG-1 
Audio"); case 4: return g_strdup ("MPEG-4 AAC"); default: break; } GST_WARNING ("Unexpected audio mpegversion in %" GST_PTR_FORMAT, caps); return g_strdup ("MPEG Audio"); } else if (strcmp (info->type, "audio/x-pn-realaudio") == 0) { gint ver = 0; gst_structure_get_int (s, "raversion", &ver); switch (ver) { case 1: return g_strdup ("RealAudio 14k4bps"); case 2: return g_strdup ("RealAudio 28k8bps"); case 8: return g_strdup ("RealAudio G2 (Cook)"); default: break; } GST_WARNING ("Unexpected raversion in %" GST_PTR_FORMAT, caps); return g_strdup ("RealAudio"); } else if (strcmp (info->type, "video/x-pn-realvideo") == 0) { gint ver = 0; gst_structure_get_int (s, "rmversion", &ver); switch (ver) { case 1: return g_strdup ("RealVideo 1.0"); case 2: return g_strdup ("RealVideo 2.0"); case 3: return g_strdup ("RealVideo 3.0"); case 4: return g_strdup ("RealVideo 4.0"); default: break; } GST_WARNING ("Unexpected rmversion in %" GST_PTR_FORMAT, caps); return g_strdup ("RealVideo"); } else if (strcmp (info->type, "video/mpeg") == 0) { gboolean sysstream; gint ver = 0; if (!gst_structure_get_boolean (s, "systemstream", &sysstream) || !gst_structure_get_int (s, "mpegversion", &ver) || ver < 1 || ver > 4) { GST_WARNING ("Missing fields in mpeg video caps %" GST_PTR_FORMAT, caps); } else { if (sysstream) { return g_strdup_printf ("MPEG-%d System Stream", ver); } else { return g_strdup_printf ("MPEG-%d Video", ver); } } return g_strdup ("MPEG Video"); } else if (strcmp (info->type, "audio/x-raw") == 0) { gint depth = 0; gboolean is_float; const gchar *str; GstAudioFormat format; const GstAudioFormatInfo *finfo; str = gst_structure_get_string (s, "format"); format = gst_audio_format_from_string (str); if (format == GST_AUDIO_FORMAT_UNKNOWN) return g_strdup (_("Uncompressed audio")); finfo = gst_audio_format_get_info (format); depth = GST_AUDIO_FORMAT_INFO_DEPTH (finfo); is_float = GST_AUDIO_FORMAT_INFO_IS_FLOAT (finfo); return g_strdup_printf (_("Raw %d-bit %s audio"), depth, 
is_float ? "floating-point" : "PCM"); } return NULL; }
/* Test-harness bus watch: validates farstream element messages (failing the
 * test on farstream-error), forwards each new local candidate to the peer
 * conference as a remote candidate, and fails on any ERROR message while
 * only logging WARNINGs. Always returns TRUE to keep the watch installed.
 *
 * Fix: several copy-pasted assertion messages reported the wrong field
 * ("src-object" for the error-no/error-msg checks) and the wrong message
 * name ("farstream-error" in the new-local-candidate branch), producing
 * misleading diagnostics on failure. */
static gboolean
bus_watch (GstBus *bus, GstMessage *message, gpointer user_data)
{
  struct SimpleMsnConference *dat = user_data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:
    {
      const GstStructure *s = gst_message_get_structure (message);

      ts_fail_if (s==NULL, "NULL structure in element message");

      if (gst_structure_has_name (s, "farstream-error")) {
        const GValue *value;
        FsError errorno;
        const gchar *error;

        ts_fail_unless (
            gst_implements_interface_check (GST_MESSAGE_SRC (message),
                FS_TYPE_CONFERENCE),
            "Received farstream-error from non-farstream element");
        ts_fail_unless (
            gst_structure_has_field_typed (s, "src-object", G_TYPE_OBJECT),
            "farstream-error structure has no src-object field");
        ts_fail_unless (
            gst_structure_has_field_typed (s, "error-no", FS_TYPE_ERROR),
            "farstream-error structure has no error-no field");
        ts_fail_unless (
            gst_structure_has_field_typed (s, "error-msg", G_TYPE_STRING),
            "farstream-error structure has no error-msg field");

        value = gst_structure_get_value (s, "error-no");
        errorno = g_value_get_enum (value);
        error = gst_structure_get_string (s, "error-msg");

        /* any farstream error aborts the test */
        ts_fail ("Error on BUS (%d) %s", errorno, error);
      } else if (gst_structure_has_name (s, "farstream-new-local-candidate")) {
        FsStream *stream;
        FsCandidate *candidate;
        const GValue *value;

        ts_fail_unless (
            gst_implements_interface_check (GST_MESSAGE_SRC (message),
                FS_TYPE_CONFERENCE),
            "Received farstream-new-local-candidate from non-farstream element");
        ts_fail_unless (
            gst_structure_has_field_typed (s, "stream", FS_TYPE_STREAM),
            "farstream-new-local-candidate structure has no stream field");
        ts_fail_unless (
            gst_structure_has_field_typed (s, "candidate", FS_TYPE_CANDIDATE),
            "farstream-new-local-candidate structure has no candidate field");

        value = gst_structure_get_value (s, "stream");
        stream = g_value_get_object (value);
        value = gst_structure_get_value (s, "candidate");
        candidate = g_value_get_boxed (value);

        ts_fail_unless (stream && candidate, "new-local-candidate with NULL"
            " stream(%p) or candidate(%p)", stream, candidate);

        /* loop our local candidate back into the target conference */
        if (dat->target)
        {
          GError *error = NULL;
          GList *list = g_list_append (NULL, candidate);
          gboolean add_remote_candidates_res;

          GST_DEBUG ("Setting candidate: %s %d",
              candidate->ip, candidate->port);

          add_remote_candidates_res = fs_stream_add_remote_candidates (
              dat->target->stream, list, &error);
          ts_fail_unless (add_remote_candidates_res,
              "Could not set remote candidate: %s",
              error ? error->message : "No GError");
          ts_fail_unless (error == NULL);
          g_list_free (list);
        }
      }
    }
      break;
    case GST_MESSAGE_ERROR:
    {
      GError *error = NULL;
      gchar *debug = NULL;
      gst_message_parse_error (message, &error, &debug);

      ts_fail ("Got an error on the BUS (%d): %s (%s)", error->code,
          error->message, debug);
      g_error_free (error);
      g_free (debug);
    }
      break;
    case GST_MESSAGE_WARNING:
    {
      GError *error = NULL;
      gchar *debug = NULL;
      gst_message_parse_warning (message, &error, &debug);

      GST_DEBUG ("%d: Got a warning on the BUS: %s (%s)", error->code,
          error->message, debug);
      g_error_free (error);
      g_free (debug);
    }
      break;
    default:
      break;
  }

  return TRUE;
}
/* Handle a message from the recording pipeline's GstBus.
 *
 * EOS stops the recording (evGstRecordEnded); state changes cancel the
 * streaming-source timeout once the pipeline reaches PLAYING; errors are
 * logged; "missing plugin" and "redirect" element messages are handled;
 * STREAM_STATUS is used to configure a timeout on souphttpsrc.
 *
 * Fixes: the string returned by gst_caps_to_string() was leaked, and
 * gst_object_unref() could be called on a NULL owner when
 * gst_pad_get_parent_element() returned NULL.
 */
void eServiceMP3Record::gstBusCall(GstMessage *msg)
{
	if (!msg)
		return;
	ePtr<iRecordableService> ptr = this;
	gchar *sourceName;
	GstObject *source;
	source = GST_MESSAGE_SRC(msg);
	if (!GST_IS_OBJECT(source))
		return;
	sourceName = gst_object_get_name(source);
	switch (GST_MESSAGE_TYPE (msg))
	{
		case GST_MESSAGE_EOS:
			eDebug("[eMP3ServiceRecord] gstBusCall eos event");
			// Stream end -> stop recording
			m_event((iRecordableService*)this, evGstRecordEnded);
			break;
		case GST_MESSAGE_STATE_CHANGED:
		{
			// Only care about state changes of the top-level pipeline
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_recording_pipeline))
				break;
			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
			if(old_state == new_state)
				break;
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
			eDebug("[eMP3ServiceRecord] gstBusCall state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
			switch(transition)
			{
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
					// Data is flowing; the source-stall watchdog is no longer needed
					if (m_streamingsrc_timeout)
						m_streamingsrc_timeout->stop();
					break;
				}
				default:
					break;
			}
			break;
		}
		case GST_MESSAGE_ERROR:
		{
			gchar *debug;
			GError *err;
			gst_message_parse_error(msg, &err, &debug);
			g_free(debug);
			if (err->code != GST_STREAM_ERROR_CODEC_NOT_FOUND)
				eWarning("[eServiceMP3Record] gstBusCall Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName);
			g_error_free(err);
			break;
		}
		case GST_MESSAGE_ELEMENT:
		{
			const GstStructure *msgstruct = gst_message_get_structure(msg);
			if (msgstruct)
			{
				if (gst_is_missing_plugin_message(msg))
				{
					GstCaps *caps = NULL;
					gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL);
					if (caps)
					{
						// fix: gst_caps_to_string() returns an allocated string which was leaked
						gchar *capsString = gst_caps_to_string(caps);
						std::string codec = capsString ? capsString : "";
						g_free(capsString);
						eDebug("[eServiceMP3Record] gstBusCall cannot record because of incompatible codecs %s", codec.c_str());
						gst_caps_unref(caps);
					}
				}
				else
				{
					const gchar *eventname = gst_structure_get_name(msgstruct);
					if (eventname)
					{
						if (!strcmp(eventname, "redirect"))
						{
							// Restart the pipeline pointing at the new location
							const char *uri = gst_structure_get_string(msgstruct, "new-location");
							eDebug("[eServiceMP3Record] gstBusCall redirect to %s", uri);
							gst_element_set_state (m_recording_pipeline, GST_STATE_NULL);
							g_object_set(G_OBJECT (m_source), "uri", uri, NULL);
							gst_element_set_state (m_recording_pipeline, GST_STATE_PLAYING);
						}
					}
				}
			}
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			GstStreamStatusType type;
			GstElement *owner;
			gst_message_parse_stream_status (msg, &type, &owner);
			if (type == GST_STREAM_STATUS_TYPE_CREATE)
			{
				// owner from parse is replaced: resolve the element behind the source
				if (GST_IS_PAD(source))
					owner = gst_pad_get_parent_element(GST_PAD(source));
				else if (GST_IS_ELEMENT(source))
					owner = GST_ELEMENT(source);
				else
					owner = 0;
				if (owner)
				{
					GstState state;
					gst_element_get_state(m_recording_pipeline, &state, NULL, 0LL);
					GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
					const gchar *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
					if (!strcmp(name, "souphttpsrc") && (state == GST_STATE_READY) && !m_streamingsrc_timeout->isActive())
					{
						m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
						g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
						eDebug("[eServiceMP3Record] gstBusCall setting timeout on %s to %is", name, HTTP_TIMEOUT);
					}
				}
				// fix: gst_pad_get_parent_element() may return NULL — guard the unref
				if (GST_IS_PAD(source) && owner)
					gst_object_unref(owner);
			}
			break;
		}
		default:
			break;
	}
	g_free(sourceName);
}
/* GstBaseSrc::set_caps: reconfigure the Aravis camera for the negotiated caps.
 *
 * Stops acquisition, tears down the old stream, programs region/binning/
 * pixel format/frame rate/gain/exposure on the camera, rebuilds the fixed
 * caps and a fresh stream with pre-queued buffers, then restarts acquisition.
 * Returns TRUE (the hardware calls report no failure here).
 *
 * Fixes: two GST_DEBUG_OBJECT calls passed an extra argument with no format
 * specifier (undefined varargs behavior), "contiuous" typo, and the
 * non-portable "%Ld" length modifier.
 */
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	/* "format" may be a string (new style) or a fourcc value (old style) */
	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		/* Allow at least three frame periods before timing out a buffer */
		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	/* fix: "%Ld" is not portable; assumes buffer_timeout_us is a 64 bit
	 * integer — TODO confirm the field's exact type */
	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GINT64_FORMAT " µs",
			  gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		/* fix: stray argument without a format specifier removed */
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %d", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %d", arv_camera_get_gain (gst_aravis->camera));
	}

	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		/* fix: typo ("contiuous") and stray argument removed */
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	/* Rebuild the fixed caps advertised for this pixel format */
	caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);
		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);
		gst_caps_append_structure (caps, structure);
		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	/* Pre-queue buffers so acquisition can start immediately */
	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
/**
 * gst_missing_plugin_message_get_installer_detail:
 * @msg: a missing-plugin #GstMessage of type #GST_MESSAGE_ELEMENT
 *
 * Returns an opaque string containing all the details about the missing
 * element to be passed to an external installer called via
 * gst_install_plugins_async() or gst_install_plugins_sync().
 *
 * This function is mainly for applications that call external plugin
 * installation mechanisms using one of the two above-mentioned functions.
 *
 * Returns: a newly-allocated detail string, or NULL on error. Free string
 * with g_free() when not needed any longer.
 */
gchar *
gst_missing_plugin_message_get_installer_detail (GstMessage * msg)
{
  GstMissingType missing_type;
  const gchar *progname;
  const gchar *type;
  GString *str = NULL;
  gchar *detail = NULL;
  gchar *desc;
  const GstStructure *structure;

  g_return_val_if_fail (gst_is_missing_plugin_message (msg), NULL);

  structure = gst_message_get_structure (msg);

  GST_LOG ("Parsing missing-plugin message: %" GST_PTR_FORMAT, structure);

  missing_type = missing_structure_get_type (structure);
  if (missing_type == GST_MISSING_TYPE_UNKNOWN) {
    GST_WARNING ("couldn't parse 'type' field");
    goto error;
  }

  type = gst_structure_get_string (structure, "type");
  g_assert (type != NULL);      /* validity already checked above */

  /* The detail string is a '|'-separated record:
   *   marker|api-version|program|description|type-detail */
  /* FIXME: use gst_installer_detail_new() here too */
  str = g_string_new (GST_DETAIL_STRING_MARKER "|");
  g_string_append_printf (str, "%s|", GST_API_VERSION);

  progname = (const gchar *) g_get_prgname ();
  if (progname) {
    g_string_append_printf (str, "%s|", progname);
  } else {
    /* no program name set — use the pid as a stand-in identifier */
    g_string_append_printf (str, "pid/%lu|", (gulong) getpid ());
  }

  desc = gst_missing_plugin_message_get_description (msg);
  if (desc) {
    /* '|' is the record separator, so it must not appear in the free text */
    g_strdelimit (desc, "|", '#');
    g_string_append_printf (str, "%s|", desc);
    g_free (desc);
  } else {
    g_string_append (str, "|");
  }

  switch (missing_type) {
    case GST_MISSING_TYPE_URISOURCE:
    case GST_MISSING_TYPE_URISINK:
    case GST_MISSING_TYPE_ELEMENT:
      /* detail is a plain string (protocol or element name) */
      if (!missing_structure_get_string_detail (structure, &detail))
        goto error;
      break;
    case GST_MISSING_TYPE_DECODER:
    case GST_MISSING_TYPE_ENCODER:{
      /* detail is a caps serialization */
      GstCaps *caps = NULL;

      if (!missing_structure_get_caps_detail (structure, &caps))
        goto error;

      detail = gst_caps_to_string (caps);
      gst_caps_unref (caps);
      break;
    }
    default:
      g_return_val_if_reached (NULL);
  }

  g_string_append_printf (str, "%s-%s", type, detail);
  g_free (detail);

  /* FALSE: hand ownership of the character data to the caller */
  return g_string_free (str, FALSE);

/* ERRORS */
error:
  {
    GST_WARNING ("Failed to parse missing-plugin msg: %" GST_PTR_FORMAT, msg);
    if (str)
      g_string_free (str, TRUE);
    return NULL;
  }
}
static gboolean _add_clip(GstValidateScenario *scenario, GstValidateAction * action) { GESTimeline *timeline = get_timeline(scenario); GESAsset *asset; GESLayer *layer; GESClip *clip; GError *error = NULL; gint layer_priority; const gchar *name; const gchar *asset_id; const gchar *type_string; GType type; gboolean res = FALSE; GstClockTime duration = 1 * GST_SECOND; gst_structure_get_int(action->structure, "layer-priority", &layer_priority); name = gst_structure_get_string(action->structure, "name"); asset_id = gst_structure_get_string(action->structure, "asset-id"); type_string = gst_structure_get_string(action->structure, "type"); if (!(type = g_type_from_name(type_string))) { GST_ERROR("This type doesn't exist : %s", type_string); goto beach; } asset = ges_asset_request(type, asset_id, &error); if (!asset || error) { GST_ERROR("There was an error requesting the asset with id %s and type %s (%s)", asset_id, type_string, error->message); goto beach; } layer = _get_layer_by_priority(timeline, layer_priority); if (!layer) { GST_ERROR("No layer with priority %d", layer_priority); goto beach; } if (type == GES_TYPE_URI_CLIP) { duration = GST_CLOCK_TIME_NONE; } clip = ges_layer_add_asset(layer, asset, GST_CLOCK_TIME_NONE, 0, duration, GES_TRACK_TYPE_UNKNOWN); if (clip) { res = TRUE; if (!ges_timeline_element_set_name(GES_TIMELINE_ELEMENT(clip), name)) { res = FALSE; GST_ERROR("couldn't set name %s on clip with id %s", name, asset_id); } } else { GST_ERROR("Couldn't add clip with id %s to layer with priority %d", asset_id, layer_priority); } gst_object_unref (layer); ges_timeline_commit(timeline); beach: g_object_unref(timeline); return res; }
/* Sink getcaps for the H263+/H263-2000 RTP payloader.
 *
 * Queries the peer of the src pad, intersects the result with the src
 * template, and translates each RTP structure (H263-2000 profile/level, or
 * H263-1998 annex flags and frame-size fields) back into video/x-h263 sink
 * caps describing what the payloader can accept.
 */
static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL, *templ;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  /* what can the downstream peer accept? */
  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), filter);
  if (!peercaps)
    return
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));

  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  intersect = gst_caps_intersect (peercaps, templ);
  gst_caps_unref (peercaps);
  gst_caps_unref (templ);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    /* NOTE(review): encoding_name may be NULL if the structure lacks the
     * field; strcmp would then crash — confirm the template guarantees it */
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      /* H263-2000 advertises capabilities via profile/level (H.263 Annex X) */
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        /* NOTE: gboolean i here shadows the outer loop counter i */
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE, v =
            FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in the H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility
             * Profile (Profile 1): Baseline + Annexes I, J, L.4 and T */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2):
             * Baseline + Annex F */
            i = j = l = t = f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile:
             * Baseline + Annexes I, J, T */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile
             * (Profile 4): Baseline + Annexes I, J, T, V, W.6.3.8 */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5):
             * Baseline + Annexes F, I, J, L.4, T, D, U */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6): Baseline +
             * Annexes F, I, J, L.4, T, D, U and K with arbitrary slice
             * ordering */
            /* Missing D, U, K with arbitratry slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7): Baseline +
             * Annexes F, I, J, L.4, T, D, U, W.6.3.11 */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8): Baseline + Annexes F, I,
             * J, L.4, T, D, U, P.5, O.1.1 and K with arbitrary slice
             * ordering */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }

        /* build the list of h263 versions implied by the annex flags */
        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);
          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }

        /* explicitly forbid annexes the profile does not include */
        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);

        /* map the H.263 level to maximum picture size / frame rate */
        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s, "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy, "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else {
          /* level above 70: unrestricted */
          caps = gst_caps_merge_structure (caps, new_s);
        }
      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        caps = gst_caps_merge_structure (caps, new_s);
      }
    } else {
      /* H263-1998: capabilities come as individual annex flags and
       * frame-size/MPI fields */
      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
      /* FIXME: ffmpeg support the Appendix K too, how do we express it ?
       *   guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;

      str = gst_structure_get_string (s, "f");
      if (str && !strcmp (str, "1"))
        f = TRUE;

      str = gst_structure_get_string (s, "i");
      if (str && !strcmp (str, "1"))
        i = TRUE;

      str = gst_structure_get_string (s, "j");
      if (str && !strcmp (str, "1"))
        j = TRUE;

      str = gst_structure_get_string (s, "t");
      if (str && !strcmp (str, "1"))
        t = TRUE;

      if (f || i || j || t) {
        GValue list = { 0 };
        GValue vstr = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&vstr, G_TYPE_STRING);

        g_value_set_static_string (&vstr, "h263");
        gst_value_list_append_value (&list, &vstr);
        g_value_set_static_string (&vstr, "h263p");
        gst_value_list_append_value (&list, &vstr);
        g_value_unset (&vstr);

        gst_structure_set_value (new_s, "h263version", &list);
        g_value_unset (&list);
      } else {
        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
      }

      if (!f)
        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!i)
        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!j)
        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!t)
        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);

      /* custom = "xmax,ymax,mpi" */
      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          /* NOTE(review): "xmax % 4 && ymax % 4" accepts only sizes NOT
           * divisible by 4 — RFC 4629 requires custom sizes divisible by 4,
           * so this condition looks inverted; confirm against upstream */
          if (xmax % 4 && ymax % 4 && mpi >= 1 && mpi <= 32) {
            caps = caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      /* standard picture sizes, each with its own minimum picture interval */
      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }
      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }
      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }
      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }
      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }
      /* caps_append copies new_s, so free it if it was used, else merge it */
      if (added)
        gst_structure_free (new_s);
      else
        caps = gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

  return caps;
}
/* gst-validate action handler: edit (move/trim/ripple…) a named clip.
 *
 * Looks up the clip by "clip-name", applies ges_container_edit() with the
 * requested position/mode/edge/layer priority, then commits the timeline
 * and re-issues a seek to the current position so playback reflects the
 * edit. Returns TRUE on success.
 *
 * Fixes: the segment GstQuery was leaked on every path; the clip reference
 * was leaked when "position" could not be parsed; `edge` was declared with
 * the wrong enum type (GESEditMode instead of GESEdge).
 */
static gboolean
_edit_clip (GstValidateScenario * scenario, GstValidateAction * action)
{
  gint64 cpos;
  gdouble rate;
  GList *layers = NULL;
  GESTimeline *timeline;
  GstQuery *query_segment = NULL;
  GESTimelineElement *clip;
  GstClockTime position;
  gint64 stop_value;
  gboolean res = FALSE;

  gint new_layer_priority = -1;
  GESEdge edge = GES_EDGE_NONE; /* fix: was declared as GESEditMode */
  GESEditMode mode = GES_EDIT_MODE_NORMAL;
  const gchar *edit_mode_str = NULL, *edge_str = NULL;
  const gchar *clip_name;

  clip_name = gst_structure_get_string (action->structure, "clip-name");

  timeline = get_timeline (scenario);
  g_return_val_if_fail (timeline, FALSE);

  /* ges_timeline_get_element() returns a new reference */
  clip = ges_timeline_get_element (timeline, clip_name);
  g_return_val_if_fail (GES_IS_CLIP (clip), FALSE);

  if (!gst_validate_action_get_clocktime (scenario, action,
          "position", &position)) {
    GST_WARNING ("Could not get position");
    gst_object_unref (clip);    /* fix: clip reference was leaked here */
    goto beach;
  }

  if ((edit_mode_str =
          gst_structure_get_string (action->structure, "edit-mode")))
    g_return_val_if_fail (gst_validate_utils_enum_from_str (GES_TYPE_EDIT_MODE,
            edit_mode_str, &mode), FALSE);

  if ((edge_str = gst_structure_get_string (action->structure, "edge")))
    g_return_val_if_fail (gst_validate_utils_enum_from_str (GES_TYPE_EDGE,
            edge_str, &edge), FALSE);

  gst_structure_get_int (action->structure, "new-layer-priority",
      &new_layer_priority);

  gst_validate_printf (action, "Editing %s to %" GST_TIME_FORMAT
      " in %s mode, edge: %s "
      "with new layer prio: %d \n\n",
      clip_name, GST_TIME_ARGS (position),
      edit_mode_str ? edit_mode_str : "normal",
      edge_str ? edge_str : "None", new_layer_priority);

  if (!ges_container_edit (GES_CONTAINER (clip), layers, new_layer_priority,
          mode, edge, position)) {
    gst_object_unref (clip);
    goto beach;
  }
  gst_object_unref (clip);

  /* Snapshot the segment before committing so we can restore rate/stop */
  query_segment = gst_query_new_segment (GST_FORMAT_TIME);
  if (!gst_element_query (scenario->pipeline, query_segment)) {
    GST_ERROR_OBJECT (scenario, "Could not query segment");
    goto beach;
  }

  if (!gst_element_query_position (scenario->pipeline, GST_FORMAT_TIME, &cpos)) {
    GST_ERROR_OBJECT (scenario, "Could not query position");
    goto beach;
  }

  if (!ges_timeline_commit (timeline)) {
    GST_DEBUG_OBJECT (scenario, "nothing changed, no need to seek");
    res = TRUE;
    goto beach;
  }

  gst_query_parse_segment (query_segment, &rate, NULL, NULL, &stop_value);

  res = gst_validate_scenario_execute_seek (scenario, action,
      rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
      GST_SEEK_TYPE_SET, cpos, GST_SEEK_TYPE_SET, stop_value);

beach:
  if (query_segment)
    gst_query_unref (query_segment);    /* fix: query was leaked */
  g_object_unref (timeline);
  return res;
}
static gboolean gst_rtp_bv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps) { GstRTPBVDepay *rtpbvdepay = GST_RTP_BV_DEPAY (depayload); GstCaps *srccaps; GstStructure *structure; const gchar *mode_str = NULL; gint mode, clock_rate, expected_rate; gboolean ret; structure = gst_caps_get_structure (caps, 0); mode_str = gst_structure_get_string (structure, "encoding-name"); if (!mode_str) goto no_mode; if (!strcmp (mode_str, "BV16")) { mode = 16; expected_rate = 8000; } else if (!strcmp (mode_str, "BV32")) { mode = 32; expected_rate = 16000; } else goto invalid_mode; if (!gst_structure_get_int (structure, "clock-rate", &clock_rate)) clock_rate = expected_rate; else if (clock_rate != expected_rate) goto wrong_rate; depayload->clock_rate = clock_rate; rtpbvdepay->mode = mode; srccaps = gst_caps_new_simple ("audio/x-bv", "mode", G_TYPE_INT, rtpbvdepay->mode, NULL); ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps); GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret); gst_caps_unref (srccaps); return ret; /* ERRORS */ no_mode: { GST_ERROR_OBJECT (rtpbvdepay, "did not receive an encoding-name"); return FALSE; } invalid_mode: { GST_ERROR_OBJECT (rtpbvdepay, "invalid encoding-name, expected BV16 or BV32, got %s", mode_str); return FALSE; } wrong_rate: { GST_ERROR_OBJECT (rtpbvdepay, "invalid clock-rate, expected %d, got %d", expected_rate, clock_rate); return FALSE; } }
static gboolean gst_omx_aac_dec_is_format_change (GstOMXAudioDec * dec, GstOMXPort * port, GstCaps * caps) { GstOMXAACDec *self = GST_OMX_AAC_DEC (dec); OMX_AUDIO_PARAM_AACPROFILETYPE aac_param; OMX_ERRORTYPE err; GstStructure *s; gint rate, channels, mpegversion; const gchar *stream_format; GST_OMX_INIT_STRUCT (&aac_param); aac_param.nPortIndex = port->index; err = gst_omx_component_get_parameter (dec->dec, OMX_IndexParamAudioAac, &aac_param); if (err != OMX_ErrorNone) { GST_ERROR_OBJECT (self, "Failed to get AAC parameters from component: %s (0x%08x)", gst_omx_error_to_string (err), err); return FALSE; } s = gst_caps_get_structure (caps, 0); if (!gst_structure_get_int (s, "mpegversion", &mpegversion) || !gst_structure_get_int (s, "rate", &rate) || !gst_structure_get_int (s, "channels", &channels)) { GST_ERROR_OBJECT (self, "Incomplete caps"); return FALSE; } stream_format = gst_structure_get_string (s, "stream-format"); if (!stream_format) { GST_ERROR_OBJECT (self, "Incomplete caps"); return FALSE; } if (aac_param.nChannels != channels) return TRUE; if (aac_param.nSampleRate != rate) return TRUE; if (mpegversion == 2 && aac_param.eAACStreamFormat != OMX_AUDIO_AACStreamFormatMP2ADTS) return TRUE; if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4ADTS && strcmp (stream_format, "adts") != 0) return TRUE; if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4LOAS && strcmp (stream_format, "loas") != 0) return TRUE; if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatADIF && strcmp (stream_format, "adif") != 0) return TRUE; if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatRAW && strcmp (stream_format, "raw") != 0) return TRUE; return FALSE; }
/* Sink getcaps for the H.264 RTP payloader: translate the peer's RTP caps
 * (profile-level-id) back into video/x-h264 profile/level constraints.
 *
 * NOTE: this chunk is truncated — the `done:` label targeted by the gotos,
 * the use of append_unrestricted/icaps and the final return lie beyond the
 * visible portion of the file. */
static GstCaps *
gst_rtp_h264_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
{
  GstCaps *template_caps;
  GstCaps *allowed_caps;
  GstCaps *caps, *icaps;
  gboolean append_unrestricted;
  guint i;

  allowed_caps =
      gst_pad_peer_get_caps_reffed (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));

  if (allowed_caps == NULL)
    return NULL;

  template_caps =
      gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);

  if (gst_caps_is_any (allowed_caps)) {
    /* peer accepts anything — offer the full template */
    caps = gst_caps_ref (template_caps);
    goto done;
  }

  if (gst_caps_is_empty (allowed_caps)) {
    caps = gst_caps_ref (allowed_caps);
    goto done;
  }

  caps = gst_caps_new_empty ();

  append_unrestricted = FALSE;
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstStructure *s = gst_caps_get_structure (allowed_caps, i);
    GstStructure *new_s = gst_structure_new ("video/x-h264", NULL);
    const gchar *profile_level_id;

    profile_level_id = gst_structure_get_string (s, "profile-level-id");

    if (profile_level_id && strlen (profile_level_id) == 6) {
      const gchar *profile;
      const gchar *level;
      long int spsint;
      guint8 sps[3];

      /* profile-level-id is 3 hex bytes: profile_idc, constraint flags,
       * level_idc — unpack them for the codec-utils helpers */
      spsint = strtol (profile_level_id, NULL, 16);
      sps[0] = spsint >> 16;
      sps[1] = spsint >> 8;
      sps[2] = spsint;

      profile = gst_codec_utils_h264_get_profile (sps, 3);
      level = gst_codec_utils_h264_get_level (sps, 3);

      if (profile && level) {
        GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
            profile, level);

        if (!strcmp (profile, "constrained-baseline"))
          gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
        else {
          /* constrained-baseline decodes under any profile, so always
           * offer it alongside the signalled profile */
          GValue val = { 0, };
          GValue profiles = { 0, };

          g_value_init (&profiles, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          g_value_set_static_string (&val, profile);
          gst_value_list_append_value (&profiles, &val);

          g_value_set_static_string (&val, "constrained-baseline");
          gst_value_list_append_value (&profiles, &val);

          gst_structure_take_value (new_s, "profile", &profiles);
        }

        if (!strcmp (level, "1"))
          gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
        else {
          /* offer every level up to and including the signalled one;
           * prepend keeps the list ordered from the signalled level down */
          GValue levels = { 0, };
          GValue val = { 0, };
          int j;

          g_value_init (&levels, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          for (j = 0; all_levels[j]; j++) {
            g_value_set_static_string (&val, all_levels[j]);
            gst_value_list_prepend_value (&levels, &val);
            if (!strcmp (level, all_levels[j]))
              break;
          }
          gst_structure_take_value (new_s, "level", &levels);
        }
      } else {
        /* Invalid profile-level-id means baseline */
        gst_structure_set (new_s,
            "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      }
    } else {
      /* No profile-level-id means baseline or unrestricted */
      gst_structure_set (new_s,
          "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      append_unrestricted = TRUE;
    }

    gst_caps_merge_structure (caps, new_s);
  }
/* Program the OMX component's input port for the AAC stream in @caps.
 *
 * Switches the port encoding to AAC, then fills an
 * OMX_AUDIO_PARAM_AACPROFILETYPE from the caps (channels, rate and the
 * stream format derived from mpegversion / "stream-format") and writes it
 * back to the component. Returns FALSE on any OMX error or incomplete caps.
 */
static gboolean
gst_omx_aac_dec_set_format (GstOMXAudioDec * dec, GstOMXPort * port,
    GstCaps * caps)
{
  GstOMXAACDec *self = GST_OMX_AAC_DEC (dec);
  OMX_PARAM_PORTDEFINITIONTYPE port_def;
  OMX_AUDIO_PARAM_AACPROFILETYPE aac_param;
  OMX_ERRORTYPE err;
  GstStructure *s;
  gint rate, channels, mpegversion;
  const gchar *stream_format;

  /* switch the port's coding type over to AAC */
  gst_omx_port_get_port_definition (port, &port_def);
  port_def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
  err = gst_omx_port_update_port_definition (port, &port_def);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Failed to set AAC format on component: %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  GST_OMX_INIT_STRUCT (&aac_param);
  aac_param.nPortIndex = port->index;

  err = gst_omx_component_get_parameter (dec->dec, OMX_IndexParamAudioAac,
      &aac_param);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Failed to get AAC parameters from component: %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  s = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (s, "mpegversion", &mpegversion)
      || !gst_structure_get_int (s, "rate", &rate)
      || !gst_structure_get_int (s, "channels", &channels)) {
    GST_ERROR_OBJECT (self, "Incomplete caps");
    return FALSE;
  }

  stream_format = gst_structure_get_string (s, "stream-format");
  if (stream_format == NULL) {
    GST_ERROR_OBJECT (self, "Incomplete caps");
    return FALSE;
  }

  aac_param.nChannels = channels;
  aac_param.nSampleRate = rate;
  aac_param.nBitRate = 0;       /* unknown */
  aac_param.nAudioBandWidth = 0;        /* decoder decision */
  aac_param.eChannelMode = 0;   /* FIXME */

  if (mpegversion == 2) {
    aac_param.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP2ADTS;
  } else if (strcmp (stream_format, "adts") == 0) {
    aac_param.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
  } else if (strcmp (stream_format, "loas") == 0) {
    aac_param.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4LOAS;
  } else if (strcmp (stream_format, "adif") == 0) {
    aac_param.eAACStreamFormat = OMX_AUDIO_AACStreamFormatADIF;
  } else {
    /* "raw" — and, as a deliberate fallback, any unrecognised format */
    aac_param.eAACStreamFormat = OMX_AUDIO_AACStreamFormatRAW;
  }

  err = gst_omx_component_set_parameter (dec->dec, OMX_IndexParamAudioAac,
      &aac_param);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self, "Error setting AAC parameters: %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  return TRUE;
}
static gboolean gst_navseek_handle_src_event (GstPad * pad, GstEvent * event) { GstNavSeek *navseek; gboolean ret = TRUE; navseek = GST_NAVSEEK (GST_PAD_PARENT (pad)); switch (GST_EVENT_TYPE (event)) { case GST_EVENT_NAVIGATION: /* Check for a keyup and convert left/right to a seek event */ { const GstStructure *structure; const gchar *event_type; structure = gst_event_get_structure (event); g_return_val_if_fail (structure != NULL, FALSE); event_type = gst_structure_get_string (structure, "event"); g_return_val_if_fail (event_type != NULL, FALSE); if (strcmp (event_type, "key-press") == 0) { const gchar *key; key = gst_structure_get_string (structure, "key"); g_return_val_if_fail (key != NULL, FALSE); if (strcmp (key, "Left") == 0) { /* Seek backward by 5 secs */ gst_navseek_seek (navseek, -1.0 * navseek->seek_offset * GST_SECOND); } else if (strcmp (key, "Right") == 0) { /* Seek forward */ gst_navseek_seek (navseek, navseek->seek_offset * GST_SECOND); } else if (strcmp (key, "s") == 0) { /* Grab the next frame as the start frame of a segment */ navseek->grab_seg_start = TRUE; } else if (strcmp (key, "e") == 0) { /* Grab the next frame as the end frame of a segment */ navseek->grab_seg_end = TRUE; } else if (strcmp (key, "l") == 0) { /* Toggle the loop flag. If we have both start and end segment times send a seek */ navseek->loop = !navseek->loop; gst_navseek_segseek (navseek); } } else { break; } gst_event_unref (event); event = NULL; } break; default: break; } if (event && GST_PAD_IS_LINKED (GST_BASE_TRANSFORM (navseek)->sinkpad)) { GstPad *peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad); ret = gst_pad_send_event (peer_pad, event); gst_object_unref (peer_pad); } return ret; }
/*
 * sink_setcaps:
 * @pad: the encoder's sink pad
 * @caps: the negotiated sink caps
 *
 * Derives the source caps from the sink caps for the active algorithm,
 * sizes the DSP input/output buffers, applies downstream level and
 * stream-format constraints, and finally creates and starts the DSP
 * node (unless one already exists).
 *
 * Fix over the previous version: the freshly allocated @out_caps (and,
 * on the JPEG odd-dimension path, @out_struc as well) leaked on the
 * early FALSE returns; both are now released before returning.
 *
 * Returns TRUE on success.
 */
static gboolean
sink_setcaps (GstPad * pad, GstCaps * caps)
{
	GstDspVEnc *self;
	GstDspBase *base;
	GstStructure *in_struc;
	GstCaps *out_caps;
	GstStructure *out_struc;
	gint width = 0, height = 0;
	GstCaps *allowed_caps;
	gint tgt_level = -1;
	struct td_codec *codec;

	self = GST_DSP_VENC(GST_PAD_PARENT(pad));
	base = GST_DSP_BASE(self);
	codec = base->codec;

	if (!codec)
		return FALSE;

#ifdef DEBUG
	{
		gchar *str = gst_caps_to_string(caps);
		pr_info(self, "sink caps: %s", str);
		g_free(str);
	}
#endif

	in_struc = gst_caps_get_structure(caps, 0);
	out_caps = gst_caps_new_empty();

	/* build the media-type skeleton of the source caps */
	switch (base->alg) {
	case GSTDSP_JPEGENC:
		out_struc = gst_structure_new("image/jpeg", NULL);
		break;
	case GSTDSP_H263ENC:
		out_struc = gst_structure_new("video/x-h263",
					      "variant", G_TYPE_STRING, "itu", NULL);
		break;
	case GSTDSP_MP4VENC:
		out_struc = gst_structure_new("video/mpeg",
					      "mpegversion", G_TYPE_INT, 4,
					      "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
		break;
	case GSTDSP_H264ENC:
		out_struc = gst_structure_new("video/x-h264",
					      "alignment", G_TYPE_STRING, "au", NULL);
		break;
	default:
		/* unknown algorithm: don't leak the empty caps */
		gst_caps_unref(out_caps);
		return FALSE;
	}

	if (gst_structure_get_int(in_struc, "width", &width))
		gst_structure_set(out_struc, "width", G_TYPE_INT, width, NULL);
	if (gst_structure_get_int(in_struc, "height", &height))
		gst_structure_set(out_struc, "height", G_TYPE_INT, height, NULL);

	gst_structure_get_fourcc(in_struc, "format", &self->color_format);

	/* size the DSP buffers for the chosen algorithm */
	switch (base->alg) {
	case GSTDSP_H263ENC:
	case GSTDSP_MP4VENC:
	case GSTDSP_H264ENC:
		base->output_buffer_size = width * height / 2;
		break;
	case GSTDSP_JPEGENC:
		if (width % 2 || height % 2) {
			/* JPEG encoder needs even dimensions; free what
			 * we built so far instead of leaking it */
			gst_structure_free(out_struc);
			gst_caps_unref(out_caps);
			return FALSE;
		}
		if (self->color_format == GST_MAKE_FOURCC('I', '4', '2', '0'))
			base->input_buffer_size =
				ROUND_UP(width, 16) * ROUND_UP(height, 16) * 3 / 2;
		else
			base->input_buffer_size =
				ROUND_UP(width, 16) * ROUND_UP(height, 16) * 2;
		base->output_buffer_size = width * height;
		if (self->quality < 10)
			base->output_buffer_size /= 10;
		else if (self->quality < 100)
			base->output_buffer_size /= (100 / self->quality);
		break;
	default:
		break;
	}

	if (base->node)
		goto skip_setup;

	switch (base->alg) {
	case GSTDSP_JPEGENC:
		du_port_alloc_buffers(base->ports[0], 1);
#if SN_API > 1
		du_port_alloc_buffers(base->ports[1], 2);
#else
		/* old versions of the sn can't handle 2 buffers */
		/*
		 * Some constrained pipelines might starve because of this. You
		 * might want to try enable-last-buffer=false on some sinks.
		 * TODO Is there any way around this?
		 */
		du_port_alloc_buffers(base->ports[1], 1);
#endif
		break;
	default:
		du_port_alloc_buffers(base->ports[0], 2);
		du_port_alloc_buffers(base->ports[1], 4);
		break;
	}

skip_setup:
	self->width = width;
	self->height = height;

	{
		const GValue *framerate = NULL;
		framerate = gst_structure_get_value(in_struc, "framerate");
		if (framerate) {
			gst_structure_set_value(out_struc, "framerate", framerate);
			/* calculate nearest integer */
			self->framerate =
				(gst_value_get_fraction_numerator(framerate) * 2 /
				 gst_value_get_fraction_denominator(framerate) + 1) / 2;
		}
	}

	/* see if downstream caps express something */
	allowed_caps = gst_pad_get_allowed_caps(base->srcpad);
	if (allowed_caps) {
		if (gst_caps_get_size(allowed_caps) > 0) {
			GstStructure *s;
			s = gst_caps_get_structure(allowed_caps, 0);
			gst_structure_get_int(s, "level", &tgt_level);
			if (base->alg == GSTDSP_H264ENC) {
				const char *stream_format;
				stream_format = gst_structure_get_string(s, "stream-format");
				if (stream_format && !strcmp(stream_format, "avc"))
					self->priv.h264.bytestream = false;
				else
					stream_format = "byte-stream";
				gst_structure_set(out_struc, "stream-format",
						  G_TYPE_STRING, stream_format, NULL);
			}
		}
		gst_caps_unref(allowed_caps);
	}

	check_supported_levels(self, tgt_level);

	/* clamp the bitrate to what the selected level allows */
	if (self->bitrate == 0)
		self->bitrate = self->max_bitrate;
	else if (self->bitrate > self->max_bitrate)
		self->bitrate = self->max_bitrate;

	/* out_struc ownership moves into out_caps here */
	gst_caps_append_structure(out_caps, out_struc);

#ifdef DEBUG
	{
		gchar *str = gst_caps_to_string(out_caps);
		pr_info(self, "src caps: %s", str);
		g_free(str);
	}
#endif

	/* take_caps consumes out_caps regardless of the result */
	if (!gst_pad_take_caps(base->srcpad, out_caps))
		return FALSE;

	if (base->node)
		return TRUE;

	base->node = create_node(self);
	if (!base->node) {
		pr_err(self, "dsp node creation failed");
		return FALSE;
	}

	if (codec->setup_params)
		codec->setup_params(base);

	if (!gstdsp_start(base)) {
		pr_err(self, "dsp start failed");
		return FALSE;
	}

	if (codec->send_params)
		codec->send_params(base, base->node);

	return TRUE;
}
/*
 * gst_rtp_amr_depay_setcaps:
 * @depayload: the base depayloader
 * @caps: the RTP sink caps to parse
 *
 * Configures the AMR depayloader from the RTP caps: picks narrowband or
 * wideband mode from "encoding-name", reads the optional "1"/"0" string
 * parameters, then sets the source caps.  Only the most basic profile
 * (mono, default clock rate, octet aligned, no CRC, no robust sorting,
 * no interleaving) is accepted.
 *
 * Returns TRUE when the source caps could be set.
 */
static gboolean
gst_rtp_amr_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstCaps *srccaps;
  GstRtpAMRDepay *rtpamrdepay;
  const gchar *params;
  const gchar *str, *type;
  gint clock_rate, need_clock_rate;
  gboolean res;

  rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  /* encoding-name selects NB vs. WB mode and, with it, the expected
   * clock rate and the output media type */
  str = gst_structure_get_string (structure, "encoding-name");
  if (str == NULL)
    goto invalid_mode;

  if (strcmp (str, "AMR") == 0) {
    rtpamrdepay->mode = GST_RTP_AMR_DP_MODE_NB;
    need_clock_rate = 8000;
    type = "audio/AMR";
  } else if (strcmp (str, "AMR-WB") == 0) {
    rtpamrdepay->mode = GST_RTP_AMR_DP_MODE_WB;
    need_clock_rate = 16000;
    type = "audio/AMR-WB";
  } else {
    goto invalid_mode;
  }

  /* The remaining parameters are "1"/"0" strings defaulting to FALSE
   * when absent.  crc, robust-sorting and interleaving each imply
   * octet-aligned mode. */
  str = gst_structure_get_string (structure, "octet-align");
  rtpamrdepay->octet_align = (str != NULL && atoi (str) == 1);

  str = gst_structure_get_string (structure, "crc");
  rtpamrdepay->crc = (str != NULL && atoi (str) == 1);
  if (rtpamrdepay->crc)
    rtpamrdepay->octet_align = TRUE;

  str = gst_structure_get_string (structure, "robust-sorting");
  rtpamrdepay->robust_sorting = (str != NULL && atoi (str) == 1);
  if (rtpamrdepay->robust_sorting)
    rtpamrdepay->octet_align = TRUE;

  str = gst_structure_get_string (structure, "interleaving");
  rtpamrdepay->interleaving = (str != NULL && atoi (str) == 1);
  if (rtpamrdepay->interleaving)
    rtpamrdepay->octet_align = TRUE;

  params = gst_structure_get_string (structure, "encoding-params");
  rtpamrdepay->channels = params ? atoi (params) : 1;

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = need_clock_rate;
  depayload->clock_rate = clock_rate;

  /* we require 1 channel, 8000 Hz, octet aligned, no CRC,
   * no robust sorting, no interleaving for now */
  if (rtpamrdepay->channels != 1)
    return FALSE;
  if (clock_rate != need_clock_rate)
    return FALSE;
  if (rtpamrdepay->octet_align != TRUE)
    return FALSE;
  if (rtpamrdepay->robust_sorting != FALSE)
    return FALSE;
  if (rtpamrdepay->interleaving != FALSE)
    return FALSE;

  srccaps = gst_caps_new_simple (type,
      "channels", G_TYPE_INT, rtpamrdepay->channels,
      "rate", G_TYPE_INT, clock_rate, NULL);
  res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
  gst_caps_unref (srccaps);

  return res;

  /* ERRORS */
invalid_mode:
  {
    GST_ERROR_OBJECT (rtpamrdepay, "invalid encoding-name");
    return FALSE;
  }
}
/* Parse the RTP caps for a G722 stream and configure the depayloader.
 *
 * G722 is special: for the static payload type 9 the advertised RTP
 * clock rate is 8000 Hz even though the audio is actually sampled at
 * 16000 Hz (a historical quirk preserved by RFC 3551), which is why
 * clock-rate and sample rate are tracked separately below.
 *
 * Returns TRUE when the source caps could be set on the src pad. */
static gboolean
gst_rtp_g722_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstRtpG722Depay *rtpg722depay;
  gint clock_rate, payload, samplerate;
  gint channels;
  GstCaps *srccaps;
  gboolean res;
#if 0
  const gchar *channel_order;
  const GstRTPChannelOrder *order;
#endif

  rtpg722depay = GST_RTP_G722_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  /* default to a dynamic payload type when none is given in the caps */
  payload = 96;
  gst_structure_get_int (structure, "payload", &payload);
  switch (payload) {
    case GST_RTP_PAYLOAD_G722:
      /* static G722 mapping: 8000 Hz RTP clock, 16000 Hz audio */
      channels = 1;
      clock_rate = 8000;
      samplerate = 16000;
      break;
    default:
      /* no fixed mapping, we need clock-rate */
      channels = 0;
      clock_rate = 0;
      samplerate = 0;
      break;
  }

  /* caps can overwrite defaults */
  clock_rate =
      gst_rtp_g722_depay_parse_int (structure, "clock-rate", clock_rate);
  if (clock_rate == 0)
    goto no_clockrate;
  /* see the note above: an 8000 Hz clock still means 16 kHz audio */
  if (clock_rate == 8000)
    samplerate = 16000;
  if (samplerate == 0)
    samplerate = clock_rate;
  /* channel count may come from encoding-params or channels; fall back
   * to mono when neither is present */
  channels =
      gst_rtp_g722_depay_parse_int (structure, "encoding-params", channels);
  if (channels == 0) {
    channels = gst_rtp_g722_depay_parse_int (structure, "channels", channels);
    if (channels == 0) {
      /* channels defaults to 1 otherwise */
      channels = 1;
    }
  }

  depayload->clock_rate = clock_rate;
  rtpg722depay->rate = samplerate;
  rtpg722depay->channels = channels;

  srccaps = gst_caps_new_simple ("audio/G722",
      "rate", G_TYPE_INT, samplerate,
      "channels", G_TYPE_INT, channels, NULL);

  /* FIXME: Do something with the channel order */
#if 0
  /* add channel positions */
  channel_order = gst_structure_get_string (structure, "channel-order");

  order = gst_rtp_channels_get_by_order (channels, channel_order);
  if (order) {
    gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0),
        order->pos);
  } else {
    GstAudioChannelPosition *pos;

    GST_ELEMENT_WARNING (rtpg722depay, STREAM, DECODE,
        (NULL), ("Unknown channel order '%s' for %d channels",
            GST_STR_NULL (channel_order), channels));

    /* create default NONE layout */
    pos = gst_rtp_channels_create_default (channels);
    gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0), pos);
    g_free (pos);
  }
#endif

  res = gst_pad_set_caps (depayload->srcpad, srccaps);
  gst_caps_unref (srccaps);

  return res;

  /* ERRORS */
no_clockrate:
  {
    GST_ERROR_OBJECT (depayload, "no clock-rate specified");
    return FALSE;
  }
}