/* Configure the OMX component's input port for AAC decoding.
 *
 * Switches the port encoding to AAC, then fills in the component's
 * OMX_AUDIO_PARAM_AACPROFILETYPE from the negotiated caps (rate, channels,
 * mpegversion, stream-format).
 *
 * Returns TRUE on success, FALSE if the caps are incomplete or any OMX
 * call fails.
 */
static gboolean
gst_omx_aac_dec_set_format (GstOMXAudioDec * dec, GstOMXPort * port,
    GstCaps * caps)
{
  GstOMXAACDec *self = GST_OMX_AAC_DEC (dec);
  OMX_PARAM_PORTDEFINITIONTYPE port_definition;
  OMX_AUDIO_PARAM_AACPROFILETYPE aac_params;
  OMX_ERRORTYPE omx_err;
  GstStructure *structure;
  gint sample_rate, channel_count, mpeg_version;
  const gchar *stream_format;

  /* Switch the port over to AAC coding. */
  gst_omx_port_get_port_definition (port, &port_definition);
  port_definition.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
  omx_err = gst_omx_port_update_port_definition (port, &port_definition);
  if (omx_err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Failed to set AAC format on component: %s (0x%08x)",
        gst_omx_error_to_string (omx_err), omx_err);
    return FALSE;
  }

  /* Fetch the component's current AAC parameters so we only touch the
   * fields we care about. */
  GST_OMX_INIT_STRUCT (&aac_params);
  aac_params.nPortIndex = port->index;
  omx_err = gst_omx_component_get_parameter (dec->dec, OMX_IndexParamAudioAac,
      &aac_params);
  if (omx_err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Failed to get AAC parameters from component: %s (0x%08x)",
        gst_omx_error_to_string (omx_err), omx_err);
    return FALSE;
  }

  /* All four caps fields are mandatory for AAC. */
  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (structure, "mpegversion", &mpeg_version)
      || !gst_structure_get_int (structure, "rate", &sample_rate)
      || !gst_structure_get_int (structure, "channels", &channel_count)) {
    GST_ERROR_OBJECT (self, "Incomplete caps");
    return FALSE;
  }

  stream_format = gst_structure_get_string (structure, "stream-format");
  if (stream_format == NULL) {
    GST_ERROR_OBJECT (self, "Incomplete caps");
    return FALSE;
  }

  aac_params.nChannels = channel_count;
  aac_params.nSampleRate = sample_rate;
  aac_params.nBitRate = 0;      /* unknown */
  aac_params.nAudioBandWidth = 0;       /* decoder decision */
  aac_params.eChannelMode = 0;  /* FIXME */

  /* Map caps stream-format to the OMX stream format enum; MPEG-2 AAC is
   * always ADTS-framed. */
  if (mpeg_version == 2) {
    aac_params.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP2ADTS;
  } else if (!strcmp (stream_format, "adts")) {
    aac_params.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
  } else if (!strcmp (stream_format, "loas")) {
    aac_params.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4LOAS;
  } else if (!strcmp (stream_format, "adif")) {
    aac_params.eAACStreamFormat = OMX_AUDIO_AACStreamFormatADIF;
  } else if (!strcmp (stream_format, "raw")) {
    aac_params.eAACStreamFormat = OMX_AUDIO_AACStreamFormatRAW;
  } else {
    GST_ERROR_OBJECT (self, "Unexpected format: %s", stream_format);
    return FALSE;
  }

  omx_err = gst_omx_component_set_parameter (dec->dec, OMX_IndexParamAudioAac,
      &aac_params);
  if (omx_err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self, "Error setting AAC parameters: %s (0x%08x)",
        gst_omx_error_to_string (omx_err), omx_err);
    return FALSE;
  }

  return TRUE;
}
// checkBuffer void gstCamera::checkBuffer() { if( !mAppSink ) return; // block waiting for the buffer GstSample* gstSample = gst_app_sink_pull_sample(mAppSink); if( !gstSample ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_app_sink_pull_sample() returned NULL...\n"); return; } GstBuffer* gstBuffer = gst_sample_get_buffer(gstSample); if( !gstBuffer ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_sample_get_buffer() returned NULL...\n"); return; } // retrieve GstMapInfo map; if( !gst_buffer_map(gstBuffer, &map, GST_MAP_READ) ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_buffer_map() failed...\n"); return; } //gst_util_dump_mem(map.data, map.size); void* gstData = map.data; //GST_BUFFER_DATA(gstBuffer); const uint32_t gstSize = map.size; //GST_BUFFER_SIZE(gstBuffer); if( !gstData ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_buffer had NULL data pointer...\n"); release_return; } // retrieve caps GstCaps* gstCaps = gst_sample_get_caps(gstSample); if( !gstCaps ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_buffer had NULL caps...\n"); release_return; } GstStructure* gstCapsStruct = gst_caps_get_structure(gstCaps, 0); if( !gstCapsStruct ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_caps had NULL structure...\n"); release_return; } // get width & height of the buffer int width = 0; int height = 0; if( !gst_structure_get_int(gstCapsStruct, "width", &width) || !gst_structure_get_int(gstCapsStruct, "height", &height) ) { printf(LOG_GSTREAMER "gstreamer camera -- gst_caps missing width/height...\n"); release_return; } if( width < 1 || height < 1 ) release_return; mWidth = width; mHeight = height; mDepth = (gstSize * 8) / (width * height); mSize = gstSize; //printf(LOG_GSTREAMER "gstreamer camera recieved %ix%i frame (%u bytes, %u bpp)\n", width, height, gstSize, mDepth); // make sure ringbuffer is allocated if( !mRingbufferCPU[0] ) { for( uint32_t n=0; n < NUM_RINGBUFFERS; n++ ) { if( !cudaAllocMapped(&mRingbufferCPU[n], &mRingbufferGPU[n], gstSize) ) 
printf(LOG_CUDA "gstreamer camera -- failed to allocate ringbuffer %u (size=%u)\n", n, gstSize); } printf(LOG_CUDA "gstreamer camera -- allocated %u ringbuffers, %u bytes each\n", NUM_RINGBUFFERS, gstSize); } // copy to next ringbuffer const uint32_t nextRingbuffer = (mLatestRingbuffer + 1) % NUM_RINGBUFFERS; //printf(LOG_GSTREAMER "gstreamer camera -- using ringbuffer #%u for next frame\n", nextRingbuffer); memcpy(mRingbufferCPU[nextRingbuffer], gstData, gstSize); gst_buffer_unmap(gstBuffer, &map); //gst_buffer_unref(gstBuffer); gst_sample_unref(gstSample); // update and signal sleeping threads mRingMutex->lock(); mLatestRingbuffer = nextRingbuffer; mLatestRetrieved = false; mRingMutex->unlock(); mWaitEvent->wakeAll(); }
/* Apply negotiated caps to the Aravis (GigE Vision) camera.
 *
 * Stops acquisition, reconfigures region/binning/pixel format/frame rate,
 * derives the buffer timeout from the frame rate, applies gain and exposure
 * policy, rebuilds the fixed caps, recreates the stream with a fresh buffer
 * pool and restarts acquisition.
 *
 * Fixes vs. previous revision:
 *  - "%Ld" is not a standard printf conversion for a 64-bit value; use
 *    G_GUINT64_FORMAT for buffer_timeout_us.
 *  - two GST_DEBUG_OBJECT calls passed an argument with no matching
 *    conversion specifier (undefined behavior under -Wformat rules).
 *  - "contiuous" typo.
 *  - gst_aravis->stream left dangling between unref and reassignment.
 *  - caps ints initialized so missing fields can't feed garbage downstream.
 */
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height = 0, width = 0;
	int bpp = 0, depth = 0;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL) {
		g_object_unref (gst_aravis->stream);
		gst_aravis->stream = NULL;	/* avoid dangling pointer until recreated below */
	}

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	/* "format" may be a string (1.x style) or a fourcc (0.10 style). */
	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		/* Allow ~3 frame periods before declaring a buffer timeout. */
		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	/* FIX: %Ld is non-standard; buffer_timeout_us is 64 bit. */
	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs",
			  gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz",
			  arv_camera_get_frame_rate (gst_aravis->camera));

	if (gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %d", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %d",
				  arv_camera_get_gain (gst_aravis->camera));
	}

	if (gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs",
				  arv_camera_get_exposure_time (gst_aravis->camera));
	}

	/* Rebuild the fixed caps advertised for this configuration. */
	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	/* New stream with a freshly sized buffer pool, then go live. */
	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
/* h264 dec has its own sink_setcaps for supporting nalu convert codec data.
 *
 * Classifies the codec_data as NALU (byte-stream start codes) or 3GPP
 * (avcC); 3GPP DCI is converted to NALU form before being handed to the
 * component.  Finally the input port definition is updated with the
 * negotiated width/height.
 *
 * Fixes vs. previous revision:
 *  - when codec_data was shorter than 4 bytes the code logged an error but
 *    still read buf_data[0..3] (out-of-bounds read); now such data is passed
 *    through unconverted.
 *  - "&para;m" mojibake restored to "&param" in the OMX Get/SetParameter
 *    calls.
 */
static gboolean
sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure;
  GstOmxBaseVideoDec *self;
  GstOmxH264Dec *h264_self;
  GstOmxBaseFilter *omx_base;
  GOmxCore *gomx;
  OMX_PARAM_PORTDEFINITIONTYPE param;
  gint width = 0;
  gint height = 0;

  self = GST_OMX_BASE_VIDEODEC (GST_PAD_PARENT (pad));
  h264_self = GST_OMX_H264DEC (GST_PAD_PARENT (pad));
  omx_base = GST_OMX_BASE_FILTER (self);
  gomx = (GOmxCore *) omx_base->gomx;

  GST_INFO_OBJECT (self, "setcaps (sink)(h264): %" GST_PTR_FORMAT, caps);

  g_return_val_if_fail (gst_caps_get_size (caps) == 1, FALSE);

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "height", &height);

  {
    const GValue *framerate = NULL;
    framerate = gst_structure_get_value (structure, "framerate");
    if (framerate) {
      self->framerate_num = gst_value_get_fraction_numerator (framerate);
      self->framerate_denom = gst_value_get_fraction_denominator (framerate);
    }
  }

  G_OMX_INIT_PARAM (param);

  {
    const GValue *codec_data;
    GstBuffer *buffer;
    gboolean ret = FALSE;
    guint8 *buf_data = NULL;

    codec_data = gst_structure_get_value (structure, "codec_data");
    if (codec_data) {
      buffer = gst_value_get_buffer (codec_data);
      buf_data = GST_BUFFER_DATA (buffer);

      if (buf_data == NULL || GST_BUFFER_SIZE (buffer) < 4) {
        /* FIX: previously only logged, then read buf_data[0..3] anyway.
         * Too short to classify — pass the codec data through unchanged. */
        GST_ERROR ("codec data size is less than 4!!");
        omx_base->codec_data = buffer;
        gst_buffer_ref (buffer);
      } else {
        /* A 00 00 01 / 00 00 00 01 start code means byte-stream (NALU);
         * anything else is treated as avcC (3GPP). */
        if ((buf_data[0] == 0x00) && (buf_data[1] == 0x00) &&
            ((buf_data[2] == 0x01) ||
                ((buf_data[2] == 0x00) && (buf_data[3] == 0x01)))) {
          h264_self->h264Format = GSTOMX_H264_FORMAT_NALU;
          GST_INFO_OBJECT (self, "H264 format is NALU");
        } else {
          h264_self->h264Format = GSTOMX_H264_FORMAT_3GPP;
          GST_INFO_OBJECT (self, "H264 format is 3GPP");
        }

        /* if codec data is 3gpp format, convert nalu format */
        if (h264_self->h264Format == GSTOMX_H264_FORMAT_3GPP) {
          GstBuffer *nalu_dci = NULL;

          ret = convert_dci (h264_self, buffer, &nalu_dci);
          if (ret) {
            omx_base->codec_data = nalu_dci;
          } else {
            /* conversion failed: fall back to the original DCI */
            if (nalu_dci) {
              gst_buffer_unref (nalu_dci);
            }
            GST_ERROR_OBJECT (h264_self, "converting dci error.");
            omx_base->codec_data = buffer;
            gst_buffer_ref (buffer);
          }
        } else {                /* not 3GPP format */
          omx_base->codec_data = buffer;
          gst_buffer_ref (buffer);
        }
      }

      h264_self->h264Format = GSTOMX_H264_FORMAT_UNKNOWN;
    }
  }

  /* Input port configuration. */
  {
    param.nPortIndex = omx_base->in_port->port_index;
    OMX_GetParameter (gomx->omx_handle, OMX_IndexParamPortDefinition, &param);

    param.format.video.nFrameWidth = width;
    param.format.video.nFrameHeight = height;

    OMX_SetParameter (gomx->omx_handle, OMX_IndexParamPortDefinition, &param);
  }

  return gst_pad_set_caps (pad, caps);
}
/*
 * debug_dump_element:
 * @bin: the bin that should be analyzed
 * @details: bitmask selecting what to include (states, params, caps, media types)
 * @out: file to write to
 * @indent: level of graph indentation
 *
 * Helper for _gst_debug_bin_to_dot_file() to recursively dump a pipeline.
 * Emits one graphviz "subgraph cluster" per element, one node per pad, and
 * one edge per linked src pad (plus dashed edges from ghost pads to their
 * targets).  Child bins are recursed into with indent + 1.
 */
static void
debug_dump_element (GstBin * bin, GstDebugGraphDetails details, FILE * out,
    const gint indent)
{
  GstIterator *element_iter, *pad_iter;
  gboolean elements_done, pads_done;
  GstElement *element, *peer_element, *target_element;
  GstPad *pad, *peer_pad, *target_pad;
  GstPadDirection dir;
  GstCaps *caps;
  GstStructure *structure;
  /* ownership flags: caps/media may be owned (must free) or borrowed */
  gboolean free_caps, free_media;
  guint src_pads, sink_pads;
  gchar *media = NULL;
  gchar *pad_name, *element_name;
  gchar *peer_pad_name, *peer_element_name;
  gchar *target_pad_name, *target_element_name;
  gchar *color_name;
  gchar *state_name = NULL;
  gchar *param_name = NULL;
  gchar *spc = NULL;

  /* indentation prefix: 2 spaces per level (32 == ' ') */
  spc = g_malloc (1 + indent * 2);
  memset (spc, 32, indent * 2);
  spc[indent * 2] = '\0';

  element_iter = gst_bin_iterate_elements (bin);
  elements_done = FALSE;
  while (!elements_done) {
    switch (gst_iterator_next (element_iter, (gpointer) & element)) {
      case GST_ITERATOR_OK:
        element_name = debug_dump_make_object_name (GST_OBJECT (element));
        if (details & GST_DEBUG_GRAPH_SHOW_STATES) {
          state_name = debug_dump_get_element_state (GST_ELEMENT (element));
        }
        if (details & GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS) {
          param_name = debug_dump_get_element_params (GST_ELEMENT (element));
        }
        /* elements */
        fprintf (out, "%ssubgraph cluster_%s {\n", spc, element_name);
        fprintf (out, "%s fontname=\"Bitstream Vera Sans\";\n", spc);
        fprintf (out, "%s fontsize=\"8\";\n", spc);
        fprintf (out, "%s style=filled;\n", spc);
        fprintf (out, "%s color=black;\n\n", spc);
        fprintf (out, "%s label=\"<%s>\\n%s%s%s\";\n", spc,
            G_OBJECT_TYPE_NAME (element), GST_OBJECT_NAME (element),
            (state_name ? state_name : ""), (param_name ? param_name : "")
            );
        if (state_name) {
          g_free (state_name);
          state_name = NULL;
        }
        if (param_name) {
          g_free (param_name);
          param_name = NULL;
        }
        g_free (element_name);
        /* first pad pass: declare one dot node per pad, count directions */
        src_pads = sink_pads = 0;
        if ((pad_iter = gst_element_iterate_pads (element))) {
          pads_done = FALSE;
          while (!pads_done) {
            switch (gst_iterator_next (pad_iter, (gpointer) & pad)) {
              case GST_ITERATOR_OK:
                dir = gst_pad_get_direction (pad);
                pad_name = debug_dump_make_object_name (GST_OBJECT (pad));
                element_name = debug_dump_make_object_name (GST_OBJECT (element));
                /* ghost pads get paler colors than real pads */
                if (GST_IS_GHOST_PAD (pad)) {
                  color_name = (dir == GST_PAD_SRC) ? "#ffdddd" :
                      ((dir == GST_PAD_SINK) ? "#ddddff" : "#ffffff");
                } else {
                  color_name = (dir == GST_PAD_SRC) ? "#ffaaaa" :
                      ((dir == GST_PAD_SINK) ? "#aaaaff" : "#cccccc");
                }
                /* pads */
                fprintf (out,
                    "%s %s_%s [color=black, fillcolor=\"%s\", label=\"%s\"];\n",
                    spc, element_name, pad_name, color_name,
                    GST_OBJECT_NAME (pad));
                if (dir == GST_PAD_SRC)
                  src_pads++;
                else if (dir == GST_PAD_SINK)
                  sink_pads++;
                g_free (pad_name);
                g_free (element_name);
                gst_object_unref (pad);
                break;
              case GST_ITERATOR_RESYNC:
                gst_iterator_resync (pad_iter);
                break;
              case GST_ITERATOR_ERROR:
              case GST_ITERATOR_DONE:
                pads_done = TRUE;
                break;
            }
          }
          gst_iterator_free (pad_iter);
        }
        if (GST_IS_BIN (element)) {
          fprintf (out, "%s fillcolor=\"#ffffff\";\n", spc);
          /* recurse */
          debug_dump_element (GST_BIN (element), details, out, indent + 1);
        } else {
          /* color leaf elements by role: source / sink / filter / isolated */
          if (src_pads && !sink_pads)
            fprintf (out, "%s fillcolor=\"#ffaaaa\";\n", spc);
          else if (!src_pads && sink_pads)
            fprintf (out, "%s fillcolor=\"#aaaaff\";\n", spc);
          else if (src_pads && sink_pads)
            fprintf (out, "%s fillcolor=\"#aaffaa\";\n", spc);
          else
            fprintf (out, "%s fillcolor=\"#ffffff\";\n", spc);
        }
        fprintf (out, "%s}\n\n", spc);
        /* second pad pass: emit edges for linked src pads */
        if ((pad_iter = gst_element_iterate_pads (element))) {
          pads_done = FALSE;
          while (!pads_done) {
            switch (gst_iterator_next (pad_iter, (gpointer) & pad)) {
              case GST_ITERATOR_OK:
                if (gst_pad_is_linked (pad)
                    && gst_pad_get_direction (pad) == GST_PAD_SRC) {
                  if ((peer_pad = gst_pad_get_peer (pad))) {
                    free_media = FALSE;
                    /* build the edge label from negotiated or template caps */
                    if ((details & GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE) ||
                        (details & GST_DEBUG_GRAPH_SHOW_CAPS_DETAILS)
                        ) {
                      if ((caps = gst_pad_get_negotiated_caps (pad))) {
                        free_caps = TRUE;
                      } else {
                        free_caps = FALSE;
                        if (!(caps =
                                (GstCaps *)
                                gst_pad_get_pad_template_caps (pad))) {
                          /* this should not happen */
                          media = "?";
                        }
                      }
                      if (caps) {
                        if (details & GST_DEBUG_GRAPH_SHOW_CAPS_DETAILS) {
                          /* one caps field per label line, escaped for dot */
                          gchar *tmp =
                              g_strdelimit (gst_caps_to_string (caps), ",",
                              '\n');

                          media = g_strescape (tmp, NULL);
                          free_media = TRUE;
                          g_free (tmp);
                        } else {
                          if (GST_CAPS_IS_SIMPLE (caps)) {
                            structure = gst_caps_get_structure (caps, 0);
                            media =
                                (gchar *) gst_structure_get_name (structure);
                          } else
                            media = "*";
                        }
                        if (free_caps) {
                          gst_caps_unref (caps);
                        }
                      }
                    }
                    pad_name = debug_dump_make_object_name (GST_OBJECT (pad));
                    element_name =
                        debug_dump_make_object_name (GST_OBJECT (element));
                    peer_pad_name =
                        debug_dump_make_object_name (GST_OBJECT (peer_pad));
                    if ((peer_element =
                            gst_pad_get_parent_element (peer_pad))) {
                      peer_element_name =
                          debug_dump_make_object_name (GST_OBJECT
                          (peer_element));
                    } else {
                      /* literal "": only freed when peer_element is set */
                      peer_element_name = "";
                    }
                    /* pad link */
                    if (media) {
                      fprintf (out, "%s%s_%s -> %s_%s [label=\"%s\"]\n", spc,
                          element_name, pad_name, peer_element_name,
                          peer_pad_name, media);
                      if (free_media) {
                        g_free (media);
                      }
                    } else {
                      fprintf (out, "%s%s_%s -> %s_%s\n", spc,
                          element_name, pad_name, peer_element_name,
                          peer_pad_name);
                    }
                    if (GST_IS_GHOST_PAD (pad)) {
                      if ((target_pad =
                              gst_ghost_pad_get_target (GST_GHOST_PAD (pad)))) {
                        target_pad_name =
                            debug_dump_make_object_name (GST_OBJECT
                            (target_pad));
                        if ((target_element =
                                gst_pad_get_parent_element (target_pad))) {
                          target_element_name =
                              debug_dump_make_object_name (GST_OBJECT
                              (target_element));
                        } else {
                          target_element_name = "";
                        }
                        /* src ghostpad relationship */
                        fprintf (out, "%s%s_%s -> %s_%s [style=dashed]\n", spc,
                            target_element_name, target_pad_name, element_name,
                            pad_name);
                        g_free (target_pad_name);
                        if (target_element) {
                          g_free (target_element_name);
                          gst_object_unref (target_element);
                        }
                        gst_object_unref (target_pad);
                      }
                    }
                    if (GST_IS_GHOST_PAD (peer_pad)) {
                      if ((target_pad =
                              gst_ghost_pad_get_target (GST_GHOST_PAD
                                  (peer_pad)))) {
                        target_pad_name =
                            debug_dump_make_object_name (GST_OBJECT
                            (target_pad));
                        if ((target_element =
                                gst_pad_get_parent_element (target_pad))) {
                          target_element_name =
                              debug_dump_make_object_name (GST_OBJECT
                              (target_element));
                        } else {
                          target_element_name = "";
                        }
                        /* sink ghostpad relationship */
                        fprintf (out, "%s%s_%s -> %s_%s [style=dashed]\n", spc,
                            peer_element_name, peer_pad_name,
                            target_element_name, target_pad_name);
                        g_free (target_pad_name);
                        if (target_element) {
                          g_free (target_element_name);
                          gst_object_unref (target_element);
                        }
                        gst_object_unref (target_pad);
                      }
                    }
                    g_free (pad_name);
                    g_free (element_name);
                    g_free (peer_pad_name);
                    if (peer_element) {
                      g_free (peer_element_name);
                      gst_object_unref (peer_element);
                    }
                    gst_object_unref (peer_pad);
                  }
                }
                gst_object_unref (pad);
                break;
              case GST_ITERATOR_RESYNC:
                gst_iterator_resync (pad_iter);
                break;
              case GST_ITERATOR_ERROR:
              case GST_ITERATOR_DONE:
                pads_done = TRUE;
                break;
            }
          }
          gst_iterator_free (pad_iter);
        }
        gst_object_unref (element);
        break;
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (element_iter);
        break;
      case GST_ITERATOR_ERROR:
      case GST_ITERATOR_DONE:
        elements_done = TRUE;
        break;
    }
  }
  gst_iterator_free (element_iter);
  g_free (spc);
}
/* Configure the Android MediaCodec audio decoder for new input caps.
 *
 * Restarts the codec when the format actually changed, builds a GstAmcFormat
 * from the caps (including csd-N codec-data buffers), configures and starts
 * the codec, and derives samples-per-frame for MPEG audio.
 *
 * Fixes vs. previous revision:
 *  - self->codec_datas holds raw g_memdup'd blocks, but was freed with
 *    (GFunc) gst_buffer_unref — wrong destructor, undefined behavior.
 *    It is now freed with g_free.
 *  - `format` was leaked when gst_amc_codec_configure() failed.
 */
static gboolean
gst_amc_audio_dec_set_format (GstAudioDecoder * decoder, GstCaps * caps)
{
  GstAmcAudioDec *self;
  GstStructure *s;
  GstAmcFormat *format;
  const gchar *mime;
  gboolean is_format_change = FALSE;
  gboolean needs_disable = FALSE;
  gchar *format_string;
  gint rate, channels;
  GError *err = NULL;

  self = GST_AMC_AUDIO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, caps);

  /* Check if the caps change is a real format change or if only
   * irrelevant parts of the caps have changed or nothing at all. */
  is_format_change |= (!self->input_caps
      || !gst_caps_is_equal (self->input_caps, caps));

  needs_disable = self->started;

  /* If the component is already started we have to restart it for a real
   * format change.  If no real format change happened we can just exit
   * here. */
  if (needs_disable && !is_format_change) {
    /* Framerate or something minor changed */
    self->input_caps_changed = TRUE;
    GST_DEBUG_OBJECT (self,
        "Already running and caps did not change the format");
    return TRUE;
  }

  if (needs_disable && is_format_change) {
    gst_amc_audio_dec_drain (self);
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    gst_amc_audio_dec_stop (GST_AUDIO_DECODER (self));
    GST_AUDIO_DECODER_STREAM_LOCK (self);
    gst_amc_audio_dec_close (GST_AUDIO_DECODER (self));
    if (!gst_amc_audio_dec_open (GST_AUDIO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to open codec again");
      return FALSE;
    }

    if (!gst_amc_audio_dec_start (GST_AUDIO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to start codec again");
    }
  }
  /* srcpad task is not running at this point */

  mime = caps_to_mime (caps);
  if (!mime) {
    GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
    return FALSE;
  }

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (s, "rate", &rate) ||
      !gst_structure_get_int (s, "channels", &channels)) {
    GST_ERROR_OBJECT (self, "Failed to get rate/channels");
    return FALSE;
  }

  format = gst_amc_format_new_audio (mime, rate, channels, &err);
  if (!format) {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    return FALSE;
  }

  /* FIXME: These buffers needs to be valid until the codec is stopped again.
   * FIX: the list holds g_memdup'd raw blocks, so free with g_free (the
   * previous (GFunc) gst_buffer_unref was the wrong destructor). */
  g_list_foreach (self->codec_datas, (GFunc) g_free, NULL);
  g_list_free (self->codec_datas);
  self->codec_datas = NULL;
  if (gst_structure_has_field (s, "codec_data")) {
    const GValue *h = gst_structure_get_value (s, "codec_data");
    GstBuffer *codec_data = gst_value_get_buffer (h);
    GstMapInfo minfo;
    guint8 *data;

    gst_buffer_map (codec_data, &minfo, GST_MAP_READ);
    data = g_memdup (minfo.data, minfo.size);
    self->codec_datas = g_list_prepend (self->codec_datas, data);
    gst_amc_format_set_buffer (format, "csd-0", data, minfo.size, &err);
    if (err)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    gst_buffer_unmap (codec_data, &minfo);
  } else if (gst_structure_has_field (s, "streamheader")) {
    const GValue *sh = gst_structure_get_value (s, "streamheader");
    gint nsheaders = gst_value_array_get_size (sh);
    GstBuffer *buf;
    const GValue *h;
    gint i, j;
    gchar *fname;
    GstMapInfo minfo;
    guint8 *data;

    for (i = 0, j = 0; i < nsheaders; i++) {
      h = gst_value_array_get_value (sh, i);
      buf = gst_value_get_buffer (h);

      if (strcmp (mime, "audio/vorbis") == 0) {
        guint8 header_type;

        gst_buffer_extract (buf, 0, &header_type, 1);

        /* Only use the identification and setup packets */
        if (header_type != 0x01 && header_type != 0x05)
          continue;
      }

      fname = g_strdup_printf ("csd-%d", j);
      gst_buffer_map (buf, &minfo, GST_MAP_READ);
      data = g_memdup (minfo.data, minfo.size);
      self->codec_datas = g_list_prepend (self->codec_datas, data);
      gst_amc_format_set_buffer (format, fname, data, minfo.size, &err);
      if (err)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      gst_buffer_unmap (buf, &minfo);
      g_free (fname);
      j++;
    }
  }

  format_string = gst_amc_format_to_string (format, &err);
  if (err)
    GST_ELEMENT_WARNING_FROM_ERROR (self, err);
  GST_DEBUG_OBJECT (self, "Configuring codec with format: %s",
      GST_STR_NULL (format_string));
  g_free (format_string);

  if (!gst_amc_codec_configure (self->codec, format, NULL, 0, &err)) {
    GST_ERROR_OBJECT (self, "Failed to configure codec");
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_amc_format_free (format);       /* FIX: don't leak the format */
    return FALSE;
  }

  gst_amc_format_free (format);

  if (!gst_amc_codec_start (self->codec, &err)) {
    GST_ERROR_OBJECT (self, "Failed to start codec");
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    return FALSE;
  }

  self->spf = -1;
  /* TODO: Implement for other codecs too */
  if (gst_structure_has_name (s, "audio/mpeg")) {
    gint mpegversion = -1;

    gst_structure_get_int (s, "mpegversion", &mpegversion);
    if (mpegversion == 1) {
      gint layer = -1, mpegaudioversion = -1;

      gst_structure_get_int (s, "layer", &layer);
      gst_structure_get_int (s, "mpegaudioversion", &mpegaudioversion);
      if (layer == 1)
        self->spf = 384;
      else if (layer == 2)
        self->spf = 1152;
      else if (layer == 3 && mpegaudioversion != -1)
        self->spf = (mpegaudioversion == 1 ? 1152 : 576);
    }
  }

  self->started = TRUE;
  self->input_caps_changed = TRUE;

  /* Start the srcpad loop again */
  self->flushing = FALSE;
  self->downstream_flow_ret = GST_FLOW_OK;
  gst_pad_start_task (GST_AUDIO_DECODER_SRC_PAD (self),
      (GstTaskFunction) gst_amc_audio_dec_loop, decoder, NULL);

  return TRUE;
}
/* Validate negotiated caps (24 bpp BGR only), compute the capture rectangle,
 * and (re)allocate the GDI DIB section + memory DC used for screen grabs.
 * Returns FALSE if the color masks or bpp are unsupported. */
static gboolean
gst_gdiscreencapsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (bsrc);
  HWND capture;
  HDC device;
  GstStructure *structure;
  const GValue *framerate;
  gint red_mask, green_mask, blue_mask;
  gint bpp;

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "red_mask", &red_mask);
  gst_structure_get_int (structure, "green_mask", &green_mask);
  gst_structure_get_int (structure, "blue_mask", &blue_mask);
  /* only the 24-bit BGR byte order GDI produces is accepted */
  if (blue_mask != GST_VIDEO_BYTE1_MASK_24_INT ||
      green_mask != GST_VIDEO_BYTE2_MASK_24_INT ||
      red_mask != GST_VIDEO_BYTE3_MASK_24_INT) {
    GST_ERROR ("Wrong red_,green_,blue_ mask provided. "
        "We only support RGB and BGR");
    return FALSE;
  }

  gst_structure_get_int (structure, "bpp", &bpp);
  if (bpp != 24) {
    GST_ERROR ("Wrong bpp provided %d. We only support 24 bpp", bpp);
    return FALSE;
  }

  /* capture region: whole screen unless a sub-rectangle was configured */
  src->src_rect = src->screen_rect;
  if (src->capture_w && src->capture_h) {
    src->src_rect.left += src->capture_x;
    src->src_rect.top += src->capture_y;
    src->src_rect.right = src->src_rect.left + src->capture_w;
    src->src_rect.bottom = src->src_rect.top + src->capture_h;
  }

  framerate = gst_structure_get_value (structure, "framerate");
  if (framerate) {
    src->rate_numerator = gst_value_get_fraction_numerator (framerate);
    src->rate_denominator = gst_value_get_fraction_denominator (framerate);
  }

  src->info.bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
  src->info.bmiHeader.biWidth = src->src_rect.right - src->src_rect.left;
  /* top - bottom yields a negative height, i.e. a top-down DIB
   * (presumably why the debug print below negates it) */
  src->info.bmiHeader.biHeight = src->src_rect.top - src->src_rect.bottom;
  src->info.bmiHeader.biPlanes = 1;
  src->info.bmiHeader.biBitCount = 24;
  src->info.bmiHeader.biCompression = BI_RGB;
  src->info.bmiHeader.biSizeImage = 0;
  src->info.bmiHeader.biXPelsPerMeter = 0;
  src->info.bmiHeader.biYPelsPerMeter = 0;
  src->info.bmiHeader.biClrUsed = 0;
  src->info.bmiHeader.biClrImportant = 0;

  /* Cleanup first */
  /* NOTE(review): GDI object handles are NULL on failure, not
   * INVALID_HANDLE_VALUE — these sentinel checks look suspect; confirm how
   * hBitmap/memDC are initialized elsewhere in the file. */
  if (src->hBitmap != INVALID_HANDLE_VALUE)
    DeleteObject (src->hBitmap);

  if (src->memDC != INVALID_HANDLE_VALUE)
    DeleteDC (src->memDC);

  /* Allocate: DIB section shared with dibMem, selected into a memory DC */
  capture = GetDesktopWindow ();
  device = GetDC (capture);
  src->hBitmap = CreateDIBSection (device, &(src->info), DIB_RGB_COLORS,
      (void **) &(src->dibMem), 0, 0);
  src->memDC = CreateCompatibleDC (device);
  SelectObject (src->memDC, src->hBitmap);
  ReleaseDC (capture, device);

  GST_DEBUG_OBJECT (src, "size %dx%d, %d/%d fps",
      (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight),
      src->rate_numerator, src->rate_denominator);

  return TRUE;
}
/*! * \brief CvCapture_GStreamer::getProperty retreive the requested property from the pipeline * \param propId requested property * \return property value * * There are two ways the properties can be retreived. For seek-based properties we can query the pipeline. * For frame-based properties, we use the caps of the lasst receivef sample. This means that some properties * are not available until a first frame was received */ double CvCapture_GStreamer::getProperty( int propId ) { GstFormat format; gint64 value; gboolean status; #if GST_VERSION_MAJOR == 0 #define FORMAT &format #else #define FORMAT format #endif if(!pipeline) { CV_WARN("GStreamer: no pipeline"); return false; } switch(propId) { case CV_CAP_PROP_POS_MSEC: format = GST_FORMAT_TIME; status = gst_element_query_position(sink, FORMAT, &value); if(!status) { CV_WARN("GStreamer: unable to query position of stream"); return false; } return value * 1e-6; // nano seconds to milli seconds case CV_CAP_PROP_POS_FRAMES: format = GST_FORMAT_DEFAULT; status = gst_element_query_position(sink, FORMAT, &value); if(!status) { CV_WARN("GStreamer: unable to query position of stream"); return false; } return value; case CV_CAP_PROP_POS_AVI_RATIO: format = GST_FORMAT_PERCENT; status = gst_element_query_position(sink, FORMAT, &value); if(!status) { CV_WARN("GStreamer: unable to query position of stream"); return false; } return ((double) value) / GST_FORMAT_PERCENT_MAX; case CV_CAP_PROP_FRAME_WIDTH: { if (!buffer_caps){ CV_WARN("GStreamer: unable to query width of frame; no frame grabbed yet"); return 0; } GstStructure* structure = gst_caps_get_structure(buffer_caps, 0); gint width = 0; if(!gst_structure_get_int(structure, "width", &width)){ CV_WARN("GStreamer: unable to query width of frame"); return 0; } return width; break; } case CV_CAP_PROP_FRAME_HEIGHT: { if (!buffer_caps){ CV_WARN("GStreamer: unable to query height of frame; no frame grabbed yet"); return 0; } GstStructure* structure = 
gst_caps_get_structure(buffer_caps, 0); gint height = 0; if(!gst_structure_get_int(structure, "height", &height)){ CV_WARN("GStreamer: unable to query height of frame"); return 0; } return height; break; } case CV_CAP_PROP_FPS: { if (!buffer_caps){ CV_WARN("GStreamer: unable to query framerate of stream; no frame grabbed yet"); return 0; } GstStructure* structure = gst_caps_get_structure(buffer_caps, 0); gint num = 0, denom=1; if(!gst_structure_get_fraction(structure, "framerate", &num, &denom)){ CV_WARN("GStreamer: unable to query framerate of stream"); return 0; } return (double)num/(double)denom; break; } case CV_CAP_PROP_FOURCC: break; case CV_CAP_PROP_FRAME_COUNT: format = GST_FORMAT_DEFAULT; status = gst_element_query_position(sink, FORMAT, &value); if(!status) { CV_WARN("GStreamer: unable to query position of stream"); return false; } return value; case CV_CAP_PROP_FORMAT: case CV_CAP_PROP_MODE: case CV_CAP_PROP_BRIGHTNESS: case CV_CAP_PROP_CONTRAST: case CV_CAP_PROP_SATURATION: case CV_CAP_PROP_HUE: case CV_CAP_PROP_GAIN: case CV_CAP_PROP_CONVERT_RGB: break; case CV_CAP_GSTREAMER_QUEUE_LENGTH: if(!sink) { CV_WARN("GStreamer: there is no sink yet"); return false; } return gst_app_sink_get_max_buffers(GST_APP_SINK(sink)); default: CV_WARN("GStreamer: unhandled property"); break; } #undef FORMAT return false; }
/* Verify that videoscale's pad template advertises every raw video format
 * gst-video knows about; formats that are legitimately unsupported must
 * appear on one of the known-missing lists below, otherwise the test errors
 * out so the list (or the element) gets updated. */
static void
check_pad_template (GstPadTemplate * tmpl)
{
  const GValue *list_val, *fmt_val;
  GstStructure *s;
  gboolean *formats_supported;
  GstCaps *caps;
  guint i, num_formats;

  num_formats = get_num_formats ();
  formats_supported = g_new0 (gboolean, num_formats);

  caps = gst_pad_template_get_caps (tmpl);

  /* If this fails, we need to update this unit test */
  fail_unless_equals_int (gst_caps_get_size (caps), 2);

  /* Remove the ANY caps features structure */
  caps = gst_caps_truncate (caps);

  s = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw"));

  list_val = gst_structure_get_value (s, "format");
  fail_unless (list_val != NULL);
  /* If this fails, we need to update this unit test */
  fail_unless (GST_VALUE_HOLDS_LIST (list_val));

  /* mark every format listed in the template as supported */
  for (i = 0; i < gst_value_list_get_size (list_val); ++i) {
    GstVideoFormat fmt;
    const gchar *fmt_str;

    fmt_val = gst_value_list_get_value (list_val, i);
    fail_unless (G_VALUE_HOLDS_STRING (fmt_val));
    fmt_str = g_value_get_string (fmt_val);
    GST_LOG ("format string: '%s'", fmt_str);
    fmt = gst_video_format_from_string (fmt_str);
    if (fmt == GST_VIDEO_FORMAT_UNKNOWN)
      g_error ("Unknown raw format '%s' in pad template caps", fmt_str);
    formats_supported[(guint) fmt] = TRUE;
  }
  gst_caps_unref (caps);

  /* formats 0 (UNKNOWN) and 1 (ENCODED) are skipped on purpose */
  for (i = 2; i < num_formats; ++i) {
    if (!formats_supported[i]) {
      const gchar *fmt_str = gst_video_format_to_string ((GstVideoFormat) i);

      switch (i) {
        case GST_VIDEO_FORMAT_v210:
        case GST_VIDEO_FORMAT_v216:
        case GST_VIDEO_FORMAT_NV12:
        case GST_VIDEO_FORMAT_NV16:
        case GST_VIDEO_FORMAT_NV21:
        case GST_VIDEO_FORMAT_NV24:
        case GST_VIDEO_FORMAT_UYVP:
        case GST_VIDEO_FORMAT_A420:
        case GST_VIDEO_FORMAT_YUV9:
        case GST_VIDEO_FORMAT_YVU9:
        case GST_VIDEO_FORMAT_IYU1:
        case GST_VIDEO_FORMAT_r210:{
          static gboolean shown_fixme[100] = { FALSE, };

          /* BUG FIX: guard the index so that a future GstVideoFormat value
           * >= 100 cannot read/write past the end of shown_fixme. */
          if (i < G_N_ELEMENTS (shown_fixme) && !shown_fixme[i]) {
            GST_FIXME ("FIXME: add %s support to videoscale", fmt_str);
            shown_fixme[i] = TRUE;
          }
          break;
        }
        case GST_VIDEO_FORMAT_BGR16:
        case GST_VIDEO_FORMAT_BGR15:
        case GST_VIDEO_FORMAT_RGB8P:
        case GST_VIDEO_FORMAT_I420_10BE:
        case GST_VIDEO_FORMAT_I420_10LE:
        case GST_VIDEO_FORMAT_I422_10BE:
        case GST_VIDEO_FORMAT_I422_10LE:
        case GST_VIDEO_FORMAT_Y444_10BE:
        case GST_VIDEO_FORMAT_Y444_10LE:
        case GST_VIDEO_FORMAT_GBR:
        case GST_VIDEO_FORMAT_GBR_10BE:
        case GST_VIDEO_FORMAT_GBR_10LE:
        case GST_VIDEO_FORMAT_NV12_64Z32:
          GST_LOG ("Ignoring lack of support for format %s", fmt_str);
          break;
        default:
          g_error ("videoscale doesn't support format '%s'", fmt_str);
          break;
      }
    }
  }

  g_free (formats_supported);
}
EXPORT_C #endif gboolean gst_video_format_parse_caps (GstCaps * caps, GstVideoFormat * format, int *width, int *height) { GstStructure *structure; gboolean ok = TRUE; if (!gst_caps_is_fixed (caps)) return FALSE; structure = gst_caps_get_structure (caps, 0); if (format) { if (gst_structure_has_name (structure, "video/x-raw-yuv")) { guint32 fourcc; ok &= gst_structure_get_fourcc (structure, "format", &fourcc); *format = gst_video_format_from_fourcc (fourcc); if (*format == GST_VIDEO_FORMAT_UNKNOWN) { ok = FALSE; } } else if (gst_structure_has_name (structure, "video/x-raw-rgb")) { int depth; int bpp; int endianness; int red_mask; int green_mask; int blue_mask; int alpha_mask; gboolean have_alpha; ok &= gst_structure_get_int (structure, "depth", &depth); ok &= gst_structure_get_int (structure, "bpp", &bpp); ok &= gst_structure_get_int (structure, "endianness", &endianness); ok &= gst_structure_get_int (structure, "red_mask", &red_mask); ok &= gst_structure_get_int (structure, "green_mask", &green_mask); ok &= gst_structure_get_int (structure, "blue_mask", &blue_mask); have_alpha = gst_structure_get_int (structure, "alpha_mask", &alpha_mask); if (depth == 24 && bpp == 32 && endianness == G_BIG_ENDIAN) { *format = gst_video_format_from_rgb32_masks (red_mask, green_mask, blue_mask); if (*format == GST_VIDEO_FORMAT_UNKNOWN) { ok = FALSE; } } else if (depth == 32 && bpp == 32 && endianness == G_BIG_ENDIAN && have_alpha) { *format = gst_video_format_from_rgba32_masks (red_mask, green_mask, blue_mask, alpha_mask); if (*format == GST_VIDEO_FORMAT_UNKNOWN) { ok = FALSE; } } else if (depth == 24 && bpp == 24 && endianness == G_BIG_ENDIAN) { *format = gst_video_format_from_rgb24_masks (red_mask, green_mask, blue_mask); if (*format == GST_VIDEO_FORMAT_UNKNOWN) { ok = FALSE; } } else { ok = FALSE; } } else { ok = FALSE; } } if (width) { ok &= gst_structure_get_int (structure, "width", width); } if (height) { ok &= gst_structure_get_int (structure, "height", height); } return ok; }
/*!
 * \brief CvCapture_GStreamer::retrieveFrame
 * \return IplImage pointer. [Transfer Full]
 * Retrieve the previously grabbed buffer, and wrap it in an IPLImage structure.
 * The IplImage header is created lazily from the caps of the first sample and
 * then reused; only imageData is re-pointed on subsequent calls.
 */
IplImage * CvCapture_GStreamer::retrieveFrame(int)
{
    // nothing was grabbed yet
    if(!buffer)
        return 0;

    //construct a frame header if we did not have any yet
    if(!frame)
    {
        gint height, width;

        //reuse the caps ptr
        if (buffer_caps)
            gst_caps_unref(buffer_caps);

#if GST_VERSION_MAJOR == 0
        buffer_caps = gst_buffer_get_caps(buffer);
#else
        buffer_caps = gst_sample_get_caps(sample);
#endif
        // bail out in no caps
        assert(gst_caps_get_size(buffer_caps) == 1);
        GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);

        // bail out if width or height are 0
        if(!gst_structure_get_int(structure, "width", &width) ||
           !gst_structure_get_int(structure, "height", &height))
        {
            return 0;
        }

        // channel count for the IplImage header; 0 means "unrecognized caps"
        int depth = 3;
#if GST_VERSION_MAJOR > 0
        depth = 0;
        const gchar* name = gst_structure_get_name(structure);
        const gchar* format = gst_structure_get_string(structure, "format");

        if (!name || !format)
            return 0;

        // we support 3 types of data:
        //     video/x-raw, format=BGR   -> 8bit, 3 channels
        //     video/x-raw, format=GRAY8 -> 8bit, 1 channel
        //     video/x-bayer             -> 8bit, 1 channel
        // bayer data is never decoded, the user is responsible for that
        // everything is 8 bit, so we just test the caps for bit depth
        if (strcasecmp(name, "video/x-raw") == 0)
        {
            if (strcasecmp(format, "BGR") == 0) {
                depth = 3;
            }
            else if(strcasecmp(format, "GRAY8") == 0){
                depth = 1;
            }
        }
        else if (strcasecmp(name, "video/x-bayer") == 0)
        {
            depth = 1;
        }
#endif
        if (depth > 0) {
            frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
        }else{
            return 0;
        }
    }

    // gstreamer expects us to handle the memory at this point
    // so we can just wrap the raw buffer and be done with it
#if GST_VERSION_MAJOR == 0
    frame->imageData = (char *)GST_BUFFER_DATA(buffer);
#else
    // the data ptr in GstMapInfo is only valid throughout the mapifo objects life.
    // TODO: check if reusing the mapinfo object is ok.
    gboolean success = gst_buffer_map(buffer,info, (GstMapFlags)GST_MAP_READ);
    if (!success){
        //something weird went wrong here. abort. abort.
        //fprintf(stderr,"GStreamer: unable to map buffer");
        return 0;
    }
    // NOTE(review): imageData is assigned from the map and the buffer is
    // unmapped immediately below, so the pointer is formally dangling per the
    // GstMapInfo contract; this appears to rely on the mapping being a no-op
    // for this buffer's memory. Confirm against the class's buffer lifecycle
    // before relying on the returned image's data.
    frame->imageData = (char*)info->data;
    gst_buffer_unmap(buffer,info);
#endif

    return frame;
}
/* Configure the DirectShow capture graph for the negotiated caps: find the
 * matching entry in our cached caps list, patch its AM_MEDIA_TYPE with the
 * requested size/framerate, and connect the capture pin to our fakesink.
 * Returns TRUE on success (also when no matching mediatype was found, which
 * silently leaves the graph untouched), FALSE on connection failure. */
static gboolean
gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  HRESULT hres;
  IPin *input_pin = NULL;
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
  GstStructure *s = gst_caps_get_structure (caps, 0);

  /* search the negociated caps in our caps list to get its index and the corresponding mediatype */
  if (gst_caps_is_subset (caps, src->caps)) {
    guint i = 0;
    gint res = -1;

    /* linear scan; stops at the first caps entry that contains `caps` */
    for (; i < gst_caps_get_size (src->caps) && res == -1; i++) {
      GstCaps *capstmp = gst_caps_copy_nth (src->caps, i);

      if (gst_caps_is_subset (caps, capstmp)) {
        res = i;
      }
      gst_caps_unref (capstmp);
    }

    if (res != -1 && src->pins_mediatypes) {
      /* get the corresponding media type and build the dshow graph */
      GList *type_pin_mediatype = g_list_nth (src->pins_mediatypes, res);

      if (type_pin_mediatype) {
        GstCapturePinMediaType *pin_mediatype =
            (GstCapturePinMediaType *) type_pin_mediatype->data;
        gchar *caps_string = NULL;
        gchar *src_caps_string = NULL;  /* NOTE(review): unused; candidate for removal */

        /* retrieve the desired video size */
        VIDEOINFOHEADER *video_info = NULL;
        gint width = 0;
        gint height = 0;
        gint numerator = 0;
        gint denominator = 0;
        gst_structure_get_int (s, "width", &width);
        gst_structure_get_int (s, "height", &height);
        gst_structure_get_fraction (s, "framerate", &numerator, &denominator);

        /* check if the desired video size is valid about granularity */
        /* This check will be removed when GST_TYPE_INT_RANGE_STEP exits */
        /* See remarks in gst_dshow_new_video_caps function */
        if (pin_mediatype->granularityWidth != 0
            && width % pin_mediatype->granularityWidth != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              width, pin_mediatype->granularityWidth);
        if (pin_mediatype->granularityHeight != 0
            && height % pin_mediatype->granularityHeight != 0)
          g_warning ("your desired video size is not valid : %d mod %d !=0\n",
              height, pin_mediatype->granularityHeight);

        /* update mediatype */
        /* NOTE(review): if caps carry no framerate, numerator stays 0 and the
         * AvgTimePerFrame division below divides by zero — confirm negotiated
         * caps always contain a framerate field before this point. */
        video_info = (VIDEOINFOHEADER *) pin_mediatype->mediatype->pbFormat;
        video_info->bmiHeader.biWidth = width;
        video_info->bmiHeader.biHeight = height;
        video_info->AvgTimePerFrame =
            (LONGLONG) (10000000 * denominator / (double) numerator);
        video_info->bmiHeader.biSizeImage = DIBSIZE (video_info->bmiHeader);
        pin_mediatype->mediatype->lSampleSize = DIBSIZE (video_info->bmiHeader);

        /* hand the patched media type and our buffer callback to the fakesink */
        src->dshow_fakesink->gst_set_media_type (pin_mediatype->mediatype);
        src->dshow_fakesink->gst_set_buffer_callback (
            (push_buffer_func) gst_dshowvideosrc_push_buffer, src);

        gst_dshow_get_pin_from_filter (src->dshow_fakesink, PINDIR_INPUT,
            &input_pin);
        if (!input_pin) {
          GST_ERROR ("Can't get input pin from our dshow fakesink");
          goto error;
        }

        /* connect capture pin -> fakesink input with the forced media type */
        hres = src->filter_graph->ConnectDirect (pin_mediatype->capture_pin,
            input_pin, pin_mediatype->mediatype);
        input_pin->Release ();
        if (hres != S_OK) {
          GST_ERROR
              ("Can't connect capture filter with fakesink filter (error=0x%x)",
              hres);
          goto error;
        }

        /* save width and height negociated */
        gst_structure_get_int (s, "width", &src->width);
        gst_structure_get_int (s, "height", &src->height);

        /* remember whether the stream is RGB (affects downstream flipping) */
        src->is_rgb = FALSE;
        caps_string = gst_caps_to_string (caps);
        if (caps_string) {
          if (strstr (caps_string, "video/x-raw-rgb")) {
            src->is_rgb = TRUE;
          } else {
            src->is_rgb = FALSE;
          }
          g_free (caps_string);
        }
      }
    }
  }

  return TRUE;

error:
  return FALSE;
}
/* GstAudioDecoder::set_format vfunc: (re)creates a uniaudio codec handle for
 * the new input caps and pushes all caps-derived parameters (sample rate,
 * channels, bitrate, codec data, ...) into the decoder. Returns FALSE and
 * closes the handle on any parameter failure. */
static gboolean gst_imx_audio_uniaudio_dec_set_format(GstAudioDecoder *dec, GstCaps *caps)
{
	UniACodecParameter parameter;
	UniACodecMemoryOps memory_ops;
	GstImxAudioUniaudioDec *imx_audio_uniaudio_dec = GST_IMX_AUDIO_UNIAUDIO_DEC(dec);

/* Push `parameter` into the codec; on failure log, close the handle, bail */
#define UNIA_SET_PARAMETER(PARAM_ID, DESC) \
	do \
	{ \
		if (imx_audio_uniaudio_dec->codec->set_parameter(imx_audio_uniaudio_dec->handle, (PARAM_ID), &parameter) != ACODEC_SUCCESS) \
		{ \
			GST_ERROR_OBJECT(dec, "setting %s parameter failed: %s", (DESC), imx_audio_uniaudio_dec->codec->get_last_error(imx_audio_uniaudio_dec->handle)); \
			gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec); \
			return FALSE; \
		} \
	} \
	while (0)

/* Same, but with an explicit value pointer instead of `parameter` */
#define UNIA_SET_PARAMETER_EX(PARAM_ID, DESC, VALUE) \
	do \
	{ \
		if (imx_audio_uniaudio_dec->codec->set_parameter(imx_audio_uniaudio_dec->handle, (PARAM_ID), ((UniACodecParameter *)(VALUE))) != ACODEC_SUCCESS) \
		{ \
			GST_ERROR_OBJECT(dec, "setting %s parameter failed: %s", (DESC), imx_audio_uniaudio_dec->codec->get_last_error(imx_audio_uniaudio_dec->handle)); \
			gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec); \
			return FALSE; \
		} \
	} \
	while (0)

	if (imx_audio_uniaudio_dec->handle != NULL)
	{
		/* drain old decoder handle */
		gst_imx_audio_uniaudio_dec_handle_frame(dec, NULL);
		gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec);
	}

	if ((imx_audio_uniaudio_dec->codec = gst_imx_audio_uniaudio_codec_table_get_codec(caps)) == NULL)
	{
		GST_ERROR_OBJECT(dec, "found no suitable codec for caps %" GST_PTR_FORMAT, (gpointer)caps);
		return FALSE;
	}

	/* the codec allocates through our wrappers so memory accounting stays ours */
	memory_ops.Calloc  = gst_imx_audio_uniaudio_dec_calloc;
	memory_ops.Malloc  = gst_imx_audio_uniaudio_dec_malloc;
	memory_ops.Free    = gst_imx_audio_uniaudio_dec_free;
	memory_ops.ReAlloc = gst_imx_audio_uniaudio_dec_realloc;

	if ((imx_audio_uniaudio_dec->handle = imx_audio_uniaudio_dec->codec->create_codec(&memory_ops)) == NULL)
	{
		GST_ERROR_OBJECT(dec, "creating codec handle for caps %" GST_PTR_FORMAT " failed", (gpointer)caps);
		return FALSE;
	}

	/* Get configuration parameters from caps */
	{
		int samplerate, channels, bitrate, block_align, wmaversion;
		gchar const *stream_format, *sample_format;
		GValue const *value;
		gboolean framed, is_vorbis;
		GstBuffer *codec_data = NULL;
		GstStructure *structure = gst_caps_get_structure(caps, 0);

		imx_audio_uniaudio_dec->skip_header_counter = 0;

		/* Vorbis input is always framed even without a framed/parsed field */
		is_vorbis = (g_strcmp0(gst_structure_get_name(structure), "audio/x-vorbis") == 0);
		parameter.framed = is_vorbis || (gst_structure_get_boolean(structure, "framed", &framed) && framed) || (gst_structure_get_boolean(structure, "parsed", &framed) && framed);
		GST_DEBUG_OBJECT(dec, "input is framed: %d", parameter.framed);
		UNIA_SET_PARAMETER(UNIA_FRAMED, "framed");

		if (gst_structure_get_int(structure, "rate", &samplerate))
		{
			GST_DEBUG_OBJECT(dec, "input caps sample rate: %d Hz", samplerate);
			parameter.samplerate = samplerate;
			UNIA_SET_PARAMETER(UNIA_SAMPLERATE, "sample rate");
		}

		if (gst_structure_get_int(structure, "channels", &channels))
		{
			CHAN_TABLE table;

			GST_DEBUG_OBJECT(dec, "input caps channel count: %d", channels);
			parameter.channels = channels;
			UNIA_SET_PARAMETER(UNIA_CHANNEL, "channel");

			memset(&table, 0, sizeof(table));
			table.size = CHANNEL_MAPS_SIZE;
			memcpy(&table.channel_table, uniaudio_channel_maps, sizeof(uniaudio_channel_maps));
			UNIA_SET_PARAMETER_EX(UNIA_CHAN_MAP_TABLE, "channel map", &table);
		}

		if (gst_structure_get_int(structure, "bitrate", &bitrate))
		{
			/* BUG FIX: the debug line said "channel count" for the bitrate */
			GST_DEBUG_OBJECT(dec, "input caps bitrate: %d", bitrate);
			parameter.bitrate = bitrate;
			UNIA_SET_PARAMETER(UNIA_BITRATE, "bitrate");
		}

		if (gst_structure_get_int(structure, "block_align", &block_align))
		{
			GST_DEBUG_OBJECT(dec, "block alignment: %d", block_align);
			parameter.blockalign = block_align;
			UNIA_SET_PARAMETER(UNIA_WMA_BlOCKALIGN, "blockalign");
		}

		if (gst_structure_get_int(structure, "wmaversion", &wmaversion))
		{
			GST_DEBUG_OBJECT(dec, "WMA version: %d", wmaversion);
			parameter.version = wmaversion;
			UNIA_SET_PARAMETER(UNIA_WMA_VERSION, "wmaversion");
		}

		if ((stream_format = gst_structure_get_string(structure, "stream-format")) != NULL)
		{
			GST_DEBUG_OBJECT(dec, "input caps stream format: %s", stream_format);

			/* BUG FIX: the original compared against "raw" where it meant
			 * "adts", and used independent if statements, so the trailing
			 * else clobbered an "adif" match with STREAM_UNKNOW. Use a proper
			 * else-if chain with the correct format strings. */
			if (g_strcmp0(stream_format, "adts") == 0)
				parameter.stream_type = STREAM_ADTS;
			else if (g_strcmp0(stream_format, "adif") == 0)
				parameter.stream_type = STREAM_ADIF;
			else if (g_strcmp0(stream_format, "raw") == 0)
				parameter.stream_type = STREAM_RAW;
			else
				parameter.stream_type = STREAM_UNKNOW;
			UNIA_SET_PARAMETER(UNIA_STREAM_TYPE, "stream type");
		}

		if ((sample_format = gst_structure_get_string(structure, "format")) != NULL)
		{
			GstAudioFormat fmt;
			GstAudioFormatInfo const * fmtinfo;

			GST_DEBUG_OBJECT(dec, "input caps stream sample format: %s", sample_format);
			if ((fmt = gst_audio_format_from_string(sample_format)) == GST_AUDIO_FORMAT_UNKNOWN)
			{
				GST_ERROR_OBJECT(dec, "format is unknown, cannot continue");
				/* BUG FIX: close the freshly created handle like every other
				 * error path does, instead of leaking it */
				gst_imx_audio_uniaudio_dec_close_handle(imx_audio_uniaudio_dec);
				return FALSE;
			}

			fmtinfo = gst_audio_format_get_info(fmt);
			g_assert(fmtinfo != NULL);

			parameter.depth = GST_AUDIO_FORMAT_INFO_DEPTH(fmtinfo);
			UNIA_SET_PARAMETER(UNIA_DEPTH, "depth");
		}

		/* Handle codec data, either directly from a codec_data caps,
		 * or assemble it from a list of buffers specified by the
		 * streamheader caps (typically used by Vorbis audio) */

		/* Cleanup old codec data first */
		if (imx_audio_uniaudio_dec->codec_data != NULL)
		{
			gst_buffer_unref(imx_audio_uniaudio_dec->codec_data);
			imx_audio_uniaudio_dec->codec_data = NULL;
		}

		/* Check if either codec_data or streamheader caps exist */
		if ((value = gst_structure_get_value(structure, "codec_data")) != NULL)
		{
			/* codec_data caps exist - simply make a copy of its buffer
			 * (this makes sure we own that buffer properly) */
			GstBuffer *caps_buffer;
			GST_DEBUG_OBJECT(dec, "reading codec_data value");
			caps_buffer = gst_value_get_buffer(value);
			g_assert(caps_buffer != NULL);
			codec_data = gst_buffer_copy(caps_buffer);
		}
		else if ((value = gst_structure_get_value(structure, "streamheader")) != NULL)
		{
			/* streamheader caps exist, which are a list of buffers
			 * these buffers need to be concatenated and then given as
			 * one consecutive codec data buffer to the decoder */
			guint i, num_buffers = gst_value_array_get_size(value);
			GstAdapter *streamheader_adapter = gst_adapter_new();

			GST_DEBUG_OBJECT(dec, "reading streamheader value (%u headers)", num_buffers);

			imx_audio_uniaudio_dec->num_vorbis_headers = num_buffers;

			/* Use the GstAdapter to stitch these buffers together */
			for (i = 0; i < num_buffers; ++i)
			{
				GValue const *array_value = gst_value_array_get_value(value, i);
				GstBuffer *buf = gst_value_get_buffer(array_value);
				GST_DEBUG_OBJECT(dec, "add streamheader buffer #%u with %" G_GSIZE_FORMAT " byte", i, gst_buffer_get_size(buf));
				gst_adapter_push(streamheader_adapter, gst_buffer_copy(buf));
			}
			codec_data = gst_adapter_take_buffer(streamheader_adapter, gst_adapter_available(streamheader_adapter));
			g_object_unref(G_OBJECT(streamheader_adapter));
		}

		/* At this point, if either codec_data or streamheader caps were found,
		 * the codec_data pointer will refer to a valid non-empty buffer with
		 * codec data inside. This buffer is owned by this audio decoder object,
		 * and must be kept around for as long as the decoder needs to be ran,
		 * since the set_parameter call below does *not* copy the codec data
		 * bytes into some internal buffer. Instead, the uniaudio decoder plugin
		 * expects the caller to keep the buffer valid. */
		if ((codec_data != NULL) && (gst_buffer_get_size(codec_data) != 0))
		{
			GstMapInfo map;
			gst_buffer_map(codec_data, &map, GST_MAP_READ);
			parameter.codecData.size = map.size;
			parameter.codecData.buf = (char *)(map.data);
			UNIA_SET_PARAMETER(UNIA_CODEC_DATA, "codec data");
			gst_buffer_unmap(codec_data, &map);

			imx_audio_uniaudio_dec->codec_data = codec_data;

			GST_DEBUG_OBJECT(dec, "codec data: %" G_GUINT32_FORMAT " byte", (guint32)(parameter.codecData.size));
		}
	}

	GST_DEBUG_OBJECT(dec, "decoder configured");

	imx_audio_uniaudio_dec->has_audioinfo_set = FALSE;

#undef UNIA_SET_PARAMETER
#undef UNIA_SET_PARAMETER_EX

	return TRUE;
}
/* Build GstCaps for one DirectShow capture pin media type: map the raw video
 * format (or DV variant parsed from `name`) to a caps template, then set
 * width/height/framerate from the pin's VIDEO_STREAM_CONFIG_CAPS ranges.
 * Returns NULL if the format is unrecognized. [Transfer full] */
GstCaps *
gst_dshow_new_video_caps (GstVideoFormat video_format, const gchar * name,
    GstCapturePinMediaType * pin_mediatype)
{
  GstCaps *video_caps = NULL;
  GstStructure *video_structure = NULL;
  gint min_w, max_w;
  gint min_h, max_h;
  gint min_fr, max_fr;

  /* raw video format */
  switch (video_format) {
    case GST_VIDEO_FORMAT_BGR:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_BGR);
      break;
    case GST_VIDEO_FORMAT_I420:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_YUV ("I420"));
      break;
    case GST_VIDEO_FORMAT_YUY2:
      video_caps = gst_caps_from_string (GST_VIDEO_CAPS_YUV ("YUY2"));
      break;
    default:
      break;
  }

  /* other video format */
  if (!video_caps) {
    if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=FALSE", 31) == 0) {
      /* raw DV frames: falls through so size/framerate ranges get added below */
      video_caps = gst_caps_new_simple ("video/x-dv",
          "systemstream", G_TYPE_BOOLEAN, FALSE,
          "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'v', 's', 'd'),
          NULL);
    } else if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=TRUE", 31) == 0) {
      /* DV system stream carries its own geometry: return as-is, no ranges */
      video_caps = gst_caps_new_simple ("video/x-dv",
          "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
      return video_caps;
    }
  }

  if (!video_caps)
    return NULL;

  video_structure = gst_caps_get_structure (video_caps, 0);

  /* Hope GST_TYPE_INT_RANGE_STEP will exits in future gstreamer releases */
  /* because we could use : */
  /* "width", GST_TYPE_INT_RANGE_STEP, video_default->minWidth, video_default->maxWidth, video_default->granularityWidth */
  /* instead of : */
  /* "width", GST_TYPE_INT_RANGE, video_default->minWidth, video_default->maxWidth */

  /* For framerate we do not need a step (granularity) because  */
  /* "The IAMStreamConfig::SetFormat method will set the frame rate to the closest  */
  /* value that the filter supports" as it said in the VIDEO_STREAM_CONFIG_CAPS dshwo doc */

  /* frame intervals are in 100ns units, so 10000000/interval = frames/sec;
   * Max interval bounds the minimum framerate and vice versa.
   * NOTE(review): a zero Min/MaxFrameInterval from the driver would divide by
   * zero here — confirm upstream guarantees non-zero intervals. */
  min_w = pin_mediatype->vscc.MinOutputSize.cx;
  max_w = pin_mediatype->vscc.MaxOutputSize.cx;
  min_h = pin_mediatype->vscc.MinOutputSize.cy;
  max_h = pin_mediatype->vscc.MaxOutputSize.cy;
  min_fr = (gint) (10000000 / pin_mediatype->vscc.MaxFrameInterval);
  max_fr = (gint)(10000000 / pin_mediatype->vscc.MinFrameInterval);

  if (min_w == max_w)
    gst_structure_set (video_structure, "width", G_TYPE_INT, min_w, NULL);
  else
    gst_structure_set (video_structure,
        "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);

  if (min_h == max_h)
    gst_structure_set (video_structure, "height", G_TYPE_INT, min_h, NULL);
  else
    gst_structure_set (video_structure,
        "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);

  if (min_fr == max_fr)
    gst_structure_set (video_structure, "framerate",
        GST_TYPE_FRACTION, min_fr, 1, NULL);
  else
    gst_structure_set (video_structure, "framerate",
        GST_TYPE_FRACTION_RANGE, min_fr, 1, max_fr, 1, NULL);

  return video_caps;
}
/* returns static descriptions and dynamic ones (such as video/x-raw), * or NULL if caps aren't known at all */ static gchar * format_info_get_desc (const FormatInfo * info, const GstCaps * caps) { const GstStructure *s; g_assert (info != NULL); if (info->desc != NULL) return g_strdup (_(info->desc)); s = gst_caps_get_structure (caps, 0); if (strcmp (info->type, "video/x-raw") == 0) { gchar *ret = NULL; const gchar *str = 0; GstVideoFormat format; const GstVideoFormatInfo *finfo; str = gst_structure_get_string (s, "format"); if (str == NULL) return g_strdup (_("Uncompressed video")); format = gst_video_format_from_string (str); if (format == GST_VIDEO_FORMAT_UNKNOWN) return g_strdup (_("Uncompressed video")); finfo = gst_video_format_get_info (format); if (GST_VIDEO_FORMAT_INFO_IS_GRAY (finfo)) { ret = g_strdup (_("Uncompressed gray")); } else if (GST_VIDEO_FORMAT_INFO_IS_YUV (finfo)) { const gchar *subs; gint w_sub, h_sub, n_semi; w_sub = GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1); h_sub = GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 1); if (w_sub == 1 && h_sub == 1) { subs = "4:4:4"; } else if (w_sub == 2 && h_sub == 1) { subs = "4:2:2"; } else if (w_sub == 2 && h_sub == 2) { subs = "4:2:0"; } else if (w_sub == 4 && h_sub == 1) { subs = "4:1:1"; } else { subs = ""; } n_semi = GST_VIDEO_FORMAT_INFO_HAS_ALPHA (finfo) ? 3 : 2; if (GST_VIDEO_FORMAT_INFO_N_PLANES (finfo) == 1) { ret = g_strdup_printf (_("Uncompressed packed YUV %s"), subs); } else if (GST_VIDEO_FORMAT_INFO_N_PLANES (finfo) == n_semi) { ret = g_strdup_printf (_("Uncompressed semi-planar YUV %s"), subs); } else { ret = g_strdup_printf (_("Uncompressed planar YUV %s"), subs); } } else if (GST_VIDEO_FORMAT_INFO_IS_RGB (finfo)) { gboolean alpha, palette; gint bits; alpha = GST_VIDEO_FORMAT_INFO_HAS_ALPHA (finfo); palette = GST_VIDEO_FORMAT_INFO_HAS_PALETTE (finfo); bits = GST_VIDEO_FORMAT_INFO_BITS (finfo); if (palette) { ret = g_strdup_printf (_("Uncompressed palettized %d-bit %s"), bits, alpha ? 
"RGBA" : "RGB"); } else { ret = g_strdup_printf (_("Uncompressed %d-bit %s"), bits, alpha ? "RGBA" : "RGB"); } } else { ret = g_strdup (_("Uncompressed video")); } return ret; } else if (strcmp (info->type, "video/x-h263") == 0) { const gchar *variant, *ret; variant = gst_structure_get_string (s, "variant"); if (variant == NULL) ret = "H.263"; else if (strcmp (variant, "itu") == 0) ret = "ITU H.26n"; /* why not ITU H.263? (tpm) */ else if (strcmp (variant, "lead") == 0) ret = "Lead H.263"; else if (strcmp (variant, "microsoft") == 0) ret = "Microsoft H.263"; else if (strcmp (variant, "vdolive") == 0) ret = "VDOLive"; else if (strcmp (variant, "vivo") == 0) ret = "Vivo H.263"; else if (strcmp (variant, "xirlink") == 0) ret = "Xirlink H.263"; else { GST_WARNING ("Unknown H263 variant '%s'", variant); ret = "H.263"; } return g_strdup (ret); } else if (strcmp (info->type, "video/x-h264") == 0) { const gchar *variant, *ret; const gchar *profile; variant = gst_structure_get_string (s, "variant"); if (variant == NULL) ret = "H.264"; else if (strcmp (variant, "itu") == 0) ret = "ITU H.264"; else if (strcmp (variant, "videosoft") == 0) ret = "Videosoft H.264"; else if (strcmp (variant, "lead") == 0) ret = "Lead H.264"; else { GST_WARNING ("Unknown H264 variant '%s'", variant); ret = "H.264"; } /* profile */ profile = gst_structure_get_string (s, "profile"); if (profile != NULL) profile = pbutils_desc_get_h264_profile_name_from_nick (profile); if (profile == NULL) return g_strdup (ret); return g_strdup_printf ("%s (%s Profile)", ret, profile); } else if (strcmp (info->type, "video/x-h265") == 0) { const gchar *profile = gst_structure_get_string (s, "profile"); if (profile != NULL) profile = pbutils_desc_get_h265_profile_name_from_nick (profile); if (profile != NULL) return g_strdup_printf ("H.265 (%s Profile)", profile); return g_strdup ("H.265"); } else if (strcmp (info->type, "video/x-dirac") == 0) { const gchar *profile = gst_structure_get_string (s, "profile"); if 
(profile == NULL) return g_strdup ("Dirac"); if (strcmp (profile, "vc2-low-delay") == 0) return g_strdup_printf ("Dirac (%s)", "VC-2 Low Delay Profile"); else if (strcmp (profile, "vc2-simple") == 0) return g_strdup_printf ("Dirac (%s)", "VC-2 Simple Profile"); else if (strcmp (profile, "vc2-main") == 0) return g_strdup_printf ("Dirac (%s)", "VC-2 Main Profile"); else return g_strdup ("Dirac"); } else if (strcmp (info->type, "video/x-divx") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "divxversion", &ver) || ver <= 2) { GST_WARNING ("Unexpected DivX version in %" GST_PTR_FORMAT, caps); return g_strdup ("DivX MPEG-4"); } return g_strdup_printf (_("DivX MPEG-4 Version %d"), ver); } else if (strcmp (info->type, "video/x-msmpeg") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "msmpegversion", &ver) || ver < 40 || ver > 49) { GST_WARNING ("Unexpected msmpegversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Microsoft MPEG-4 4.x"); } return g_strdup_printf ("Microsoft MPEG-4 4.%d", ver % 10); } else if (strcmp (info->type, "video/x-truemotion") == 0) { gint ver = 0; gst_structure_get_int (s, "trueversion", &ver); switch (ver) { case 1: return g_strdup_printf ("Duck TrueMotion 1"); case 2: return g_strdup_printf ("TrueMotion 2.0"); default: GST_WARNING ("Unexpected trueversion in %" GST_PTR_FORMAT, caps); break; } return g_strdup_printf ("TrueMotion"); } else if (strcmp (info->type, "video/x-xan") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "wcversion", &ver) || ver < 1) { GST_WARNING ("Unexpected wcversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Xan Wing Commander"); } return g_strdup_printf ("Xan Wing Commander %u", ver); } else if (strcmp (info->type, "video/x-indeo") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "indeoversion", &ver) || ver < 2) { GST_WARNING ("Unexpected indeoversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Intel Indeo"); } return g_strdup_printf ("Intel Indeo %u", ver); } else if (strcmp (info->type, 
"audio/x-wma") == 0) { gint ver = 0; gst_structure_get_int (s, "wmaversion", &ver); switch (ver) { case 1: case 2: case 3: return g_strdup_printf ("Windows Media Audio %d", ver + 6); default: break; } GST_WARNING ("Unexpected wmaversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Windows Media Audio"); } else if (strcmp (info->type, "video/x-wmv") == 0) { gint ver = 0; const gchar *str; gst_structure_get_int (s, "wmvversion", &ver); str = gst_structure_get_string (s, "format"); switch (ver) { case 1: case 2: case 3: if (str && strncmp (str, "MSS", 3)) { return g_strdup_printf ("Windows Media Video %d Screen", ver + 6); } else { return g_strdup_printf ("Windows Media Video %d", ver + 6); } default: break; } GST_WARNING ("Unexpected wmvversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Windows Media Video"); } else if (strcmp (info->type, "audio/x-mace") == 0) { gint ver = 0; gst_structure_get_int (s, "maceversion", &ver); if (ver == 3 || ver == 6) { return g_strdup_printf ("MACE-%d", ver); } else { GST_WARNING ("Unexpected maceversion in %" GST_PTR_FORMAT, caps); return g_strdup ("MACE"); } } else if (strcmp (info->type, "video/x-svq") == 0) { gint ver = 0; gst_structure_get_int (s, "svqversion", &ver); if (ver == 1 || ver == 3) { return g_strdup_printf ("Sorensen Video %d", ver); } else { GST_WARNING ("Unexpected svqversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Sorensen Video"); } } else if (strcmp (info->type, "video/x-asus") == 0) { gint ver = 0; gst_structure_get_int (s, "asusversion", &ver); if (ver == 1 || ver == 2) { return g_strdup_printf ("Asus Video %d", ver); } else { GST_WARNING ("Unexpected asusversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Asus Video"); } } else if (strcmp (info->type, "video/x-ati-vcr") == 0) { gint ver = 0; gst_structure_get_int (s, "vcrversion", &ver); if (ver == 1 || ver == 2) { return g_strdup_printf ("ATI VCR %d", ver); } else { GST_WARNING ("Unexpected acrversion in %" GST_PTR_FORMAT, caps); return 
g_strdup ("ATI VCR"); } } else if (strcmp (info->type, "audio/x-adpcm") == 0) { const GValue *layout_val; layout_val = gst_structure_get_value (s, "layout"); if (layout_val != NULL && G_VALUE_HOLDS_STRING (layout_val)) { const gchar *layout; if ((layout = g_value_get_string (layout_val))) { gchar *layout_upper, *ret; if (strcmp (layout, "swf") == 0) return g_strdup ("Shockwave ADPCM"); if (strcmp (layout, "microsoft") == 0) return g_strdup ("Microsoft ADPCM"); if (strcmp (layout, "quicktime") == 0) return g_strdup ("Quicktime ADPCM"); if (strcmp (layout, "westwood") == 0) return g_strdup ("Westwood ADPCM"); if (strcmp (layout, "yamaha") == 0) return g_strdup ("Yamaha ADPCM"); /* FIXME: other layouts: sbpro2, sbpro3, sbpro4, ct, g726, ea, * adx, xa, 4xm, smjpeg, dk4, dk3, dvi */ layout_upper = g_ascii_strup (layout, -1); ret = g_strdup_printf ("%s ADPCM", layout_upper); g_free (layout_upper); return ret; } } return g_strdup ("ADPCM"); } else if (strcmp (info->type, "audio/mpeg") == 0) { gint ver = 0, layer = 0; gst_structure_get_int (s, "mpegversion", &ver); switch (ver) { case 1: gst_structure_get_int (s, "layer", &layer); switch (layer) { case 1: case 2: case 3: return g_strdup_printf ("MPEG-1 Layer %d (MP%d)", layer, layer); default: break; } GST_WARNING ("Unexpected MPEG-1 layer in %" GST_PTR_FORMAT, caps); return g_strdup ("MPEG-1 Audio"); case 2: return g_strdup ("MPEG-2 AAC"); case 4: return g_strdup ("MPEG-4 AAC"); default: break; } GST_WARNING ("Unexpected audio mpegversion in %" GST_PTR_FORMAT, caps); return g_strdup ("MPEG Audio"); } else if (strcmp (info->type, "audio/x-pn-realaudio") == 0) { gint ver = 0; gst_structure_get_int (s, "raversion", &ver); switch (ver) { case 1: return g_strdup ("RealAudio 14k4bps"); case 2: return g_strdup ("RealAudio 28k8bps"); case 8: return g_strdup ("RealAudio G2 (Cook)"); default: break; } GST_WARNING ("Unexpected raversion in %" GST_PTR_FORMAT, caps); return g_strdup ("RealAudio"); } else if (strcmp (info->type, 
"video/x-pn-realvideo") == 0) { gint ver = 0; gst_structure_get_int (s, "rmversion", &ver); switch (ver) { case 1: return g_strdup ("RealVideo 1.0"); case 2: return g_strdup ("RealVideo 2.0"); case 3: return g_strdup ("RealVideo 3.0"); case 4: return g_strdup ("RealVideo 4.0"); default: break; } GST_WARNING ("Unexpected rmversion in %" GST_PTR_FORMAT, caps); return g_strdup ("RealVideo"); } else if (strcmp (info->type, "video/mpeg") == 0) { gboolean sysstream; gint ver = 0; if (!gst_structure_get_boolean (s, "systemstream", &sysstream)) { GST_WARNING ("Missing systemstream field in mpeg video caps " "%" GST_PTR_FORMAT, caps); sysstream = FALSE; } if (gst_structure_get_int (s, "mpegversion", &ver) && ver > 0 && ver <= 4) { if (sysstream) { return g_strdup_printf ("MPEG-%d System Stream", ver); } else { const gchar *profile = gst_structure_get_string (s, "profile"); if (profile != NULL) { if (ver == 4) profile = pbutils_desc_get_mpeg4v_profile_name_from_nick (profile); else if (ver == 2) profile = pbutils_desc_get_mpeg2v_profile_name_from_nick (profile); else profile = NULL; } if (profile != NULL) return g_strdup_printf ("MPEG-%d Video (%s Profile)", ver, profile); else return g_strdup_printf ("MPEG-%d Video", ver); } } GST_WARNING ("Missing mpegversion field in mpeg video caps " "%" GST_PTR_FORMAT, caps); return g_strdup ("MPEG Video"); } else if (strcmp (info->type, "audio/x-raw") == 0) { gint depth = 0; gboolean is_float; const gchar *str; GstAudioFormat format; const GstAudioFormatInfo *finfo; str = gst_structure_get_string (s, "format"); format = gst_audio_format_from_string (str); if (format == GST_AUDIO_FORMAT_UNKNOWN) return g_strdup (_("Uncompressed audio")); finfo = gst_audio_format_get_info (format); depth = GST_AUDIO_FORMAT_INFO_DEPTH (finfo); is_float = GST_AUDIO_FORMAT_INFO_IS_FLOAT (finfo); return g_strdup_printf (_("Raw %d-bit %s audio"), depth, is_float ? 
"floating-point" : "PCM"); } else if (strcmp (info->type, "video/x-tscc") == 0) { gint version; gst_structure_get_int (s, "tsccversion", &version); switch (version) { case 1: return g_strdup ("TechSmith Screen Capture 1"); case 2: return g_strdup ("TechSmith Screen Capture 2"); default: break; } GST_WARNING ("Unexpected version in %" GST_PTR_FORMAT, caps); return g_strdup ("TechSmith Screen Capture"); } return NULL; }
/* Try to connect a newly exposed pad to the global audiosink, either
 * directly or by instantiating a decoder element from the global
 * 'factories' list whose sink template can accept the pad's caps.
 * Uses the deprecated GStreamer 0.10 gst_element_get_pad() API, which
 * returns a reference that is never released here (see FIXME below). */
static void
try_to_plug (GstPad *pad, const GstCaps *caps)
{
  GstObject *parent = GST_OBJECT (GST_OBJECT_PARENT (pad));
  const gchar *mime;
  const GList *item;
  GstCaps *res, *audiocaps;

  /* don't plug if we're already plugged - FIXME: memleak for pad */
  if (GST_PAD_IS_LINKED (gst_element_get_pad (audiosink, "sink"))) {
    g_print ("Omitting link for pad %s:%s because we're already linked\n",
        GST_OBJECT_NAME (parent), GST_OBJECT_NAME (pad));
    return;
  }

  /* as said above, we only try to plug audio... Omit video */
  mime = gst_structure_get_name (gst_caps_get_structure (caps, 0));
  if (g_strrstr (mime, "video")) {
    g_print ("Omitting link for pad %s:%s because mimetype %s is non-audio\n",
        GST_OBJECT_NAME (parent), GST_OBJECT_NAME (pad), mime);
    return;
  }

  /* can it link to the audiopad? */
  audiocaps = gst_pad_get_caps (gst_element_get_pad (audiosink, "sink"));
  res = gst_caps_intersect (caps, audiocaps);
  if (res && !gst_caps_is_empty (res)) {
    g_print ("Found pad to link to audiosink - plugging is now done\n");
    close_link (pad, audiosink, "sink", NULL);
    gst_caps_unref (audiocaps);
    gst_caps_unref (res);
    return;
  }
  /* NOTE(review): the unconditional unref of 'res' assumes
   * gst_caps_intersect() never returns NULL in 0.10 — confirm */
  gst_caps_unref (audiocaps);
  gst_caps_unref (res);

  /* try to plug from our list */
  for (item = factories; item != NULL; item = item->next) {
    GstElementFactory *factory = GST_ELEMENT_FACTORY (item->data);
    const GList *pads;

    for (pads = gst_element_factory_get_static_pad_templates (factory);
        pads != NULL; pads = pads->next) {
      GstStaticPadTemplate *templ = pads->data;

      /* find the sink template - need an always pad*/
      if (templ->direction != GST_PAD_SINK ||
          templ->presence != GST_PAD_ALWAYS) {
        continue;
      }

      /* can it link? */
      res = gst_caps_intersect (caps, gst_static_caps_get (&templ->static_caps));
      if (res && !gst_caps_is_empty (res)) {
        GstElement *element;
        gchar *name_template = g_strdup (templ->name_template);

        /* close link and return */
        gst_caps_unref (res);
        element = gst_element_factory_create (factory, NULL);
        close_link (pad, element, name_template,
            gst_element_factory_get_static_pad_templates (factory));
        g_free (name_template);
        return;
      }
      gst_caps_unref (res);

      /* we only check one sink template per factory, so move on to the
       * next factory now */
      break;
    }
  }

  /* if we get here, no item was found */
  g_print ("No compatible pad found to decode %s on %s:%s\n",
      mime, GST_OBJECT_NAME (parent), GST_OBJECT_NAME (pad));
}
static gboolean
gst_mpg123_audio_dec_set_format (GstAudioDecoder * dec, GstCaps * input_caps)
{
/* Using the parsed information upstream, and the list of allowed caps
 * downstream, this code tries to find a suitable audio info. It is important
 * to keep in mind that the rate and number of channels should never deviate
 * from the one the bitstream has, otherwise mpg123 has to mix channels and/or
 * resample (and as its docs say, its internal resampler is very crude). The
 * sample format, however, can be chosen freely, because the MPEG specs do not
 * mandate any special format. Therefore, rate and number of channels are taken
 * from upstream (which parsed the MPEG frames, so the input_caps contain
 * exactly the rate and number of channels the bitstream actually has), while
 * the sample format is chosen by trying out all caps that are allowed by
 * downstream. This way, the output is adjusted to what the downstream prefers.
 *
 * Also, the new output audio info is not set immediately. Instead, it is
 * considered the "next audioinfo". The code waits for mpg123 to notice the new
 * format (= when mpg123_decode_frame() returns MPG123_AUDIO_DEC_NEW_FORMAT),
 * and then sets the next audioinfo. Otherwise, the next audioinfo is set too
 * soon, which may cause problems with mp3s containing several format headers.
 * One example would be an mp3 with the first 30 seconds using 44.1 kHz, then
 * the next 30 seconds using 32 kHz. Rare, but possible.
 *
 * STEPS:
 *
 * 1. get rate and channels from input_caps
 * 2. get allowed caps from src pad
 * 3. for each structure in allowed caps:
 * 3.1. take format
 * 3.2. if the combination of format with rate and channels is unsupported by
 *      mpg123, go to (3), or exit with error if there are no more structures
 *      to try
 * 3.3. create next audioinfo out of rate,channels,format, and exit
 */
  int rate, channels;
  GstMpg123AudioDec *mpg123_decoder;
  GstCaps *allowed_srccaps;
  guint structure_nr;
  gboolean match_found = FALSE;

  mpg123_decoder = GST_MPG123_AUDIO_DEC (dec);

  /* the handle must have been created in start() before caps arrive */
  g_assert (mpg123_decoder->handle != NULL);

  /* invalidate any previously pending format until a new match is found */
  mpg123_decoder->has_next_audioinfo = FALSE;

  /* Get rate and channels from input_caps */
  {
    GstStructure *structure;
    gboolean err = FALSE;

    /* Only the first structure is used (multiple
     * input caps structures don't make sense) */
    structure = gst_caps_get_structure (input_caps, 0);

    if (!gst_structure_get_int (structure, "rate", &rate)) {
      err = TRUE;
      GST_ERROR_OBJECT (dec, "Input caps do not have a rate value");
    }
    if (!gst_structure_get_int (structure, "channels", &channels)) {
      err = TRUE;
      GST_ERROR_OBJECT (dec, "Input caps do not have a channel value");
    }

    if (err)
      return FALSE;
  }

  /* Get the caps that are allowed by downstream; normalized so each
   * structure carries exactly one concrete format value */
  {
    GstCaps *allowed_srccaps_unnorm =
        gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
    allowed_srccaps = gst_caps_normalize (allowed_srccaps_unnorm);
  }

  /* Go through all allowed caps, pick the first one that matches */
  for (structure_nr = 0; structure_nr < gst_caps_get_size (allowed_srccaps);
      ++structure_nr) {
    GstStructure *structure;
    gchar const *format_str;
    GstAudioFormat format;
    int encoding;

    structure = gst_caps_get_structure (allowed_srccaps, structure_nr);

    format_str = gst_structure_get_string (structure, "format");
    if (format_str == NULL) {
      GST_DEBUG_OBJECT (dec, "Could not get format from src caps");
      continue;
    }

    format = gst_audio_format_from_string (format_str);
    if (format == GST_AUDIO_FORMAT_UNKNOWN) {
      GST_DEBUG_OBJECT (dec, "Unknown format %s", format_str);
      continue;
    }

    /* map the GStreamer sample format to the mpg123 encoding constant */
    switch (format) {
      case GST_AUDIO_FORMAT_S16:
        encoding = MPG123_ENC_SIGNED_16;
        break;
      case GST_AUDIO_FORMAT_S24:
        encoding = MPG123_ENC_SIGNED_24;
        break;
      case GST_AUDIO_FORMAT_S32:
        encoding = MPG123_ENC_SIGNED_32;
        break;
      case GST_AUDIO_FORMAT_U16:
        encoding = MPG123_ENC_UNSIGNED_16;
        break;
      case GST_AUDIO_FORMAT_U24:
        encoding = MPG123_ENC_UNSIGNED_24;
        break;
      case GST_AUDIO_FORMAT_U32:
        encoding = MPG123_ENC_UNSIGNED_32;
        break;
      case GST_AUDIO_FORMAT_F32:
        encoding = MPG123_ENC_FLOAT_32;
        break;
      default:
        GST_DEBUG_OBJECT (dec,
            "Format %s in srccaps is not supported", format_str);
        continue;
    }

    {
      int err;

      /* Cleanup old formats & set new one; mpg123_format_none() must be
       * called first so only the one chosen combination is enabled */
      mpg123_format_none (mpg123_decoder->handle);
      err = mpg123_format (mpg123_decoder->handle, rate, channels, encoding);
      if (err != MPG123_OK) {
        GST_DEBUG_OBJECT (dec,
            "mpg123 cannot use caps %" GST_PTR_FORMAT
            " because mpg123_format() failed: %s", structure,
            mpg123_strerror (mpg123_decoder->handle));
        continue;
      }
    }

    /* record the pending audio info; it is applied later, once mpg123
     * reports the format change (see function comment above) */
    gst_audio_info_init (&(mpg123_decoder->next_audioinfo));
    gst_audio_info_set_format (&(mpg123_decoder->next_audioinfo), format,
        rate, channels, NULL);
    GST_LOG_OBJECT (dec, "The next audio format is: %s, %u Hz, %u channels",
        format_str, rate, channels);
    mpg123_decoder->has_next_audioinfo = TRUE;

    match_found = TRUE;

    break;
  }

  gst_caps_unref (allowed_srccaps);

  return match_found;
}
/* Verify that gst_caps_do_simplify() collapses 'non_simple_caps_string'
 * into exactly two structures (one RGB, one YUV) with the expected
 * merged ranges and format lists. */
void test_simplify()
{
  GstStructure *s1, *s2;
  gboolean did_simplify;
  GstCaps *caps;

  caps = gst_caps_from_string (non_simple_caps_string);
  fail_unless (caps != NULL,
      "gst_caps_from_string (non_simple_caps_string) failed");

  did_simplify = gst_caps_do_simplify (caps);
  fail_unless (did_simplify == TRUE,
      "gst_caps_do_simplify() should have worked");

  /* check simplified caps, should be:
   *
   * video/x-raw-rgb, bpp=(int)8, depth=(int)8, endianness=(int)1234,
   *     framerate=(fraction)[ 1/100, 100 ], width=(int)[ 16, 4096 ],
   *     height=(int)[ 16, 4096 ];
   * video/x-raw-yuv, format=(fourcc){ YV12, YUY2, I420 },
   *     width=(int)[ 16, 4096 ], height=(int)[ 16, 4096 ],
   *     framerate=(fraction)[ 1/100, 100 ]
   */
  fail_unless (gst_caps_get_size (caps) == 2);
  s1 = gst_caps_get_structure (caps, 0);
  s2 = gst_caps_get_structure (caps, 1);
  fail_unless (s1 != NULL);
  fail_unless (s2 != NULL);

  /* simplification does not guarantee structure order; swap so that
   * s1 is always the RGB structure */
  if (!gst_structure_has_name (s1, "video/x-raw-rgb")) {
    GstStructure *tmp;

    tmp = s1;
    s1 = s2;
    s2 = tmp;
  }

  fail_unless (gst_structure_has_name (s1, "video/x-raw-rgb"));
  {
    const GValue *framerate_value;
    const GValue *width_value;
    const GValue *height_value;
    const GValue *val_fps;
    GValue test_fps = { 0, };
    gint bpp, depth, endianness;
    gint min_width, max_width;
    gint min_height, max_height;

    /* fixed scalar fields of the RGB structure */
    fail_unless (gst_structure_get_int (s1, "bpp", &bpp));
    fail_unless (bpp == 8);

    fail_unless (gst_structure_get_int (s1, "depth", &depth));
    fail_unless (depth == 8);

    fail_unless (gst_structure_get_int (s1, "endianness", &endianness));
    fail_unless (endianness == G_LITTLE_ENDIAN);

    /* framerate must have been merged into the range [1/100, 100/1] */
    g_value_init (&test_fps, GST_TYPE_FRACTION);
    framerate_value = gst_structure_get_value (s1, "framerate");
    fail_unless (framerate_value != NULL);
    fail_unless (GST_VALUE_HOLDS_FRACTION_RANGE (framerate_value));

    val_fps = gst_value_get_fraction_range_min (framerate_value);
    gst_value_set_fraction (&test_fps, 1, 100);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);
    val_fps = gst_value_get_fraction_range_max (framerate_value);
    gst_value_set_fraction (&test_fps, 100, 1);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);

    g_value_unset (&test_fps);

    /* width and height must have been merged into [16, 4096] */
    width_value = gst_structure_get_value (s1, "width");
    fail_unless (width_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (width_value));
    min_width = gst_value_get_int_range_min (width_value);
    max_width = gst_value_get_int_range_max (width_value);
    fail_unless (min_width == 16 && max_width == 4096);

    height_value = gst_structure_get_value (s1, "height");
    fail_unless (height_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (height_value));
    min_height = gst_value_get_int_range_min (height_value);
    max_height = gst_value_get_int_range_max (height_value);
    fail_unless (min_height == 16 && max_height == 4096);
  }

  fail_unless (gst_structure_has_name (s2, "video/x-raw-yuv"));
  {
    const GValue *framerate_value;
    const GValue *format_value;
    const GValue *width_value;
    const GValue *height_value;
    const GValue *val_fps;
    GValue test_fps = { 0, };
    gint min_width, max_width;
    gint min_height, max_height;

    /* format must be a list of the three expected fourccs */
    format_value = gst_structure_get_value (s2, "format");
    fail_unless (format_value != NULL);
    fail_unless (GST_VALUE_HOLDS_LIST (format_value));
    fail_unless (gst_value_list_get_size (format_value) == 3);
    fail_unless (check_fourcc_list (format_value) == TRUE);

    /* same merged framerate range as the RGB structure */
    g_value_init (&test_fps, GST_TYPE_FRACTION);
    framerate_value = gst_structure_get_value (s2, "framerate");
    fail_unless (framerate_value != NULL);
    fail_unless (GST_VALUE_HOLDS_FRACTION_RANGE (framerate_value));

    val_fps = gst_value_get_fraction_range_min (framerate_value);
    gst_value_set_fraction (&test_fps, 1, 100);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);
    val_fps = gst_value_get_fraction_range_max (framerate_value);
    gst_value_set_fraction (&test_fps, 100, 1);
    fail_unless (gst_value_compare (&test_fps, val_fps) == GST_VALUE_EQUAL);

    g_value_unset (&test_fps);

    /* same merged width/height ranges as the RGB structure */
    width_value = gst_structure_get_value (s2, "width");
    fail_unless (width_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (width_value));
    min_width = gst_value_get_int_range_min (width_value);
    max_width = gst_value_get_int_range_max (width_value);
    fail_unless (min_width == 16 && max_width == 4096);

    height_value = gst_structure_get_value (s2, "height");
    fail_unless (height_value != NULL);
    fail_unless (GST_VALUE_HOLDS_INT_RANGE (height_value));
    min_height = gst_value_get_int_range_min (height_value);
    max_height = gst_value_get_int_range_max (height_value);
    fail_unless (min_height == 16 && max_height == 4096);
  }

  gst_caps_unref (caps);
}
static gboolean sink_setcaps (GstPad *pad, GstCaps *caps) { GstOmxBaseVideoEnc *self; GstOmxBaseFilter *omx_base; GstQuery *query; GstVideoFormat format; gint width, height, rowstride; const GValue *framerate = NULL; self = GST_OMX_BASE_VIDEOENC (GST_PAD_PARENT (pad)); omx_base = GST_OMX_BASE_FILTER (self); GST_INFO_OBJECT (omx_base, "setcaps (sink): %" GST_PTR_FORMAT, caps); g_return_val_if_fail (caps, FALSE); g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE); framerate = gst_structure_get_value ( gst_caps_get_structure (caps, 0), "framerate"); if (framerate) { omx_base->duration = gst_util_uint64_scale_int(GST_SECOND, gst_value_get_fraction_denominator (framerate), gst_value_get_fraction_numerator (framerate)); GST_DEBUG_OBJECT (self, "Nominal frame duration =%"GST_TIME_FORMAT, GST_TIME_ARGS (omx_base->duration)); } if (gst_video_format_parse_caps_strided (caps, &format, &width, &height, &rowstride)) { /* Output port configuration: */ OMX_PARAM_PORTDEFINITIONTYPE param; G_OMX_PORT_GET_DEFINITION (omx_base->in_port, ¶m); param.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar ; param.format.video.nFrameWidth = width; param.format.video.nFrameHeight = height; if (!rowstride) rowstride = (width + 15) & 0xFFFFFFF0; param.format.video.nStride = self->rowstride = rowstride; if (framerate) { self->framerate_num = gst_value_get_fraction_numerator (framerate); self->framerate_denom = gst_value_get_fraction_denominator (framerate); /* convert to Q.16 */ param.format.video.xFramerate = (gst_value_get_fraction_numerator (framerate) << 16) / gst_value_get_fraction_denominator (framerate); } G_OMX_PORT_SET_DEFINITION (omx_base->out_port, ¶m); } return TRUE; }
/* Exercise gst_caps_intersect() in both argument orders for four cases:
 * missing fields, conflicting values, different media types, and
 * complementary fields. */
void test_intersect()
{
  GstStructure *s;
  GstCaps *c1, *c2, *ci1, *ci2;

  /* field not specified = any value possible, so the intersection
   * should keep fields which are only part of one set of caps */
  c2 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420,width=20");
  c1 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420");

  ci1 = gst_caps_intersect (c2, c1);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci1);
  fail_unless (gst_caps_get_size (ci1) == 1, NULL);
  s = gst_caps_get_structure (ci1, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw-yuv"));
  fail_unless (gst_structure_get_value (s, "format") != NULL);
  fail_unless (gst_structure_get_value (s, "width") != NULL);

  /* with changed order */
  ci2 = gst_caps_intersect (c1, c2);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci2);
  fail_unless (gst_caps_get_size (ci2) == 1, NULL);
  s = gst_caps_get_structure (ci2, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw-yuv"));
  fail_unless (gst_structure_get_value (s, "format") != NULL);
  fail_unless (gst_structure_get_value (s, "width") != NULL);

  /* intersection must be commutative */
  fail_unless (gst_caps_is_equal (ci1, ci2));

  gst_caps_unref (ci1);
  gst_caps_unref (ci2);
  gst_caps_unref (c1);
  gst_caps_unref (c2);

  /* ========== conflicting widths: intersection must be empty */
  c2 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420,width=20");
  c1 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420,width=30");

  ci1 = gst_caps_intersect (c2, c1);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci1);
  fail_unless (gst_caps_is_empty (ci1), NULL);

  /* with changed order */
  ci2 = gst_caps_intersect (c1, c2);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci2);
  fail_unless (gst_caps_is_empty (ci2), NULL);
  fail_unless (gst_caps_is_equal (ci1, ci2));

  gst_caps_unref (ci1);
  gst_caps_unref (ci2);
  gst_caps_unref (c1);
  gst_caps_unref (c2);

  /* ========== different media types: intersection must be empty */
  c2 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420,width=20");
  c1 = gst_caps_from_string ("video/x-raw-rgb,format=(fourcc)I420,width=20");

  ci1 = gst_caps_intersect (c2, c1);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci1);
  fail_unless (gst_caps_is_empty (ci1), NULL);

  /* with changed order */
  ci2 = gst_caps_intersect (c1, c2);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci2);
  fail_unless (gst_caps_is_empty (ci2), NULL);
  fail_unless (gst_caps_is_equal (ci1, ci2));

  gst_caps_unref (ci1);
  gst_caps_unref (ci2);
  gst_caps_unref (c1);
  gst_caps_unref (c2);

  /* ========== complementary fields (width vs height) must both survive */
  c2 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420,width=20");
  c1 = gst_caps_from_string ("video/x-raw-yuv,format=(fourcc)I420,height=30");

  ci1 = gst_caps_intersect (c2, c1);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci1);
  fail_unless (gst_caps_get_size (ci1) == 1, NULL);
  s = gst_caps_get_structure (ci1, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw-yuv"));
  fail_unless (gst_structure_get_value (s, "format") != NULL);
  fail_unless (gst_structure_get_value (s, "width") != NULL);
  fail_unless (gst_structure_get_value (s, "height") != NULL);

  /* with changed order */
  ci2 = gst_caps_intersect (c1, c2);
  GST_DEBUG ("intersected: %" GST_PTR_FORMAT, ci2);
  fail_unless (gst_caps_get_size (ci2) == 1, NULL);
  s = gst_caps_get_structure (ci2, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw-yuv"));
  fail_unless (gst_structure_get_value (s, "format") != NULL);
  fail_unless (gst_structure_get_value (s, "height") != NULL);
  fail_unless (gst_structure_get_value (s, "width") != NULL);
  fail_unless (gst_caps_is_equal (ci1, ci2));

  gst_caps_unref (ci1);
  gst_caps_unref (ci2);
  gst_caps_unref (c1);
  gst_caps_unref (c2);
}
/* Sink setcaps handler for the RDT depayloader: extracts the clock rate,
 * the RealMedia header buffer ("config") and the optional NPT/trick-mode
 * fields, then pushes fixed realmedia caps on the source pad.
 * Returns FALSE if the mandatory "config" header is absent. */
static gboolean
gst_rdt_depay_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure;
  GstRDTDepay *rdtdepay;
  GstCaps *srccaps;
  gint clock_rate = 1000;       /* default */
  const GValue *value;
  GstBuffer *header;

  rdtdepay = GST_RDT_DEPAY (GST_PAD_PARENT (pad));

  structure = gst_caps_get_structure (caps, 0);

  if (gst_structure_has_field (structure, "clock-rate"))
    gst_structure_get_int (structure, "clock-rate", &clock_rate);

  /* config contains the RealMedia header as a buffer. */
  value = gst_structure_get_value (structure, "config");
  if (!value)
    goto no_header;

  header = gst_value_get_buffer (value);
  if (!header)
    goto no_header;

  /* get other values for newsegment */
  value = gst_structure_get_value (structure, "npt-start");
  if (value && G_VALUE_HOLDS_UINT64 (value))
    rdtdepay->npt_start = g_value_get_uint64 (value);
  else
    rdtdepay->npt_start = 0;
  GST_DEBUG_OBJECT (rdtdepay, "NPT start %" G_GUINT64_FORMAT,
      rdtdepay->npt_start);

  /* npt-stop defaults to -1 (unknown/open-ended) */
  value = gst_structure_get_value (structure, "npt-stop");
  if (value && G_VALUE_HOLDS_UINT64 (value))
    rdtdepay->npt_stop = g_value_get_uint64 (value);
  else
    rdtdepay->npt_stop = -1;

  GST_DEBUG_OBJECT (rdtdepay, "NPT stop %" G_GUINT64_FORMAT,
      rdtdepay->npt_stop);

  /* trick-mode parameters default to normal playback */
  value = gst_structure_get_value (structure, "play-speed");
  if (value && G_VALUE_HOLDS_DOUBLE (value))
    rdtdepay->play_speed = g_value_get_double (value);
  else
    rdtdepay->play_speed = 1.0;

  value = gst_structure_get_value (structure, "play-scale");
  if (value && G_VALUE_HOLDS_DOUBLE (value))
    rdtdepay->play_scale = g_value_get_double (value);
  else
    rdtdepay->play_scale = 1.0;

  /* caps seem good, configure element */
  rdtdepay->clock_rate = clock_rate;

  /* set caps on pad and on header */
  srccaps = gst_caps_new_simple ("application/vnd.rn-realmedia", NULL);
  gst_pad_set_caps (rdtdepay->srcpad, srccaps);
  gst_caps_unref (srccaps);

  /* keep our own reference to the header, releasing any previous one */
  if (rdtdepay->header)
    gst_buffer_unref (rdtdepay->header);
  rdtdepay->header = gst_buffer_ref (header);

  return TRUE;

  /* ERRORS */
no_header:
  {
    GST_ERROR_OBJECT (rdtdepay, "no header found in caps, no 'config' field");
    return FALSE;
  }
}
/**
 * adapt_image_capture:
 * @self: camerasrc object
 * @in_caps: caps object that describes incoming image format
 *
 * Adjust capsfilters and crop according image capture caps if necessary.
 * The captured image format from video source might be different from
 * what application requested, so we can try to fix that in camerabin.
 *
 * Fix: base_crop_right was updated with '+=' while every other crop field
 * uses '=', so stale crop values accumulated across successive captures.
 */
static void
adapt_image_capture (GstWrapperCameraBinSrc * self, GstCaps * in_caps)
{
  GstBaseCameraBinSrc *bcamsrc = GST_BASE_CAMERA_SRC (self);
  GstStructure *in_st, *new_st, *req_st;
  gint in_width = 0, in_height = 0, req_width = 0, req_height = 0, crop = 0;
  gdouble ratio_w, ratio_h;
  GstCaps *filter_caps = NULL;

  GST_LOG_OBJECT (self, "in caps: %" GST_PTR_FORMAT, in_caps);
  GST_LOG_OBJECT (self, "requested caps: %" GST_PTR_FORMAT,
      self->image_capture_caps);

  in_st = gst_caps_get_structure (in_caps, 0);
  gst_structure_get_int (in_st, "width", &in_width);
  gst_structure_get_int (in_st, "height", &in_height);

  req_st = gst_caps_get_structure (self->image_capture_caps, 0);
  gst_structure_get_int (req_st, "width", &req_width);
  gst_structure_get_int (req_st, "height", &req_height);

  GST_INFO_OBJECT (self, "we requested %dx%d, and got %dx%d", req_width,
      req_height, in_width, in_height);

  new_st = gst_structure_copy (req_st);
  /* If new fields have been added, we need to copy them */
  gst_structure_foreach (in_st, copy_missing_fields, new_st);
  gst_structure_set (new_st, "width", G_TYPE_INT, in_width, "height",
      G_TYPE_INT, in_height, NULL);

  GST_LOG_OBJECT (self, "new image capture caps: %" GST_PTR_FORMAT, new_st);

  /* Crop if requested aspect ratio differs from incoming frame aspect ratio */
  if (self->src_zoom_crop) {

    ratio_w = (gdouble) in_width / req_width;
    ratio_h = (gdouble) in_height / req_height;

    if (ratio_w < ratio_h) {
      /* incoming frame is relatively taller: crop top and bottom */
      crop = in_height - (req_height * ratio_w);
      self->base_crop_top = crop / 2;
      self->base_crop_bottom = crop / 2;
    } else {
      /* incoming frame is relatively wider: crop left and right */
      crop = in_width - (req_width * ratio_h);
      self->base_crop_left = crop / 2;
      self->base_crop_right = crop / 2; /* was '+=': accumulated stale crop */
    }

    GST_INFO_OBJECT (self,
        "setting base crop: left:%d, right:%d, top:%d, bottom:%d",
        self->base_crop_left, self->base_crop_right, self->base_crop_top,
        self->base_crop_bottom);
    g_object_set (G_OBJECT (self->src_zoom_crop),
        "top", self->base_crop_top,
        "bottom", self->base_crop_bottom,
        "left", self->base_crop_left, "right", self->base_crop_right, NULL);
  }

  /* Update capsfilters */
  if (self->image_capture_caps) {
    gst_caps_unref (self->image_capture_caps);
  }
  self->image_capture_caps = gst_caps_new_full (new_st, NULL);
  set_capsfilter_caps (self, self->image_capture_caps);

  /* Adjust the capsfilter before crop and videoscale elements if necessary */
  if (in_width == bcamsrc->width && in_height == bcamsrc->height) {
    GST_DEBUG_OBJECT (self, "no adaptation with resolution needed");
  } else {
    GST_DEBUG_OBJECT (self,
        "changing %" GST_PTR_FORMAT " from %dx%d to %dx%d", self->src_filter,
        bcamsrc->width, bcamsrc->height, in_width, in_height);
    /* Apply the width and height to filter caps */
    g_object_get (G_OBJECT (self->src_filter), "caps", &filter_caps, NULL);
    filter_caps = gst_caps_make_writable (filter_caps);
    gst_caps_set_simple (filter_caps, "width", G_TYPE_INT, in_width, "height",
        G_TYPE_INT, in_height, NULL);
    g_object_set (G_OBJECT (self->src_filter), "caps", filter_caps, NULL);
    gst_caps_unref (filter_caps);
  }
}
/* Sink setcaps handler for the RTP AMR depayloader: determines NB/WB mode
 * from "encoding-name", parses the RFC 4867 payload-format options, and
 * sets the matching audio/AMR(-WB) caps on the source pad.  Only the
 * basic configuration (1 channel, nominal clock rate, octet-aligned,
 * no CRC/robust-sorting/interleaving) is accepted. */
static gboolean
gst_rtp_amr_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstCaps *srccaps;
  GstRtpAMRDepay *rtpamrdepay;
  const gchar *params;
  const gchar *str, *type;
  gint clock_rate, need_clock_rate;
  gboolean res;

  rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  /* figure out the mode first and set the clock rates */
  if ((str = gst_structure_get_string (structure, "encoding-name"))) {
    if (strcmp (str, "AMR") == 0) {
      rtpamrdepay->mode = GST_RTP_AMR_DP_MODE_NB;
      need_clock_rate = 8000;
      type = "audio/AMR";
    } else if (strcmp (str, "AMR-WB") == 0) {
      rtpamrdepay->mode = GST_RTP_AMR_DP_MODE_WB;
      need_clock_rate = 16000;
      type = "audio/AMR-WB";
    } else
      goto invalid_mode;
  } else
    goto invalid_mode;

  /* SDP option values arrive as strings; "1" means enabled */
  if (!(str = gst_structure_get_string (structure, "octet-align")))
    rtpamrdepay->octet_align = FALSE;
  else
    rtpamrdepay->octet_align = (atoi (str) == 1);

  if (!(str = gst_structure_get_string (structure, "crc")))
    rtpamrdepay->crc = FALSE;
  else
    rtpamrdepay->crc = (atoi (str) == 1);

  if (rtpamrdepay->crc) {
    /* crc mode implies octet aligned mode */
    rtpamrdepay->octet_align = TRUE;
  }

  if (!(str = gst_structure_get_string (structure, "robust-sorting")))
    rtpamrdepay->robust_sorting = FALSE;
  else
    rtpamrdepay->robust_sorting = (atoi (str) == 1);

  if (rtpamrdepay->robust_sorting) {
    /* robust_sorting mode implies octet aligned mode */
    rtpamrdepay->octet_align = TRUE;
  }

  if (!(str = gst_structure_get_string (structure, "interleaving")))
    rtpamrdepay->interleaving = FALSE;
  else
    rtpamrdepay->interleaving = (atoi (str) == 1);

  if (rtpamrdepay->interleaving) {
    /* interleaving mode implies octet aligned mode */
    rtpamrdepay->octet_align = TRUE;
  }

  if (!(params = gst_structure_get_string (structure, "encoding-params")))
    rtpamrdepay->channels = 1;
  else {
    rtpamrdepay->channels = atoi (params);
  }

  /* fall back to the mode's nominal clock rate when none is given */
  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = need_clock_rate;
  depayload->clock_rate = clock_rate;

  /* we require 1 channel, 8000 Hz, octet aligned, no CRC,
   * no robust sorting, no interleaving for now */
  if (rtpamrdepay->channels != 1)
    return FALSE;
  if (clock_rate != need_clock_rate)
    return FALSE;
  if (rtpamrdepay->octet_align != TRUE)
    return FALSE;
  if (rtpamrdepay->robust_sorting != FALSE)
    return FALSE;
  if (rtpamrdepay->interleaving != FALSE)
    return FALSE;

  srccaps = gst_caps_new_simple (type,
      "channels", G_TYPE_INT, rtpamrdepay->channels,
      "rate", G_TYPE_INT, clock_rate, NULL);
  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
  gst_caps_unref (srccaps);

  return res;

  /* ERRORS */
invalid_mode:
  {
    GST_ERROR_OBJECT (rtpamrdepay, "invalid encoding-name");
    return FALSE;
  }
}
/**
 * update_aspect_filter:
 * @self: camerasrc object
 * @new_caps: new caps of next buffers arriving to view finder sink element
 *
 * Updates aspect ratio capsfilter to maintain aspect ratio, if we need to
 * scale frames for showing them in view finder.
 *
 * NOTE: the entire body is currently compiled out (#if 0), so this
 * function is a no-op; the code is kept for reference only.
 */
static void
update_aspect_filter (GstWrapperCameraBinSrc * self, GstCaps * new_caps)
{
  // XXX why not instead add a preserve-aspect-ratio property to videoscale?
#if 0
  if (camera->flags & GST_CAMERABIN_FLAG_VIEWFINDER_SCALE) {
    GstCaps *sink_caps, *ar_caps;
    GstStructure *st;
    gint in_w = 0, in_h = 0, sink_w = 0, sink_h = 0, target_w = 0, target_h =
        0;
    gdouble ratio_w, ratio_h;
    GstPad *sink_pad;
    const GValue *range;

    sink_pad = gst_element_get_static_pad (camera->view_sink, "sink");

    if (sink_pad) {
      sink_caps = gst_pad_get_caps (sink_pad);
      gst_object_unref (sink_pad);
      if (sink_caps) {
        if (!gst_caps_is_any (sink_caps)) {
          GST_DEBUG_OBJECT (camera, "sink element caps %" GST_PTR_FORMAT,
              sink_caps);
          /* Get maximum resolution that view finder sink accepts */
          st = gst_caps_get_structure (sink_caps, 0);
          if (gst_structure_has_field_typed (st, "width", GST_TYPE_INT_RANGE)) {
            range = gst_structure_get_value (st, "width");
            sink_w = gst_value_get_int_range_max (range);
          }
          if (gst_structure_has_field_typed (st, "height", GST_TYPE_INT_RANGE)) {
            range = gst_structure_get_value (st, "height");
            sink_h = gst_value_get_int_range_max (range);
          }
          GST_DEBUG_OBJECT (camera, "sink element accepts max %dx%d", sink_w,
              sink_h);

          /* Get incoming frames' resolution */
          if (sink_h && sink_w) {
            st = gst_caps_get_structure (new_caps, 0);
            gst_structure_get_int (st, "width", &in_w);
            gst_structure_get_int (st, "height", &in_h);
            GST_DEBUG_OBJECT (camera, "new caps with %dx%d", in_w, in_h);
          }
        }
        gst_caps_unref (sink_caps);
      }
    }

    /* If we get bigger frames than view finder sink accepts, then we scale.
       If we scale we need to adjust aspect ratio capsfilter caps in order
       to maintain aspect ratio while scaling. */
    if (in_w && in_h && (in_w > sink_w || in_h > sink_h)) {
      ratio_w = (gdouble) sink_w / in_w;
      ratio_h = (gdouble) sink_h / in_h;

      if (ratio_w < ratio_h) {
        target_w = sink_w;
        target_h = (gint) (ratio_w * in_h);
      } else {
        target_w = (gint) (ratio_h * in_w);
        target_h = sink_h;
      }

      GST_DEBUG_OBJECT (camera, "setting %dx%d filter to maintain aspect ratio",
          target_w, target_h);
      ar_caps = gst_caps_copy (new_caps);
      gst_caps_set_simple (ar_caps, "width", G_TYPE_INT, target_w, "height",
          G_TYPE_INT, target_h, NULL);
    } else {
      GST_DEBUG_OBJECT (camera, "no scaling");
      ar_caps = new_caps;
    }

    GST_DEBUG_OBJECT (camera, "aspect ratio filter caps %" GST_PTR_FORMAT,
        ar_caps);
    g_object_set (G_OBJECT (camera->aspect_filter), "caps", ar_caps, NULL);
    if (ar_caps != new_caps)
      gst_caps_unref (ar_caps);
  }
#endif
}
static GstCaps * gst_cv_sobel_transform_caps (GstBaseTransform * trans, GstPadDirection dir, GstCaps * caps, GstCaps * filter) { GstCaps *to, *ret; GstCaps *templ; GstStructure *structure; GstPad *other; guint i; to = gst_caps_new_empty (); for (i = 0; i < gst_caps_get_size (caps); i++) { const GValue *v; GValue list = { 0, }; GValue val = { 0, }; structure = gst_structure_copy (gst_caps_get_structure (caps, i)); g_value_init (&list, GST_TYPE_LIST); g_value_init (&val, G_TYPE_STRING); g_value_set_string (&val, "GRAY8"); gst_value_list_append_value (&list, &val); g_value_unset (&val); g_value_init (&val, G_TYPE_STRING); #if G_BYTE_ORDER == G_BIG_ENDIAN g_value_set_string (&val, "GRAY16_BE"); #else g_value_set_string (&val, "GRAY16_LE"); #endif gst_value_list_append_value (&list, &val); g_value_unset (&val); v = gst_structure_get_value (structure, "format"); gst_value_list_merge (&val, v, &list); gst_structure_set_value (structure, "format", &val); g_value_unset (&val); g_value_unset (&list); gst_structure_remove_field (structure, "colorimetry"); gst_structure_remove_field (structure, "chroma-site"); gst_caps_append_structure (to, structure); } /* filter against set allowed caps on the pad */ other = (dir == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad; templ = gst_pad_get_pad_template_caps (other); ret = gst_caps_intersect (to, templ); gst_caps_unref (to); gst_caps_unref (templ); if (ret && filter) { GstCaps *intersection; intersection = gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST); gst_caps_unref (ret); ret = intersection; } return ret; }
/* set_format vfunc: chooses the JPEG 2000 container (JP2 / J2C / JPC) from
 * the downstream-allowed caps, selects the matching fill_image callback
 * and component count for the input video format, derives the JPEG 2000
 * sampling and colorspace strings, and configures the output state.
 * NOTE(review): gst_pad_get_allowed_caps() can return NULL/empty caps;
 * this path assumes a valid first structure — confirm upstream guards. */
static gboolean
gst_openjpeg_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstCaps *allowed_caps, *caps;
  GstStructure *s;
  const gchar *colorspace = NULL;
  GstJPEG2000Sampling sampling = GST_JPEG2000_SAMPLING_NONE;
  gint ncomps;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  /* pick the codec/container variant from the first downstream structure */
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  allowed_caps = gst_caps_truncate (allowed_caps);
  s = gst_caps_get_structure (allowed_caps, 0);
  if (gst_structure_has_name (s, "image/jp2")) {
    self->codec_format = OPJ_CODEC_JP2;
    self->is_jp2c = FALSE;
  } else if (gst_structure_has_name (s, "image/x-j2c")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = TRUE;
  } else if (gst_structure_has_name (s, "image/x-jpc")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = FALSE;
  } else {
    g_return_val_if_reached (FALSE);
  }

  /* choose the image-filling routine and component count per input format */
  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_AYUV:
      self->fill_image = fill_image_packed8_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      self->fill_image = fill_image_packed8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      self->fill_image = fill_image_planar16_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_YUV9:
      self->fill_image = fill_image_planar8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      self->fill_image = fill_image_planar8_1;
      ncomps = 1;
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      self->fill_image = fill_image_planar16_1;
      ncomps = 1;
      break;
    default:
      g_assert_not_reached ();
  }

  /* sampling */
  /* note: encoder re-orders channels so that alpha channel is encoded as the
     last channel */
  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_ARGB:
      sampling = GST_JPEG2000_SAMPLING_RGBA;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
    case GST_VIDEO_FORMAT_AYUV:
      sampling = GST_JPEG2000_SAMPLING_YBRA4444_EXT;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      sampling = GST_JPEG2000_SAMPLING_RGB;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444:
      sampling = GST_JPEG2000_SAMPLING_YBR444;
      break;
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_Y42B:
      sampling = GST_JPEG2000_SAMPLING_YBR422;
      break;
    case GST_VIDEO_FORMAT_YUV9:
      sampling = GST_JPEG2000_SAMPLING_YBR410;
      break;
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420:
      sampling = GST_JPEG2000_SAMPLING_YBR420;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      sampling = GST_JPEG2000_SAMPLING_GRAYSCALE;
      break;
    default:
      break;
  }

  /* map the pixel-format family to the JPEG 2000 colorspace string */
  if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV)) {
    colorspace = "sYUV";
  } else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB)) {
    colorspace = "sRGB";
  } else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY)) {
    colorspace = "GRAY";
  } else
    g_return_val_if_reached (FALSE);

  /* only advertise "sampling" when a known value was derived above */
  if (sampling != GST_JPEG2000_SAMPLING_NONE) {
    caps = gst_caps_new_simple (gst_structure_get_name (s),
        "colorspace", G_TYPE_STRING, colorspace,
        "sampling", G_TYPE_STRING, gst_jpeg2000_sampling_to_string (sampling),
        "num-components", G_TYPE_INT, ncomps, NULL);
  } else {
    caps = gst_caps_new_simple (gst_structure_get_name (s),
        "colorspace", G_TYPE_STRING, colorspace,
        "num-components", G_TYPE_INT, ncomps, NULL);
  }
  gst_caps_unref (allowed_caps);

  if (self->output_state)
    gst_video_codec_state_unref (self->output_state);
  self->output_state =
      gst_video_encoder_set_output_state (encoder, caps, state);
  gst_video_encoder_negotiate (GST_VIDEO_ENCODER (encoder));

  return TRUE;
}
/* decodebin "new-decoded-pad" callback: inspects the media type of the new
 * pad, builds a matching audio or video sink bin, adds it to the pipeline
 * (passed via @data), brings it to PAUSED and links the pad to it.
 * Pads with neither "audio" nor "video" in their caps name are ignored.
 *
 * FIX: the sink pad reference obtained from gst_element_get_pad() was leaked
 * on the link-failure path; it is now released there as well. */
static void
cb_newpad (GstElement * decodebin, GstPad * pad, gboolean last, gpointer data)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *sinkpad;
  GstElement *sink;
  GstElement *pipeline;
  const gchar *name;
  GstStateChangeReturn ret;
  GstPadLinkReturn lret;

  /* check media type */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (str);
  g_print ("name: %s\n", name);

  if (g_strrstr (name, "audio")) {
    sink = gen_audio_element ();
  } else if (g_strrstr (name, "video")) {
    sink = gen_video_element ();
  } else {
    sink = NULL;
  }
  gst_caps_unref (caps);

  if (sink) {
    pipeline = GST_ELEMENT_CAST (data);

    /* add new sink to the pipeline */
    gst_bin_add (GST_BIN_CAST (pipeline), sink);

    /* set the new sink to PAUSED as well */
    ret = gst_element_set_state (sink, GST_STATE_PAUSED);
    if (ret == GST_STATE_CHANGE_FAILURE)
      goto state_error;

    /* get the ghostpad of the sink bin */
    sinkpad = gst_element_get_pad (sink, "sink");

    /* link'n'play */
    lret = gst_pad_link (pad, sinkpad);
    if (lret != GST_PAD_LINK_OK)
      goto link_failed;

    gst_object_unref (sinkpad);
  }
  return;

  /* ERRORS */
state_error:
  {
    gst_bin_remove (GST_BIN_CAST (pipeline), sink);
    g_warning ("could not change state of new sink (%d)", ret);
    return;
  }
link_failed:
  {
    /* release the pad reference that was previously leaked here */
    gst_object_unref (sinkpad);
    g_warning ("could not link pad and sink (%d)", lret);
    return;
  }
}
/* Negotiate output caps for the v4l2 source.
 *
 * Intersects our pad caps with each of the peer's proposed caps (in peer
 * preference order), then — if the intersection holds several structures —
 * picks the smallest resolution that is at least as large as the peer's
 * first (preferred) resolution.  The survivor is truncated, fixated and,
 * if fixed, set on the source pad.
 *
 * Returns TRUE on success or when no negotiation is needed, FALSE when no
 * usable caps could be agreed on. */
static gboolean
gst_v4l2src_negotiate (GstBaseSrc * basesrc)
{
  GstCaps *thiscaps;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  gboolean result = FALSE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps */
  peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  if (peercaps && !gst_caps_is_any (peercaps)) {
    GstCaps *icaps = NULL;
    int i;

    /* Prefer the first caps we are compatible with that the peer proposed */
    for (i = 0; i < gst_caps_get_size (peercaps); i++) {
      /* get intersection of our caps with the i-th peer structure */
      GstCaps *ipcaps = gst_caps_copy_nth (peercaps, i);

      GST_DEBUG_OBJECT (basesrc, "peer: %" GST_PTR_FORMAT, ipcaps);

      icaps = gst_caps_intersect (thiscaps, ipcaps);
      gst_caps_unref (ipcaps);

      /* keep the first non-empty intersection */
      if (!gst_caps_is_empty (icaps))
        break;

      gst_caps_unref (icaps);
      icaps = NULL;
    }

    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, icaps);
    if (icaps) {
      /* If there are multiple intersections pick the one with the smallest
       * resolution strictly bigger then the first peer caps */
      if (gst_caps_get_size (icaps) > 1) {
        /* the peer's first structure carries its preferred resolution */
        GstStructure *s = gst_caps_get_structure (peercaps, 0);
        int best = 0;
        int twidth, theight;
        int width = G_MAXINT, height = G_MAXINT;

        if (gst_structure_get_int (s, "width", &twidth)
            && gst_structure_get_int (s, "height", &theight)) {

          /* Walk the structure backwards to get the first entry of the
           * smallest resolution bigger (or equal to) the preferred resolution)
           */
          for (i = gst_caps_get_size (icaps) - 1; i >= 0; i--) {
            GstStructure *is = gst_caps_get_structure (icaps, i);
            int w, h;

            if (gst_structure_get_int (is, "width", &w)
                && gst_structure_get_int (is, "height", &h)) {
              /* candidate must cover the preferred size but be no larger
               * than the best match found so far */
              if (w >= twidth && w <= width && h >= theight && h <= height) {
                width = w;
                height = h;
                best = i;
              }
            }
          }
        }

        caps = gst_caps_copy_nth (icaps, best);
        gst_caps_unref (icaps);
      } else {
        /* single intersection: take ownership directly */
        caps = icaps;
      }
    }
    gst_caps_unref (thiscaps);
    gst_caps_unref (peercaps);
  } else {
    /* no peer or peer have ANY caps, work with our own caps then */
    caps = thiscaps;
  }
  if (caps) {
    caps = gst_caps_make_writable (caps);
    gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
        result = TRUE;
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
/* Check whether the caps describe an AAC configuration different from what
 * the OMX component is currently configured with; TRUE means a port
 * reconfiguration (via gst_omx_aac_dec_set_format) is required.
 *
 * Mirrors the caps parsing done in gst_omx_aac_dec_set_format: mpegversion,
 * rate, channels and stream-format are all mandatory.
 *
 * NOTE(review): returns FALSE both for "no change" and for errors (missing
 * caps fields, component query failure) — callers treat failure as "no
 * change".  The stream-format checks are also one-directional: e.g. a switch
 * from MP2ADTS to an MPEG-4 format with matching rate/channels would not be
 * detected.  Presumably acceptable in practice — confirm against upstream. */
static gboolean
gst_omx_aac_dec_is_format_change (GstOMXAudioDec * dec, GstOMXPort * port,
    GstCaps * caps)
{
  GstOMXAACDec *self = GST_OMX_AAC_DEC (dec);
  OMX_AUDIO_PARAM_AACPROFILETYPE aac_param;
  OMX_ERRORTYPE err;
  GstStructure *s;
  gint rate, channels, mpegversion;
  const gchar *stream_format;

  /* query the component's current AAC configuration on this port */
  GST_OMX_INIT_STRUCT (&aac_param);
  aac_param.nPortIndex = port->index;

  err =
      gst_omx_component_get_parameter (dec->dec, OMX_IndexParamAudioAac,
      &aac_param);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Failed to get AAC parameters from component: %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  /* all of mpegversion/rate/channels/stream-format must be present */
  s = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (s, "mpegversion", &mpegversion) ||
      !gst_structure_get_int (s, "rate", &rate) ||
      !gst_structure_get_int (s, "channels", &channels)) {
    GST_ERROR_OBJECT (self, "Incomplete caps");
    return FALSE;
  }

  stream_format = gst_structure_get_string (s, "stream-format");
  if (!stream_format) {
    GST_ERROR_OBJECT (self, "Incomplete caps");
    return FALSE;
  }

  /* any channel-count or sample-rate mismatch forces a reconfiguration */
  if (aac_param.nChannels != channels)
    return TRUE;
  if (aac_param.nSampleRate != rate)
    return TRUE;

  /* MPEG-2 AAC is always ADTS-framed (see set_format) */
  if (mpegversion == 2
      && aac_param.eAACStreamFormat != OMX_AUDIO_AACStreamFormatMP2ADTS)
    return TRUE;
  /* MPEG-4: compare the component's current framing against the caps string */
  if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4ADTS &&
      strcmp (stream_format, "adts") != 0)
    return TRUE;
  if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatMP4LOAS &&
      strcmp (stream_format, "loas") != 0)
    return TRUE;
  if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatADIF &&
      strcmp (stream_format, "adif") != 0)
    return TRUE;
  if (aac_param.eAACStreamFormat == OMX_AUDIO_AACStreamFormatRAW &&
      strcmp (stream_format, "raw") != 0)
    return TRUE;

  return FALSE;
}
/* Accept new caps on the EGL sink: parse video format, size, framerate and
 * pixel-aspect-ratio, derive a window size that honours the display aspect
 * ratio, store everything on the sink and trigger window-id negotiation if
 * no window has been provided yet.  Returns FALSE on unusable caps. */
static gboolean
gst_egl_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstEGLSink *sink = GST_EGL_SINK (bsink);
  GstStructure *structure;
  GstVideoFormat fmt;
  gint video_width;
  gint video_height;
  gint required_buffers;
  gboolean have_fps, have_par;
  gint fps_num, fps_den;
  gint par_num, par_den;
  gint dpar_num, dpar_den;
  guint dar_num, dar_den;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  /* a client-installed callback takes over caps handling entirely */
  if (sink->set_caps_callback)
    return sink->set_caps_callback (caps, sink->client_data);

  /* refuse caps that demand more buffers than the EGL display can provide */
  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_get_int (structure, "num-buffers-required",
          &required_buffers)
      && required_buffers > GST_GL_DISPLAY_MAX_BUFFER_COUNT) {
    GST_WARNING ("num-buffers-required %d exceed max eglsink buffer count %d",
        required_buffers, GST_GL_DISPLAY_MAX_BUFFER_COUNT);
    return FALSE;
  }

  if (!gst_video_format_parse_caps (caps, &fmt, &video_width, &video_height))
    return FALSE;

  /* framerate and pixel-aspect-ratio are both mandatory */
  have_fps = gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  have_par = gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);
  if (!have_fps || !have_par)
    return FALSE;

  /* display pixel aspect ratio, defaulting to square pixels */
  if (sink->par) {
    dpar_num = gst_value_get_fraction_numerator (sink->par);
    dpar_den = gst_value_get_fraction_denominator (sink->par);
  } else {
    dpar_num = 1;
    dpar_den = 1;
  }

  if (!gst_video_calculate_display_ratio (&dar_num, &dar_den,
          video_width, video_height, par_num, par_den, dpar_num, dpar_den))
    return FALSE;

  /* derive window size from the display aspect ratio, preferring an exact
   * scale of one video dimension */
  if (video_height % dar_den == 0) {
    GST_DEBUG ("keeping video height");
    sink->window_width =
        (guint) gst_util_uint64_scale_int (video_height, dar_num, dar_den);
    sink->window_height = video_height;
  } else if (video_width % dar_num == 0) {
    GST_DEBUG ("keeping video width");
    sink->window_width = video_width;
    sink->window_height =
        (guint) gst_util_uint64_scale_int (video_width, dar_den, dar_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    sink->window_width =
        (guint) gst_util_uint64_scale_int (video_height, dar_num, dar_den);
    sink->window_height = video_height;
  }
  GST_DEBUG ("scaling to %dx%d", sink->window_width, sink->window_height);

  GST_VIDEO_SINK_WIDTH (sink) = video_width;
  GST_VIDEO_SINK_HEIGHT (sink) = video_height;
  sink->fps_n = fps_num;
  sink->fps_d = fps_den;
  sink->par_n = par_num;
  sink->par_d = par_den;

  /* ask the application for a window if none has been set yet */
  if (!sink->window_id && !sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (sink));

  return TRUE;
}