/* Appsink "new-sample" callback, invoked from a streaming thread whenever
 * the appsink has a sample ready.  user_data is whatever was passed to
 * g_signal_connect() and can carry application state across to this
 * callback.
 *
 * Returns GST_FLOW_OK on success (or when no sample was available) and
 * GST_FLOW_ERROR when the sample's caps cannot be parsed. */
GstFlowReturn
callback (GstElement * sink, void *user_data)
{
  GstSample *sample = NULL;

  /* Retrieve the buffer */
  g_signal_emit_by_name (sink, "pull-sample", &sample, NULL);
  if (sample) {
    /* we have a valid sample; do things with the image here */
    static guint framecount = 0;
    int pixel_data = -1;

    GstBuffer *buffer = gst_sample_get_buffer (sample);
    GstMapInfo info;            /* contains the actual image */
    if (gst_buffer_map (buffer, &info, GST_MAP_READ)) {
      GstVideoInfo *video_info = gst_video_info_new ();
      if (!gst_video_info_from_caps (video_info, gst_sample_get_caps (sample))) {
        /* Could not parse video info (should not happen) */
        g_warning ("Failed to parse video info");
        /* BUGFIX: release everything acquired so far; the original leaked
         * the buffer mapping, the video info and the sample on this path */
        gst_buffer_unmap (buffer, &info);
        gst_video_info_free (video_info);
        gst_sample_unref (sample);
        return GST_FLOW_ERROR;
      }

      /* Get the pixel value of the center pixel.
       * NOTE(review): finfo->bits is bits per component, so this stride is
       * only correct for single-component-per-pixel formats — confirm for
       * packed formats like BGRx. */
      int stride = video_info->finfo->bits / 8;
      unsigned int pixel_offset = video_info->width / 2 * stride +
          video_info->width * video_info->height / 2 * stride;

      /* this is only one pixel; when dealing with formats like BGRx the
       * full pixel consists of:
       *   pixel_offset   => B
       *   pixel_offset+1 => G
       *   pixel_offset+2 => R
       *   pixel_offset+3 => x */
      pixel_data = info.data[pixel_offset];

      gst_buffer_unmap (buffer, &info);
      gst_video_info_free (video_info);
    }

    GstClockTime timestamp = GST_BUFFER_PTS (buffer);
    g_print ("Captured frame %d, Pixel Value=%03d Timestamp=%" GST_TIME_FORMAT " \r",
        framecount, pixel_data, GST_TIME_ARGS (timestamp));
    framecount++;

    /* delete our reference so that gstreamer can handle the sample */
    gst_sample_unref (sample);
  }
  return GST_FLOW_OK;
}
/* GstBaseSink::propose_allocation vfunc: answers upstream ALLOCATION
 * queries.  When a pool is requested, a VLC video sink pool sized from the
 * caps is proposed; GstVideoMeta support is always advertised. */
static gboolean
gst_vlc_video_sink_propose_allocation( GstBaseSink* p_bsink,
    GstQuery* p_query )
{
    GstVlcVideoSink *p_vsink = GST_VLC_VIDEO_SINK( p_bsink );
    GstCaps *p_caps = NULL;
    gboolean b_need_pool = FALSE;

    gst_query_parse_allocation( p_query, &p_caps, &b_need_pool );
    if( p_caps == NULL )
    {
        msg_Err( p_vsink->p_dec, "no caps in allocation query" );
        return FALSE;
    }

    if( b_need_pool )
    {
        GstVideoInfo info;
        GstBufferPool *p_pool;
        gsize i_size;

        if( !gst_video_info_from_caps( &info, p_caps ))
        {
            msg_Err( p_vsink->p_dec, "invalid caps in allocation query" );
            return FALSE;
        }

        p_pool = (GstBufferPool*) gst_vlc_video_sink_create_pool( p_vsink,
                p_caps, info.size, 2 );
        if( p_pool == NULL )
        {
            msg_Err( p_vsink->p_dec, "failed to create the pool" );
            return FALSE;
        }

        i_size = GST_VIDEO_INFO_SIZE( &GST_VLC_VIDEO_POOL_CAST( p_pool )->info );

        /* we need at least 2 buffer because we hold on to the last one */
        gst_query_add_allocation_pool( p_query, p_pool, i_size, 2, 0 );
        gst_object_unref( p_pool );
    }

    /* we support various metadata */
    gst_query_add_allocation_meta( p_query, GST_VIDEO_META_API_TYPE, NULL );

    return TRUE;
}
/* Video sink pad event handler: tracks the video segment, EOS and caps,
 * waking up the waiting thread via self->cond where needed.  GAP events are
 * swallowed; everything else is forwarded with gst_pad_event_default(). */
static gboolean
gst_timecodewait_vsink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstTimeCodeWait *self = GST_TIMECODEWAIT (parent);

  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
      g_mutex_lock (&self->mutex);
      gst_event_copy_segment (event, &self->vsegment);
      if (self->vsegment.format != GST_FORMAT_TIME) {
        GST_ERROR_OBJECT (self, "Invalid segment format");
        g_mutex_unlock (&self->mutex);
        gst_event_unref (event);
        return FALSE;
      }
      self->vsegment.position = GST_CLOCK_TIME_NONE;
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_GAP:
      /* gaps carry no data we care about; drop them */
      gst_event_unref (event);
      return TRUE;
    case GST_EVENT_EOS:
      g_mutex_lock (&self->mutex);
      self->video_eos_flag = TRUE;
      g_cond_signal (&self->cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_FLUSH_START:
      g_mutex_lock (&self->mutex);
      gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
      /* BUGFIX: write the position while still holding the mutex; the
       * original stored it after unlocking, racing with the waiter */
      self->vsegment.position = GST_CLOCK_TIME_NONE;
      g_cond_signal (&self->cond);
      g_mutex_unlock (&self->mutex);
      break;
    case GST_EVENT_CAPS:{
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
      if (!gst_video_info_from_caps (&self->vinfo, caps)) {
        gst_event_unref (event);
        return FALSE;
      }
      g_mutex_lock (&self->mutex);
      /* a timecode parsed from a string has no framerate of its own; take
       * it from the negotiated video caps */
      if (self->from_string) {
        self->tc->config.fps_n = self->vinfo.fps_n;
        self->tc->config.fps_d = self->vinfo.fps_d;
      }
      g_mutex_unlock (&self->mutex);
      break;
    }
    default:
      break;
  }
  return gst_pad_event_default (pad, parent, event);
}
/* GstBaseSrc::set_caps vfunc: caches the negotiated caps as a
 * GstVideoInfo on the element.  Returns FALSE when the caps are not
 * parseable as raw video. */
static gboolean
gst_openni2_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstOpenni2Src *self = GST_OPENNI2_SRC (src);

  return gst_video_info_from_caps (&self->info, caps);
}
bool ofGstVideoPlayer::allocate(int bpp){ if(bIsAllocated) return true; guint64 durationNanos = videoUtils.getDurationNanos(); nFrames = 0; if(GstPad* pad = gst_element_get_static_pad(videoUtils.getSink(), "sink")){ #if GST_VERSION_MAJOR==0 int width,height; if(gst_video_get_size(GST_PAD(pad), &width, &height)){ if(!videoUtils.allocate(width,height,bpp)) return false; }else{ ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height"; return false; } const GValue *framerate = gst_video_frame_rate(pad); fps_n=0; fps_d=0; if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){ fps_n = gst_value_get_fraction_numerator (framerate); fps_d = gst_value_get_fraction_denominator (framerate); nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d; ofLogVerbose("ofGstVideoPlayer") << "allocate(): framerate: " << fps_n << "/" << fps_d; }else{ ofLogWarning("ofGstVideoPlayer") << "allocate(): cannot get framerate, frame seek won't work"; } bIsAllocated = true; #else if(GstCaps *caps = gst_pad_get_current_caps (GST_PAD (pad))){ GstVideoInfo info; gst_video_info_init (&info); if (gst_video_info_from_caps (&info, caps)){ if(!videoUtils.allocate(info.width,info.height,bpp)) return false; }else{ ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height"; return false; } fps_n = info.fps_n; fps_d = info.fps_d; nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d; gst_caps_unref(caps); bIsAllocated = true; }else{ ofLogError("ofGstVideoPlayer") << "allocate(): cannot get pipeline caps"; bIsAllocated = false; } #endif gst_object_unref(GST_OBJECT(pad)); }else{ ofLogError("ofGstVideoPlayer") << "allocate(): cannot get sink pad"; bIsAllocated = false; } return bIsAllocated; }
/* GstBufferPool::set_config vfunc: validates the configuration, requests
 * the matching number of V4L2 capture buffers from the driver via
 * VIDIOC_REQBUFS, stores the negotiated parameters in the pool, and chains
 * up to the parent class. */
static gboolean gst_imx_v4l2_buffer_pool_set_config(GstBufferPool *bpool, GstStructure *config)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	GstVideoInfo info;
	GstCaps *caps;
	gsize size;
	guint min, max;
	struct v4l2_requestbuffers req;

	if (!gst_buffer_pool_config_get_params(config, &caps, &size, &min, &max))
	{
		GST_ERROR_OBJECT(pool, "pool configuration invalid");
		return FALSE;
	}

	if (caps == NULL)
	{
		GST_ERROR_OBJECT(pool, "configuration contains no caps");
		return FALSE;
	}

	if (!gst_video_info_from_caps(&info, caps))
	{
		GST_ERROR_OBJECT(pool, "caps cannot be parsed for video info");
		return FALSE;
	}

	/* BUGFIX: size is a gsize and min/max are guint; the previous "%d"
	 * specifiers were undefined behavior where gsize != int (LP64) */
	GST_DEBUG_OBJECT(pool, "set_config: size %" G_GSIZE_FORMAT ", min %u, max %u", size, min, max);

	memset(&req, 0, sizeof(req));
	req.count = min;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_REQBUFS, &req) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_REQBUFS failed: %s", g_strerror(errno));
		return FALSE;
	}

	/* the driver may grant a different buffer count than requested */
	if (req.count != min)
	{
		min = req.count;
		GST_WARNING_OBJECT(pool, "using %u buffers", min);
	}

	pool->num_buffers = min;
	pool->video_info = info;
	pool->add_videometa = gst_buffer_pool_config_has_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);

	gst_buffer_pool_config_set_params(config, caps, size, min, max);

	return GST_BUFFER_POOL_CLASS(gst_imx_v4l2_buffer_pool_parent_class)->set_config(bpool, config);
}
/* GstBaseTransform::set_caps vfunc: parses input and output caps into
 * GstVideoInfo, lets the subclass inspect them via set_info, stores the
 * infos on success and records the negotiation result. */
static gboolean
gst_video_filter_set_caps (GstBaseTransform * trans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
  GstVideoFilterClass *klass;
  GstVideoInfo in_info, out_info;
  gboolean ret = TRUE;

  /* both caps must parse as raw video */
  if (!gst_video_info_from_caps (&in_info, incaps) ||
      !gst_video_info_from_caps (&out_info, outcaps)) {
    GST_ERROR_OBJECT (filter, "invalid caps");
    filter->negotiated = FALSE;
    return FALSE;
  }

  klass = GST_VIDEO_FILTER_GET_CLASS (filter);
  if (klass->set_info)
    ret = klass->set_info (filter, incaps, &in_info, outcaps, &out_info);

  if (ret) {
    filter->in_info = in_info;
    filter->out_info = out_info;
    /* no copy-transform: run in place; no in-place transform: disable
     * transform_ip on passthrough */
    if (klass->transform_frame == NULL)
      gst_base_transform_set_in_place (trans, TRUE);
    if (klass->transform_frame_ip == NULL)
      GST_BASE_TRANSFORM_CLASS (klass)->transform_ip_on_passthrough = FALSE;
  }

  filter->negotiated = ret;

  return ret;
}
/* Test helper: asserts @caps are fixed and parseable, then reports whether
 * they describe an interlaced video format. */
static gboolean
gst_caps_is_interlaced (GstCaps * caps)
{
  GstVideoInfo vinfo;

  fail_unless (gst_caps_is_fixed (caps));
  fail_unless (gst_video_info_from_caps (&vinfo, caps));

  return GST_VIDEO_INFO_IS_INTERLACED (&vinfo);
}
/* GstBaseTransform::set_caps vfunc: caches the input caps as video info. */
static gboolean
gst_yadif_set_caps (GstBaseTransform * trans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstYadif *yadif = GST_YADIF (trans);

  /* BUGFIX: propagate the parse result; the original returned TRUE even
   * when the caps could not be parsed, leaving video_info uninitialized */
  return gst_video_info_from_caps (&yadif->video_info, incaps);
}
// Extracts the GstVideoInfo described by a sample's caps.
// BUGFIX: the original returned an *uninitialized* struct when the sample
// had no caps (or when parsing failed); we now always start from a
// zero-initialized info so callers get sane defaults on error.
static GstVideoInfo getVideoInfo(GstSample * sample){
	GstVideoInfo vinfo;
	gst_video_info_init(&vinfo);
	GstCaps *caps = gst_sample_get_caps(sample);
	if(caps){
		if(!gst_video_info_from_caps(&vinfo, caps)){
			ofLogError() << "couldn't parse sample caps";
		}
	}else{
		ofLogError() << "couldn't get sample caps";
	}
	return vinfo;
}
/* Ensures plugin->sinkpad_allocator exists and matches @caps: a dmabuf
 * allocator when the upstream peer can consume dmabuf, otherwise a VA video
 * allocator (with direct upload enabled for raw caps).  On success *size is
 * set to the allocator's image size.  Returns TRUE on success. */
static gboolean
ensure_sinkpad_allocator (GstVaapiPluginBase * plugin, GstCaps * caps,
    guint * size)
{
  GstVideoInfo vinfo;
  const GstVideoInfo *image_info;
  GstVaapiImageUsageFlags usage_flag =
      GST_VAAPI_IMAGE_USAGE_FLAG_NATIVE_FORMATS;

  if (!gst_video_info_from_caps (&vinfo, caps))
    goto error_invalid_caps;

  /* NOTE(review): presumably reset_allocator() returns FALSE when the
   * current allocator can simply be reused for &vinfo — confirm; on FALSE
   * we skip straight to the final checks */
  if (!reset_allocator (plugin->sinkpad_allocator, &vinfo))
    goto bail;

  /* prefer dmabuf-backed memory when the upstream peer supports it */
  if (has_dmabuf_capable_peer (plugin, plugin->sinkpad)) {
    plugin->sinkpad_allocator =
        gst_vaapi_dmabuf_allocator_new (plugin->display, &vinfo,
        GST_VAAPI_SURFACE_ALLOC_FLAG_LINEAR_STORAGE, GST_PAD_SINK);
    goto bail;
  }

  /* enable direct upload if upstream requests raw video */
  if (gst_caps_is_video_raw (caps)) {
    usage_flag = GST_VAAPI_IMAGE_USAGE_FLAG_DIRECT_UPLOAD;
    GST_INFO_OBJECT (plugin, "enabling direct upload in sink allocator");
  }
  plugin->sinkpad_allocator =
      gst_vaapi_video_allocator_new (plugin->display, &vinfo, 0, usage_flag);

bail:
  if (!plugin->sinkpad_allocator)
    goto error_create_allocator;

  image_info =
      gst_allocator_get_vaapi_video_info (plugin->sinkpad_allocator, NULL);
  g_assert (image_info);        /* allocator ought set its image info */

  /* update the size with the one generated by the allocator */
  *size = GST_VIDEO_INFO_SIZE (image_info);

  return TRUE;

  /* ERRORS */
error_invalid_caps:
  {
    GST_ERROR_OBJECT (plugin, "invalid caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
error_create_allocator:
  {
    GST_ERROR_OBJECT (plugin, "failed to create sink pad's allocator");
    return FALSE;
  }
}
/* GstBaseSink::propose_allocation vfunc: answers upstream's ALLOCATION
 * query.  When no pool has been proposed yet, offers a GstVideoBufferPool
 * sized from the negotiated caps, with 16-byte-aligned allocation params
 * and GstVideoMeta support. */
static gboolean
gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
    GstQuery * query)
{
  GstCaps *caps;
  GstVideoInfo info;
  GstBufferPool *pool;
  guint size;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    return FALSE;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  if (gst_query_get_n_allocation_pools (query) == 0) {
    GstStructure *structure;
    GstAllocator *allocator = NULL;
    /* align = 15 → buffers aligned to 16 bytes */
    GstAllocationParams params = { (GstMemoryFlags) 0, 15, 0, 0 };

    /* reuse the first proposed allocator, or propose our default params */
    if (gst_query_get_n_allocation_params (query) > 0)
      gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    else
      gst_query_add_allocation_param (query, allocator, &params);

    pool = gst_video_buffer_pool_new ();

    structure = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
    gst_buffer_pool_config_set_allocator (structure, allocator, &params);

    if (allocator)
      gst_object_unref (allocator);

    if (!gst_buffer_pool_set_config (pool, structure))
      goto config_failed;

    gst_query_add_allocation_pool (query, pool, size, 0, 0);
    gst_object_unref (pool);
    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  }

  return TRUE;

  // ERRORS
config_failed:
  {
    GST_ERROR_OBJECT (bsink, "failed to set config");
    gst_object_unref (pool);
    return FALSE;
  }
}
/* GstBaseSink::propose_allocation vfunc: proposes a KMS buffer pool sized
 * from the caps when upstream asks for one, and advertises support for
 * GstVideoMeta and GstVideoCropMeta. */
static gboolean
gst_kms_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstKMSSink *self = GST_KMS_SINK (bsink);
  GstCaps *caps = NULL;
  gboolean need_pool = FALSE;
  GstVideoInfo vinfo;
  gsize size;

  gst_query_parse_allocation (query, &caps, &need_pool);
  if (caps == NULL) {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }

  if (!gst_video_info_from_caps (&vinfo, caps)) {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }

  size = GST_VIDEO_INFO_SIZE (&vinfo);

  if (need_pool) {
    GstBufferPool *pool = gst_kms_sink_create_pool (self, caps, size, 0);

    if (pool == NULL)
      return FALSE;             /* already warned in create_pool */

    /* we need at least 2 buffer because we hold on to the last one */
    gst_query_add_allocation_pool (query, pool, size, 2, 0);
    gst_object_unref (pool);
  }

  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE, NULL);

  return TRUE;
}
/* GstBaseSink::set_caps vfunc: parses the caps, remembers the video size
 * and swaps in a freshly configured wayland buffer pool for the new
 * format.  The previous pool is released once the new one is in place. */
static gboolean
gst_wayland_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstWaylandSink *sink = GST_WAYLAND_SINK (bsink);
  GstBufferPool *newpool, *oldpool;
  GstVideoInfo info;
  GstStructure *structure;
  /* NOTE(review): GstAllocationParams field order is {flags, align, prefix,
   * padding}; this sets padding=15, not align=15 — confirm intended */
  static GstAllocationParams params = { 0, 0, 0, 15, };
  guint size;

  /* BUGFIX: dropped a redundant second GST_WAYLAND_SINK(bsink) assignment */

  GST_LOG_OBJECT (sink, "set caps %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps))
    goto invalid_format;

  sink->video_width = info.width;
  sink->video_height = info.height;
  size = info.size;

  /* create a new pool for the new configuration */
  newpool = gst_wayland_buffer_pool_new (sink);
  if (!newpool) {
    GST_DEBUG_OBJECT (sink, "Failed to create new pool");
    return FALSE;
  }

  structure = gst_buffer_pool_get_config (newpool);
  gst_buffer_pool_config_set_params (structure, caps, size, 2, 0);
  gst_buffer_pool_config_set_allocator (structure, NULL, &params);
  if (!gst_buffer_pool_set_config (newpool, structure))
    goto config_failed;

  oldpool = sink->pool;
  sink->pool = newpool;
  if (oldpool)
    gst_object_unref (oldpool);

  return TRUE;

invalid_format:
  {
    GST_DEBUG_OBJECT (sink,
        "Could not locate image format from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
config_failed:
  {
    GST_DEBUG_OBJECT (bsink, "failed setting config");
    /* BUGFIX: don't leak the newly created pool */
    gst_object_unref (newpool);
    return FALSE;
  }
}
/* Test helper: exercises the element @name against every fixed caps
 * combination derived from the video caps template, at several resolutions
 * (including odd sizes to stress stride/padding handling).  @event, when
 * non-NULL, is re-refed for each run; @prop and @varargs are forwarded to
 * check_filter_caps() for g_object_set()-style configuration. */
static void
check_filter_varargs (const gchar * name, GstEvent * event, gint num_buffers,
    const gchar * prop, va_list varargs)
{
  static const struct {
    const int width, height;
  } resolutions[] = { {384, 288}, {385, 289}, {385, 385} };
  gint i, n, r;
  gint size;
  GstCaps *allcaps, *templ = gst_caps_from_string (VIDEO_CAPS_TEMPLATE_STRING);

  /* expand the template into a list of fixed structures */
  allcaps = gst_caps_normalize (templ);

  n = gst_caps_get_size (allcaps);

  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (allcaps, i);
    GstCaps *caps = gst_caps_new_empty ();

    gst_caps_append_structure (caps, gst_structure_copy (s));

    /* try various resolutions */
    for (r = 0; r < G_N_ELEMENTS (resolutions); ++r) {
      GstVideoInfo info;
      va_list args_cp;

      caps = gst_caps_make_writable (caps);
      gst_caps_set_simple (caps, "width", G_TYPE_INT, resolutions[r].width,
          "height", G_TYPE_INT, resolutions[r].height,
          "framerate", GST_TYPE_FRACTION, 25, 1, NULL);

      GST_DEBUG ("Testing with caps: %" GST_PTR_FORMAT, caps);

      /* frame size for these caps */
      gst_video_info_from_caps (&info, caps);
      size = GST_VIDEO_INFO_SIZE (&info);

      if (event)
        gst_event_ref (event);

      /* each run consumes the va_list, so hand out a fresh copy */
      va_copy (args_cp, varargs);
      check_filter_caps (name, event, caps, size, num_buffers, prop, args_cp);
      va_end (args_cp);
    }
    gst_caps_unref (caps);
  }
  gst_caps_unref (allcaps);

  if (event)
    gst_event_unref (event);
}
/* GstBaseTransform::propose_allocation vfunc for videorate.
 * videorate is always passthrough (decide_query must be NULL), so the query
 * is first forwarded downstream.  Because the element holds on to one extra
 * buffer for duplicating frames, each surviving pool's minimum is bumped by
 * one; fixed-size pools (min == max) cannot grow and are removed, tracking
 * the largest removed minimum so a replacement pool can be proposed. */
static gboolean
gst_video_rate_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
{
  GstBaseTransformClass *klass = GST_BASE_TRANSFORM_CLASS (parent_class);
  gboolean res;

  /* We should always be passthrough */
  g_return_val_if_fail (decide_query == NULL, FALSE);

  res = klass->propose_allocation (trans, NULL, query);

  if (res) {
    guint i = 0;
    guint n_allocation;
    guint down_min = 0;

    n_allocation = gst_query_get_n_allocation_pools (query);

    while (i < n_allocation) {
      GstBufferPool *pool = NULL;
      guint size, min, max;

      gst_query_parse_nth_allocation_pool (query, i, &pool, &size, &min, &max);

      if (min == max) {
        /* fixed-size pool: drop it, but remember the largest minimum in
         * case we have to propose a replacement pool below */
        if (pool)
          gst_object_unref (pool);
        gst_query_remove_nth_allocation_pool (query, i);
        n_allocation--;
        down_min = MAX (min, down_min);
        continue;
      }

      /* reserve one extra buffer for the frame we keep for duplication */
      gst_query_set_nth_allocation_pool (query, i, pool, size, min + 1, max);
      if (pool)
        gst_object_unref (pool);
      i++;
    }

    if (n_allocation == 0) {
      /* no usable pool survived: propose a default one with enough slack */
      GstCaps *caps;
      GstVideoInfo info;

      gst_query_parse_allocation (query, &caps, NULL);
      gst_video_info_from_caps (&info, caps);

      gst_query_add_allocation_pool (query, NULL, info.size, down_min + 1, 0);
    }
  }

  return res;
}
/* GstBaseSrc::set_caps vfunc: caches the negotiated caps as video info on
 * the element, then applies them to the source pad. */
static gboolean
gst_inter_video_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstInterVideoSrc *self = GST_INTER_VIDEO_SRC (src);

  GST_DEBUG_OBJECT (self, "set_caps");

  if (!gst_video_info_from_caps (&self->info, caps))
    return FALSE;

  return gst_pad_set_caps (src->srcpad, caps);
}
/* GstBaseTransform::get_unit_size vfunc: reports the frame size implied by
 * @caps, or FALSE when the caps cannot be parsed as raw video. */
static gboolean
gst_yadif_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    gsize * size)
{
  GstVideoInfo vinfo;

  if (!gst_video_info_from_caps (&vinfo, caps))
    return FALSE;

  *size = GST_VIDEO_INFO_SIZE (&vinfo);

  return TRUE;
}
/* GstVideoDecoder::set_format vfunc: maps the negotiated caps to a
 * CoreMedia codec type, (re)creates the CMFormatDescription and negotiates
 * the output format.  H.264 streams without codec_data are accepted but
 * deferred until the codec_data arrives. */
static gboolean
gst_vtdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstStructure *structure;
  CMVideoCodecType cm_format = 0;
  CMFormatDescriptionRef format_description = NULL;
  const char *caps_name;
  GstVtdec *vtdec = GST_VTDEC (decoder);

  GST_DEBUG_OBJECT (vtdec, "set_format");

  /* map the caps media type to the CoreMedia codec id; unknown types leave
   * cm_format at 0 */
  structure = gst_caps_get_structure (state->caps, 0);
  caps_name = gst_structure_get_name (structure);
  if (!strcmp (caps_name, "video/x-h264")) {
    cm_format = kCMVideoCodecType_H264;
  } else if (!strcmp (caps_name, "video/mpeg")) {
    cm_format = kCMVideoCodecType_MPEG2Video;
  } else if (!strcmp (caps_name, "image/jpeg")) {
    cm_format = kCMVideoCodecType_JPEG;
  }

  /* H.264 needs the avcC codec_data before a session can be created */
  if (cm_format == kCMVideoCodecType_H264 && state->codec_data == NULL) {
    GST_INFO_OBJECT (vtdec, "no codec data, wait for one");
    return TRUE;
  }

  /* any existing decode session is stale for the new format */
  if (vtdec->session)
    gst_vtdec_invalidate_session (vtdec);

  /* NOTE(review): return value deliberately unchecked — these are encoded
   * caps, which gst_video_info_from_caps() rejects; video_info presumably
   * gets fully populated during output negotiation below — confirm */
  gst_video_info_from_caps (&vtdec->video_info, state->caps);

  if (!gst_vtdec_compute_reorder_queue_length (vtdec, cm_format,
          state->codec_data))
    return FALSE;

  gst_vtdec_set_latency (vtdec);

  if (state->codec_data) {
    format_description = create_format_description_from_codec_data (vtdec,
        cm_format, state->codec_data);
  } else {
    format_description = create_format_description (vtdec, cm_format);
  }

  /* replace any previous format description */
  if (vtdec->format_description)
    CFRelease (vtdec->format_description);
  vtdec->format_description = format_description;

  if (!gst_vtdec_negotiate_output_format (vtdec, state))
    return FALSE;

  return TRUE;
}
/* GstBaseTransform::transform_size vfunc: the output buffer size depends
 * only on the other pad's caps, so derive it from othercaps directly. */
static gboolean gst_imx_blitter_video_transform_transform_size(G_GNUC_UNUSED GstBaseTransform *transform, G_GNUC_UNUSED GstPadDirection direction, G_GNUC_UNUSED GstCaps *caps, gsize size, GstCaps *othercaps, gsize *othersize)
{
	GstVideoInfo info;

	g_assert(size != 0);

	if (!gst_video_info_from_caps(&info, othercaps))
		return FALSE;

	*othersize = info.size;

	return TRUE;
}
/* GstBaseTransform::get_unit_size vfunc: reports the frame size implied by
 * @caps. */
static gboolean
gst_gl_filter_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    gsize * size)
{
  GstVideoInfo info;

  /* BUGFIX: the original returned TRUE unconditionally, reporting an
   * uninitialized *size when the caps could not be parsed */
  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  *size = GST_VIDEO_INFO_SIZE (&info);

  return TRUE;
}
/* GstBaseTransform::set_caps vfunc: validates input/output caps, informs
 * the blitter of the new input format, and caches both video infos together
 * with whether they are considered equal (used for passthrough decisions).
 * inout_info_set stays FALSE on any failure so other vfuncs know the cached
 * infos are invalid. */
static gboolean gst_imx_blitter_video_transform_set_caps(GstBaseTransform *transform, GstCaps *in, GstCaps *out)
{
	gboolean inout_info_equal;
	GstVideoInfo in_info, out_info;
	GstImxBlitterVideoTransform *blitter_video_transform = GST_IMX_BLITTER_VIDEO_TRANSFORM(transform);
	GstImxBlitterVideoTransformClass *klass = GST_IMX_BLITTER_VIDEO_TRANSFORM_CLASS(G_OBJECT_GET_CLASS(transform));

	g_assert(klass->are_video_infos_equal != NULL);
	g_assert(blitter_video_transform->blitter != NULL);

	/* both caps must parse as raw video */
	if (!gst_video_info_from_caps(&in_info, in) || !gst_video_info_from_caps(&out_info, out))
	{
		GST_ERROR_OBJECT(transform, "caps are invalid");
		blitter_video_transform->inout_info_set = FALSE;
		return FALSE;
	}

	/* the subclass decides whether input and output formats count as equal */
	inout_info_equal = klass->are_video_infos_equal(blitter_video_transform, &in_info, &out_info);

	if (inout_info_equal)
		GST_DEBUG_OBJECT(transform, "input and output caps are equal");
	else
		GST_DEBUG_OBJECT(transform, "input and output caps are not equal: input: %" GST_PTR_FORMAT " output: %" GST_PTR_FORMAT, (gpointer)in, (gpointer)out);

	/* configure the blitter for the new input format before caching it */
	if (!gst_imx_base_blitter_set_input_video_info(blitter_video_transform->blitter, &in_info))
	{
		GST_ERROR_OBJECT(transform, "could not use input caps: %" GST_PTR_FORMAT, (gpointer)in);
		blitter_video_transform->inout_info_set = FALSE;
		return FALSE;
	}

	blitter_video_transform->input_video_info = in_info;
	blitter_video_transform->output_video_info = out_info;
	blitter_video_transform->inout_info_equal = inout_info_equal;
	blitter_video_transform->inout_info_set = TRUE;

	return TRUE;
}
/**
 * gst_vaapi_plugin_base_set_caps:
 * @plugin: a #GstVaapiPluginBase
 * @incaps: the sink pad (input) caps
 * @outcaps: the src pad (output) caps
 *
 * Notifies the base plugin object of the new input and output caps,
 * obtained from the subclass.
 *
 * Returns: %TRUE if the update of caps was successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_set_caps (GstVaapiPluginBase * plugin, GstCaps * incaps,
    GstCaps * outcaps)
{
  /* only react to actually-changed sink caps */
  if (incaps != NULL && incaps != plugin->sinkpad_caps) {
    gst_caps_replace (&plugin->sinkpad_caps, incaps);
    if (!gst_video_info_from_caps (&plugin->sinkpad_info, incaps))
      return FALSE;
    plugin->sinkpad_caps_changed = TRUE;
    plugin->sinkpad_caps_is_raw = !gst_caps_has_vaapi_surface (incaps);
  }

  /* only react to actually-changed src caps */
  if (outcaps != NULL && outcaps != plugin->srcpad_caps) {
    gst_caps_replace (&plugin->srcpad_caps, outcaps);
    if (!gst_video_info_from_caps (&plugin->srcpad_info, outcaps))
      return FALSE;
    plugin->srcpad_caps_changed = TRUE;
  }

  return ensure_sinkpad_buffer_pool (plugin, plugin->sinkpad_caps);
}
/* GstBaseSrc::set_caps vfunc: sets the base-src blocksize to the frame
 * size implied by the negotiated caps. */
static gboolean
gst_euresys_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstVideoInfo vinfo;

  GST_DEBUG_OBJECT (bsrc, "set_caps with caps=%" GST_PTR_FORMAT, caps);

  /* BUGFIX: fail on unparsable caps instead of reading an uninitialized
   * GstVideoInfo */
  if (!gst_video_info_from_caps (&vinfo, caps))
    return FALSE;

  /* TODO: check stride alignment */
  gst_base_src_set_blocksize (bsrc, GST_VIDEO_INFO_SIZE (&vinfo));

  return TRUE;
}
/* GstBaseSink::set_caps vfunc: sizes and configures the EDT ring buffers
 * and the camera link simulator from the negotiated video caps. */
gboolean
gst_edt_pdv_sink_set_caps (GstBaseSink * basesink, GstCaps * caps)
{
  GstEdtPdvSink *pdvsink = GST_EDT_PDV_SINK (basesink);
  int buffer_size;
  gint depth;
  int taps;
  GstVideoInfo vinfo;

  GST_DEBUG_OBJECT (pdvsink, "Caps being set");

  /* BUGFIX: fail on unparsable caps instead of configuring the hardware
   * from an uninitialized GstVideoInfo */
  if (!gst_video_info_from_caps (&vinfo, caps)) {
    GST_ERROR_OBJECT (pdvsink, "Failed to parse caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }

  depth = GST_VIDEO_INFO_COMP_DEPTH (&vinfo, 0);
  buffer_size = vinfo.height * pdv_bytes_per_line (vinfo.width, depth);

  GST_DEBUG_OBJECT (pdvsink,
      "Configuring EDT ring buffer with %d buffers each of size %d",
      pdvsink->n_buffers, buffer_size);

  /* we'll use just two buffers and ping pong between them */
  edt_configure_ring_buffers (pdvsink->dev, buffer_size, pdvsink->n_buffers,
      EDT_WRITE, NULL);

  pdvsink->buffers = edt_buffer_addresses (pdvsink->dev);

  taps = pdvsink->dev->dd_p->cls.taps;

  /* TODO: handle RGB correctly */
  if (depth == 24) {
    taps = 1;
    depth = 32;
  }

  if (taps == 0) {
    GST_WARNING_OBJECT (pdvsink, "Taps set to 0, changing to 1");
    taps = 1;
  }

  GST_DEBUG_OBJECT (pdvsink, "Configuring simulator with %d taps", taps);

  /* configure simulator */
  pdv_cls_set_size (pdvsink->dev, taps, depth, vinfo.width, vinfo.height,
      PDV_CLS_DEFAULT_HGAP, (vinfo.width / taps) + PDV_CLS_DEFAULT_HGAP,
      PDV_CLS_DEFAULT_VGAP, vinfo.height + PDV_CLS_DEFAULT_VGAP);

  GST_DEBUG ("Configured simulator");

  return TRUE;
}
/* GstBaseSrc::decide_allocation vfunc: ensures a configured video buffer
 * pool (reusing the first proposed one when available), enables
 * GstVideoMeta when downstream supports it, then chains up. */
static gboolean
gst_openni2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstBufferPool *pool;
  guint size, min, max;
  gboolean update;
  GstStructure *config;
  GstCaps *caps;
  GstVideoInfo info;

  gst_query_parse_allocation (query, &caps, NULL);

  /* BUGFIX: the original ignored missing/unparsable caps and then read an
   * uninitialized GstVideoInfo */
  if (caps == NULL || !gst_video_info_from_caps (&info, caps)) {
    GST_ERROR_OBJECT (bsrc, "invalid or missing caps in allocation query");
    return FALSE;
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update = TRUE;
  } else {
    pool = NULL;
    min = max = 0;
    size = info.size;
    update = FALSE;
  }

  GST_DEBUG_OBJECT (bsrc, "allocation: size:%u min:%u max:%u pool:%"
      GST_PTR_FORMAT " caps:%" GST_PTR_FORMAT, size, min, max, pool, caps);

  if (!pool)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    GST_DEBUG_OBJECT (pool, "activate Video Meta");
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
}
gboolean gst_vulkan_swapper_set_caps (GstVulkanSwapper * swapper, GstCaps * caps, GError ** error) { if (!gst_video_info_from_caps (&swapper->v_info, caps)) { g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED, "Failed to geto GstVideoInfo from caps"); return FALSE; } gst_caps_replace (&swapper->caps, caps); return _swapchain_resize (swapper, error); }
/* GstBaseTransform::set_caps vfunc: caches input/output video infos on the
 * filter and gives the subclass a chance to react via its set_caps hook. */
static gboolean
gst_gl_filter_set_caps (GstBaseTransform * bt, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstGLFilter *filter = GST_GL_FILTER (bt);
  GstGLFilterClass *klass = GST_GL_FILTER_GET_CLASS (filter);

  if (!gst_video_info_from_caps (&filter->in_info, incaps) ||
      !gst_video_info_from_caps (&filter->out_info, outcaps)) {
    GST_WARNING ("Wrong caps");
    return FALSE;
  }

  /* let the subclass veto or react to the new caps */
  if (klass->set_caps != NULL && !klass->set_caps (filter, incaps, outcaps))
    return FALSE;

  GST_DEBUG ("set_caps %dx%d", GST_VIDEO_INFO_WIDTH (&filter->out_info),
      GST_VIDEO_INFO_HEIGHT (&filter->out_info));

  return TRUE;
}
/* GstVideoAggregator::update_caps vfunc for glvideomixer: computes the
 * smallest output resolution that contains every sink pad at its requested
 * size and x/y offset, chains up to the parent update_caps, then forces
 * that width/height onto every structure of the result. */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  GstGLVideoMixer *mix = GST_GL_VIDEO_MIXER (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  GstVideoInfo info;
  GstCaps *ret = NULL;
  int i;

  caps = gst_caps_make_writable (caps);
  /* NOTE(review): neither `info` nor the return value of this call is used
   * afterwards — looks like dead code, confirm before removing */
  gst_video_info_from_caps (&info, caps);

  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *vaggpad = l->data;
    GstGLVideoMixerPad *mixer_pad = GST_GL_VIDEO_MIXER_PAD (vaggpad);
    gint this_width, this_height;
    gint width, height;

    _mixer_pad_get_output_size (mix, mixer_pad, &width, &height);

    /* pads without a size yet don't constrain the output */
    if (width == 0 || height == 0)
      continue;

    /* a pad's footprint is its size plus its (non-negative) offset */
    this_width = width + MAX (mixer_pad->xpos, 0);
    this_height = height + MAX (mixer_pad->ypos, 0);

    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;
  }
  GST_OBJECT_UNLOCK (vagg);

  ret =
      GST_VIDEO_AGGREGATOR_CLASS (gst_gl_video_mixer_parent_class)->update_caps
      (vagg, caps);

  /* NOTE(review): if no pad had a usable size, best_width/height stay -1
   * and get written into the caps — confirm callers prevent that */
  for (i = 0; i < gst_caps_get_size (ret); i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);

    gst_structure_set (s, "width", G_TYPE_INT, best_width,
        "height", G_TYPE_INT, best_height, NULL);
  }

  return ret;
}
/* GstBaseTransform::get_unit_size vfunc: reports the frame size implied by
 * the given caps, or FALSE when the caps cannot be parsed as raw video. */
static gboolean gst_imx_blitter_video_transform_get_unit_size(GstBaseTransform *transform, GstCaps *caps, gsize *size)
{
	GstVideoInfo video_info;

	if (gst_video_info_from_caps(&video_info, caps))
	{
		*size = video_info.size;
		GST_DEBUG_OBJECT(transform, "Returning size %" G_GSIZE_FORMAT " bytes for caps %" GST_PTR_FORMAT, *size, caps);
		return TRUE;
	}

	GST_WARNING_OBJECT(transform, "Failed to parse caps %" GST_PTR_FORMAT, caps);
	return FALSE;
}