/* Allocates a new GstVaapiVideoMemory from @base_allocator.
 *
 * The GstMemory size/maxsize are taken from the allocator's image info.
 * An extra reference on the allocator is taken for the lifetime of the
 * memory (gst_memory_init stores it; released when the memory is freed).
 * @meta may be NULL; if given, a reference to it is kept.
 *
 * Returns the new memory as a GstMemory, or NULL on failure. */
GstMemory *
gst_vaapi_video_memory_new (GstAllocator * base_allocator,
    GstVaapiVideoMeta * meta)
{
  GstVaapiVideoAllocator *const allocator =
      GST_VAAPI_VIDEO_ALLOCATOR_CAST (base_allocator);
  const GstVideoInfo *vip;
  GstVaapiVideoMemory *mem;

  g_return_val_if_fail (GST_VAAPI_IS_VIDEO_ALLOCATOR (allocator), NULL);

  mem = g_slice_new (GstVaapiVideoMemory);
  if (!mem)
    return NULL;

  vip = &allocator->image_info;
  gst_memory_init (&mem->parent_instance, GST_MEMORY_FLAG_NO_SHARE,
      gst_object_ref (allocator), NULL, GST_VIDEO_INFO_SIZE (vip), 0, 0,
      GST_VIDEO_INFO_SIZE (vip));

  /* Surface/image are created lazily; only the pointers to the
   * allocator's cached infos are stored here. */
  mem->proxy = NULL;
  mem->surface_info = &allocator->surface_info;
  mem->surface = NULL;
  mem->image_info = &allocator->image_info;
  mem->image = NULL;
  mem->meta = meta ? gst_vaapi_video_meta_ref (meta) : NULL;
  mem->map_type = 0;
  mem->map_count = 0;
  mem->use_direct_rendering = allocator->has_direct_rendering;

  /* A freshly created memory has no image data yet, so the (empty)
   * surface is considered the current representation. */
  GST_VAAPI_VIDEO_MEMORY_FLAG_SET (mem,
      GST_VAAPI_VIDEO_MEMORY_FLAG_SURFACE_IS_CURRENT);

  return GST_MEMORY_CAST (mem);
}
/* Allocates a new GstMsdkSystemMemory from @base_allocator.
 *
 * The GstMemory size/maxsize are taken from the allocator's image info.
 * Returns the new memory as a GstMemory, or NULL on failure. */
GstMemory *
gst_msdk_system_memory_new (GstAllocator * base_allocator)
{
  GstMsdkSystemAllocator *allocator;
  GstVideoInfo *vip;
  GstMsdkSystemMemory *mem;

  g_return_val_if_fail (base_allocator, NULL);
  g_return_val_if_fail (GST_IS_MSDK_SYSTEM_ALLOCATOR (base_allocator), NULL);

  allocator = GST_MSDK_SYSTEM_ALLOCATOR_CAST (base_allocator);

  mem = g_slice_new0 (GstMsdkSystemMemory);
  if (!mem)
    return NULL;

  mem->surface = gst_msdk_system_allocator_create_surface (base_allocator);

  vip = &allocator->image_info;
  gst_memory_init (&mem->parent_instance, GST_MEMORY_FLAG_NO_SHARE,
      base_allocator, NULL, GST_VIDEO_INFO_SIZE (vip), 0, 0,
      GST_VIDEO_INFO_SIZE (vip));

  if (!ensure_data (mem)) {
    /* Fix: the partially initialized memory block used to be leaked
     * here on failure. NOTE(review): if gst_memory_init takes its own
     * allocator reference in this tree, that ref would also need to be
     * dropped — confirm against the project's GstMemory semantics. */
    g_slice_free (GstMsdkSystemMemory, mem);
    return NULL;
  }

  return GST_MEMORY_CAST (mem);
}
/* Allocates a 64x64 RGBx buffer, zero-fills it (black pixels, zero alpha),
 * and attaches video metadata plus a physical-memory meta so the IPU can
 * use it as a dummy input frame. */
static void gst_imx_ipu_blitter_init_dummy_black_buffer(GstImxIpuBlitter *ipu_blitter)
{
	GstVideoInfo video_info;

	gst_video_info_init(&video_info);
	gst_video_info_set_format(&video_info, GST_VIDEO_FORMAT_RGBx, 64, 64);

	ipu_blitter->dummy_black_buffer = gst_buffer_new_allocate(ipu_blitter->allocator, GST_VIDEO_INFO_SIZE(&video_info), NULL);
	/* zero-fill the whole frame => black in RGBx */
	gst_buffer_memset(ipu_blitter->dummy_black_buffer, 0, 0, GST_VIDEO_INFO_SIZE(&video_info));

	/* plane offset/stride arrays are contiguous inside GstVideoInfo, so the
	 * address of element 0 serves as the array pointer */
	gst_buffer_add_video_meta_full(
		ipu_blitter->dummy_black_buffer,
		GST_VIDEO_FRAME_FLAG_NONE,
		GST_VIDEO_INFO_FORMAT(&video_info),
		GST_VIDEO_INFO_WIDTH(&video_info),
		GST_VIDEO_INFO_HEIGHT(&video_info),
		GST_VIDEO_INFO_N_PLANES(&video_info),
		&(GST_VIDEO_INFO_PLANE_OFFSET(&video_info, 0)),
		&(GST_VIDEO_INFO_PLANE_STRIDE(&video_info, 0))
	);

	{
		GstImxPhysMemory *imx_phys_mem_mem = (GstImxPhysMemory *)gst_buffer_peek_memory(ipu_blitter->dummy_black_buffer, 0);
		GstImxPhysMemMeta *phys_mem_meta = (GstImxPhysMemMeta *)GST_IMX_PHYS_MEM_META_ADD(ipu_blitter->dummy_black_buffer);

		/* expose the underlying physical address through the buffer meta
		 * (assumes the allocator produced physically contiguous memory —
		 * TODO confirm) */
		phys_mem_meta->phys_addr = imx_phys_mem_mem->phys_addr;
	}
}
/* Post-processes one parsed frame: strips trailing padding bytes (if the
 * configured frame stride is larger than the actual frame size), applies
 * interlacing buffer flags, and (re)attaches a GstVideoMeta describing the
 * configured plane layout.
 *
 * On exit, *processed_data is either a new sub-buffer (padding case, caller
 * owns it) or NULL (in which case in_data is used as-is). Always returns
 * TRUE. */
static gboolean
gst_raw_video_parse_process (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config, GstBuffer * in_data,
    G_GNUC_UNUSED gsize total_num_in_bytes,
    G_GNUC_UNUSED gsize num_valid_in_bytes, GstBuffer ** processed_data)
{
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *config_ptr =
      gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
  guint frame_flags = 0;
  GstVideoInfo *video_info = &(config_ptr->info);
  GstVideoMeta *videometa;
  GstBuffer *out_data;

  /* In case of extra padding bytes, get a subbuffer without the padding bytes.
   * Otherwise, just add the video meta. */
  if (GST_VIDEO_INFO_SIZE (video_info) < config_ptr->frame_stride) {
    *processed_data = out_data =
        gst_buffer_copy_region (in_data,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
        GST_BUFFER_COPY_MEMORY, 0, GST_VIDEO_INFO_SIZE (video_info));
  } else {
    out_data = in_data;
    *processed_data = NULL;
  }

  /* Translate the configured interlacing into buffer and frame flags. */
  if (config_ptr->interlaced) {
    GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_INTERLACED);
    frame_flags |= GST_VIDEO_FRAME_FLAG_INTERLACED;
    if (config_ptr->top_field_first) {
      GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
      frame_flags |= GST_VIDEO_FRAME_FLAG_TFF;
    } else
      GST_BUFFER_FLAG_UNSET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* Remove any existing videometa - it will be replaced by the new videometa
   * from here */
  while ((videometa = gst_buffer_get_video_meta (out_data))) {
    GST_LOG_OBJECT (raw_base_parse, "removing existing videometa from buffer");
    gst_buffer_remove_meta (out_data, (GstMeta *) videometa);
  }

  gst_buffer_add_video_meta_full (out_data, frame_flags, config_ptr->format,
      config_ptr->width, config_ptr->height,
      GST_VIDEO_INFO_N_PLANES (video_info), config_ptr->plane_offsets,
      config_ptr->plane_strides);

  return TRUE;
}
/** * gst_video_info_is_equal: * @info: a #GstVideoInfo * @other: a #GstVideoInfo * * Compares two #GstVideoInfo and returns whether they are equal or not * * Returns: %TRUE if @info and @other are equal, else %FALSE. */ gboolean gst_video_info_is_equal (const GstVideoInfo * info, const GstVideoInfo * other) { gint i; if (GST_VIDEO_INFO_FORMAT (info) != GST_VIDEO_INFO_FORMAT (other)) return FALSE; if (GST_VIDEO_INFO_INTERLACE_MODE (info) != GST_VIDEO_INFO_INTERLACE_MODE (other)) return FALSE; if (GST_VIDEO_INFO_FLAGS (info) != GST_VIDEO_INFO_FLAGS (other)) return FALSE; if (GST_VIDEO_INFO_WIDTH (info) != GST_VIDEO_INFO_WIDTH (other)) return FALSE; if (GST_VIDEO_INFO_HEIGHT (info) != GST_VIDEO_INFO_HEIGHT (other)) return FALSE; if (GST_VIDEO_INFO_SIZE (info) != GST_VIDEO_INFO_SIZE (other)) return FALSE; if (GST_VIDEO_INFO_PAR_N (info) != GST_VIDEO_INFO_PAR_N (other)) return FALSE; if (GST_VIDEO_INFO_PAR_D (info) != GST_VIDEO_INFO_PAR_D (other)) return FALSE; if (GST_VIDEO_INFO_FPS_N (info) != GST_VIDEO_INFO_FPS_N (other)) return FALSE; if (GST_VIDEO_INFO_FPS_D (info) != GST_VIDEO_INFO_FPS_D (other)) return FALSE; if (!gst_video_colorimetry_is_equal (&GST_VIDEO_INFO_COLORIMETRY (info), &GST_VIDEO_INFO_COLORIMETRY (other))) return FALSE; if (GST_VIDEO_INFO_CHROMA_SITE (info) != GST_VIDEO_INFO_CHROMA_SITE (other)) return FALSE; if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_INFO_MULTIVIEW_MODE (other)) return FALSE; if (GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) != GST_VIDEO_INFO_MULTIVIEW_FLAGS (other)) return FALSE; if (GST_VIDEO_INFO_VIEWS (info) != GST_VIDEO_INFO_VIEWS (other)) return FALSE; for (i = 0; i < info->finfo->n_planes; i++) { if (info->stride[i] != other->stride[i]) return FALSE; if (info->offset[i] != other->offset[i]) return FALSE; } return TRUE; }
/* Refreshes plugin->sinkpad_info's per-plane layout (offsets, strides and
 * total size) from the GstVideoMeta attached to @buf.
 *
 * Returns TRUE if the buffer has no video meta (nothing to update) or the
 * meta matches the negotiated format/geometry; FALSE on a mismatch. */
static gboolean
plugin_update_sinkpad_info_from_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * buf)
{
  GstVideoInfo *const vip = &plugin->sinkpad_info;
  GstVideoMeta *vmeta;
  guint i;

  /* No meta: keep the current sinkpad info untouched. */
  vmeta = gst_buffer_get_video_meta (buf);
  if (!vmeta)
    return TRUE;

  /* Only the memory layout may differ from the negotiated info; the
   * format/geometry must match exactly. */
  if (GST_VIDEO_INFO_FORMAT (vip) != vmeta->format ||
      GST_VIDEO_INFO_WIDTH (vip) != vmeta->width ||
      GST_VIDEO_INFO_HEIGHT (vip) != vmeta->height ||
      GST_VIDEO_INFO_N_PLANES (vip) != vmeta->n_planes)
    return FALSE;

  /* Adopt the per-plane layout advertised by the buffer. */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (vip); ++i) {
    GST_VIDEO_INFO_PLANE_OFFSET (vip, i) = vmeta->offset[i];
    GST_VIDEO_INFO_PLANE_STRIDE (vip, i) = vmeta->stride[i];
  }
  GST_VIDEO_INFO_SIZE (vip) = gst_buffer_get_size (buf);
  return TRUE;
}
/* GstBaseSrc::set_caps — caches format/width/height/framesize from the
 * negotiated caps and configures the base source blocksize.
 *
 * Returns TRUE on success, FALSE if the caps cannot be parsed. */
gboolean
gst_niimaqsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstNiImaqSrc *src = GST_NIIMAQSRC (bsrc);
  gboolean res = TRUE;
  int depth, ncomps;
  GstVideoInfo vinfo;

  res = gst_video_info_from_caps (&vinfo, caps);
  if (!res) {
    GST_WARNING_OBJECT (src, "Unable to parse video info from caps");
    return res;
  }

  src->format = GST_VIDEO_INFO_FORMAT (&vinfo);
  src->width = GST_VIDEO_INFO_WIDTH (&vinfo);
  src->height = GST_VIDEO_INFO_HEIGHT (&vinfo);

  /* this will handle byte alignment (i.e. row multiple of 4 bytes) */
  src->framesize = GST_VIDEO_INFO_SIZE (&vinfo);
  gst_base_src_set_blocksize (bsrc, src->framesize);

  ncomps = GST_VIDEO_INFO_N_COMPONENTS (&vinfo);
  depth = GST_VIDEO_INFO_COMP_DEPTH (&vinfo, 0);

  /* use this so NI can give us proper byte alignment */
  /* stride (bytes) / bytes-per-pixel => padded row length in pixels;
   * assumes all components share component 0's depth — TODO confirm
   * for the formats this source negotiates */
  src->rowpixels =
      GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0) / (ncomps * depth / 8);

  GST_LOG_OBJECT (src, "Caps set, framesize=%d, rowpixels=%d",
      src->framesize, src->rowpixels);

  return res;
}
/* Wraps the SHM file descriptor of @mem in a wl_buffer described by @info.
 *
 * The fd is consumed: it is closed here and shm_mem->fd reset to -1, so
 * this can be called at most once per memory. The temporary wl_shm_pool is
 * destroyed before returning; the wl_buffer keeps the pool storage alive.
 *
 * Returns the new wl_buffer, or NULL (via g_return_val_if_fail) if the
 * memory is not SHM memory, too small, or its fd was already consumed. */
struct wl_buffer *
gst_wl_shm_memory_construct_wl_buffer (GstMemory * mem, GstWlDisplay * display,
    const GstVideoInfo * info)
{
  GstWlShmMemory *shm_mem = (GstWlShmMemory *) mem;
  gint width, height, stride;
  gsize size;
  enum wl_shm_format format;
  struct wl_shm_pool *wl_pool;
  struct wl_buffer *wbuffer;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  size = GST_VIDEO_INFO_SIZE (info);
  format = gst_video_format_to_wl_shm_format (GST_VIDEO_INFO_FORMAT (info));

  /* Validate after the cheap reads above; none of them dereference mem. */
  g_return_val_if_fail (gst_is_wl_shm_memory (mem), NULL);
  g_return_val_if_fail (size <= mem->size, NULL);
  g_return_val_if_fail (shm_mem->fd != -1, NULL);

  GST_DEBUG_OBJECT (mem->allocator, "Creating wl_buffer of size %"
      G_GSSIZE_FORMAT " (%d x %d, stride %d), format %s", size, width, height,
      stride, gst_wl_shm_format_to_string (format));

  wl_pool = wl_shm_create_pool (display->shm, shm_mem->fd, mem->size);
  wbuffer = wl_shm_pool_create_buffer (wl_pool, 0, width, height, stride,
      format);

  /* The compositor holds its own reference to the fd now. */
  close (shm_mem->fd);
  shm_mem->fd = -1;
  wl_shm_pool_destroy (wl_pool);

  return wbuffer;
}
/* GstBaseSink::propose_allocation — if the upstream query carries no pool
 * yet, offer a GstVideoBufferPool configured for the negotiated caps (with
 * 16-byte alignment) and advertise GstVideoMeta support. */
static gboolean gst_imx_blitter_video_sink_propose_allocation(GstBaseSink *sink, GstQuery *query)
{
	GstCaps *caps;
	GstVideoInfo info;
	GstBufferPool *pool;
	guint size;

	gst_query_parse_allocation(query, &caps, NULL);

	if (caps == NULL)
	{
		GST_DEBUG_OBJECT(sink, "no caps specified");
		return FALSE;
	}

	if (!gst_video_info_from_caps(&info, caps))
		return FALSE;

	size = GST_VIDEO_INFO_SIZE(&info);

	if (gst_query_get_n_allocation_pools(query) == 0)
	{
		GstStructure *structure;
		GstAllocationParams params;
		GstAllocator *allocator = NULL;

		/* align = 15 => 16-byte alignment of buffer memory */
		memset(&params, 0, sizeof(params));
		params.flags = 0;
		params.align = 15;
		params.prefix = 0;
		params.padding = 0;

		/* reuse the allocator upstream proposed, otherwise advertise the
		 * default one (NULL) with our params */
		if (gst_query_get_n_allocation_params(query) > 0)
			gst_query_parse_nth_allocation_param(query, 0, &allocator, &params);
		else
			gst_query_add_allocation_param(query, allocator, &params);

		pool = gst_video_buffer_pool_new();

		structure = gst_buffer_pool_get_config(pool);
		gst_buffer_pool_config_set_params(structure, caps, size, 0, 0);
		gst_buffer_pool_config_set_allocator(structure, allocator, &params);

		if (allocator)
			gst_object_unref(allocator);

		if (!gst_buffer_pool_set_config(pool, structure))
		{
			GST_ERROR_OBJECT(sink, "failed to set config");
			gst_object_unref(pool);
			return FALSE;
		}

		gst_query_add_allocation_pool(query, pool, size, 0, 0);
		gst_object_unref(pool);
		gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, NULL);
	}

	return TRUE;
}
/* GstBufferPool::alloc — carves the next region out of the pool's mmap'd
 * wl_shm storage, creates a matching wl_buffer, and wraps both in a new
 * GstBuffer with a GstWlMeta.
 *
 * Allocation is append-only: regions are never reclaimed individually, only
 * the whole pool is (self->used grows monotonically). Returns GST_FLOW_ERROR
 * when the shm storage is exhausted. */
static GstFlowReturn
gst_wayland_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstWaylandBufferPool *self = GST_WAYLAND_BUFFER_POOL_CAST (pool);
  gint width, height, stride;
  gsize size;
  enum wl_shm_format format;
  gint offset;
  void *data;
  GstWlMeta *meta;

  width = GST_VIDEO_INFO_WIDTH (&self->info);
  height = GST_VIDEO_INFO_HEIGHT (&self->info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (&self->info, 0);
  size = GST_VIDEO_INFO_SIZE (&self->info);
  format =
      gst_video_format_to_wayland_format (GST_VIDEO_INFO_FORMAT (&self->info));

  GST_DEBUG_OBJECT (self, "Allocating buffer of size %" G_GSSIZE_FORMAT
      " (%d x %d, stride %d), format %s", size, width, height, stride,
      gst_wayland_format_to_string (format));

  /* try to reserve another memory block from the shm pool */
  if (self->used + size > self->size)
    goto no_buffer;

  offset = self->used;
  self->used += size;
  data = ((gchar *) self->data) + offset;

  /* create buffer and its metadata object */
  *buffer = gst_buffer_new ();
  meta = (GstWlMeta *) gst_buffer_add_meta (*buffer, GST_WL_META_INFO, NULL);
  meta->pool = self;
  meta->wbuffer = wl_shm_pool_create_buffer (self->wl_pool, offset,
      width, height, stride, format);
  meta->used_by_compositor = FALSE;

  /* configure listening to wl_buffer.release */
  /* buffers_map lets the release callback find the GstBuffer again */
  g_mutex_lock (&self->buffers_map_mutex);
  g_hash_table_insert (self->buffers_map, meta->wbuffer, *buffer);
  g_mutex_unlock (&self->buffers_map_mutex);

  wl_buffer_add_listener (meta->wbuffer, &buffer_listener, self);

  /* add the allocated memory on the GstBuffer */
  gst_buffer_append_memory (*buffer,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data, size, 0,
          size, NULL, NULL));

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create buffer");
    return GST_FLOW_ERROR;
  }
}
/* Initializes a freshly allocated GstVdpVideoMemory: sets up the base
 * GstMemory fields from @info, takes a reference on @device, and derives
 * the VDPAU chroma type and YCbCr format from the video format. */
static void
_vdp_video_mem_init (GstVdpVideoMemory * mem, GstAllocator * allocator,
    GstMemory * parent, GstVdpDevice * device, GstVideoInfo * info)
{
  gsize vsize = GST_VIDEO_INFO_SIZE (info);

  gst_memory_init (GST_MEMORY_CAST (mem), GST_MEMORY_FLAG_NO_SHARE,
      allocator, parent, vsize, 0, 0, vsize);

  mem->info = info;
  mem->device = gst_object_ref (device);
  mem->chroma_type = gst_video_info_to_vdp_chroma_type (info);
  mem->ycbcr_format =
      gst_video_format_to_vdp_ycbcr (GST_VIDEO_INFO_FORMAT (info));
  /* no mapping exists yet; the cache is filled on first map */
  mem->refcount = 0;

  GST_DEBUG ("new VdpVideo memory");
}
/* Converts @src_value between BYTES, DEFAULT (frames) and TIME formats for
 * raw video, using the frame size and framerate from @state->info.
 *
 * Identity conversions and the special values 0 and -1 pass through
 * unchanged. Returns TRUE if the conversion was performed, FALSE if the
 * needed frame size / framerate is zero or the format pair is unsupported. */
gboolean
__gst_video_rawvideo_convert (GstVideoCodecState * state,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = FALSE;
  guint vidsize;
  guint fps_n, fps_d;

  g_return_val_if_fail (dest_format != NULL, FALSE);
  g_return_val_if_fail (dest_value != NULL, FALSE);

  if (src_format == *dest_format || src_value == 0 || src_value == -1) {
    *dest_value = src_value;
    return TRUE;
  }

  vidsize = GST_VIDEO_INFO_SIZE (&state->info);
  fps_n = GST_VIDEO_INFO_FPS_N (&state->info);
  fps_d = GST_VIDEO_INFO_FPS_D (&state->info);

  if (src_format == GST_FORMAT_BYTES &&
      *dest_format == GST_FORMAT_DEFAULT && vidsize) {
    /* convert bytes to frames */
    *dest_value = gst_util_uint64_scale_int (src_value, 1, vidsize);
    res = TRUE;
  } else if (src_format == GST_FORMAT_DEFAULT &&
      *dest_format == GST_FORMAT_BYTES && vidsize) {
    /* convert frames to bytes */
    *dest_value = src_value * vidsize;
    res = TRUE;
  } else if (src_format == GST_FORMAT_DEFAULT &&
      *dest_format == GST_FORMAT_TIME && fps_n) {
    /* convert frames to time */
    *dest_value = gst_util_uint64_scale (src_value, GST_SECOND * fps_d, fps_n);
    res = TRUE;
  } else if (src_format == GST_FORMAT_TIME &&
      *dest_format == GST_FORMAT_DEFAULT && fps_d) {
    /* convert time to frames */
    *dest_value = gst_util_uint64_scale (src_value, fps_n, GST_SECOND * fps_d);
    res = TRUE;
  } else if (src_format == GST_FORMAT_TIME &&
      *dest_format == GST_FORMAT_BYTES && fps_d && vidsize) {
    /* convert time to bytes */
    *dest_value = gst_util_uint64_scale (src_value,
        fps_n * (guint64) vidsize, GST_SECOND * fps_d);
    res = TRUE;
  } else if (src_format == GST_FORMAT_BYTES &&
      *dest_format == GST_FORMAT_TIME && fps_n && vidsize) {
    /* convert bytes to time */
    *dest_value = gst_util_uint64_scale (src_value,
        GST_SECOND * fps_d, fps_n * (guint64) vidsize);
    res = TRUE;
  }

  return res;
}
/* GstBaseSink::propose_allocation — when upstream requests a pool, creates
 * a VLC video pool for the negotiated caps (min 2 buffers, since the sink
 * holds on to the last one) and advertises GstVideoMeta support.
 *
 * Note: i_size is only read when p_pool was created, in which case it has
 * been set from the pool's own info. */
static gboolean gst_vlc_video_sink_propose_allocation( GstBaseSink* p_bsink,
        GstQuery* p_query )
{
    GstVlcVideoSink *p_vsink = GST_VLC_VIDEO_SINK( p_bsink );
    GstCaps *p_caps;
    gboolean b_need_pool;
    GstBufferPool* p_pool = NULL;
    gsize i_size;

    gst_query_parse_allocation (p_query, &p_caps, &b_need_pool);
    if( p_caps == NULL )
        goto no_caps;

    if( b_need_pool )
    {
        GstVideoInfo info;

        if( !gst_video_info_from_caps( &info, p_caps ))
            goto invalid_caps;

        p_pool = (GstBufferPool*) gst_vlc_video_sink_create_pool( p_vsink,
                p_caps, info.size, 2 );
        if( p_pool == NULL )
            goto no_pool;

        /* report the size the pool actually uses, not the raw caps size */
        i_size = GST_VIDEO_INFO_SIZE( &GST_VLC_VIDEO_POOL_CAST( p_pool )->info);
    }

    if( p_pool )
    {
        /* we need at least 2 buffer because we hold on to the last one */
        gst_query_add_allocation_pool( p_query, p_pool, i_size, 2, 0);
        gst_object_unref (p_pool);
    }

    /* we support various metadata */
    gst_query_add_allocation_meta( p_query, GST_VIDEO_META_API_TYPE, NULL );

    return TRUE;

/* ERRORS */
no_pool:
    {
        msg_Err( p_vsink->p_dec, "failed to create the pool" );
        return FALSE;
    }
no_caps:
    {
        msg_Err( p_vsink->p_dec, "no caps in allocation query" );
        return FALSE;
    }
invalid_caps:
    {
        msg_Err( p_vsink->p_dec, "invalid caps in allocation query" );
        return FALSE;
    }
}
/* Returns the number of bytes one frame occupies in the input stream:
 * the configured frame stride, or the raw video frame size if that is
 * larger. */
static gsize
gst_raw_video_parse_get_config_frame_size (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config)
{
  GstRawVideoParseConfig *cfg =
      gst_raw_video_parse_get_config_ptr (GST_RAW_VIDEO_PARSE
      (raw_base_parse), config);
  gsize info_size = GST_VIDEO_INFO_SIZE (&(cfg->info));
  gsize stride_size = (gsize) (cfg->frame_stride);

  return (info_size > stride_size) ? info_size : stride_size;
}
/* GstBaseSink::propose_allocation — when upstream requests a pool, offers a
 * KMS pool for the negotiated caps (min 2 buffers, since the sink holds on
 * to the last one) and advertises GstVideoMeta and GstVideoCropMeta. */
static gboolean
gst_kms_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstKMSSink *self;
  GstCaps *caps;
  gboolean need_pool;
  GstVideoInfo vinfo;
  GstBufferPool *pool;
  gsize size;

  self = GST_KMS_SINK (bsink);

  gst_query_parse_allocation (query, &caps, &need_pool);
  if (!caps)
    goto no_caps;
  if (!gst_video_info_from_caps (&vinfo, caps))
    goto invalid_caps;

  size = GST_VIDEO_INFO_SIZE (&vinfo);

  pool = NULL;
  if (need_pool) {
    pool = gst_kms_sink_create_pool (self, caps, size, 0);
    if (!pool)
      goto no_pool;
  }

  if (pool) {
    /* we need at least 2 buffer because we hold on to the last one */
    gst_query_add_allocation_pool (query, pool, size, 2, 0);
    gst_object_unref (pool);
  }

  /* advertise the metas this sink can honor */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE, NULL);

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }
no_pool:
  {
    /* Already warned in create_pool */
    return FALSE;
  }
}
/* GstBaseSink::propose_allocation — if the query carries no pool yet,
 * proposes a GstVideoBufferPool for the negotiated caps (16-byte aligned
 * memory via params.align = 15) and advertises GstVideoMeta support. */
static gboolean
gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
    GstQuery * query)
{
  GstCaps *caps;
  GstVideoInfo info;
  GstBufferPool *pool;
  guint size;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    return FALSE;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  if (gst_query_get_n_allocation_pools (query) == 0) {
    GstStructure *structure;
    GstAllocator *allocator = NULL;
    /* flags = 0, align = 15 (16-byte alignment), prefix = 0, padding = 0 */
    GstAllocationParams params = { (GstMemoryFlags) 0, 15, 0, 0 };

    /* reuse upstream's allocator proposal if present, otherwise advertise
     * the default allocator (NULL) with our params */
    if (gst_query_get_n_allocation_params (query) > 0)
      gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    else
      gst_query_add_allocation_param (query, allocator, &params);

    pool = gst_video_buffer_pool_new ();

    structure = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
    gst_buffer_pool_config_set_allocator (structure, allocator, &params);

    if (allocator)
      gst_object_unref (allocator);

    if (!gst_buffer_pool_set_config (pool, structure))
      goto config_failed;

    gst_query_add_allocation_pool (query, pool, size, 0, 0);
    gst_object_unref (pool);
    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  }

  return TRUE;

  // ERRORS
config_failed:
  {
    GST_ERROR_OBJECT (bsink, "failed to set config");
    gst_object_unref (pool);
    return FALSE;
  }
}
/* Makes sure plugin->sinkpad_allocator exists and matches @caps, creating
 * (in priority order) a dmabuf allocator when the peer is dmabuf-capable,
 * or a vaapi video allocator — with direct upload enabled for raw video.
 * On success *size is set to the allocator's own frame size.
 *
 * Returns TRUE on success, FALSE on invalid caps or allocator creation
 * failure. */
static gboolean
ensure_sinkpad_allocator (GstVaapiPluginBase * plugin, GstCaps * caps,
    guint * size)
{
  GstVideoInfo vinfo;
  const GstVideoInfo *image_info;
  GstVaapiImageUsageFlags usage_flag =
      GST_VAAPI_IMAGE_USAGE_FLAG_NATIVE_FORMATS;

  if (!gst_video_info_from_caps (&vinfo, caps))
    goto error_invalid_caps;

  /* If the existing allocator can simply be reconfigured, skip creation. */
  if (!reset_allocator (plugin->sinkpad_allocator, &vinfo))
    goto bail;

  /* Prefer a dmabuf allocator when the downstream peer supports it. */
  if (has_dmabuf_capable_peer (plugin, plugin->sinkpad)) {
    plugin->sinkpad_allocator =
        gst_vaapi_dmabuf_allocator_new (plugin->display, &vinfo,
        GST_VAAPI_SURFACE_ALLOC_FLAG_LINEAR_STORAGE, GST_PAD_SINK);
    goto bail;
  }

  /* enable direct upload if upstream requests raw video */
  if (gst_caps_is_video_raw (caps)) {
    usage_flag = GST_VAAPI_IMAGE_USAGE_FLAG_DIRECT_UPLOAD;
    GST_INFO_OBJECT (plugin, "enabling direct upload in sink allocator");
  }
  plugin->sinkpad_allocator =
      gst_vaapi_video_allocator_new (plugin->display, &vinfo, 0, usage_flag);

bail:
  if (!plugin->sinkpad_allocator)
    goto error_create_allocator;

  image_info =
      gst_allocator_get_vaapi_video_info (plugin->sinkpad_allocator, NULL);
  g_assert (image_info);        /* allocator ought set its image info */

  /* update the size with the one generated by the allocator */
  *size = GST_VIDEO_INFO_SIZE (image_info);

  return TRUE;

  /* ERRORS */
error_invalid_caps:
  {
    GST_ERROR_OBJECT (plugin, "invalid caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
error_create_allocator:
  {
    GST_ERROR_OBJECT (plugin, "failed to create sink pad's allocator");
    return FALSE;
  }
}
/* Test helper: runs check_filter_caps() for element @name over every
 * normalized caps structure of the template caps, at several resolutions
 * (including odd/equal width/height to catch stride/rounding bugs).
 *
 * @event (if any) is re-reffed per iteration — check_filter_caps is assumed
 * to consume one reference each time (TODO confirm) — and the helper's own
 * reference is dropped at the end. @varargs is re-copied via va_copy per
 * iteration because each call consumes the list. */
static void
check_filter_varargs (const gchar * name, GstEvent * event, gint num_buffers,
    const gchar * prop, va_list varargs)
{
  static const struct
  {
    const int width, height;
  } resolutions[] = { {
  384, 288}, {
  385, 289}, {
  385, 385}};
  gint i, n, r;
  gint size;
  GstCaps *allcaps, *templ = gst_caps_from_string (VIDEO_CAPS_TEMPLATE_STRING);

  allcaps = gst_caps_normalize (templ);

  n = gst_caps_get_size (allcaps);

  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (allcaps, i);
    GstCaps *caps = gst_caps_new_empty ();

    gst_caps_append_structure (caps, gst_structure_copy (s));

    /* try various resolutions */
    for (r = 0; r < G_N_ELEMENTS (resolutions); ++r) {
      GstVideoInfo info;
      va_list args_cp;

      caps = gst_caps_make_writable (caps);
      gst_caps_set_simple (caps, "width", G_TYPE_INT, resolutions[r].width,
          "height", G_TYPE_INT, resolutions[r].height, "framerate",
          GST_TYPE_FRACTION, 25, 1, NULL);

      GST_DEBUG ("Testing with caps: %" GST_PTR_FORMAT, caps);

      gst_video_info_from_caps (&info, caps);
      size = GST_VIDEO_INFO_SIZE (&info);

      if (event)
        gst_event_ref (event);
      va_copy (args_cp, varargs);
      check_filter_caps (name, event, caps, size, num_buffers, prop, args_cp);
      va_end (args_cp);
    }
    gst_caps_unref (caps);
  }
  gst_caps_unref (allcaps);
  if (event)
    gst_event_unref (event);
}
/* Recomputes the frame size from the currently configured format and
 * dimensions and pushes it down to the raw parser base class. */
void
gst_video_parse_update_frame_size (GstVideoParse * vp)
{
  GstVideoInfo vinfo;
  gint new_framesize;

  gst_video_info_init (&vinfo);
  gst_video_info_set_format (&vinfo, vp->format, vp->width, vp->height);
  new_framesize = GST_VIDEO_INFO_SIZE (&vinfo);

  gst_raw_parse_set_framesize (GST_RAW_PARSE (vp), new_framesize);
}
/* GstBaseTransform::get_unit_size — reports the frame size for @caps.
 * Returns FALSE when the caps cannot be parsed as video. */
static gboolean
gst_yadif_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    gsize * size)
{
  GstVideoInfo vinfo;

  if (!gst_video_info_from_caps (&vinfo, caps))
    return FALSE;

  *size = GST_VIDEO_INFO_SIZE (&vinfo);
  return TRUE;
}
/* GstBaseTransform::get_unit_size — reports the frame size for @caps.
 *
 * Fix: previously this returned TRUE unconditionally, claiming success
 * even when gst_video_info_from_caps() failed, which left *size
 * uninitialized for the caller. Now the parse result is propagated. */
static gboolean
gst_gl_filter_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    gsize * size)
{
  gboolean ret = FALSE;
  GstVideoInfo info;

  ret = gst_video_info_from_caps (&info, caps);
  if (ret)
    *size = GST_VIDEO_INFO_SIZE (&info);

  return ret;
}
/* Downloads the VDPAU surface contents into a malloc'd system-memory cache
 * on the first concurrent use; subsequent users just bump the refcount and
 * reuse the cache.
 *
 * Fix: g_atomic_int_add() returns the value *before* the addition, so the
 * first caller observes 0. The old test (> 1) meant the second caller also
 * fell through, re-downloading the surface and leaking the first cache;
 * only an observed value of 0 must trigger the download. */
static gboolean
ensure_data (GstVdpVideoMemory * vmem)
{
  VdpStatus vdp_stat;
  GstVideoInfo *info = vmem->info;
#ifndef GST_DISABLE_GST_DEBUG
  GstClockTime before, after;
#endif

  if (g_atomic_int_add (&vmem->refcount, 1) > 0)
    return TRUE;

  /* Allocate enough room to store data */
  vmem->cache = g_malloc (GST_VIDEO_INFO_SIZE (info));
  vmem->cached_data[0] = vmem->cache;
  vmem->cached_data[1] = vmem->cache + GST_VIDEO_INFO_PLANE_OFFSET (info, 1);
  vmem->cached_data[2] = vmem->cache + GST_VIDEO_INFO_PLANE_OFFSET (info, 2);
  vmem->destination_pitches[0] = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  vmem->destination_pitches[1] = GST_VIDEO_INFO_PLANE_STRIDE (info, 1);
  vmem->destination_pitches[2] = GST_VIDEO_INFO_PLANE_STRIDE (info, 2);

  GST_DEBUG ("cached_data %p %p %p",
      vmem->cached_data[0], vmem->cached_data[1], vmem->cached_data[2]);
  GST_DEBUG ("pitches %d %d %d",
      vmem->destination_pitches[0],
      vmem->destination_pitches[1], vmem->destination_pitches[2]);

#ifndef GST_DISABLE_GST_DEBUG
  before = gst_util_get_timestamp ();
#endif
  vdp_stat =
      vmem->device->vdp_video_surface_get_bits_ycbcr (vmem->surface,
      vmem->ycbcr_format, vmem->cached_data, vmem->destination_pitches);
#ifndef GST_DISABLE_GST_DEBUG
  after = gst_util_get_timestamp ();
#endif

  GST_CAT_WARNING (GST_CAT_PERFORMANCE, "Downloading took %" GST_TIME_FORMAT,
      GST_TIME_ARGS (after - before));

  if (vdp_stat != VDP_STATUS_OK) {
    GST_ERROR ("Failed to get bits : %s",
        vmem->device->vdp_get_error_string (vdp_stat));
    g_free (vmem->cache);
    vmem->cache = NULL;
    return FALSE;
  }

  return TRUE;
}
/* GstBaseSrc::set_caps — configures the source blocksize from the
 * negotiated caps.
 *
 * Fix: the return value of gst_video_info_from_caps() was previously
 * ignored, so invalid caps would configure the blocksize from an
 * uninitialized GstVideoInfo. Now the failure is reported to the caller. */
static gboolean
gst_euresys_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstVideoInfo vinfo;

  GST_DEBUG_OBJECT (bsrc, "set_caps with caps=%" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&vinfo, caps)) {
    GST_WARNING_OBJECT (bsrc, "unable to parse video info from caps");
    return FALSE;
  }

  /* TODO: check stride alignment */
  gst_base_src_set_blocksize (bsrc, GST_VIDEO_INFO_SIZE (&vinfo));

  return TRUE;
}
/* Binds the dmabuf backing @inbuf to the vaapi meta of @outbuf: wraps the
 * dmabuf fd in a new VA surface described by the (possibly updated) sinkpad
 * video info, and stores its proxy in @outbuf's GstVaapiVideoMeta. A ref on
 * @inbuf is held until the proxy is destroyed, keeping the fd alive.
 *
 * Returns TRUE on success, FALSE on any failure (logged). */
static gboolean
plugin_bind_dma_to_vaapi_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstVideoInfo *const vip = &plugin->sinkpad_info;
  GstVaapiVideoMeta *meta;
  GstVaapiSurface *surface;
  GstVaapiSurfaceProxy *proxy;
  gint fd;

  fd = gst_dmabuf_memory_get_fd (gst_buffer_peek_memory (inbuf, 0));
  if (fd < 0)
    return FALSE;

  /* pick up per-buffer plane offsets/strides before describing the fd */
  if (!plugin_update_sinkpad_info_from_buffer (plugin, inbuf))
    goto error_update_sinkpad_info;

  meta = gst_buffer_get_vaapi_video_meta (outbuf);
  g_return_val_if_fail (meta != NULL, FALSE);

  surface = gst_vaapi_surface_new_with_dma_buf_handle (plugin->display, fd,
      GST_VIDEO_INFO_SIZE (vip), GST_VIDEO_INFO_FORMAT (vip),
      GST_VIDEO_INFO_WIDTH (vip), GST_VIDEO_INFO_HEIGHT (vip), vip->offset,
      vip->stride);
  if (!surface)
    goto error_create_surface;

  proxy = gst_vaapi_surface_proxy_new (surface);
  gst_vaapi_object_unref (surface);
  if (!proxy)
    goto error_create_proxy;

  /* keep @inbuf (and thus the dmabuf fd) alive while the proxy exists */
  gst_vaapi_surface_proxy_set_destroy_notify (proxy,
      (GDestroyNotify) gst_buffer_unref, (gpointer) gst_buffer_ref (inbuf));
  gst_vaapi_video_meta_set_surface_proxy (meta, proxy);
  gst_vaapi_surface_proxy_unref (proxy);

  return TRUE;

  /* ERRORS */
error_update_sinkpad_info:
  GST_ERROR ("failed to update sink pad video info from video meta");
  return FALSE;
error_create_surface:
  GST_ERROR ("failed to create VA surface from dma_buf handle");
  return FALSE;
error_create_proxy:
  GST_ERROR ("failed to create VA surface proxy from wrapped VA surface");
  return FALSE;
}
/**
 * gst_vaapi_surface_new_with_dma_buf_handle:
 * @display: a #GstVaapiDisplay
 * @fd: the DRM PRIME file descriptor
 * @vi: the #GstVideoInfo describing the surface layout (buffer size,
 *   format, dimensions, plane offsets and strides)
 *
 * Creates a new #GstVaapiSurface with an external DRM PRIME file
 * descriptor. The newly created VA surfaces owns the supplied buffer
 * handle.
 *
 * Return value: the newly allocated #GstVaapiSurface object, or %NULL
 * if creation from DRM PRIME fd failed, or is not supported
 */
GstVaapiSurface *
gst_vaapi_surface_new_with_dma_buf_handle (GstVaapiDisplay * display,
    gint fd, GstVideoInfo * vi)
{
  GstVaapiBufferProxy *proxy;
  GstVaapiSurface *surface;

  /* Wrap the fd in a dmabuf buffer proxy sized from the video info. */
  proxy = gst_vaapi_buffer_proxy_new ((gintptr) fd,
      GST_VAAPI_BUFFER_MEMORY_TYPE_DMA_BUF, GST_VIDEO_INFO_SIZE (vi), NULL,
      NULL);
  if (!proxy)
    return NULL;

  surface = gst_vaapi_surface_new_from_buffer_proxy (display, proxy, vi);
  /* The surface holds its own reference on success; drop ours either way. */
  gst_vaapi_buffer_proxy_unref (proxy);
  return surface;
}
/* GstBufferPool::start — backs the pool with an anonymous shared-memory
 * file: creates a temp file, sizes it, mmaps it and registers it with the
 * compositor as a wl_shm_pool. The file is unlinked and its fd closed once
 * mapped, so the memory lives only as long as the mappings.
 *
 * Returns FALSE on any system-call failure (logged with errno). */
static gboolean
gst_wayland_buffer_pool_start (GstBufferPool * pool)
{
  GstWaylandBufferPool *self = GST_WAYLAND_BUFFER_POOL (pool);
  guint size = 0;
  int fd;
  char filename[1024];
  static int init = 0;

  GST_DEBUG_OBJECT (self, "Initializing wayland buffer pool");

  /* configure */
  /* room for 15 frames — presumably the pool's max buffer count; confirm
   * against the pool's config (NOTE: magic number) */
  size = GST_VIDEO_INFO_SIZE (&self->info) * 15;

  /* allocate shm pool */
  snprintf (filename, 1024, "%s/%s-%d-%s", g_get_user_runtime_dir (),
      "wayland-shm", init++, "XXXXXX");

  fd = mkstemp (filename);
  if (fd < 0) {
    GST_ERROR_OBJECT (pool, "opening temp file %s failed: %s", filename,
        strerror (errno));
    return FALSE;
  }
  if (ftruncate (fd, size) < 0) {
    GST_ERROR_OBJECT (pool, "ftruncate failed: %s", strerror (errno));
    close (fd);
    return FALSE;
  }

  self->data = mmap (NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
  if (self->data == MAP_FAILED) {
    GST_ERROR_OBJECT (pool, "mmap failed: %s", strerror (errno));
    close (fd);
    return FALSE;
  }

  self->wl_pool = wl_shm_create_pool (self->display->shm, fd, size);
  /* the mapping and the compositor keep the storage alive */
  unlink (filename);
  close (fd);

  self->size = size;
  self->used = 0;

  return GST_BUFFER_POOL_CLASS (parent_class)->start (pool);
}
/* Wraps the fd-backed GstMemory @mem in a wl_buffer described by @info.
 *
 * Unlike older variants, the fd is NOT consumed: the compositor dups it
 * internally when the pool is created. The temporary wl_shm_pool is
 * destroyed before returning; the wl_buffer keeps the storage referenced.
 *
 * Returns the new wl_buffer, or NULL if the strides/offsets are not
 * representable in SHM, the memory is not fd memory or too small, or the
 * format is not supported by the compositor. */
struct wl_buffer *
gst_wl_shm_memory_construct_wl_buffer (GstMemory * mem, GstWlDisplay * display,
    const GstVideoInfo * info)
{
  gint width, height, stride;
  gsize offset, size, memsize, maxsize;
  enum wl_shm_format format;
  struct wl_shm_pool *wl_pool;
  struct wl_buffer *wbuffer;

  if (!gst_wl_shm_validate_video_info (info)) {
    GST_DEBUG_OBJECT (display, "Unsupported strides and offsets.");
    return NULL;
  }

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  size = GST_VIDEO_INFO_SIZE (info);
  format = gst_video_format_to_wl_shm_format (GST_VIDEO_INFO_FORMAT (info));

  /* the wl_buffer starts at the memory's own offset plus plane 0's offset */
  memsize = gst_memory_get_sizes (mem, &offset, &maxsize);
  offset += GST_VIDEO_INFO_PLANE_OFFSET (info, 0);

  g_return_val_if_fail (gst_is_fd_memory (mem), NULL);
  g_return_val_if_fail (size <= memsize, NULL);
  g_return_val_if_fail (gst_wl_display_check_format_for_shm (display,
          GST_VIDEO_INFO_FORMAT (info)), NULL);

  GST_DEBUG_OBJECT (display, "Creating wl_buffer from SHM of size %"
      G_GSSIZE_FORMAT " (%d x %d, stride %d), format %s", size, width, height,
      stride, gst_wl_shm_format_to_string (format));

  wl_pool = wl_shm_create_pool (display->shm, gst_fd_memory_get_fd (mem),
      memsize);
  wbuffer = wl_shm_pool_create_buffer (wl_pool, offset, width, height, stride,
      format);
  wl_shm_pool_destroy (wl_pool);

  return wbuffer;
}
/* Returns the number of padding bytes per frame in the input stream.
 *
 * In the video parser, the overhead is the difference between the frame
 * stride and the actual frame size: when the stride is larger, the extra
 * bytes are padding that the base class must skip; otherwise there is no
 * overhead. */
static gint
gst_raw_video_parse_get_overhead_size (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config)
{
  GstRawVideoParse *self = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *cfg =
      gst_raw_video_parse_get_config_ptr (self, config);
  /* 64-bit intermediates avoid overflow in the subtraction below */
  gint64 frame_size = GST_VIDEO_INFO_SIZE (&(cfg->info));
  gint64 frame_stride = cfg->frame_stride;
  gint64 padding;

  GST_LOG_OBJECT (self,
      "frame size: %" G_GINT64_FORMAT " frame stride: %" G_GINT64_FORMAT,
      frame_size, frame_stride);

  padding = frame_stride - frame_size;
  return (padding > 0) ? (gint) padding : 0;
}
/* Reports the stream rate in @format units per second as a fraction
 * *units_per_sec_n / *units_per_sec_d.
 *
 * BYTES: frame size x framerate, reduced by the GCD to minimize overflow
 * risk in the base class. DEFAULT (frames): the framerate itself. Other
 * formats are a programming error (assert). */
static void
gst_raw_video_parse_get_units_per_second (GstRawBaseParse * raw_base_parse,
    GstFormat format, GstRawBaseParseConfig config, gsize * units_per_sec_n,
    gsize * units_per_sec_d)
{
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *config_ptr =
      gst_raw_video_parse_get_config_ptr (raw_video_parse, config);

  switch (format) {
    case GST_FORMAT_BYTES:
    {
      gsize framesize = GST_VIDEO_INFO_SIZE (&(config_ptr->info));
      gint64 n = framesize * config_ptr->framerate_n;
      gint64 d = config_ptr->framerate_d;
      gint64 common_div = gst_util_greatest_common_divisor_int64 (n, d);

      GST_DEBUG_OBJECT (raw_video_parse,
          "n: %" G_GINT64_FORMAT " d: %" G_GINT64_FORMAT " common divisor: %"
          G_GINT64_FORMAT, n, d, common_div);

      /* Divide numerator and denominator by greatest common divisor.
       * This minimizes the risk of integer overflows in the baseparse class. */
      *units_per_sec_n = n / common_div;
      *units_per_sec_d = d / common_div;

      break;
    }

    case GST_FORMAT_DEFAULT:
    {
      *units_per_sec_n = config_ptr->framerate_n;
      *units_per_sec_d = config_ptr->framerate_d;
      break;
    }

    default:
      g_assert_not_reached ();
  }
}
/* GstBaseSink::propose_allocation — offers a default (NULL) pool sized for
 * the negotiated caps with a minimum of 2 buffers, and advertises
 * GstVideoMeta support.
 *
 * Fix: gst_query_add_allocation_meta() takes three arguments
 * (query, api, params); the trailing params argument (NULL) was missing,
 * which does not match the API and would not compile. */
static gboolean
gst_aasink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstCaps *caps;
  GstVideoInfo info;
  guint size;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    goto no_caps;

  if (!gst_video_info_from_caps (&info, caps))
    goto invalid_caps;

  size = GST_VIDEO_INFO_SIZE (&info);

  /* we need at least 2 buffer because we hold on to the last one */
  gst_query_add_allocation_pool (query, NULL, size, 2, 0);

  /* we support various metadata */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }
}