/* Render @in_tex into @out_tex through an FBO, running the transformation
 * callback on the GL thread. Blocking. Always reports success. */
static gboolean
gst_gl_transformation_filter_texture (GstGLFilter * filter, guint in_tex,
    guint out_tex)
{
  GstGLTransformation *self = GST_GL_TRANSFORMATION (filter);

  self->in_tex = in_tex;

  /* blocking call, use a FBO */
  gst_gl_context_use_fbo_v2 (filter->context,
      GST_VIDEO_INFO_WIDTH (&filter->out_info),
      GST_VIDEO_INFO_HEIGHT (&filter->out_info),
      filter->fbo, filter->depthbuffer, out_tex,
      gst_gl_transformation_callback, (gpointer) self);

  return TRUE;
}
/* Store the negotiated input state and configure the decoder's output
 * state for SVG rendering.
 *
 * Returns: always TRUE.
 */
static gboolean
gst_rsvg_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
  GstVideoInfo *info = &state->info;
  GstVideoCodecState *output_state;

  /* replace any previously stored input state */
  if (rsvg->input_state)
    gst_video_codec_state_unref (rsvg->input_state);
  rsvg->input_state = gst_video_codec_state_ref (state);

  /* Create the output state */
  output_state = gst_video_decoder_set_output_state (decoder,
      GST_RSVG_VIDEO_FORMAT, GST_VIDEO_INFO_WIDTH (info),
      GST_VIDEO_INFO_HEIGHT (info), rsvg->input_state);
  /* FIX: set_output_state() returns a new reference (transfer full);
   * it was previously discarded, leaking the codec state */
  gst_video_codec_state_unref (output_state);

  return TRUE;
}
// Pulls the current GstSample, uploads its video frame into a BitmapTexture
// acquired from the mapper's pool, and returns it. Returns nullptr on any
// failure (no sample, bad caps, unmappable buffer).
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    // Hold the sample mutex for the whole function: m_sample may be replaced
    // concurrently (presumably by the streaming thread — confirm).
    GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!m_sample)
        return nullptr;

    GstCaps* caps = gst_sample_get_caps(m_sample);
    if (!caps)
        return nullptr;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return nullptr;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
    GstBuffer* buffer = gst_sample_get_buffer(m_sample);

#if GST_CHECK_VERSION(1, 1, 0)
    // Fast path: let GStreamer upload straight into our GL texture when the
    // buffer carries a GstVideoGLTextureUploadMeta.
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };
            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return texture;
        }
    }
#endif

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    // Slow path: map the frame for CPU reading and copy it into the texture.
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return nullptr;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
    return texture;
}
/* Cache frame geometry for the negotiated input format and (re)allocate the
 * float scratch buffer used by the blur passes.
 *
 * Returns: always TRUE.
 */
static gboolean
gst_gaussianblur_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstGaussianBlur *gb = GST_GAUSSIANBLUR (filter);
  guint32 n_elems;

  gb->width = GST_VIDEO_INFO_WIDTH (in_info);
  gb->height = GST_VIDEO_INFO_HEIGHT (in_info);

  /* get stride */
  gb->stride = GST_VIDEO_INFO_COMP_STRIDE (in_info, 0);
  n_elems = gb->stride * gb->height;

  /* FIX: set_info may run again on renegotiation; free the previous scratch
   * buffer before allocating a new one to avoid leaking it
   * (g_free (NULL) is a no-op on the first call) */
  g_free (gb->tempim);
  gb->tempim = g_malloc (sizeof (gfloat) * n_elems);

  return TRUE;
}
/* GL window resize handler: recomputes the viewport (honouring the
 * keep-aspect-ratio setting), or defers entirely to a client-registered
 * reshape callback when one is set. */
static void
gst_glimage_sink_on_resize (const GstGLImageSink * gl_sink, gint width,
    gint height)
{
  /* Here gl_sink members (ex:gl_sink->info) have a life time of set_caps.
   * It means that they cannot not change between two set_caps */
  const GstGLFuncs *gl = gl_sink->context->gl_vtable;

  GST_TRACE ("GL Window resized to %ux%u", width, height);

  /* check if a client reshape callback is registered */
  if (gl_sink->clientReshapeCallback)
    gl_sink->clientReshapeCallback (width, height, gl_sink->client_data);
  /* default reshape */
  else {
    if (gl_sink->keep_aspect_ratio) {
      GstVideoRectangle src, dst, result;

      /* src = negotiated video size, dst = new window size; center the
       * video inside the window, preserving the aspect ratio */
      src.x = 0;
      src.y = 0;
      src.w = GST_VIDEO_INFO_WIDTH (&gl_sink->info);
      src.h = GST_VIDEO_INFO_HEIGHT (&gl_sink->info);

      dst.x = 0;
      dst.y = 0;
      dst.w = width;
      dst.h = height;

      gst_video_sink_center_rect (src, dst, &result, TRUE);
      gl->Viewport (result.x, result.y, result.w, result.h);
    } else {
      gl->Viewport (0, 0, width, height);
    }
#if GST_GL_HAVE_OPENGL
    /* legacy desktop GL: reset the projection to a 2D orthographic view
     * matching the new window size */
    if (USING_OPENGL (gl_sink->context)) {
      gl->MatrixMode (GL_PROJECTION);
      gl->LoadIdentity ();
      gluOrtho2D (0, width, 0, height);
      gl->MatrixMode (GL_MODELVIEW);
    }
#endif
  }
}
/* Report the size in bytes of one video frame described by @caps. */
static gboolean
gst_ffmpegscale_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    gsize * size)
{
  GstVideoInfo vinfo;

  if (!gst_video_info_from_caps (&vinfo, caps))
    return FALSE;

  *size = vinfo.size;

  GST_DEBUG_OBJECT (trans,
      "unit size = %" G_GSIZE_FORMAT " for format %d w %d height %d",
      *size, GST_VIDEO_INFO_FORMAT (&vinfo), GST_VIDEO_INFO_WIDTH (&vinfo),
      GST_VIDEO_INFO_HEIGHT (&vinfo));

  return TRUE;
}
/* Output caps have been negotiated. Set up a suitable DMA buffer pool
 * (cleaning up any old buffer pool first) and inform subclass about
 * the new output caps. */
static gboolean gst_imx_compositor_negotiated_caps(GstImxBPVideoAggregator *videoaggregator, GstCaps *caps)
{
	GstImxCompositor *compositor = GST_IMX_COMPOSITOR(videoaggregator);
	GstImxCompositorClass *klass;
	GstVideoInfo info;

	if (!gst_video_info_from_caps(&info, caps))
	{
		GST_ERROR_OBJECT(compositor, "could not get video info from negotiated caps");
		return FALSE;
	}

	/* Get the new overall width/height from video info */
	compositor->overall_width = GST_VIDEO_INFO_WIDTH(&info);
	compositor->overall_height = GST_VIDEO_INFO_HEIGHT(&info);
	GST_DEBUG_OBJECT(videoaggregator, "negotiated width/height: %u/%u", compositor->overall_width, compositor->overall_height);

	/* Update the overall region based on the new overall width/height */
	gst_imx_compositor_update_overall_region(compositor);

	/* Cleanup old buffer pool */
	if (compositor->dma_bufferpool != NULL)
		gst_object_unref(GST_OBJECT(compositor->dma_bufferpool));

	/* And get the new one */
	compositor->dma_bufferpool = gst_imx_compositor_create_bufferpool(compositor, caps, 0, 0, 0, NULL, NULL);
	if (compositor->dma_bufferpool == NULL)
		return FALSE;

	klass = GST_IMX_COMPOSITOR_CLASS(G_OBJECT_GET_CLASS(compositor));

	/* Inform subclass about the new output video info */
	if (klass->set_output_video_info != NULL)
		return klass->set_output_video_info(compositor, &info);

	return TRUE;
}
/* init resources that need a gl context: allocate one RGBA8 intermediate
 * texture per processing stage, sized to the negotiated output, with linear
 * filtering and edge clamping. */
static void
gst_gl_effects_init_gl_resources (GstGLFilter * filter)
{
  GstGLEffects *effects = GST_GL_EFFECTS (filter);
  const gint out_width = GST_VIDEO_INFO_WIDTH (&filter->out_info);
  const gint out_height = GST_VIDEO_INFO_HEIGHT (&filter->out_info);
  gint tex_idx;

  for (tex_idx = 0; tex_idx < NEEDED_TEXTURES; tex_idx++) {
    glGenTextures (1, &effects->midtexture[tex_idx]);
    glBindTexture (GL_TEXTURE_2D, effects->midtexture[tex_idx]);
    glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA8, out_width, out_height,
        0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  }
}
/* This function handles GstXImageBuffer creation depending on XShm availability */
static GstFlowReturn
xvimage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXvImageBufferPool *xvpool = GST_XVIMAGE_BUFFER_POOL_CAST (pool);
  GstVideoInfo *vinfo = &xvpool->info;
  GstBuffer *buf;
  GstMemory *mem;
  GError *err = NULL;

  buf = gst_buffer_new ();

  mem = gst_xvimage_allocator_alloc (xvpool->allocator, xvpool->im_format,
      vinfo, xvpool->padded_width, xvpool->padded_height, &xvpool->crop, &err);

  if (mem == NULL) {
    /* NOTE(review): assumes the allocator always sets @err on failure —
     * err->message would crash otherwise; confirm against the allocator */
    gst_buffer_unref (buf);
    GST_WARNING_OBJECT (pool, "can't create image: %s", err->message);
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }

  gst_buffer_append_memory (buf, mem);

  if (xvpool->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (vinfo), GST_VIDEO_INFO_WIDTH (vinfo),
        GST_VIDEO_INFO_HEIGHT (vinfo), GST_VIDEO_INFO_N_PLANES (vinfo),
        vinfo->offset, vinfo->stride);
  }

  *buffer = buf;

  return GST_FLOW_OK;
}
/* (Re)configure the decoder's output state for the given frame size and
 * libjpeg colour space, skipping the work when nothing changed. */
static void
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
{
  GstVideoCodecState *outstate;
  GstVideoFormat format;

  /* map libjpeg colour space to a GStreamer video format */
  switch (clrspc) {
    case JCS_RGB:
      format = GST_VIDEO_FORMAT_RGB;
      break;
    case JCS_GRAYSCALE:
      format = GST_VIDEO_FORMAT_GRAY8;
      break;
    default:
      format = GST_VIDEO_FORMAT_I420;
      break;
  }

  /* Compare to currently configured output state */
  outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  if (outstate) {
    GstVideoInfo *info = &outstate->info;
    gboolean unchanged = (width == GST_VIDEO_INFO_WIDTH (info)
        && height == GST_VIDEO_INFO_HEIGHT (info)
        && format == GST_VIDEO_INFO_FORMAT (info));

    gst_video_codec_state_unref (outstate);
    if (unchanged)
      return;
  }

  outstate = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec),
      format, width, height, dec->input_state);
  gst_video_codec_state_unref (outstate);

  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
}
/* Grow the output caps so that every sink pad's frame, offset by its x/y
 * position, fits inside the output frame. Returns NULL when no suitable
 * size could be determined. */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  GstVideoInfo info;
  GstCaps *out_caps = NULL;
  gint best_width = -1, best_height = -1;
  GList *walk;

  gst_video_info_from_caps (&info, caps);

  GST_OBJECT_LOCK (vagg);
  for (walk = GST_ELEMENT (vagg)->sinkpads; walk; walk = walk->next) {
    GstVideoAggregatorPad *vaggpad = walk->data;
    GstCompositorPad *compositor_pad = GST_COMPOSITOR_PAD (vaggpad);
    gint pad_width = GST_VIDEO_INFO_WIDTH (&vaggpad->info);
    gint pad_height = GST_VIDEO_INFO_HEIGHT (&vaggpad->info);
    gint needed_width, needed_height;

    /* pads without a negotiated size don't constrain the output */
    if (pad_width == 0 || pad_height == 0)
      continue;

    /* negative offsets don't enlarge the canvas */
    needed_width = pad_width + MAX (compositor_pad->xpos, 0);
    needed_height = pad_height + MAX (compositor_pad->ypos, 0);

    best_width = MAX (best_width, needed_width);
    best_height = MAX (best_height, needed_height);
  }
  GST_OBJECT_UNLOCK (vagg);

  if (best_width > 0 && best_height > 0) {
    info.width = best_width;
    info.height = best_height;
    if (set_functions (GST_COMPOSITOR (vagg), &info))
      out_caps = gst_video_info_to_caps (&info);
  }

  return out_caps;
}
/* Return TRUE when @surface's size differs from the decoder's currently
 * configured output size. */
static gboolean
is_surface_resolution_changed (GstVideoDecoder * vdec,
    GstVaapiSurface * surface)
{
  GstVideoCodecState *state;
  guint surface_width, surface_height;
  guint configured_width, configured_height;
  gboolean changed;

  gst_vaapi_surface_get_size (surface, &surface_width, &surface_height);

  state = gst_video_decoder_get_output_state (vdec);
  configured_width = GST_VIDEO_INFO_WIDTH (&state->info);
  configured_height = GST_VIDEO_INFO_HEIGHT (&state->info);
  gst_video_codec_state_unref (state);

  changed = (surface_width != configured_width)
      || (surface_height != configured_height);

  return changed;
}
/* Parse the negotiated caps and cache format, geometry and pixel aspect
 * ratio on the sink.
 *
 * Returns: FALSE when the caps cannot be parsed, TRUE otherwise.
 */
static gboolean
gst_gdk_pixbuf_sink_set_caps (GstBaseSink * basesink, GstCaps * caps)
{
  GstGdkPixbufSink *sink = GST_GDK_PIXBUF_SINK (basesink);
  GstVideoInfo info;
  GstVideoFormat fmt;
  gint w, h, s, par_n, par_d;

  GST_LOG_OBJECT (sink, "caps: %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps)) {
    GST_WARNING_OBJECT (sink, "parse_caps failed");
    return FALSE;
  }

  fmt = GST_VIDEO_INFO_FORMAT (&info);
  w = GST_VIDEO_INFO_WIDTH (&info);
  h = GST_VIDEO_INFO_HEIGHT (&info);
  s = GST_VIDEO_INFO_COMP_PSTRIDE (&info, 0);
  par_n = GST_VIDEO_INFO_PAR_N (&info);
  /* FIX: was GST_VIDEO_INFO_PAR_N, which made par_d a copy of par_n and
   * silently broke every non-square pixel aspect ratio */
  par_d = GST_VIDEO_INFO_PAR_D (&info);

  /* only packed RGB (3 bytes/pixel) and RGBA (4 bytes/pixel) are supported */
  g_assert ((fmt == GST_VIDEO_FORMAT_RGB && s == 3)
      || (fmt == GST_VIDEO_FORMAT_RGBA && s == 4));

  GST_VIDEO_SINK_WIDTH (sink) = w;
  GST_VIDEO_SINK_HEIGHT (sink) = h;

  sink->par_n = par_n;
  sink->par_d = par_d;
  sink->has_alpha = GST_VIDEO_INFO_HAS_ALPHA (&info);

  GST_INFO_OBJECT (sink, "format             : %d", fmt);
  GST_INFO_OBJECT (sink, "width x height     : %d x %d", w, h);
  GST_INFO_OBJECT (sink, "pixel-aspect-ratio : %d/%d", par_n, par_d);

  sink->info = info;

  return TRUE;
}
/* Validate the pool config, cache the video info parsed from its caps and
 * chain up to the parent implementation. */
static gboolean
gst_wayland_buffer_pool_set_config (GstBufferPool * pool,
    GstStructure * config)
{
  GstWaylandBufferPool *self = GST_WAYLAND_BUFFER_POOL_CAST (pool);
  GstCaps *caps;

  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL)) {
    GST_WARNING_OBJECT (pool, "invalid config");
    return FALSE;
  }

  if (caps == NULL) {
    GST_WARNING_OBJECT (pool, "no caps in config");
    return FALSE;
  }

  /* now parse the caps from the config */
  if (!gst_video_info_from_caps (&self->info, caps)) {
    GST_WARNING_OBJECT (pool,
        "failed getting geometry from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }

  GST_LOG_OBJECT (pool, "%dx%d, caps %" GST_PTR_FORMAT,
      GST_VIDEO_INFO_WIDTH (&self->info), GST_VIDEO_INFO_HEIGHT (&self->info),
      caps);

  /*Fixme: Enable metadata checking handling based on the config of pool */

  return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config);
}
gboolean gst_opencv_parse_iplimage_params_from_caps (GstCaps * caps, gint * width, gint * height, gint * ipldepth, gint * channels, GError ** err) { GstVideoInfo info; gint i, depth = 0; if (!gst_video_info_from_caps (&info, caps)) { GST_ERROR ("Failed to get the videoinfo from caps"); g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION, "No width/heighti/depth/channels in caps"); return FALSE; } *width = GST_VIDEO_INFO_WIDTH (&info); *height = GST_VIDEO_INFO_HEIGHT (&info); if (GST_VIDEO_INFO_IS_RGB (&info)) *channels = 3; else if (GST_VIDEO_INFO_IS_GRAY (&info)) *channels = 1; else { g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION, "Unsupported caps %s", gst_caps_to_string(caps)); return FALSE; } for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (&info); i++) depth += GST_VIDEO_INFO_COMP_DEPTH (&info, i); if (depth / *channels == 8) { /* TODO signdness? */ *ipldepth = IPL_DEPTH_8U; } else if (depth / *channels == 16) { *ipldepth = IPL_DEPTH_16U; } else { g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION, "Unsupported depth/channels %d/%d", depth, *channels); return FALSE; } return TRUE; }
/* Wrap the SHM (fd-backed) memory of @mem into a new wl_buffer matching the
 * layout described by @info. Returns NULL (and logs) when the strides,
 * offsets or format cannot be represented in wl_shm. The caller owns the
 * returned wl_buffer. */
struct wl_buffer *
gst_wl_shm_memory_construct_wl_buffer (GstMemory * mem, GstWlDisplay * display,
    const GstVideoInfo * info)
{
  gint width, height, stride;
  gsize offset, size, memsize, maxsize;
  enum wl_shm_format format;
  struct wl_shm_pool *wl_pool;
  struct wl_buffer *wbuffer;

  if (!gst_wl_shm_validate_video_info (info)) {
    GST_DEBUG_OBJECT (display, "Unsupported strides and offsets.");
    return NULL;
  }

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);
  stride = GST_VIDEO_INFO_PLANE_STRIDE (info, 0);
  size = GST_VIDEO_INFO_SIZE (info);
  format = gst_video_format_to_wl_shm_format (GST_VIDEO_INFO_FORMAT (info));

  memsize = gst_memory_get_sizes (mem, &offset, &maxsize);
  /* the wl_buffer must point at the first plane inside the memory block */
  offset += GST_VIDEO_INFO_PLANE_OFFSET (info, 0);

  g_return_val_if_fail (gst_is_fd_memory (mem), NULL);
  g_return_val_if_fail (size <= memsize, NULL);
  g_return_val_if_fail (gst_wl_display_check_format_for_shm (display,
          GST_VIDEO_INFO_FORMAT (info)), NULL);

  /* FIX: @size is a gsize (unsigned); it was printed with G_GSSIZE_FORMAT,
   * the macro for the signed gssize — a format/argument mismatch */
  GST_DEBUG_OBJECT (display, "Creating wl_buffer from SHM of size %"
      G_GSIZE_FORMAT " (%d x %d, stride %d), format %s", size, width, height,
      stride, gst_wl_shm_format_to_string (format));

  /* the pool only needs to live long enough to carve the buffer out of it */
  wl_pool = wl_shm_create_pool (display->shm, gst_fd_memory_get_fd (mem),
      memsize);
  wbuffer = wl_shm_pool_create_buffer (wl_pool, offset, width, height, stride,
      format);
  wl_shm_pool_destroy (wl_pool);

  return wbuffer;
}
/* Scope renderer: plot one white dot per stereo sample pair, with channel 1
 * driving x and channel 2 driving y, both centred in the frame. */
static void
render_dots (GstAudioVisualizer * base, guint32 * vdata, gint16 * adata,
    guint num_samples)
{
  guint width = GST_VIDEO_INFO_WIDTH (&base->vinfo);
  guint height = GST_VIDEO_INFO_HEIGHT (&base->vinfo);
  gfloat scale_x, scale_y;
  guint center_x, center_y;
  guint sample, idx;

  /* draw dots 1st channel x, 2nd channel y */
  scale_x = width / 65536.0;
  center_x = width / 2;
  scale_y = height / 65536.0;
  center_y = height / 2;

  idx = 0;
  for (sample = 0; sample < num_samples; sample++) {
    guint px = (guint) (center_x + (gfloat) adata[idx++] * scale_x);
    guint py = (guint) (center_y + (gfloat) adata[idx++] * scale_y);

    draw_dot (vdata, px, py, width, 0x00FFFFFF);
  }
}
/* Allocate the ripple and spiral lookup tables for the negotiated frame
 * size and (re)initialize the effect state. */
static gboolean
gst_shagadelictv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
  gint area;

  area = GST_VIDEO_INFO_WIDTH (in_info) * GST_VIDEO_INFO_HEIGHT (in_info);

  /* drop the tables from any previous negotiation before reallocating */
  g_free (filter->ripple);
  g_free (filter->spiral);

  filter->ripple = (guint8 *) g_malloc (area * 4);
  filter->spiral = (guint8 *) g_malloc (area);

  gst_shagadelic_initialize (filter, in_info);

  return TRUE;
}
// Paints the current frame through the TextureMapper. Two paths:
//  - fallback video sink: copy the frame into a pooled texture and draw it;
//  - GSTREAMER_GL: map the buffer with GST_MAP_GL and draw its GL texture
//    id directly, avoiding a copy.
void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper* textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
    if (!m_player->visible())
        return;

    if (m_usingFallbackVideoSink) {
        if (RefPtr<BitmapTexture> texture = updateTexture(textureMapper))
            textureMapper->drawTexture(*texture.get(), targetRect, matrix, opacity);
        return;
    }

#if USE(GSTREAMER_GL)
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return;

    // With GST_MAP_GL the plane "data" pointer holds the GL texture name
    // rather than pixel data.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    BitmapTexture::Flags flags = BitmapTexture::NoFlag;
    if (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo))
        flags |= BitmapTexture::SupportsAlpha;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    // NOTE(review): reinterpret_cast assumes the mapper is a TextureMapperGL
    // whenever GSTREAMER_GL is enabled — confirm with callers.
    TextureMapperGL* textureMapperGL = reinterpret_cast<TextureMapperGL*>(textureMapper);
    textureMapperGL->drawTexture(textureID, flags, size, targetRect, matrix, opacity);
    gst_video_frame_unmap(&videoFrame);
#endif
}
/* Configure the encoder for the negotiated input format: map it to a PNM
 * type, cache geometry, store the input state and set the output caps.
 * Returns FALSE for formats PNM cannot represent. */
static gboolean
gst_pnmenc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstPnmenc *pnmenc = GST_PNMENC (encoder);
  GstVideoInfo *info = &state->info;
  GstVideoCodecState *output_state;

  /* only RGB and 8-bit grayscale inputs map onto PNM image types */
  switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_RGB:
      pnmenc->info.type = GST_PNM_TYPE_PIXMAP;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      pnmenc->info.type = GST_PNM_TYPE_GRAYMAP;
      break;
    default:
      return FALSE;
  }

  pnmenc->info.width = GST_VIDEO_INFO_WIDTH (info);
  pnmenc->info.height = GST_VIDEO_INFO_HEIGHT (info);
  /* Supported max value is only one, that is 255 */
  pnmenc->info.max = 255;

  if (pnmenc->input_state)
    gst_video_codec_state_unref (pnmenc->input_state);
  pnmenc->input_state = gst_video_codec_state_ref (state);

  output_state =
      gst_video_encoder_set_output_state (encoder,
      gst_caps_new_empty_simple ("image/pnm"), state);
  gst_video_codec_state_unref (output_state);

  return TRUE;
}
/* This function handles GstXImageBuffer creation depending on XShm availability */
static GstFlowReturn
ximage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXImageBufferPool *xpool = GST_XIMAGE_BUFFER_POOL_CAST (pool);
  GstXImageBufferPoolPrivate *priv = xpool->priv;
  GstVideoInfo *vinfo = &priv->info;
  GstBuffer *buf;
  GstMemory *mem;

  buf = gst_buffer_new ();

  mem = ximage_memory_alloc (xpool);
  if (mem == NULL) {
    gst_buffer_unref (buf);
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }
  gst_buffer_append_memory (buf, mem);

  if (priv->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* these are just the defaults for now */
    gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (vinfo), GST_VIDEO_INFO_WIDTH (vinfo),
        GST_VIDEO_INFO_HEIGHT (vinfo), GST_VIDEO_INFO_N_PLANES (vinfo),
        vinfo->offset, vinfo->stride);
  }

  *buffer = buf;

  return GST_FLOW_OK;
}
/* Compute the display aspect ratio for @info (combining the video's pixel
 * aspect ratio with the display's) and store it in
 * widget->display_ratio_num/den.
 *
 * Returns: TRUE on success, FALSE if the ratio computation overflows.
 */
static gboolean
_calculate_par (GtkGstBaseWidget * widget, GstVideoInfo * info)
{
  gboolean ok;
  gint width, height;
  gint par_n, par_d;
  gint display_par_n, display_par_d;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);

  par_n = GST_VIDEO_INFO_PAR_N (info);
  par_d = GST_VIDEO_INFO_PAR_D (info);

  /* treat an unset PAR as 1/1; FIX: the denominator was left unguarded,
   * allowing a zero denominator into the ratio computation */
  if (!par_n)
    par_n = 1;
  if (!par_d)
    par_d = 1;

  /* get display's PAR */
  if (widget->par_n != 0 && widget->par_d != 0) {
    display_par_n = widget->par_n;
    display_par_d = widget->par_d;
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&widget->display_ratio_num,
      &widget->display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (ok) {
    /* FIX: the values previously logged as "DAR" were the display PAR, not
     * the computed display aspect ratio */
    GST_LOG ("PAR: %u/%u DAR:%u/%u", par_n, par_d,
        widget->display_ratio_num, widget->display_ratio_den);
    return TRUE;
  }

  return FALSE;
}
/* Per-frame draw callback for the shader filter: (re)compiles the user
 * shader when it changed, sets the standard uniforms (tex/width/height/time),
 * binds the input texture and renders a fullscreen quad with it.
 * Returns FALSE when the shader fails to (re)compile. */
static gboolean
gst_gl_filtershader_hcallback (GstGLFilter * filter, GstGLMemory * in_tex,
    gpointer stuff)
{
  GstGLFilterShader *filtershader = GST_GL_FILTERSHADER (filter);
  GstGLFuncs *gl = GST_GL_BASE_FILTER (filter)->context->gl_vtable;
  GstGLShader *shader;

  if (!(shader = _maybe_recompile_shader (filtershader)))
    return FALSE;

  gl->ClearColor (0.0, 0.0, 0.0, 1.0);
  gl->Clear (GL_COLOR_BUFFER_BIT);

  gst_gl_shader_use (shader);

  /* FIXME: propertise these */
  gst_gl_shader_set_uniform_1i (shader, "tex", 0);
  gst_gl_shader_set_uniform_1f (shader, "width",
      GST_VIDEO_INFO_WIDTH (&filter->out_info));
  gst_gl_shader_set_uniform_1f (shader, "height",
      GST_VIDEO_INFO_HEIGHT (&filter->out_info));
  gst_gl_shader_set_uniform_1f (shader, "time", filtershader->time);

  /* FIXME: propertise these */
  filter->draw_attr_position_loc =
      gst_gl_shader_get_attribute_location (shader, "a_position");
  filter->draw_attr_texture_loc =
      gst_gl_shader_get_attribute_location (shader, "a_texcoord");

  gl->ActiveTexture (GL_TEXTURE0);
  gl->BindTexture (GL_TEXTURE_2D, gst_gl_memory_get_texture_id (in_tex));

  gst_gl_filter_draw_fullscreen_quad (filter);

  /* NOTE(review): the unref implies _maybe_recompile_shader() returned a
   * new reference — confirm its transfer semantics */
  gst_object_unref (shader);

  return TRUE;
}
/**
 * gst_vaapi_surface_new_full:
 * @display: a #GstVaapiDisplay
 * @vip: the pointer to a #GstVideoInfo
 * @flags: (optional) allocation flags
 *
 * Creates a new #GstVaapiSurface with the specified video information
 * and optional #GstVaapiSurfaceAllocFlags
 *
 * Return value: the newly allocated #GstVaapiSurface object, or %NULL
 * if creation of VA surface with explicit pixel format is not
 * supported or failed.
 */
GstVaapiSurface *
gst_vaapi_surface_new_full (GstVaapiDisplay * display,
    const GstVideoInfo * vip, guint flags)
{
  GstVaapiSurface *surface;

  GST_DEBUG ("size %ux%u, format %s, flags 0x%08x", GST_VIDEO_INFO_WIDTH (vip),
      GST_VIDEO_INFO_HEIGHT (vip),
      gst_vaapi_video_format_to_string (GST_VIDEO_INFO_FORMAT (vip)), flags);

  surface = gst_vaapi_object_new (gst_vaapi_surface_class (), display);
  if (!surface)
    return NULL;

  if (gst_vaapi_surface_create_full (surface, vip, flags))
    return surface;

  /* creation failed: drop our reference and report failure */
  gst_vaapi_object_unref (surface);
  return NULL;
}
/* Wavescope renderer: for each channel, run the samples through the
 * per-channel filter state and plot three coloured dot traces (red, green,
 * blue) derived from different filter taps. */
static void
render_color_dots (GstAudioVisualizer * base, guint32 * vdata,
    gint16 * adata, guint num_samples)
{
  GstWaveScope *scope = (GstWaveScope *) base;
  gint channels = GST_AUDIO_INFO_CHANNELS (&base->ainfo);
  guint i, c, s, x, y, oy;
  gfloat dx, dy;
  guint w = GST_VIDEO_INFO_WIDTH (&base->vinfo);
  guint h = GST_VIDEO_INFO_HEIGHT (&base->vinfo), h1 = h - 2;
  gdouble *flt = scope->flt;

  /* draw dots */
  dx = (gfloat) w / (gfloat) num_samples;       /* horizontal step per sample */
  dy = h / 65536.0;             /* scale the 16-bit sample range to the frame height */
  oy = h / 2;                   /* vertical centre line */
  for (c = 0; c < channels; c++) {
    s = c;                      /* samples are interleaved; start at channel c */
    for (i = 0; i < num_samples; i++) {
      x = (guint) ((gfloat) i * dx);
      /* filter() appears to update the current channel's state in flt[]
       * from this sample — TODO confirm against its definition */
      filter ((gfloat) adata[s]);
      /* three taps of the filter state, clamped to h-2, drawn R/G/B */
      y = (guint) (oy + flt[0] * dy);
      y = MIN (y, h1);
      draw_dot_c (vdata, x, y, w, 0x00FF0000);
      y = (guint) (oy + flt[3] * dy);
      y = MIN (y, h1);
      draw_dot_c (vdata, x, y, w, 0x0000FF00);
      y = (guint) (oy + (flt[4] + flt[5]) * dy);
      y = MIN (y, h1);
      draw_dot_c (vdata, x, y, w, 0x000000FF);
      s += channels;
    }
    flt += 6;                   /* 6 state values per channel */
  }
}
/* Stores the new video frame size on the platform and makes sure the window
 * size / resize callback is updated accordingly. Takes the platform lock for
 * the whole operation; a no-op when no window has been created yet. */
void gst_imx_egl_viv_sink_egl_platform_set_video_info(GstImxEglVivSinkEGLPlatform *platform, GstVideoInfo *video_info)
{
	Window x11_window;

	EGL_PLATFORM_LOCK(platform);

	/* nothing to resize when the window hasn't been opened yet */
	if (platform->native_window == 0)
	{
		GST_LOG("window not open - cannot set video info");
		EGL_PLATFORM_UNLOCK(platform);
		return;
	}

	x11_window = (Window)(platform->native_window);

	platform->video_width = GST_VIDEO_INFO_WIDTH(video_info);
	platform->video_height = GST_VIDEO_INFO_HEIGHT(video_info);

	/* When the window size is externally constrained (fullscreen, fixed size,
	 * or embedded in a parent window), only the callback needs to run; the
	 * window itself cannot follow the video size. */
	if (platform->fullscreen || (platform->fixed_window_width != 0) || (platform->fixed_window_height != 0) || (platform->parent_window != 0))
	{
		/* even though the window itself might not have been resized, the callback
		 * still needs to be invoked, because it depends on both the window and the
		 * video frame sizes */
		if (platform->window_resized_event_cb != NULL)
		{
			// do not call the resize callback here directly; instead, notify the main loop about this change
			// because here, the EGL context is not and cannot be set
			gst_imx_egl_viv_sink_egl_platform_send_cmd(platform, GSTIMX_EGLX11_CMD_CALL_RESIZE_CB);
		}
	}
	else
	{
		/* not calling the resize callback here, since the XResizeWindow() call
		 * creates a resize event that will be handled in the main loop */
		XResizeWindow((Display *)(platform->native_display), x11_window, GST_VIDEO_INFO_WIDTH(video_info), GST_VIDEO_INFO_HEIGHT(video_info));
	}

	EGL_PLATFORM_UNLOCK(platform);
}
/**
 * gst_gl_upload_perform_with_gl_texture_upload_meta:
 * @upload: a #GstGLUpload
 * @meta: a #GstVideoGLTextureUploadMeta
 * @texture_id: resulting GL textures to place the data into.
 *
 * Uploads @meta into @texture_id.
 *
 * Returns: whether the upload was successful
 */
gboolean
gst_gl_upload_perform_with_gl_texture_upload_meta (GstGLUpload * upload,
    GstVideoGLTextureUploadMeta * meta, guint texture_id[4])
{
  gboolean ret;

  g_return_val_if_fail (upload != NULL, FALSE);
  g_return_val_if_fail (meta != NULL, FALSE);

  /* flipped/rotated textures are uploaded anyway, just logged as a FIXME */
  if (meta->texture_orientation !=
      GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL)
    GST_FIXME_OBJECT (upload, "only x-normal,y-normal textures supported, "
        "the images will not appear the right way up");
  if (meta->texture_type[0] != GST_VIDEO_GL_TEXTURE_TYPE_RGBA) {
    GST_FIXME_OBJECT (upload, "only single rgba texture supported");
    return FALSE;
  }

  GST_OBJECT_LOCK (upload);
  upload->priv->meta = meta;
  /* lazily create the scratch texture used by the upload implementation */
  if (!upload->priv->tex_id)
    gst_gl_context_gen_texture (upload->context, &upload->priv->tex_id,
        GST_VIDEO_FORMAT_RGBA, GST_VIDEO_INFO_WIDTH (&upload->in_info),
        GST_VIDEO_INFO_HEIGHT (&upload->in_info));

  GST_LOG ("Uploading with GLTextureUploadMeta with textures %i,%i,%i,%i",
      texture_id[0], texture_id[1], texture_id[2], texture_id[3]);

  /* run the actual upload on the GL thread; the result is stored in
   * priv->result by _do_upload_with_meta */
  gst_gl_context_thread_add (upload->context,
      (GstGLContextThreadFunc) _do_upload_with_meta, upload);

  ret = upload->priv->result;
  GST_OBJECT_UNLOCK (upload);

  return ret;
}
/* This function handles GstXImageBuffer creation depending on XShm availability */
static GstFlowReturn
xvimage_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstXvImageBufferPool *xvpool = GST_XVIMAGE_BUFFER_POOL_CAST (pool);
  GstXvImageBufferPoolPrivate *priv = xvpool->priv;
  GstVideoInfo *vinfo = &priv->info;
  GstBuffer *buf;
  GstXvImageMeta *meta;

  buf = gst_buffer_new ();

  meta = gst_buffer_add_xvimage_meta (buf, xvpool);
  if (meta == NULL) {
    gst_buffer_unref (buf);
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }

  if (priv->add_metavideo) {
    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (vinfo), GST_VIDEO_INFO_WIDTH (vinfo),
        GST_VIDEO_INFO_HEIGHT (vinfo), GST_VIDEO_INFO_N_PLANES (vinfo),
        vinfo->offset, vinfo->stride);
  }

  *buffer = buf;

  return GST_FLOW_OK;
}
/* Parse the negotiated caps for one of the two input pads, cache geometry
 * and framerate, derive the transition length in frames and rebuild the
 * transition mask. */
static gboolean
gst_smpte_setcaps (GstPad * pad, GstCaps * caps)
{
  GstSMPTE *smpte = GST_SMPTE (GST_PAD_PARENT (pad));
  GstVideoInfo vinfo;
  gboolean ret;

  gst_video_info_init (&vinfo);
  if (!gst_video_info_from_caps (&vinfo, caps))
    return FALSE;

  smpte->width = GST_VIDEO_INFO_WIDTH (&vinfo);
  smpte->height = GST_VIDEO_INFO_HEIGHT (&vinfo);
  smpte->fps_num = GST_VIDEO_INFO_FPS_N (&vinfo);
  smpte->fps_denom = GST_VIDEO_INFO_FPS_D (&vinfo);

  /* figure out the duration in frames */
  smpte->end_position = gst_util_uint64_scale (smpte->duration,
      smpte->fps_num, GST_SECOND * smpte->fps_denom);

  GST_DEBUG_OBJECT (smpte, "duration: %d frames", smpte->end_position);

  ret = gst_smpte_update_mask (smpte, smpte->type, smpte->invert,
      smpte->depth, smpte->width, smpte->height);

  /* remember which input pad this video info belongs to */
  if (pad == smpte->sinkpad1) {
    GST_DEBUG_OBJECT (smpte, "setting pad1 info");
    smpte->vinfo1 = vinfo;
  } else {
    GST_DEBUG_OBJECT (smpte, "setting pad2 info");
    smpte->vinfo2 = vinfo;
  }

  return ret;
}
/* Compute the pad's outer region from its x/y position and configured
 * width/height; a width/height of 0 means "use the video frame size". */
static void gst_imx_compositor_pad_compute_outer_region(GstImxCompositorPad *compositor_pad)
{
	GstVideoInfo *info = &(GST_IMXBP_VIDEO_AGGREGATOR_PAD(compositor_pad)->info);
	gint region_width, region_height;

	/* Set the outer region's top left corner */
	compositor_pad->canvas.outer_region.x1 = compositor_pad->xpos;
	compositor_pad->canvas.outer_region.y1 = compositor_pad->ypos;

	/* Check if width and/or height are 0. 0 means "use the video width/height". */
	region_width = (compositor_pad->width == 0) ? GST_VIDEO_INFO_WIDTH(info) : (gint)(compositor_pad->width);
	region_height = (compositor_pad->height == 0) ? GST_VIDEO_INFO_HEIGHT(info) : (gint)(compositor_pad->height);

	compositor_pad->canvas.outer_region.x2 = compositor_pad->xpos + region_width;
	compositor_pad->canvas.outer_region.y2 = compositor_pad->ypos + region_height;

	GST_DEBUG_OBJECT(compositor_pad, "computed outer region: %" GST_IMX_REGION_FORMAT, GST_IMX_REGION_ARGS(&(compositor_pad->canvas.outer_region)));
}