Code example #1
static void
_display_size_to_stream_size (GtkGstBaseWidget * base_widget, gdouble x,
    gdouble y, gdouble * stream_x, gdouble * stream_y)
{
  gdouble stream_width, stream_height;
  GtkAllocation allocation;
  GstVideoRectangle result;

  gtk_widget_get_allocation (GTK_WIDGET (base_widget), &allocation);
  _fit_stream_to_allocated_size (base_widget, &allocation, &result);

  stream_width = (gdouble) GST_VIDEO_INFO_WIDTH (&base_widget->v_info);
  stream_height = (gdouble) GST_VIDEO_INFO_HEIGHT (&base_widget->v_info);

  /* from display coordinates to stream coordinates */
  if (result.w > 0)
    *stream_x = (x - result.x) / result.w * stream_width;
  else
    *stream_x = 0.;

  /* clip to stream size */
  if (*stream_x < 0.)
    *stream_x = 0.;
  if (*stream_x > stream_width)
    *stream_x = stream_width;

  /* same for y-axis */
  if (result.h > 0)
    *stream_y = (y - result.y) / result.h * stream_height;
  else
    *stream_y = 0.;

  if (*stream_y < 0.)
    *stream_y = 0.;
  if (*stream_y > stream_height)
    *stream_y = stream_height;

  GST_TRACE ("transform %fx%f into %fx%f", x, y, *stream_x, *stream_y);
}
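
For readers new to the macro itself: GST_VIDEO_INFO_WIDTH and GST_VIDEO_INFO_HEIGHT simply read the dimensions stored in a GstVideoInfo. A minimal sketch, assuming an initialized GStreamer and invented 640x480 I420 values:

#include <gst/video/video.h>

/* Sketch: fill a GstVideoInfo and read it back through the accessor
 * macros. The format and size are invented for illustration. */
static void
video_info_example (void)
{
  GstVideoInfo info;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 640, 480);

  g_assert (GST_VIDEO_INFO_WIDTH (&info) == 640);
  g_assert (GST_VIDEO_INFO_HEIGHT (&info) == 480);
}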
Code example #2
static gboolean
gst_video_crop_meta_transform (GstBuffer * dest, GstMeta * meta,
    GstBuffer * buffer, GQuark type, gpointer data)
{
  GstVideoCropMeta *dmeta, *smeta;

  if (GST_META_TRANSFORM_IS_COPY (type)) {
    smeta = (GstVideoCropMeta *) meta;
    dmeta = gst_buffer_add_video_crop_meta (dest);

    GST_DEBUG ("copy crop metadata");
    dmeta->x = smeta->x;
    dmeta->y = smeta->y;
    dmeta->width = smeta->width;
    dmeta->height = smeta->height;
  } else if (GST_VIDEO_META_TRANSFORM_IS_SCALE (type)) {
    GstVideoMetaTransform *trans = data;
    gint ow, oh, nw, nh;

    smeta = (GstVideoCropMeta *) meta;
    dmeta = gst_buffer_add_video_crop_meta (dest);

    ow = GST_VIDEO_INFO_WIDTH (trans->in_info);
    nw = GST_VIDEO_INFO_WIDTH (trans->out_info);
    oh = GST_VIDEO_INFO_HEIGHT (trans->in_info);
    nh = GST_VIDEO_INFO_HEIGHT (trans->out_info);

    GST_DEBUG ("scaling crop metadata %dx%d -> %dx%d", ow, oh, nw, nh);
    dmeta->x = (smeta->x * nw) / ow;
    dmeta->y = (smeta->y * nh) / oh;
    dmeta->width = (smeta->width * nw) / ow;
    dmeta->height = (smeta->height * nh) / oh;
    GST_DEBUG ("crop offset %dx%d -> %dx%d", smeta->x, smeta->y, dmeta->x,
        dmeta->y);
    GST_DEBUG ("crop size   %dx%d -> %dx%d", smeta->width, smeta->height,
        dmeta->width, dmeta->height);
  }
  return TRUE;
}
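
The scaling branch above is plain integer proportion: each crop coordinate is multiplied by the new dimension and divided by the old one. A standalone illustration of that arithmetic, with invented 1920x1080 to 1280x720 numbers:

#include <glib.h>

/* Illustration only: the same integer math as the scale branch of
 * gst_video_crop_meta_transform(), on made-up values. */
static void
scale_crop_example (void)
{
  gint ow = 1920, oh = 1080;              /* source frame size */
  gint nw = 1280, nh = 720;               /* destination frame size */
  gint x = 96, y = 54, w = 192, h = 108;  /* crop rectangle */

  g_print ("crop %d,%d %dx%d -> %d,%d %dx%d\n", x, y, w, h,
      (x * nw) / ow, (y * nh) / oh,       /* 64,36 */
      (w * nw) / ow, (h * nh) / oh);      /* 128x72 */
}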
Code example #3
static gboolean
gst_mfxpostproc_create (GstMfxPostproc * vpp)
{
  if (!gst_mfxpostproc_ensure_filter (vpp))
    return FALSE;

  gst_mfx_filter_set_frame_info_from_gst_video_info (vpp->filter,
      &vpp->sinkpad_info);

  if (vpp->async_depth)
    gst_mfx_filter_set_async_depth (vpp->filter, vpp->async_depth);

  gst_mfx_filter_set_size (vpp->filter,
    GST_VIDEO_INFO_WIDTH (&vpp->srcpad_info),
    GST_VIDEO_INFO_HEIGHT (&vpp->srcpad_info));

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_FORMAT)
    gst_mfx_filter_set_format (vpp->filter,
      gst_video_format_to_mfx_fourcc (vpp->format));

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_DENOISE)
    gst_mfx_filter_set_denoising_level (vpp->filter, vpp->denoise_level);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_DETAIL)
    gst_mfx_filter_set_detail_level (vpp->filter, vpp->detail_level);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_HUE)
    gst_mfx_filter_set_hue (vpp->filter, vpp->hue);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_SATURATION)
    gst_mfx_filter_set_saturation (vpp->filter, vpp->saturation);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_BRIGHTNESS)
    gst_mfx_filter_set_brightness (vpp->filter, vpp->brightness);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_CONTRAST)
    gst_mfx_filter_set_contrast (vpp->filter, vpp->contrast);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_ROTATION)
    gst_mfx_filter_set_rotation (vpp->filter, vpp->angle);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_DEINTERLACING)
    gst_mfx_filter_set_deinterlace_mode (vpp->filter, vpp->deinterlace_mode);

  if (vpp->flags & GST_MFX_POSTPROC_FLAG_FRC) {
    gst_mfx_filter_set_frc_algorithm (vpp->filter, vpp->alg);
    gst_mfx_filter_set_framerate (vpp->filter, vpp->fps_n, vpp->fps_d);
  }

  return gst_mfx_filter_prepare (vpp->filter);
}
Code example #4
void gst_imx_egl_viv_sink_egl_platform_set_video_info(GstImxEglVivSinkEGLPlatform *platform, GstVideoInfo *video_info)
{
	Window x11_window;

	EGL_PLATFORM_LOCK(platform);
	if (platform->native_window == 0)
	{
		GST_LOG("window not open - cannot set video info");
		EGL_PLATFORM_UNLOCK(platform);
		return;
	}

	x11_window = (Window)(platform->native_window);

	platform->video_width = GST_VIDEO_INFO_WIDTH(video_info);
	platform->video_height = GST_VIDEO_INFO_HEIGHT(video_info);

	if (platform->fullscreen || (platform->fixed_window_width != 0) || (platform->fixed_window_height != 0) || (platform->parent_window != 0))
	{
		/* even though the window itself might not have been resized, the callback
		 * still needs to be invoked, because it depends on both the window and the
		 * video frame sizes */
		if (platform->window_resized_event_cb != NULL)
		{
			// do not call the resize callback here directly; instead, notify the
			// main loop, because the EGL context is not and cannot be set here
			gst_imx_egl_viv_sink_egl_platform_send_cmd(platform, GSTIMX_EGLX11_CMD_CALL_RESIZE_CB);
		}
	}
	else
	{
		/* not calling the resize callback here, since the XResizeWindow() call
		 * creates a resize event that will be handled in the main loop */
		XResizeWindow((Display *)(platform->native_display), x11_window, GST_VIDEO_INFO_WIDTH(video_info), GST_VIDEO_INFO_HEIGHT(video_info));
	}

	EGL_PLATFORM_UNLOCK(platform);
}
Code example #5
/* Called in the gl thread */
static gboolean
_init_upload (GstGLUpload * upload)
{
    GstGLFuncs *gl;
    GstVideoFormat v_format;
    GstVideoInfo out_info;

    gl = upload->context->gl_vtable;

    v_format = GST_VIDEO_INFO_FORMAT (&upload->in_info);

    GST_INFO ("Initializing texture upload for format:%s",
              gst_video_format_to_string (v_format));

    if (!gl->CreateProgramObject && !gl->CreateProgram) {
        gst_gl_context_set_error (upload->context,
                                  "Cannot upload YUV formats without OpenGL shaders");
        goto error;
    }

    gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA,
                               GST_VIDEO_INFO_WIDTH (&upload->in_info),
                               GST_VIDEO_INFO_HEIGHT (&upload->in_info));

    gst_gl_color_convert_set_format (upload->convert, &upload->in_info,
                                     &out_info);

    upload->out_tex = gst_gl_memory_wrapped_texture (upload->context, 0,
                      GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&upload->in_info),
                      GST_VIDEO_INFO_HEIGHT (&upload->in_info), NULL, NULL);

    upload->initted = TRUE;

    return TRUE;

error:
    return FALSE;
}
Code example #6
static gboolean
gst_gl_transformation_set_caps (GstGLFilter * filter, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstGLTransformation *transformation = GST_GL_TRANSFORMATION (filter);

  transformation->aspect =
      (gdouble) GST_VIDEO_INFO_WIDTH (&filter->out_info) /
      (gdouble) GST_VIDEO_INFO_HEIGHT (&filter->out_info);

  gst_gl_transformation_build_mvp (transformation);

  return TRUE;
}
Code example #7
static gboolean
gst_kms_sink_calculate_display_ratio (GstKMSSink * self, GstVideoInfo * vinfo)
{
  guint dar_n, dar_d;
  guint video_width, video_height;
  guint video_par_n, video_par_d;
  guint dpy_par_n, dpy_par_d;

  video_width = GST_VIDEO_INFO_WIDTH (vinfo);
  video_height = GST_VIDEO_INFO_HEIGHT (vinfo);
  video_par_n = GST_VIDEO_INFO_PAR_N (vinfo);
  video_par_d = GST_VIDEO_INFO_PAR_D (vinfo);

  gst_video_calculate_device_ratio (self->hdisplay, self->vdisplay,
      self->mm_width, self->mm_height, &dpy_par_n, &dpy_par_d);

  if (!gst_video_calculate_display_ratio (&dar_n, &dar_d, video_width,
          video_height, video_par_n, video_par_d, dpy_par_n, dpy_par_d))
    return FALSE;

  GST_DEBUG_OBJECT (self, "video calculated display ratio: %d/%d", dar_n,
      dar_d);

  /* now find a width x height that respects this display ratio.
   * prefer those that have one of w/h the same as the incoming video
   * using wd / hd = dar_n / dar_d */

  /* start with same height, because of interlaced video */
  /* check hd / dar_d is an integer scale factor, and scale wd with the PAR */
  if (video_height % dar_d == 0) {
    GST_DEBUG_OBJECT (self, "keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, dar_n, dar_d);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  } else if (video_width % dar_n == 0) {
    GST_DEBUG_OBJECT (self, "keeping video width");
    GST_VIDEO_SINK_WIDTH (self) = video_width;
    GST_VIDEO_SINK_HEIGHT (self) = (guint)
        gst_util_uint64_scale_int (video_width, dar_d, dar_n);
  } else {
    GST_DEBUG_OBJECT (self, "approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (self) = (guint)
        gst_util_uint64_scale_int (video_height, dar_n, dar_d);
    GST_VIDEO_SINK_HEIGHT (self) = video_height;
  }
  GST_DEBUG_OBJECT (self, "scaling to %dx%d", GST_VIDEO_SINK_WIDTH (self),
      GST_VIDEO_SINK_HEIGHT (self));

  return TRUE;
}
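
For reference, a hedged standalone use of gst_video_calculate_display_ratio(); the 720x576 frame with a 16:15 pixel aspect ratio is an invented input that yields the classic 4:3 PAL display ratio on square display pixels:

#include <gst/video/video.h>

static void
display_ratio_example (void)
{
  guint dar_n, dar_d;

  if (gst_video_calculate_display_ratio (&dar_n, &dar_d,
          720, 576,   /* video size */
          16, 15,     /* video pixel aspect ratio */
          1, 1))      /* display pixel aspect ratio */
    g_print ("display ratio: %u/%u\n", dar_n, dar_d);  /* 4/3 */
}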
Code example #8
static gboolean
plugin_bind_dma_to_vaapi_buffer (GstVaapiPluginBase * plugin,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstVideoInfo *const vip = &plugin->sinkpad_info;
  GstVaapiVideoMeta *meta;
  GstVaapiSurface *surface;
  GstVaapiSurfaceProxy *proxy;
  gint fd;

  fd = gst_dmabuf_memory_get_fd (gst_buffer_peek_memory (inbuf, 0));
  if (fd < 0)
    return FALSE;

  if (!plugin_update_sinkpad_info_from_buffer (plugin, inbuf))
    goto error_update_sinkpad_info;

  meta = gst_buffer_get_vaapi_video_meta (outbuf);
  g_return_val_if_fail (meta != NULL, FALSE);

  surface = gst_vaapi_surface_new_with_dma_buf_handle (plugin->display, fd,
      GST_VIDEO_INFO_SIZE (vip), GST_VIDEO_INFO_FORMAT (vip),
      GST_VIDEO_INFO_WIDTH (vip), GST_VIDEO_INFO_HEIGHT (vip),
      vip->offset, vip->stride);
  if (!surface)
    goto error_create_surface;

  proxy = gst_vaapi_surface_proxy_new (surface);
  gst_vaapi_object_unref (surface);
  if (!proxy)
    goto error_create_proxy;

  gst_vaapi_surface_proxy_set_destroy_notify (proxy,
      (GDestroyNotify) gst_buffer_unref, (gpointer) gst_buffer_ref (inbuf));
  gst_vaapi_video_meta_set_surface_proxy (meta, proxy);
  gst_vaapi_surface_proxy_unref (proxy);
  return TRUE;

  /* ERRORS */
error_update_sinkpad_info:
  GST_ERROR ("failed to update sink pad video info from video meta");
  return FALSE;
error_create_surface:
  GST_ERROR ("failed to create VA surface from dma_buf handle");
  return FALSE;
error_create_proxy:
  GST_ERROR ("failed to create VA surface proxy from wrapped VA surface");
  return FALSE;
}
Code example #9
static GstFlowReturn
gst_glimage_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  GstGLImageSink *glimage_sink;
  GstBuffer *stored_buffer;

  GST_TRACE ("rendering buffer:%p", buf);

  glimage_sink = GST_GLIMAGE_SINK (vsink);

  GST_TRACE ("redisplay texture:%u of size:%ux%u, window size:%ux%u",
      glimage_sink->next_tex, GST_VIDEO_INFO_WIDTH (&glimage_sink->info),
      GST_VIDEO_INFO_HEIGHT (&glimage_sink->info),
      GST_VIDEO_SINK_WIDTH (glimage_sink),
      GST_VIDEO_SINK_HEIGHT (glimage_sink));

  /* Avoid releasing the texture while drawing */
  GST_GLIMAGE_SINK_LOCK (glimage_sink);
  glimage_sink->redisplay_texture = glimage_sink->next_tex;
  stored_buffer = glimage_sink->stored_buffer;
  glimage_sink->stored_buffer = gst_buffer_ref (buf);
  GST_GLIMAGE_SINK_UNLOCK (glimage_sink);
  if (stored_buffer)
    gst_buffer_unref (stored_buffer);

  /* Ask the underlying window to redraw its content */
  if (!gst_glimage_sink_redisplay (glimage_sink))
    goto redisplay_failed;

  GST_TRACE ("post redisplay");

  if (g_atomic_int_get (&glimage_sink->to_quit) != 0) {
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    gst_gl_upload_release_buffer (glimage_sink->upload);
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;

/* ERRORS */
redisplay_failed:
  {
    gst_gl_upload_release_buffer (glimage_sink->upload);
    GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
        ("%s", gst_gl_context_get_error ()), (NULL));
    return GST_FLOW_ERROR;
  }
}
Code example #10
/* based on upstream gst-plugins-good jpegencoder */
static void
generate_sampling_factors (GstVaapiEncoderJpeg * encoder)
{
  GstVideoInfo *vinfo;
  gint i;

  vinfo = GST_VAAPI_ENCODER_VIDEO_INFO (encoder);

  if (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_ENCODED) {
    /* Use native I420 format */
    encoder->n_components = 3;
    for (i = 0; i < encoder->n_components; ++i) {
      if (i == 0)
        encoder->h_samp[i] = encoder->v_samp[i] = 2;
      else
        encoder->h_samp[i] = encoder->v_samp[i] = 1;
      GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
          encoder->v_samp[i]);
    }
    return;
  }

  encoder->n_components = GST_VIDEO_INFO_N_COMPONENTS (vinfo);

  encoder->h_max_samp = 0;
  encoder->v_max_samp = 0;
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->cwidth[i] = GST_VIDEO_INFO_COMP_WIDTH (vinfo, i);
    encoder->cheight[i] = GST_VIDEO_INFO_COMP_HEIGHT (vinfo, i);
    encoder->h_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (vinfo)) / encoder->cwidth[i];
    encoder->h_max_samp = MAX (encoder->h_max_samp, encoder->h_samp[i]);
    encoder->v_samp[i] =
        GST_ROUND_UP_4 (GST_VIDEO_INFO_HEIGHT (vinfo)) / encoder->cheight[i];
    encoder->v_max_samp = MAX (encoder->v_max_samp, encoder->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (encoder->h_max_samp <= 4);
  g_assert (encoder->v_max_samp <= 4);

  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < encoder->n_components; ++i) {
    encoder->h_samp[i] = encoder->h_max_samp / encoder->h_samp[i];
    encoder->v_samp[i] = encoder->v_max_samp / encoder->v_samp[i];
    GST_DEBUG ("sampling factors: %d %d", encoder->h_samp[i],
        encoder->v_samp[i]);
  }
}
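
Worked through with invented 640x480 I420 numbers (the luma component is 640 wide, each chroma component 320): the pre-inversion horizontal factors come out as 1 and 2, and inverting against the maximum yields the JPEG-style 2/1 luma/chroma factors that the native-I420 branch above hard-codes. A sketch:

#include <gst/gst.h>

static void
sampling_factor_example (void)
{
  guint h_samp_y = GST_ROUND_UP_4 (640) / 640;  /* luma: 1 */
  guint h_samp_c = GST_ROUND_UP_4 (640) / 320;  /* chroma: 2 */
  guint h_max = MAX (h_samp_y, h_samp_c);       /* 2 */

  /* invert against the maximum, as in the final loop above */
  g_assert (h_max / h_samp_y == 2);  /* luma factor */
  g_assert (h_max / h_samp_c == 1);  /* chroma factor */
}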
Code example #11
static GstVaapiSurface *
new_surface (GstVaapiDisplay * display, const GstVideoInfo * vip)
{
  GstVaapiSurface *surface;
  GstVaapiChromaType chroma_type;

  /* Try with explicit format first */
  if (!USE_NATIVE_FORMATS &&
      GST_VIDEO_INFO_FORMAT (vip) != GST_VIDEO_FORMAT_ENCODED) {
    surface = gst_vaapi_surface_new_with_format (display,
        GST_VIDEO_INFO_FORMAT (vip), GST_VIDEO_INFO_WIDTH (vip),
        GST_VIDEO_INFO_HEIGHT (vip));
    if (surface)
      return surface;
  }

  /* Try to pick something compatible, i.e. with same chroma type */
  chroma_type =
      gst_vaapi_video_format_get_chroma_type (GST_VIDEO_INFO_FORMAT (vip));
  if (!chroma_type)
    return NULL;
  return gst_vaapi_surface_new (display, chroma_type,
      GST_VIDEO_INFO_WIDTH (vip), GST_VIDEO_INFO_HEIGHT (vip));
}
Code example #12
static gboolean
gst_quarktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
                      GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
    GstQuarkTV *filter = GST_QUARKTV (vfilter);
    gint width, height;

    width = GST_VIDEO_INFO_WIDTH (in_info);
    height = GST_VIDEO_INFO_HEIGHT (in_info);

    gst_quarktv_planetable_clear (filter);
    filter->area = width * height;

    return TRUE;
}
Code example #13
static gboolean
gst_msdkdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);

  if (thiz->input_state) {
    /* mark for re-negotiation if display resolution changes */
    if ((GST_VIDEO_INFO_WIDTH (&thiz->input_state->info) !=
            GST_VIDEO_INFO_WIDTH (&state->info)) ||
        GST_VIDEO_INFO_HEIGHT (&thiz->input_state->info) !=
        GST_VIDEO_INFO_HEIGHT (&state->info))
      thiz->do_renego = TRUE;
    gst_video_codec_state_unref (thiz->input_state);
  }
  thiz->input_state = gst_video_codec_state_ref (state);

  /* we don't set the output state here, to avoid caching mismatched
   * video information when the stream changes resolution dynamically.
   * All negotiation code is consolidated in gst_msdkdec_negotiate(),
   * which is invoked from handle_frame() */

  gst_msdkdec_set_latency (thiz);
  return TRUE;
}
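
The renegotiation trigger above compares only the stored dimensions. Expressed as a hedged standalone helper (the name is mine, not part of the element):

#include <gst/video/video.h>

/* Hypothetical helper: TRUE when two video infos differ in resolution. */
static gboolean
resolution_changed (const GstVideoInfo * a, const GstVideoInfo * b)
{
  return GST_VIDEO_INFO_WIDTH (a) != GST_VIDEO_INFO_WIDTH (b) ||
      GST_VIDEO_INFO_HEIGHT (a) != GST_VIDEO_INFO_HEIGHT (b);
}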
Code example #14
static void
_mixer_pad_get_output_size (GstGLVideoMixer * mix,
    GstGLVideoMixerPad * mix_pad, gint * width, gint * height)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
  GstVideoAggregatorPad *vagg_pad = GST_VIDEO_AGGREGATOR_PAD (mix_pad);
  gint pad_width, pad_height;
  guint dar_n, dar_d;

  /* FIXME: Anything better we can do here? */
  if (!vagg_pad->info.finfo
      || vagg_pad->info.finfo->format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_DEBUG_OBJECT (mix_pad, "Have no caps yet");
    *width = 0;
    *height = 0;
    return;
  }

  pad_width =
      mix_pad->width <=
      0 ? GST_VIDEO_INFO_WIDTH (&vagg_pad->info) : mix_pad->width;
  pad_height =
      mix_pad->height <=
      0 ? GST_VIDEO_INFO_HEIGHT (&vagg_pad->info) : mix_pad->height;

  gst_video_calculate_display_ratio (&dar_n, &dar_d, pad_width, pad_height,
      GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_D (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_N (&vagg->info), GST_VIDEO_INFO_PAR_D (&vagg->info)
      );
  GST_LOG_OBJECT (mix_pad, "scaling %ux%u by %u/%u (%u/%u / %u/%u)", pad_width,
      pad_height, dar_n, dar_d, GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_D (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_N (&vagg->info), GST_VIDEO_INFO_PAR_D (&vagg->info));

  if (pad_height % dar_n == 0) {
    pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
  } else if (pad_width % dar_d == 0) {
    pad_height = gst_util_uint64_scale_int (pad_width, dar_d, dar_n);
  } else {
    pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
  }

  if (width)
    *width = pad_width;
  if (height)
    *height = pad_height;
}
Code example #15
static gboolean
_upload_memory (GstGLUpload * upload)
{
    guint in_width, in_height;
    guint in_texture[GST_VIDEO_MAX_PLANES];
    GstBuffer *inbuf;
    GstVideoFrame out_frame;
    GstVideoInfo out_info;
    gint i;

    in_width = GST_VIDEO_INFO_WIDTH (&upload->in_info);
    in_height = GST_VIDEO_INFO_HEIGHT (&upload->in_info);

    if (!upload->initted) {
        if (!_init_upload (upload)) {
            return FALSE;
        }
    }

    inbuf = gst_buffer_new ();
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&upload->in_info); i++) {
        in_texture[i] = upload->in_tex[i]->tex_id;
        gst_buffer_append_memory (inbuf,
                                  gst_memory_ref ((GstMemory *) upload->in_tex[i]));
    }

    GST_TRACE ("uploading with textures:%u,%u,%u dimensions:%ux%u",
               in_texture[0], in_texture[1], in_texture[2], in_width, in_height);

    upload->priv->outbuf = gst_gl_color_convert_perform (upload->convert, inbuf);
    gst_buffer_unref (inbuf);

    gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA, in_width,
                               in_height);
    if (!gst_video_frame_map (&out_frame, &out_info, upload->priv->outbuf,
                              GST_MAP_READ | GST_MAP_GL)) {
        gst_buffer_unref (upload->priv->outbuf);
        upload->priv->outbuf = NULL;
        return FALSE;
    }

    upload->out_tex->tex_id = *(guint *) out_frame.data[0];

    gst_video_frame_unmap (&out_frame);
    upload->priv->released = FALSE;

    return TRUE;
}
Code example #16
static gboolean
gst_gl_video_mixer_process_textures (GstGLMixer * mix, GPtrArray * frames,
    guint out_tex)
{
  GstGLVideoMixer *video_mixer = GST_GL_VIDEO_MIXER (mix);

  video_mixer->input_frames = frames;

  gst_gl_context_use_fbo_v2 (GST_GL_BASE_MIXER (mix)->context,
      GST_VIDEO_INFO_WIDTH (&GST_VIDEO_AGGREGATOR (mix)->info),
      GST_VIDEO_INFO_HEIGHT (&GST_VIDEO_AGGREGATOR (mix)->info),
      mix->fbo, mix->depthbuffer,
      out_tex, gst_gl_video_mixer_callback, (gpointer) video_mixer);

  return TRUE;
}
Code example #17
static gboolean
gst_gl_filter_glass_filter_texture (GstGLFilter * filter, guint in_tex,
    guint out_tex)
{
  GstGLFilterGlass *glass_filter = GST_GL_FILTER_GLASS (filter);
  glass_filter->in_tex = in_tex;

  /* blocking call, use an FBO */
  gst_gl_context_use_fbo_v2 (GST_GL_BASE_FILTER (filter)->context,
      GST_VIDEO_INFO_WIDTH (&filter->out_info),
      GST_VIDEO_INFO_HEIGHT (&filter->out_info),
      filter->fbo, filter->depthbuffer, out_tex,
      gst_gl_filter_glass_callback, (gpointer) glass_filter);

  return TRUE;
}
Code example #18
gboolean gst_opencv_cv_mat_params_from_video_info
    (GstVideoInfo * info, gint * width, gint * height, int *cv_type,
    GError ** err)
{
  GstVideoFormat format;

  format = GST_VIDEO_INFO_FORMAT (info);
  if (!gst_opencv_cv_image_type_from_video_format (format, cv_type, err)) {
    return FALSE;
  }

  *width = GST_VIDEO_INFO_WIDTH (info);
  *height = GST_VIDEO_INFO_HEIGHT (info);

  return TRUE;
}
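
A hedged caller sketch for this helper; the assumption that the info describes an OpenCV-compatible format (e.g. RGB) is mine, since the underlying gst_opencv_cv_image_type_from_video_format() only maps a subset of formats:

#include <gst/video/video.h>

static void
opencv_params_example (GstVideoInfo * info)
{
  gint width, height;
  int cv_type;
  GError *err = NULL;

  if (gst_opencv_cv_mat_params_from_video_info (info, &width, &height,
          &cv_type, &err)) {
    g_print ("cv::Mat of %dx%d, type %d\n", width, height, cv_type);
  } else {
    g_warning ("unsupported format: %s", err->message);
    g_error_free (err);
  }
}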
Code example #19
bool getVideoSizeAndFormatFromCaps(GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
{
    GstVideoInfo info;

    if (!gst_caps_is_fixed(caps) || !gst_video_info_from_caps(&info, caps))
        return false;

    format = GST_VIDEO_INFO_FORMAT(&info);
    size.setWidth(GST_VIDEO_INFO_WIDTH(&info));
    size.setHeight(GST_VIDEO_INFO_HEIGHT(&info));
    pixelAspectRatioNumerator = GST_VIDEO_INFO_PAR_N(&info);
    pixelAspectRatioDenominator = GST_VIDEO_INFO_PAR_D(&info);
    stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);

    return true;
}
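
WebKit's wrapper above boils down to two GStreamer calls. A hedged plain-C sketch of the same flow, with invented 1280x720 RGBA caps:

#include <gst/video/video.h>

static void
caps_to_size_example (void)
{
  GstCaps *caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "RGBA",
      "width", G_TYPE_INT, 1280,
      "height", G_TYPE_INT, 720,
      "framerate", GST_TYPE_FRACTION, 30, 1, NULL);
  GstVideoInfo info;

  if (gst_caps_is_fixed (caps) && gst_video_info_from_caps (&info, caps))
    g_print ("%dx%d, stride %d\n", GST_VIDEO_INFO_WIDTH (&info),
        GST_VIDEO_INFO_HEIGHT (&info),
        GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));

  gst_caps_unref (caps);
}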
Code example #20
static inline void
allocator_configure_surface_info (GstVaapiDisplay * display,
    GstVaapiVideoAllocator * allocator)
{
  const GstVideoInfo *vinfo;
  GstVaapiSurface *surface = NULL;
  GstVaapiImage *image = NULL;
  gboolean updated;
  GstVideoFormat fmt;

  vinfo = &allocator->video_info;

  fmt = gst_vaapi_video_format_get_best_native (GST_VIDEO_INFO_FORMAT (vinfo));
  gst_video_info_set_format (&allocator->surface_info, fmt,
      GST_VIDEO_INFO_WIDTH (vinfo), GST_VIDEO_INFO_HEIGHT (vinfo));

  /* nothing to configure */
  if (USE_NATIVE_FORMATS ||
      GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_ENCODED)
    return;

  surface = new_surface (display, vinfo);
  if (!surface)
    goto bail;
  image = gst_vaapi_surface_derive_image (surface);
  if (!image)
    goto bail;
  if (!gst_vaapi_image_map (image))
    goto bail;

  updated = gst_video_info_update_from_image (&allocator->surface_info, image);

  allocator->has_direct_rendering = !USE_NATIVE_FORMATS && updated &&
      (GST_VAAPI_IMAGE_FORMAT (image) == GST_VIDEO_INFO_FORMAT (vinfo));

  GST_INFO ("has direct-rendering for %s surfaces: %s",
      GST_VIDEO_INFO_FORMAT_STRING (&allocator->surface_info),
      allocator->has_direct_rendering ? "yes" : "no");

  gst_vaapi_image_unmap (image);

bail:
  if (surface)
    gst_vaapi_object_unref (surface);
  if (image)
    gst_vaapi_object_unref (image);
}
Code example #21
/* This function handles GstBuffer creation */
static GstFlowReturn
gst_vdp_video_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstVdpVideoBufferPool *vdppool = GST_VDP_VIDEO_BUFFER_POOL_CAST (pool);
  GstVideoInfo *info;
  GstBuffer *buf;
  GstMemory *vdp_mem;

  info = &vdppool->info;

  if (!(buf = gst_buffer_new ()))
    goto no_buffer;

  if (!(vdp_mem = gst_vdp_video_memory_alloc (vdppool->device, info)))
    goto mem_create_failed;

  gst_buffer_append_memory (buf, vdp_mem);

  if (vdppool->add_videometa) {
    GstVideoMeta *vmeta;

    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
    /* these are just the defaults for now */
    vmeta = gst_buffer_add_video_meta (buf, 0, GST_VIDEO_INFO_FORMAT (info),
        GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
    vmeta->map = gst_vdp_video_memory_map;
    vmeta->unmap = gst_vdp_video_memory_unmap;
  }

  *buffer = buf;

  return GST_FLOW_OK;

  /* ERROR */
no_buffer:
  {
    GST_WARNING_OBJECT (pool, "can't create image");
    return GST_FLOW_ERROR;
  }

mem_create_failed:
  {
    GST_WARNING_OBJECT (pool, "Could create GstVdpVideo Memory");
    return GST_FLOW_ERROR;
  }
}
Code example #22
static gboolean
_do_download (GstGLDownload * download, guint texture_id,
    gpointer data[GST_VIDEO_MAX_PLANES])
{
  guint out_width, out_height;
  GstBuffer *inbuf, *outbuf;
  GstMapInfo map_info;
  gboolean ret = TRUE;
  gint i;

  out_width = GST_VIDEO_INFO_WIDTH (&download->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&download->info);

  if (!download->initted) {
    if (!_init_download (download))
      return FALSE;
  }

  GST_TRACE ("doing download of texture:%u (%ux%u)",
      download->priv->in_tex[0]->tex_id, out_width, out_height);

  inbuf = gst_buffer_new ();
  gst_buffer_append_memory (inbuf,
      gst_memory_ref ((GstMemory *) download->priv->in_tex[0]));

  outbuf = gst_gl_color_convert_perform (download->convert, inbuf);
  if (!outbuf)
    return FALSE;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&download->info); i++) {
    GstMemory *out_mem = gst_buffer_peek_memory (outbuf, i);
    gpointer temp_data = ((GstGLMemory *) out_mem)->data;
    ((GstGLMemory *) out_mem)->data = data[i];

    if (!gst_memory_map (out_mem, &map_info, GST_MAP_READ)) {
      GST_ERROR_OBJECT (download, "Failed to map memory");
      ret = FALSE;
    }
    gst_memory_unmap (out_mem, &map_info);
    ((GstGLMemory *) out_mem)->data = temp_data;
  }

  gst_buffer_unref (inbuf);
  gst_buffer_unref (outbuf);

  return ret;
}
Code example #23
File: gstrsvgdec.c  Project: cbetz421/gst-plugins-bad
static gboolean
gst_rsvg_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
  GstVideoInfo *info = &state->info;

  if (rsvg->input_state)
    gst_video_codec_state_unref (rsvg->input_state);
  rsvg->input_state = gst_video_codec_state_ref (state);

  /* Create the output state */
  gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,
      GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info),
      rsvg->input_state);

  return TRUE;
}
Code example #24
static gboolean
gst_spectra_scope_setup (GstAudioVisualizer * bscope)
{
  GstSpectraScope *scope = GST_SPECTRA_SCOPE (bscope);
  guint num_freq = GST_VIDEO_INFO_WIDTH (&bscope->vinfo) + 1;

  if (scope->fft_ctx)
    gst_fft_s16_free (scope->fft_ctx);
  g_free (scope->freq_data);

  /* we need this many samples per render() call */
  bscope->req_spf = num_freq * 2 - 2;
  scope->fft_ctx = gst_fft_s16_new (bscope->req_spf, FALSE);
  scope->freq_data = g_new (GstFFTS16Complex, num_freq);

  return TRUE;
}
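
The sizing follows real-FFT bookkeeping: drawing num_freq frequency bins takes 2 * num_freq - 2 time-domain samples, which works out to exactly twice the video width. With an invented width of 640:

#include <glib.h>

static void
fft_sizing_example (void)
{
  guint width = 640;                 /* invented video width */
  guint num_freq = width + 1;        /* frequency bins to draw */
  guint req_spf = num_freq * 2 - 2;  /* samples needed per frame */

  g_assert (req_spf == 2 * width);   /* 1280 */
}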
Code example #25
static void
_mixer_pad_get_output_size (GstCompositor * comp,
                            GstCompositorPad * comp_pad, gint out_par_n, gint out_par_d, gint * width,
                            gint * height)
{
    GstVideoAggregatorPad *vagg_pad = GST_VIDEO_AGGREGATOR_PAD (comp_pad);
    gint pad_width, pad_height;
    guint dar_n, dar_d;

    /* FIXME: Anything better we can do here? */
    if (!vagg_pad->info.finfo
            || vagg_pad->info.finfo->format == GST_VIDEO_FORMAT_UNKNOWN) {
        GST_DEBUG_OBJECT (comp_pad, "Have no caps yet");
        *width = 0;
        *height = 0;
        return;
    }

    pad_width =
        comp_pad->width <=
        0 ? GST_VIDEO_INFO_WIDTH (&vagg_pad->info) : comp_pad->width;
    pad_height =
        comp_pad->height <=
        0 ? GST_VIDEO_INFO_HEIGHT (&vagg_pad->info) : comp_pad->height;

    if (!gst_video_calculate_display_ratio (&dar_n, &dar_d, pad_width, pad_height,
                                            GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
                                            GST_VIDEO_INFO_PAR_D (&vagg_pad->info), out_par_n, out_par_d)) {
        GST_WARNING_OBJECT (comp_pad, "Cannot calculate display aspect ratio");
        *width = *height = 0;
    }
    GST_LOG_OBJECT (comp_pad, "scaling %ux%u by %u/%u (%u/%u / %u/%u)", pad_width,
                    pad_height, dar_n, dar_d, GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
                    GST_VIDEO_INFO_PAR_D (&vagg_pad->info), out_par_n, out_par_d);

    if (pad_height % dar_n == 0) {
        pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
    } else if (pad_width % dar_d == 0) {
        pad_height = gst_util_uint64_scale_int (pad_width, dar_d, dar_n);
    } else {
        pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
    }

    *width = pad_width;
    *height = pad_height;
}
Code example #26
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return nullptr;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return nullptr;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

#if GST_CHECK_VERSION(1, 1, 0)
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return texture;
        }
    }
#endif

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return nullptr;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);

    return texture;
}
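
A hedged sketch of the map/read/unmap pattern the fallback path above relies on, stripped of the WebKit types; buffer and info are assumed to describe the same frame:

#include <gst/video/video.h>

static gboolean
read_first_plane (GstBuffer * buffer, GstVideoInfo * info)
{
  GstVideoFrame frame;
  gpointer data;
  gint stride;

  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_READ))
    return FALSE;

  /* plane 0 pointer and stride, as the texture update above uses them */
  data = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (&frame, 0);
  g_print ("plane 0: %p, stride %d\n", data, stride);

  gst_video_frame_unmap (&frame);
  return TRUE;
}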
Code example #27
static gboolean
gst_gl_transformation_filter_texture (GstGLFilter * filter, guint in_tex,
    guint out_tex)
{
  GstGLTransformation *transformation = GST_GL_TRANSFORMATION (filter);

  transformation->in_tex = in_tex;

  /* blocking call, use a FBO */
  gst_gl_context_use_fbo_v2 (GST_GL_BASE_FILTER (filter)->context,
      GST_VIDEO_INFO_WIDTH (&filter->out_info),
      GST_VIDEO_INFO_HEIGHT (&filter->out_info),
      filter->fbo, filter->depthbuffer,
      out_tex, gst_gl_transformation_callback, (gpointer) transformation);

  return TRUE;
}
Code example #28
static gboolean
gst_gaussianblur_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstGaussianBlur *gb = GST_GAUSSIANBLUR (filter);
  guint32 n_elems;

  gb->width = GST_VIDEO_INFO_WIDTH (in_info);
  gb->height = GST_VIDEO_INFO_HEIGHT (in_info);

  /* get stride */
  gb->stride = GST_VIDEO_INFO_COMP_STRIDE (in_info, 0);
  n_elems = gb->stride * gb->height;
  gb->tempim = g_malloc (sizeof (gfloat) * n_elems);

  return TRUE;
}
Code example #29
static void
gst_glimage_sink_on_resize (const GstGLImageSink * gl_sink, gint width,
    gint height)
{
  /* gl_sink members (e.g. gl_sink->info) have the lifetime of set_caps:
   * they cannot change between two set_caps calls
   */
  const GstGLFuncs *gl = gl_sink->context->gl_vtable;

  GST_TRACE ("GL Window resized to %ux%u", width, height);

  /* check if a client reshape callback is registered */
  if (gl_sink->clientReshapeCallback)
    gl_sink->clientReshapeCallback (width, height, gl_sink->client_data);

  /* default reshape */
  else {
    if (gl_sink->keep_aspect_ratio) {
      GstVideoRectangle src, dst, result;

      src.x = 0;
      src.y = 0;
      src.w = GST_VIDEO_INFO_WIDTH (&gl_sink->info);
      src.h = GST_VIDEO_INFO_HEIGHT (&gl_sink->info);

      dst.x = 0;
      dst.y = 0;
      dst.w = width;
      dst.h = height;

      gst_video_sink_center_rect (src, dst, &result, TRUE);
      gl->Viewport (result.x, result.y, result.w, result.h);
    } else {
      gl->Viewport (0, 0, width, height);
    }
#if GST_GL_HAVE_OPENGL
    if (USING_OPENGL (gl_sink->context)) {
      gl->MatrixMode (GL_PROJECTION);
      gl->LoadIdentity ();
      gluOrtho2D (0, width, 0, height);
      gl->MatrixMode (GL_MODELVIEW);
    }
#endif
  }
}
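
A hedged standalone use of gst_video_sink_center_rect(), showing the letterboxing the default reshape path performs; the sizes are invented (a 4:3 frame in a 16:9 window scales to 960x720 at offset (160, 0)):

#include <gst/video/video.h>

static void
center_rect_example (void)
{
  GstVideoRectangle src = { 0, 0, 640, 480 };   /* video frame */
  GstVideoRectangle dst = { 0, 0, 1280, 720 };  /* window */
  GstVideoRectangle result;

  gst_video_sink_center_rect (src, dst, &result, TRUE);
  g_print ("%d,%d %dx%d\n", result.x, result.y, result.w, result.h);
}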
Code example #30
void gst_imx_egl_viv_sink_egl_platform_set_video_info(GstImxEglVivSinkEGLPlatform *platform, GstVideoInfo *video_info)
{
	Window x11_window;

	EGL_PLATFORM_LOCK(platform);
	if (platform->native_window == 0)
	{
		GST_LOG("window not open - cannot set video info");
		EGL_PLATFORM_UNLOCK(platform);
		return;
	}

	x11_window = (Window)(platform->native_window);

	XResizeWindow((Display *)(platform->native_display), x11_window, GST_VIDEO_INFO_WIDTH(video_info), GST_VIDEO_INFO_HEIGHT(video_info));

	EGL_PLATFORM_UNLOCK(platform);
}