Exemplo n.º 1
0
/* Unit-test render callback: counts buffers reaching the sink and checks
 * that, once renegotiation has taken effect (from the 4th buffer on),
 * frames arrive at the expected 512x128 size. */
static GstFlowReturn
gst_test_reverse_negotiation_sink_render (GstBaseSink * bsink,
    GstBuffer * buffer)
{
  GstTestReverseNegotiationSink *sink =
      GST_TEST_REVERSE_NEGOTIATION_SINK (bsink);
  GstCaps *caps = gst_buffer_get_caps (buffer);
  GstVideoFormat fmt;
  gint width, height;

  /* The buffer must carry fixed, parseable video caps */
  fail_unless (caps != NULL);
  fail_unless (gst_video_format_parse_caps (caps, &fmt, &width, &height));

  sink->nbuffers++;

  /* The third buffer is still in the old size
   * because the ffmpegcolorspaces can't convert
   * the frame sizes
   */
  if (sink->nbuffers > 3) {
    fail_unless_equals_int (width, 512);
    fail_unless_equals_int (height, 128);
  }

  gst_caps_unref (caps);

  return GST_FLOW_OK;
}
Exemplo n.º 2
0
/* Wrap a GstBuffer + GstCaps pair, taking a reference on both.
 * Width/height/format are extracted from the caps; on parse failure they
 * keep their "unknown" defaults (-1 / GST_VIDEO_FORMAT_UNKNOWN). */
QtCamGstSample::QtCamGstSample(GstBuffer *buffer, GstCaps *caps) :
  d_ptr(new QtCamGstSamplePrivate) {

  d_ptr->buffer = gst_buffer_ref(buffer);
  d_ptr->caps = gst_caps_ref(caps);

#if GST_CHECK_VERSION(1,0,0)
  /* GStreamer 1.x maps the buffer lazily; nothing mapped yet */
  d_ptr->mapped = false;
#endif

  /* Defaults used when caps parsing fails below */
  d_ptr->width = -1;
  d_ptr->height = -1;
  d_ptr->format = GST_VIDEO_FORMAT_UNKNOWN;

#if GST_CHECK_VERSION(1,0,0)
  /* 1.x path: extract geometry/format via GstVideoInfo */
  GstVideoInfo info;
  if (!gst_video_info_from_caps (&info, d_ptr->caps)) {
    qCritical() << "Failed to parse GStreamer caps";
  } else {
    d_ptr->width = info.width;
    d_ptr->height = info.height;
    d_ptr->format = info.finfo->format;
  }
#else
  /* 0.10 path: legacy caps parser fills the fields directly */
  if (!gst_video_format_parse_caps (d_ptr->caps, &d_ptr->format, &d_ptr->width, &d_ptr->height)) {
    qCritical() << "Failed to parse GStreamer caps";
  }
#endif
}
// Returns the size of the video, adjusted by the pixel aspect ratio.
// Returns an empty IntSize when there is no video, the sink pad is
// missing, or the pad's caps are absent/unfixed.
IntSize MediaPlayerPrivate::naturalSize() const
{
    if (!hasVideo())
        return IntSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326
    int width = 0, height = 0;
    if (GstPad* pad = gst_element_get_static_pad(m_videoSink, "sink")) {
        GstCaps* caps = GST_PAD_CAPS(pad);
        gfloat pixelAspectRatio;
        gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;

        // Bail out (releasing the pad ref) unless the caps are fixed and
        // carry both dimensions and a pixel-aspect-ratio fraction.
        if (!GST_IS_CAPS(caps) || !gst_caps_is_fixed(caps) ||
            !gst_video_format_parse_caps(caps, NULL, &width, &height) ||
            !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator,
                                                           &pixelAspectRatioDenominator)) {
            gst_object_unref(GST_OBJECT(pad));
            return IntSize();
        }

        // NOTE(review): scaling the width up AND the height down applies
        // the PAR twice; the usual convention adjusts only one dimension.
        // Confirm this double application is intentional.
        pixelAspectRatio = (gfloat) pixelAspectRatioNumerator / (gfloat) pixelAspectRatioDenominator;
        width *= pixelAspectRatio;
        height /= pixelAspectRatio;
        gst_object_unref(GST_OBJECT(pad));
    }

    return IntSize(width, height);
}
Exemplo n.º 4
0
/* Retrieve the video sink's Caps and tell the application about the media size.
 * The width is scaled by the pixel aspect ratio so the Java side gets the
 * display size. No notification is sent when the pad has no negotiated caps
 * yet or the caps cannot be parsed. */
static void check_media_size (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GstElement *video_sink;
  GstPad *video_sink_pad;
  GstCaps *caps;
  GstVideoFormat fmt;
  int width;
  int height;

  /* Retrieve the Caps at the entrance of the video sink */
  g_object_get (data->pipeline, "video-sink", &video_sink, NULL);
  video_sink_pad = gst_element_get_static_pad (video_sink, "sink");
  caps = gst_pad_get_negotiated_caps (video_sink_pad);

  /* The pad may not have negotiated caps yet: caps can legitimately be
   * NULL here, so guard both the parse and the unref below. */
  if (caps != NULL && gst_video_format_parse_caps (caps, &fmt, &width, &height)) {
    int par_n, par_d;
    /* Convert storage width to display width using the PAR, if present */
    if (gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
      width = width * par_n / par_d;
    }
    GST_DEBUG ("Media size is %dx%d, notifying application", width, height);

    (*env)->CallVoidMethod (env, data->app, on_media_size_changed_method_id, (jint)width, (jint)height);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
  }

  if (caps != NULL)
    gst_caps_unref (caps);
  gst_object_unref (video_sink_pad);
  gst_object_unref (video_sink);
}
Exemplo n.º 5
0
/* Unit-test buffer_alloc: for the first two buffers, hand back a buffer
 * with the upstream caps unchanged; afterwards, force a renegotiation to
 * 512x128 by allocating at the new size and attaching fresh caps. */
static GstFlowReturn
gst_test_reverse_negotiation_sink_buffer_alloc (GstBaseSink * bsink,
    guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf)
{
  GstTestReverseNegotiationSink *sink =
      GST_TEST_REVERSE_NEGOTIATION_SINK (bsink);
  GstVideoFormat fmt;
  gint width, height;

  fail_unless (gst_video_format_parse_caps (caps, &fmt, &width, &height));

  if (sink->nbuffers < 2) {
    /* Early buffers: accept the proposed caps as-is */
    *buf =
        gst_buffer_new_and_alloc (gst_video_format_get_size (fmt, width,
            height));
    gst_buffer_set_caps (*buf, caps);
  } else {
    gint fps_n, fps_d;

    /* Keep the framerate from upstream but switch to the forced size */
    fail_unless (gst_video_parse_caps_framerate (caps, &fps_n, &fps_d));

    width = 512;
    height = 128;
    *buf =
        gst_buffer_new_and_alloc (gst_video_format_get_size (fmt, width,
            height));
    /* New caps ref is transferred to the buffer via set_caps + unref */
    caps = gst_video_format_new_caps (fmt, width, height, fps_n, fps_d, 1, 1);
    gst_buffer_set_caps (*buf, caps);
    gst_caps_unref (caps);
  }

  return GST_FLOW_OK;
}
Exemplo n.º 6
0
/* Fill a GstVideoState from fixed video caps.
 * Returns FALSE if either the format/size or the framerate cannot be
 * parsed; PAR and interlacing fall back to sane defaults (1/1, FALSE)
 * when absent. */
gboolean
gst_base_video_state_from_caps (GstVideoState * state, GstCaps * caps)
{
  /* Previously the parse result was ignored (see old FIXME), leaving
   * format/width/height uninitialized on malformed caps. */
  if (!gst_video_format_parse_caps (caps, &state->format,
          &state->width, &state->height))
    return FALSE;

  if (!gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d))
    return FALSE;

  /* PAR is optional; default to square pixels */
  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  {
    GstStructure *structure = gst_caps_get_structure (caps, 0);
    /* "interlaced" is optional; default to progressive */
    state->interlaced = FALSE;
    gst_structure_get_boolean (structure, "interlaced", &state->interlaced);
  }

  /* No clean-aperture info in caps: clean region equals full frame */
  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  return TRUE;
}
Exemplo n.º 7
0
/* Propagate the caps set on one pad to every other pad of the mixer and
 * cache the parsed format/size. Returns FALSE if any peer rejects the
 * caps or the caps cannot be parsed as raw video. */
static gboolean
gst_frei0r_mixer_set_caps (GstPad * pad, GstCaps * caps)
{
  GstFrei0rMixer *self = GST_FREI0R_MIXER (gst_pad_get_parent (pad));
  gboolean ok = TRUE;

  gst_caps_replace (&self->caps, caps);

  /* Forward to every pad except the one the caps arrived on; sink2 is
   * optional and may not exist. */
  if (pad != self->src)
    ok = gst_pad_set_caps (self->src, caps) && ok;
  if (pad != self->sink0)
    ok = gst_pad_set_caps (self->sink0, caps) && ok;
  if (pad != self->sink1)
    ok = gst_pad_set_caps (self->sink1, caps) && ok;
  if (self->sink2 && pad != self->sink2)
    ok = gst_pad_set_caps (self->sink2, caps) && ok;

  /* Only bother parsing when all pads accepted the caps */
  if (ok)
    ok = gst_video_format_parse_caps (caps, &self->fmt, &self->width,
        &self->height);

  gst_object_unref (self);

  return ok;
}
/* Configure the MIO video device from caps: map raw video formats to
 * CoreVideo pixel format codes (only UYVY/YUY2 are negotiable here, hence
 * the assert), or fall through to JPEG for image/jpeg caps. The chosen
 * width/height/framerate are matched against the device's format list. */
gboolean
gst_mio_video_device_set_caps (GstMIOVideoDevice * self, GstCaps * caps)
{
  GstVideoFormat format;
  GstMIOSetFormatCtx ctx = { 0, };

  if (gst_video_format_parse_caps (caps, &format, &ctx.width, &ctx.height)) {
    if (format == GST_VIDEO_FORMAT_UYVY)
      ctx.format = kCVPixelFormatType_422YpCbCr8;
    else if (format == GST_VIDEO_FORMAT_YUY2)
      ctx.format = kCVPixelFormatType_422YpCbCr8Deprecated;
    else
      g_assert_not_reached ();
  } else {
    /* Not raw video: the only other caps this element negotiates is JPEG */
    GstStructure *s;

    s = gst_caps_get_structure (caps, 0);
    g_assert (gst_structure_has_name (s, "image/jpeg"));
    gst_structure_get_int (s, "width", &ctx.width);
    gst_structure_get_int (s, "height", &ctx.height);

    ctx.format = kFigVideoCodecType_JPEG_OpenDML;
  }

  gst_video_parse_caps_framerate (caps, &ctx.fps_n, &ctx.fps_d);

  /* Walk the device's supported formats; the callback sets ctx.success
   * when a matching format is activated. */
  gst_mio_video_device_formats_foreach (self,
      gst_mio_video_device_activate_matching_format, &ctx);

  return ctx.success;
}
Exemplo n.º 9
0
/* Negotiate input caps: record the frame geometry, compute the plane-0
 * row stride, and allocate the float scratch buffer used while blurring. */
static gboolean
gauss_blur_set_caps (GstBaseTransform * btrans,
    GstCaps * incaps, GstCaps * outcaps)
{
  GaussBlur *gb = GAUSS_BLUR (btrans);
  GstStructure *structure;
  GstVideoFormat format;
  guint32 n_elems;

  /* structure is only used as a sanity check that the caps are non-empty */
  structure = gst_caps_get_structure (incaps, 0);
  g_return_val_if_fail (structure != NULL, FALSE);

  if (!gst_video_format_parse_caps (incaps, &format, &gb->width, &gb->height))
    return FALSE;

  /* get stride */
  gb->stride = gst_video_format_get_row_stride (format, 0, gb->width);

  n_elems = gb->stride * gb->height;

  /* NOTE(review): tempim is allocated on every caps change without freeing
   * any previous allocation - confirm set_caps cannot run twice before
   * cleanup, otherwise this leaks on renegotiation. */
  gb->tempim = g_malloc (sizeof (gfloat) * n_elems);
  //gb->smoothedim = g_malloc (sizeof (guint16) * n_elems);

  return TRUE;
}
Exemplo n.º 10
0
/* Extract video geometry, pixel format, PAR and stride from caps.
 * Returns false when the caps are not fixed/parseable. The 0.10 branch
 * assumes 4 bytes per pixel for the stride; the 1.x branch reads the real
 * plane-0 stride from GstVideoInfo. */
bool getVideoSizeAndFormatFromCaps(GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
{
#ifdef GST_API_VERSION_1
    GstVideoInfo info;
    if (!gst_video_info_from_caps(&info, caps))
        return false;

    format = GST_VIDEO_INFO_FORMAT(&info);
    size.setWidth(GST_VIDEO_INFO_WIDTH(&info));
    size.setHeight(GST_VIDEO_INFO_HEIGHT(&info));
    pixelAspectRatioNumerator = GST_VIDEO_INFO_PAR_N(&info);
    pixelAspectRatioDenominator = GST_VIDEO_INFO_PAR_D(&info);
    stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
#else
    gint width, height;
    if (!GST_IS_CAPS(caps) || !gst_caps_is_fixed(caps)
        || !gst_video_format_parse_caps(caps, &format, &width, &height)
        || !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator,
                                                    &pixelAspectRatioDenominator))
        return false;
    size.setWidth(width);
    size.setHeight(height);
    /* 0.10 has no stride info in caps: assume packed 32-bit pixels */
    stride = size.width() * 4;
#endif

    return true;
}
Exemplo n.º 11
0
  /* Private-data constructor: refs the buffer and caps, then tries to
   * extract width/height/format from the caps. On parse failure the
   * fields keep their "unknown" defaults (-1 / GST_VIDEO_FORMAT_UNKNOWN). */
  QtCamGstSamplePrivate(GstBuffer *b, GstCaps *c) :
    buffer(gst_buffer_ref(b)),
    caps(gst_caps_ref(c)),
    width(-1),
    height(-1),
    format(GST_VIDEO_FORMAT_UNKNOWN) {

#if GST_CHECK_VERSION(1,0,0)
    /* GStreamer 1.x maps the buffer lazily; nothing mapped yet */
    mapped = false;
#endif

#if GST_CHECK_VERSION(1,0,0)
    /* 1.x path: geometry/format come from GstVideoInfo */
    GstVideoInfo info;
    if (!gst_video_info_from_caps (&info, caps)) {
      qCritical() << "Failed to parse GStreamer caps";
    } else {
      width = info.width;
      height = info.height;
      format = info.finfo->format;
    }
#else
    /* 0.10 path: legacy caps parser fills the fields directly */
    if (!gst_video_format_parse_caps (caps, &format, &width, &height)) {
      qCritical() << "Failed to parse GStreamer caps";
    }
#endif
  }
/* Store the information from the caps that we are interested in.
 * Only marks the overlay state valid when the caps actually parsed;
 * previously the parse result was ignored and valid was set to TRUE even
 * when width/height were left untouched. */
void prepare_overlay (GstElement * overlay, GstCaps * caps, gpointer user_data)
{
    ContextInfo *cx=(ContextInfo*) user_data;

    if (gst_video_format_parse_caps (caps, NULL, &(cx->ov->width), &(cx->ov->height))) {
      cx->ov->valid = TRUE;
      /* start the overlay fully off-screen to the right */
      cx->ov->xpos = cx->ov->width;
    } else {
      cx->ov->valid = FALSE;
    }
}
Exemplo n.º 13
0
/* Cache the negotiated video format and frame size on the overlay.
 * Returns FALSE when the input caps are not parseable raw video. */
static gboolean
gst_rsvg_overlay_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstRsvgOverlay *overlay = GST_RSVG_OVERLAY (btrans);
  gboolean parsed;

  parsed = gst_video_format_parse_caps (incaps, &overlay->caps_format,
      &overlay->caps_width, &overlay->caps_height);

  /* failure is not expected once caps have been negotiated */
  return G_LIKELY (parsed);
}
Exemplo n.º 14
0
/* Negotiate caps for the gamma element: parse format/size, compute the
 * frame size in bytes, and pick the in-place processing function that
 * matches the pixel layout (planar YUV, packed YUV, or packed RGB). */
static gboolean
gst_gamma_set_caps (GstBaseTransform * base, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstGamma *gamma = GST_GAMMA (base);

  GST_DEBUG_OBJECT (gamma,
      "setting caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps,
      outcaps);

  if (!gst_video_format_parse_caps (incaps, &gamma->format, &gamma->width,
          &gamma->height))
    goto invalid_caps;

  gamma->size =
      gst_video_format_get_size (gamma->format, gamma->width, gamma->height);

  /* Dispatch on pixel layout; formats not listed are rejected */
  switch (gamma->format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      gamma->process = gst_gamma_planar_yuv_ip;
      break;
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_YVYU:
      gamma->process = gst_gamma_packed_yuv_ip;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      gamma->process = gst_gamma_packed_rgb_ip;
      break;
    default:
      goto invalid_caps;
      break;
  }

  return TRUE;

invalid_caps:
  GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
  return FALSE;
}
Exemplo n.º 15
0
/* Viewfinder-pad setcaps: push the negotiated size and framerate down to
 * the camera HAL parameters, then propagate the framerate to the buffer
 * pool so buffer durations can be computed. Empty/any caps are accepted
 * as "nothing to configure". */
static gboolean
gst_droid_cam_src_vfsrc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDroidCamSrc *src = GST_DROID_CAM_SRC (GST_OBJECT_PARENT (pad));
  GstDroidCamSrcClass *klass = GST_DROID_CAM_SRC_GET_CLASS (src);

  int width, height;
  int fps_n, fps_d;
  int fps;

  GST_DEBUG_OBJECT (src, "vfsrc setcaps %" GST_PTR_FORMAT, caps);

  if (!caps || gst_caps_is_empty (caps) || gst_caps_is_any (caps)) {
    /* We are happy. */
    return TRUE;
  }

  if (!gst_video_format_parse_caps (caps, NULL, &width, &height)) {
    GST_ELEMENT_ERROR (src, STREAM, FORMAT, ("Failed to parse caps"), (NULL));
    return FALSE;
  }

  if (!gst_video_parse_caps_framerate (caps, &fps_n, &fps_d)) {
    GST_ELEMENT_ERROR (src, STREAM, FORMAT, ("Failed to parse caps framerate"),
        (NULL));
    return FALSE;
  }

  if (width == 0 || height == 0) {
    GST_ELEMENT_ERROR (src, STREAM, FORMAT, ("Invalid dimensions"), (NULL));
    return FALSE;
  }

  /* HAL wants an integer fps; fractional rates are truncated here */
  fps = fps_n / fps_d;

  GST_OBJECT_LOCK (src);
  camera_params_set_viewfinder_size (src->camera_params, width, height);
  camera_params_set_viewfinder_fps (src->camera_params, fps);
  GST_OBJECT_UNLOCK (src);

  if (klass->set_camera_params (src)) {
    /* buffer pool needs to know about FPS */

    GST_CAMERA_BUFFER_POOL_LOCK (src->pool);
    /* TODO: Make sure we are not overwriting a previous value. */
    src->pool->buffer_duration =
        gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
    src->pool->fps_n = fps_n;
    src->pool->fps_d = fps_d;
    GST_CAMERA_BUFFER_POOL_UNLOCK (src->pool);

    return TRUE;
  }

  return FALSE;
}
Exemplo n.º 16
0
/* get notified of caps and plug in the correct process function.
 * Parses format/size from the input caps, records the frame size in
 * bytes, and selects the processing routine matching the pixel layout.
 * Returns FALSE for unparseable caps or unsupported formats. */
static gboolean
gst_video_balance_set_caps (GstBaseTransform * base, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstVideoBalance *videobalance = GST_VIDEO_BALANCE (base);

  GST_DEBUG_OBJECT (videobalance,
      "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);

  /* reset so an unsupported format below yields FALSE at the end */
  videobalance->process = NULL;

  if (!gst_video_format_parse_caps (incaps, &videobalance->format,
          &videobalance->width, &videobalance->height))
    goto invalid_caps;

  videobalance->size =
      gst_video_format_get_size (videobalance->format, videobalance->width,
      videobalance->height);

  /* Dispatch on pixel layout; formats not listed leave process NULL */
  switch (videobalance->format) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
      videobalance->process = gst_video_balance_planar_yuv;
      break;
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_YVYU:
      videobalance->process = gst_video_balance_packed_yuv;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      videobalance->process = gst_video_balance_packed_rgb;
      break;
    default:
      break;
  }

  return videobalance->process != NULL;

invalid_caps:
  GST_ERROR_OBJECT (videobalance, "Invalid caps: %" GST_PTR_FORMAT, incaps);
  return FALSE;
}
Exemplo n.º 17
0
/* Negotiate caps for the retinex element: require that input and output
 * caps parse as raw video with identical format and size, then allocate
 * the OpenCV images used by the processing chain.
 * Fix: the parse return values were previously ignored, so a malformed
 * caps left in_format/out_format uninitialized and the comparison below
 * read indeterminate values (undefined behavior). */
static gboolean gst_retinex_set_caps(GstBaseTransform * btrans, GstCaps * incaps, GstCaps * outcaps) 
{
  GstRetinex *retinex = GST_RETINEX (btrans);
  gint in_width, in_height;
  gint out_width, out_height;
  
  GST_RETINEX_LOCK (retinex);
  
  if (!gst_video_format_parse_caps(incaps, &retinex->in_format, &in_width, &in_height) ||
      !gst_video_format_parse_caps(outcaps, &retinex->out_format, &out_width, &out_height) ||
      !(retinex->in_format == retinex->out_format) || 
      !(in_width == out_width && in_height == out_height)) {
    GST_WARNING("Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps, outcaps);
    GST_RETINEX_UNLOCK (retinex);
    return FALSE;
  }
  
  retinex->width  = in_width;
  retinex->height = in_height;
  
  GST_INFO("Initialising Retinex...");

  const CvSize size = cvSize(retinex->width, retinex->height);
  GST_WARNING (" width %d, height %d", retinex->width, retinex->height);

  //////////////////////////////////////////////////////////////////////////////
  // allocate image structs in all spaces///////////////////////////////////////
  /* pFrame is only a header: its data pointer is set per-buffer */
  retinex->pFrame    = cvCreateImageHeader(size, IPL_DEPTH_8U, 4);

  retinex->pFrame2   = cvCreateImage(size, IPL_DEPTH_8U, 3);
  retinex->pFrameA   = cvCreateImage(size, IPL_DEPTH_8U, 1);

  retinex->ch1       = cvCreateImage(size, IPL_DEPTH_8U, 1);
  retinex->ch2       = cvCreateImage(size, IPL_DEPTH_8U, 1);
  retinex->ch3       = cvCreateImage(size, IPL_DEPTH_8U, 1);
  
  GST_INFO("Retinex initialized.");
  
  GST_RETINEX_UNLOCK (retinex);
  
  return TRUE;
}
Exemplo n.º 18
0
/* Negotiate caps for the skin-detection element: require that input and
 * output caps parse as raw video with identical format and size, then
 * allocate the OpenCV images used for processing.
 * Fix: the parse return values were previously ignored, so malformed caps
 * left in_format/out_format uninitialized and the comparison below read
 * indeterminate values (undefined behavior). */
static gboolean gst_skin_set_caps(GstBaseTransform * btrans, GstCaps * incaps, GstCaps * outcaps) 
{
  GstSkin *skin = GST_SKIN (btrans);
  gint in_width, in_height;
  gint out_width, out_height;
  
  GST_SKIN_LOCK (skin);
  
  if (!gst_video_format_parse_caps(incaps, &skin->in_format, &in_width, &in_height) ||
      !gst_video_format_parse_caps(outcaps, &skin->out_format, &out_width, &out_height) ||
      !(skin->in_format == skin->out_format) || 
      !(in_width == out_width && in_height == out_height)) {
    GST_WARNING("Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps, outcaps);
    GST_SKIN_UNLOCK (skin);
    return FALSE;
  }
  
  skin->width  = in_width;
  skin->height = in_height;
  
  GST_INFO("Initialising Skin...");

  const CvSize size = cvSize(skin->width, skin->height);
  GST_WARNING (" width %d, height %d", skin->width, skin->height);

  //////////////////////////////////////////////////////////////////////////////
  // allocate image structs in RGB(A) //////////////////////////////////////////
  /* cvRGBA is only a header: its data pointer is set per-buffer */
  skin->cvRGBA = cvCreateImageHeader(size, IPL_DEPTH_8U, 4);
  skin->cvRGB  = cvCreateImage(size, IPL_DEPTH_8U, 3);

  skin->ch1    = cvCreateImage(size, IPL_DEPTH_8U, 1);
  skin->ch2    = cvCreateImage(size, IPL_DEPTH_8U, 1);
  skin->ch3    = cvCreateImage(size, IPL_DEPTH_8U, 1);
  skin->chA    = cvCreateImage(size, IPL_DEPTH_8U, 1);

  GST_INFO("Skin initialized.");
  
  GST_SKIN_UNLOCK (skin);
  
  return TRUE;
}
Exemplo n.º 19
0
/* Record the negotiated frame size on the filter instance.
 * The pixel format is parsed only to validate the caps; it is not kept. */
static gboolean
gst_frei0r_filter_set_caps (GstBaseTransform * trans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstFrei0rFilter *self = GST_FREI0R_FILTER (trans);
  GstVideoFormat format;

  /* parse result doubles as the return value: TRUE only for valid caps */
  return gst_video_format_parse_caps (incaps, &format, &self->width,
      &self->height);
}
Exemplo n.º 20
0
/* Negotiate caps for the logo-insert element, caching format/size.
 * Fixes: (1) g_return_val_if_fail returned GST_FLOW_ERROR from a gboolean
 * function (wrong type domain); it now returns FALSE. (2) the parse
 * result was ignored, so failure left li->format/width/height
 * uninitialized while still reporting success. */
static gboolean
gst_logoinsert_set_caps (GstBaseTransform * base_transform,
    GstCaps * incaps, GstCaps * outcaps)
{
  GstLogoinsert *li;

  g_return_val_if_fail (GST_IS_LOGOINSERT (base_transform), FALSE);
  li = GST_LOGOINSERT (base_transform);

  if (!gst_video_format_parse_caps (incaps, &li->format, &li->width,
          &li->height))
    return FALSE;

  return TRUE;
}
Exemplo n.º 21
0
/* Report the size in bytes of one video frame for the given caps.
 * Fix: previously gst_video_format_get_size was called even when the
 * caps failed to parse, feeding it uninitialized format/width/height
 * (undefined behavior) before returning FALSE. *size is now only
 * written on success, matching GstBaseTransform's contract. */
static gboolean
gst_patchdetect_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    guint * size)
{
  int width, height;
  GstVideoFormat format;

  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    return FALSE;

  *size = gst_video_format_get_size (format, width, height);

  return TRUE;
}
Exemplo n.º 22
0
/* Report the size in bytes of one video frame for the given caps.
 * Returns FALSE (leaving *size untouched) when the caps do not parse. */
static gboolean gst_retinex_get_unit_size(GstBaseTransform * btrans, GstCaps * caps, guint * size) {
	GstVideoFormat fmt;
	gint frame_w, frame_h;
	gboolean parsed;

	parsed = gst_video_format_parse_caps(caps, &fmt, &frame_w, &frame_h);
	if (!parsed)
		return FALSE;

	*size = gst_video_format_get_size(fmt, frame_w, frame_h);

	GST_DEBUG_OBJECT(btrans, "unit size = %d for format %d w %d height %d", *size, fmt, frame_w, frame_h);

	return TRUE;
}
Exemplo n.º 23
0
/* Cache the negotiated video format and size on the MSE element.
 * Fix: the parse result was previously discarded and TRUE returned
 * unconditionally, so unparseable caps were accepted while leaving
 * fs->format/width/height uninitialized. The parse result is now
 * propagated as the setcaps result. */
static gboolean
gst_mse_set_caps (GstPad * pad, GstCaps * caps)
{
  GstMSE *fs;
  gboolean ret;

  fs = GST_MSE (gst_pad_get_parent (pad));

  ret = gst_video_format_parse_caps (caps, &fs->format, &fs->width,
      &fs->height);

  gst_object_unref (fs);

  return ret;
}
Exemplo n.º 24
0
/* Sink-pad setcaps for the base video decoder: resets the stored
 * GstVideoState, refills it from the new caps (size, framerate, PAR,
 * interlacing, codec_data), then invokes the subclass start vfunc. */
static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStructure *structure;
  const GValue *codec_data;
  GstVideoState *state;
  gboolean ret = TRUE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);

  state = &base_video_decoder->state;

  /* drop the old codec_data ref before wiping the whole state */
  if (state->codec_data) {
    gst_buffer_unref (state->codec_data);
  }
  memset (state, 0, sizeof (GstVideoState));

  structure = gst_caps_get_structure (caps, 0);

  /* parse results deliberately unchecked: fields stay zeroed on failure */
  gst_video_format_parse_caps (caps, NULL, &state->width, &state->height);
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

#if 0
  /* requires 0.10.23 */
  state->have_interlaced =
      gst_video_format_parse_caps_interlaced (caps, &state->interlaced);
#else
  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);
#endif

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
    /* NOTE(review): gst_value_get_buffer does not add a ref, yet the
     * buffer is unreffed on the next setcaps above - confirm the caps
     * outlive the state or that a ref should be taken here. */
    state->codec_data = gst_value_get_buffer (codec_data);
  }

  if (base_video_decoder_class->start) {
    ret = base_video_decoder_class->start (base_video_decoder);
  }

  g_object_unref (base_video_decoder);

  return ret;
}
Exemplo n.º 25
0
/* Sink-pad setcaps for the base video encoder: fills the shared codec
 * state from the caps (format/size, framerate with 0/1 fallback, PAR
 * with 1/1 fallback, interlacing, clean aperture defaults) and then
 * calls the subclass set_format and start vfuncs. */
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state;
  gboolean ret;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  GST_DEBUG ("setcaps");

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  structure = gst_caps_get_structure (caps, 0);

  /* parse result unchecked: format/size stay as-is on malformed caps */
  gst_video_format_parse_caps (caps, &state->format,
      &state->width, &state->height);

  /* framerate defaults to 0/1 ("unknown") and is re-defaulted if the
   * caps produced a zero denominator */
  state->fps_n = 0;
  state->fps_d = 1;
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  if (state->fps_d == 0) {
    state->fps_n = 0;
    state->fps_d = 1;
  }

  /* PAR defaults to square pixels when absent */
  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);

  /* no clean-aperture data: clean region equals the full frame */
  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  ret = base_video_encoder_class->set_format (base_video_encoder,
      &GST_BASE_VIDEO_CODEC (base_video_encoder)->state);
  if (ret) {
    ret = base_video_encoder_class->start (base_video_encoder);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
Exemplo n.º 26
0
/* Report the size in bytes of one video frame described by the caps.
 * *size is written only when the caps parse as raw video. */
static gboolean
gst_video_scale_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
    guint * size)
{
  GstVideoFormat fmt;
  gint frame_width, frame_height;
  gboolean parsed;

  parsed = gst_video_format_parse_caps (caps, &fmt, &frame_width,
      &frame_height);
  if (parsed)
    *size = gst_video_format_get_size (fmt, frame_width, frame_height);

  return parsed;
}
Exemplo n.º 27
0
/* Video-pad event handler for the Decklink sink. EOS is latched locally
 * (flag + seqnum) and re-posted as an element EOS message; everything
 * else takes the default pad handling. The disabled CAPS branch would
 * have selected 8/10-bit YUV based on the negotiated format. */
static gboolean
gst_decklink_sink_videosink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res;
  GstDecklinkSink *decklinksink;

  decklinksink = GST_DECKLINK_SINK (parent);

  GST_DEBUG_OBJECT (pad, "event: %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
    /* FIXME: this makes no sense, template caps don't contain v210 */
#if 0
    case GST_EVENT_CAPS:{
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_video_format_parse_caps (caps, &format, &width, &height);
      if (ret) {
        if (format == GST_VIDEO_FORMAT_v210) {
          decklinksink->pixel_format = bmdFormat10BitYUV;
        } else {
          decklinksink->pixel_format = bmdFormat8BitYUV;
        }
      }
      break;
    }
#endif
    case GST_EVENT_EOS:
      /* FIXME: EOS aggregation with audio pad looks wrong */
      decklinksink->video_eos = TRUE;
      decklinksink->video_seqnum = gst_event_get_seqnum (event);
      {
        GstMessage *message;

        /* propagate the EOS seqnum so the app can correlate the message */
        message = gst_message_new_eos (GST_OBJECT_CAST (decklinksink));
        gst_message_set_seqnum (message, decklinksink->video_seqnum);
        gst_element_post_message (GST_ELEMENT_CAST (decklinksink), message);
      }
      res = gst_pad_event_default (pad, parent, event);
      break;
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

  return res;
}
Exemplo n.º 28
0
/* Negotiate caps on the GL image sink. Accepts either GL textures
 * (video/x-raw-gl, width/height read straight from the structure) or raw
 * video (parsed via gst_video_format_parse_caps). Framerate and PAR are
 * required in both cases. On success all geometry fields are cached and
 * an X window id is requested if none was provided yet. */
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  GstVideoFormat format;
  GstStructure *structure;
  gboolean is_gl;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-gl")) {
    /* GL texture input: no pixel format applies */
    is_gl = TRUE;
    format = GST_VIDEO_FORMAT_UNKNOWN;
    ok = gst_structure_get_int (structure, "width", &width);
    ok &= gst_structure_get_int (structure, "height", &height);
  } else {
    is_gl = FALSE;
    ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  }
  /* framerate and PAR are mandatory for both branches */
  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
  GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  glimage_sink->is_gl = is_gl;
  glimage_sink->format = format;
  glimage_sink->width = width;
  glimage_sink->height = height;
  glimage_sink->fps_n = fps_n;
  glimage_sink->fps_d = fps_d;
  glimage_sink->par_n = par_n;
  glimage_sink->par_d = par_d;

  /* ask the app for a window id unless one is already pending/set */
  if (!glimage_sink->window_id && !glimage_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (glimage_sink));

  return TRUE;
}
/* Called once the first video buffer reaches the app sink: read the
 * negotiated geometry and framerate off the sink pad and publish them
 * into mInfo/mPicture. */
void GStreamerReader::VideoPreroll()
{
  /* The first video buffer has reached the video sink. Get width and height */
  LOG(PR_LOG_DEBUG, ("Video preroll"));
  GstPad* sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink");
  /* NOTE(review): gst_pad_get_negotiated_caps can return NULL and the
   * parse/unref below do not guard against it - confirm preroll
   * guarantees negotiated caps here. */
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
  GstStructure* structure = gst_caps_get_structure(caps, 0);
  /* fpsNum/fpsDen are declared outside this view - presumably members */
  gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");
  mInfo.mDisplay = nsIntSize(mPicture.width, mPicture.height);
  mInfo.mHasVideo = true;
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
Exemplo n.º 30
0
/* Cache the negotiated pixel format and frame dimensions on the overlay.
 * Returns FALSE when the input caps are not parseable raw video. */
static gboolean
gst_gdk_pixbuf_overlay_set_caps (GstBaseTransform * trans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstGdkPixbufOverlay *overlay = GST_GDK_PIXBUF_OVERLAY (trans);
  GstVideoFormat fmt;
  gint frame_w, frame_h;

  if (!gst_video_format_parse_caps (incaps, &fmt, &frame_w, &frame_h))
    return FALSE;

  overlay->format = fmt;
  overlay->width = frame_w;
  overlay->height = frame_h;

  return TRUE;
}