Example #1
static GstFlowReturn
gst_test_reverse_negotiation_sink_buffer_alloc (GstBaseSink * bsink,
    guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf)
{
  GstTestReverseNegotiationSink *sink =
      GST_TEST_REVERSE_NEGOTIATION_SINK (bsink);
  GstVideoFormat fmt;
  gint width, height;

  fail_unless (gst_video_format_parse_caps (caps, &fmt, &width, &height));

  if (sink->nbuffers < 2) {
    *buf =
        gst_buffer_new_and_alloc (gst_video_format_get_size (fmt, width,
            height));
    gst_buffer_set_caps (*buf, caps);
  } else {
    gint fps_n, fps_d;

    fail_unless (gst_video_parse_caps_framerate (caps, &fps_n, &fps_d));

    width = 512;
    height = 128;
    *buf =
        gst_buffer_new_and_alloc (gst_video_format_get_size (fmt, width,
            height));
    caps = gst_video_format_new_caps (fmt, width, height, fps_n, fps_d, 1, 1);
    gst_buffer_set_caps (*buf, caps);
    gst_caps_unref (caps);
  }

  return GST_FLOW_OK;
}
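
All of the examples in this listing lean on the same GStreamer 0.10 caps helpers from gst/video/video.h: gst_video_format_parse_caps for the mandatory format/width/height fields, gst_video_parse_caps_framerate for the framerate, and the optional pixel-aspect-ratio and interlacing lookups. Below is a minimal sketch of that shared pattern; the helper name and the chosen defaults are illustrative, not taken from any example in this listing.

#include <gst/gst.h>
#include <gst/video/video.h>

/* Hypothetical helper distilling the parsing pattern used by the setcaps
 * handlers in this listing: mandatory format/size/framerate, optional PAR
 * and interlacing with conservative defaults. */
static gboolean
parse_raw_video_caps (GstCaps * caps, GstVideoFormat * format,
    gint * width, gint * height, gint * fps_n, gint * fps_d,
    gint * par_n, gint * par_d, gboolean * interlaced)
{
  /* format, width and height are required for fixed raw video caps */
  if (!gst_video_format_parse_caps (caps, format, width, height))
    return FALSE;

  /* most handlers also treat a missing framerate as a negotiation error */
  if (!gst_video_parse_caps_framerate (caps, fps_n, fps_d))
    return FALSE;

  /* pixel aspect ratio and interlacing are optional; default to square
   * pixels and progressive content when absent */
  *par_n = 1;
  *par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, par_n, par_d);

  *interlaced = FALSE;
  gst_video_format_parse_caps_interlaced (caps, interlaced);

  return TRUE;
}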
Example #2
gboolean
gst_mio_video_device_set_caps (GstMIOVideoDevice * self, GstCaps * caps)
{
  GstVideoFormat format;
  GstMIOSetFormatCtx ctx = { 0, };

  if (gst_video_format_parse_caps (caps, &format, &ctx.width, &ctx.height)) {
    if (format == GST_VIDEO_FORMAT_UYVY)
      ctx.format = kCVPixelFormatType_422YpCbCr8;
    else if (format == GST_VIDEO_FORMAT_YUY2)
      ctx.format = kCVPixelFormatType_422YpCbCr8Deprecated;
    else
      g_assert_not_reached ();
  } else {
    GstStructure *s;

    s = gst_caps_get_structure (caps, 0);
    g_assert (gst_structure_has_name (s, "image/jpeg"));
    gst_structure_get_int (s, "width", &ctx.width);
    gst_structure_get_int (s, "height", &ctx.height);

    ctx.format = kFigVideoCodecType_JPEG_OpenDML;
  }

  gst_video_parse_caps_framerate (caps, &ctx.fps_n, &ctx.fps_d);

  gst_mio_video_device_formats_foreach (self,
      gst_mio_video_device_activate_matching_format, &ctx);

  return ctx.success;
}
Example #3
gboolean
gst_base_video_state_from_caps (GstVideoState * state, GstCaps * caps)
{

  gst_video_format_parse_caps (caps, &state->format,
      &state->width, &state->height);

  if (!gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d))
    return FALSE;

  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  {
    GstStructure *structure = gst_caps_get_structure (caps, 0);
    state->interlaced = FALSE;
    gst_structure_get_boolean (structure, "interlaced", &state->interlaced);
  }

  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  /* FIXME need better error handling */
  return TRUE;
}
Example #4
static gboolean
gst_droid_cam_src_vfsrc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDroidCamSrc *src = GST_DROID_CAM_SRC (GST_OBJECT_PARENT (pad));
  GstDroidCamSrcClass *klass = GST_DROID_CAM_SRC_GET_CLASS (src);

  int width, height;
  int fps_n, fps_d;
  int fps;

  GST_DEBUG_OBJECT (src, "vfsrc setcaps %" GST_PTR_FORMAT, caps);

  if (!caps || gst_caps_is_empty (caps) || gst_caps_is_any (caps)) {
    /* We are happy. */
    return TRUE;
  }

  if (!gst_video_format_parse_caps (caps, NULL, &width, &height)) {
    GST_ELEMENT_ERROR (src, STREAM, FORMAT, ("Failed to parse caps"), (NULL));
    return FALSE;
  }

  if (!gst_video_parse_caps_framerate (caps, &fps_n, &fps_d)) {
    GST_ELEMENT_ERROR (src, STREAM, FORMAT, ("Failed to parse caps framerate"),
        (NULL));
    return FALSE;
  }

  if (width == 0 || height == 0) {
    GST_ELEMENT_ERROR (src, STREAM, FORMAT, ("Invalid dimensions"), (NULL));
    return FALSE;
  }

  fps = fps_n / fps_d;

  GST_OBJECT_LOCK (src);
  camera_params_set_viewfinder_size (src->camera_params, width, height);
  camera_params_set_viewfinder_fps (src->camera_params, fps);
  GST_OBJECT_UNLOCK (src);

  if (klass->set_camera_params (src)) {
    /* buffer pool needs to know about FPS */

    GST_CAMERA_BUFFER_POOL_LOCK (src->pool);
    /* TODO: Make sure we are not overwriting a previous value. */
    src->pool->buffer_duration =
        gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
    src->pool->fps_n = fps_n;
    src->pool->fps_d = fps_d;
    GST_CAMERA_BUFFER_POOL_UNLOCK (src->pool);

    return TRUE;
  }

  return FALSE;
}
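
The viewfinder setcaps handler above reduces the fractional framerate in two ways: an integer frames-per-second value for the camera parameters (plain truncating division) and a per-frame duration for the buffer pool via gst_util_uint64_scale_int. A standalone sketch of those two conversions, with purely illustrative values:

#include <gst/gst.h>

/* Sketch of the two framerate conversions from the handler above;
 * 30000/1001 (NTSC's ~29.97 fps) is just an illustrative value. */
static void
framerate_conversions_example (void)
{
  gint fps_n = 30000, fps_d = 1001;

  /* integer fps as handed to the camera parameters: truncates to 29 */
  gint fps = fps_n / fps_d;

  /* per-frame duration for the buffer pool:
   * GST_SECOND * fps_d / fps_n, roughly 33.37 ms here */
  GstClockTime duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);

  GST_LOG ("fps=%d, frame duration=%" GST_TIME_FORMAT, fps,
      GST_TIME_ARGS (duration));
}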
Example #5
static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStructure *structure;
  const GValue *codec_data;
  GstVideoState *state;
  gboolean ret = TRUE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);

  state = &base_video_decoder->state;

  if (state->codec_data) {
    gst_buffer_unref (state->codec_data);
  }
  memset (state, 0, sizeof (GstVideoState));

  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, NULL, &state->width, &state->height);
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

#if 0
  /* requires 0.10.23 */
  state->have_interlaced =
      gst_video_format_parse_caps_interlaced (caps, &state->interlaced);
#else
  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);
#endif

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
    state->codec_data = gst_value_get_buffer (codec_data);
  }

  if (base_video_decoder_class->start) {
    ret = base_video_decoder_class->start (base_video_decoder);
  }

  g_object_unref (base_video_decoder);

  return ret;
}
Example #6
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state;
  gboolean ret;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  GST_DEBUG ("setcaps");

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, &state->format,
      &state->width, &state->height);

  state->fps_n = 0;
  state->fps_d = 1;
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  if (state->fps_d == 0) {
    state->fps_n = 0;
    state->fps_d = 1;
  }

  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);

  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  ret = base_video_encoder_class->set_format (base_video_encoder,
      &GST_BASE_VIDEO_CODEC (base_video_encoder)->state);
  if (ret) {
    ret = base_video_encoder_class->start (base_video_encoder);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
Example #7
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  GstVideoFormat format;
  GstStructure *structure;
  gboolean is_gl;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-gl")) {
    is_gl = TRUE;
    format = GST_VIDEO_FORMAT_UNKNOWN;
    ok = gst_structure_get_int (structure, "width", &width);
    ok &= gst_structure_get_int (structure, "height", &height);
  } else {
    is_gl = FALSE;
    ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  }
  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
  GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  glimage_sink->is_gl = is_gl;
  glimage_sink->format = format;
  glimage_sink->width = width;
  glimage_sink->height = height;
  glimage_sink->fps_n = fps_n;
  glimage_sink->fps_d = fps_d;
  glimage_sink->par_n = par_n;
  glimage_sink->par_d = par_d;

  if (!glimage_sink->window_id && !glimage_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (glimage_sink));

  return TRUE;
}
Example #8
gboolean AVSC_CC
gst_avsynth_buf_pad_caps_to_vi (GstBuffer *buf, GstPad *pad, GstCaps *caps, AVS_VideoInfo *vi)
{
  gboolean ret = TRUE;
  GstVideoFormat vf;
  gint fps_num = 0, fps_den = 0;
  gint width = 0, height = 0;
  gboolean interlaced;
  gint64 duration = -1;
 

  ret = gst_video_format_parse_caps (caps, &vf, &width, &height);
  if (!ret)
  {
    GST_ERROR ("Failed to convert caps to videoinfo - can't get format/width/height");
    goto cleanup;
  }
  ret = gst_video_format_parse_caps_interlaced (caps, &interlaced);
  if (!ret)
  {
    GST_ERROR ("Failed to convert caps to videoinfo - can't get interlaced state");
    goto cleanup;
  }

  ret = gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  if (!ret)
  {
    GST_ERROR ("Failed to convert caps to videoinfo - can't get fps");
    goto cleanup;
  }

  switch (vf)
  {
    case GST_VIDEO_FORMAT_I420:
      vi->pixel_type = AVS_CS_I420;
      break;
    case GST_VIDEO_FORMAT_YUY2:
      vi->pixel_type = AVS_CS_YUY2;
      break;
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
      vi->pixel_type = AVS_CS_BGR32;
      break;
    case GST_VIDEO_FORMAT_BGR:
      vi->pixel_type = AVS_CS_BGR24;
      break;
    case GST_VIDEO_FORMAT_YV12:
      vi->pixel_type = AVS_CS_YV12;
      break;
    default:
      ret = FALSE;
  }
  if (interlaced)
  {
    vi->image_type = AVS_IT_FIELDBASED;
    if (buf)
    {
      if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_TFF))
        vi->image_type |= AVS_IT_TFF;
      else
        /* GStreamer has no notion of "unknown field order". If you get the
         * wrong field order, file a bug report against the source element
         * (or perhaps the decoder); field order should be known.
         */
        vi->image_type |= AVS_IT_BFF;
    }
  }

  vi->fps_numerator = fps_num;
  vi->fps_denominator = fps_den;
  vi->width = width;
  vi->height = height;

  duration = gst_avsynth_query_duration (pad, vi);

  vi->num_frames = duration;

cleanup:
  return ret;
}
Example #9
static gboolean
gst_cel_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
  GstCelVideoSrc *self = GST_CEL_VIDEO_SRC_CAST (basesrc);
  GstVideoFormat video_format;
  gint width, height, fps_n, fps_d;
  guint i;
  GstCelVideoFormat *selected_format;

  if (self->device == NULL)
    goto no_device;

  if (!gst_video_format_parse_caps (caps, &video_format, &width, &height))
    goto invalid_format;
  if (!gst_video_parse_caps_framerate (caps, &fps_n, &fps_d))
    goto invalid_format;

  gst_cel_video_src_ensure_device_caps_and_formats (self);

  selected_format = NULL;

  for (i = 0; i != self->device_formats->len; i++) {
    GstCelVideoFormat *format;

    format = &g_array_index (self->device_formats, GstCelVideoFormat, i);
    if (format->video_format == video_format &&
        format->width == width && format->height == height &&
        format->fps_n == fps_n && format->fps_d == fps_d) {
      selected_format = format;
      break;
    }
  }

  if (selected_format == NULL)
    goto invalid_format;

  GST_DEBUG_OBJECT (self, "selecting format %u", selected_format->index);

  if (!gst_cel_video_src_select_format (self, selected_format))
    goto select_failed;

  gst_cel_video_src_release_device_caps_and_formats (self);

  return TRUE;

  /* ERRORS */
no_device:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("no device"), (NULL));
    return FALSE;
  }
invalid_format:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("invalid format"), (NULL));
    return FALSE;
  }
select_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("failed to select format"),
        (NULL));
    return FALSE;
  }
}
Example #10
static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
  GstVideoFormat format;
  gint width, height;
  gint fps_num, fps_den;
  gint par_num, par_den;
  gint i;
  GstCaps *othercaps;
  gboolean ret;

  /* get info from caps */
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto refuse_caps;
  /* optional; pass along if present */
  fps_num = fps_den = -1;
  par_num = par_den = -1;
  gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

  if (width == enc->width && height == enc->height && enc->format == format
      && fps_num == enc->fps_num && fps_den == enc->fps_den
      && par_num == enc->par_num && par_den == enc->par_den)
    return TRUE;

  /* store input description */
  enc->format = format;
  enc->width = width;
  enc->height = height;
  enc->fps_num = fps_num;
  enc->fps_den = fps_den;
  enc->par_num = par_num;
  enc->par_den = par_den;

  /* prepare a cached image description  */
  enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
  /* ... but any alpha is disregarded in encoding */
  if (gst_video_format_is_gray (format))
    enc->channels = 1;
  else
    enc->channels = 3;
  enc->h_max_samp = 0;
  enc->v_max_samp = 0;
  for (i = 0; i < enc->channels; ++i) {
    enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
    enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
    enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
        height);
    enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
    enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
    enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
    enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
    enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
    enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (enc->h_max_samp <= 4);
  g_assert (enc->v_max_samp <= 4);
  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < enc->channels; ++i) {
    enc->h_samp[i] = enc->h_max_samp / enc->h_samp[i];
    enc->v_samp[i] = enc->v_max_samp / enc->v_samp[i];
  }
  enc->planar = (enc->inc[0] == 1 && enc->inc[1] == 1 && enc->inc[2] == 1);

  othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
  gst_caps_set_simple (othercaps,
      "width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL);
  if (enc->fps_den > 0)
    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
  if (enc->par_den > 0)
    gst_caps_set_simple (othercaps,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
        NULL);

  ret = gst_pad_set_caps (enc->srcpad, othercaps);
  gst_caps_unref (othercaps);

  if (ret)
    gst_jpegenc_resync (enc);

  gst_object_unref (enc);

  return ret;

  /* ERRORS */
refuse_caps:
  {
    GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
    gst_object_unref (enc);
    return FALSE;
  }
}
Example #11
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoFormat format;
  GstStructure *structure;
  gboolean is_gl;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-gl")) {
    is_gl = TRUE;
    format = GST_VIDEO_FORMAT_UNKNOWN;
    ok = gst_structure_get_int (structure, "width", &width);
    ok &= gst_structure_get_int (structure, "height", &height);
  } else {
    is_gl = FALSE;
    ok = gst_video_format_parse_caps (caps, &format, &width, &height);

    if (!ok)
      return FALSE;

    /* init colorspace conversion if needed */
    ok = gst_gl_display_init_upload (glimage_sink->display, format,
        width, height, width, height);

    if (!ok) {
      GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
          GST_GL_DISPLAY_ERR_MSG (glimage_sink->display), (NULL));
      return FALSE;
    }
  }

  gst_gl_display_set_client_reshape_callback (glimage_sink->display,
      glimage_sink->clientReshapeCallback);

  gst_gl_display_set_client_draw_callback (glimage_sink->display,
      glimage_sink->clientDrawCallback);

  gst_gl_display_set_client_data (glimage_sink->display,
      glimage_sink->client_data);

  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  /* get display's PAR */
  if (glimage_sink->par) {
    display_par_n = gst_value_get_fraction_numerator (glimage_sink->par);
    display_par_d = gst_value_get_fraction_denominator (glimage_sink->par);
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  if (height % display_ratio_den == 0) {
    GST_DEBUG ("keeping video height");
    glimage_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    glimage_sink->window_height = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG ("keeping video width");
    glimage_sink->window_width = width;
    glimage_sink->window_height = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    glimage_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    glimage_sink->window_height = height;
  }
  GST_DEBUG ("scaling to %dx%d",
      glimage_sink->window_width, glimage_sink->window_height);

  GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
  GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  glimage_sink->is_gl = is_gl;
  glimage_sink->width = width;
  glimage_sink->height = height;
  glimage_sink->fps_n = fps_n;
  glimage_sink->fps_d = fps_d;
  glimage_sink->par_n = par_n;
  glimage_sink->par_d = par_d;

  if (!glimage_sink->window_id && !glimage_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (glimage_sink));

  return TRUE;
}
Example #12
static gboolean
gst_egl_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstEGLSink *egl_sink;
  gint width;
  gint height;
  gint bufcount;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoFormat format;
  GstStructure *s;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  egl_sink = GST_EGL_SINK (bsink);

  if (egl_sink->set_caps_callback)
    return egl_sink->set_caps_callback (caps, egl_sink->client_data);

  s = gst_caps_get_structure (caps, 0);
  if (gst_structure_get_int (s, "num-buffers-required", &bufcount) &&
      bufcount > GST_GL_DISPLAY_MAX_BUFFER_COUNT) {
    GST_WARNING ("num-buffers-required %d exceeds max eglsink buffer count %d",
        bufcount, GST_GL_DISPLAY_MAX_BUFFER_COUNT);
    return FALSE;
  }
  
  ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  if (!ok)
    return FALSE;

  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  /* get display's PAR */
  if (egl_sink->par) {
    display_par_n = gst_value_get_fraction_numerator (egl_sink->par);
    display_par_d = gst_value_get_fraction_denominator (egl_sink->par);
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  if (height % display_ratio_den == 0) {
    GST_DEBUG ("keeping video height");
    egl_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    egl_sink->window_height = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG ("keeping video width");
    egl_sink->window_width = width;
    egl_sink->window_height = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    egl_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    egl_sink->window_height = height;
  }
  GST_DEBUG ("scaling to %dx%d",
      egl_sink->window_width, egl_sink->window_height);

  GST_VIDEO_SINK_WIDTH (egl_sink) = width;
  GST_VIDEO_SINK_HEIGHT (egl_sink) = height;
  egl_sink->fps_n = fps_n;
  egl_sink->fps_d = fps_d;
  egl_sink->par_n = par_n;
  egl_sink->par_d = par_d;

  if (!egl_sink->window_id && !egl_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (egl_sink));

  return TRUE;
}
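
Both GL-based sinks above (gst_glimage_sink_set_caps and gst_egl_sink_set_caps) size their window the same way, from gst_video_calculate_display_ratio. A worked example with illustrative numbers, assuming a 720x576 picture with a 16:15 pixel aspect ratio on a square-pixel display:

#include <gst/gst.h>
#include <gst/video/video.h>

/* Worked example of the shared sizing logic; the 720x576 / 16:15 figures
 * are illustrative, not taken from either element. */
static void
display_ratio_example (void)
{
  guint dar_n, dar_d;

  if (!gst_video_calculate_display_ratio (&dar_n, &dar_d,
          720, 576, 16, 15, 1, 1))
    return;

  /* dar_n/dar_d reduces to 4/3; since 576 % 3 == 0 the sinks keep the
   * video height and scale the width: 576 * 4 / 3 = 768, i.e. 768x576 */
  GST_DEBUG ("window %" G_GUINT64_FORMAT "x576",
      gst_util_uint64_scale_int (576, dar_n, dar_d));
}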
Example #13
static gboolean
gst_interlace_setcaps (GstPad * pad, GstCaps * caps)
{
  GstInterlace *interlace;
  gboolean ret;
  int width, height;
  GstVideoFormat format;
  gboolean interlaced = TRUE;
  int fps_n, fps_d;
  GstPad *otherpad;
  GstCaps *othercaps;
  const PulldownFormat *pdformat;

  interlace = GST_INTERLACE (gst_pad_get_parent (pad));

  otherpad =
      (pad == interlace->srcpad) ? interlace->sinkpad : interlace->srcpad;

  ret = gst_video_format_parse_caps (caps, &format, &width, &height);
  gst_video_format_parse_caps_interlaced (caps, &interlaced);
  ret &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);

  if (!ret)
    goto error;

  othercaps = gst_caps_copy (caps);
  pdformat = &formats[interlace->pattern];

  if (pad == interlace->srcpad) {
    gst_caps_set_simple (othercaps, "interlaced", G_TYPE_BOOLEAN, FALSE, NULL);
    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION,
        fps_n * pdformat->ratio_d, fps_d * pdformat->ratio_n, NULL);
  } else {
    gst_caps_set_simple (othercaps, "interlaced", G_TYPE_BOOLEAN, TRUE, NULL);
    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION,
        fps_n * pdformat->ratio_n, fps_d * pdformat->ratio_d, NULL);
  }

  ret = gst_pad_set_caps (otherpad, othercaps);
  if (!ret)
    goto error;

  interlace->format = format;
  interlace->width = width;
  interlace->height = height;

  interlace->phase_index = interlace->pattern_offset;

  if (pad == interlace->sinkpad) {
    gst_caps_replace (&interlace->srccaps, othercaps);
    interlace->src_fps_n = fps_n * pdformat->ratio_n;
    interlace->src_fps_d = fps_d * pdformat->ratio_d;
  } else {
    gst_caps_replace (&interlace->srccaps, caps);
    interlace->src_fps_n = fps_n;
    interlace->src_fps_d = fps_d;
  }

error:
  g_object_unref (interlace);

  return ret;
}
Example #14
/* Parses a set of caps and tags in st and populates a GstDiscovererStreamInfo
 * structure (parent, if !NULL, otherwise it allocates one)
 */
static GstDiscovererStreamInfo *
collect_information (GstDiscoverer * dc, const GstStructure * st,
    GstDiscovererStreamInfo * parent)
{
  GstCaps *caps;
  GstStructure *caps_st, *tags_st;
  const gchar *name;
  int tmp, tmp2;
  guint utmp;
  gboolean btmp;

  if (!st || !gst_structure_id_has_field (st, _CAPS_QUARK)) {
    GST_WARNING ("Couldn't find caps !");
    if (parent)
      return parent;
    else
      return (GstDiscovererStreamInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_STREAM_INFO);
  }

  gst_structure_id_get (st, _CAPS_QUARK, GST_TYPE_CAPS, &caps, NULL);
  caps_st = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (caps_st);

  if (g_str_has_prefix (name, "audio/")) {
    GstDiscovererAudioInfo *info;

    if (parent)
      info = (GstDiscovererAudioInfo *) parent;
    else {
      info = (GstDiscovererAudioInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_AUDIO_INFO);
      info->parent.caps = caps;
    }

    if (gst_structure_get_int (caps_st, "rate", &tmp))
      info->sample_rate = (guint) tmp;

    if (gst_structure_get_int (caps_st, "channels", &tmp))
      info->channels = (guint) tmp;

    if (gst_structure_get_int (caps_st, "depth", &tmp))
      info->depth = (guint) tmp;

    if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
      gst_structure_id_get (st, _TAGS_QUARK,
          GST_TYPE_STRUCTURE, &tags_st, NULL);
      if (gst_structure_get_uint (tags_st, GST_TAG_BITRATE, &utmp) ||
          gst_structure_get_uint (tags_st, GST_TAG_NOMINAL_BITRATE, &utmp))
        info->bitrate = utmp;

      if (gst_structure_get_uint (tags_st, GST_TAG_MAXIMUM_BITRATE, &utmp))
        info->max_bitrate = utmp;

      /* FIXME: Is it worth it to remove the tags we've parsed? */
      info->parent.tags = gst_tag_list_merge (info->parent.tags,
          (GstTagList *) tags_st, GST_TAG_MERGE_REPLACE);

      gst_structure_free (tags_st);
    }

    return (GstDiscovererStreamInfo *) info;

  } else if (g_str_has_prefix (name, "video/") ||
      g_str_has_prefix (name, "image/")) {
    GstDiscovererVideoInfo *info;
    GstVideoFormat format;

    if (parent)
      info = (GstDiscovererVideoInfo *) parent;
    else {
      info = (GstDiscovererVideoInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_VIDEO_INFO);
      info->parent.caps = caps;
    }

    if (gst_video_format_parse_caps (caps, &format, &tmp, &tmp2)) {
      info->width = (guint) tmp;
      info->height = (guint) tmp2;
    }

    if (gst_structure_get_int (caps_st, "depth", &tmp))
      info->depth = (guint) tmp;

    if (gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp, &tmp2)) {
      info->par_num = tmp;
      info->par_denom = tmp2;
    }

    if (gst_video_parse_caps_framerate (caps, &tmp, &tmp2)) {
      info->framerate_num = tmp;
      info->framerate_denom = tmp2;
    }

    if (gst_video_format_parse_caps_interlaced (caps, &btmp))
      info->interlaced = btmp;

    if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
      gst_structure_id_get (st, _TAGS_QUARK,
          GST_TYPE_STRUCTURE, &tags_st, NULL);
      if (gst_structure_get_uint (tags_st, GST_TAG_BITRATE, &utmp) ||
          gst_structure_get_uint (tags_st, GST_TAG_NOMINAL_BITRATE, &utmp))
        info->bitrate = utmp;

      if (gst_structure_get_uint (tags_st, GST_TAG_MAXIMUM_BITRATE, &utmp))
        info->max_bitrate = utmp;

      /* FIXME: Is it worth it to remove the tags we've parsed? */
      info->parent.tags = gst_tag_list_merge (info->parent.tags,
          (GstTagList *) tags_st, GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    return (GstDiscovererStreamInfo *) info;

  } else {
    /* None of the above - populate what information we can */
    GstDiscovererStreamInfo *info;

    if (parent)
      info = parent;
    else {
      info = (GstDiscovererStreamInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_STREAM_INFO);
      info->caps = caps;
    }

    if (gst_structure_id_get (st, _TAGS_QUARK,
            GST_TYPE_STRUCTURE, &tags_st, NULL)) {
      info->tags = gst_tag_list_merge (info->tags, (GstTagList *) tags_st,
          GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    return info;
  }

}
Example #15
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state, tmp_state;
  gboolean ret;
  gboolean changed = FALSE;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  /* subclass should do something here ... */
  g_return_val_if_fail (base_video_encoder_class->set_format != NULL, FALSE);

  GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  memset (&tmp_state, 0, sizeof (tmp_state));

  tmp_state.caps = gst_caps_ref (caps);
  structure = gst_caps_get_structure (caps, 0);

  ret =
      gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width,
      &tmp_state.height);
  if (!ret)
    goto exit;

  changed = (tmp_state.format != state->format
      || tmp_state.width != state->width || tmp_state.height != state->height);

  if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n,
          &tmp_state.fps_d)) {
    tmp_state.fps_n = 0;
    tmp_state.fps_d = 1;
  }
  changed = changed || (tmp_state.fps_n != state->fps_n
      || tmp_state.fps_d != state->fps_d);

  if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n,
          &tmp_state.par_d)) {
    tmp_state.par_n = 1;
    tmp_state.par_d = 1;
  }
  changed = changed || (tmp_state.par_n != state->par_n
      || tmp_state.par_d != state->par_d);

  tmp_state.have_interlaced =
      gst_structure_get_boolean (structure, "interlaced",
      &tmp_state.interlaced);
  changed = changed || (tmp_state.have_interlaced != state->have_interlaced
      || tmp_state.interlaced != state->interlaced);

  tmp_state.bytes_per_picture =
      gst_video_format_get_size (tmp_state.format, tmp_state.width,
      tmp_state.height);
  tmp_state.clean_width = tmp_state.width;
  tmp_state.clean_height = tmp_state.height;
  tmp_state.clean_offset_left = 0;
  tmp_state.clean_offset_top = 0;

  if (changed) {
    /* arrange draining pending frames */
    gst_base_video_encoder_drain (base_video_encoder);

    /* and subclass should be ready to configure format at any time around */
    if (base_video_encoder_class->set_format)
      ret =
          base_video_encoder_class->set_format (base_video_encoder, &tmp_state);
    if (ret) {
      gst_caps_replace (&state->caps, NULL);
      *state = tmp_state;
    }
  } else {
    /* no need to stir things up */
    GST_DEBUG_OBJECT (base_video_encoder,
        "new video format identical to configured format");
    gst_caps_unref (tmp_state.caps);
    ret = TRUE;
  }

exit:
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);

  if (!ret) {
    GST_WARNING_OBJECT (base_video_encoder, "rejected caps %" GST_PTR_FORMAT,
        caps);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
Example #16
static gboolean
gst_jasper_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
    GstJasperEnc *enc;
    GstVideoFormat format;
    gint width, height;
    gint fps_num, fps_den;
    gint par_num, par_den;
    gint i;

    enc = GST_JASPER_ENC (GST_PAD_PARENT (pad));

    /* get info from caps */
    if (!gst_video_format_parse_caps (caps, &format, &width, &height))
        goto refuse_caps;
    /* optional; pass along if present */
    fps_num = fps_den = -1;
    par_num = par_den = -1;
    gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
    gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

    if (width == enc->width && height == enc->height && enc->format == format
            && fps_num == enc->fps_num && fps_den == enc->fps_den
            && par_num == enc->par_num && par_den == enc->par_den)
        return TRUE;

    /* store input description */
    enc->format = format;
    enc->width = width;
    enc->height = height;
    enc->fps_num = fps_num;
    enc->fps_den = fps_den;
    enc->par_num = par_num;
    enc->par_den = par_den;

    /* prepare a cached image description  */
    enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
    for (i = 0; i < enc->channels; ++i) {
        enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
        enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
        enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
                         height);
        enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
        enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
    }

    if (!gst_jasper_enc_set_src_caps (enc))
        goto setcaps_failed;
    if (!gst_jasper_enc_init_encoder (enc))
        goto setup_failed;

    return TRUE;

    /* ERRORS */
setup_failed:
    {
        GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL), (NULL));
        return FALSE;
    }
setcaps_failed:
    {
        GST_WARNING_OBJECT (enc, "Setting src caps failed");
        GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL), (NULL));
        return FALSE;
    }
refuse_caps:
    {
        GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
        gst_object_unref (enc);
        return FALSE;
    }
}
Example #17
static gboolean
gst_pngenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstPngEnc *pngenc;
  GstVideoFormat format;
  int fps_n, fps_d;
  GstCaps *pcaps;
  gboolean ret;

  pngenc = GST_PNGENC (gst_pad_get_parent (pad));

  ret = gst_video_format_parse_caps (caps, &format,
      &pngenc->width, &pngenc->height);
  if (G_LIKELY (ret))
    ret = gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);

  if (G_UNLIKELY (!ret))
    goto done;

  switch (format) {
    case GST_VIDEO_FORMAT_RGBA:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGBA;
      break;
    case GST_VIDEO_FORMAT_RGB:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGB;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      pngenc->png_color_type = PNG_COLOR_TYPE_GRAY;
      break;
    default:
      ret = FALSE;
      goto done;
  }

  if (G_UNLIKELY (pngenc->width < 16 || pngenc->width > 1000000 ||
          pngenc->height < 16 || pngenc->height > 1000000)) {
    ret = FALSE;
    goto done;
  }

  pngenc->stride = gst_video_format_get_row_stride (format, 0, pngenc->width);

  pcaps = gst_caps_new_simple ("image/png",
      "width", G_TYPE_INT, pngenc->width,
      "height", G_TYPE_INT, pngenc->height,
      "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);

  ret = gst_pad_set_caps (pngenc->srcpad, pcaps);

  gst_caps_unref (pcaps);

  /* Fall-through. */
done:
  if (G_UNLIKELY (!ret)) {
    pngenc->width = 0;
    pngenc->height = 0;
  }

  gst_object_unref (pngenc);

  return ret;
}