Example #1
bool getVideoSizeAndFormatFromCaps(GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
{
#ifdef GST_API_VERSION_1
    GstVideoInfo info;
    if (!gst_video_info_from_caps(&info, caps))
        return false;

    format = GST_VIDEO_INFO_FORMAT(&info);
    size.setWidth(GST_VIDEO_INFO_WIDTH(&info));
    size.setHeight(GST_VIDEO_INFO_HEIGHT(&info));
    pixelAspectRatioNumerator = GST_VIDEO_INFO_PAR_N(&info);
    pixelAspectRatioDenominator = GST_VIDEO_INFO_PAR_D(&info);
    stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
#else
    gint width, height;
    if (!GST_IS_CAPS(caps) || !gst_caps_is_fixed(caps)
        || !gst_video_format_parse_caps(caps, &format, &width, &height)
        || !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator,
                                                    &pixelAspectRatioDenominator))
        return false;
    size.setWidth(width);
    size.setHeight(height);
    stride = size.width() * 4;
#endif

    return true;
}
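A hedged usage sketch for the helper above (not from the original source): it assumes a GStreamer 1.0 build (GST_API_VERSION_1) and a sink pad "pad" that the caller already holds a reference to.

// Hypothetical caller, 1.0 path: pull the negotiated caps off the pad and let the
// helper fill in size, format, pixel aspect ratio and stride.
WebCore::IntSize size;
GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
int parNumerator = 1, parDenominator = 1, stride = 0;

if (GstCaps* caps = gst_pad_get_current_caps(pad)) {
    if (getVideoSizeAndFormatFromCaps(caps, size, format, parNumerator, parDenominator, stride)) {
        // size, format, the pixel aspect ratio and the plane-0 stride are valid here.
    }
    gst_caps_unref(caps);
}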
Example #2
/* Retrieve the video sink's Caps and tell the application about the media size */
static void check_media_size (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GstElement *video_sink;
  GstPad *video_sink_pad;
  GstCaps *caps;
  GstVideoFormat fmt;
  int width;
  int height;

  /* Retrieve the Caps at the entrance of the video sink */
  g_object_get (data->pipeline, "video-sink", &video_sink, NULL);
  video_sink_pad = gst_element_get_static_pad (video_sink, "sink");
  caps = gst_pad_get_negotiated_caps (video_sink_pad);

  /* The pad may not have negotiated Caps yet, in which case caps is NULL */
  if (caps && gst_video_format_parse_caps(caps, &fmt, &width, &height)) {
    int par_n, par_d;
    if (gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
      width = width * par_n / par_d;
    }
    GST_DEBUG ("Media size is %dx%d, notifying application", width, height);

    (*env)->CallVoidMethod (env, data->app, on_media_size_changed_method_id, (jint)width, (jint)height);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_object_unref (video_sink_pad);
  gst_object_unref (video_sink);
}
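For comparison, a hedged GStreamer 1.0 sketch of the same query (not part of the tutorial above): gst_pad_get_negotiated_caps() no longer exists in 1.0, and GstVideoInfo replaces the gst_video_*_parse_caps helpers. It reuses the same video_sink_pad as check_media_size.

/* Hypothetical 1.0 equivalent of the caps/size query above. */
GstCaps *caps = gst_pad_get_current_caps (video_sink_pad);
if (caps) {
  GstVideoInfo info;
  if (gst_video_info_from_caps (&info, caps)) {
    int width = GST_VIDEO_INFO_WIDTH (&info);
    int height = GST_VIDEO_INFO_HEIGHT (&info);
    /* Apply the pixel aspect ratio to get the display width, as above. */
    width = width * GST_VIDEO_INFO_PAR_N (&info) / GST_VIDEO_INFO_PAR_D (&info);
    GST_DEBUG ("Media size is %dx%d", width, height);
  }
  gst_caps_unref (caps);
}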
Example #3
// Returns the size of the video
IntSize MediaPlayerPrivate::naturalSize() const
{
    if (!hasVideo())
        return IntSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326
    int width = 0, height = 0;
    if (GstPad* pad = gst_element_get_static_pad(m_videoSink, "sink")) {
        GstCaps* caps = GST_PAD_CAPS(pad);
        gfloat pixelAspectRatio;
        gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;

        if (!GST_IS_CAPS(caps) || !gst_caps_is_fixed(caps) ||
            !gst_video_format_parse_caps(caps, NULL, &width, &height) ||
            !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator,
                                                           &pixelAspectRatioDenominator)) {
            gst_object_unref(GST_OBJECT(pad));
            return IntSize();
        }

        pixelAspectRatio = (gfloat) pixelAspectRatioNumerator / (gfloat) pixelAspectRatioDenominator;
        width *= pixelAspectRatio;
        height /= pixelAspectRatio;
        gst_object_unref(GST_OBJECT(pad));
    }

    return IntSize(width, height);
}
Example #4
gboolean
gst_base_video_state_from_caps (GstVideoState * state, GstCaps * caps)
{

  gst_video_format_parse_caps (caps, &state->format,
      &state->width, &state->height);

  if (!gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d))
    return FALSE;

  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  {
    GstStructure *structure = gst_caps_get_structure (caps, 0);
    state->interlaced = FALSE;
    gst_structure_get_boolean (structure, "interlaced", &state->interlaced);
  }

  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  /* FIXME need better error handling */
  return TRUE;
}
Example #5
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state;
  gboolean ret;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  GST_DEBUG ("setcaps");

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, &state->format,
      &state->width, &state->height);

  state->fps_n = 0;
  state->fps_d = 1;
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  if (state->fps_d == 0) {
    state->fps_n = 0;
    state->fps_d = 1;
  }

  state->par_n = 1;
  state->par_d = 1;
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);

  state->clean_width = state->width;
  state->clean_height = state->height;
  state->clean_offset_left = 0;
  state->clean_offset_top = 0;

  ret = base_video_encoder_class->set_format (base_video_encoder,
      &GST_BASE_VIDEO_CODEC (base_video_encoder)->state);
  if (ret) {
    ret = base_video_encoder_class->start (base_video_encoder);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
Example #6
static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStructure *structure;
  const GValue *codec_data;
  GstVideoState *state;
  gboolean ret = TRUE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);

  state = &base_video_decoder->state;

  if (state->codec_data) {
    gst_buffer_unref (state->codec_data);
  }
  memset (state, 0, sizeof (GstVideoState));

  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, NULL, &state->width, &state->height);
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

#if 0
  /* requires 0.10.23 */
  state->have_interlaced =
      gst_video_format_parse_caps_interlaced (caps, &state->interlaced);
#else
  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);
#endif

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
    state->codec_data = gst_value_get_buffer (codec_data);
  }

  if (base_video_decoder_class->start) {
    ret = base_video_decoder_class->start (base_video_decoder);
  }

  g_object_unref (base_video_decoder);

  return ret;
}
Example #7
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  GstVideoFormat format;
  GstStructure *structure;
  gboolean is_gl;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-gl")) {
    is_gl = TRUE;
    format = GST_VIDEO_FORMAT_UNKNOWN;
    ok = gst_structure_get_int (structure, "width", &width);
    ok &= gst_structure_get_int (structure, "height", &height);
  } else {
    is_gl = FALSE;
    ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  }
  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
  GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  glimage_sink->is_gl = is_gl;
  glimage_sink->format = format;
  glimage_sink->width = width;
  glimage_sink->height = height;
  glimage_sink->fps_n = fps_n;
  glimage_sink->fps_d = fps_d;
  glimage_sink->par_n = par_n;
  glimage_sink->par_d = par_d;

  if (!glimage_sink->window_id && !glimage_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (glimage_sink));

  return TRUE;
}
Example #8
void GStreamerReader::VideoPreroll()
{
  /* The first video buffer has reached the video sink. Get width and height */
  LOG(PR_LOG_DEBUG, "Video preroll");
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  int PARNumerator, PARDenominator;
#if GST_VERSION_MAJOR >= 1
  GstCaps* caps = gst_pad_get_current_caps(sinkpad);
  memset (&mVideoInfo, 0, sizeof (mVideoInfo));
  gst_video_info_from_caps(&mVideoInfo, caps);
  mFormat = mVideoInfo.finfo->format;
  mPicture.width = mVideoInfo.width;
  mPicture.height = mVideoInfo.height;
  PARNumerator = GST_VIDEO_INFO_PAR_N(&mVideoInfo);
  PARDenominator = GST_VIDEO_INFO_PAR_D(&mVideoInfo);
#else
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
  if (!gst_video_parse_caps_pixel_aspect_ratio(caps, &PARNumerator, &PARDenominator)) {
    PARNumerator = 1;
    PARDenominator = 1;
  }
#endif
  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");

  // Calculate display size according to pixel aspect ratio.
  nsIntRect pictureRect(0, 0, mPicture.width, mPicture.height);
  nsIntSize frameSize = nsIntSize(mPicture.width, mPicture.height);
  nsIntSize displaySize = nsIntSize(mPicture.width, mPicture.height);
  ScaleDisplayByAspectRatio(displaySize, float(PARNumerator) / float(PARDenominator));

  // If video frame size is overflow, stop playing.
  if (IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
    mInfo.mVideo.mDisplay = ThebesIntSize(displaySize.ToIntSize());
    mInfo.mVideo.mHasVideo = true;
  } else {
    LOG(PR_LOG_DEBUG, "invalid video region");
    Eos();
  }
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
Example #9
static gboolean
gst_gdk_pixbuf_sink_set_caps (GstBaseSink * basesink, GstCaps * caps)
{
  GstGdkPixbufSink *sink = GST_GDK_PIXBUF_SINK (basesink);
  GstVideoFormat fmt;
  gint w, h, par_n, par_d;

  GST_LOG_OBJECT (sink, "caps: %" GST_PTR_FORMAT, caps);

  if (!gst_video_format_parse_caps (caps, &fmt, &w, &h)) {
    GST_WARNING_OBJECT (sink, "parse_caps failed");
    return FALSE;
  }

  if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
    GST_LOG_OBJECT (sink, "no pixel aspect ratio");
    return FALSE;
  }

  g_assert ((fmt == GST_VIDEO_FORMAT_RGB &&
          gst_video_format_get_pixel_stride (fmt, 0) == 3) ||
      (fmt == GST_VIDEO_FORMAT_RGBA &&
          gst_video_format_get_pixel_stride (fmt, 0) == 4));

  GST_VIDEO_SINK_WIDTH (sink) = w;
  GST_VIDEO_SINK_HEIGHT (sink) = h;

  sink->rowstride = gst_video_format_get_row_stride (fmt, 0, w);
  sink->has_alpha = (fmt == GST_VIDEO_FORMAT_RGBA);

  sink->par_n = par_n;
  sink->par_d = par_d;

  GST_INFO_OBJECT (sink, "format             : %d", fmt);
  GST_INFO_OBJECT (sink, "width x height     : %d x %d", w, h);
  GST_INFO_OBJECT (sink, "pixel-aspect-ratio : %d/%d", par_d, par_n);

  return TRUE;
}
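The values negotiated above are what the element's render path would typically hand to gdk_pixbuf_new_from_data(); a hedged sketch under GStreamer 0.10 buffer semantics, where "buf" is an assumed incoming GstBuffer:

/* Hypothetical sketch (not taken verbatim from the element): wrap one buffer's data
 * in a GdkPixbuf using the negotiated width, height, row stride and alpha flag. */
GdkPixbuf *pixbuf = gdk_pixbuf_new_from_data (GST_BUFFER_DATA (buf),
    GDK_COLORSPACE_RGB, sink->has_alpha, 8,
    GST_VIDEO_SINK_WIDTH (sink), GST_VIDEO_SINK_HEIGHT (sink),
    sink->rowstride, NULL, NULL);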
Example #10
static gboolean
gst_egl_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstEGLSink *egl_sink;
  gint width;
  gint height;
  gint bufcount;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoFormat format;
  GstStructure *s;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  egl_sink = GST_EGL_SINK (bsink);

  if (egl_sink->set_caps_callback)
    return egl_sink->set_caps_callback (caps, egl_sink->client_data);

  s = gst_caps_get_structure (caps, 0);
  if (gst_structure_get_int (s, "num-buffers-required", &bufcount) &&
      bufcount > GST_GL_DISPLAY_MAX_BUFFER_COUNT) {
    GST_WARNING ("num-buffers-required %d exceeds max eglsink buffer count %d",
        bufcount, GST_GL_DISPLAY_MAX_BUFFER_COUNT);
    return FALSE;
  }

  ok = gst_video_format_parse_caps (caps, &format, &width, &height);
  if (!ok)
    return FALSE;

  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  /* get display's PAR */
  if (egl_sink->par) {
    display_par_n = gst_value_get_fraction_numerator (egl_sink->par);
    display_par_d = gst_value_get_fraction_denominator (egl_sink->par);
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  if (height % display_ratio_den == 0) {
    GST_DEBUG ("keeping video height");
    egl_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    egl_sink->window_height = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG ("keeping video width");
    egl_sink->window_width = width;
    egl_sink->window_height = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    egl_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    egl_sink->window_height = height;
  }
  GST_DEBUG ("scaling to %dx%d",
      egl_sink->window_width, egl_sink->window_height);

  GST_VIDEO_SINK_WIDTH (egl_sink) = width;
  GST_VIDEO_SINK_HEIGHT (egl_sink) = height;
  egl_sink->fps_n = fps_n;
  egl_sink->fps_d = fps_d;
  egl_sink->par_n = par_n;
  egl_sink->par_d = par_d;

  if (!egl_sink->window_id && !egl_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (egl_sink));

  return TRUE;
}
Example #11
static gboolean
gst_jasper_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
    GstJasperEnc *enc;
    GstVideoFormat format;
    gint width, height;
    gint fps_num, fps_den;
    gint par_num, par_den;
    gint i;

    enc = GST_JASPER_ENC (GST_PAD_PARENT (pad));

    /* get info from caps */
    if (!gst_video_format_parse_caps (caps, &format, &width, &height))
        goto refuse_caps;
    /* optional; pass along if present */
    fps_num = fps_den = -1;
    par_num = par_den = -1;
    gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
    gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

    if (width == enc->width && height == enc->height && enc->format == format
            && fps_num == enc->fps_num && fps_den == enc->fps_den
            && par_num == enc->par_num && par_den == enc->par_den)
        return TRUE;

    /* store input description */
    enc->format = format;
    enc->width = width;
    enc->height = height;
    enc->fps_num = fps_num;
    enc->fps_den = fps_den;
    enc->par_num = par_num;
    enc->par_den = par_den;

    /* prepare a cached image description  */
    enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
    for (i = 0; i < enc->channels; ++i) {
        enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
        enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
        enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
                         height);
        enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
        enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
    }

    if (!gst_jasper_enc_set_src_caps (enc))
        goto setcaps_failed;
    if (!gst_jasper_enc_init_encoder (enc))
        goto setup_failed;

    return TRUE;

    /* ERRORS */
setup_failed:
    {
        GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL), (NULL));
        return FALSE;
    }
setcaps_failed:
    {
        GST_WARNING_OBJECT (enc, "Setting src caps failed");
        GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL), (NULL));
        return FALSE;
    }
refuse_caps:
    {
        GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
        /* no unref here: GST_PAD_PARENT () does not take a reference on enc */
        return FALSE;
    }
}
Example #12
/* Parses a set of caps and tags in st and populates a GstDiscovererStreamInfo
 * structure (parent, if !NULL, otherwise it allocates one)
 */
static GstDiscovererStreamInfo *
collect_information (GstDiscoverer * dc, const GstStructure * st,
    GstDiscovererStreamInfo * parent)
{
  GstCaps *caps;
  GstStructure *caps_st, *tags_st;
  const gchar *name;
  int tmp, tmp2;
  guint utmp;
  gboolean btmp;

  if (!st || !gst_structure_id_has_field (st, _CAPS_QUARK)) {
    GST_WARNING ("Couldn't find caps !");
    if (parent)
      return parent;
    else
      return (GstDiscovererStreamInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_STREAM_INFO);
  }

  gst_structure_id_get (st, _CAPS_QUARK, GST_TYPE_CAPS, &caps, NULL);
  caps_st = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (caps_st);

  if (g_str_has_prefix (name, "audio/")) {
    GstDiscovererAudioInfo *info;

    if (parent)
      info = (GstDiscovererAudioInfo *) parent;
    else {
      info = (GstDiscovererAudioInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_AUDIO_INFO);
      info->parent.caps = caps;
    }

    if (gst_structure_get_int (caps_st, "rate", &tmp))
      info->sample_rate = (guint) tmp;

    if (gst_structure_get_int (caps_st, "channels", &tmp))
      info->channels = (guint) tmp;

    if (gst_structure_get_int (caps_st, "depth", &tmp))
      info->depth = (guint) tmp;

    if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
      gst_structure_id_get (st, _TAGS_QUARK,
          GST_TYPE_STRUCTURE, &tags_st, NULL);
      if (gst_structure_get_uint (tags_st, GST_TAG_BITRATE, &utmp) ||
          gst_structure_get_uint (tags_st, GST_TAG_NOMINAL_BITRATE, &utmp))
        info->bitrate = utmp;

      if (gst_structure_get_uint (tags_st, GST_TAG_MAXIMUM_BITRATE, &utmp))
        info->max_bitrate = utmp;

      /* FIXME: Is it worth it to remove the tags we've parsed? */
      info->parent.tags = gst_tag_list_merge (info->parent.tags,
          (GstTagList *) tags_st, GST_TAG_MERGE_REPLACE);

      gst_structure_free (tags_st);
    }

    return (GstDiscovererStreamInfo *) info;

  } else if (g_str_has_prefix (name, "video/") ||
      g_str_has_prefix (name, "image/")) {
    GstDiscovererVideoInfo *info;
    GstVideoFormat format;

    if (parent)
      info = (GstDiscovererVideoInfo *) parent;
    else {
      info = (GstDiscovererVideoInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_VIDEO_INFO);
      info->parent.caps = caps;
    }

    if (gst_video_format_parse_caps (caps, &format, &tmp, &tmp2)) {
      info->width = (guint) tmp;
      info->height = (guint) tmp2;
    }

    if (gst_structure_get_int (caps_st, "depth", &tmp))
      info->depth = (guint) tmp;

    if (gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp, &tmp2)) {
      info->par_num = tmp;
      info->par_denom = tmp2;
    }

    if (gst_video_parse_caps_framerate (caps, &tmp, &tmp2)) {
      info->framerate_num = tmp;
      info->framerate_denom = tmp2;
    }

    if (gst_video_format_parse_caps_interlaced (caps, &btmp))
      info->interlaced = btmp;

    if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
      gst_structure_id_get (st, _TAGS_QUARK,
          GST_TYPE_STRUCTURE, &tags_st, NULL);
      if (gst_structure_get_uint (tags_st, GST_TAG_BITRATE, &utmp) ||
          gst_structure_get_uint (tags_st, GST_TAG_NOMINAL_BITRATE, &utmp))
        info->bitrate = utmp;

      if (gst_structure_get_uint (tags_st, GST_TAG_MAXIMUM_BITRATE, &utmp))
        info->max_bitrate = utmp;

      /* FIXME: Is it worth it to remove the tags we've parsed? */
      info->parent.tags = gst_tag_list_merge (info->parent.tags,
          (GstTagList *) tags_st, GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    return (GstDiscovererStreamInfo *) info;

  } else {
    /* None of the above - populate what information we can */
    GstDiscovererStreamInfo *info;

    if (parent)
      info = parent;
    else {
      info = (GstDiscovererStreamInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_STREAM_INFO);
      info->caps = caps;
    }

    if (gst_structure_id_get (st, _TAGS_QUARK,
            GST_TYPE_STRUCTURE, &tags_st, NULL)) {
      info->tags = gst_tag_list_merge (info->tags, (GstTagList *) tags_st,
          GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    return info;
  }

}
Example #13
static gboolean
gst_video_scale_set_caps (GstBaseTransform * trans, GstCaps * in, GstCaps * out)
{
  GstVideoScale *videoscale = GST_VIDEO_SCALE (trans);
  gboolean ret;
  gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;
  gint from_par_n, from_par_d, to_par_n, to_par_d;

  ret =
      gst_video_format_parse_caps (in, &videoscale->format,
      &videoscale->from_width, &videoscale->from_height);
  ret &=
      gst_video_format_parse_caps (out, NULL, &videoscale->to_width,
      &videoscale->to_height);
  if (!ret)
    goto done;

  videoscale->src_size = gst_video_format_get_size (videoscale->format,
      videoscale->from_width, videoscale->from_height);
  videoscale->dest_size = gst_video_format_get_size (videoscale->format,
      videoscale->to_width, videoscale->to_height);

  if (!gst_video_parse_caps_pixel_aspect_ratio (in, &from_par_n, &from_par_d))
    from_par_n = from_par_d = 1;
  if (!gst_video_parse_caps_pixel_aspect_ratio (out, &to_par_n, &to_par_d))
    to_par_n = to_par_d = 1;

  if (!gst_util_fraction_multiply (videoscale->from_width,
          videoscale->from_height, from_par_n, from_par_d, &from_dar_n,
          &from_dar_d)) {
    from_dar_n = from_dar_d = -1;
  }

  if (!gst_util_fraction_multiply (videoscale->to_width,
          videoscale->to_height, to_par_n, to_par_d, &to_dar_n, &to_dar_d)) {
    to_dar_n = to_dar_d = -1;
  }

  videoscale->borders_w = videoscale->borders_h = 0;
  if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
    if (videoscale->add_borders) {
      gint n, d, to_h, to_w;

      if (from_dar_n != -1 && from_dar_d != -1
          && gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_n,
              to_par_d, &n, &d)) {
        to_h = gst_util_uint64_scale_int (videoscale->to_width, d, n);
        if (to_h <= videoscale->to_height) {
          videoscale->borders_h = videoscale->to_height - to_h;
          videoscale->borders_w = 0;
        } else {
          to_w = gst_util_uint64_scale_int (videoscale->to_height, n, d);
          g_assert (to_w <= videoscale->to_width);
          videoscale->borders_h = 0;
          videoscale->borders_w = videoscale->to_width - to_w;
        }
      } else {
        GST_WARNING_OBJECT (videoscale, "Can't calculate borders");
      }
    } else {
      GST_WARNING_OBJECT (videoscale, "Can't keep DAR!");
    }
  }

  if (videoscale->tmp_buf)
    g_free (videoscale->tmp_buf);
  videoscale->tmp_buf = g_malloc (videoscale->to_width * 8 * 4);

  gst_base_transform_set_passthrough (trans,
      (videoscale->from_width == videoscale->to_width
          && videoscale->from_height == videoscale->to_height));

  GST_DEBUG_OBJECT (videoscale, "from=%dx%d (par=%d/%d dar=%d/%d), size %d "
      "-> to=%dx%d (par=%d/%d dar=%d/%d borders=%d:%d), size %d",
      videoscale->from_width, videoscale->from_height, from_par_n, from_par_d,
      from_dar_n, from_dar_d, videoscale->src_size, videoscale->to_width,
      videoscale->to_height, to_par_n, to_par_d, to_dar_n, to_dar_d,
      videoscale->borders_w, videoscale->borders_h, videoscale->dest_size);

done:
  return ret;
}
Example #14
static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
  GstVideoFormat format;
  gint width, height;
  gint fps_num, fps_den;
  gint par_num, par_den;
  gint i;
  GstCaps *othercaps;
  gboolean ret;

  /* get info from caps */
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto refuse_caps;
  /* optional; pass along if present */
  fps_num = fps_den = -1;
  par_num = par_den = -1;
  gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

  if (width == enc->width && height == enc->height && enc->format == format
      && fps_num == enc->fps_num && fps_den == enc->fps_den
      && par_num == enc->par_num && par_den == enc->par_den)
    return TRUE;

  /* store input description */
  enc->format = format;
  enc->width = width;
  enc->height = height;
  enc->fps_num = fps_num;
  enc->fps_den = fps_den;
  enc->par_num = par_num;
  enc->par_den = par_den;

  /* prepare a cached image description  */
  enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
  /* ... but any alpha is disregarded in encoding */
  if (gst_video_format_is_gray (format))
    enc->channels = 1;
  else
    enc->channels = 3;
  enc->h_max_samp = 0;
  enc->v_max_samp = 0;
  for (i = 0; i < enc->channels; ++i) {
    enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
    enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
    enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
        height);
    enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
    enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
    enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
    enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
    enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
    enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (enc->h_max_samp <= 4);
  g_assert (enc->v_max_samp <= 4);
  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < enc->channels; ++i) {
    enc->h_samp[i] = enc->h_max_samp / enc->h_samp[i];
    enc->v_samp[i] = enc->v_max_samp / enc->v_samp[i];
  }
  enc->planar = (enc->inc[0] == 1 && enc->inc[1] == 1 && enc->inc[2] == 1);

  othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
  gst_caps_set_simple (othercaps,
      "width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL);
  if (enc->fps_den > 0)
    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
  if (enc->par_den > 0)
    gst_caps_set_simple (othercaps,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
        NULL);

  ret = gst_pad_set_caps (enc->srcpad, othercaps);
  gst_caps_unref (othercaps);

  if (ret)
    gst_jpegenc_resync (enc);

  gst_object_unref (enc);

  return ret;

  /* ERRORS */
refuse_caps:
  {
    GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
    gst_object_unref (enc);
    return FALSE;
  }
}
Example #15
static gboolean
gst_eglglessink_configure_caps (GstEglGlesSink * eglglessink, GstCaps * caps)
{
  gboolean ret = TRUE;
  gint width, height;
  int par_n, par_d;
  guintptr used_window = 0;

  if (!(ret = gst_video_format_parse_caps (caps, &eglglessink->format, &width,
              &height))) {
    GST_ERROR_OBJECT (eglglessink, "Got weird and/or incomplete caps");
    goto HANDLE_ERROR;
  }

  if (!(ret = gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d))) {
    par_n = 1;
    par_d = 1;
    GST_WARNING_OBJECT (eglglessink,
        "Can't parse PAR from caps. Using default: 1");
  }

  eglglessink->size_changed = (GST_VIDEO_SINK_WIDTH (eglglessink) != width ||
      GST_VIDEO_SINK_HEIGHT (eglglessink) != height ||
      eglglessink->par_n != par_n || eglglessink->par_d != par_d);

  eglglessink->par_n = par_n;
  eglglessink->par_d = par_d;
  GST_VIDEO_SINK_WIDTH (eglglessink) = width;
  GST_VIDEO_SINK_HEIGHT (eglglessink) = height;

  if (eglglessink->configured_caps) {
    GST_DEBUG_OBJECT (eglglessink, "Caps were already set");
    if (gst_caps_can_intersect (caps, eglglessink->configured_caps)) {
      GST_DEBUG_OBJECT (eglglessink, "Caps are compatible anyway");
      goto SUCCEED;
    }

    GST_DEBUG_OBJECT (eglglessink, "Caps are not compatible, reconfiguring");

    /* EGL/GLES cleanup */
    gst_egl_adaptation_cleanup (eglglessink->egl_context);

    gst_caps_unref (eglglessink->configured_caps);
    eglglessink->configured_caps = NULL;
  }

  if (!gst_egl_adaptation_choose_config (eglglessink->egl_context)) {
    GST_ERROR_OBJECT (eglglessink, "Couldn't choose EGL config");
    goto HANDLE_ERROR;
  }

  gst_caps_replace (&eglglessink->configured_caps, caps);

  /* By now the application should have set a window
   * if it meant to do so
   */
  GST_OBJECT_LOCK (eglglessink);
  if (!eglglessink->have_window) {

    GST_INFO_OBJECT (eglglessink,
        "No window. Will attempt internal window creation");
    if (!gst_eglglessink_create_window (eglglessink, width, height)) {
      GST_ERROR_OBJECT (eglglessink, "Internal window creation failed!");
      GST_OBJECT_UNLOCK (eglglessink);
      goto HANDLE_ERROR;
    }
    eglglessink->using_own_window = TRUE;
    gst_egl_adaptation_update_used_window (eglglessink->egl_context);
    eglglessink->have_window = TRUE;
  }
  used_window = gst_egl_adaptation_get_window (eglglessink->egl_context);
  GST_OBJECT_UNLOCK (eglglessink);
  gst_x_overlay_got_window_handle (GST_X_OVERLAY (eglglessink),
      (guintptr) used_window);

  if (!eglglessink->egl_context->have_surface) {
    if (!gst_egl_adaptation_init_egl_surface (eglglessink->egl_context,
            eglglessink->format)) {
      GST_ERROR_OBJECT (eglglessink, "Couldn't init EGL surface from window");
      goto HANDLE_ERROR;
    }
  }

SUCCEED:
  GST_INFO_OBJECT (eglglessink, "Configured caps successfully");
  return TRUE;

HANDLE_ERROR:
  GST_ERROR_OBJECT (eglglessink, "Configuring caps failed");
  return FALSE;
}
Example #16
static gboolean
gst_glimage_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstGLImageSink *glimage_sink;
  gint width;
  gint height;
  gboolean ok;
  gint fps_n, fps_d;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoFormat format;
  GstStructure *structure;
  gboolean is_gl;

  GST_DEBUG ("set caps with %" GST_PTR_FORMAT, caps);

  glimage_sink = GST_GLIMAGE_SINK (bsink);

  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-gl")) {
    is_gl = TRUE;
    format = GST_VIDEO_FORMAT_UNKNOWN;
    ok = gst_structure_get_int (structure, "width", &width);
    ok &= gst_structure_get_int (structure, "height", &height);
  } else {
    is_gl = FALSE;
    ok = gst_video_format_parse_caps (caps, &format, &width, &height);

    if (!ok)
      return FALSE;

    /* init colorspace conversion if needed */
    ok = gst_gl_display_init_upload (glimage_sink->display, format,
        width, height, width, height);

    if (!ok) {
      GST_ELEMENT_ERROR (glimage_sink, RESOURCE, NOT_FOUND,
          GST_GL_DISPLAY_ERR_MSG (glimage_sink->display), (NULL));
      return FALSE;
    }
  }

  gst_gl_display_set_client_reshape_callback (glimage_sink->display,
      glimage_sink->clientReshapeCallback);

  gst_gl_display_set_client_draw_callback (glimage_sink->display,
      glimage_sink->clientDrawCallback);

  gst_gl_display_set_client_data (glimage_sink->display,
      glimage_sink->client_data);

  ok &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  ok &= gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d);

  if (!ok)
    return FALSE;

  /* get display's PAR */
  if (glimage_sink->par) {
    display_par_n = gst_value_get_fraction_numerator (glimage_sink->par);
    display_par_d = gst_value_get_fraction_denominator (glimage_sink->par);
  } else {
    display_par_n = 1;
    display_par_d = 1;
  }

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  if (height % display_ratio_den == 0) {
    GST_DEBUG ("keeping video height");
    glimage_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    glimage_sink->window_height = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG ("keeping video width");
    glimage_sink->window_width = width;
    glimage_sink->window_height = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG ("approximating while keeping video height");
    glimage_sink->window_width = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    glimage_sink->window_height = height;
  }
  GST_DEBUG ("scaling to %dx%d",
      glimage_sink->window_width, glimage_sink->window_height);

  GST_VIDEO_SINK_WIDTH (glimage_sink) = width;
  GST_VIDEO_SINK_HEIGHT (glimage_sink) = height;
  glimage_sink->is_gl = is_gl;
  glimage_sink->width = width;
  glimage_sink->height = height;
  glimage_sink->fps_n = fps_n;
  glimage_sink->fps_d = fps_d;
  glimage_sink->par_n = par_n;
  glimage_sink->par_d = par_d;

  if (!glimage_sink->window_id && !glimage_sink->new_window_id)
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (glimage_sink));

  return TRUE;
}
Example #17
static gboolean
gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *base_video_encoder_class;
  GstStructure *structure;
  GstVideoState *state, tmp_state;
  gboolean ret;
  gboolean changed = FALSE;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  /* subclass should do something here ... */
  g_return_val_if_fail (base_video_encoder_class->set_format != NULL, FALSE);

  GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);

  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
  memset (&tmp_state, 0, sizeof (tmp_state));

  tmp_state.caps = gst_caps_ref (caps);
  structure = gst_caps_get_structure (caps, 0);

  ret =
      gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width,
      &tmp_state.height);
  if (!ret)
    goto exit;

  changed = (tmp_state.format != state->format
      || tmp_state.width != state->width || tmp_state.height != state->height);

  if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n,
          &tmp_state.fps_d)) {
    tmp_state.fps_n = 0;
    tmp_state.fps_d = 1;
  }
  changed = changed || (tmp_state.fps_n != state->fps_n
      || tmp_state.fps_d != state->fps_d);

  if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n,
          &tmp_state.par_d)) {
    tmp_state.par_n = 1;
    tmp_state.par_d = 1;
  }
  changed = changed || (tmp_state.par_n != state->par_n
      || tmp_state.par_d != state->par_d);

  tmp_state.have_interlaced =
      gst_structure_get_boolean (structure, "interlaced",
      &tmp_state.interlaced);
  changed = changed || (tmp_state.have_interlaced != state->have_interlaced
      || tmp_state.interlaced != state->interlaced);

  tmp_state.bytes_per_picture =
      gst_video_format_get_size (tmp_state.format, tmp_state.width,
      tmp_state.height);
  tmp_state.clean_width = tmp_state.width;
  tmp_state.clean_height = tmp_state.height;
  tmp_state.clean_offset_left = 0;
  tmp_state.clean_offset_top = 0;

  if (changed) {
    /* arrange draining pending frames */
    gst_base_video_encoder_drain (base_video_encoder);

    /* and subclass should be ready to configure format at any time around */
    if (base_video_encoder_class->set_format)
      ret =
          base_video_encoder_class->set_format (base_video_encoder, &tmp_state);
    if (ret) {
      gst_caps_replace (&state->caps, NULL);
      *state = tmp_state;
    }
  } else {
    /* no need to stir things up */
    GST_DEBUG_OBJECT (base_video_encoder,
        "new video format identical to configured format");
    gst_caps_unref (tmp_state.caps);
    ret = TRUE;
  }

exit:
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);

  if (!ret) {
    GST_WARNING_OBJECT (base_video_encoder, "rejected caps %" GST_PTR_FORMAT,
        caps);
  }

  g_object_unref (base_video_encoder);

  return ret;
}
Example #18
void MediaPlayerPrivate::paint(GraphicsContext* context, const IntRect& rect)
{
    if (context->paintingDisabled())
        return;

    if (!m_player->visible())
        return;
    if (!m_buffer)
        return;

    int width = 0, height = 0;
    int pixelAspectRatioNumerator = 0;
    int pixelAspectRatioDenominator = 0;
    double doublePixelAspectRatioNumerator = 0;
    double doublePixelAspectRatioDenominator = 0;
    double displayWidth;
    double displayHeight;
    double scale, gapHeight, gapWidth;

    GstCaps *caps = gst_buffer_get_caps(m_buffer);

    if (!gst_video_format_parse_caps(caps, NULL, &width, &height) ||
        !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator, &pixelAspectRatioDenominator)) {
      gst_caps_unref(caps);
      return;
    }

    displayWidth = width;
    displayHeight = height;
    doublePixelAspectRatioNumerator = pixelAspectRatioNumerator;
    doublePixelAspectRatioDenominator = pixelAspectRatioDenominator;

    cairo_t* cr = context->platformContext();
    cairo_surface_t* src = cairo_image_surface_create_for_data(GST_BUFFER_DATA(m_buffer),
                                                               CAIRO_FORMAT_RGB24,
                                                               width, height,
                                                               4 * width);

    cairo_save(cr);
    cairo_set_operator(cr, CAIRO_OPERATOR_SOURCE);

    displayWidth *= doublePixelAspectRatioNumerator / doublePixelAspectRatioDenominator;
    displayHeight *= doublePixelAspectRatioDenominator / doublePixelAspectRatioNumerator;

    scale = MIN (rect.width () / displayWidth, rect.height () / displayHeight);
    displayWidth *= scale;
    displayHeight *= scale;

    // Calculate gap between border and picture
    gapWidth = (rect.width() - displayWidth) / 2.0;
    gapHeight = (rect.height() - displayHeight) / 2.0;

    // paint the rectangle on the context and draw the surface inside.
    cairo_translate(cr, rect.x() + gapWidth, rect.y() + gapHeight);
    cairo_rectangle(cr, 0, 0, rect.width(), rect.height());
    cairo_scale(cr, doublePixelAspectRatioNumerator / doublePixelAspectRatioDenominator,
                doublePixelAspectRatioDenominator / doublePixelAspectRatioNumerator);
    cairo_scale(cr, scale, scale);
    cairo_set_source_surface(cr, src, 0, 0);
    cairo_fill(cr);
    cairo_restore(cr);

    cairo_surface_destroy(src);
    gst_caps_unref(caps);
}
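A closing note with a hedged sketch: gst_buffer_get_caps() was removed in GStreamer 1.0, so a port of this paint path would read the caps from the negotiated sink pad (or from the GstSample) and use GstVideoInfo instead. "m_videoSinkPad" is an assumed member used only for illustration, not part of the original class.

// Hypothetical 1.0 replacement for the caps/size query at the top of paint().
GstVideoInfo info;
GstCaps* caps = gst_pad_get_current_caps(m_videoSinkPad);
if (!caps)
    return;
bool parsed = gst_video_info_from_caps(&info, caps);
gst_caps_unref(caps);
if (!parsed)
    return;
int width = GST_VIDEO_INFO_WIDTH(&info);
int height = GST_VIDEO_INFO_HEIGHT(&info);
double pixelAspectRatio = static_cast<double>(GST_VIDEO_INFO_PAR_N(&info)) / GST_VIDEO_INFO_PAR_D(&info);
// ... the cairo scaling and centering math above stays the same ...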