static GstEncodingContainerProfile *
kms_recording_profile_create_ksr_profile (gboolean has_audio,
    gboolean has_video)
{
  GstEncodingContainerProfile *cprof;
  GstPadTemplate *templ;
  GstElement *mux;
  GstCaps *pc;

  pc = gst_caps_from_string ("application/x-ksr");
  cprof = gst_encoding_container_profile_new ("Ksr", NULL, pc, NULL);
  gst_caps_unref (pc);

  /* Use matroska caps to define this profile */
  mux = gst_element_factory_make ("matroskamux", NULL);

  if (has_audio) {
    GstCaps *ac;

    templ =
        gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mux),
        "audio_%u");
    ac = gst_pad_template_get_caps (templ);

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_audio_profile_new (ac, NULL, NULL, 0));

    gst_caps_unref (ac);
  }

  if (has_video) {
    GstCaps *vc;

    templ =
        gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mux),
        "video_%u");
    vc = gst_pad_template_get_caps (templ);

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_video_profile_new (vc, NULL, NULL, 0));

    gst_caps_unref (vc);
  }

  g_object_unref (mux);

  return cprof;
}
Example #2
static GstCaps *
gst_osx_video_src_get_caps (GstBaseSrc * src)
{
  GstElementClass *gstelement_class;
  GstOSXVideoSrc *self;
  GstPadTemplate *pad_template;
  GstCaps *caps;
  GstStructure *structure;
  gint width, height;

  gstelement_class = GST_ELEMENT_GET_CLASS (src);
  self = GST_OSX_VIDEO_SRC (src);

  /* if we don't have the resolution set up, return template caps */
  if (!self->world)
    return NULL;

  pad_template = gst_element_class_get_pad_template (gstelement_class, "src");
  /* I don't think this can actually fail... */
  if (!pad_template)
    return NULL;

  width = self->rect.right;
  height = self->rect.bottom;

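  /* take a writable copy of the template caps so we can pin them to the
   * device resolution */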
  caps = gst_caps_copy (gst_pad_template_get_caps (pad_template));

  structure = gst_caps_get_structure (caps, 0);
  gst_structure_set (structure, "width", G_TYPE_INT, width, NULL);
  gst_structure_set (structure, "height", G_TYPE_INT, height, NULL);

  return caps;
}
static void
gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux, GstFFMpegMuxClass * g_class)
{
  GstElementClass *klass = GST_ELEMENT_CLASS (g_class);
  GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass *) klass;
  GstPadTemplate *templ = gst_element_class_get_pad_template (klass, "src");

  ffmpegmux->srcpad = gst_pad_new_from_template (templ, "src");
  gst_pad_set_caps (ffmpegmux->srcpad, gst_pad_template_get_caps (templ));
  gst_element_add_pad (GST_ELEMENT (ffmpegmux), ffmpegmux->srcpad);

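  /* collect pads will gather a buffer from every sink pad before calling
   * the collected function, which does the actual muxing */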
  ffmpegmux->collect = gst_collect_pads_new ();
  gst_collect_pads_set_function (ffmpegmux->collect,
      (GstCollectPadsFunction) gst_ffmpegmux_collected, ffmpegmux);

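  /* allocate and initialise the libav output context used for muxing */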
  ffmpegmux->context = g_new0 (AVFormatContext, 1);
  ffmpegmux->context->oformat = oclass->in_plugin;
  ffmpegmux->context->nb_streams = 0;
  g_snprintf (ffmpegmux->context->filename,
      sizeof (ffmpegmux->context->filename),
      "gstreamer://%p", ffmpegmux->srcpad);
  ffmpegmux->opened = FALSE;

  ffmpegmux->videopads = 0;
  ffmpegmux->audiopads = 0;
  ffmpegmux->preload = 0;
  ffmpegmux->max_delay = 0;
}
static void
gst_tag_lib_mux_init (GstTagLibMux * mux, GstTagLibMuxClass * mux_class)
{
  GstElementClass *element_klass = GST_ELEMENT_CLASS (mux_class);
  GstPadTemplate *tmpl;

  /* pad through which data comes in to the element */
  mux->sinkpad =
      gst_pad_new_from_static_template (&gst_tag_lib_mux_sink_template, "sink");
  gst_pad_set_chain_function (mux->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tag_lib_mux_chain));
  gst_pad_set_event_function (mux->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tag_lib_mux_sink_event));
  gst_element_add_pad (GST_ELEMENT (mux), mux->sinkpad);

  /* pad through which data goes out of the element */
  tmpl = gst_element_class_get_pad_template (element_klass, "src");
  if (tmpl) {
    mux->srcpad = gst_pad_new_from_template (tmpl, "src");
    gst_pad_use_fixed_caps (mux->srcpad);
    gst_pad_set_caps (mux->srcpad, gst_pad_template_get_caps (tmpl));
    gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
  }

  mux->render_tag = TRUE;
}
Example #5
static GstCaps *
gst_alsasink_getcaps (GstBaseSink * bsink)
{
    GstElementClass *element_class;
    GstPadTemplate *pad_template;
    GstAlsaSink *sink = GST_ALSA_SINK (bsink);
    GstCaps *caps;

    if (sink->handle == NULL) {
        GST_DEBUG_OBJECT (sink, "device not open, using template caps");
        return NULL;                /* base class will get template caps for us */
    }

    if (sink->cached_caps) {
        GST_LOG_OBJECT (sink, "Returning cached caps");
        return gst_caps_ref (sink->cached_caps);
    }

    element_class = GST_ELEMENT_GET_CLASS (sink);
    pad_template = gst_element_class_get_pad_template (element_class, "sink");
    g_return_val_if_fail (pad_template != NULL, NULL);

    caps = gst_alsa_probe_supported_formats (GST_OBJECT (sink), sink->device,
            sink->handle, gst_pad_template_get_caps (pad_template));

    if (caps) {
        sink->cached_caps = gst_caps_ref (caps);
    }

    GST_INFO_OBJECT (sink, "returning caps %" GST_PTR_FORMAT, caps);

    return caps;
}
Example #6
static gpointer
_get_decoder_factories (gpointer arg)
{
  GstElementClass *klass = arg;
  GList *factories;
  GstPadTemplate *templ = gst_element_class_get_pad_template (klass,
      "sink");
  RsnDecFactoryFilterCtx ctx = { NULL, };
  GstCaps *raw;
  gboolean raw_audio;
  GstRegistry *registry = gst_registry_get ();

  ctx.desired_caps = gst_pad_template_get_caps (templ);

  raw =
      gst_caps_from_string
      ("audio/x-raw,format=(string){ F32LE, F32BE, F64LE, F64BE }");
  raw_audio = gst_caps_can_intersect (raw, ctx.desired_caps);
  if (raw_audio) {
    GstCaps *sub = gst_caps_subtract (ctx.desired_caps, raw);
    ctx.desired_caps = sub;
  } else {
    gst_caps_ref (ctx.desired_caps);
  }
  gst_caps_unref (raw);

  /* Set decoder caps to empty. Will be filled by the factory_filter */
  ctx.decoder_caps = gst_caps_new_empty ();
  GST_DEBUG ("Finding factories for caps: %" GST_PTR_FORMAT, ctx.desired_caps);

  factories = gst_registry_feature_filter (registry,
      (GstPluginFeatureFilter) rsndec_factory_filter, FALSE, &ctx);

  /* If these are audio caps, we add audioconvert, which is not a decoder,
     but allows raw audio to go through relatively unmolested - this will
     come in handy when we have to send placeholder silence to allow preroll
     for those DVDs which have titles with no audio track. */
  if (raw_audio) {
    GstPluginFeature *feature;
    GST_DEBUG ("These are audio caps, adding audioconvert");
    feature =
        gst_registry_find_feature (registry, "audioconvert",
        GST_TYPE_ELEMENT_FACTORY);
    if (feature) {
      factories = g_list_append (factories, feature);
    } else {
      GST_WARNING ("Could not find feature audioconvert");
    }
  }

  factories = g_list_sort (factories, (GCompareFunc) sort_by_ranks);

  GST_DEBUG ("Available decoder caps %" GST_PTR_FORMAT, ctx.decoder_caps);
  gst_caps_unref (ctx.decoder_caps);
  gst_caps_unref (ctx.desired_caps);

  return factories;
}
static GstCaps *
gst_alsasink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
  GstElementClass *element_class;
  GstPadTemplate *pad_template;
  GstAlsaSink *sink = GST_ALSA_SINK (bsink);
  GstCaps *caps, *templ_caps;

  if (sink->handle == NULL) {
    GST_DEBUG_OBJECT (sink, "device not open, using template caps");
    return NULL;                /* base class will get template caps for us */
  }

  if (sink->cached_caps) {
    if (filter) {
      caps = gst_caps_intersect_full (filter, sink->cached_caps,
          GST_CAPS_INTERSECT_FIRST);
      GST_LOG_OBJECT (sink, "Returning cached caps %" GST_PTR_FORMAT " with "
          "filter %" GST_PTR_FORMAT " applied: %" GST_PTR_FORMAT,
          sink->cached_caps, filter, caps);
      return caps;
    } else {
      GST_LOG_OBJECT (sink, "Returning cached caps %" GST_PTR_FORMAT,
          sink->cached_caps);
      return gst_caps_ref (sink->cached_caps);
    }
  }

  element_class = GST_ELEMENT_GET_CLASS (sink);
  pad_template = gst_element_class_get_pad_template (element_class, "sink");
  g_return_val_if_fail (pad_template != NULL, NULL);

  templ_caps = gst_pad_template_get_caps (pad_template);
  caps = gst_alsa_probe_supported_formats (GST_OBJECT (sink), sink->device,
      sink->handle, templ_caps);
  gst_caps_unref (templ_caps);

  if (caps) {
    sink->cached_caps = gst_caps_ref (caps);
  }

  GST_INFO_OBJECT (sink, "returning caps %" GST_PTR_FORMAT, caps);

  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    return intersection;
  } else {
    return caps;
  }
}
static GstCaps *
gst_alsasrc_getcaps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstElementClass *element_class;
  GstPadTemplate *pad_template;
  GstAlsaSrc *src;
  GstCaps *caps, *templ_caps;

  src = GST_ALSA_SRC (bsrc);

  if (src->handle == NULL) {
    GST_DEBUG_OBJECT (src, "device not open, using template caps");
    return GST_BASE_SRC_CLASS (parent_class)->get_caps (bsrc, filter);
  }

  if (src->cached_caps) {
    GST_LOG_OBJECT (src, "Returning cached caps");
    if (filter)
      return gst_caps_intersect_full (filter, src->cached_caps,
          GST_CAPS_INTERSECT_FIRST);
    else
      return gst_caps_ref (src->cached_caps);
  }

  element_class = GST_ELEMENT_GET_CLASS (src);
  pad_template = gst_element_class_get_pad_template (element_class, "src");
  g_return_val_if_fail (pad_template != NULL, NULL);

  templ_caps = gst_pad_template_get_caps (pad_template);
  GST_INFO_OBJECT (src, "template caps %" GST_PTR_FORMAT, templ_caps);

  caps = gst_alsa_probe_supported_formats (GST_OBJECT (src),
      src->device, src->handle, templ_caps);
  gst_caps_unref (templ_caps);

  if (caps) {
    src->cached_caps = gst_caps_ref (caps);
  }

  GST_INFO_OBJECT (src, "returning caps %" GST_PTR_FORMAT, caps);

  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    return intersection;
  } else {
    return caps;
  }
}
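/* Unit-test helper: every raw video format known to libgstvideo (apart from
 * UNKNOWN and ENCODED) must be listed in videoconvert's pad template caps. */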
static void
check_pad_template (GstPadTemplate * tmpl)
{
  const GValue *list_val, *fmt_val;
  GstStructure *s;
  gboolean *formats_supported;
  GstCaps *caps;
  guint i, num_formats;

  num_formats = get_num_formats ();
  formats_supported = g_new0 (gboolean, num_formats);

  caps = gst_pad_template_get_caps (tmpl);

  /* If this fails, we need to update this unit test */
  fail_unless_equals_int (gst_caps_get_size (caps), 1);
  s = gst_caps_get_structure (caps, 0);

  fail_unless (gst_structure_has_name (s, "video/x-raw"));

  list_val = gst_structure_get_value (s, "format");
  fail_unless (list_val != NULL);
  /* If this fails, we need to update this unit test */
  fail_unless (GST_VALUE_HOLDS_LIST (list_val));

  for (i = 0; i < gst_value_list_get_size (list_val); ++i) {
    GstVideoFormat fmt;
    const gchar *fmt_str;

    fmt_val = gst_value_list_get_value (list_val, i);
    fail_unless (G_VALUE_HOLDS_STRING (fmt_val));
    fmt_str = g_value_get_string (fmt_val);
    GST_LOG ("format string: '%s'", fmt_str);
    fmt = gst_video_format_from_string (fmt_str);
    fail_unless (fmt != GST_VIDEO_FORMAT_UNKNOWN);
    formats_supported[(guint) fmt] = TRUE;
  }

  gst_caps_unref (caps);

  for (i = 2; i < num_formats; ++i) {
    if (!formats_supported[i]) {
      g_error ("videoconvert doesn't support format '%s'",
          gst_video_format_to_string ((GstVideoFormat) i));
    }
  }

  g_free (formats_supported);
}
static GstCaps *
gst_sunaudiosink_getcaps (GstBaseSink * bsink)
{
  GstPadTemplate *pad_template;
  GstCaps *caps = NULL;
  GstSunAudioSink *sunaudiosink = GST_SUNAUDIO_SINK (bsink);

  GST_DEBUG_OBJECT (sunaudiosink, "getcaps called");

  pad_template = gst_static_pad_template_get (&gst_sunaudiosink_factory);
  caps = gst_caps_copy (gst_pad_template_get_caps (pad_template));

  gst_object_unref (pad_template);

  return caps;
}
static GstCaps * gst_devsound_src_getcaps(GstBaseSrc * bsrc)
    {
    GstDevsoundSrc *devsoundsrc;
    GstCaps *caps;
    GstPadTemplate *pad_template;

    devsoundsrc = GST_DEVSOUND_SRC (bsrc);

    GST_DEBUG_OBJECT(devsoundsrc, "getcaps called");

    pad_template = gst_static_pad_template_get(&devsound_src_factory);
    caps = gst_caps_copy(gst_pad_template_get_caps(pad_template));

    gst_object_unref(pad_template);

    return caps;
    }
static void
gst_tag_mux_init (GstTagMux * mux, GstTagMuxClass * mux_class)
{
  GstElementClass *element_klass = GST_ELEMENT_CLASS (mux_class);
  GstPadTemplate *tmpl;

  mux->priv =
      G_TYPE_INSTANCE_GET_PRIVATE (mux, GST_TYPE_TAG_MUX, GstTagMuxPrivate);

  /* pad through which data comes in to the element */
  tmpl = gst_element_class_get_pad_template (element_klass, "sink");
  if (tmpl) {
    mux->priv->sinkpad = gst_pad_new_from_template (tmpl, "sink");
  } else {
    g_warning ("GstTagMux subclass '%s' did not install a %s pad template!\n",
        G_OBJECT_CLASS_NAME (element_klass), "sink");
    mux->priv->sinkpad = gst_pad_new ("sink", GST_PAD_SINK);
  }
  gst_pad_set_chain_function (mux->priv->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tag_mux_chain));
  gst_pad_set_event_function (mux->priv->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tag_mux_sink_event));
  gst_element_add_pad (GST_ELEMENT (mux), mux->priv->sinkpad);

  /* pad through which data goes out of the element */
  tmpl = gst_element_class_get_pad_template (element_klass, "src");
  if (tmpl) {
    GstCaps *tmpl_caps = gst_pad_template_get_caps (tmpl);

    mux->priv->srcpad = gst_pad_new_from_template (tmpl, "src");
    gst_pad_use_fixed_caps (mux->priv->srcpad);
    if (tmpl_caps != NULL && gst_caps_is_fixed (tmpl_caps)) {
      gst_pad_set_caps (mux->priv->srcpad, tmpl_caps);
    }
  } else {
    g_warning ("GstTagMux subclass '%s' did not install a %s pad template!\n",
        G_OBJECT_CLASS_NAME (element_klass), "source");
    mux->priv->srcpad = gst_pad_new ("src", GST_PAD_SRC);
  }
  gst_element_add_pad (GST_ELEMENT (mux), mux->priv->srcpad);

  mux->priv->render_start_tag = TRUE;
  mux->priv->render_end_tag = TRUE;
}
Example #13
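// Registry filter: keep only decodable element factories that expose at least
// one pad template with non-ANY caps and that can sink all of the caps passed
// in through 'data'.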
gboolean sink_factory_filter(GstPluginFeature* feature, gpointer data) {
  GstCaps* caps = (GstCaps*)data;
  if (!GST_IS_ELEMENT_FACTORY(feature)) return FALSE;
  const GList* static_pads =
      gst_element_factory_get_static_pad_templates(GST_ELEMENT_FACTORY(feature));
  int not_any_number = 0;
  for (GList* item = (GList*)static_pads; item; item = item->next) {
    GstStaticPadTemplate* padTemplate = (GstStaticPadTemplate*)item->data;
    GstPadTemplate* pad = gst_static_pad_template_get(padTemplate);
    GstCaps* padCaps = gst_pad_template_get_caps(pad);
    if (!gst_caps_is_any(padCaps)) not_any_number++;
    gst_object_unref(pad);
  }
  if (not_any_number == 0) return FALSE;
  if (!gst_element_factory_list_is_type(GST_ELEMENT_FACTORY(feature),
                                        GST_ELEMENT_FACTORY_TYPE_DECODABLE))
    return FALSE;
  if (!gst_element_factory_can_sink_all_caps(GST_ELEMENT_FACTORY(feature), caps)) return FALSE;
  return TRUE;
}
static GstCaps *
gst_directsound_sink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
  GstElementClass *element_class;
  GstPadTemplate *pad_template;
  GstDirectSoundSink *dsoundsink = GST_DIRECTSOUND_SINK (bsink);
  GstCaps *caps;

  if (dsoundsink->pDS == NULL) {
    GST_DEBUG_OBJECT (dsoundsink, "device not open, using template caps");
    return NULL;                /* base class will get template caps for us */
  }

  if (dsoundsink->cached_caps) {
    caps = gst_caps_ref (dsoundsink->cached_caps);
  } else {
    element_class = GST_ELEMENT_GET_CLASS (dsoundsink);
    pad_template = gst_element_class_get_pad_template (element_class, "sink");
    g_return_val_if_fail (pad_template != NULL, NULL);

    caps = gst_directsound_probe_supported_formats (dsoundsink,
        gst_pad_template_get_caps (pad_template));
    if (caps)
      dsoundsink->cached_caps = gst_caps_ref (caps);
  }

  if (caps && filter) {
    GstCaps *tmp =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  if (caps) {
    gchar *caps_string = gst_caps_to_string (caps);
    GST_DEBUG_OBJECT (dsoundsink, "returning caps %s", caps_string);
    g_free (caps_string);
  }

  return caps;
}
Example #15
static GstFlowReturn
gst_teletextdec_export_rgba_page (GstTeletextDec * teletext, vbi_page * page,
    GstBuffer ** buf)
{
  guint size;
  GstCaps *caps, *out_caps;
  GstFlowReturn ret;
  gint width, height;
  GstPadTemplate *templ;

  /* one character occupies 12 x 10 pixels */
  width = page->columns * 12;
  height = page->rows * 10;

  caps = gst_caps_new_simple ("video/x-raw-rgb",
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, teletext->rate_numerator,
      teletext->rate_denominator, NULL);

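  /* clamp the proposed caps to what the src pad template can actually accept */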
  templ = gst_static_pad_template_get (&src_template);
  out_caps = gst_caps_intersect (caps, gst_pad_template_get_caps (templ));
  gst_caps_unref (caps);
  gst_object_unref (templ);

  size = (guint) width * (guint) height * sizeof (vbi_rgba);

  ret = gst_pad_alloc_buffer_and_set_caps (teletext->srcpad,
      GST_BUFFER_OFFSET_NONE, size, out_caps, &(*buf));

  if (ret == GST_FLOW_OK) {
    GST_DEBUG_OBJECT (teletext, "Creating image with %d rows and %d cols",
        page->rows, page->columns);
    vbi_draw_vt_page (page, VBI_PIXFMT_RGBA32_LE,
        (vbi_rgba *) GST_BUFFER_DATA (*buf), FALSE, TRUE);
  }

  gst_caps_unref (out_caps);
  return ret;
}
Example #16
static GstCaps *
gst_osx_audio_src_get_caps (GstBaseSrc * src)
{
  GstElementClass *gstelement_class;
  GstOsxAudioSrc *osxsrc;
  GstPadTemplate *pad_template;
  GstCaps *caps;
  gint min, max;

  gstelement_class = GST_ELEMENT_GET_CLASS (src);
  osxsrc = GST_OSX_AUDIO_SRC (src);

  if (osxsrc->deviceChannels == -1) {
    /* -1 means we don't know the number of channels yet.  for now, return
     * template caps.
     */
    return NULL;
  }

  max = osxsrc->deviceChannels;
  if (max < 1)
    max = 1;                    /* 0 channels means 1 channel? */

  min = MIN (1, max);

  pad_template = gst_element_class_get_pad_template (gstelement_class, "src");
  g_return_val_if_fail (pad_template != NULL, NULL);

  caps = gst_caps_copy (gst_pad_template_get_caps (pad_template));

  if (min == max) {
    gst_caps_set_simple (caps, "channels", G_TYPE_INT, max, NULL);
  } else {
    gst_caps_set_simple (caps, "channels", GST_TYPE_INT_RANGE, min, max,
        NULL);
  }

  return caps;
}
static GstCaps *
gst_xviddec_src_getcaps (GstPad * pad)
{
  GstXvidDec *dec = GST_XVIDDEC (GST_PAD_PARENT (pad));
  GstCaps *caps;
  gint csp[] = {
    XVID_CSP_I420,
    XVID_CSP_YV12,
    XVID_CSP_YUY2,
    XVID_CSP_UYVY,
    XVID_CSP_YVYU,
    XVID_CSP_BGRA,
    XVID_CSP_ABGR,
    XVID_CSP_RGBA,
#ifdef XVID_CSP_ARGB
    XVID_CSP_ARGB,
#endif
    XVID_CSP_BGR,
    XVID_CSP_RGB555,
    XVID_CSP_RGB565,
    0
  }, i;

  if (!GST_PAD_CAPS (dec->sinkpad)) {
    GstPadTemplate *templ = gst_static_pad_template_get (&src_template);

    return gst_caps_copy (gst_pad_template_get_caps (templ));
  }

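  /* sink caps are negotiated: advertise one caps structure per supported
   * colorspace at the decoded geometry */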
  caps = gst_caps_new_empty ();
  for (i = 0; csp[i] != 0; i++) {
    GstCaps *one = gst_xvid_csp_to_caps (csp[i], dec->width,
        dec->height, dec->fps, dec->par);

    gst_caps_append (caps, one);
  }

  return caps;
}
static GstCaps *
gst_vdp_vpp_sink_getcaps (GstPad * pad)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstCaps *caps;

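  /* with an open device, query VDPAU for the video caps it can actually
   * handle; otherwise fall back to the static sink template caps */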
  if (vpp->device) {
    caps = gst_vdp_video_buffer_get_allowed_caps (vpp->device);
  } else {
    GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vpp);
    GstPadTemplate *sink_template;

    sink_template = gst_element_class_get_pad_template (element_class, "sink");
    caps = gst_caps_copy (gst_pad_template_get_caps (sink_template));
  }
  GST_DEBUG ("returning caps: %" GST_PTR_FORMAT, caps);

  gst_object_unref (vpp);

  return caps;
}
Example #19
static gboolean
gst_esdsink_open (GstAudioSink * asink)
{
  esd_server_info_t *server_info;
  GstPadTemplate *pad_template;
  GstEsdSink *esdsink;
  gchar *saved_env;
  gint i;

  esdsink = GST_ESDSINK (asink);

  GST_DEBUG_OBJECT (esdsink, "open");

  /* ensure libesd doesn't auto-spawn a sound daemon if none is running yet */
  saved_env = g_strdup (g_getenv ("ESD_NO_SPAWN"));
  g_setenv ("ESD_NO_SPAWN", "1", TRUE);

  /* now try to connect to any existing/running sound daemons */
  esdsink->ctrl_fd = esd_open_sound (esdsink->host);

  /* and restore the previous state */
  if (saved_env != NULL) {
    g_setenv ("ESD_NO_SPAWN", saved_env, TRUE);
  } else {
    g_unsetenv ("ESD_NO_SPAWN");
  }
  g_free (saved_env);

  if (esdsink->ctrl_fd < 0)
    goto couldnt_connect;

  /* get server info */
  server_info = esd_get_server_info (esdsink->ctrl_fd);
  if (!server_info)
    goto no_server_info;

  GST_INFO_OBJECT (esdsink, "got server info rate: %i", server_info->rate);

  pad_template = gst_static_pad_template_get (&sink_factory);
  esdsink->cur_caps = gst_caps_copy (gst_pad_template_get_caps (pad_template));
  gst_object_unref (pad_template);

  for (i = 0; i < gst_caps_get_size (esdsink->cur_caps); i++) {
    GstStructure *s;

    s = gst_caps_get_structure (esdsink->cur_caps, i);
    gst_structure_set (s, "rate", G_TYPE_INT, server_info->rate, NULL);
  }

  esd_free_server_info (server_info);

  GST_INFO_OBJECT (esdsink, "server caps: %" GST_PTR_FORMAT, esdsink->cur_caps);

  return TRUE;

  /* ERRORS */
couldnt_connect:
  {
    GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
        (_("Could not establish connection to sound server")),
        ("can't open connection to esound server"));
    return FALSE;
  }
no_server_info:
  {
    GST_ELEMENT_ERROR (esdsink, RESOURCE, OPEN_WRITE,
        (_("Failed to query sound server capabilities")),
        ("couldn't get server info!"));
    return FALSE;
  }
}
Example #20
static GstFlowReturn
gst_pngdec_caps_create_and_set (GstPngDec * pngdec)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstCaps *caps = NULL, *res = NULL;
  GstPadTemplate *templ = NULL;
  gint bpc = 0, color_type;
  png_uint_32 width, height;

  g_return_val_if_fail (GST_IS_PNGDEC (pngdec), GST_FLOW_ERROR);

  /* Get bits per channel */
  bpc = png_get_bit_depth (pngdec->png, pngdec->info);

  /* We don't handle 16 bits per color, strip down to 8 */
  if (bpc == 16) {
    GST_LOG_OBJECT (pngdec,
        "this is a 16 bits per channel PNG image, strip down to 8 bits");
    png_set_strip_16 (pngdec->png);
  }

  /* Get Color type */
  color_type = png_get_color_type (pngdec->png, pngdec->info);

#if 0
  /* We used to have this HACK to reverse the outgoing bytes, but the problem
   * that originally required the hack seems to have been in ffmpegcolorspace's
   * RGBA descriptions. It doesn't seem needed now that's fixed, but might
   * still be needed on big-endian systems, I'm not sure. J.S. 6/7/2007 */
  if (color_type == PNG_COLOR_TYPE_RGB_ALPHA)
    png_set_bgr (pngdec->png);
#endif

  /* Gray scale converted to RGB and upscaled to 8 bits */
  if ((color_type == PNG_COLOR_TYPE_GRAY_ALPHA) ||
      (color_type == PNG_COLOR_TYPE_GRAY)) {
    GST_LOG_OBJECT (pngdec, "converting grayscale png to RGB");
    png_set_gray_to_rgb (pngdec->png);
    if (bpc < 8) {              /* Convert to 8 bits */
      GST_LOG_OBJECT (pngdec, "converting grayscale image to 8 bits");
      png_set_gray_1_2_4_to_8 (pngdec->png);
    }
  }

  /* Palette converted to RGB */
  if (color_type == PNG_COLOR_TYPE_PALETTE) {
    GST_LOG_OBJECT (pngdec, "converting palette png to RGB");
    png_set_palette_to_rgb (pngdec->png);
  }

  /* Update the info structure */
  png_read_update_info (pngdec->png, pngdec->info);

  /* Get IHDR header again after transformation settings */

  png_get_IHDR (pngdec->png, pngdec->info, &width, &height,
      &bpc, &pngdec->color_type, NULL, NULL, NULL);

  pngdec->width = width;
  pngdec->height = height;

  GST_LOG_OBJECT (pngdec, "this is a %dx%d PNG image", pngdec->width,
      pngdec->height);

  switch (pngdec->color_type) {
    case PNG_COLOR_TYPE_RGB:
      GST_LOG_OBJECT (pngdec, "we have no alpha channel, depth is 24 bits");
      pngdec->bpp = 24;
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:
      GST_LOG_OBJECT (pngdec, "we have an alpha channel, depth is 32 bits");
      pngdec->bpp = 32;
      break;
    default:
      GST_ELEMENT_ERROR (pngdec, STREAM, NOT_IMPLEMENTED, (NULL),
          ("pngdec does not support this color type"));
      ret = GST_FLOW_NOT_SUPPORTED;
      goto beach;
  }

  caps = gst_caps_new_simple ("video/x-raw-rgb",
      "width", G_TYPE_INT, pngdec->width,
      "height", G_TYPE_INT, pngdec->height,
      "bpp", G_TYPE_INT, pngdec->bpp,
      "framerate", GST_TYPE_FRACTION, pngdec->fps_n, pngdec->fps_d, NULL);

  templ = gst_static_pad_template_get (&gst_pngdec_src_pad_template);

  res = gst_caps_intersect (caps, gst_pad_template_get_caps (templ));

  gst_caps_unref (caps);
  gst_object_unref (templ);

  if (!gst_pad_set_caps (pngdec->srcpad, res))
    ret = GST_FLOW_NOT_NEGOTIATED;

  GST_DEBUG_OBJECT (pngdec, "our caps %" GST_PTR_FORMAT, res);

  gst_caps_unref (res);

  /* Push a newsegment event */
  if (pngdec->need_newsegment) {
    gst_pad_push_event (pngdec->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
    pngdec->need_newsegment = FALSE;
  }

beach:
  return ret;
}
static gboolean
gst_osx_audio_sink_allowed_caps (GstOsxAudioSink * osxsink)
{
  gint i, channels;
  gboolean spdif_allowed;
  AudioChannelLayout *layout;
  GstElementClass *element_class;
  GstPadTemplate *pad_template;
  GstCaps *caps, *in_caps;
  guint64 channel_mask = 0;
  GstAudioChannelPosition *pos = osxsink->channel_positions;

  /* First collect info about the HW capabilities and preferences */
  spdif_allowed =
      gst_core_audio_audio_device_is_spdif_avail (osxsink->device_id);
  layout = gst_core_audio_audio_device_get_channel_layout (osxsink->device_id);

  GST_DEBUG_OBJECT (osxsink, "Selected device ID: %u SPDIF allowed: %d",
      (unsigned) osxsink->device_id, spdif_allowed);

  if (layout) {
    channels = MIN (layout->mNumberChannelDescriptions,
        GST_OSX_AUDIO_MAX_CHANNEL);
  } else {
    GST_WARNING_OBJECT (osxsink, "This driver does not support "
        "kAudioDevicePropertyPreferredChannelLayout.");
    channels = 2;
  }

  switch (channels) {
    case 0:
      pos[0] = GST_AUDIO_CHANNEL_POSITION_NONE;
      break;
    case 1:
      pos[0] = GST_AUDIO_CHANNEL_POSITION_MONO;
      break;
    case 2:
      pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
      pos[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
      channel_mask |= GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_LEFT);
      channel_mask |= GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_RIGHT);
      break;
    default:
      channels = MIN (layout->mNumberChannelDescriptions,
          GST_OSX_AUDIO_MAX_CHANNEL);
      for (i = 0; i < channels; i++) {
        switch (layout->mChannelDescriptions[i].mChannelLabel) {
          case kAudioChannelLabel_Left:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
            break;
          case kAudioChannelLabel_Right:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
            break;
          case kAudioChannelLabel_Center:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
            break;
          case kAudioChannelLabel_LFEScreen:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_LFE1;
            break;
          case kAudioChannelLabel_LeftSurround:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
            break;
          case kAudioChannelLabel_RightSurround:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
            break;
          case kAudioChannelLabel_RearSurroundLeft:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT;
            break;
          case kAudioChannelLabel_RearSurroundRight:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT;
            break;
          case kAudioChannelLabel_CenterSurround:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_REAR_CENTER;
            break;
          default:
            GST_WARNING_OBJECT (osxsink, "unrecognized channel: %d",
                (int) layout->mChannelDescriptions[i].mChannelLabel);
            channel_mask = 0;
            channels = 2;
            break;
        }
      }
  }
  g_free (layout);

  /* Recover the template caps */
  element_class = GST_ELEMENT_GET_CLASS (osxsink);
  pad_template = gst_element_class_get_pad_template (element_class, "sink");
  in_caps = gst_pad_template_get_caps (pad_template);

  /* Create the allowed subset  */
  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (in_caps); i++) {
    GstStructure *in_s, *out_s;

    in_s = gst_caps_get_structure (in_caps, i);

    if (gst_structure_has_name (in_s, "audio/x-ac3") ||
        gst_structure_has_name (in_s, "audio/x-dts")) {
      if (spdif_allowed) {
        gst_caps_append_structure (caps, gst_structure_copy (in_s));
      }
    }
    gst_audio_channel_positions_to_mask (pos, channels, FALSE, &channel_mask);
    out_s = gst_structure_copy (in_s);
    gst_structure_remove_fields (out_s, "channels", "channel-mask", NULL);
    gst_structure_set (out_s, "channels", G_TYPE_INT, channels,
        "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
    gst_caps_append_structure (caps, out_s);
  }

  if (osxsink->cached_caps) {
    gst_caps_unref (osxsink->cached_caps);
  }

  osxsink->cached_caps = caps;
  osxsink->channels = channels;

  return TRUE;
}
Example #22
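/* Unit-test helper for videoscale: every known raw video format should appear
 * in the template caps, apart from the formats explicitly tolerated below. */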
static void
check_pad_template (GstPadTemplate * tmpl)
{
  const GValue *list_val, *fmt_val;
  GstStructure *s;
  gboolean *formats_supported;
  GstCaps *caps;
  guint i, num_formats;

  num_formats = get_num_formats ();
  formats_supported = g_new0 (gboolean, num_formats);

  caps = gst_pad_template_get_caps (tmpl);

  /* If this fails, we need to update this unit test */
  fail_unless_equals_int (gst_caps_get_size (caps), 2);
  /* Remove the ANY caps features structure */
  caps = gst_caps_truncate (caps);
  s = gst_caps_get_structure (caps, 0);

  fail_unless (gst_structure_has_name (s, "video/x-raw"));

  list_val = gst_structure_get_value (s, "format");
  fail_unless (list_val != NULL);
  /* If this fails, we need to update this unit test */
  fail_unless (GST_VALUE_HOLDS_LIST (list_val));

  for (i = 0; i < gst_value_list_get_size (list_val); ++i) {
    GstVideoFormat fmt;
    const gchar *fmt_str;

    fmt_val = gst_value_list_get_value (list_val, i);
    fail_unless (G_VALUE_HOLDS_STRING (fmt_val));
    fmt_str = g_value_get_string (fmt_val);
    GST_LOG ("format string: '%s'", fmt_str);
    fmt = gst_video_format_from_string (fmt_str);
    if (fmt == GST_VIDEO_FORMAT_UNKNOWN)
      g_error ("Unknown raw format '%s' in pad template caps", fmt_str);
    formats_supported[(guint) fmt] = TRUE;
  }

  gst_caps_unref (caps);

  for (i = 2; i < num_formats; ++i) {
    if (!formats_supported[i]) {
      const gchar *fmt_str = gst_video_format_to_string ((GstVideoFormat) i);

      switch (i) {
        case GST_VIDEO_FORMAT_v210:
        case GST_VIDEO_FORMAT_v216:
        case GST_VIDEO_FORMAT_NV12:
        case GST_VIDEO_FORMAT_NV16:
        case GST_VIDEO_FORMAT_NV21:
        case GST_VIDEO_FORMAT_NV24:
        case GST_VIDEO_FORMAT_UYVP:
        case GST_VIDEO_FORMAT_A420:
        case GST_VIDEO_FORMAT_YUV9:
        case GST_VIDEO_FORMAT_YVU9:
        case GST_VIDEO_FORMAT_IYU1:
        case GST_VIDEO_FORMAT_r210:{
          static gboolean shown_fixme[100] = { FALSE, };

          if (!shown_fixme[i]) {
            GST_FIXME ("FIXME: add %s support to videoscale", fmt_str);
            shown_fixme[i] = TRUE;
          }
          break;
        }
        case GST_VIDEO_FORMAT_BGR16:
        case GST_VIDEO_FORMAT_BGR15:
        case GST_VIDEO_FORMAT_RGB8P:
        case GST_VIDEO_FORMAT_I420_10BE:
        case GST_VIDEO_FORMAT_I420_10LE:
        case GST_VIDEO_FORMAT_I422_10BE:
        case GST_VIDEO_FORMAT_I422_10LE:
        case GST_VIDEO_FORMAT_Y444_10BE:
        case GST_VIDEO_FORMAT_Y444_10LE:
        case GST_VIDEO_FORMAT_GBR:
        case GST_VIDEO_FORMAT_GBR_10BE:
        case GST_VIDEO_FORMAT_GBR_10LE:
          GST_LOG ("Ignoring lack of support for format %s", fmt_str);
          break;
        default:
          g_error ("videoscale doesn't support format '%s'", fmt_str);
          break;
      }
    }
  }

  g_free (formats_supported);
}