Example #1
static GstFlowReturn
vorbis_handle_identification_packet (GstVorbisDec * vd)
{
  GstCaps *caps;
  const GstAudioChannelPosition *pos = NULL;
  gint width = GST_VORBIS_DEC_DEFAULT_SAMPLE_WIDTH;

  switch (vd->vi.channels) {
    case 1:
    case 2:
      /* nothing */
      break;
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
      pos = gst_vorbis_channel_positions[vd->vi.channels - 1];
      break;
    default:{
      gint i;
      GstAudioChannelPosition *posn =
          g_new (GstAudioChannelPosition, vd->vi.channels);

      GST_ELEMENT_WARNING (GST_ELEMENT (vd), STREAM, DECODE,
          (NULL), ("Using NONE channel layout for more than 8 channels"));

      for (i = 0; i < vd->vi.channels; i++)
        posn[i] = GST_AUDIO_CHANNEL_POSITION_NONE;

      pos = posn;
    }
  }

  /* negotiate width with downstream */
  caps = gst_pad_get_allowed_caps (vd->srcpad);
  if (caps) {
    if (!gst_caps_is_empty (caps)) {
      GstStructure *s;

      s = gst_caps_get_structure (caps, 0);
      /* template ensures 16 or 32 */
      gst_structure_get_int (s, "width", &width);

      GST_INFO_OBJECT (vd, "using %s with %d channels and %d bit audio depth",
          gst_structure_get_name (s), vd->vi.channels, width);
    }
    gst_caps_unref (caps);
  }
  vd->width = width >> 3;

  /* select a copy_samples function, this way we can have specialized versions
   * for mono/stereo and avoid the depth switch in tremor case */
  vd->copy_samples = get_copy_sample_func (vd->vi.channels, vd->width);

  caps = gst_caps_copy (gst_pad_get_pad_template_caps (vd->srcpad));
  gst_caps_set_simple (caps, "rate", G_TYPE_INT, vd->vi.rate,
      "channels", G_TYPE_INT, vd->vi.channels,
      "width", G_TYPE_INT, width, NULL);

  if (pos) {
    gst_audio_set_channel_positions (gst_caps_get_structure (caps, 0), pos);
  }

  if (vd->vi.channels > 8) {
    g_free ((GstAudioChannelPosition *) pos);
  }

  gst_pad_set_caps (vd->srcpad, caps);
  gst_caps_unref (caps);

  return GST_FLOW_OK;
}
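The negotiation step above can be distilled into a small helper. Below is a minimal sketch (same 0.10-era API as the example; the helper name is illustrative, not part of the original element) of asking downstream which caps it would accept and reading a field only when the result is non-empty:

static gint
negotiate_width (GstPad * srcpad, gint default_width)
{
  /* intersection of our pad's caps and what the peer allows */
  GstCaps *caps = gst_pad_get_allowed_caps (srcpad);
  gint width = default_width;

  if (caps) {
    if (!gst_caps_is_empty (caps)) {
      /* the first structure is the preferred one */
      GstStructure *s = gst_caps_get_structure (caps, 0);

      gst_structure_get_int (s, "width", &width);
    }
    gst_caps_unref (caps);
  }

  return width;
}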
Example #2
static GstElement *
gst_auto_video_src_find_best (GstAutoVideoSrc * src)
{
  GList *list, *item;
  GstElement *choice = NULL;
  GstMessage *message = NULL;
  GSList *errors = NULL;
  GstBus *bus = gst_bus_new ();
  GstPad *el_pad = NULL;
  GstCaps *el_caps = NULL, *intersect = NULL;
  gboolean no_match = TRUE;

  list = gst_registry_feature_filter (gst_registry_get_default (),
      (GstPluginFeatureFilter) gst_auto_video_src_factory_filter, FALSE, src);
  list = g_list_sort (list, (GCompareFunc) gst_auto_video_src_compare_ranks);

  GST_LOG_OBJECT (src, "Trying to find usable video devices ...");

  for (item = list; item != NULL; item = item->next) {
    GstElementFactory *f = GST_ELEMENT_FACTORY (item->data);
    GstElement *el;

    if ((el = gst_auto_video_src_create_element_with_pretty_name (src, f))) {
      GstStateChangeReturn ret;

      GST_DEBUG_OBJECT (src, "Testing %s", GST_PLUGIN_FEATURE (f)->name);

      /* If AutoVideoSrc has been provided with filter caps,
       * accept only sources that match with the filter caps */
      if (src->filter_caps) {
        el_pad = gst_element_get_static_pad (GST_ELEMENT (el), "src");
        el_caps = gst_pad_get_caps (el_pad);
        gst_object_unref (el_pad);
        GST_DEBUG_OBJECT (src,
            "Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
            src->filter_caps, el_caps);
        intersect = gst_caps_intersect (src->filter_caps, el_caps);
        no_match = gst_caps_is_empty (intersect);
        gst_caps_unref (el_caps);
        gst_caps_unref (intersect);

        if (no_match) {
          GST_DEBUG_OBJECT (src, "Incompatible caps");
          gst_object_unref (el);
          continue;
        } else {
          GST_DEBUG_OBJECT (src, "Found compatible caps");
        }
      }

      gst_element_set_bus (el, bus);
      ret = gst_element_set_state (el, GST_STATE_READY);
      if (ret == GST_STATE_CHANGE_SUCCESS) {
        GST_DEBUG_OBJECT (src, "This worked!");
        choice = el;
        break;
      }

      /* collect all error messages */
      while ((message = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR))) {
        GST_DEBUG_OBJECT (src, "error message %" GST_PTR_FORMAT, message);
        errors = g_slist_append (errors, message);
      }

      gst_element_set_state (el, GST_STATE_NULL);
      gst_object_unref (el);
    }
  }

  GST_DEBUG_OBJECT (src, "done trying");
  if (!choice) {
    if (errors) {
      /* FIXME: we forward the first error for now; but later on it might make
       * sense to actually analyse them */
      gst_message_ref (GST_MESSAGE (errors->data));
      GST_DEBUG_OBJECT (src, "reposting message %p", errors->data);
      gst_element_post_message (GST_ELEMENT (src), GST_MESSAGE (errors->data));
    } else {
      /* send warning message to application and use a fakesrc */
      GST_ELEMENT_WARNING (src, RESOURCE, NOT_FOUND, (NULL),
          ("Failed to find a usable video source"));
      choice = gst_element_factory_make ("fakesrc", "fake-video-src");
      if (g_object_class_find_property (G_OBJECT_GET_CLASS (choice), "sync"))
        g_object_set (choice, "sync", TRUE, NULL);
      gst_element_set_state (choice, GST_STATE_READY);
    }
  }
  gst_object_unref (bus);
  gst_plugin_feature_list_free (list);
  g_slist_foreach (errors, (GFunc) gst_mini_object_unref, NULL);
  g_slist_free (errors);

  return choice;
}
Example #3
static GstCaps *
gst_opus_enc_sink_getcaps (GstAudioEncoder * benc)
{
  GstOpusEnc *enc;
  GstCaps *caps;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;
  gboolean allow_multistream;

  enc = GST_OPUS_ENC (benc);

  GST_DEBUG_OBJECT (enc, "sink getcaps");

  peercaps = gst_pad_peer_get_caps (GST_AUDIO_ENCODER_SRC_PAD (benc));
  if (!peercaps) {
    GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
    return
        gst_caps_copy (gst_pad_get_pad_template_caps
        (GST_AUDIO_ENCODER_SINK_PAD (benc)));
  }

  intersect = gst_caps_intersect (peercaps,
      gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
  gst_caps_unref (peercaps);

  if (gst_caps_is_empty (intersect))
    return intersect;

  allow_multistream = FALSE;
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    gboolean multistream;
    if (gst_structure_get_boolean (s, "multistream", &multistream)) {
      if (multistream) {
        allow_multistream = TRUE;
      }
    } else {
      allow_multistream = TRUE;
    }
  }

  gst_caps_unref (intersect);

  caps =
      gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
          (benc)));
  if (!allow_multistream) {
    GValue range = { 0 };
    g_value_init (&range, GST_TYPE_INT_RANGE);
    gst_value_set_int_range (&range, 1, 2);
    for (i = 0; i < gst_caps_get_size (caps); i++) {
      GstStructure *s = gst_caps_get_structure (caps, i);
      gst_structure_set_value (s, "channels", &range);
    }
    g_value_unset (&range);
  }

  GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, caps);
  return caps;
}
Example #4
static gboolean
gst_goom_src_negotiate (GstGoom * goom)
{
  GstCaps *othercaps, *target;
  GstStructure *structure;
  GstCaps *templ;
  GstQuery *query;
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  templ = gst_pad_get_pad_template_caps (goom->srcpad);

  GST_DEBUG_OBJECT (goom, "performing negotiation");

  /* see what the peer can do */
  othercaps = gst_pad_peer_query_caps (goom->srcpad, NULL);
  if (othercaps) {
    target = gst_caps_intersect (othercaps, templ);
    gst_caps_unref (othercaps);
    gst_caps_unref (templ);

    if (gst_caps_is_empty (target))
      goto no_format;

    target = gst_caps_truncate (target);
  } else {
    target = templ;
  }

  structure = gst_caps_get_structure (target, 0);
  gst_structure_fixate_field_nearest_int (structure, "width", DEFAULT_WIDTH);
  gst_structure_fixate_field_nearest_int (structure, "height", DEFAULT_HEIGHT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate",
      DEFAULT_FPS_N, DEFAULT_FPS_D);

  gst_goom_src_setcaps (goom, target);

  /* try to get a bufferpool now */
  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (target, TRUE);

  if (!gst_pad_peer_query (goom->srcpad, query)) {
    /* no problem, we use the query defaults */
    GST_DEBUG_OBJECT (goom, "ALLOCATION query failed");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    pool = NULL;
    size = goom->outsize;
    min = max = 0;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_buffer_pool_new ();
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, target, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (goom->pool) {
    gst_buffer_pool_set_active (goom->pool, FALSE);
    gst_object_unref (goom->pool);
  }
  goom->pool = pool;

  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_caps_unref (target);

  return TRUE;

no_format:
  {
    gst_caps_unref (target);
    return FALSE;
  }
}
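The allocation-query half of the function above follows a common pattern: propose the negotiated caps, take the peer's first pool suggestion if there is one, otherwise create a pool, and configure it. A minimal sketch of that pattern in isolation (GStreamer 1.x; the helper name is illustrative):

static GstBufferPool *
acquire_pool_for_caps (GstPad * srcpad, GstCaps * caps, guint default_size)
{
  GstQuery *query = gst_query_new_allocation (caps, TRUE);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  guint size = default_size, min = 0, max = 0;

  /* a failed query is not fatal, we simply fall back to the defaults */
  if (!gst_pad_peer_query (srcpad, query))
    GST_DEBUG ("ALLOCATION query failed");

  if (gst_query_get_n_allocation_pools (query) > 0)
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  gst_query_unref (query);

  if (pool == NULL)
    pool = gst_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  return pool;
}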
Example #5
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *templ, *ret, *tmp;

  if (gst_pad_has_current_caps (pad))
    return gst_pad_get_current_caps (pad);
  else if (gst_pad_has_current_caps (self->video_sinkpad))
    return gst_pad_get_current_caps (self->video_sinkpad);

  templ = gst_pad_get_pad_template_caps (self->video_sinkpad);
  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = templ;
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  if (self->vinfo.height && self->vinfo.width) {
    guint i, n;

    ret = gst_caps_make_writable (ret);
    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
          G_TYPE_INT, self->vinfo.height, NULL);
    }
  }

  tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
  GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad);
    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);

    tmp = gst_caps_make_writable (intersection);
    n = gst_caps_get_size (tmp);

    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);

      gst_structure_remove_fields (s, "format", "framerate", NULL);
      gst_structure_set_name (s, "video/x-raw");
    }

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:

  gst_object_unref (self);

  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
Example #6
/* This is essentially gst_base_src_negotiate_default() but the caps
 * are guaranteed to have a channel layout for > 2 channels
 */
static gboolean
gst_pulsesrc_negotiate (GstBaseSrc * basesrc)
{
  GstCaps *thiscaps;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  gboolean result = FALSE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps */
  peercaps = gst_pad_peer_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  if (peercaps) {
    /* get intersection */
    caps = gst_caps_intersect (thiscaps, peercaps);
    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
    gst_caps_unref (thiscaps);
    gst_caps_unref (peercaps);
  } else {
    /* no peer, work with our own caps then */
    caps = thiscaps;
  }
  if (caps) {
    /* take first (and best, since they are sorted) possibility */
    caps = gst_caps_make_writable (caps);
    gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        result = gst_pulsesrc_create_stream (GST_PULSESRC_CAST (basesrc), caps);
        if (result)
          result = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
Example #7
static GstCaps *
gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc, GstCaps *filter)
{
    HRESULT hres = S_OK;
    IBindCtx *lpbc = NULL;
    IMoniker *videom;
    DWORD dwEaten;
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (basesrc);
    gunichar2 *unidevice = NULL;

    if (src->caps) {
        return gst_caps_ref (src->caps);
    }

    if (!src->device) {
        src->device =
            gst_dshow_getdevice_from_devicename (&CLSID_VideoInputDeviceCategory,
                    &src->device_name);
        if (!src->device) {
            GST_ERROR ("No video device found.");
            return NULL;
        }
    }

    unidevice =
        g_utf8_to_utf16 (src->device, strlen (src->device), NULL, NULL, NULL);

    if (!src->video_cap_filter) {
        hres = CreateBindCtx (0, &lpbc);
        if (SUCCEEDED (hres)) {
            hres =
                MkParseDisplayName (lpbc, (LPCOLESTR) unidevice, &dwEaten, &videom);
            if (SUCCEEDED (hres)) {
                hres = videom->BindToObject (lpbc, NULL, IID_IBaseFilter,
                                             (LPVOID *) & src->video_cap_filter);
                videom->Release ();
            }
            lpbc->Release ();
        }
    }

    if (!src->caps) {
        src->caps = gst_caps_new_empty ();
    }

    if (src->video_cap_filter && gst_caps_is_empty (src->caps)) {
        /* get the capture pins supported types */
        IPin *capture_pin = NULL;
        IEnumPins *enumpins = NULL;
        HRESULT hres;

        hres = src->video_cap_filter->EnumPins (&enumpins);
        if (SUCCEEDED (hres)) {
            while (enumpins->Next (1, &capture_pin, NULL) == S_OK) {
                IKsPropertySet *pKs = NULL;
                hres =
                    capture_pin->QueryInterface (IID_IKsPropertySet, (LPVOID *) & pKs);
                if (SUCCEEDED (hres) && pKs) {
                    DWORD cbReturned;
                    GUID pin_category;
                    RPC_STATUS rpcstatus;

                    hres =
                        pKs->Get (AMPROPSETID_Pin,
                                  AMPROPERTY_PIN_CATEGORY, NULL, 0, &pin_category, sizeof (GUID),
                                  &cbReturned);

                    /* we only want capture pins */
                    if (UuidCompare (&pin_category, (UUID *) & PIN_CATEGORY_CAPTURE,
                                     &rpcstatus) == 0) {
                        {
                            GstCaps *caps =
                                gst_dshowvideosrc_getcaps_from_streamcaps (src, capture_pin);
                            if (caps) {
                                gst_caps_append (src->caps, caps);
                            } else {
                                caps = gst_dshowvideosrc_getcaps_from_enum_mediatypes (src, capture_pin);
                                if (caps)
                                    gst_caps_append (src->caps, caps);
                            }
                        }
                    }
                    pKs->Release ();
                }
                capture_pin->Release ();
            }
            enumpins->Release ();
        }
    }

    if (unidevice) {
        g_free (unidevice);
    }

    if (src->caps) {
        if (filter) {
            return gst_caps_intersect_full (filter, src->caps,
                                            GST_CAPS_INTERSECT_FIRST);
        } else {
            return gst_caps_ref (src->caps);
        }
    }

    return NULL;
}
Example #8
static GstCaps *
gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *template_caps;
  GstCaps *allowed_caps;
  GstCaps *caps, *icaps;
  gboolean append_unrestricted;
  guint i;

  allowed_caps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  if (allowed_caps == NULL)
    return NULL;

  template_caps =
      gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);

  if (gst_caps_is_any (allowed_caps)) {
    caps = gst_caps_ref (template_caps);
    goto done;
  }

  if (gst_caps_is_empty (allowed_caps)) {
    caps = gst_caps_ref (allowed_caps);
    goto done;
  }

  caps = gst_caps_new_empty ();

  append_unrestricted = FALSE;
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstStructure *s = gst_caps_get_structure (allowed_caps, i);
    GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
    const gchar *profile_level_id;

    profile_level_id = gst_structure_get_string (s, "profile-level-id");

    if (profile_level_id && strlen (profile_level_id) == 6) {
      const gchar *profile;
      const gchar *level;
      long int spsint;
      guint8 sps[3];

      spsint = strtol (profile_level_id, NULL, 16);
      sps[0] = spsint >> 16;
      sps[1] = spsint >> 8;
      sps[2] = spsint;

      profile = gst_codec_utils_h264_get_profile (sps, 3);
      level = gst_codec_utils_h264_get_level (sps, 3);

      if (profile && level) {
        GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
            profile, level);

        if (!strcmp (profile, "constrained-baseline"))
          gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
        else {
          GValue val = { 0, };
          GValue profiles = { 0, };

          g_value_init (&profiles, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          g_value_set_static_string (&val, profile);
          gst_value_list_append_value (&profiles, &val);

          g_value_set_static_string (&val, "constrained-baseline");
          gst_value_list_append_value (&profiles, &val);

          gst_structure_take_value (new_s, "profile", &profiles);
        }

        if (!strcmp (level, "1"))
          gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
        else {
          GValue levels = { 0, };
          GValue val = { 0, };
          int j;

          g_value_init (&levels, GST_TYPE_LIST);
          g_value_init (&val, G_TYPE_STRING);

          for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
            g_value_set_static_string (&val, all_levels[j]);
            gst_value_list_prepend_value (&levels, &val);
            if (!strcmp (level, all_levels[j]))
              break;
          }
          gst_structure_take_value (new_s, "level", &levels);
        }
      } else {
        /* Invalid profile-level-id means baseline */

        gst_structure_set (new_s,
            "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      }
    } else {
      /* No profile-level-id means baseline or unrestricted */

      gst_structure_set (new_s,
          "profile", G_TYPE_STRING, "constrained-baseline", NULL);
      append_unrestricted = TRUE;
    }

    caps = gst_caps_merge_structure (caps, new_s);
  }

  if (append_unrestricted) {
    caps =
        gst_caps_merge_structure (caps,
        gst_structure_new_empty ("video/x-h264"));
  }

  icaps = gst_caps_intersect (caps, template_caps);
  gst_caps_unref (caps);
  caps = icaps;

done:
  /* apply the downstream filter, if any, and drop our working refs */
  if (filter) {
    GstCaps *tmp;

    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  gst_caps_unref (template_caps);
  gst_caps_unref (allowed_caps);

  GST_LOG_OBJECT (payload, "returning caps %" GST_PTR_FORMAT, caps);

  return caps;
}
Example #9
static gboolean
gst_sbc_enc_set_format (GstAudioEncoder * audio_enc, GstAudioInfo * info)
{
  const gchar *allocation_method, *channel_mode;
  GstSbcEnc *enc = GST_SBC_ENC (audio_enc);
  GstStructure *s;
  GstCaps *caps, *filter_caps;
  GstCaps *output_caps = NULL;
  guint sampleframes_per_frame;

  enc->rate = GST_AUDIO_INFO_RATE (info);
  enc->channels = GST_AUDIO_INFO_CHANNELS (info);

  /* negotiate output format based on downstream caps restrictions */
  caps = gst_pad_get_allowed_caps (GST_AUDIO_ENCODER_SRC_PAD (enc));
  if (caps == GST_CAPS_NONE || gst_caps_is_empty (caps))
    goto failure;

  if (caps == NULL)
    caps = gst_static_pad_template_get_caps (&sbc_enc_src_factory);

  /* fixate output caps */
  filter_caps = gst_caps_new_simple ("audio/x-sbc", "rate", G_TYPE_INT,
      enc->rate, "channels", G_TYPE_INT, enc->channels, NULL);
  output_caps = gst_caps_intersect (caps, filter_caps);
  gst_caps_unref (filter_caps);

  if (output_caps == NULL || gst_caps_is_empty (output_caps)) {
    GST_WARNING_OBJECT (enc, "Couldn't negotiate output caps with input rate "
        "%d and input channels %d and allowed output caps %" GST_PTR_FORMAT,
        enc->rate, enc->channels, caps);
    goto failure;
  }

  gst_caps_unref (caps);
  caps = NULL;

  GST_DEBUG_OBJECT (enc, "fixating caps %" GST_PTR_FORMAT, output_caps);
  output_caps = gst_caps_truncate (output_caps);
  s = gst_caps_get_structure (output_caps, 0);
  if (enc->channels == 1)
    gst_structure_fixate_field_string (s, "channel-mode", "mono");
  else
    gst_structure_fixate_field_string (s, "channel-mode", "joint");

  gst_structure_fixate_field_nearest_int (s, "bitpool", 64);
  gst_structure_fixate_field_nearest_int (s, "blocks", 16);
  gst_structure_fixate_field_nearest_int (s, "subbands", 8);
  gst_structure_fixate_field_string (s, "allocation-method", "loudness");
  s = NULL;

  /* in case there's anything else left to fixate */
  output_caps = gst_caps_fixate (output_caps);
  gst_caps_set_simple (output_caps, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

  GST_INFO_OBJECT (enc, "output caps %" GST_PTR_FORMAT, output_caps);

  /* let's see what we fixated to */
  s = gst_caps_get_structure (output_caps, 0);
  gst_structure_get_int (s, "blocks", &enc->blocks);
  gst_structure_get_int (s, "subbands", &enc->subbands);
  gst_structure_get_int (s, "bitpool", &enc->bitpool);
  allocation_method = gst_structure_get_string (s, "allocation-method");
  channel_mode = gst_structure_get_string (s, "channel-mode");

  /* We want channel-mode and channels coherent */
  if (enc->channels == 1) {
    if (g_strcmp0 (channel_mode, "mono") != 0) {
      GST_ERROR_OBJECT (enc, "Can't have channel-mode '%s' for 1 channel",
          channel_mode);
      goto failure;
    }
  } else {
    if (g_strcmp0 (channel_mode, "joint") != 0 &&
        g_strcmp0 (channel_mode, "stereo") != 0 &&
        g_strcmp0 (channel_mode, "dual") != 0) {
      GST_ERROR_OBJECT (enc, "Can't have channel-mode '%s' for 2 channels",
          channel_mode);
      goto failure;
    }
  }

  /* we want to be handed all available samples in handle_frame, but always
   * enough to encode a frame */
  sampleframes_per_frame = enc->blocks * enc->subbands;
  gst_audio_encoder_set_frame_samples_min (audio_enc, sampleframes_per_frame);
  gst_audio_encoder_set_frame_samples_max (audio_enc, sampleframes_per_frame);
  gst_audio_encoder_set_frame_max (audio_enc, 0);

  /* FIXME: what to do with left-over samples at the end? can we encode them? */
  gst_audio_encoder_set_hard_min (audio_enc, TRUE);

  /* and configure encoder based on the output caps we negotiated */
  if (enc->rate == 16000)
    enc->sbc.frequency = SBC_FREQ_16000;
  else if (enc->rate == 32000)
    enc->sbc.frequency = SBC_FREQ_32000;
  else if (enc->rate == 44100)
    enc->sbc.frequency = SBC_FREQ_44100;
  else if (enc->rate == 48000)
    enc->sbc.frequency = SBC_FREQ_48000;
  else
    goto failure;

  if (enc->blocks == 4)
    enc->sbc.blocks = SBC_BLK_4;
  else if (enc->blocks == 8)
    enc->sbc.blocks = SBC_BLK_8;
  else if (enc->blocks == 12)
    enc->sbc.blocks = SBC_BLK_12;
  else if (enc->blocks == 16)
    enc->sbc.blocks = SBC_BLK_16;
  else
    goto failure;

  enc->sbc.subbands = (enc->subbands == 4) ? SBC_SB_4 : SBC_SB_8;
  enc->sbc.bitpool = enc->bitpool;

  if (channel_mode == NULL || allocation_method == NULL)
    goto failure;

  if (strcmp (channel_mode, "joint") == 0)
    enc->sbc.mode = SBC_MODE_JOINT_STEREO;
  else if (strcmp (channel_mode, "stereo") == 0)
    enc->sbc.mode = SBC_MODE_STEREO;
  else if (strcmp (channel_mode, "dual") == 0)
    enc->sbc.mode = SBC_MODE_DUAL_CHANNEL;
  else if (strcmp (channel_mode, "mono") == 0)
    enc->sbc.mode = SBC_MODE_MONO;
  else if (strcmp (channel_mode, "auto") == 0)
    enc->sbc.mode = SBC_MODE_JOINT_STEREO;
  else
    goto failure;

  if (strcmp (allocation_method, "loudness") == 0)
    enc->sbc.allocation = SBC_AM_LOUDNESS;
  else if (strcmp (allocation_method, "snr") == 0)
    enc->sbc.allocation = SBC_AM_SNR;
  else
    goto failure;

  if (!gst_audio_encoder_set_output_format (audio_enc, output_caps))
    goto failure;

  return gst_audio_encoder_negotiate (audio_enc);

failure:
  if (output_caps)
    gst_caps_unref (output_caps);
  if (caps)
    gst_caps_unref (caps);
  return FALSE;
}
Example #10
/* Output buffer preparation ... if the buffer has no caps, and our allowed
 * output caps is fixed, then send the caps downstream, making sure caps are
 * sent before segment event.
 *
 * This ensures that caps event is sent if we can, so that pipelines like:
 *   gst-launch filesrc location=rawsamples.raw !
 *       audio/x-raw,format=S16LE,rate=48000,channels=2 ! alsasink
 * will work.
 */
static GstFlowReturn
gst_capsfilter_prepare_buf (GstBaseTransform * trans, GstBuffer * input,
    GstBuffer ** buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  /* always return the input as output buffer */
  *buf = input;

  if (!gst_pad_has_current_caps (trans->sinkpad)) {
    /* No caps. See if the output pad only supports fixed caps */
    GstCapsFilter *filter = GST_CAPSFILTER (trans);
    GstCaps *out_caps;
    GList *pending_events = filter->pending_events;

    GST_LOG_OBJECT (trans, "Input pad does not have caps");

    filter->pending_events = NULL;

    out_caps = gst_pad_get_current_caps (trans->srcpad);
    if (out_caps == NULL) {
      out_caps = gst_pad_get_allowed_caps (trans->srcpad);
      g_return_val_if_fail (out_caps != NULL, GST_FLOW_ERROR);
    }

    out_caps = gst_caps_simplify (out_caps);

    if (gst_caps_is_fixed (out_caps) && !gst_caps_is_empty (out_caps)) {
      GST_DEBUG_OBJECT (trans, "Have fixed output caps %"
          GST_PTR_FORMAT " to apply to srcpad", out_caps);

      if (!gst_pad_has_current_caps (trans->srcpad)) {
        if (gst_pad_set_caps (trans->srcpad, out_caps)) {
          if (pending_events) {
            GList *l;

            for (l = g_list_last (pending_events); l; l = l->prev) {
              GST_LOG_OBJECT (trans, "Forwarding %s event",
                  GST_EVENT_TYPE_NAME (l->data));
              GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans,
                  l->data);
            }
            g_list_free (pending_events);
            pending_events = NULL;
          }
        } else {
          ret = GST_FLOW_NOT_NEGOTIATED;
        }
      }

      g_list_free_full (pending_events, (GDestroyNotify) gst_event_unref);
      gst_caps_unref (out_caps);
    } else {
      gchar *caps_str = gst_caps_to_string (out_caps);

      GST_DEBUG_OBJECT (trans, "Cannot choose caps. Have unfixed output caps %"
          GST_PTR_FORMAT, out_caps);
      gst_caps_unref (out_caps);

      GST_ELEMENT_ERROR (trans, STREAM, FORMAT,
          ("Filter caps do not completely specify the output format"),
          ("Output caps are unfixed: %s", caps_str));

      g_free (caps_str);
      g_list_free_full (pending_events, (GDestroyNotify) gst_event_unref);

      ret = GST_FLOW_ERROR;
    }
  }

  return ret;
}
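For comparison, the gst-launch pipeline in the comment above corresponds to the following application-side fragment; a minimal sketch (variable names are illustrative, intended for an already initialised GStreamer program) of giving a capsfilter the fixed caps that describe the raw samples:

GstElement *capsfilter = gst_element_factory_make ("capsfilter", NULL);
GstCaps *fixed =
    gst_caps_from_string ("audio/x-raw,format=S16LE,rate=48000,channels=2");

/* restrict negotiation to exactly these caps, like the caps string given
 * on the gst-launch command line above */
g_object_set (capsfilter, "caps", fixed, NULL);
gst_caps_unref (fixed);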
Example #11
/**
 * __gst_video_element_proxy_getcaps:
 * @element: a #GstElement
 * @sinkpad: the element's sink #GstPad
 * @srcpad: the element's source #GstPad
 * @initial_caps: initial caps
 * @filter: filter caps
 *
 * Returns caps that express @initial_caps (or sink template caps if
 * @initial_caps == NULL) restricted to resolution/format/...
 * combinations supported by downstream elements (e.g. muxers).
 *
 * Returns: a #GstCaps owned by caller
 */
GstCaps *
__gst_video_element_proxy_getcaps (GstElement * element, GstPad * sinkpad,
    GstPad * srcpad, GstCaps * initial_caps, GstCaps * filter)
{
  GstCaps *templ_caps, *src_templ_caps;
  GstCaps *peer_caps;
  GstCaps *allowed;
  GstCaps *fcaps, *filter_caps;

  /* Allow downstream to specify width/height/framerate/PAR constraints
   * and forward them upstream for video converters to handle
   */
  templ_caps = initial_caps ? gst_caps_ref (initial_caps) :
      gst_pad_get_pad_template_caps (sinkpad);
  src_templ_caps = gst_pad_get_pad_template_caps (srcpad);
  if (filter && !gst_caps_is_any (filter)) {
    GstCaps *proxy_filter =
        __gst_video_element_proxy_caps (element, src_templ_caps, filter);

    peer_caps = gst_pad_peer_query_caps (srcpad, proxy_filter);
    gst_caps_unref (proxy_filter);
  } else {
    peer_caps = gst_pad_peer_query_caps (srcpad, NULL);
  }

  allowed = gst_caps_intersect_full (peer_caps, src_templ_caps,
      GST_CAPS_INTERSECT_FIRST);

  gst_caps_unref (src_templ_caps);
  gst_caps_unref (peer_caps);

  if (!allowed || gst_caps_is_any (allowed)) {
    fcaps = templ_caps;
    goto done;
  } else if (gst_caps_is_empty (allowed)) {
    fcaps = gst_caps_ref (allowed);
    goto done;
  }

  GST_LOG_OBJECT (element, "template caps %" GST_PTR_FORMAT, templ_caps);
  GST_LOG_OBJECT (element, "allowed caps %" GST_PTR_FORMAT, allowed);

  filter_caps = __gst_video_element_proxy_caps (element, templ_caps, allowed);

  fcaps = gst_caps_intersect (filter_caps, templ_caps);
  gst_caps_unref (filter_caps);
  gst_caps_unref (templ_caps);

  if (filter) {
    GST_LOG_OBJECT (element, "intersecting with %" GST_PTR_FORMAT, filter);
    filter_caps = gst_caps_intersect (fcaps, filter);
    gst_caps_unref (fcaps);
    fcaps = filter_caps;
  }

done:
  gst_caps_replace (&allowed, NULL);

  GST_LOG_OBJECT (element, "proxy caps %" GST_PTR_FORMAT, fcaps);

  return fcaps;
}
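A minimal usage sketch (element and function names are hypothetical) of how a video encoder's caps query handler might delegate to the helper above, passing NULL as @initial_caps so its sink template is used as the starting point:

static GstCaps *
my_video_enc_getcaps (GstVideoEncoder * enc, GstCaps * filter)
{
  /* sink template caps restricted to what downstream can actually handle */
  return __gst_video_element_proxy_getcaps (GST_ELEMENT (enc),
      GST_VIDEO_ENCODER_SINK_PAD (enc), GST_VIDEO_ENCODER_SRC_PAD (enc),
      NULL, filter);
}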
Example #12
static void
test_basic (const gchar * elem_name, const gchar * sink2, int count,
    check_cb cb)
{
  GstElement *rtpmux = NULL;
  GstPad *reqpad1 = NULL;
  GstPad *reqpad2 = NULL;
  GstPad *src1 = NULL;
  GstPad *src2 = NULL;
  GstPad *sink = NULL;
  GstBuffer *inbuf = NULL;
  GstCaps *src1caps = NULL;
  GstCaps *src2caps = NULL;
  GstCaps *sinkcaps = NULL;
  GstCaps *caps;
  GstSegment segment;
  int i;

  rtpmux = gst_check_setup_element (elem_name);

  reqpad1 = gst_element_get_request_pad (rtpmux, "sink_1");
  fail_unless (reqpad1 != NULL);
  reqpad2 = gst_element_get_request_pad (rtpmux, sink2);
  fail_unless (reqpad2 != NULL);
  sink = gst_check_setup_sink_pad_by_name (rtpmux, &sinktemplate, "src");

  src1 = gst_pad_new_from_static_template (&srctemplate, "src");
  src2 = gst_pad_new_from_static_template (&srctemplate, "src");
  fail_unless (gst_pad_link (src1, reqpad1) == GST_PAD_LINK_OK);
  fail_unless (gst_pad_link (src2, reqpad2) == GST_PAD_LINK_OK);
  gst_pad_set_query_function (src1, query_func);
  gst_pad_set_query_function (src2, query_func);
  gst_pad_set_query_function (sink, query_func);
  gst_pad_set_event_function (sink, event_func);
  g_object_set_data (G_OBJECT (src1), "caps", &src1caps);
  g_object_set_data (G_OBJECT (src2), "caps", &src2caps);
  g_object_set_data (G_OBJECT (sink), "caps", &sinkcaps);

  src1caps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 1, "ssrc", G_TYPE_UINT, 11, NULL);
  src2caps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 2, "ssrc", G_TYPE_UINT, 12, NULL);
  sinkcaps = gst_caps_new_simple ("application/x-rtp",
      "clock-rate", G_TYPE_INT, 3, "ssrc", G_TYPE_UINT, 13, NULL);

  caps = gst_pad_peer_query_caps (src1, NULL);
  fail_unless (gst_caps_is_empty (caps));
  gst_caps_unref (caps);

  gst_caps_set_simple (src2caps, "clock-rate", G_TYPE_INT, 3, NULL);
  caps = gst_pad_peer_query_caps (src1, NULL);
  gst_caps_unref (caps);

  g_object_set (rtpmux, "seqnum-offset", 100, "timestamp-offset", 1000,
      "ssrc", 55, NULL);

  fail_unless (gst_element_set_state (rtpmux,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
  gst_pad_set_active (sink, TRUE);
  gst_pad_set_active (src1, TRUE);
  gst_pad_set_active (src2, TRUE);

  fail_unless (gst_pad_push_event (src1,
          gst_event_new_stream_start ("stream1")));
  fail_unless (gst_pad_push_event (src2,
          gst_event_new_stream_start ("stream2")));

  gst_caps_set_simple (sinkcaps,
      "payload", G_TYPE_INT, 98, "seqnum-offset", G_TYPE_UINT, 100,
      "timestamp-offset", G_TYPE_UINT, 1000, "ssrc", G_TYPE_UINT, 66, NULL);
  caps = gst_caps_new_simple ("application/x-rtp",
      "payload", G_TYPE_INT, 98, "clock-rate", G_TYPE_INT, 3,
      "seqnum-offset", G_TYPE_UINT, 56, "timestamp-offset", G_TYPE_UINT, 57,
      "ssrc", G_TYPE_UINT, 66, NULL);
  fail_unless (gst_pad_set_caps (src1, caps));
  gst_caps_unref (caps);

  caps = gst_pad_peer_query_caps (sink, NULL);
  fail_if (gst_caps_is_empty (caps));

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = 100000;
  fail_unless (gst_pad_push_event (src1, gst_event_new_segment (&segment)));
  segment.start = 0;
  fail_unless (gst_pad_push_event (src2, gst_event_new_segment (&segment)));


  for (i = 0; i < count; i++) {
    GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

    inbuf = gst_rtp_buffer_new_allocate (10, 0, 0);
    GST_BUFFER_PTS (inbuf) = i * 1000 + 100000;
    GST_BUFFER_DURATION (inbuf) = 1000;

    gst_rtp_buffer_map (inbuf, GST_MAP_WRITE, &rtpbuffer);

    gst_rtp_buffer_set_version (&rtpbuffer, 2);
    gst_rtp_buffer_set_payload_type (&rtpbuffer, 98);
    gst_rtp_buffer_set_ssrc (&rtpbuffer, 44);
    gst_rtp_buffer_set_timestamp (&rtpbuffer, 200 + i);
    gst_rtp_buffer_set_seq (&rtpbuffer, 2000 + i);
    gst_rtp_buffer_unmap (&rtpbuffer);
    fail_unless (gst_pad_push (src1, inbuf) == GST_FLOW_OK);

    if (buffers)
      fail_unless (GST_BUFFER_PTS (buffers->data) == i * 1000, "%lld",
          GST_BUFFER_PTS (buffers->data));

    cb (src2, i);

    g_list_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
    g_list_free (buffers);
    buffers = NULL;
  }


  gst_pad_set_active (sink, FALSE);
  gst_pad_set_active (src1, FALSE);
  gst_pad_set_active (src2, FALSE);
  fail_unless (gst_element_set_state (rtpmux,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS);
  gst_check_teardown_pad_by_name (rtpmux, "src");
  gst_object_unref (reqpad1);
  gst_object_unref (reqpad2);
  gst_check_teardown_pad_by_name (rtpmux, "sink_1");
  gst_check_teardown_pad_by_name (rtpmux, sink2);
  gst_element_release_request_pad (rtpmux, reqpad1);
  gst_element_release_request_pad (rtpmux, reqpad2);

  gst_caps_unref (caps);
  gst_caps_replace (&src1caps, NULL);
  gst_caps_replace (&src2caps, NULL);
  gst_caps_replace (&sinkcaps, NULL);

  gst_check_teardown_element (rtpmux);
}
Example #13
static gboolean
gst_ffmpegaudenc_set_format (GstAudioEncoder * encoder, GstAudioInfo * info)
{
  GstFFMpegAudEnc *ffmpegaudenc = (GstFFMpegAudEnc *) encoder;
  GstCaps *other_caps;
  GstCaps *allowed_caps;
  GstCaps *icaps;
  gsize frame_size;
  GstFFMpegAudEncClass *oclass =
      (GstFFMpegAudEncClass *) G_OBJECT_GET_CLASS (ffmpegaudenc);

  /* close old session */
  if (ffmpegaudenc->opened) {
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    ffmpegaudenc->opened = FALSE;
    if (avcodec_get_context_defaults3 (ffmpegaudenc->context,
            oclass->in_plugin) < 0) {
      GST_DEBUG_OBJECT (ffmpegaudenc, "Failed to set context defaults");
      return FALSE;
    }
  }

  /* if we set it in _getcaps we should set it also in _link */
  ffmpegaudenc->context->strict_std_compliance = ffmpegaudenc->compliance;

  /* user defined properties */
  if (ffmpegaudenc->bitrate > 0) {
    GST_INFO_OBJECT (ffmpegaudenc, "Setting avcontext to bitrate %d",
        ffmpegaudenc->bitrate);
    ffmpegaudenc->context->bit_rate = ffmpegaudenc->bitrate;
    ffmpegaudenc->context->bit_rate_tolerance = ffmpegaudenc->bitrate;
  } else {
    GST_INFO_OBJECT (ffmpegaudenc, "Using avcontext default bitrate %d",
        ffmpegaudenc->context->bit_rate);
  }

  /* RTP payload used for GOB production (for Asterisk) */
  if (ffmpegaudenc->rtp_payload_size) {
    ffmpegaudenc->context->rtp_payload_size = ffmpegaudenc->rtp_payload_size;
  }

  /* some other defaults */
  ffmpegaudenc->context->rc_strategy = 2;
  ffmpegaudenc->context->b_frame_strategy = 0;
  ffmpegaudenc->context->coder_type = 0;
  ffmpegaudenc->context->context_model = 0;
  ffmpegaudenc->context->scenechange_threshold = 0;

  /* fetch pix_fmt and so on */
  gst_ffmpeg_audioinfo_to_context (info, ffmpegaudenc->context);
  if (!ffmpegaudenc->context->time_base.den) {
    ffmpegaudenc->context->time_base.den = GST_AUDIO_INFO_RATE (info);
    ffmpegaudenc->context->time_base.num = 1;
    ffmpegaudenc->context->ticks_per_frame = 1;
  }

  if (ffmpegaudenc->context->channel_layout) {
    gst_ffmpeg_channel_layout_to_gst (ffmpegaudenc->context->channel_layout,
        ffmpegaudenc->context->channels, ffmpegaudenc->ffmpeg_layout);
    ffmpegaudenc->needs_reorder =
        (memcmp (ffmpegaudenc->ffmpeg_layout, info->position,
            sizeof (GstAudioChannelPosition) *
            ffmpegaudenc->context->channels) != 0);
  }

  /* open codec */
  if (gst_ffmpeg_avcodec_open (ffmpegaudenc->context, oclass->in_plugin) < 0) {
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    GST_DEBUG_OBJECT (ffmpegaudenc, "avenc_%s: Failed to open FFMPEG codec",
        oclass->in_plugin->name);
    if (avcodec_get_context_defaults3 (ffmpegaudenc->context,
            oclass->in_plugin) < 0)
      GST_DEBUG_OBJECT (ffmpegaudenc, "Failed to set context defaults");

    if ((oclass->in_plugin->capabilities & CODEC_CAP_EXPERIMENTAL) &&
        ffmpegaudenc->compliance != GST_FFMPEG_EXPERIMENTAL) {
      GST_ELEMENT_ERROR (ffmpegaudenc, LIBRARY, SETTINGS,
          ("Codec is experimental, but settings don't allow encoders to "
              "produce output of experimental quality"),
          ("This codec may not create output that is conformant to the specs "
              "or of good quality. If you must use it anyway, set the "
              "compliance property to experimental"));
    }
    return FALSE;
  }

  /* some codecs support more than one format, first auto-choose one */
  GST_DEBUG_OBJECT (ffmpegaudenc, "picking an output format ...");
  allowed_caps = gst_pad_get_allowed_caps (GST_AUDIO_ENCODER_SRC_PAD (encoder));
  if (!allowed_caps) {
    GST_DEBUG_OBJECT (ffmpegaudenc, "... but no peer, using template caps");
    /* we need to copy because get_allowed_caps returns a ref, and
     * get_pad_template_caps doesn't */
    allowed_caps =
        gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (encoder));
  }
  GST_DEBUG_OBJECT (ffmpegaudenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id,
      oclass->in_plugin->type, allowed_caps, ffmpegaudenc->context);

  /* try to set this caps on the other side */
  other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
      ffmpegaudenc->context, TRUE);

  if (!other_caps) {
    gst_caps_unref (allowed_caps);
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    GST_DEBUG ("Unsupported codec - no caps found");
    if (avcodec_get_context_defaults3 (ffmpegaudenc->context,
            oclass->in_plugin) < 0)
      GST_DEBUG_OBJECT (ffmpegaudenc, "Failed to set context defaults");
    return FALSE;
  }

  icaps = gst_caps_intersect (allowed_caps, other_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (other_caps);
  if (gst_caps_is_empty (icaps)) {
    gst_caps_unref (icaps);
    return FALSE;
  }
  icaps = gst_caps_truncate (icaps);

  if (!gst_audio_encoder_set_output_format (GST_AUDIO_ENCODER (ffmpegaudenc),
          icaps)) {
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    gst_caps_unref (icaps);
    if (avcodec_get_context_defaults3 (ffmpegaudenc->context,
            oclass->in_plugin) < 0)
      GST_DEBUG_OBJECT (ffmpegaudenc, "Failed to set context defaults");
    return FALSE;
  }
  gst_caps_unref (icaps);

  frame_size = ffmpegaudenc->context->frame_size;
  if (frame_size > 1) {
    gst_audio_encoder_set_frame_samples_min (GST_AUDIO_ENCODER (ffmpegaudenc),
        frame_size);
    gst_audio_encoder_set_frame_samples_max (GST_AUDIO_ENCODER (ffmpegaudenc),
        frame_size);
    gst_audio_encoder_set_frame_max (GST_AUDIO_ENCODER (ffmpegaudenc), 1);
  } else {
    gst_audio_encoder_set_frame_samples_min (GST_AUDIO_ENCODER (ffmpegaudenc),
        0);
    gst_audio_encoder_set_frame_samples_max (GST_AUDIO_ENCODER (ffmpegaudenc),
        0);
    gst_audio_encoder_set_frame_max (GST_AUDIO_ENCODER (ffmpegaudenc), 0);
  }

  /* success! */
  ffmpegaudenc->opened = TRUE;

  return TRUE;
}
Example #14
static GstCaps *
gst_waveform_sink_getcaps (GstBaseSink * bsink)
{
  GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (bsink);
  MMRESULT mmresult;
  WAVEOUTCAPS wocaps;
  GstCaps *caps, *caps_temp;

  /* return the cached caps if already defined */
  if (wfsink->cached_caps) {
    return gst_caps_ref (wfsink->cached_caps);
  }

  /* get the default device caps */
  mmresult = waveOutGetDevCaps (WAVE_MAPPER, &wocaps, sizeof (wocaps));
  if (mmresult != MMSYSERR_NOERROR) {
    waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
    GST_ELEMENT_ERROR (wfsink, RESOURCE, SETTINGS,
        ("gst_waveform_sink_getcaps: waveOutGetDevCaps failed error=>%s",
            wfsink->error_string), (NULL));
    return NULL;
  }

  caps = gst_caps_new_empty ();

  /* create a caps for all wave formats supported by the device 
     starting with the best quality format */
  if (wocaps.dwFormats & WAVE_FORMAT_96S16) {
    caps_temp = gst_waveform_sink_create_caps (96000, 2, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_96S08) {
    caps_temp = gst_waveform_sink_create_caps (96000, 2, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_96M16) {
    caps_temp = gst_waveform_sink_create_caps (96000, 1, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_96M08) {
    caps_temp = gst_waveform_sink_create_caps (96000, 1, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_4S16) {
    caps_temp = gst_waveform_sink_create_caps (44100, 2, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_4S08) {
    caps_temp = gst_waveform_sink_create_caps (44100, 2, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_4M16) {
    caps_temp = gst_waveform_sink_create_caps (44100, 1, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_4M08) {
    caps_temp = gst_waveform_sink_create_caps (44100, 1, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_2S16) {
    caps_temp = gst_waveform_sink_create_caps (22050, 2, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_2S08) {
    caps_temp = gst_waveform_sink_create_caps (22050, 2, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_2M16) {
    caps_temp = gst_waveform_sink_create_caps (22050, 1, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_2M08) {
    caps_temp = gst_waveform_sink_create_caps (22050, 1, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_1S16) {
    caps_temp = gst_waveform_sink_create_caps (11025, 2, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_1S08) {
    caps_temp = gst_waveform_sink_create_caps (11025, 2, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_1M16) {
    caps_temp = gst_waveform_sink_create_caps (11025, 1, 16);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }
  if (wocaps.dwFormats & WAVE_FORMAT_1M08) {
    caps_temp = gst_waveform_sink_create_caps (11025, 1, 8);
    if (caps_temp) {
      gst_caps_append (caps, caps_temp);
    }
  }

  if (gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    caps = NULL;
  } else {
    wfsink->cached_caps = gst_caps_ref (caps);
  }

  GST_CAT_LOG_OBJECT (waveformsink_debug, wfsink, "Returning caps %s",
      gst_caps_to_string (caps));

  return caps;
}
Example #15
static gboolean
gst_v4l2src_negotiate (GstBaseSrc * basesrc)
{
  GstCaps *thiscaps;

  GstCaps *caps = NULL;

  GstCaps *peercaps = NULL;

  gboolean result = FALSE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps */
  peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  if (peercaps && !gst_caps_is_any (peercaps)) {
    GstCaps *icaps = NULL;
    int i;

    /* Prefer the first caps we are compatible with that the peer proposed */
    for (i = 0; i < gst_caps_get_size (peercaps); i++) {
      /* get intersection */
      GstCaps *ipcaps = gst_caps_copy_nth (peercaps, i);

      GST_DEBUG_OBJECT (basesrc, "peer: %" GST_PTR_FORMAT, ipcaps);

      icaps = gst_caps_intersect (thiscaps, ipcaps);
      gst_caps_unref (ipcaps);

      if (!gst_caps_is_empty (icaps))
        break;

      gst_caps_unref (icaps);
      icaps = NULL;
    }

    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, icaps);
    if (icaps) {
      /* If there are multiple intersections pick the one with the smallest
       * resolution strictly bigger than the first peer caps */
      if (gst_caps_get_size (icaps) > 1) {
        GstStructure *s = gst_caps_get_structure (peercaps, 0);

        int best = 0;

        int twidth, theight;

        int width = G_MAXINT, height = G_MAXINT;

        if (gst_structure_get_int (s, "width", &twidth)
            && gst_structure_get_int (s, "height", &theight)) {

          /* Walk the structure backwards to get the first entry of the
           * smallest resolution bigger than (or equal to) the preferred resolution
           */
          for (i = gst_caps_get_size (icaps) - 1; i >= 0; i--) {
            GstStructure *is = gst_caps_get_structure (icaps, i);

            int w, h;

            if (gst_structure_get_int (is, "width", &w)
                && gst_structure_get_int (is, "height", &h)) {
              if (w >= twidth && w <= width && h >= theight && h <= height) {
                width = w;
                height = h;
                best = i;
              }
            }
          }
        }

        caps = gst_caps_copy_nth (icaps, best);
        gst_caps_unref (icaps);
      } else {
        caps = icaps;
      }
    }
    gst_caps_unref (thiscaps);
    gst_caps_unref (peercaps);
  } else {
    /* no peer or peer have ANY caps, work with our own caps then */
    caps = thiscaps;
  }
  if (caps) {
    caps = gst_caps_make_writable (caps);
    gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
        result = TRUE;
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
Example #16
static CoglBool
cogl_gst_video_sink_parse_caps (GstCaps *caps,
                                CoglGstVideoSink *sink,
                                CoglBool save)
{
  CoglGstVideoSinkPrivate *priv = sink->priv;
  GstCaps *intersection;
  GstVideoInfo vinfo;
  CoglGstVideoFormat format;
  CoglBool bgr = FALSE;
  CoglGstRenderer *renderer;

  intersection = gst_caps_intersect (priv->caps, caps);
  if (gst_caps_is_empty (intersection))
    goto no_intersection;

  gst_caps_unref (intersection);

  if (!gst_video_info_from_caps (&vinfo, caps))
    goto unknown_format;

  switch (vinfo.finfo->format)
    {
    case GST_VIDEO_FORMAT_YV12:
      format = COGL_GST_YV12;
      break;
    case GST_VIDEO_FORMAT_I420:
      format = COGL_GST_I420;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      format = COGL_GST_AYUV;
      bgr = FALSE;
      break;
    case GST_VIDEO_FORMAT_RGB:
      format = COGL_GST_RGB24;
      bgr = FALSE;
      break;
    case GST_VIDEO_FORMAT_BGR:
      format = COGL_GST_RGB24;
      bgr = TRUE;
      break;
    case GST_VIDEO_FORMAT_RGBA:
      format = COGL_GST_RGB32;
      bgr = FALSE;
      break;
    case GST_VIDEO_FORMAT_BGRA:
      format = COGL_GST_RGB32;
      bgr = TRUE;
      break;
    default:
      goto unhandled_format;
    }

  renderer = cogl_gst_find_renderer_by_format (sink, format);

  if (G_UNLIKELY (renderer == NULL))
    goto no_suitable_renderer;

  GST_INFO_OBJECT (sink, "found the %s renderer", renderer->name);

  if (save)
    {
      priv->info = vinfo;

      priv->format = format;
      priv->bgr = bgr;

      priv->renderer = renderer;
    }

  return TRUE;


no_intersection:
  {
    GST_WARNING_OBJECT (sink,
        "Incompatible caps, don't intersect with %" GST_PTR_FORMAT, priv->caps);
    return FALSE;
  }

unknown_format:
  {
    GST_WARNING_OBJECT (sink, "Could not figure format of input caps");
    return FALSE;
  }

unhandled_format:
  {
    GST_ERROR_OBJECT (sink, "Provided caps aren't supported by clutter-gst");
    return FALSE;
  }

no_suitable_renderer:
  {
    GST_ERROR_OBJECT (sink, "could not find a suitable renderer");
    return FALSE;
  }
}
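The format handling above starts from gst_video_info_from_caps(); as a minimal standalone sketch (GStreamer 1.x), extracting the basic video parameters from fixed caps looks like this:

GstVideoInfo vinfo;

if (gst_video_info_from_caps (&vinfo, caps)) {
  gint width = GST_VIDEO_INFO_WIDTH (&vinfo);
  gint height = GST_VIDEO_INFO_HEIGHT (&vinfo);
  GstVideoFormat format = GST_VIDEO_INFO_FORMAT (&vinfo);

  GST_INFO ("parsed %dx%d %s video", width, height,
      gst_video_format_to_string (format));
}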
Example #17
/* only negotiate/query video overlay composition support for now */
static gboolean
gst_dvbsub_overlay_negotiate (GstDVBSubOverlay * overlay, GstCaps * caps)
{
  gboolean ret;
  gboolean attach = FALSE;
  gboolean caps_has_meta = TRUE;
  GstCapsFeatures *f;

  GST_DEBUG_OBJECT (overlay, "performing negotiation");

  if (!caps) {
    caps = gst_pad_get_current_caps (overlay->srcpad);
  } else {
    gst_caps_ref (caps);
  }

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Try to use the overlay meta if possible */
  f = gst_caps_get_features (caps, 0);

  /* if the caps doesn't have the overlay meta, we query if downstream
   * accepts it before trying the version without the meta
   * If upstream already is using the meta then we can only use it */
  if (!f
      || !gst_caps_features_contains (f,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)) {
    GstCaps *overlay_caps;
    GstCaps *peercaps;

    /* In this case we added the meta, but we can work without it
     * so preserve the original caps so we can use it as a fallback */
    overlay_caps = gst_caps_copy (caps);

    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (overlay->srcpad, NULL);
    caps_has_meta = gst_caps_can_intersect (peercaps, overlay_caps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (overlay, "Downstream accepts the overlay meta: %d",
        caps_has_meta);
    if (caps_has_meta) {
      gst_caps_unref (caps);
      caps = overlay_caps;

    } else {
      /* fallback to the original */
      gst_caps_unref (overlay_caps);
      caps_has_meta = FALSE;
    }

  }
  GST_DEBUG_OBJECT (overlay, "Using caps %" GST_PTR_FORMAT, caps);
  ret = gst_pad_set_caps (overlay->srcpad, caps);

  if (ret) {
    GstQuery *query;

    /* find supported meta */
    query = gst_query_new_allocation (caps, FALSE);

    if (!gst_pad_peer_query (overlay->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (overlay, "ALLOCATION query failed");
    }

    if (caps_has_meta && gst_query_find_allocation_meta (query,
            GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
      attach = TRUE;

    overlay->attach_compo_to_buffer = attach;

    gst_query_unref (query);
  }
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    return FALSE;
  }
}
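The downstream check in the middle of the function above can also be written as a small standalone helper; a minimal sketch (GStreamer 1.x, helper name illustrative) of asking whether the peer would accept caps carrying the overlay-composition meta feature:

static gboolean
peer_accepts_overlay_meta (GstPad * srcpad, const GstCaps * caps)
{
  GstCaps *overlay_caps, *peercaps;
  GstCapsFeatures *f;
  gboolean ret;

  /* same caps, but with the composition-meta caps feature added */
  overlay_caps = gst_caps_copy (caps);
  f = gst_caps_get_features (overlay_caps, 0);
  gst_caps_features_add (f,
      GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

  peercaps = gst_pad_peer_query_caps (srcpad, NULL);
  ret = gst_caps_can_intersect (peercaps, overlay_caps);
  gst_caps_unref (peercaps);
  gst_caps_unref (overlay_caps);

  return ret;
}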
Example #18
static GstFlowReturn
gst_rsvg_decode_image (GstRsvgDec * rsvg, const guint8 * data, guint size,
    GstBuffer ** buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  cairo_t *cr;
  cairo_surface_t *surface;
  RsvgHandle *handle;
  GError *error = NULL;
  RsvgDimensionData dimension;
  gdouble scalex, scaley;
  const gchar *title = NULL, *comment = NULL;

  GST_LOG_OBJECT (rsvg, "parsing svg");

  handle = rsvg_handle_new_from_data (data, size, &error);
  if (!handle) {
    GST_ERROR_OBJECT (rsvg, "Failed to parse SVG image: %s", error->message);
    g_error_free (error);
    return GST_FLOW_ERROR;
  }

  title = rsvg_handle_get_title (handle);
  comment = rsvg_handle_get_desc (handle);

  if (title || comment) {
    GST_LOG_OBJECT (rsvg, "adding tags");

    if (!rsvg->pending_tags)
      rsvg->pending_tags = gst_tag_list_new ();

    if (title && *title)
      gst_tag_list_add (rsvg->pending_tags, GST_TAG_MERGE_REPLACE_ALL,
          GST_TAG_TITLE, title, NULL);
    if (comment && *comment)
      gst_tag_list_add (rsvg->pending_tags, GST_TAG_MERGE_REPLACE_ALL,
          GST_TAG_COMMENT, comment, NULL);
  }

  rsvg_handle_get_dimensions (handle, &dimension);
  if (rsvg->width != dimension.width || rsvg->height != dimension.height) {
    GstCaps *caps1, *caps2, *caps3;
    GstStructure *s;

    GST_LOG_OBJECT (rsvg, "resolution changed, updating caps");

    caps1 = gst_caps_copy (gst_pad_get_pad_template_caps (rsvg->srcpad));
    caps2 = gst_pad_peer_get_caps (rsvg->srcpad);
    if (caps2) {
      caps3 = gst_caps_intersect (caps1, caps2);
      gst_caps_unref (caps1);
      gst_caps_unref (caps2);
      caps1 = caps3;
      caps3 = NULL;
    }

    if (gst_caps_is_empty (caps1)) {
      GST_ERROR_OBJECT (rsvg, "Unable to negotiate a format");
      gst_caps_unref (caps1);
      g_object_unref (handle);
      return GST_FLOW_NOT_NEGOTIATED;
    }

    caps2 = gst_caps_copy (gst_pad_get_pad_template_caps (rsvg->srcpad));
    s = gst_caps_get_structure (caps2, 0);
    gst_structure_set (s, "width", G_TYPE_INT, dimension.width, "height",
        G_TYPE_INT, dimension.height, "framerate", GST_TYPE_FRACTION, 0, 1,
        NULL);
    caps3 = gst_caps_intersect (caps1, caps2);
    if (!gst_caps_is_empty (caps3)) {
      gst_caps_truncate (caps3);
      gst_pad_set_caps (rsvg->srcpad, caps3);
      gst_caps_unref (caps1);
      gst_caps_unref (caps2);
      gst_caps_unref (caps3);
      rsvg->width = dimension.width;
      rsvg->height = dimension.height;
    } else {
      gst_caps_unref (caps2);
      gst_caps_unref (caps3);
      gst_caps_truncate (caps1);

      s = gst_caps_get_structure (caps1, 0);
      gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

      if (!gst_caps_is_fixed (caps1)
          && (!gst_structure_fixate_field_nearest_int (s, "width",
                  dimension.width)
              || !gst_structure_fixate_field_nearest_int (s, "height",
                  dimension.height))) {
        g_object_unref (handle);
        GST_ERROR_OBJECT (rsvg, "Failed to fixate caps");
        return GST_FLOW_NOT_NEGOTIATED;
      }
      gst_pad_set_caps (rsvg->srcpad, caps1);
      gst_structure_get_int (s, "width", &rsvg->width);
      gst_structure_get_int (s, "height", &rsvg->height);
      gst_caps_unref (caps1);
    }
  }

  if ((ret = gst_pad_alloc_buffer_and_set_caps (rsvg->srcpad,
              GST_BUFFER_OFFSET_NONE,
              rsvg->width * rsvg->height * 4,
              GST_PAD_CAPS (rsvg->srcpad), buffer)) != GST_FLOW_OK) {
    g_object_unref (handle);
    GST_ERROR_OBJECT (rsvg, "Buffer allocation failed %s",
        gst_flow_get_name (ret));
    return ret;
  }

  GST_LOG_OBJECT (rsvg, "render image at %d x %d", rsvg->height, rsvg->width);

  surface =
      cairo_image_surface_create_for_data (GST_BUFFER_DATA (*buffer),
      CAIRO_FORMAT_ARGB32, rsvg->width, rsvg->height, rsvg->width * 4);

  cr = cairo_create (surface);
  cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR);
  cairo_set_source_rgba (cr, 1.0, 1.0, 1.0, 0.0);
  cairo_paint (cr);
  cairo_set_operator (cr, CAIRO_OPERATOR_OVER);
  cairo_set_source_rgba (cr, 0.0, 0.0, 0.0, 1.0);

  scalex = scaley = 1.0;
  if (rsvg->width != dimension.width) {
    scalex = ((gdouble) rsvg->width) / ((gdouble) dimension.width);
  }
  if (rsvg->height != dimension.height) {
    scaley = ((gdouble) rsvg->height) / ((gdouble) dimension.height);
  }
  cairo_scale (cr, scalex, scaley);
  rsvg_handle_render_cairo (handle, cr);

  g_object_unref (handle);
  cairo_destroy (cr);
  cairo_surface_destroy (surface);

  /* Now unpremultiply Cairo's ARGB to match GStreamer's */
  gst_rsvg_decode_unpremultiply (GST_BUFFER_DATA (*buffer), rsvg->width,
      rsvg->height);

  return ret;
}
Example #19
0
int
main (int argc, char **argv)
{
    static const GOptionEntry test_goptions[] = {
        {   "videosink", '\0', 0, G_OPTION_ARG_STRING, &opt_videosink_str,
            "videosink to use (default: " DEFAULT_VIDEOSINK ")", NULL
        },
        {   "caps", '\0', 0, G_OPTION_ARG_STRING, &opt_filtercaps_str,
            "filter caps to narrow down formats to test", NULL
        },
        {   "with-ffmpegcolorspace", '\0', 0, G_OPTION_ARG_NONE,
            &opt_with_ffmpegcolorspace,
            "whether to add an ffmpegcolorspace element in front of the sink",
            NULL
        },
        {NULL, '\0', 0, 0, NULL, NULL, NULL}
    };
    GOptionContext *ctx;
    GError *opt_err = NULL;

    GstElement *pipeline, *src, *filter1, *crop, *scale, *filter2, *csp, *sink;
    GMainLoop *loop;
    GstCaps *filter_caps = NULL;
    GList *caps_list, *l;

    if (!g_thread_supported ())
        g_thread_init (NULL);

    /* command line option parsing */
    ctx = g_option_context_new ("");
    g_option_context_add_group (ctx, gst_init_get_option_group ());
    g_option_context_add_main_entries (ctx, test_goptions, NULL);

    if (!g_option_context_parse (ctx, &argc, &argv, &opt_err)) {
        g_error ("Error parsing command line options: %s", opt_err->message);
        return -1;
    }

    GST_DEBUG_CATEGORY_INIT (videocrop_test_debug, "videocroptest", 0, "vctest");

    loop = g_main_loop_new (NULL, FALSE);

    pipeline = gst_pipeline_new ("pipeline");
    src = gst_element_factory_make ("videotestsrc", "videotestsrc");
    g_assert (src != NULL);
    filter1 = gst_element_factory_make ("capsfilter", "capsfilter1");
    g_assert (filter1 != NULL);
    crop = gst_element_factory_make ("videocrop", "videocrop");
    g_assert (crop != NULL);
    scale = gst_element_factory_make ("videoscale", "videoscale");
    g_assert (scale != NULL);
    filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
    g_assert (filter2 != NULL);

    if (opt_with_ffmpegcolorspace) {
        g_print ("Adding ffmpegcolorspace\n");
        csp = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
    } else {
        csp = gst_element_factory_make ("identity", "colorspace");
    }
    g_assert (csp != NULL);

    if (opt_filtercaps_str) {
        filter_caps = gst_caps_from_string (opt_filtercaps_str);
        if (filter_caps == NULL) {
            g_error ("Invalid filter caps string '%s'", opt_filtercaps_str);
        } else {
            g_print ("Using filter caps '%s'\n", opt_filtercaps_str);
        }
    }

    if (opt_videosink_str) {
        g_print ("Trying videosink '%s' ...", opt_videosink_str);
        sink = gst_element_factory_make (opt_videosink_str, "sink");
        g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
    } else {
        sink = NULL;
    }

    if (sink == NULL) {
        g_print ("Trying videosink '%s' ...", DEFAULT_VIDEOSINK);
        sink = gst_element_factory_make (DEFAULT_VIDEOSINK, "sink");
        g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
    }
    if (sink == NULL) {
        g_print ("Trying videosink '%s' ...", "xvimagesink");
        sink = gst_element_factory_make ("xvimagesink", "sink");
        g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
    }
    if (sink == NULL) {
        g_print ("Trying videosink '%s' ...", "ximagesink");
        sink = gst_element_factory_make ("ximagesink", "sink");
        g_print ("%s\n", (sink) ? "ok" : "element couldn't be created");
    }

    g_assert (sink != NULL);

    gst_bin_add_many (GST_BIN (pipeline), src, filter1, crop, scale, filter2,
                      csp, sink, NULL);

    if (!gst_element_link (src, filter1))
        g_error ("Failed to link videotestsrc to capsfilter1");

    if (!gst_element_link (filter1, crop))
        g_error ("Failed to link capsfilter1 to videocrop");

    if (!gst_element_link (crop, scale))
        g_error ("Failed to link videocrop to videoscale");

    if (!gst_element_link (scale, filter2))
        g_error ("Failed to link videoscale to capsfilter2");

    if (!gst_element_link (filter2, csp))
        g_error ("Failed to link capsfilter2 to ffmpegcolorspace");

    if (!gst_element_link (csp, sink))
        g_error ("Failed to link ffmpegcolorspace to video sink");

    caps_list = video_crop_get_test_caps (crop);
    for (l = caps_list; l != NULL; l = l->next) {
        GstStateChangeReturn ret;
        GstCaps *caps, *out_caps;
        gboolean skip = FALSE;
        gchar *s;

        if (filter_caps) {
            GstCaps *icaps;

            icaps = gst_caps_intersect (filter_caps, GST_CAPS (l->data));
            skip = gst_caps_is_empty (icaps);
            gst_caps_unref (icaps);
        }

        /* this is the size of our window (stays fixed) */
        out_caps = gst_caps_copy (GST_CAPS (l->data));
        gst_structure_set (gst_caps_get_structure (out_caps, 0), "width",
                           G_TYPE_INT, OUT_WIDTH, "height", G_TYPE_INT, OUT_HEIGHT, NULL);

        g_object_set (filter2, "caps", out_caps, NULL);

        /* filter1 gets these too to prevent videotestsrc from renegotiating */
        g_object_set (filter1, "caps", out_caps, NULL);
        gst_caps_unref (out_caps);

        caps = gst_caps_copy (GST_CAPS (l->data));
        GST_INFO ("testing format: %" GST_PTR_FORMAT, caps);

        s = gst_caps_to_string (caps);

        if (skip) {
            g_print ("Skipping format: %s\n", s);
            g_free (s);
            continue;
        }

        g_print ("Format: %s\n", s);

        caps = gst_caps_make_writable (caps);

        /* FIXME: check return values */
        ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
        if (ret != GST_STATE_CHANGE_FAILURE) {
            ret = gst_element_get_state (pipeline, NULL, NULL, -1);

            if (ret != GST_STATE_CHANGE_FAILURE) {
                test_with_caps (src, crop, caps);
            } else {
                g_print ("Format: %s not supported (failed to go to PLAYING)\n", s);
            }
        } else {
            g_print ("Format: %s not supported\n", s);
        }

        gst_element_set_state (pipeline, GST_STATE_NULL);

        gst_caps_unref (caps);
        g_free (s);
    }

    g_list_foreach (caps_list, (GFunc) gst_caps_unref, NULL);
    g_list_free (caps_list);

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return 0;
}
Example #20
0
/* Output buffer preparation... if the buffer has no caps, and
 * our allowed output caps is fixed, then give the caps to the
 * buffer.
 * This ensures that outgoing buffers have caps if we can, so
 * that pipelines like:
 *   gst-launch filesrc location=rawsamples.raw !
 *       audio/x-raw-int,width=16,depth=16,rate=48000,channels=2,
 *       endianness=4321,signed='(boolean)'true ! alsasink
 * will work.
 */
static GstFlowReturn
gst_capsfilter_prepare_buf (GstBaseTransform * trans, GstBuffer * input,
    gint size, GstCaps * caps, GstBuffer ** buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  if (GST_BUFFER_CAPS (input) != NULL) {
    /* Output buffer already has caps */
    GST_LOG_OBJECT (trans, "Input buffer already has caps (implicitely fixed)");
    /* FIXME : Move this behaviour to basetransform. The given caps are the ones
     * of the source pad, therefore our outgoing buffers should always have
     * those caps. */
    gst_buffer_set_caps (input, caps);
    gst_buffer_ref (input);
    *buf = input;
  } else {
    /* Buffer has no caps. See if the output pad only supports fixed caps */
    GstCaps *out_caps;

    out_caps = GST_PAD_CAPS (trans->srcpad);

    if (out_caps != NULL) {
      gst_caps_ref (out_caps);
    } else {
      out_caps = gst_pad_get_allowed_caps (trans->srcpad);
      g_return_val_if_fail (out_caps != NULL, GST_FLOW_ERROR);
    }

    out_caps = gst_caps_make_writable (out_caps);
    gst_caps_do_simplify (out_caps);

    if (gst_caps_is_fixed (out_caps) && !gst_caps_is_empty (out_caps)) {
      GST_DEBUG_OBJECT (trans, "Have fixed output caps %"
          GST_PTR_FORMAT " to apply to buffer with no caps", out_caps);
      if (gst_buffer_is_metadata_writable (input)) {
        gst_buffer_ref (input);
        *buf = input;
      } else {
        GST_DEBUG_OBJECT (trans, "Creating sub-buffer and setting caps");
        *buf = gst_buffer_create_sub (input, 0, GST_BUFFER_SIZE (input));
      }
      GST_BUFFER_CAPS (*buf) = out_caps;

      if (GST_PAD_CAPS (trans->srcpad) == NULL)
        gst_pad_set_caps (trans->srcpad, out_caps);
    } else {
      gchar *caps_str = gst_caps_to_string (out_caps);

      GST_DEBUG_OBJECT (trans, "Cannot choose caps. Have unfixed output caps %"
          GST_PTR_FORMAT, out_caps);
      gst_caps_unref (out_caps);

      ret = GST_FLOW_ERROR;
      GST_ELEMENT_ERROR (trans, STREAM, FORMAT,
          ("Filter caps do not completely specify the output format"),
          ("Output caps are unfixed: %s", caps_str));
      g_free (caps_str);
    }
  }

  return ret;
}
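A standalone sketch of the fixed-and-non-empty check that the capsfilter example above relies on before stamping caps onto a buffer. GStreamer 1.x is assumed and the caps strings are invented for illustration:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *fixed, *unfixed;

  gst_init (&argc, &argv);

  fixed = gst_caps_from_string ("audio/x-raw, rate=(int)48000, channels=(int)2");
  unfixed = gst_caps_from_string ("audio/x-raw, rate=(int)[ 8000, 96000 ]");

  /* only fixed, non-empty caps fully specify an output format */
  g_print ("fixed caps usable: %d\n",
      gst_caps_is_fixed (fixed) && !gst_caps_is_empty (fixed));
  g_print ("ranged caps usable: %d\n",
      gst_caps_is_fixed (unfixed) && !gst_caps_is_empty (unfixed));

  gst_caps_unref (fixed);
  gst_caps_unref (unfixed);
  return 0;
}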
static GstCaps *
gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
{
    GstCaps *caps = NULL;
    HRESULT hres = S_OK;
    int icount = 0;
    int isize = 0;
    VIDEO_STREAM_CONFIG_CAPS vscc;
    int i = 0;
    IAMStreamConfig *streamcaps = NULL;

    hres = pin->QueryInterface (IID_IAMStreamConfig, (LPVOID *) & streamcaps);
    if (FAILED (hres)) {
        GST_ERROR ("Failed to retrieve IAMStreamConfig (error=0x%x)", hres);
        return NULL;
    }

    streamcaps->GetNumberOfCapabilities (&icount, &isize);

    if (isize != sizeof (vscc)) {
        streamcaps->Release ();
        return NULL;
    }

    caps = gst_caps_new_empty ();

    for (i = 0; i < icount; i++) {

        GstCapturePinMediaType *pin_mediatype =
            gst_dshow_new_pin_mediatype_from_streamcaps (pin, i, streamcaps);

        if (pin_mediatype) {

            GstCaps *mediacaps = NULL;
            GstVideoFormat video_format =
                gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

            if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
                mediacaps = gst_dshow_new_video_caps (video_format, NULL,
                                                      pin_mediatype);

            } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
                                                  MEDIASUBTYPE_dvsd, FORMAT_VideoInfo)) {
                mediacaps =
                    gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
                                              "video/x-dv, systemstream=FALSE", pin_mediatype);

            } else if (gst_dshow_check_mediatype (pin_mediatype->mediatype,
                                                  MEDIASUBTYPE_dvsd, FORMAT_DvInfo)) {
                mediacaps =
                    gst_dshow_new_video_caps (GST_VIDEO_FORMAT_UNKNOWN,
                                              "video/x-dv, systemstream=TRUE", pin_mediatype);

                pin_mediatype->granularityWidth = 0;
                pin_mediatype->granularityHeight = 0;
            } else if(gst_dshow_check_mediatype(pin_mediatype->mediatype,
                                                MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
                mediacaps = gst_dshow_new_video_caps(GST_VIDEO_FORMAT_UNKNOWN,
                                                     "image/jpeg", pin_mediatype);
            }


            if (mediacaps) {
                src->pins_mediatypes =
                    g_list_append (src->pins_mediatypes, pin_mediatype);
                gst_caps_append (caps, mediacaps);
            } else {
                /* failed to convert dshow caps */
                gst_dshow_free_pin_mediatype (pin_mediatype);
            }
        }
    }

    streamcaps->Release ();

    if (caps && gst_caps_is_empty (caps)) {
        gst_caps_unref (caps);
        caps = NULL;
    }

    return caps;
}
Example #22
0
GstCaps *
gst_rrparser_fixate_src_caps (GstRRParser * rrparser, GstCaps * filter_caps)
{

  GstCaps *caps, *othercaps;

  GstStructure *structure;
  GstStructure *filter_structure;
  const gchar *stream_format;

  int filter_width = 0;
  int filter_height = 0;
  int filter_framerateN = 0;
  int filter_framerateD = 0;

  GST_DEBUG_OBJECT (rrparser, "Enter fixate_src_caps");

  /* Obtain the intersection between the src_pad caps and its peer caps */
  othercaps = gst_pad_get_allowed_caps(rrparser->src_pad);

  if (othercaps == NULL ||
      gst_caps_is_empty (othercaps) || gst_caps_is_any (othercaps)) {
    /* If we got nothing useful, use our template caps */
    caps =
        gst_caps_copy (gst_pad_get_pad_template_caps (rrparser->src_pad));
  } else {
    /* We got something useful */
    caps = othercaps;
  }

  /* Ensure that the caps are writable */
  caps = gst_caps_make_writable (caps);

  structure = gst_caps_get_structure (caps, 0);
  if (structure == NULL) {
    GST_ERROR_OBJECT (rrparser, "Failed to get src caps structure");
    return NULL;
  }

  /* Default the stream-format to avc when it is not specified */
  stream_format = gst_structure_get_string (structure, "stream-format");
  if (stream_format == NULL) {
    stream_format = "avc";
    gst_structure_set (structure, "stream-format", G_TYPE_STRING, stream_format, (char *)NULL);
  }

  /* Get caps filter fields */
  filter_structure = gst_caps_get_structure (filter_caps, 0);
  gst_structure_get_fraction (filter_structure, "framerate",
      &filter_framerateN, &filter_framerateD);
  gst_structure_get_int (filter_structure, "height", &filter_height);
  gst_structure_get_int (filter_structure, "width", &filter_width);

  /* Set the width, height and framerate */
  gst_structure_set (structure, "width", G_TYPE_INT, filter_width,
      (char *) NULL);
  gst_structure_set (structure, "height", G_TYPE_INT, filter_height,
      (char *) NULL);
  gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
      filter_framerateN, filter_framerateD, (char *) NULL);

  GST_DEBUG_OBJECT (rrparser, "Leave fixate_src_caps");
  return caps;
}
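The fallback rule used above (take the downstream allowed caps unless they are NULL, EMPTY or ANY, otherwise fall back to the template caps) can be condensed into a small helper. This is only a sketch with invented caps and a hypothetical helper name; it is not part of the rrparser code:

#include <gst/gst.h>

/* return a ref to @allowed if it is usable, otherwise a copy of @templ */
static GstCaps *
pick_negotiation_caps (GstCaps * allowed, GstCaps * templ)
{
  if (allowed == NULL || gst_caps_is_empty (allowed) ||
      gst_caps_is_any (allowed))
    return gst_caps_copy (templ);

  return gst_caps_ref (allowed);
}

int
main (int argc, char **argv)
{
  GstCaps *templ, *allowed, *res;
  gchar *s;

  gst_init (&argc, &argv);

  templ = gst_caps_from_string
      ("video/x-h264, stream-format=(string){ avc, byte-stream }");
  allowed = gst_caps_new_any ();        /* downstream did not constrain anything */

  res = pick_negotiation_caps (allowed, templ);
  s = gst_caps_to_string (res);
  g_print ("negotiating with: %s\n", s);
  g_free (s);

  gst_caps_unref (res);
  gst_caps_unref (allowed);
  gst_caps_unref (templ);
  return 0;
}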
Example #23
0
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstShapeWipe * self, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *ret, *tmp;
  guint i, n;

  if (gst_pad_has_current_caps (pad))
    return gst_pad_get_current_caps (pad);

  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_pad_get_pad_template_caps (self->video_sinkpad);
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
  GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);

  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw");
    gst_structure_remove_fields (s, "format", "framerate", NULL);

    if (self->vinfo.width && self->vinfo.height)
      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
          G_TYPE_INT, self->vinfo.height, NULL);

    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16), NULL);
    gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);

  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
Example #24
0
static GstCaps *
gst_dshowaudiosrc_getcaps_from_streamcaps (GstDshowAudioSrc * src, IPin * pin,
    IAMStreamConfig * streamcaps)
{
  GstCaps *caps = NULL;
  HRESULT hres = S_OK;
  int icount = 0;
  int isize = 0;
  AUDIO_STREAM_CONFIG_CAPS ascc;
  int i = 0;

  if (!streamcaps)
    return NULL;

  streamcaps->GetNumberOfCapabilities (&icount, &isize);

  if (isize != sizeof (ascc))
    return NULL;

  for (; i < icount; i++) {
    GstCapturePinMediaType *pin_mediatype = g_new0 (GstCapturePinMediaType, 1);

    pin->AddRef ();
    pin_mediatype->capture_pin = pin;

    hres = streamcaps->GetStreamCaps (i, &pin_mediatype->mediatype,
        (BYTE *) & ascc);
    if (hres == S_OK && pin_mediatype->mediatype) {
      GstCaps *mediacaps = NULL;

      if (!caps)
        caps = gst_caps_new_empty ();

      if (gst_dshow_check_mediatype (pin_mediatype->mediatype, MEDIASUBTYPE_PCM,
              FORMAT_WaveFormatEx)) {
        GstAudioFormat format = GST_AUDIO_FORMAT_UNKNOWN;
        WAVEFORMATEX *wavformat =
            (WAVEFORMATEX *) pin_mediatype->mediatype->pbFormat;

        switch (wavformat->wFormatTag) {
          case WAVE_FORMAT_PCM:
            format = gst_audio_format_build_integer (TRUE, G_BYTE_ORDER,
                wavformat->wBitsPerSample, wavformat->wBitsPerSample);
            break;
          default:
            break;
        }

        if (format != GST_AUDIO_FORMAT_UNKNOWN) {
          GstAudioInfo info;

          gst_audio_info_init (&info);
          gst_audio_info_set_format (&info, format, wavformat->nSamplesPerSec,
              wavformat->nChannels, NULL);
          mediacaps = gst_audio_info_to_caps (&info);
        }

        if (mediacaps) {
          src->pins_mediatypes =
              g_list_append (src->pins_mediatypes, pin_mediatype);
          gst_caps_append (caps, mediacaps);
        } else {
          gst_dshow_free_pin_mediatype (pin_mediatype);
        }
      } else {
        gst_dshow_free_pin_mediatype (pin_mediatype);
      }
    } else {
      gst_dshow_free_pin_mediatype (pin_mediatype);
    }
  }

  if (caps && gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    caps = NULL;
  }

  return caps;
}
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstCaps *video_caps = NULL;
  gboolean res = FALSE;

  GstCaps *allowed_caps, *output_caps, *src_caps;

  /* check if the input is non native */
  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
    if (!gst_structure_get_fourcc (structure, "format", &vpp->fourcc))
      goto done;
    vpp->native_input = FALSE;
    video_caps = gst_vdp_yuv_to_video_caps (caps);
    if (!video_caps)
      goto done;

    if (!vpp->vpool)
      vpp->vpool = gst_vdp_video_buffer_pool_new (vpp->device);

    gst_vdp_buffer_pool_set_caps (vpp->vpool, video_caps);

  } else {
    vpp->native_input = TRUE;
    video_caps = gst_caps_ref (caps);

    if (vpp->vpool) {
      g_object_unref (vpp->vpool);
      vpp->vpool = NULL;
    }
  }

  structure = gst_caps_get_structure (video_caps, 0);
  if (!gst_structure_get_int (structure, "width", &vpp->width) ||
      !gst_structure_get_int (structure, "height", &vpp->height) ||
      !gst_structure_get_int (structure, "chroma-type",
          (gint *) & vpp->chroma_type))
    goto done;


  /* get interlaced flag */
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  /* extract par */
  if (gst_structure_has_field_typed (structure, "pixel-aspect-ratio",
          GST_TYPE_FRACTION)) {
    gst_structure_get_fraction (structure, "pixel-aspect-ratio", &vpp->par_n,
        &vpp->par_d);
    vpp->got_par = TRUE;
  } else
    vpp->got_par = FALSE;

  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
  if (G_UNLIKELY (!allowed_caps))
    goto null_allowed_caps;
  if (G_UNLIKELY (gst_caps_is_empty (allowed_caps)))
    goto empty_allowed_caps;
  GST_DEBUG ("allowed_caps: %" GST_PTR_FORMAT, allowed_caps);

  output_caps = gst_vdp_video_to_output_caps (video_caps);
  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (output_caps);

  if (gst_caps_is_empty (src_caps))
    goto not_negotiated;

  gst_pad_fixate_caps (vpp->srcpad, src_caps);


  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_fraction_double (&fps_n, &fps_d);
      gst_caps_set_simple (src_caps, "framerate", GST_TYPE_FRACTION, fps_n,
          fps_d, NULL);
      vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
    }

    gst_caps_set_simple (src_caps, "interlaced", G_TYPE_BOOLEAN, FALSE, NULL);
  }

  GST_DEBUG ("src_caps: %" GST_PTR_FORMAT, src_caps);

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  gst_caps_unref (src_caps);

done:
  gst_object_unref (vpp);
  if (video_caps)
    gst_caps_unref (video_caps);

  return res;

null_allowed_caps:
  GST_ERROR_OBJECT (vpp, "Got null from gst_pad_get_allowed_caps");
  goto done;

empty_allowed_caps:
  GST_ERROR_OBJECT (vpp, "Got EMPTY caps from gst_pad_get_allowed_caps");

  gst_caps_unref (allowed_caps);
  goto done;

not_negotiated:
  gst_caps_unref (src_caps);
  GST_ERROR_OBJECT (vpp, "Couldn't find suitable output format");
  goto done;
}
Example #26
0
static gboolean
gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
    GstVideoCodecState * state)
{
  GstOMXH264Enc *self = GST_OMX_H264_ENC (enc);
  GstCaps *peercaps;
  OMX_PARAM_PORTDEFINITIONTYPE port_def;
  OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
  OMX_VIDEO_CONFIG_AVCINTRAPERIOD config_avcintraperiod;
#ifdef USE_OMX_TARGET_RPI
  OMX_CONFIG_PORTBOOLEANTYPE config_inline_header;
#endif
  OMX_ERRORTYPE err;
  const gchar *profile_string, *level_string;

#ifdef USE_OMX_TARGET_RPI
  GST_OMX_INIT_STRUCT (&config_inline_header);
  config_inline_header.nPortIndex =
      GST_OMX_VIDEO_ENC (self)->enc_out_port->index;
  err =
      gst_omx_component_get_parameter (GST_OMX_VIDEO_ENC (self)->enc,
      OMX_IndexParamBrcmVideoAVCInlineHeaderEnable, &config_inline_header);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "can't get OMX_IndexParamBrcmVideoAVCInlineHeaderEnable %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  if (self->inline_sps_pps_headers) {
    config_inline_header.bEnabled = OMX_TRUE;
  } else {
    config_inline_header.bEnabled = OMX_FALSE;
  }

  err =
      gst_omx_component_set_parameter (GST_OMX_VIDEO_ENC (self)->enc,
      OMX_IndexParamBrcmVideoAVCInlineHeaderEnable, &config_inline_header);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "can't set OMX_IndexParamBrcmVideoAVCInlineHeaderEnable %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }
#endif

  if (self->periodicty_idr !=
      GST_OMX_H264_VIDEO_ENC_PERIODICITY_OF_IDR_FRAMES_DEFAULT
      || self->interval_intraframes !=
      GST_OMX_H264_VIDEO_ENC_INTERVAL_OF_CODING_INTRA_FRAMES_DEFAULT) {


    GST_OMX_INIT_STRUCT (&config_avcintraperiod);
    config_avcintraperiod.nPortIndex =
        GST_OMX_VIDEO_ENC (self)->enc_out_port->index;
    err =
        gst_omx_component_get_parameter (GST_OMX_VIDEO_ENC (self)->enc,
        OMX_IndexConfigVideoAVCIntraPeriod, &config_avcintraperiod);
    if (err != OMX_ErrorNone) {
      GST_ERROR_OBJECT (self,
          "can't get OMX_IndexConfigVideoAVCIntraPeriod %s (0x%08x)",
          gst_omx_error_to_string (err), err);
      return FALSE;
    }

    GST_DEBUG_OBJECT (self, "default nPFrames:%u, nIDRPeriod:%u",
        (guint) config_avcintraperiod.nPFrames,
        (guint) config_avcintraperiod.nIDRPeriod);

    if (self->periodicty_idr !=
        GST_OMX_H264_VIDEO_ENC_PERIODICITY_OF_IDR_FRAMES_DEFAULT) {
      config_avcintraperiod.nIDRPeriod = self->periodicty_idr;
    }

    if (self->interval_intraframes !=
        GST_OMX_H264_VIDEO_ENC_INTERVAL_OF_CODING_INTRA_FRAMES_DEFAULT) {
      config_avcintraperiod.nPFrames = self->interval_intraframes;
    }

    err =
        gst_omx_component_set_parameter (GST_OMX_VIDEO_ENC (self)->enc,
        OMX_IndexConfigVideoAVCIntraPeriod, &config_avcintraperiod);
    if (err != OMX_ErrorNone) {
      GST_ERROR_OBJECT (self,
          "can't set OMX_IndexConfigVideoAVCIntraPeriod %s (0x%08x)",
          gst_omx_error_to_string (err), err);
      return FALSE;
    }
  }

  gst_omx_port_get_port_definition (GST_OMX_VIDEO_ENC (self)->enc_out_port,
      &port_def);
  port_def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
  err =
      gst_omx_port_update_port_definition (GST_OMX_VIDEO_ENC
      (self)->enc_out_port, &port_def);
  if (err != OMX_ErrorNone)
    return FALSE;

  GST_OMX_INIT_STRUCT (&param);
  param.nPortIndex = GST_OMX_VIDEO_ENC (self)->enc_out_port->index;

  err =
      gst_omx_component_get_parameter (GST_OMX_VIDEO_ENC (self)->enc,
      OMX_IndexParamVideoProfileLevelCurrent, &param);
  if (err != OMX_ErrorNone) {
    GST_WARNING_OBJECT (self,
        "Setting profile/level not supported by component");
    return TRUE;
  }

  peercaps = gst_pad_peer_query_caps (GST_VIDEO_ENCODER_SRC_PAD (enc),
      gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (enc)));
  if (peercaps) {
    GstStructure *s;

    if (gst_caps_is_empty (peercaps)) {
      gst_caps_unref (peercaps);
      GST_ERROR_OBJECT (self, "Empty caps");
      return FALSE;
    }

    s = gst_caps_get_structure (peercaps, 0);
    profile_string = gst_structure_get_string (s, "profile");
    if (profile_string) {
      if (g_str_equal (profile_string, "baseline")) {
        param.eProfile = OMX_VIDEO_AVCProfileBaseline;
      } else if (g_str_equal (profile_string, "main")) {
        param.eProfile = OMX_VIDEO_AVCProfileMain;
      } else if (g_str_equal (profile_string, "extended")) {
        param.eProfile = OMX_VIDEO_AVCProfileExtended;
      } else if (g_str_equal (profile_string, "high")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh;
      } else if (g_str_equal (profile_string, "high-10")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh10;
      } else if (g_str_equal (profile_string, "high-4:2:2")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh422;
      } else if (g_str_equal (profile_string, "high-4:4:4")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh444;
      } else {
        goto unsupported_profile;
      }
    }
    level_string = gst_structure_get_string (s, "level");
    if (level_string) {
      if (g_str_equal (level_string, "1")) {
        param.eLevel = OMX_VIDEO_AVCLevel1;
      } else if (g_str_equal (level_string, "1b")) {
        param.eLevel = OMX_VIDEO_AVCLevel1b;
      } else if (g_str_equal (level_string, "1.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel11;
      } else if (g_str_equal (level_string, "1.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel12;
      } else if (g_str_equal (level_string, "1.3")) {
        param.eLevel = OMX_VIDEO_AVCLevel13;
      } else if (g_str_equal (level_string, "2")) {
        param.eLevel = OMX_VIDEO_AVCLevel2;
      } else if (g_str_equal (level_string, "2.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel21;
      } else if (g_str_equal (level_string, "2.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel22;
      } else if (g_str_equal (level_string, "3")) {
        param.eLevel = OMX_VIDEO_AVCLevel3;
      } else if (g_str_equal (level_string, "3.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel31;
      } else if (g_str_equal (level_string, "3.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel32;
      } else if (g_str_equal (level_string, "4")) {
        param.eLevel = OMX_VIDEO_AVCLevel4;
      } else if (g_str_equal (level_string, "4.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel41;
      } else if (g_str_equal (level_string, "4.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel42;
      } else if (g_str_equal (level_string, "5")) {
        param.eLevel = OMX_VIDEO_AVCLevel5;
      } else if (g_str_equal (level_string, "5.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel51;
      } else {
        goto unsupported_level;
      }
    }
    gst_caps_unref (peercaps);
  }

  err =
      gst_omx_component_set_parameter (GST_OMX_VIDEO_ENC (self)->enc,
      OMX_IndexParamVideoProfileLevelCurrent, &param);
  if (err == OMX_ErrorUnsupportedIndex) {
    GST_WARNING_OBJECT (self,
        "Setting profile/level not supported by component");
  } else if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Error setting profile %u and level %u: %s (0x%08x)",
        (guint) param.eProfile, (guint) param.eLevel,
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  return TRUE;

unsupported_profile:
  GST_ERROR_OBJECT (self, "Unsupported profile %s", profile_string);
  gst_caps_unref (peercaps);
  return FALSE;

unsupported_level:
  GST_ERROR_OBJECT (self, "Unsupported level %s", level_string);
  gst_caps_unref (peercaps);
  return FALSE;
}
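Reading the requested profile and level out of downstream caps, as the encoder above does, boils down to fetching two strings from the first caps structure. A minimal sketch assuming GStreamer 1.x; the caps string is invented and stands in for a peer caps query on the encoder's src pad:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *peercaps;
  GstStructure *s;
  const gchar *profile, *level;

  gst_init (&argc, &argv);

  /* stand-in for gst_pad_peer_query_caps() on the encoder's src pad */
  peercaps = gst_caps_from_string
      ("video/x-h264, profile=(string)high, level=(string)4.1");

  if (!gst_caps_is_empty (peercaps)) {
    s = gst_caps_get_structure (peercaps, 0);
    profile = gst_structure_get_string (s, "profile");
    level = gst_structure_get_string (s, "level");
    g_print ("requested profile: %s, level: %s\n",
        profile ? profile : "(none)", level ? level : "(none)");
  }

  gst_caps_unref (peercaps);
  return 0;
}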
Example #27
0
/*
 * cheese_camera_device_get_caps:
 * @device: a #CheeseCameraDevice
 *
 * Probe the #GstCaps that the @device supports.
 */
static void
cheese_camera_device_get_caps (CheeseCameraDevice *device)
{
  CheeseCameraDevicePrivate *priv = device->priv;

  gchar               *pipeline_desc;
  GstElement          *pipeline;
  GstStateChangeReturn ret;
  GstMessage          *msg;
  GstBus              *bus;
  GError              *err = NULL;

  pipeline_desc = g_strdup_printf ("%s name=source device=%s ! fakesink",
                                   priv->src, priv->device_node);
  pipeline = gst_parse_launch (pipeline_desc, &err);
  if ((pipeline != NULL) && (err == NULL))
  {
    /* Start the pipeline and wait for max. 10 seconds for it to start up */
    gst_element_set_state (pipeline, GST_STATE_READY);
    ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

    /* Check if any error messages were posted on the bus */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR);
    gst_object_unref (bus);

    if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS))
    {
      GstElement *src;
      GstPad     *pad;
      GstCaps    *caps;

      src = gst_bin_get_by_name (GST_BIN (pipeline), "source");

      GST_LOG ("Device: %s (%s)\n", priv->name, priv->device_node);
      pad        = gst_element_get_static_pad (src, "src");
      caps       = gst_pad_get_allowed_caps (pad);

      gst_caps_unref (priv->caps);
      priv->caps = cheese_camera_device_filter_caps (device, caps, supported_formats);

      if (!gst_caps_is_empty (priv->caps))
        cheese_camera_device_update_format_table (device);
      else
      {
        g_set_error_literal (&priv->construct_error,
                             CHEESE_CAMERA_DEVICE_ERROR,
                             CHEESE_CAMERA_DEVICE_ERROR_UNSUPPORTED_CAPS,
                             _("Device capabilities not supported"));
      }

      gst_object_unref (pad);
      gst_caps_unref (caps);
      gst_object_unref (src);
    }
    else
    {
      if (msg)
      {
        gchar *dbg_info = NULL;
        gst_message_parse_error (msg, &err, &dbg_info);
        GST_WARNING ("Failed to start the capability probing pipeline");
        GST_WARNING ("Error from element %s: %s, %s",
                     GST_OBJECT_NAME (msg->src),
                     err->message,
                     (dbg_info) ? dbg_info : "no extra debug detail");
        g_error_free (err);
        err = NULL;

        /* construct_error is meant to be displayed in the UI
         * (although cheese does not currently display it).
         * err->message from GStreamer is too technical for that purpose;
         * the idea is to warn the user about an error and point
         * them to the logs for more details */
        g_set_error (&priv->construct_error,
                     CHEESE_CAMERA_DEVICE_ERROR,
                     CHEESE_CAMERA_DEVICE_ERROR_FAILED_INITIALIZATION,
                     _("Failed to initialize device %s for capability probing"),
                     priv->device_node);
      }
    }
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
  }

  if (err)
    g_error_free (err);

  g_free (pipeline_desc);
}
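The probing approach above (describe a tiny pipeline, bring it to READY, then ask the source pad what it can produce) also works with a plain videotestsrc, which makes for a self-contained sketch that needs no camera hardware. The element and pad names below are chosen for the sketch only:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline, *src;
  GstPad *pad;
  GstCaps *caps;
  gchar *s;
  GError *err = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("videotestsrc name=source ! fakesink", &err);
  if (pipeline == NULL) {
    g_printerr ("failed to build pipeline: %s\n", err->message);
    g_error_free (err);
    return 1;
  }

  /* bring the pipeline to READY and wait for the state change to finish */
  gst_element_set_state (pipeline, GST_STATE_READY);
  gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

  src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
  pad = gst_element_get_static_pad (src, "src");
  caps = gst_pad_get_allowed_caps (pad);

  if (caps != NULL && !gst_caps_is_empty (caps)) {
    s = gst_caps_to_string (caps);
    g_print ("source supports: %s\n", s);
    g_free (s);
  }

  if (caps)
    gst_caps_unref (caps);
  gst_object_unref (pad);
  gst_object_unref (src);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}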
Example #28
0
static gboolean
gst_vdp_sink_setcaps (GstBaseSink * bsink, GstCaps * caps)
{
  VdpSink *vdp_sink;
  GstCaps *allowed_caps;
  gboolean ret = TRUE;
  GstStructure *structure;
  GstCaps *intersection;
  gint new_width, new_height;
  const GValue *fps;

  vdp_sink = GST_VDP_SINK (bsink);

  GST_OBJECT_LOCK (vdp_sink);
  if (!vdp_sink->device) {
    GST_OBJECT_UNLOCK (vdp_sink);
    return FALSE;
  }
  GST_OBJECT_UNLOCK (vdp_sink);

  allowed_caps = gst_pad_get_caps (GST_BASE_SINK_PAD (bsink));
  GST_DEBUG_OBJECT (vdp_sink,
      "sinkconnect possible caps %" GST_PTR_FORMAT " with given caps %"
      GST_PTR_FORMAT, allowed_caps, caps);

  /* We intersect those caps with our template to make sure they are correct */
  intersection = gst_caps_intersect (allowed_caps, caps);
  gst_caps_unref (allowed_caps);

  GST_DEBUG_OBJECT (vdp_sink, "intersection returned %" GST_PTR_FORMAT,
      intersection);
  if (gst_caps_is_empty (intersection)) {
    gst_caps_unref (intersection);
    return FALSE;
  }

  gst_caps_unref (intersection);

  structure = gst_caps_get_structure (caps, 0);

  ret &= gst_structure_get_int (structure, "width", &new_width);
  ret &= gst_structure_get_int (structure, "height", &new_height);
  fps = gst_structure_get_value (structure, "framerate");
  ret &= (fps != NULL);
  if (!ret)
    return FALSE;

  GST_VIDEO_SINK_WIDTH (vdp_sink) = new_width;
  GST_VIDEO_SINK_HEIGHT (vdp_sink) = new_height;
  vdp_sink->fps_n = gst_value_get_fraction_numerator (fps);
  vdp_sink->fps_d = gst_value_get_fraction_denominator (fps);

  /* Notify application to set xwindow id now */
  g_mutex_lock (vdp_sink->flow_lock);
  if (!vdp_sink->window) {
    g_mutex_unlock (vdp_sink->flow_lock);
    gst_x_overlay_prepare_xwindow_id (GST_X_OVERLAY (vdp_sink));
  } else {
    g_mutex_unlock (vdp_sink->flow_lock);
  }

  /* Creating our window and our image */
  if (GST_VIDEO_SINK_WIDTH (vdp_sink) <= 0
      || GST_VIDEO_SINK_HEIGHT (vdp_sink) <= 0) {
    GST_ELEMENT_ERROR (vdp_sink, CORE, NEGOTIATION, (NULL),
        ("Invalid image size."));
    return FALSE;
  }

  g_mutex_lock (vdp_sink->flow_lock);
  if (!vdp_sink->window) {
    vdp_sink->window = gst_vdp_sink_window_new (vdp_sink,
        GST_VIDEO_SINK_WIDTH (vdp_sink), GST_VIDEO_SINK_HEIGHT (vdp_sink));
  }
  g_mutex_unlock (vdp_sink->flow_lock);

  return TRUE;
}
Example #29
0
static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL, *templ;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), filter);

  /* if we're just outputting to udpsink or fakesink or so, we should also
   * accept any input compatible with our sink template caps */
  if (!peercaps || gst_caps_is_any (peercaps))
    return
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));

  /* We basically need to differentiate two use-cases here: One where there's
   * a capsfilter after the payloader with caps created from an SDP; in this
   * case the filter caps are fixed and we want to signal to an encoder what
   * we want it to produce. The second case is simply payloader ! depayloader
   * where we are dealing with the depayloader's template caps. In this case
   * we should accept any input compatible with our sink template caps. */
  if (!gst_caps_is_fixed (peercaps))
    return
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));

  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  intersect = gst_caps_intersect (peercaps, templ);
  gst_caps_unref (peercaps);
  gst_caps_unref (templ);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
            v = FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in the H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
             * (Profile 1)
             * Baseline + Annexes I, J, L.4 and T
             */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2)
             * Baseline + Annex F
             */
            i = j = l = t = f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile
             * Baseline + Annexes I, J, T
             */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
             * Baseline + Annexes I, J, T, V, W.6.3.8,
             */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5)
             * Baseline + Annexes F, I, J, L.4, T, D, U
             */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6)
             * Baseline + Annexes F, I, J, L.4, T, D, U and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, K with arbitrary slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7)
             * Baseline + Annexes F, I, J, L.4, T, D, U,  W.6.3.11
             */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8)
             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }


        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);

          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }


        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);


        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else {
          caps = gst_caps_merge_structure (caps, new_s);
        }

      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        caps = gst_caps_merge_structure (caps, new_s);
      }
    } else {
      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
      /* FIXME: ffmpeg supports Annex K too, how do we express it?
       *   guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;

      str = gst_structure_get_string (s, "f");
      if (str && !strcmp (str, "1"))
        f = TRUE;

      str = gst_structure_get_string (s, "i");
      if (str && !strcmp (str, "1"))
        i = TRUE;

      str = gst_structure_get_string (s, "j");
      if (str && !strcmp (str, "1"))
        j = TRUE;

      str = gst_structure_get_string (s, "t");
      if (str && !strcmp (str, "1"))
        t = TRUE;

      if (f || i || j || t) {
        GValue list = { 0 };
        GValue vstr = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&vstr, G_TYPE_STRING);

        g_value_set_static_string (&vstr, "h263");
        gst_value_list_append_value (&list, &vstr);
        g_value_set_static_string (&vstr, "h263p");
        gst_value_list_append_value (&list, &vstr);
        g_value_unset (&vstr);

        gst_structure_set_value (new_s, "h263version", &list);
        g_value_unset (&list);
      } else {
        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
      }

      if (!f)
        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!i)
        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!j)
        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!t)
        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);


      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          if (!(xmax % 4) && !(ymax % 4) && mpi >= 1 && mpi <= 32) {
            caps = caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }

      if (added)
        gst_structure_free (new_s);
      else
        caps = gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

  return caps;
}
Example #30
0
static gboolean
gst_rtp_dtmf_src_negotiate (GstBaseSrc * basesrc)
{
  GstCaps *srccaps, *peercaps;
  GstRTPDTMFSrc *dtmfsrc = GST_RTP_DTMF_SRC (basesrc);
  gboolean ret;

  /* fill in the defaults, these properties cannot be negotiated. */
  srccaps = gst_caps_new_simple ("application/x-rtp",
      "media", G_TYPE_STRING, "audio",
      "encoding-name", G_TYPE_STRING, "TELEPHONE-EVENT", NULL);

  /* the peer caps can override some of the defaults */
  peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc));
  if (peercaps == NULL) {
    /* no peer caps, just add the other properties */
    gst_caps_set_simple (srccaps,
        "payload", G_TYPE_INT, dtmfsrc->pt,
        "ssrc", G_TYPE_UINT, dtmfsrc->current_ssrc,
        "clock-base", G_TYPE_UINT, dtmfsrc->ts_base,
        "clock-rate", G_TYPE_INT, dtmfsrc->clock_rate,
        "seqnum-base", G_TYPE_UINT, dtmfsrc->seqnum_base, NULL);

    GST_DEBUG_OBJECT (dtmfsrc, "no peer caps: %" GST_PTR_FORMAT, srccaps);
  } else {
    GstCaps *temp;
    GstStructure *s;
    const GValue *value;
    gint pt;
    gint clock_rate;

    /* the peer provides caps we can use to fixate; intersect with them.
     * The intersection always returns writable caps. */
    temp = gst_caps_intersect (srccaps, peercaps);
    gst_caps_unref (srccaps);
    gst_caps_unref (peercaps);

    if (!temp) {
      GST_DEBUG_OBJECT (dtmfsrc, "Could not get intersection with peer caps");
      return FALSE;
    }

    if (gst_caps_is_empty (temp)) {
      GST_DEBUG_OBJECT (dtmfsrc, "Intersection with peer caps is empty");
      gst_caps_unref (temp);
      return FALSE;
    }

    /* now fixate, start by taking the first caps */
    gst_caps_truncate (temp);
    srccaps = temp;

    /* get first structure */
    s = gst_caps_get_structure (srccaps, 0);

    if (gst_structure_get_int (s, "payload", &pt)) {
      /* use peer pt */
      dtmfsrc->pt = pt;
      GST_LOG_OBJECT (dtmfsrc, "using peer pt %d", pt);
    } else {
      if (gst_structure_has_field (s, "payload")) {
        /* can only fixate if there is a field */
        gst_structure_fixate_field_nearest_int (s, "payload", dtmfsrc->pt);
        gst_structure_get_int (s, "payload", &pt);
        GST_LOG_OBJECT (dtmfsrc, "using peer pt %d", pt);
      } else {
        /* no pt field, use the internal pt */
        pt = dtmfsrc->pt;
        gst_structure_set (s, "payload", G_TYPE_INT, pt, NULL);
        GST_LOG_OBJECT (dtmfsrc, "using internal pt %d", pt);
      }
    }

    if (gst_structure_get_int (s, "clock-rate", &clock_rate)) {
      dtmfsrc->clock_rate = clock_rate;
      GST_LOG_OBJECT (dtmfsrc, "using clock-rate from caps %d",
          dtmfsrc->clock_rate);
    } else {
      GST_LOG_OBJECT (dtmfsrc, "using existing clock-rate %d",
          dtmfsrc->clock_rate);
    }
    gst_structure_set (s, "clock-rate", G_TYPE_INT, dtmfsrc->clock_rate, NULL);


    if (gst_structure_has_field_typed (s, "ssrc", G_TYPE_UINT)) {
      value = gst_structure_get_value (s, "ssrc");
      dtmfsrc->current_ssrc = g_value_get_uint (value);
      GST_LOG_OBJECT (dtmfsrc, "using peer ssrc %08x", dtmfsrc->current_ssrc);
    } else {
      /* FIXME, fixate_nearest_uint would be even better */
      gst_structure_set (s, "ssrc", G_TYPE_UINT, dtmfsrc->current_ssrc, NULL);
      GST_LOG_OBJECT (dtmfsrc, "using internal ssrc %08x",
          dtmfsrc->current_ssrc);
    }

    if (gst_structure_has_field_typed (s, "clock-base", G_TYPE_UINT)) {
      value = gst_structure_get_value (s, "clock-base");
      dtmfsrc->ts_base = g_value_get_uint (value);
      GST_LOG_OBJECT (dtmfsrc, "using peer clock-base %u", dtmfsrc->ts_base);
    } else {
      /* FIXME, fixate_nearest_uint would be even better */
      gst_structure_set (s, "clock-base", G_TYPE_UINT, dtmfsrc->ts_base, NULL);
      GST_LOG_OBJECT (dtmfsrc, "using internal clock-base %u",
          dtmfsrc->ts_base);
    }
    if (gst_structure_has_field_typed (s, "seqnum-base", G_TYPE_UINT)) {
      value = gst_structure_get_value (s, "seqnum-base");
      dtmfsrc->seqnum_base = g_value_get_uint (value);
      GST_LOG_OBJECT (dtmfsrc, "using peer seqnum-base %u",
          dtmfsrc->seqnum_base);
    } else {
      /* FIXME, fixate_nearest_uint would be even better */
      gst_structure_set (s, "seqnum-base", G_TYPE_UINT, dtmfsrc->seqnum_base,
          NULL);
      GST_LOG_OBJECT (dtmfsrc, "using internal seqnum-base %u",
          dtmfsrc->seqnum_base);
    }
    GST_DEBUG_OBJECT (dtmfsrc, "with peer caps: %" GST_PTR_FORMAT, srccaps);
  }

  ret = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), srccaps);
  gst_caps_unref (srccaps);

  dtmfsrc->dirty = FALSE;

  return ret;

}
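The negotiation skeleton used above (intersect our defaults with the peer caps, bail out on an empty result, then truncate and fixate what is left) can also be shown standalone. A sketch assuming GStreamer 1.x, where gst_caps_truncate() and gst_caps_fixate() return the resulting caps; the caps strings are invented for illustration:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *ours, *peer, *res;
  gchar *s;

  gst_init (&argc, &argv);

  ours = gst_caps_from_string ("application/x-rtp, media=(string)audio, "
      "encoding-name=(string)TELEPHONE-EVENT");
  peer = gst_caps_from_string ("application/x-rtp, payload=(int)[ 96, 127 ], "
      "clock-rate=(int)8000");

  res = gst_caps_intersect (ours, peer);
  gst_caps_unref (ours);
  gst_caps_unref (peer);

  if (gst_caps_is_empty (res)) {
    g_print ("no common format with the peer\n");
    gst_caps_unref (res);
    return 1;
  }

  /* keep the first structure only and fixate any remaining ranges */
  res = gst_caps_truncate (res);
  res = gst_caps_fixate (res);

  s = gst_caps_to_string (res);
  g_print ("negotiated: %s\n", s);
  g_free (s);

  gst_caps_unref (res);
  return 0;
}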