Example #1
0
/* Fake H.264 decoder sink event handler: on a CAPS event, replaces the
 * negotiated caps with plain "video/x-raw" on the source pad and swallows
 * the event; every other event is forwarded downstream unchanged. */
static gboolean
gst_fake_h264_decoder_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstElement *element = GST_ELEMENT (parent);
  GstPad *srcpad = gst_element_get_static_pad (element, "src");
  gboolean result = TRUE;

  if (GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
    GstCaps *rawcaps = gst_caps_new_empty_simple ("video/x-raw");

    gst_pad_set_caps (srcpad, rawcaps);
    gst_caps_unref (rawcaps);
    /* The CAPS event is consumed here rather than forwarded. */
    gst_event_unref (event);
    event = NULL;
  }

  if (event != NULL)
    result = gst_pad_push_event (srcpad, event);
  gst_object_unref (srcpad);

  return result;
}
/* GFunc callback: issues a throw-away allocation query ("mycaps",
 * need-pool = FALSE) against the peer of the harness source pad and
 * discards both the query and its result. */
static void
push_query (gpointer data, gpointer user_data)
{
  GstHarness *harness = user_data;
  GstCaps *query_caps;
  GstQuery *alloc_query;

  query_caps = gst_caps_new_empty_simple ("mycaps");
  alloc_query = gst_query_new_allocation (query_caps, FALSE);
  gst_caps_unref (query_caps);

  gst_pad_peer_query (harness->srcpad, alloc_query);
  gst_query_unref (alloc_query);
}
Example #3
0
/* Builds the caps a capture source must satisfy for @payload.
 *
 * Audio payloads yield "audio/x-raw" pinned to the payload's clock rate,
 * plus a channel count when the payload reports a positive one.
 * Video payloads yield a caps set combining any encoded caps the payload
 * can provide (left unconstrained, see comment below) with raw
 * "video/x-raw" caps constrained by set_video_params().
 *
 * Returns: (transfer full): the new caps, or NULL when @payload is NULL;
 * reaches g_return_val_if_reached() on an unknown media type.
 */
GstCaps * _owr_payload_create_source_caps(OwrPayload *payload)
{
    OwrPayloadPrivate *priv;
    OwrMediaType media_type;
    GstCaps *caps = NULL, *tmp;
    guint channels = 0;

    g_return_val_if_fail(payload, NULL);
    priv = payload->priv;

    g_object_get(payload, "media-type", &media_type, NULL);

    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        if (OWR_IS_AUDIO_PAYLOAD(payload))
            g_object_get(OWR_AUDIO_PAYLOAD(payload), "channels", &channels, NULL);
        caps = gst_caps_new_simple("audio/x-raw",
            "rate", G_TYPE_INT, priv->clock_rate,
            NULL);
        /* Only constrain channels when the payload actually reported some. */
        if (channels > 0) {
            gst_caps_set_simple(caps,
                "channels", G_TYPE_INT, channels,
                NULL);
        }
        break;

    case OWR_MEDIA_TYPE_VIDEO:
        caps = gst_caps_new_empty();

        /* For cameras that may be able to give us embedded video */
        tmp = _owr_payload_create_encoded_caps(payload);
        if (tmp) {

            /*
             * Do not set video params here.
             * It means that we don't limit the video size in capsfilter and
             * we could stream any resolution of encoded video including dynamic video size change.
             */
            //set_video_params(tmp, payload);

            gst_caps_append(caps, tmp);
        }

        // For raw video source caps
        tmp = gst_caps_new_empty_simple("video/x-raw");
        set_video_params(tmp, payload);
        gst_caps_append(caps, tmp);

        break;
    default:
        g_return_val_if_reached(NULL);
    }

    return caps;
}
Example #4
0
/* Builds the (near-)raw caps matching @payload's configured parameters.
 *
 * Audio payloads yield "audio/x-raw" at the payload clock rate, plus a
 * channel count when the payload reports a positive one. Video payloads
 * yield caps with the payload's codec mime type, with width/height/
 * framerate falling back to the LIMITED_* defaults when unset.
 *
 * Returns: (transfer full): the new caps, or NULL when @payload is NULL;
 * reaches g_return_val_if_reached() on an unknown media type.
 */
GstCaps * _owr_payload_create_raw_caps(OwrPayload *payload)
{
    OwrPayloadPrivate *priv;
    OwrMediaType media_type;
    GstCaps *caps = NULL;
    guint channels = 0;
    guint width = 0, height = 0;
    gdouble framerate = 0.0;
    gint fps_n = 0, fps_d = 1;

    g_return_val_if_fail(payload, NULL);
    priv = payload->priv;

    g_object_get(payload, "media-type", &media_type, NULL);

    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        if (OWR_IS_AUDIO_PAYLOAD(payload))
            g_object_get(OWR_AUDIO_PAYLOAD(payload), "channels", &channels, NULL);
        caps = gst_caps_new_simple("audio/x-raw",
            "rate", G_TYPE_INT, priv->clock_rate,
            NULL);
        /* Only constrain channels when the payload actually reported some. */
        if (channels > 0) {
            gst_caps_set_simple(caps,
                "channels", G_TYPE_INT, channels,
                NULL);
        }
        break;

    case OWR_MEDIA_TYPE_VIDEO:
        if (OWR_IS_VIDEO_PAYLOAD(payload)) {
            g_object_get(OWR_VIDEO_PAYLOAD(payload),
                "width", &width,
                "height", &height,
                "framerate", &framerate,
                NULL);
        }
        caps = gst_caps_new_empty_simple(_owr_codec_type_to_caps_mime(media_type, priv->codec_type));
#ifdef __APPLE__
        /* On macOS, H.264 caps must accept any caps features. */
        if (priv->codec_type == OWR_CODEC_TYPE_H264)
          gst_caps_set_features(caps, 0, gst_caps_features_new_any());
#endif
        /* Fall back to the LIMITED_* defaults when the payload has no size. */
        gst_caps_set_simple(caps, "width", G_TYPE_INT, width > 0 ? width : LIMITED_WIDTH, NULL);
        gst_caps_set_simple(caps, "height", G_TYPE_INT, height > 0 ? height : LIMITED_HEIGHT, NULL);

        /* Caps store framerate as a fraction, so convert the double. */
        framerate = framerate > 0.0 ? framerate : LIMITED_FRAMERATE;
        gst_util_double_to_fraction(framerate, &fps_n, &fps_d);
        gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
        break;
    default:
        g_return_val_if_reached(NULL);
    }

    return caps;
}
/* Builds an Ogg ("application/ogg") container profile named "myprofile"
 * holding a single Vorbis audio stream profile with the given presence
 * count and encoder preset. */
static GstEncodingProfile *
create_ogg_vorbis_profile (guint presence, gchar * preset)
{
  GstEncodingContainerProfile *container;
  GstCaps *container_caps, *stream_caps;

  container_caps = gst_caps_new_empty_simple ("application/ogg");
  container = gst_encoding_container_profile_new ((gchar *) "myprofile", NULL,
      container_caps, NULL);
  gst_caps_unref (container_caps);

  stream_caps = gst_caps_new_empty_simple ("audio/x-vorbis");
  fail_unless (gst_encoding_container_profile_add_profile (container,
          (GstEncodingProfile *) gst_encoding_audio_profile_new (stream_caps,
              preset, NULL, presence)));
  gst_caps_unref (stream_caps);

  return (GstEncodingProfile *) container;
}
/* BaseTransform set_caps hook used by the test: verifies the negotiated
 * sink caps are "baz/x-foo" and the source caps are "foo/x-bar", then
 * records that the hook ran via the set_caps_ct1_called flag. */
static gboolean
set_caps_ct1 (GstBaseTransform * trans, GstCaps * incaps, GstCaps * outcaps)
{
  GstCaps *expected_in, *expected_out;

  GST_DEBUG_OBJECT (trans, "set_caps called");

  expected_in = gst_caps_new_empty_simple ("baz/x-foo");
  expected_out = gst_caps_new_empty_simple ("foo/x-bar");

  fail_unless (gst_caps_is_equal (incaps, expected_in));
  fail_unless (gst_caps_is_equal (outcaps, expected_out));

  set_caps_ct1_called = TRUE;

  gst_caps_unref (expected_in);
  gst_caps_unref (expected_out);

  return TRUE;
}
/* Creates an empty Ogg ("application/ogg") container encoding profile
 * named "myprofile", with no preset and no restriction caps.
 *
 * Returns: (transfer full): the container profile, cast to
 * GstEncodingProfile for the callers' convenience. */
static GstEncodingProfile *
create_ogg_profile (void)
{
  GstEncodingContainerProfile *prof;
  GstCaps *ogg;

  ogg = gst_caps_new_empty_simple ("application/ogg");
  prof = gst_encoding_container_profile_new ((gchar *) "myprofile", NULL, ogg,
      NULL);
  gst_caps_unref (ogg);
  return (GstEncodingProfile *) prof;
}
Example #8
0
/* Creates "video/x-dnxhd" caps for a VC-3 (DNxHD) MXF track.
 *
 * Scans the track's descriptors, preferring a generic picture essence
 * descriptor (used afterwards to fill video fields into the caps); a
 * plain, non-multiple file descriptor is accepted as a fallback. Also
 * installs the VC-3 essence element handler and appends a video codec
 * tag to @tags (creating the tag list if needed).
 *
 * Returns: (transfer full): the caps, or NULL when the track has no
 * usable descriptor.
 */
static GstCaps *
mxf_vc3_create_caps (MXFMetadataTimelineTrack * track, GstTagList ** tags,
    MXFEssenceElementHandleFunc * handler, gpointer * mapping_data)
{
  MXFMetadataFileDescriptor *f = NULL;
  MXFMetadataGenericPictureEssenceDescriptor *p = NULL;
  guint i;
  GstCaps *caps = NULL;

  g_return_val_if_fail (track != NULL, NULL);

  if (track->parent.descriptor == NULL) {
    GST_ERROR ("No descriptor found for this track");
    return NULL;
  }

  for (i = 0; i < track->parent.n_descriptor; i++) {
    if (!track->parent.descriptor[i])
      continue;

    /* A picture essence descriptor is the best match: stop searching. */
    if (MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
            parent.descriptor[i])) {
      p = (MXFMetadataGenericPictureEssenceDescriptor *) track->parent.
          descriptor[i];
      f = track->parent.descriptor[i];
      break;
    } else if (MXF_IS_METADATA_FILE_DESCRIPTOR (track->parent.descriptor[i]) &&
        !MXF_IS_METADATA_MULTIPLE_DESCRIPTOR (track->parent.descriptor[i])) {
      /* Fallback: remember any plain file descriptor, keep looking for
       * a picture descriptor. */
      f = track->parent.descriptor[i];
    }
  }

  if (!f) {
    GST_ERROR ("No descriptor found for this track");
    return NULL;
  }

  *handler = mxf_vc3_handle_essence_element;

  caps = gst_caps_new_empty_simple ("video/x-dnxhd");
  if (p) {
    /* Fill width/height/framerate etc. from the picture descriptor. */
    mxf_metadata_generic_picture_essence_descriptor_set_caps (p, caps);
  } else {
    GST_WARNING ("Only a generic file descriptor found");
  }

  if (!*tags)
    *tags = gst_tag_list_new_empty ();
  gst_tag_list_add (*tags, GST_TAG_MERGE_APPEND, GST_TAG_VIDEO_CODEC,
      "VC-3 Video", NULL);

  return caps;
}
/* BaseTransform transform_caps hook for the test element: the sink side
 * maps to "foo/x-bar", the source side to "baz/x-foo"; the optional
 * filter caps are intersected (filter first) into the result. */
static GstCaps *
transform_caps_ct1 (GstBaseTransform * trans, GstPadDirection dir,
    GstCaps * caps, GstCaps * filter)
{
  GstCaps *result;

  result = gst_caps_new_empty_simple (
      dir == GST_PAD_SINK ? "foo/x-bar" : "baz/x-foo");

  if (filter) {
    GstCaps *intersected =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersected;
  }

  return result;
}
/* Caps transform for rgb2bayer: maps "video/x-raw" <-> "video/x-bayer"
 * depending on direction, carrying the width, height and framerate
 * fields over unchanged, and intersecting with @filter when given. */
static GstCaps *
gst_rgb2bayer_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  static const gchar *copied_fields[] = { "width", "height", "framerate" };
  GstStructure *in_struct, *out_struct;
  GstCaps *result;
  guint i;

  GST_DEBUG_OBJECT (trans, "transforming caps (from) %" GST_PTR_FORMAT, caps);

  in_struct = gst_caps_get_structure (caps, 0);

  result = gst_caps_new_empty_simple (direction == GST_PAD_SRC ?
      "video/x-raw" : "video/x-bayer");
  out_struct = gst_caps_get_structure (result, 0);

  /* Carry the size/rate fields over to the transformed media type. */
  for (i = 0; i < G_N_ELEMENTS (copied_fields); i++) {
    const GValue *value = gst_structure_get_value (in_struct, copied_fields[i]);
    gst_structure_set_value (out_struct, copied_fields[i], value);
  }

  GST_DEBUG_OBJECT (trans, "transforming caps (into) %" GST_PTR_FORMAT,
      result);

  if (filter) {
    GstCaps *unfiltered = result;
    result = gst_caps_intersect (result, filter);
    gst_caps_unref (unfiltered);
  }

  return result;
}
/* End-to-end test: videotestsrc -> vp8enc -> agnosticbin -> capsfilter
 * (forcing "video/x-raw") -> fakesink, so agnosticbin must decode the
 * VP8 stream back to raw video. Runs the pipeline until bus_msg() quits
 * the main loop, then tears everything down. */
static void
play_agnosticbin_vp8_to_raw (void)
{
  gboolean ret;
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  GstElement *videotestsrc = gst_element_factory_make ("videotestsrc", NULL);
  GstElement *vp8enc = gst_element_factory_make ("vp8enc", NULL);
  GstElement *agnosticbin = gst_element_factory_make ("agnosticbin", NULL);
  GstElement *capsfilter = gst_element_factory_make ("capsfilter", NULL);
  GstElement *fakesink = gst_element_factory_make ("fakesink", NULL);

  loop = g_main_loop_new (NULL, TRUE);
  GstCaps *caps;

  g_object_set (G_OBJECT (pipeline), "async-handling", TRUE, NULL);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  mark_point ();
  /* Bounded run: the source stops after 100 buffers; every encoded
   * frame is a keyframe. */
  g_object_set (G_OBJECT (videotestsrc), "num-buffers", 100, NULL);
  g_object_set (G_OBJECT (vp8enc), "keyframe-max-dist", 1, NULL);

  /* Force raw video after agnosticbin, requiring it to decode. */
  caps = gst_caps_new_empty_simple ("video/x-raw");
  g_object_set (G_OBJECT (capsfilter), "caps", caps, NULL);
  gst_caps_unref (caps);

  mark_point ();
  gst_bin_add_many (GST_BIN (pipeline), videotestsrc, vp8enc, agnosticbin,
      capsfilter, fakesink, NULL);
  mark_point ();
  ret =
      gst_element_link_many (videotestsrc, vp8enc, agnosticbin, capsfilter,
      fakesink, NULL);
  fail_unless (ret);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  mark_point ();
  /* Blocks until bus_msg() calls g_main_loop_quit(). */
  g_main_loop_run (loop);
  mark_point ();

  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "play_agnosticbin_vp8_to_raw_end");

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_main_loop_unref (loop);
  g_object_unref (pipeline);
}
Example #12
0
/* Creates a buffer pool configured for "test/data" caps with the given
 * buffer size and minimum/maximum number of buffers. */
static GstBufferPool *
create_pool (guint size, guint min_buf, guint max_buf)
{
  GstBufferPool *pool;
  GstStructure *config;
  GstCaps *pool_caps;

  pool = gst_buffer_pool_new ();
  config = gst_buffer_pool_get_config (pool);
  pool_caps = gst_caps_new_empty_simple ("test/data");

  gst_buffer_pool_config_set_params (config, pool_caps, size, min_buf,
      max_buf);
  gst_buffer_pool_set_config (pool, config);
  gst_caps_unref (pool_caps);

  return pool;
}
/* Builds a WebM encoding profile: a "video/webm" container holding one
 * VP8 video stream profile and one Vorbis audio stream profile (both
 * with presence 1, no preset, no restriction). */
static GstEncodingProfile *
create_webm_profile (void)
{
  GstEncodingContainerProfile *container;
  GstCaps *container_caps, *video_caps, *audio_caps;

  container_caps = gst_caps_new_empty_simple ("video/webm");
  container = gst_encoding_container_profile_new ("webm", NULL,
      container_caps, NULL);
  gst_caps_unref (container_caps);

  video_caps = gst_caps_new_empty_simple ("video/x-vp8");
  gst_encoding_container_profile_add_profile (container,
      (GstEncodingProfile *) gst_encoding_video_profile_new (video_caps,
          NULL, NULL, 1));
  gst_caps_unref (video_caps);

  audio_caps = gst_caps_new_empty_simple ("audio/x-vorbis");
  gst_encoding_container_profile_add_profile (container,
      (GstEncodingProfile *) gst_encoding_audio_profile_new (audio_caps,
          NULL, NULL, 1));
  gst_caps_unref (audio_caps);

  return (GstEncodingProfile *) container;
}
/* Checks whether @track can feed @prof and, when it can, configures the
 * track caps for the current pipeline mode.
 *
 * In smart-render mode the track caps become the profile's input caps
 * with raw audio/video caps appended, so already-encoded streams can
 * pass through untranscoded. In preview/render mode the track is simply
 * restricted to raw audio or video caps.
 *
 * Returns: TRUE when the track type is compatible with the profile. */
static gboolean
_track_is_compatible_with_profile (GESPipeline * self, GESTrack * track,
    GstEncodingProfile * prof)
{
  if (TRACK_COMPATIBLE_PROFILE (track->type, prof)) {
    if (self->priv->mode == GES_PIPELINE_MODE_SMART_RENDER) {
      GstCaps *ocaps, *rcaps;

      GST_DEBUG ("Smart Render mode, setting input caps");
      ocaps = gst_encoding_profile_get_input_caps (prof);
      /* The profile's caps may be shared; get a writable copy before
       * appending to them. */
      ocaps = gst_caps_make_writable (ocaps);
      if (track->type == GES_TRACK_TYPE_AUDIO)
        rcaps = gst_caps_new_empty_simple ("audio/x-raw");
      else
        rcaps = gst_caps_new_empty_simple ("video/x-raw");
      /* gst_caps_append() takes ownership of rcaps. */
      gst_caps_append (ocaps, rcaps);
      ges_track_set_caps (track, ocaps);
      gst_caps_unref (ocaps);
    } else {
      GstCaps *caps = NULL;

      /* Raw preview or rendering mode */
      if (track->type == GES_TRACK_TYPE_VIDEO)
        caps = gst_caps_new_empty_simple ("video/x-raw");
      else if (track->type == GES_TRACK_TYPE_AUDIO)
        caps = gst_caps_new_empty_simple ("audio/x-raw");

      if (caps) {
        ges_track_set_caps (track, caps);
        gst_caps_unref (caps);
      }
    }

    return TRUE;
  }

  return FALSE;
}
/* Creates a standalone Vorbis audio encoding profile (no container),
 * with no preset, no restriction caps and presence 0. */
static GstEncodingProfile *
create_vorbis_only_profile (void)
{
  GstCaps *vorbis_caps = gst_caps_new_empty_simple ("audio/x-vorbis");
  GstEncodingProfile *profile = (GstEncodingProfile *)
      gst_encoding_audio_profile_new (vorbis_caps, NULL, NULL, 0);

  gst_caps_unref (vorbis_caps);

  return profile;
}
Example #16
0
/* Builds fully raw caps matching @payload's configured parameters.
 *
 * Audio payloads yield interleaved S16LE "audio/x-raw" at the payload
 * clock rate, plus a channel count when the payload reports a positive
 * one. Video payloads yield "video/x-raw" with width/height/framerate
 * falling back to the LIMITED_* defaults when unset.
 *
 * Returns: (transfer full): the new caps, or NULL when @payload is NULL;
 * reaches g_return_val_if_reached() on an unknown media type.
 */
GstCaps * _owr_payload_create_raw_caps(OwrPayload *payload)
{
    OwrPayloadPrivate *priv;
    GstCaps *caps = NULL;
    guint channels = 0;
    guint width = 0, height = 0;
    gdouble framerate = 0.0;
    gint fps_n = 0, fps_d = 1;

    g_return_val_if_fail(payload, NULL);
    priv = payload->priv;

    switch (priv->media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        if (OWR_IS_AUDIO_PAYLOAD(payload))
            g_object_get(OWR_AUDIO_PAYLOAD(payload), "channels", &channels, NULL);
        caps = gst_caps_new_simple("audio/x-raw",
            "format", G_TYPE_STRING, "S16LE",
            "layout", G_TYPE_STRING, "interleaved",
            "rate", G_TYPE_INT, priv->clock_rate,
            NULL);
        /* Only constrain channels when the payload actually reported some. */
        if (channels > 0) {
            gst_caps_set_simple(caps,
                "channels", G_TYPE_INT, channels,
                NULL);
        }
        break;

    case OWR_MEDIA_TYPE_VIDEO:
        if (OWR_IS_VIDEO_PAYLOAD(payload)) {
            g_object_get(OWR_VIDEO_PAYLOAD(payload),
                "width", &width,
                "height", &height,
                "framerate", &framerate,
                NULL);
        }
        caps = gst_caps_new_empty_simple("video/x-raw");
        /* Fall back to the LIMITED_* defaults when the payload has no size. */
        gst_caps_set_simple(caps, "width", G_TYPE_INT, width > 0 ? width : LIMITED_WIDTH, NULL);
        gst_caps_set_simple(caps, "height", G_TYPE_INT, height > 0 ? height : LIMITED_HEIGHT, NULL);

        /* Caps store framerate as a fraction, so convert the double. */
        framerate = framerate > 0.0 ? framerate : LIMITED_FRAMERATE;
        gst_util_double_to_fraction(framerate, &fps_n, &fps_d);
        gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
        break;
    default:
        g_return_val_if_reached(NULL);
    }

    return caps;
}
Example #17
0
/* Video encoder set_format: maps the negotiated raw video format to the
 * matching libpng color type and bit depth, stores the input state and
 * publishes "image/png" output caps.
 *
 * Returns: TRUE on success, FALSE when the input format is not one the
 * encoder supports (RGBA, RGB, GRAY8, GRAY16_BE). */
static gboolean
gst_pngenc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstPngEnc *pngenc;
  gboolean ret = TRUE;
  GstVideoInfo *info;
  GstVideoCodecState *output_state;

  pngenc = GST_PNGENC (encoder);
  info = &state->info;

  /* First pass: choose the PNG color type; unsupported formats bail. */
  switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_RGBA:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGBA;
      break;
    case GST_VIDEO_FORMAT_RGB:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGB;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      pngenc->png_color_type = PNG_COLOR_TYPE_GRAY;
      break;
    default:
      ret = FALSE;
      goto done;
  }

  /* Second pass: only 16-bit grayscale uses 16 bits per channel. */
  switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_GRAY16_BE:
      pngenc->depth = 16;
      break;
    default:                   /* GST_VIDEO_FORMAT_RGB and GST_VIDEO_FORMAT_GRAY8 */
      pngenc->depth = 8;
      break;
  }

  /* Replace any previously stored input state with the new one. */
  if (pngenc->input_state)
    gst_video_codec_state_unref (pngenc->input_state);
  pngenc->input_state = gst_video_codec_state_ref (state);

  output_state =
      gst_video_encoder_set_output_state (encoder,
      gst_caps_new_empty_simple ("image/png"), state);
  gst_video_codec_state_unref (output_state);

done:

  return ret;
}
/* Prepares the streamiddemux test fixture: shared test caps, the demux
 * element with its pad-added callback, its sink pad, and brings the
 * element to PLAYING. */
static void
setup_test_objects (struct TestData *td)
{
    td->mycaps = gst_caps_new_empty_simple ("test/test");
    td->srcpad_cnt = 0;

    td->demux = gst_element_factory_make ("streamiddemux", NULL);
    fail_unless (td->demux != NULL);
    g_signal_connect (td->demux, "pad-added", G_CALLBACK (src_pad_added_cb), td);

    td->demuxsink = gst_element_get_static_pad (td->demux, "sink");
    fail_unless (td->demuxsink != NULL);

    fail_unless (gst_element_set_state (td->demux, GST_STATE_PLAYING)
        == GST_STATE_CHANGE_SUCCESS);
}
Example #19
0
/* Probes the Vulkan surface and builds the "video/x-raw" caps (with
 * Vulkan buffer memory features) the swapper can display: the supported
 * format list, size ranges up to the device's 2D image limit, and open
 * framerate/PAR.
 *
 * Returns: (transfer full): the probed caps, or NULL (with @error set)
 * when surface properties cannot be retrieved. */
GstCaps *
gst_vulkan_swapper_get_supported_caps (GstVulkanSwapper * swapper,
    GError ** error)
{
  GstStructure *s;
  GstCaps *caps;

  g_return_val_if_fail (GST_IS_VULKAN_SWAPPER (swapper), NULL);

  if (!_vulkan_swapper_retrieve_surface_properties (swapper, error))
    return NULL;

  caps = gst_caps_new_empty_simple ("video/x-raw");
  gst_caps_set_features (caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_VULKAN_BUFFER));
  s = gst_caps_get_structure (caps, 0);

  {
    int i;
    GValue list = G_VALUE_INIT;

    g_value_init (&list, GST_TYPE_LIST);

    /* VK_FORMAT_UNDEFINED as the sole entry means the surface accepts
     * any format; advertise BGRA in that case. */
    if (swapper->n_surf_formats
        && swapper->surf_formats[0].format == VK_FORMAT_UNDEFINED) {
      _add_vk_format_to_list (&list, VK_FORMAT_B8G8R8A8_UNORM);
    } else {
      for (i = 0; i < swapper->n_surf_formats; i++) {
        _add_vk_format_to_list (&list, swapper->surf_formats[i].format);
      }
    }

    gst_structure_set_value (s, "format", &list);
    g_value_unset (&list);
  }
  {
    guint32 max_dim = swapper->device->gpu_props.limits.maxImageDimension2D;

    gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, (gint) max_dim,
        "height", GST_TYPE_INT_RANGE, 1, (gint) max_dim, "pixel-aspect-ratio",
        GST_TYPE_FRACTION, 1, 1, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
        G_MAXINT, 1, NULL);
  }

  GST_INFO_OBJECT (swapper, "Probed the following caps %" GST_PTR_FORMAT, caps);

  return caps;
}
/* Builds the "video/x-vp8" source caps for the MSDK VP8 encoder, adding
 * a "profile" field when the configured MFX codec profile maps to a
 * known string. */
static GstCaps *
gst_msdkvp8enc_set_src_caps (GstMsdkEnc * encoder)
{
  GstCaps *src_caps = gst_caps_new_empty_simple ("video/x-vp8");
  GstStructure *s = gst_caps_get_structure (src_caps, 0);
  const gchar *profile_name;

  profile_name = profile_to_string (encoder->param.mfx.CodecProfile);
  if (profile_name != NULL)
    gst_structure_set (s, "profile", G_TYPE_STRING, profile_name, NULL);

  return src_caps;
}
/* Creates template "video/x-raw" caps restricted to @format, with the
 * standard template ranges applied by set_video_template_caps().
 *
 * Returns: (transfer full): the new caps, or NULL when caps creation
 * fails or @format is GST_VIDEO_FORMAT_UNKNOWN. */
GstCaps *
gst_vaapi_video_format_new_template_caps (GstVideoFormat format)
{
  GstCaps *caps;

  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);

  caps = gst_caps_new_empty_simple ("video/x-raw");
  if (caps == NULL)
    return NULL;

  gst_caps_set_simple (caps, "format", G_TYPE_STRING,
      gst_video_format_to_string (format), NULL);
  set_video_template_caps (caps);

  return caps;
}
/* Builds hyperspectral caps describing a data cube: its dimensions and
 * wavelength count, the pixel format and layout strings, and the list
 * of wavelength identifiers (added by add_wavelength_list_to_struct()).
 *
 * Returns: (transfer full): new caps of GST_HYPERSPECTRAL_MEDIA_TYPE. */
GstCaps *
gst_hspec_build_caps (gint data_cube_width, gint data_cube_height,
  gint data_cube_wavelengths, const gchar *fmtstr,
  const gchar *layoutstr, gint wavelengthno,
  gint *wavelengths) {
  GstCaps *caps = gst_caps_new_empty_simple (GST_HYPERSPECTRAL_MEDIA_TYPE);
  /* (fixed: removed stray second semicolon after this declaration) */
  GstStructure *outstruct = gst_caps_get_structure (caps, 0);
  gst_structure_set(outstruct,
    "width", G_TYPE_INT, data_cube_width,
    "height", G_TYPE_INT, data_cube_height,
    "wavelengths", G_TYPE_INT, data_cube_wavelengths,
    "format", G_TYPE_STRING, fmtstr,
    "layout", G_TYPE_STRING, layoutstr,
    NULL);
  add_wavelength_list_to_struct(outstruct, wavelengthno, wavelengths);
  return caps;
}
/* RTP Opus depayloader setcaps: output caps are always "audio/x-opus"
 * and the clock rate is fixed at 48000 (the Opus RTP clock rate). */
static gboolean
gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstCaps *opus_caps = gst_caps_new_empty_simple ("audio/x-opus");
  gboolean result;

  result = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload),
      opus_caps);
  GST_DEBUG_OBJECT (depayload,
      "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", opus_caps, result);
  gst_caps_unref (opus_caps);

  depayload->clock_rate = 48000;

  return result;
}
/* Creates an "avisubtitle" element wired to the check framework's
 * src/sink pads, activates them, pushes the initial
 * "application/x-subtitle-avi" caps events, and returns the element
 * ready for testing. */
static GstElement *
setup_avisubtitle (void)
{
  GstElement *element;
  GstCaps *caps;

  GST_DEBUG ("setup_avisubtitle");
  element = gst_check_setup_element ("avisubtitle");
  mysinkpad = gst_check_setup_sink_pad (element, &sink_template);
  mysrcpad = gst_check_setup_src_pad (element, &src_template);
  gst_pad_set_active (mysinkpad, TRUE);
  gst_pad_set_active (mysrcpad, TRUE);

  caps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
  gst_check_setup_events (mysrcpad, element, caps, GST_FORMAT_TIME);
  gst_caps_unref (caps);

  return element;
}
/* Prepares the funnel test fixture: a funnel element in PLAYING with two
 * requested sink pads ("sink_11", "sink_22"), a local sink pad using
 * @chain_func, and two active source pads with stream-start/caps/segment
 * events already pushed, all linked together. */
static void
setup_test_objects (struct TestData *td, GstPadChainFunction chain_func)
{
  td->mycaps = gst_caps_new_empty_simple ("test/test");

  td->funnel = gst_element_factory_make ("funnel", NULL);

  td->funnelsrc = gst_element_get_static_pad (td->funnel, "src");
  fail_unless (td->funnelsrc != NULL);

  /* Request pads by explicit name and verify the funnel honors it. */
  td->funnelsink11 = gst_element_get_request_pad (td->funnel, "sink_11");
  fail_unless (td->funnelsink11 != NULL);
  fail_unless (!strcmp (GST_OBJECT_NAME (td->funnelsink11), "sink_11"));

  td->funnelsink22 = gst_element_get_request_pad (td->funnel, "sink_22");
  fail_unless (td->funnelsink22 != NULL);
  fail_unless (!strcmp (GST_OBJECT_NAME (td->funnelsink22), "sink_22"));

  fail_unless (gst_element_set_state (td->funnel, GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_SUCCESS);

  td->mysink = gst_pad_new ("sink", GST_PAD_SINK);
  gst_pad_set_chain_function (td->mysink, chain_func);
  gst_pad_set_active (td->mysink, TRUE);

  /* Each source pad gets its own stream id so the funnel can tell the
   * two streams apart. */
  td->mysrc1 = gst_pad_new ("src1", GST_PAD_SRC);
  gst_pad_set_active (td->mysrc1, TRUE);
  gst_check_setup_events_with_stream_id (td->mysrc1, td->funnel, td->mycaps,
      GST_FORMAT_BYTES, "test1");

  td->mysrc2 = gst_pad_new ("src2", GST_PAD_SRC);
  gst_pad_set_active (td->mysrc2, TRUE);
  gst_check_setup_events_with_stream_id (td->mysrc2, td->funnel, td->mycaps,
      GST_FORMAT_BYTES, "test2");

  fail_unless (GST_PAD_LINK_SUCCESSFUL (gst_pad_link (td->funnelsrc,
              td->mysink)));

  fail_unless (GST_PAD_LINK_SUCCESSFUL (gst_pad_link (td->mysrc1,
              td->funnelsink11)));

  fail_unless (GST_PAD_LINK_SUCCESSFUL (gst_pad_link (td->mysrc2,
              td->funnelsink22)));

}
/* Builds the source bin for a GES video test source: a videotestsrc
 * (with the element's configured pattern exposed as a child property)
 * followed by a capsfilter restricted to raw video.
 *
 * Returns: the top-level source bin wrapping both elements. */
static GstElement *
ges_video_test_source_create_source (GESTrackElement * self)
{
  gint pattern;
  GstElement *testsrc, *capsfilter;
  GstCaps *caps;
  const gchar *props[] = { "pattern", NULL };

  testsrc = gst_element_factory_make ("videotestsrc", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  pattern = ((GESVideoTestSource *) self)->priv->pattern;

  g_object_set (testsrc, "pattern", pattern, NULL);

  /* g_object_set() copies (refs) the boxed caps into the property, so
   * drop our reference afterwards — the inline form leaked the caps
   * (every other caps-to-property call site unrefs after setting). */
  caps = gst_caps_new_empty_simple ("video/x-raw");
  g_object_set (capsfilter, "caps", caps, NULL);
  gst_caps_unref (caps);

  ges_track_element_add_children_props (self, testsrc, NULL, NULL, props);

  return ges_source_create_topbin ("videotestsrc", testsrc, capsfilter, NULL);
}
/* Thread body: sends stream-start and "foo/x-bar" caps events on the
 * test pad, then pushes the prepared buffer and asserts the flow return
 * is GST_FLOW_OK. Always returns NULL. */
static gpointer
push_buffer (gpointer user_data)
{
  TestData *td = (TestData *) user_data;
  GstCaps *caps;
  GstFlowReturn flow_ret;

  gst_pad_push_event (td->pad, gst_event_new_stream_start ("test"));

  caps = gst_caps_new_empty_simple ("foo/x-bar");
  gst_pad_push_event (td->pad, gst_event_new_caps (caps));
  gst_caps_unref (caps);

  flow_ret = gst_pad_push (td->pad, td->buffer);
  fail_unless (flow_ret == GST_FLOW_OK, "got flow %s instead of OK",
      gst_flow_get_name (flow_ret));

  return NULL;
}
Example #28
0
/* Rebuilds and applies the caps on the positioner's capsfilter from the
 * current track size and framerate. When the element previously tracked
 * the old track size (same dimensions and aspect ratio), its own size
 * follows the new track size. No-op when no capsfilter is set. */
static void
gst_frame_positioner_update_properties (GstFramePositioner * pos,
    gboolean track_mixing, gint old_track_width, gint old_track_height)
{
  GstCaps *caps;

  if (pos->capsfilter == NULL)
    return;

  /* Pin the track size only when not mixing (or not scaling in the
   * compositor); otherwise leave the size unconstrained. */
  if (pos->track_width && pos->track_height &&
      (!track_mixing || !pos->scale_in_compositor)) {
    caps =
        gst_caps_new_simple ("video/x-raw", "width", G_TYPE_INT,
        pos->track_width, "height", G_TYPE_INT, pos->track_height, NULL);
  } else {
    caps = gst_caps_new_empty_simple ("video/x-raw");
  }

  /* -1 means "no framerate configured". */
  if (pos->fps_n != -1)
    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, pos->fps_n,
        pos->fps_d, NULL);

  /* If the element was exactly following the old track size and the
   * aspect ratio is unchanged, keep following the new track size. */
  if (old_track_width && pos->width == old_track_width &&
      old_track_height && pos->height == old_track_height &&
      pos->track_height && pos->track_width &&
      ((float) old_track_width / (float) old_track_height) ==
      ((float) pos->track_width / (float) pos->track_height)) {

    GST_DEBUG_OBJECT (pos, "Following track size width old_track: %d -- pos: %d"
        " || height, old_track %d -- pos: %d",
        old_track_width, pos->width, old_track_height, pos->height);

    pos->width = pos->track_width;
    pos->height = pos->track_height;
  }

  /* NOTE(review): the debug object here is the caps, not `pos` —
   * looks unintentional; confirm against the element's other logs. */
  GST_DEBUG_OBJECT (caps, "setting caps");

  g_object_set (pos->capsfilter, "caps", caps, NULL);

  gst_caps_unref (caps);
}
/* RTP AC-3 depayloader setcaps: reads the clock rate from the incoming
 * caps (falling back to the RTP default of 90000) and fixes the output
 * caps to "audio/ac3". */
static gboolean
gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *s = gst_caps_get_structure (caps, 0);
  GstCaps *out_caps;
  gboolean result;
  gint clock_rate;

  if (!gst_structure_get_int (s, "clock-rate", &clock_rate))
    clock_rate = 90000;         /* default */
  depayload->clock_rate = clock_rate;

  out_caps = gst_caps_new_empty_simple ("audio/ac3");
  result = gst_pad_set_caps (depayload->srcpad, out_caps);
  gst_caps_unref (out_caps);

  return result;
}
/* Creates template "video/x-raw" caps whose "format" field lists every
 * format in @formats, with the standard template ranges applied by
 * set_video_template_caps().
 *
 * Returns: (transfer full): the new caps, or NULL when caps creation or
 * building the format list fails. */
GstCaps *
gst_vaapi_video_format_new_template_caps_from_list (GArray * formats)
{
  GValue v_formats = G_VALUE_INIT;
  GstCaps *caps;

  caps = gst_caps_new_empty_simple ("video/x-raw");
  if (!caps)
    return NULL;

  /* v_formats is initialized by gst_vaapi_value_set_format_list(); on
   * failure it needs no unset, only the caps must be released. */
  if (!gst_vaapi_value_set_format_list (&v_formats, formats)) {
    gst_caps_unref (caps);
    return NULL;
  }

  gst_caps_set_value (caps, "format", &v_formats);
  set_video_template_caps (caps);
  g_value_unset (&v_formats);
  return caps;
}