static GstEncodingProfile *
create_ogg_theora_vorbis_profile (guint theorapresence, guint vorbispresence)
{
  GstEncodingContainerProfile *prof;
  GstCaps *ogg, *vorbis, *theora;

  ogg = gst_caps_new_empty_simple ("application/ogg");
  prof =
      gst_encoding_container_profile_new ((gchar *) "myprofile", NULL, ogg,
      NULL);
  gst_caps_unref (ogg);

  vorbis = gst_caps_new_empty_simple ("audio/x-vorbis");
  fail_unless (gst_encoding_container_profile_add_profile (prof,
          (GstEncodingProfile *) gst_encoding_audio_profile_new (vorbis, NULL,
              NULL, vorbispresence)));
  gst_caps_unref (vorbis);

  theora = gst_caps_new_empty_simple ("video/x-theora");
  fail_unless (gst_encoding_container_profile_add_profile (prof,
          (GstEncodingProfile *) gst_encoding_video_profile_new (theora, NULL,
              NULL, theorapresence)));
  gst_caps_unref (theora);

  return (GstEncodingProfile *) prof;
}
GstEncodingProfile *QDeclarativeVideoEditor::createEncodingProfile() {
    GstEncodingProfile *profile = (GstEncodingProfile *)
                                  gst_encoding_container_profile_new("mp4", NULL, gst_caps_new_simple("video/quicktime",
                                          "variant", G_TYPE_STRING, "iso",
                                          NULL), NULL);

    GstEncodingProfile *video = NULL;
    if (m_width > 0 && m_height > 0 && m_fpsn > 0 && m_fpsd > 0) {
        video = (GstEncodingProfile *)
                gst_encoding_video_profile_new(gst_caps_new_simple("video/mpeg", "mpegversion",
                                               G_TYPE_INT, 4,
                                               "width", G_TYPE_INT, m_width,
                                               "height", G_TYPE_INT, m_height,
                                               "framerate", GST_TYPE_FRACTION_RANGE,
                                               m_fpsn-1, m_fpsd, m_fpsn+1, m_fpsd, NULL),
                                               NULL, NULL, 1);
    } else {
        video = (GstEncodingProfile *)
                gst_encoding_video_profile_new(gst_caps_new_simple("video/mpeg", "mpegversion",
                                               G_TYPE_INT, 4, NULL), NULL, NULL, 1);
    }
    GstEncodingProfile *audio = (GstEncodingProfile *)
                                gst_encoding_audio_profile_new(gst_caps_new_simple("audio/mpeg", "mpegversion",
                                        G_TYPE_INT, 4,
                                        "rate", G_TYPE_INT, 48000,
                                        "channels", G_TYPE_INT, 2, NULL), NULL, NULL, 0);

    gst_encoding_container_profile_add_profile((GstEncodingContainerProfile*) profile, video);
    gst_encoding_container_profile_add_profile((GstEncodingContainerProfile*) profile, audio);

    return profile;
}
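For context, a profile built by helpers like the ones above is normally handed to an encodebin element through its "profile" property. A minimal sketch, assuming standard GStreamer 1.x API; the function name is made up for illustration and headers/error handling are omitted as in the surrounding examples:

/* Minimal sketch (not from the original sources): apply a finished
 * GstEncodingProfile to an encodebin element via its "profile" property. */
static GstElement *
make_encodebin_for_profile (GstEncodingProfile * profile)
{
  GstElement *ebin = gst_element_factory_make ("encodebin", NULL);

  if (ebin == NULL)
    return NULL;

  /* encodebin takes its own reference on the profile */
  g_object_set (G_OBJECT (ebin), "profile", profile, NULL);

  return ebin;
}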
Example #3
static GstEncodingContainerProfile *
kms_recording_profile_create_webm_profile (gboolean has_audio,
    gboolean has_video)
{
  GstEncodingContainerProfile *cprof;
  GstCaps *pc;

  if (has_video)
    pc = gst_caps_from_string ("video/webm");
  else
    pc = gst_caps_from_string ("audio/webm");

  cprof = gst_encoding_container_profile_new ("Webm", NULL, pc, NULL);
  gst_caps_unref (pc);

  if (has_audio) {
    GstCaps *ac = gst_caps_from_string ("audio/x-opus");

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_audio_profile_new (ac, NULL, NULL, 0));

    gst_caps_unref (ac);
  }

  if (has_video) {
    GstCaps *vc = gst_caps_from_string ("video/x-vp8");

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_video_profile_new (vc, NULL, NULL, 0));

    gst_caps_unref (vc);
  }

  return cprof;
}
Example #4
static GstEncodingContainerProfile *
kms_recording_profile_create_mp4_profile (gboolean has_audio,
    gboolean has_video)
{
  GstEncodingContainerProfile *cprof;
  GstCaps *pc;

  pc = gst_caps_from_string ("video/quicktime, variant=(string)iso");

  cprof = gst_encoding_container_profile_new ("Mp4", NULL, pc, NULL);
  gst_caps_unref (pc);

  if (has_audio) {
    GstCaps *ac = gst_caps_from_string ("audio/mpeg,mpegversion=1,layer=3");

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_audio_profile_new (ac, NULL, NULL, 0));

    gst_caps_unref (ac);
  }

  if (has_video) {
    GstCaps *vc = gst_caps_from_string ("video/x-h264, "
        "stream-format=(string)avc, alignment=(string)au");

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_video_profile_new (vc, NULL, NULL, 0));

    gst_caps_unref (vc);
  }

  return cprof;
}
GstEncodingProfile *
profile_get_encoding_profile (GstCaps * settings)
{
  GstEncodingContainerProfile *prof;
  GstCaps *caps;

  caps = gst_caps_from_string (profile[0]);
  prof =
      gst_encoding_container_profile_new ("Profile", "A web video profile",
      caps, NULL);
  gst_caps_unref (caps);

  caps = gst_caps_from_string (profile[1]);

  gst_encoding_container_profile_add_profile (prof,
      (GstEncodingProfile *) gst_encoding_video_profile_new (caps, NULL,
          settings, 0));
  gst_caps_unref (caps);
  gst_caps_unref (settings);

  caps = gst_caps_from_string (profile[2]);
  settings = gst_caps_from_string ("audio/x-raw");
  gst_encoding_container_profile_add_profile (prof,
      (GstEncodingProfile *) gst_encoding_audio_profile_new (caps, NULL,
          settings, 0));
  gst_caps_unref (caps);
  gst_caps_unref (settings);

  return (GstEncodingProfile *) prof;
}
Example #6
static GstEncodingProfile *
make_encoding_profile (gchar * audio, gchar * video, gchar * video_restriction,
    gchar * audio_preset, gchar * video_preset, gchar * container)
{
  GstEncodingContainerProfile *profile;
  GstEncodingProfile *stream;
  GstCaps *caps;

  caps = gst_caps_from_string (container);
  profile =
      gst_encoding_container_profile_new ((gchar *) "ges-test4", NULL, caps,
      NULL);
  gst_caps_unref (caps);

  if (audio) {
    caps = gst_caps_from_string (audio);
    stream = (GstEncodingProfile *)
        gst_encoding_audio_profile_new (caps, audio_preset, NULL, 0);
    gst_encoding_container_profile_add_profile (profile, stream);
    gst_caps_unref (caps);
  }

  if (video) {
    caps = gst_caps_from_string (video);
    stream = (GstEncodingProfile *)
        gst_encoding_video_profile_new (caps, video_preset, NULL, 0);
    if (video_restriction)
      gst_encoding_profile_set_restriction (stream,
          gst_caps_from_string (video_restriction));
    gst_encoding_container_profile_add_profile (profile, stream);
    gst_caps_unref (caps);
  }

  return (GstEncodingProfile *) profile;
}
Example #7
static GstEncodingContainerProfile *
kms_recording_profile_create_ksr_profile (gboolean has_audio,
    gboolean has_video)
{
  GstEncodingContainerProfile *cprof;
  GstPadTemplate *templ;
  GstElement *mux;
  GstCaps *pc;

  pc = gst_caps_from_string ("application/x-ksr");
  cprof = gst_encoding_container_profile_new ("Ksr", NULL, pc, NULL);
  gst_caps_unref (pc);

  /* Use matroska caps to define this profile */
  mux = gst_element_factory_make ("matroskamux", NULL);

  if (has_audio) {
    GstCaps *ac;

    templ =
        gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mux),
        "audio_%u");
    ac = gst_pad_template_get_caps (templ);

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_audio_profile_new (ac, NULL, NULL, 0));

    gst_caps_unref (ac);
  }

  if (has_video) {
    GstCaps *vc;

    templ =
        gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mux),
        "video_%u");
    vc = gst_pad_template_get_caps (templ);

    gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
        gst_encoding_video_profile_new (vc, NULL, NULL, 0));

    gst_caps_unref (vc);
  }

  g_object_unref (mux);

  return cprof;
}
static GstEncodingProfile *
create_profile (GstCaps * cf, GstCaps * vf, GstCaps * af)
{
    GstEncodingContainerProfile *cprof = NULL;

    cprof =
        gst_encoding_container_profile_new ((gchar *) "test-application-profile",
                                            NULL, cf, NULL);

    if (vf)
        gst_encoding_container_profile_add_profile (cprof,
                (GstEncodingProfile *) gst_encoding_video_profile_new (vf,
                        NULL, NULL, 0));
    if (af)
        gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
                gst_encoding_audio_profile_new (af, NULL, NULL, 0));

    /* Let's print out some info */
    if (!silent) {
        gchar *desc = gst_pb_utils_get_codec_description (cf);
        gchar *cd = gst_caps_to_string (cf);
        g_print ("Encoding parameters\n");
        g_print ("  Container format : %s (%s)\n", desc, cd);
        g_free (desc);
        g_free (cd);
        if (vf) {
            desc = gst_pb_utils_get_codec_description (vf);
            cd = gst_caps_to_string (vf);
            g_print ("  Video format : %s (%s)\n", desc, cd);
            g_free (desc);
            g_free (cd);
        }
        if (af) {
            desc = gst_pb_utils_get_codec_description (af);
            cd = gst_caps_to_string (af);
            g_print ("  Audio format : %s (%s)\n", desc, cd);
            g_free (desc);
            g_free (cd);
        }
    }

    return (GstEncodingProfile *) cprof;
}
Example #9
static GstEncodingTarget *
create_saveload_target (const gchar * targetname)
{
  GstEncodingTarget *target;
  GstEncodingProfile *profile, *sprof;
  GstCaps *caps, *caps2;

  GST_DEBUG ("Creating target");

  target = gst_encoding_target_new (targetname, "herding",
      "Plenty of pony glitter profiles", NULL);
  caps = gst_caps_from_string ("animal/x-pony");
  profile =
      (GstEncodingProfile *) gst_encoding_container_profile_new ("pony",
      "I don't want a description !", caps, NULL);
  gst_caps_unref (caps);
  gst_encoding_target_add_profile (target, profile);

  caps = gst_caps_from_string ("audio/x-pony-song,pretty=True");
  caps2 = gst_caps_from_string ("audio/x-raw-int,channels=1,rate=44100");
  sprof =
      (GstEncodingProfile *) gst_encoding_audio_profile_new (caps, NULL, caps2,
      1);
  gst_encoding_container_profile_add_profile ((GstEncodingContainerProfile *)
      profile, sprof);
  gst_caps_unref (caps);
  gst_caps_unref (caps2);

  caps = gst_caps_from_string ("video/x-glitter,sparkling=True");
  caps2 =
      gst_caps_from_string
      ("video/x-raw-yuv,width=640,height=480,framerate=15/1");
  sprof = (GstEncodingProfile *)
      gst_encoding_video_profile_new (caps, "seriously glittery", caps2, 0);
  gst_encoding_video_profile_set_variableframerate ((GstEncodingVideoProfile *)
      sprof, TRUE);
  gst_encoding_container_profile_add_profile ((GstEncodingContainerProfile *)
      profile, sprof);
  gst_caps_unref (caps);
  gst_caps_unref (caps2);

  return target;
}
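The target returned above can be round-tripped through the on-disk encoding-target registry. A minimal sketch, assuming the create_saveload_target() helper above; gst_encoding_target_save() writes to the user's local profile directory and gst_encoding_target_load() looks a target up by name (the category may be NULL):

/* Minimal sketch: save the target, reload it and fetch the "pony" container
 * profile it contains. Error handling is reduced to the bare minimum. */
static GstEncodingProfile *
save_and_reload_pony_profile (void)
{
  GError *error = NULL;
  GstEncodingTarget *target, *reloaded;
  GstEncodingProfile *profile = NULL;

  target = create_saveload_target ("myponytarget");

  if (!gst_encoding_target_save (target, &error)) {
    GST_ERROR ("Could not save target: %s", error->message);
    g_clear_error (&error);
  } else {
    reloaded = gst_encoding_target_load ("myponytarget", NULL, &error);
    if (reloaded) {
      profile = gst_encoding_target_get_profile (reloaded, "pony");
      g_object_unref (reloaded);
    } else {
      g_clear_error (&error);
    }
  }

  g_object_unref (target);

  return profile;
}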
static GstEncodingProfile *
create_webm_profile (void)
{
  GstEncodingContainerProfile *container;
  GstCaps *caps = NULL;

  caps = gst_caps_new_empty_simple ("video/webm");
  container = gst_encoding_container_profile_new ("webm", NULL, caps, NULL);
  gst_caps_unref (caps);

  caps = gst_caps_new_empty_simple ("video/x-vp8");
  gst_encoding_container_profile_add_profile (container, (GstEncodingProfile *)
      gst_encoding_video_profile_new (caps, NULL, NULL, 1));
  gst_caps_unref (caps);

  caps = gst_caps_new_empty_simple ("audio/x-vorbis");
  gst_encoding_container_profile_add_profile (container, (GstEncodingProfile *)
      gst_encoding_audio_profile_new (caps, NULL, NULL, 1));
  gst_caps_unref (caps);

  return (GstEncodingProfile *) container;
}
static GstEncodingProfile *
create_mp4_profile (void)
{
  GstEncodingContainerProfile *container;
  GstCaps *caps = NULL;

  caps =
      gst_caps_new_simple ("video/quicktime", "variant", G_TYPE_STRING, "iso",
      NULL);
  container = gst_encoding_container_profile_new ("mp4", NULL, caps, NULL);
  gst_caps_unref (caps);

  caps = gst_caps_new_empty_simple ("video/x-h264");
  gst_encoding_container_profile_add_profile (container, (GstEncodingProfile *)
      gst_encoding_video_profile_new (caps, NULL, NULL, 1));
  gst_caps_unref (caps);

  caps = gst_caps_new_simple ("audio/mpeg", "version", G_TYPE_INT, 4, NULL);
  gst_encoding_container_profile_add_profile (container, (GstEncodingProfile *)
      gst_encoding_audio_profile_new (caps, NULL, NULL, 1));
  gst_caps_unref (caps);

  return (GstEncodingProfile *) container;
}
static GstEncodingProfile *
make_profile_from_info (GstDiscovererInfo * info)
{
  GstEncodingContainerProfile *profile = NULL;
  GstDiscovererStreamInfo *sinfo = gst_discoverer_info_get_stream_info (info);

  /* Get the container format */
  if (GST_IS_DISCOVERER_CONTAINER_INFO (sinfo)) {
    GList *tmp, *substreams;

    profile = gst_encoding_container_profile_new ((gchar *) "concatenate", NULL,
        gst_discoverer_stream_info_get_caps (sinfo), NULL);

    substreams =
        gst_discoverer_container_info_get_streams ((GstDiscovererContainerInfo
            *) sinfo);

    /* For each on the formats add stream profiles */
    for (tmp = substreams; tmp; tmp = tmp->next) {
      GstDiscovererStreamInfo *stream = GST_DISCOVERER_STREAM_INFO (tmp->data);
      GstEncodingProfile *sprof = NULL;

      if (GST_IS_DISCOVERER_VIDEO_INFO (stream)) {
        sprof = (GstEncodingProfile *)
            gst_encoding_video_profile_new (gst_discoverer_stream_info_get_caps
            (stream), NULL, NULL, 1);
      } else if (GST_IS_DISCOVERER_AUDIO_INFO (stream)) {
        sprof = (GstEncodingProfile *)
            gst_encoding_audio_profile_new (gst_discoverer_stream_info_get_caps
            (stream), NULL, NULL, 1);
      } else {
        GST_WARNING ("Unsupported streams");
      }

      if (sprof)
        gst_encoding_container_profile_add_profile (profile, sprof);
    }
    if (substreams)
      gst_discoverer_stream_info_list_free (substreams);
  } else {
    GST_ERROR ("No container format !!!");
  }

  if (sinfo)
    gst_discoverer_stream_info_unref (sinfo);

  return GST_ENCODING_PROFILE (profile);
}
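A profile like this is usually derived from a GstDiscovererInfo obtained by running GstDiscoverer on an existing file or stream. A minimal sketch, assuming the make_profile_from_info() helper above; the 5-second timeout and the synchronous discover call are just one way to drive the discoverer:

static GstEncodingProfile *
profile_from_uri (const gchar * uri)
{
  GError *error = NULL;
  GstDiscoverer *discoverer;
  GstDiscovererInfo *info;
  GstEncodingProfile *profile = NULL;

  discoverer = gst_discoverer_new (5 * GST_SECOND, &error);
  if (discoverer == NULL) {
    GST_ERROR ("Could not create discoverer: %s", error->message);
    g_clear_error (&error);
    return NULL;
  }

  info = gst_discoverer_discover_uri (discoverer, uri, &error);
  if (info) {
    profile = make_profile_from_info (info);
    g_object_unref (info);
  } else {
    g_clear_error (&error);
  }

  g_object_unref (discoverer);

  return profile;
}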
static GstEncodingProfile *
create_ogg_vorbis_profile (guint presence, gchar * preset)
{
  GstEncodingContainerProfile *cprof;
  GstCaps *ogg, *vorbis;

  ogg = gst_caps_new_empty_simple ("application/ogg");
  cprof =
      gst_encoding_container_profile_new ((gchar *) "myprofile", NULL, ogg,
      NULL);
  gst_caps_unref (ogg);

  vorbis = gst_caps_new_empty_simple ("audio/x-vorbis");
  fail_unless (gst_encoding_container_profile_add_profile (cprof,
          (GstEncodingProfile *) gst_encoding_audio_profile_new (vorbis, preset,
              NULL, presence)));
  gst_caps_unref (vorbis);

  return (GstEncodingProfile *) cprof;
}
Example #14
static GstEncodingContainerProfile *
kms_recording_profile_create_jpeg_profile (void)
{
  GstEncodingContainerProfile *cprof;
  GstCaps *vc;
  GstCaps *pc;

  pc = gst_caps_from_string ("image/jpeg");
  cprof = gst_encoding_container_profile_new ("jpeg", NULL, pc, NULL);
  gst_caps_unref (pc);

  vc = gst_caps_from_string ("image/jpeg");

  gst_encoding_container_profile_add_profile (cprof, (GstEncodingProfile *)
      gst_encoding_video_profile_new (vc, NULL, NULL, 0));

  gst_caps_unref (vc);

  return cprof;
}
static GstEncodingProfile *
parse_encoding_profile (const gchar * value)
{
  GstEncodingProfile *res;
  gchar **strpresence_v, **strcaps_v = g_strsplit (value, ":", 0);
  guint i;

  if (strcaps_v[0] && *strcaps_v[0]) {
    GstCaps *caps = gst_caps_from_string (strcaps_v[0]);
    if (caps == NULL) {
      GST_ERROR ("Could not parse caps %s", strcaps_v[0]);
      return NULL;
    }
    res =
        GST_ENCODING_PROFILE (gst_encoding_container_profile_new
        ("User profile", "User profile", caps, NULL));
    gst_caps_unref (caps);
  } else {
    res = NULL;
  }

  for (i = 1; strcaps_v[i] && *strcaps_v[i]; i++) {
    GstEncodingProfile *profile = NULL;
    gchar *strcaps, *strpresence;
    gchar *preset_name = NULL;
    GstCaps *caps;
    gchar **restriction_format, **preset_v;
    guint presence = 0;
    GstCaps *restrictioncaps = NULL;

    restriction_format = g_strsplit (strcaps_v[i], "->", 0);
    if (restriction_format[1]) {
      restrictioncaps = gst_caps_from_string (restriction_format[0]);
      strcaps = g_strdup (restriction_format[1]);
    } else {
      restrictioncaps = NULL;
      strcaps = g_strdup (restriction_format[0]);
    }
    g_strfreev (restriction_format);

    preset_v = g_strsplit (strcaps, "+", 0);
    if (preset_v[1]) {
      strpresence = preset_v[1];
      g_free (strcaps);
      strcaps = g_strdup (preset_v[0]);
    } else {
      strpresence = preset_v[0];
    }

    strpresence_v = g_strsplit (strpresence, "|", 0);
    if (strpresence_v[1]) {     /* We have a presence */
      gchar *endptr;

      if (preset_v[1]) {        /* We have preset and presence */
        preset_name = g_strdup (strpresence_v[0]);
      } else {                  /* We have a presence but no preset */
        g_free (strcaps);
        strcaps = g_strdup (strpresence_v[0]);
      }

      presence = g_ascii_strtoll (strpresence_v[1], &endptr, 10);
      if (endptr == strpresence_v[1]) {
        GST_ERROR ("Wrong presence %s\n", strpresence_v[1]);

        return NULL;
      }
    } else {                    /* We have no presence */
      if (preset_v[1]) {        /* Not presence but preset */
        preset_name = g_strdup (preset_v[1]);
        g_free (strcaps);
        strcaps = g_strdup (preset_v[0]);
      }                         /* Else we have no presence nor preset */
    }
    g_strfreev (strpresence_v);
    g_strfreev (preset_v);

    GST_DEBUG ("Creating preset with restrictions: %" GST_PTR_FORMAT
        ", caps: %s, preset %s, presence %d", restrictioncaps, strcaps,
        preset_name ? preset_name : "none", presence);

    caps = gst_caps_from_string (strcaps);
    g_free (strcaps);
    if (caps == NULL) {
      g_warning ("Could not create caps for %s", strcaps_v[i]);

      return NULL;
    }

    if (g_str_has_prefix (strcaps_v[i], "audio/")) {
      profile = GST_ENCODING_PROFILE (gst_encoding_audio_profile_new (caps,
              preset_name, restrictioncaps, presence));
    } else if (g_str_has_prefix (strcaps_v[i], "video/") ||
        g_str_has_prefix (strcaps_v[i], "image/")) {
      profile = GST_ENCODING_PROFILE (gst_encoding_video_profile_new (caps,
              preset_name, restrictioncaps, presence));
    }

    g_free (preset_name);
    gst_caps_unref (caps);
    if (restrictioncaps)
      gst_caps_unref (restrictioncaps);

    if (profile == NULL) {
      g_warning ("No way to create a preset for caps: %s", strcaps_v[i]);

      return NULL;
    }

    if (res) {
      if (!gst_encoding_container_profile_add_profile
          (GST_ENCODING_CONTAINER_PROFILE (res), profile)) {
        g_warning ("Can not create a preset for caps: %s", strcaps_v[i]);

        return NULL;
      }
    } else {
      res = profile;
    }
  }
  g_strfreev (strcaps_v);

  return res;
}
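For reference, the syntax this parser accepts is roughly container-caps:[restriction->]stream-caps[+preset][|presence]:... A minimal usage sketch, assuming the parse_encoding_profile() helper above; the concrete caps strings are only illustrative:

/* Build a WebM profile with one VP8 stream restricted to 1280x720 raw input
 * and a single Vorbis audio stream, using the serialized syntax above. */
static GstEncodingProfile *
make_default_webm_profile (void)
{
  return parse_encoding_profile ("video/webm:"
      "video/x-raw,width=1280,height=720->video/x-vp8:" "audio/x-vorbis|1");
}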
Example #16
/*!
 * \brief CvVideoWriter_GStreamer::open
 * \param filename filename to output to
 * \param fourcc desired codec fourcc
 * \param fps desired framerate
 * \param frameSize the size of the expected frames
 * \param is_color color or grayscale
 * \return success
 *
 * We support two modes of operation. Either the user enters a filename and a
 * fourcc code, or enters a manual pipeline description like in
 * CvVideoCapture_Gstreamer. In the latter case, we just push frames on the
 * appsrc with the appropriate caps. In the former case, we try to deduce the
 * correct container from the filename, and the correct encoder from the
 * fourcc code.
 *
 * If the file extension is not recognized, an AVI container is used.
 *
 */
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);

    // init gstreamer
    gst_initializer::init();

    // init vars
    bool manualpipeline = true;
    int  bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;

    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif

    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;

    // we first try to construct a pipeline from the given string.
    // if that fails, we assume it is an ordinary filename

    __BEGIN__;

    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);

    if(manualpipeline)
    {
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                  source = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);

        if (!source){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);

        // we just got a filename and a fourcc code.
        // first, try to guess the container from the filename
        //encodebin = gst_element_factory_make("encodebin", NULL);

        //proxy old non existing fourcc ids. These were used in previous opencv versions,
        //but do not even exist in gstreamer any more
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');


        //create encoder caps from fourcc

        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
        }

        //create container caps from file extension
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
        }

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);

        //create encodebin profile
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif

        //create pipeline elements
        encodebin = gst_element_factory_make("encodebin", NULL);

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }

    if (is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);

#endif

    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }

    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);

    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);


    if(!manualpipeline)
    {
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }

#if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from the
    // encodebin pipeline to prevent early EOS event handling.
    // We always push BGR or gray-scale frames, so the combiner->splitter
    // edge in the graph is useless.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
      switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
          name = gst_element_get_name(element);
          if (strstr(name, "streamsplitter"))
            splitter = element;
          else if (strstr(name, "streamcombiner"))
            combiner = element;
          break;
        case GST_ITERATOR_RESYNC:
          gst_iterator_resync (it);
          break;
        case GST_ITERATOR_ERROR:
          done = true;
          break;
        case GST_ITERATOR_DONE:
          done = true;
          break;
      }
    }

    gst_iterator_free (it);

    if (splitter && combiner)
    {
        gst_element_unlink(splitter, combiner);

        GstPad* src  = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");

        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);

        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");

        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);
    }
#endif

    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret  == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }

    framerate = fps;
    num_frames = 0;

    handleMessage(pipeline);

    __END__;

    return true;
}
/**
 * parse_encoding_profile:
 * @in: a #GKeyFile
 * @parentprofilename: the parent profile name (including 'profile-' or 'streamprofile-' header)
 * @profilename: the profile name group to parse
 * @nbgroups: the number of top-level groups
 * @groups: the top-level groups
 */
static GstEncodingProfile *
parse_encoding_profile (GKeyFile * in, gchar * parentprofilename,
    gchar * profilename, gsize nbgroups, gchar ** groups)
{
  GstEncodingProfile *sprof = NULL;
  gchar **parent;
  gchar *proftype, *format, *preset, *restriction, *pname, *description,
      *locale;
  GstCaps *formatcaps = NULL;
  GstCaps *restrictioncaps = NULL;
  gboolean variableframerate;
  gint pass, presence;
  gsize i, nbencprofiles;

  GST_DEBUG ("parentprofilename : %s , profilename : %s",
      parentprofilename, profilename);

  if (parentprofilename) {
    gboolean found = FALSE;

    parent =
        g_key_file_get_string_list (in, profilename, "parent",
        &nbencprofiles, NULL);
    if (!parent || !nbencprofiles) {
      return NULL;
    }

    /* Check if this streamprofile is used in <profilename> */
    for (i = 0; i < nbencprofiles; i++) {
      if (!g_strcmp0 (parent[i], parentprofilename)) {
        found = TRUE;
        break;
      }
    }
    g_strfreev (parent);

    if (!found) {
      GST_DEBUG ("Stream profile '%s' isn't used in profile '%s'",
          profilename, parentprofilename);
      return NULL;
    }
  }

  pname = g_key_file_get_value (in, profilename, "name", NULL);

  locale = get_locale ();
  /* will try to fall back to untranslated string if no translation found */
  description = g_key_file_get_locale_string (in, profilename,
      "description", locale, NULL);
  g_free (locale);

  /* Note: a missing description is normal for non-container profiles */
  if (description == NULL) {
    GST_LOG ("Missing 'description' field for streamprofile %s", profilename);
  }

  /* Parse the remaining fields */
  proftype = g_key_file_get_value (in, profilename, "type", NULL);
  if (!proftype) {
    GST_WARNING ("Missing 'type' field for streamprofile %s", profilename);
    return NULL;
  }

  format = g_key_file_get_value (in, profilename, "format", NULL);
  if (format) {
    formatcaps = gst_caps_from_string (format);
    g_free (format);
  }

  preset = g_key_file_get_value (in, profilename, "preset", NULL);

  restriction = g_key_file_get_value (in, profilename, "restriction", NULL);
  if (restriction) {
    restrictioncaps = gst_caps_from_string (restriction);
    g_free (restriction);
  }

  presence = g_key_file_get_integer (in, profilename, "presence", NULL);
  pass = g_key_file_get_integer (in, profilename, "pass", NULL);
  variableframerate =
      g_key_file_get_boolean (in, profilename, "variableframerate", NULL);

  /* Build the streamprofile ! */
  if (!g_strcmp0 (proftype, "container")) {
    GstEncodingProfile *pprof;

    sprof =
        (GstEncodingProfile *) gst_encoding_container_profile_new (pname,
        description, formatcaps, preset);
    /* Now look for the stream profiles */
    for (i = 0; i < nbgroups; i++) {
      if (!g_ascii_strncasecmp (groups[i], "streamprofile-", 13)) {
        pprof = parse_encoding_profile (in, pname, groups[i], nbgroups, groups);
        if (pprof) {
          gst_encoding_container_profile_add_profile (
              (GstEncodingContainerProfile *) sprof, pprof);
        }
      }
    }
  } else if (!g_strcmp0 (proftype, "video")) {
    sprof =
        (GstEncodingProfile *) gst_encoding_video_profile_new (formatcaps,
        preset, restrictioncaps, presence);
    gst_encoding_video_profile_set_variableframerate ((GstEncodingVideoProfile
            *) sprof, variableframerate);
    gst_encoding_video_profile_set_pass ((GstEncodingVideoProfile *) sprof,
        pass);
    gst_encoding_profile_set_name (sprof, pname);
    gst_encoding_profile_set_description (sprof, description);
  } else if (!g_strcmp0 (proftype, "audio")) {
    sprof =
        (GstEncodingProfile *) gst_encoding_audio_profile_new (formatcaps,
        preset, restrictioncaps, presence);
    gst_encoding_profile_set_name (sprof, pname);
    gst_encoding_profile_set_description (sprof, description);
  } else
    GST_ERROR ("Unknown profile format '%s'", proftype);

  if (restrictioncaps)
    gst_caps_unref (restrictioncaps);
  if (formatcaps)
    gst_caps_unref (formatcaps);

  g_free (pname);
  g_free (description);
  g_free (preset);
  g_free (proftype);

  return sprof;
}
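A sketch of how this parser might be driven from a stand-alone key file. The group names follow the encoding-target conventions the code above looks for ("profile-" containers referenced by "streamprofile-" entries via their parent key), but the file layout in the comment and the group name passed in are only an illustration:

/* Hypothetical key file layout:
 *
 *   [profile-myprofile]
 *   name=myprofile
 *   type=container
 *   format=application/ogg
 *
 *   [streamprofile-audio0]
 *   parent=myprofile
 *   type=audio
 *   format=audio/x-vorbis
 *   presence=1
 */
static GstEncodingProfile *
load_profile_from_keyfile (const gchar * path)
{
  GKeyFile *in = g_key_file_new ();
  GstEncodingProfile *prof = NULL;
  gchar **groups;
  gsize nbgroups;

  if (g_key_file_load_from_file (in, path, G_KEY_FILE_NONE, NULL)) {
    groups = g_key_file_get_groups (in, &nbgroups);
    prof = parse_encoding_profile (in, NULL, (gchar *) "profile-myprofile",
        nbgroups, groups);
    g_strfreev (groups);
  }

  g_key_file_free (in);

  return prof;
}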
static gboolean
add_stream_to_profile (GstEncodingContainerProfile * profile,
    GstDiscovererStreamInfo * sinfo)
{
  GstEncodingProfile *sprofile = NULL;
  GstStructure *s;
  GstCaps *caps;

  caps = gst_discoverer_stream_info_get_caps (sinfo);

  /* Should unify this with copy_and_clean_caps() */
  s = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_field (s, "codec_data")
      || gst_structure_has_field (s, "streamheader")
      || gst_structure_has_field (s, "parsed")
      || gst_structure_has_field (s, "framed")
      || gst_structure_has_field (s, "stream-format")
      || gst_structure_has_field (s, "alignment")) {
    caps = gst_caps_make_writable (caps);
    s = gst_caps_get_structure (caps, 0);
    gst_structure_remove_field (s, "codec_data");
    gst_structure_remove_field (s, "streamheader");
    gst_structure_remove_field (s, "parsed");
    gst_structure_remove_field (s, "framed");
    gst_structure_remove_field (s, "stream-format");
    gst_structure_remove_field (s, "alignment");
  }

  GST_LOG ("Stream: %" GST_PTR_FORMAT "\n", caps);
  if (GST_IS_DISCOVERER_AUDIO_INFO (sinfo)) {
    sprofile =
        (GstEncodingProfile *) gst_encoding_audio_profile_new (caps, NULL,
        NULL, 0);
  } else if (GST_IS_DISCOVERER_VIDEO_INFO (sinfo)) {
    sprofile =
        (GstEncodingProfile *) gst_encoding_video_profile_new (caps, NULL,
        NULL, 0);
  } else if (GST_IS_DISCOVERER_CONTAINER_INFO (sinfo)) {
    GList *streams, *stream;
    guint n_streams = 0;

    streams =
        gst_discoverer_container_info_get_streams (GST_DISCOVERER_CONTAINER_INFO
        (sinfo));
    for (stream = streams; stream; stream = stream->next) {
      if (add_stream_to_profile (profile,
              (GstDiscovererStreamInfo *) stream->data))
        n_streams++;
    }
    gst_discoverer_stream_info_list_free (streams);
    gst_caps_unref (caps);

    return n_streams != 0;
  } else {
    GST_WARNING ("Ignoring stream of type '%s'",
        g_type_name (G_OBJECT_TYPE (sinfo)));
    /* subtitles or other ? ignore for now */
  }
  if (sprofile)
    gst_encoding_container_profile_add_profile (profile, sprofile);
  else
    GST_ERROR ("Failed to create stream profile from caps %" GST_PTR_FORMAT,
        caps);
  gst_caps_unref (caps);

  return sprofile != NULL;
}
void
ges_base_xml_formatter_add_encoding_profile (GESBaseXmlFormatter * self,
    const gchar * type, const gchar * parent, const gchar * name,
    const gchar * description, GstCaps * format, const gchar * preset,
    const gchar * preset_name, guint id, guint presence, GstCaps * restriction,
    guint pass, gboolean variableframerate, GstStructure * properties,
    GError ** error)
{
  const GList *tmp;
  GstEncodingProfile *profile;
  GstEncodingContainerProfile *parent_profile = NULL;
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);

  if (priv->check_only)
    goto done;

  if (parent == NULL) {
    profile =
        _create_profile (self, type, parent, name, description, format, preset,
        preset_name, id, presence, restriction, pass, variableframerate);
    ges_project_add_encoding_profile (GES_FORMATTER (self)->project, profile);
    gst_object_unref (profile);

    goto done;
  }

  for (tmp = ges_project_list_encoding_profiles (GES_FORMATTER (self)->project);
      tmp; tmp = tmp->next) {
    GstEncodingProfile *tmpprofile = GST_ENCODING_PROFILE (tmp->data);

    if (g_strcmp0 (gst_encoding_profile_get_name (tmpprofile),
            parent) == 0) {

      if (!GST_IS_ENCODING_CONTAINER_PROFILE (tmpprofile)) {
        g_set_error (error, G_MARKUP_ERROR, G_MARKUP_ERROR_INVALID_CONTENT,
            "Profile '%s' parent %s is not a container...'", name, parent);
        goto done;
      }

      parent_profile = GST_ENCODING_CONTAINER_PROFILE (tmpprofile);
      break;
    }
  }

  if (parent_profile == NULL) {
    g_set_error (error, G_MARKUP_ERROR, G_MARKUP_ERROR_INVALID_CONTENT,
        "Profile '%s' parent %s does not exist'", name, parent);
    goto done;
  }

  profile =
      _create_profile (self, type, parent, name, description, format, preset,
      preset_name, id, presence, restriction, pass, variableframerate);

  if (profile == NULL)
    goto done;

  gst_encoding_container_profile_add_profile (parent_profile, profile);

done:
  if (format)
    gst_caps_unref (format);
  if (restriction)
    gst_caps_unref (restriction);
}