void QDeclarativeVideoEditor::cancelRender()
{
    qDebug() << "Cancelling rendering operation";
    gst_element_set_state (GST_ELEMENT (m_pipeline), GST_STATE_PAUSED);
    setProgress(-1.0);
    m_rendering = false;
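    // Drop back to preview mode so the pipeline remains usable for playback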
    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
}
Example #2
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionEntry options[] = {
    {NULL}
  };
  GOptionContext *ctx;
  GMainLoop *mainloop;
  GstBus *bus;

  ctx = g_option_context_new ("tests thumbnail support (produces no output)");
  g_option_context_set_summary (ctx, "");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing: %s\n", err->message);
    g_option_context_free (ctx);
    exit (1);
  }

  g_option_context_free (ctx);
  /* Initialize the GStreamer Editing Services */
  ges_init ();

  /* Create the pipeline */
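  /* "pipeline" is a file-scope GESTimelinePipeline * declared elsewhere in this example */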
  pipeline = create_timeline ();
  if (!pipeline)
    exit (-1);

  ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW);

  /* Play the pipeline */
  mainloop = g_main_loop_new (NULL, FALSE);

  g_print ("thumbnailing every 1 seconds\n");
  g_timeout_add (1000, thumbnail_cb, pipeline);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop);

  if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_print ("Failed to start the encoding\n");
    return 1;
  }
  g_main_loop_run (mainloop);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
bool QDeclarativeVideoEditor::render()
{
    // Sanity check: refuse to render an empty timeline
    if (m_size < 1) {
        emit error(NO_MEDIA, "No media added to the timeline");
        return false;
    }

    qDebug() << "Render preparations started";

    QString output_uri = "file:///home/user/MyDocs/Movies/" + getDateTimeString() + ".mp4";

    GstEncodingProfile *profile = createEncodingProfile();
    if (!ges_timeline_pipeline_set_render_settings (m_pipeline, output_uri.toUtf8().data(), profile)) {
        emit error(RENDERING_FAILED, "Failed setting rendering options");
        gst_encoding_profile_unref(profile);
        return false;
    }
    gst_encoding_profile_unref (profile);

    if (!ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_RENDER)) {
        emit error(RENDERING_FAILED, "Failed to set rendering mode");
        return false;
    }

    qDebug() << "Rendering to " << output_uri;

    // reset duration and progress
    setDuration(GST_CLOCK_TIME_NONE);
    setProgress(0.0);
    qDebug() << "Starting progress polling";

    m_positionTimer.start(500);
    //g_timeout_add (500, updateProgress, this);

    m_rendering = true;
    if (gst_element_set_state (GST_ELEMENT (m_pipeline),
            GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state (GST_ELEMENT (m_pipeline), GST_STATE_NULL);

        m_rendering = false;
        emit error(RENDERING_FAILED, "Failed to set pipeline to playing state");
        return false;
    }
    return true;
}
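The slot driven by m_positionTimer, updatePosition(), is not part of this listing. A minimal sketch of what it might do, assuming the GStreamer 0.10 position-query API that the rest of this code targets (illustrative only, not the project's actual implementation):

void QDeclarativeVideoEditor::updatePosition()
{
    GstFormat format = GST_FORMAT_TIME;
    gint64 position = 0;

    /* Poll the pipeline position and derive a 0.0 - 1.0 progress value */
    if (gst_element_query_position (GST_ELEMENT (m_pipeline), &format, &position)
            && m_duration != GST_CLOCK_TIME_NONE && m_duration > 0) {
        setProgress ((gdouble) position / (gdouble) m_duration);
        emit progressChanged();
    }
}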
static void
ges_timeline_pipeline_init (GESTimelinePipeline * self)
{
  GstElementClass *playsinkclass;

  GST_INFO_OBJECT (self, "Creating new 'playsink'");
  self->priv = G_TYPE_INSTANCE_GET_PRIVATE (self,
      GES_TYPE_TIMELINE_PIPELINE, GESTimelinePipelinePrivate);

  self->priv->playsink =
      gst_element_factory_make ("playsink", "internal-sinks");
  self->priv->encodebin =
      gst_element_factory_make ("encodebin", "internal-encodebin");
  /* Limit encodebin buffering to 1 buffer since we know the various
   * stream fed to it are decoupled already */
  g_object_set (self->priv->encodebin, "queue-buffers-max", (guint32) 1,
      "queue-bytes-max", (guint32) 0, "queue-time-max", (guint64) 0,
      "avoid-reencoding", TRUE, NULL);

  if (G_UNLIKELY (self->priv->playsink == NULL))
    goto no_playsink;
  if (G_UNLIKELY (self->priv->encodebin == NULL))
    goto no_encodebin;

  /* HACK : Intercept events going through playsink */
  playsinkclass = GST_ELEMENT_GET_CLASS (self->priv->playsink);
  /* Replace playsink's GstBin::send_event with our own */
  playsinkclass->send_event = play_sink_multiple_seeks_send_event;

  ges_timeline_pipeline_set_mode (self, DEFAULT_TIMELINE_MODE);

  return;

no_playsink:
  {
    GST_ERROR_OBJECT (self, "Can't create playsink instance !");
    return;
  }
no_encodebin:
  {
    GST_ERROR_OBJECT (self, "Can't create encodebin instance !");
    return;
  }
}
gboolean
QDeclarativeVideoEditor::handleBusMessage (GstBus *, GstMessage *msg)
{
    switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
        qDebug() << "End of stream";
        setProgress(1.0);
        emit progressChanged();
        gst_element_set_state (GST_ELEMENT (m_pipeline), GST_STATE_PAUSED);
        if (isRendering()) {
            m_rendering = false;
            ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
            emit renderComplete();
        }
        setProgress(-1.0);
        break;

    case GST_MESSAGE_ERROR: {
        gchar  *debug;
        GError *gerror;

        gst_message_parse_error (msg, &gerror, &debug);
        g_free (debug);

        qDebug() << "Error: " << gerror->message;
        if (isRendering()) {
            m_rendering = false;
            emit error(RENDERING_FAILED, gerror->message);
        } else {
            emit error(PLAYBACK_FAILED, gerror->message);
        }
        g_error_free (gerror);
        gst_element_set_state (GST_ELEMENT (m_pipeline), GST_STATE_NULL);
        setProgress(-1.0);
        break;
    }
    default:
        break;
    }
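    // Returning TRUE keeps this handler installed on the bus watch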

    return TRUE;
}
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this), m_rendering(false), m_size(0),
    m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
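    // Custom item-data role IDs exposed to the QML delegates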
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * The gst-dsp encoders do not seem to proxy downstream caps correctly,
     * which can make GES fail to render some projects, so we override the
     * default getcaps with our own.
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink),
                                        171, 0,
                                        512, 288);

    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    gst_element_set_state (GST_ELEMENT (m_pipeline), GST_STATE_PAUSED);
    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
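bus_call, installed as the bus watch above, is not included in this listing. A plausible minimal bridge, assuming it simply forwards every message to handleBusMessage (hypothetical; the project's real bus_call is defined elsewhere):

/* Hypothetical bridge: forwards GstBus messages from the C-style watch to
 * the C++ editor object passed as user data. */
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
    QDeclarativeVideoEditor *editor = static_cast<QDeclarativeVideoEditor *> (data);
    return editor->handleBusMessage (bus, msg);
}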
GESTimelinePipeline *
make_timeline (char *path, float duration, char *text, guint32 color,
    gdouble xpos, gdouble ypos)
{
  GESTimeline *timeline;
  GESTrack *trackv, *tracka;
  GESTimelineLayer *layer1;
  GESTimelineObject *srca;
  GESTimelineObject *overlay;
  GESTimelinePipeline *pipeline;
  guint64 aduration;

  pipeline = ges_timeline_pipeline_new ();

  ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW_VIDEO);

  timeline = ges_timeline_new ();
  ges_timeline_pipeline_add_timeline (pipeline, timeline);

  trackv = ges_track_video_raw_new ();
  ges_timeline_add_track (timeline, trackv);

  tracka = ges_track_audio_raw_new ();
  ges_timeline_add_track (timeline, tracka);

  layer1 = GES_TIMELINE_LAYER (ges_timeline_layer_new ());
  g_object_set (layer1, "priority", (gint32) 0, NULL);

  if (!ges_timeline_add_layer (timeline, layer1))
    exit (-1);

  aduration = (guint64) (duration * GST_SECOND);
  srca = make_source (path, 0, aduration, 1);
  overlay = make_overlay (text, 0, aduration, 0, color, xpos, ypos);
  ges_timeline_layer_add_object (layer1, srca);
  ges_timeline_layer_add_object (layer1, overlay);

  return pipeline;
}
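A caller might drive make_timeline along these lines (illustrative values; whether path is treated as a URI or a filesystem path depends on make_source, which is not shown here):

GESTimelinePipeline *pipeline =
    make_timeline ((char *) "file:///tmp/clip.ogv", 5.0, (char *) "Hello",
        0xff0000ff, 0.1, 0.1);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);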
Example #8
void
load_project (gchar * uri)
{
  GESFormatter *formatter;
  GESTimeline *timeline;
  GMainLoop *mainloop;
  GESTimelinePipeline *pipeline;
  GstBus *bus;

  formatter = GES_FORMATTER (ges_pitivi_formatter_new ());
  timeline = ges_timeline_new ();
  pipeline = ges_timeline_pipeline_new ();
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  mainloop = g_main_loop_new (NULL, FALSE);

  ges_timeline_pipeline_add_timeline (pipeline, timeline);
  ges_formatter_load_from_uri (formatter, timeline, uri);
  ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW_VIDEO);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop);
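  /* Blocks until bus_message_cb (defined elsewhere) quits the mainloop */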
  g_main_loop_run (mainloop);
}
Example #9
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  gchar *outputuri = NULL;
  gchar *container = (gchar *) "application/ogg";
  gchar *audio = (gchar *) "audio/x-vorbis";
  gchar *video = (gchar *) "video/x-theora";
  gchar *video_restriction = (gchar *) "ANY";
  gchar *audio_preset = NULL;
  gchar *video_preset = NULL;
  gchar *exclude_args = NULL;
  static gboolean render = FALSE;
  static gboolean smartrender = FALSE;
  static gboolean list_transitions = FALSE;
  static gboolean list_patterns = FALSE;
  static gdouble thumbinterval = 0;
  static gboolean verbose = FALSE;
  gchar *save_path = NULL;
  gchar *load_path = NULL;
  gchar *project_path = NULL;
  GOptionEntry options[] = {
    {"thumbnail", 'm', 0.0, G_OPTION_ARG_DOUBLE, &thumbinterval,
        "Take thumbnails every n seconds (saved in current directory)", "N"},
    {"render", 'r', 0, G_OPTION_ARG_NONE, &render,
        "Render to outputuri", NULL},
    {"smartrender", 's', 0, G_OPTION_ARG_NONE, &smartrender,
        "Render to outputuri, and avoid decoding/reencoding", NULL},
    {"outputuri", 'o', 0, G_OPTION_ARG_STRING, &outputuri,
        "URI to encode to", "URI (<protocol>://<location>)"},
    {"format", 'f', 0, G_OPTION_ARG_STRING, &container,
        "Container format", "<GstCaps>"},
    {"vformat", 'v', 0, G_OPTION_ARG_STRING, &video,
        "Video format", "<GstCaps>"},
    {"aformat", 'a', 0, G_OPTION_ARG_STRING, &audio,
        "Audio format", "<GstCaps>"},
    {"vrestriction", 'x', 0, G_OPTION_ARG_STRING, &video_restriction,
        "Video restriction", "<GstCaps>"},
    {"apreset", 0, 0, G_OPTION_ARG_STRING, &audio_preset,
        "Encoding audio profile preset", "<GstPresetName>"},
    {"vpreset", 0, 0, G_OPTION_ARG_STRING, &video_preset,
        "Encoding video profile preset", "<GstPresetName>"},
    {"repeat", 'l', 0, G_OPTION_ARG_INT, &repeat,
        "Number of time to repeat timeline", NULL},
    {"list-transitions", 't', 0, G_OPTION_ARG_NONE, &list_transitions,
        "List valid transition types and exit", NULL},
    {"list-patterns", 'p', 0, G_OPTION_ARG_NONE, &list_patterns,
        "List patterns and exit", NULL},
    {"save", 'z', 0, G_OPTION_ARG_STRING, &save_path,
        "Save project to file before rendering", "<path>"},
    {"load", 'q', 0, G_OPTION_ARG_STRING, &load_path,
        "Load project from file before rendering", "<path>"},
    {"verbose", 0, 0, G_OPTION_ARG_NONE, &verbose,
        "Output status information and property notifications", NULL},
    {"exclude", 'X', 0, G_OPTION_ARG_NONE, &exclude_args,
        "Do not output status information of TYPE", "TYPE1,TYPE2,..."},
    {"load-xptv", 'y', 0, G_OPTION_ARG_STRING, &project_path,
        "Load xptv project from file for previewing", "<path>"},
    {NULL}
  };
  GOptionContext *ctx;
  GMainLoop *mainloop;
  GstBus *bus;

  ctx = g_option_context_new ("- plays or renders a timeline.");
  g_option_context_set_summary (ctx,
      "ges-launch renders a timeline, which can be specified on the commandline,\n"
      "or loaded from a file using the -q option.\n\n"
      "A timeline is a list of files, patterns, and transitions to be rendered\n"
      "one after the other. Files and Patterns provide video and audio as the\n"
      "primary input, and transitions animate between the end of one file/pattern\n"
      "and the beginning of a new one. Hence, transitions can only be listed\n"
      "in between patterns or files.\n\n"
      "A file is a triplet of filename, inpoint (in seconds) and\n"
      "duration (in seconds). If the duration is 0, the full file length is used.\n\n"
      "Patterns and transitions are triplets that begin with either \"+pattern\"\n"
      "or \"+transition\", followed by a <type> and duration (in seconds, must be\n"
      "greater than 0)\n\n"
      "Durations in all cases can be fractions of a second.\n\n"
      "Example:\n"
      "ges-launch file1.avi 0 45 +transition crossfade 3.5 file2.avi 0 0");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_printerr ("Error initializing: %s\n", err->message);
    g_option_context_free (ctx);
    exit (1);
  }

  /* Initialize the GStreamer Editing Services */
  if (!ges_init ()) {
    g_printerr ("Error initializing GES\n");
    exit (1);
  }

  if (list_transitions) {
    print_transition_list ();
    exit (0);
  }

  if (list_patterns) {
    print_pattern_list ();
    exit (0);
  }

  if (project_path) {
    load_project (project_path);
    exit (0);
  }
  if ((!load_path && argc < 4) || (outputuri && !render && !smartrender)) {
    g_printf ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    g_option_context_free (ctx);
    exit (1);
  }

  g_option_context_free (ctx);

  /* normalize */
  if (strcmp (audio, "none") == 0)
    audio = NULL;
  if (strcmp (video, "none") == 0)
    video = NULL;

  /* Create the pipeline */
  pipeline = create_pipeline (load_path, save_path, argc - 1, argv + 1,
      audio, video);
  if (!pipeline)
    exit (1);

  /* Setup profile/encoding if needed */
  if (render || smartrender) {
    GstEncodingProfile *prof;

    prof = make_encoding_profile (audio, video, video_restriction, audio_preset,
        video_preset, container);

    if (!prof ||
        !ges_timeline_pipeline_set_render_settings (pipeline, outputuri, prof)
        || !ges_timeline_pipeline_set_mode (pipeline,
            smartrender ? TIMELINE_MODE_SMART_RENDER : TIMELINE_MODE_RENDER))
      exit (1);

    g_free (outputuri);
    gst_encoding_profile_unref (prof);
  } else {
    ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW);
  }

  if (verbose) {
    gchar **exclude_list =
        exclude_args ? g_strsplit (exclude_args, ",", 0) : NULL;
    g_signal_connect (pipeline, "deep-notify",
        G_CALLBACK (gst_object_default_deep_notify), exclude_list);
  }

  /* Play the pipeline */
  mainloop = g_main_loop_new (NULL, FALSE);

  if (thumbinterval != 0.0) {
    g_printf ("thumbnailing every %f seconds\n", thumbinterval);
    g_timeout_add (1000 * thumbinterval, thumbnail_cb, pipeline);
  }

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop);

  if (gst_element_set_state (GST_ELEMENT (pipeline),
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to start the encoding\n");
    return 1;
  }
  g_main_loop_run (mainloop);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);

  gst_object_unref (pipeline);
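  /* "seenerrors" is a file-scope error counter maintained by bus_message_cb */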

  return (int) seenerrors;
}