void MediaPlayerPrivate::createGSTPlayBin(String url)
{
    ASSERT(!m_playBin);
    m_playBin = gst_element_factory_make("playbin2", "play");

    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(m_playBin));
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message", G_CALLBACK(mediaPlayerPrivateMessageCallback), this);
    gst_object_unref(bus);

    g_object_set(G_OBJECT(m_playBin), "uri", url.utf8().data(),
        "volume", static_cast<double>(m_player->volume()), NULL);

    m_videoSink = webkit_video_sink_new();

    g_object_ref_sink(m_videoSink);
    g_object_set(m_playBin, "video-sink", m_videoSink, NULL);

    g_signal_connect(m_videoSink, "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);
}
MediaPipelineImpl::MediaPipelineImpl (const boost::property_tree::ptree &config)
  : MediaObjectImpl (config)
{
  GstClock *clock;

  pipeline = gst_pipeline_new (NULL);

  if (pipeline == NULL) {
    throw KurentoException (MEDIA_OBJECT_NOT_AVAILABLE,
                            "Cannot create gstreamer pipeline");
  }

  clock = gst_system_clock_obtain ();
  gst_pipeline_use_clock (GST_PIPELINE (pipeline), clock);
  g_object_unref (clock);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  busMessageHandler = 0;
}
Example #3
int tsmf_platform_register_handler(TSMFGstreamerDecoder* decoder)
{
	GstBus* bus;

	if (!decoder)
		return -1;

	if (!decoder->pipe)
		return -1;

	bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipe));

	if (!bus)
	{
		WLog_ERR(TAG, "gst_pipeline_get_bus failed!");
		return 1;
	}

	/* release the bus reference taken above */
	gst_object_unref(bus);
	return 0;
}
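The snippet above only verifies that a bus can be obtained from the decoder pipeline; the actual handler registration is not shown. Below is a generic sketch of attaching a sync handler to that bus. It assumes GStreamer 1.0 and a hypothetical tsmf_bus_sync_handler callback; it is an illustration, not FreeRDP's actual implementation.

/* Hypothetical sketch (not FreeRDP code): inspect bus messages on the
 * streaming threads via a sync handler. */
static GstBusSyncReply tsmf_bus_sync_handler(GstBus* bus, GstMessage* message, gpointer user_data)
{
	TSMFGstreamerDecoder* decoder = (TSMFGstreamerDecoder*)user_data;
	(void)bus;
	(void)decoder;

	if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR)
		WLog_ERR(TAG, "pipeline error message received");

	return GST_BUS_PASS; /* let the message also reach any async watch */
}

static void tsmf_attach_bus_handler(TSMFGstreamerDecoder* decoder)
{
	GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipe));

	/* GStreamer 1.0 signature: bus, handler, user_data, destroy notify */
	gst_bus_set_sync_handler(bus, tsmf_bus_sync_handler, decoder, NULL);
	gst_object_unref(bus);
}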
Example #4
GST_START_TEST (request_audio_src_pad_pending)
{
  GstElement *dummysrc;
  gchar *padname = NULL;
  GstBus *bus;

  loop = g_main_loop_new (NULL, TRUE);
  pipeline = gst_pipeline_new (__FUNCTION__);
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  dummysrc = gst_element_factory_make ("dummysrc", NULL);
  g_signal_connect (dummysrc, "pad-added", G_CALLBACK (pad_added_delayed),
      &padname);

  gst_bin_add (GST_BIN (pipeline), dummysrc);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* request src pad using action */
  g_signal_emit_by_name (dummysrc, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &padname);
  fail_if (padname == NULL);

  GST_DEBUG ("Pad name %s", padname);
  g_object_set (G_OBJECT (dummysrc), "audio", TRUE, NULL);

  g_free (padname);

  g_timeout_add_seconds (4, print_timedout_pipeline, NULL);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_object_unref (pipeline);
  g_main_loop_unref (loop);
}
GST_END_TEST
Example #5
gint
main (gint argc, gchar * argv[])
{
  GstStateChangeReturn res;
  GstElement *player;
  GMainLoop *loop;
  GstBus *bus;

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, TRUE);

  player = gst_element_factory_make ("playbin", "player");
  g_assert (player);

  bus = gst_pipeline_get_bus (GST_PIPELINE (player));
  gst_bus_add_signal_watch (bus);

  g_signal_connect (bus, "message::eos", G_CALLBACK (eos_cb), loop);
  g_signal_connect (bus, "message::error", G_CALLBACK (error_cb), loop);
  g_signal_connect (bus, "message::warning", G_CALLBACK (warning_cb), NULL);

  g_object_set (G_OBJECT (player), "uri", argv[1], NULL);

  res = gst_element_set_state (player, GST_STATE_PLAYING);
  if (res == GST_STATE_CHANGE_FAILURE) {
    g_print ("could not play\n");
    return -1;
  }

  g_timeout_add (UPDATE_INTERVAL, (GSourceFunc) update_scale, player);

  g_main_loop_run (loop);

  /* tidy up */
  gst_element_set_state (player, GST_STATE_NULL);
  gst_object_unref (player);
  gst_object_unref (bus);

  return 0;
}
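UPDATE_INTERVAL and the update_scale callback are defined elsewhere and not shown. A minimal sketch of what such a periodic position printer could look like, assuming GStreamer 1.0 query APIs and an interval value chosen here for illustration only:

/* Assumed interval in milliseconds; the original value is not shown. */
#define UPDATE_INTERVAL 500

/* Hedged sketch: print the player's current position and duration. */
static gboolean
update_scale (GstElement * player)
{
  gint64 pos = 0, dur = 0;

  if (gst_element_query_position (player, GST_FORMAT_TIME, &pos) &&
      gst_element_query_duration (player, GST_FORMAT_TIME, &dur))
    g_print ("%" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\n",
        GST_TIME_ARGS (pos), GST_TIME_ARGS (dur));

  return TRUE;                  /* keep the timeout source alive */
}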
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this), m_rendering(false), m_size(0),
    m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * gst-dsp encoders seem not to proxy downstream caps correctly, which can
     * make GES fail to render some projects, so we override the default
     * getcaps ourselves.
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink),
                                        171, 0,
                                        512, 288);

    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    gst_element_set_state ((GstElement*) m_pipeline, GST_STATE_PAUSED);
    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
Example #7
	void RgAnalyser::HandleErrorMsg (GstMessage *msg)
	{
		GError *gerror = nullptr;
		gchar *debug = nullptr;
		gst_message_parse_error (msg, &gerror, &debug);

		const auto& msgStr = QString::fromUtf8 (gerror->message);
		const auto& debugStr = QString::fromUtf8 (debug);

		const auto code = gerror->code;
		const auto domain = gerror->domain;

		g_error_free (gerror);
		g_free (debug);

		qWarning () << Q_FUNC_INFO
				<< domain
				<< code
				<< msgStr
				<< debugStr;


		if (IsDraining_)
			return;

		IsDraining_ = true;
		const auto bus = gst_pipeline_get_bus (GST_PIPELINE (Pipeline_));
		while (const auto msg = gst_bus_timed_pop (bus, 0.01 * GST_SECOND))
			handleMessage (std::shared_ptr<GstMessage> (msg, gst_message_unref));
		IsDraining_ = false;
		gst_object_unref (bus);

		gst_element_set_state (Pipeline_, GST_STATE_NULL);
		PopThread_->Resume ();

		const auto trackInfoPos = std::find_if (Result_.Tracks_.begin (), Result_.Tracks_.end (),
				[this] (const TrackRgResult& info) { return info.TrackPath_ == CurrentPath_; });
		if (trackInfoPos == Result_.Tracks_.end ())
			Result_.Tracks_.append ({ CurrentPath_, 0, 0 });

		CheckFinish ();
	}
Example #8
int main(int argc, char** argv)
{
    GMainLoop *loop;
    GstElement *play;
    GstBus *bus;

    gst_init(&argc, &argv);

    play = gst_element_factory_make("playbin2", "play");
    //play->set_properties("volume", 10);
    loop = g_main_loop_new(NULL, FALSE);

    g_object_set(G_OBJECT(play), "uri", argv[1], NULL);
    bus = gst_pipeline_get_bus(GST_PIPELINE(play));
    gst_bus_add_watch(bus, bus_cb, loop);
    gst_object_unref(bus);
    g_print("playing......\n");
    gst_element_set_state(play, GST_STATE_PLAYING);
    g_print("start g_main_loop_run\n");
    g_main_loop_run(loop);
    g_print("g_main_loop_run return\n");
    gst_element_set_state(play, GST_STATE_NULL);
    return 0;
}
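The bus_cb callback used above is not shown; a minimal GstBusFunc that stops the main loop on EOS or error (an assumed behaviour, sketched here for completeness) could look like this:

/* Sketch of a bus callback: quit the main loop on EOS or error. */
static gboolean bus_cb(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop*)data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *dbg = NULL;
        gst_message_parse_error(msg, &err, &dbg);
        g_printerr("Error: %s\n", err->message);
        g_error_free(err);
        g_free(dbg);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE; /* keep the watch installed */
}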
void CrowdDetectorFilterImpl::postConstructor ()
{
  GstBus *bus;
  std::shared_ptr<MediaPipelineImpl> pipe;

  FilterImpl::postConstructor ();

  pipe = std::dynamic_pointer_cast<MediaPipelineImpl> (getMediaPipeline() );

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipe->getPipeline() ) );

  bus_handler_id = register_signal_handler (G_OBJECT (bus),
                   "message",
                   std::function <void (GstElement *, GstMessage *) >
                   (std::bind (&CrowdDetectorFilterImpl::busMessage, this,
                               std::placeholders::_2) ),
                   std::dynamic_pointer_cast<CrowdDetectorFilterImpl>
                   (shared_from_this() ) );

  g_object_unref (bus);
}
Example #10
static void
gst_pipeline_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstPipeline *pipeline = GST_PIPELINE (object);

  switch (prop_id) {
    case PROP_DELAY:
      g_value_set_uint64 (value, gst_pipeline_get_delay (pipeline));
      break;
    case PROP_AUTO_FLUSH_BUS:
      g_value_set_boolean (value, gst_pipeline_get_auto_flush_bus (pipeline));
      break;
    case PROP_LATENCY:
      g_value_set_uint64 (value, gst_pipeline_get_latency (pipeline));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #11
static void
gst_play_file(const char *filename){
 
	GMainLoop *loop;
	GstElement *pipeline;
	GstBus *bus;
	GstElement *source , *parser , *sink;
 
	loop = g_main_loop_new(NULL , TRUE);
 
	pipeline = gst_pipeline_new("audio-player");
 
	source = gst_element_factory_make("filesrc" , "source");
	parser = gst_element_factory_make("wavparse" , "parser");
	sink = gst_element_factory_make("alsasink" , "output");
 
	g_object_set(G_OBJECT(source) , "location"
			, filename , NULL);
 
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus , bus_watch , loop);
	g_object_unref(bus);
 
	gst_bin_add_many(GST_BIN(pipeline)
			, source , parser , sink , NULL);
 
	g_signal_connect(parser
			, "pad-added" , G_CALLBACK(add_pad) , sink);
 
	if(! gst_element_link(source , parser)){
		g_warning("linke source to parser failed");
	}
 
	gst_element_set_state(pipeline , GST_STATE_PLAYING);
	printf("Start playing...\n");
	g_main_loop_run(loop);
	printf("Playing stopped!!!\n");
	gst_element_set_state(pipeline , GST_STATE_NULL);
	g_object_unref(pipeline);
}
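The add_pad callback that links wavparse's dynamically created source pad to the sink is not shown; a typical pad-added handler, sketched here on the assumption that the sink element is passed as user data (as in the g_signal_connect call above), would be:

/* Sketch of the pad-added handler: link the new wavparse src pad to the sink. */
static void add_pad(GstElement *parser, GstPad *new_pad, gpointer data)
{
	GstElement *sink = GST_ELEMENT(data);
	GstPad *sinkpad = gst_element_get_static_pad(sink, "sink");

	(void)parser;

	if (!gst_pad_is_linked(sinkpad))
		gst_pad_link(new_pad, sinkpad);

	gst_object_unref(sinkpad);
}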
Example #12
void gstreamer_determine_video_dimensions(const char *uri, int *video_width,
int *video_height) {
	GMainLoop *loop = g_main_loop_new(NULL, FALSE);

	char *playbin_launch_str = malloc(strlen(uri) + 64);
	sprintf(playbin_launch_str, PLAYBIN_STR
		" uri=%s audio-sink=fakesink video-sink=fakesink", uri);
	GError *error2 = NULL;
	GstElement *playbin = gst_parse_launch(playbin_launch_str, &error2);
	free(playbin_launch_str);
	if (error2) {
		printf("Error: Could not create gstreamer pipeline for identification.\n");
		printf("Parse error: %s\n", error2->message);
		exit(1);
	}

	playbin_pipeline = playbin;
	bus_quit_on_playing = TRUE;
	GstBus *playbin_bus = gst_pipeline_get_bus(GST_PIPELINE(playbin));
	guint type_find_bus_watch_id = gst_bus_add_watch(playbin_bus, bus_callback, loop);
	gst_object_unref(playbin_bus);

	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_READY);
	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PLAYING);
	g_main_loop_run(loop);
	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PAUSED);

	GstPad *pad = gst_pad_new("", GST_PAD_UNKNOWN);
	g_signal_emit_by_name(playbin, "get-video-pad", 0, &pad, NULL);
	GstCaps *caps = gst_pad_get_current_caps(pad);
	*video_width = g_value_get_int(gst_structure_get_value(
		gst_caps_get_structure(caps, 0), "width"));
	*video_height = g_value_get_int(gst_structure_get_value(
		gst_caps_get_structure(caps, 0), "height"));
	g_object_unref(pad);

	gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(playbin));
	g_source_remove(type_find_bus_watch_id);
	g_main_loop_unref(loop);
}
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    if (m_repaintHandler) {
        g_signal_handler_disconnect(m_videoSink.get(), m_repaintHandler);
        m_repaintHandler = 0;
    }

    g_mutex_clear(&m_sampleMutex);

    m_player = 0;

    if (m_volumeSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler);
        m_volumeSignalHandler = 0;
    }

    if (m_muteSignalHandler) {
        g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler);
        m_muteSignalHandler = 0;
    }

#if USE(GSTREAMER_GL)
    g_cond_clear(&m_drawCondition);
    g_mutex_clear(&m_drawMutex);
#endif

    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateNeedContextMessageCallback), this);
        gst_bus_disable_sync_message_emission(bus.get());
        m_pipeline.clear();
    }

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif
}
Example #14
eServiceMP3Record::~eServiceMP3Record()
{
	if (m_recording_pipeline)
	{
		// disconnect sync handler callback
		GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_recording_pipeline));
#if GST_VERSION_MAJOR < 1
		gst_bus_set_sync_handler(bus, NULL, NULL);
#else
		gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
#endif
		gst_object_unref(bus);
	}

	if (m_state > stateIdle)
		stop();

	if (m_recording_pipeline)
	{
		gst_object_unref(GST_OBJECT(m_recording_pipeline));
	}
}
Example #15
//BUILDER COMMENT. DO NOT REMOVE. auxcode begin
void Music::init(const string newName, AL::ALPtr<AL::ALBroker> parentBroker) {
	Component::init(newName, parentBroker);

	#ifdef WEBOTS
	return;
	#endif

	// init GStreamer
	gst_init (NULL, NULL);
	loop = g_main_loop_new (NULL, FALSE);

	// set up
	play = gst_element_factory_make ("playbin2", "play");
	bus = gst_pipeline_get_bus (GST_PIPELINE (play));

	// set state
	gst_element_set_state (play, GST_STATE_READY);

	isSetFileMp3 = false;
	isPlayPress = false;
	isStopPress = false;
}
Example #16
gboolean introbin_set_pad_offset(CustomData *data)
{
  gint64 pos2;
  pos2=gst_element_get_base_time(data->pipeline);
  GstClock *clock;
  clock=gst_pipeline_get_clock(GST_PIPELINE(data->pipeline));
  GstClockTime clock_time;
  clock_time=gst_clock_get_time(clock);
  gst_object_unref(clock);
  g_print("Pipeline times: base_time=%lld\n clock_time=%lld",
		            pos2,clock_time);
  GstElement *dec=gst_bin_get_by_name(GST_BIN(data->introbin),"introdec");
  GstPad *src_pad1,*src_pad2;
  src_pad1=gst_element_get_static_pad(GST_ELEMENT(dec),"src_0");
  gst_pad_set_offset(src_pad1,clock_time-pos2);
  gst_object_unref(src_pad1);
  src_pad2=gst_element_get_static_pad(GST_ELEMENT(dec),"src_1");
  gst_pad_set_offset(src_pad2,clock_time-pos2);
  gst_object_unref(src_pad2);
  gst_object_unref(dec);

  return TRUE;
}
Example #17
static int mp_play_sound(TPMediaPlayer * mp, const char * uri)
{
    GstElement * playbin = gst_element_factory_make( "playbin" , "play" );

    GstBus * bus = gst_pipeline_get_bus( GST_PIPELINE( playbin ) );

    g_object_set( G_OBJECT( playbin ), "uri", uri, NULL );

    gst_bus_add_signal_watch( bus );

    g_signal_connect_object( bus, "message::error" , G_CALLBACK( play_sound_done ), playbin, G_CONNECT_AFTER );
    g_signal_connect_object( bus, "message::eos", G_CALLBACK( play_sound_done ), playbin, G_CONNECT_AFTER );

    gst_object_unref( GST_OBJECT( bus ) );

    if ( GST_STATE_CHANGE_FAILURE == gst_element_set_state( playbin, GST_STATE_PLAYING ) )
    {
        gst_element_set_state( playbin, GST_STATE_NULL );
        gst_object_unref( GST_OBJECT( playbin ) );
        return 2;
    }

    return 0;
}
int
main (int argc, char *argv[])
{
  GError *error = NULL;
  GstBus *bus;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  builder = gtk_builder_new ();
  if (!gtk_builder_add_from_file (builder, UI_FILE, &error)) {
    g_warning ("Error: %s", error->message);
    g_error_free (error);
    return 1;
  }

  camera = gst_element_factory_make ("camerabin", "camera");
  bus = gst_pipeline_get_bus (GST_PIPELINE (camera));
  gst_bus_add_watch (bus, bus_callback, NULL);
  gst_bus_set_sync_handler (bus, bus_sync_callback, NULL, NULL);
  gst_object_unref (bus);

  if (!init_gtkwidgets_data ()) {
    goto error;
  }

  ui_main_window = GTK_WIDGET (gtk_builder_get_object (builder, "mainWindow"));
  gtk_builder_connect_signals (builder, NULL);
  gtk_widget_show_all (ui_main_window);

  gst_element_set_state (camera, GST_STATE_PLAYING);

  gtk_main ();

error:
  gst_element_set_state (camera, GST_STATE_NULL);
  gst_object_unref (camera);
  return 0;
}
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstBus *bus;

  gst_init (&argc, &argv);

  if (argc < 2) {
    g_print ("usage: %s <uri>\n", argv[0]);
    return -1;
  }

  /* create a new bin to hold the elements */
  bin = gst_element_factory_make ("playbin", "bin");
  g_assert (bin);
  g_object_set (bin, "uri", argv[1], NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (bin));
  gst_bus_add_watch (bus, handle_message, bin);

  /* go to the PAUSED state and wait for preroll */
  g_message ("prerolling first frame");
  gst_element_set_state (bin, GST_STATE_PAUSED);
  gst_element_get_state (bin, NULL, NULL, -1);

  loop = g_main_loop_new (NULL, TRUE);
  g_main_loop_run (loop);

  g_message ("finished");

  /* stop the bin */
  gst_element_set_state (bin, GST_STATE_NULL);

  g_main_loop_unref (loop);
  gst_object_unref (bus);

  exit (0);
}
Example #20
gint
main (gint argc, gchar * argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* set up */
  pipeline = make_pipeline ();

  g_signal_connect (pipeline, "deep_notify",
      G_CALLBACK (gst_object_default_deep_notify), NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);

  g_print ("Starting pipeline\n");

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* add a timeout to cycle between the formats */
  g_timeout_add (1000, (GSourceFunc) do_switch, pipeline);

  /* now run */
  g_main_loop_run (loop);

  g_print ("Nulling pipeline\n");

  /* also clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
Example #21
GST_START_TEST (add_later)
{
  GMainLoop *loop = g_main_loop_new (NULL, TRUE);
  GstElement *pipeline = gst_pipeline_new (__FUNCTION__);
  GstElement *videosrc = gst_element_factory_make ("videotestsrc", NULL);
  GstElement *typefind = gst_element_factory_make ("typefind", NULL);
  GstElement *fakesink = gst_element_factory_make ("fakesink", "fakesink");
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  g_object_set (G_OBJECT (fakesink), "sync", FALSE, "signal-handoffs", TRUE,
      NULL);
  g_signal_connect (G_OBJECT (fakesink), "handoff",
      G_CALLBACK (fakesink_hand_off), loop);

  g_signal_connect (G_OBJECT (typefind), "have-type", G_CALLBACK (type_found),
      pipeline);

  gst_bin_add_many (GST_BIN (pipeline), videosrc, typefind, fakesink, NULL);
  gst_element_link (videosrc, typefind);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  g_timeout_add_seconds (10, timeout_check, pipeline);

  mark_point ();
  g_main_loop_run (loop);
  mark_point ();

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (pipeline);
  g_object_unref (bus);
  g_main_loop_unref (loop);
}
GST_END_TEST
Example #22
static void
play_agnosticbin_video_passthrough (void)
{
  gboolean ret;
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  GstElement *agnosticbin = gst_element_factory_make ("agnosticbin", NULL);
  GstElement *videotestsrc = gst_element_factory_make ("videotestsrc", NULL);
  GstElement *fakesink = gst_element_factory_make ("fakesink", NULL);

  loop = g_main_loop_new (NULL, TRUE);

  g_object_set (G_OBJECT (pipeline), "async-handling", TRUE, NULL);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  g_object_set (G_OBJECT (videotestsrc), "num-buffers", 100, NULL);

  mark_point ();
  gst_bin_add_many (GST_BIN (pipeline), videotestsrc, agnosticbin, fakesink,
      NULL);
  mark_point ();
  ret = gst_element_link_many (videotestsrc, agnosticbin, fakesink, NULL);
  fail_unless (ret);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  mark_point ();
  g_main_loop_run (loop);
  mark_point ();

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_main_loop_unref (loop);
  g_object_unref (pipeline);
}
Example #23
void Pipeline::create()
{
    qDebug("Loading video: %s", m_videoLocation.toAscii().data());

    gst_init (NULL, NULL);

#ifdef WIN32
    m_loop = g_main_loop_new (NULL, FALSE);
#endif
    m_pipeline = gst_pipeline_new ("pipeline");

    m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this);
    gst_bus_set_sync_handler (m_bus, (GstBusSyncHandler) create_window, this);
    gst_object_unref (m_bus);

    GstElement* videosrc = gst_element_factory_make ("filesrc", "filesrc0");
    GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin0");
    m_glimagesink  = gst_element_factory_make ("glimagesink", "sink0");
    
    if (!videosrc || !decodebin || !m_glimagesink )
    {
        qDebug ("one element could not be found");
        return;
    }

    g_object_set(G_OBJECT(videosrc), "location", m_videoLocation.toAscii().data(), NULL);

    gst_bin_add_many (GST_BIN (m_pipeline), videosrc, decodebin, m_glimagesink, NULL);

    gst_element_link_pads (videosrc, "src", decodebin, "sink");

    g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (cb_new_pad), m_glimagesink);

    GstPad* pad = gst_element_get_static_pad (m_glimagesink, "sink");
    g_signal_connect(pad, "notify::caps", G_CALLBACK(cb_video_size), this);
    gst_object_unref (pad);
}
Example #24
void		ly_ppl_init		()
{
	GstElement* playbin=NULL;
	GstElement* equalizer=NULL;
	GstElement* convert=NULL;
	GstElement* volume=NULL;
	GstElement* audiosink=NULL;
	GstElement* fakesink=NULL;
	GstPad*     mpad;
	GstBus* bus=NULL;

	playbin=gst_element_factory_make("playbin", "playbin");
	equalizer=gst_element_factory_make("equalizer-10bands","equalizer");
	volume= gst_element_factory_make("volume","volume");
	convert=gst_element_factory_make("audioconvert","autoconvert");
	audiosink=gst_element_factory_make("autoaudiosink","autoaudiosink");
	fakesink=gst_element_factory_make("fakesink","fakesink");
	
	bus=gst_pipeline_get_bus(GST_PIPELINE(playbin));
	gst_element_set_state(playbin,GST_STATE_NULL);
	gst_bus_add_watch(bus,(GstBusFunc)ly_ppl_bus_cb, NULL);
	gst_object_unref(bus);
	
	ly_ppl_audio_bin=gst_bin_new("audio-bin");
	gst_bin_add_many(GST_BIN(ly_ppl_audio_bin),equalizer,convert,volume,audiosink,NULL);
	gst_element_link_many(equalizer,convert,volume,audiosink,NULL);
	mpad = gst_element_get_static_pad(equalizer, "sink");
	gst_element_add_pad(ly_ppl_audio_bin, gst_ghost_pad_new(NULL,mpad));
	g_object_set(G_OBJECT(playbin),"audio-sink",ly_ppl_audio_bin,NULL);
	
	ly_ppl_video_bin=gst_bin_new("video-bin");
	gst_bin_add_many(GST_BIN(ly_ppl_video_bin), fakesink,NULL);
	mpad = gst_element_get_static_pad(fakesink, "sink");
	gst_element_add_pad(ly_ppl_video_bin, gst_ghost_pad_new(NULL,mpad));
	g_object_set(G_OBJECT(playbin),"video-sink",ly_ppl_video_bin,NULL);
	
	ly_ppl_playbin=playbin;
}
Example #25
BOOL
LLMediaImplGStreamer::
load ( const LLString& urlIn )
{
	llinfos << "Setting media URI: " << urlIn << llendl;

 	if (!mPlaybin)
 	{
		return FALSE;
	}

	// set URI
	g_object_set (G_OBJECT (mPlaybin), "uri", urlIn.c_str(), NULL);
	//g_object_set (G_OBJECT (mPlaybin), "uri", "file:///tmp/movie", NULL);

	// get playbin's bus - perhaps this can/should be done at init()
	GstBus *bus = llgst_pipeline_get_bus (GST_PIPELINE (mPlaybin));
	if (!bus)
	{
		return FALSE;
	}
	llgst_bus_add_watch (bus, my_bus_callback, this);
	llgst_object_unref (bus);

	if (true) // dummy values
	{
		const int fixedsize = 2;
		mMediaRowbytes = mMediaDepthBytes * fixedsize;
		mMediaWidth = fixedsize;
		mMediaHeight = fixedsize;
		mTextureWidth = fixedsize;
		mTextureHeight = fixedsize;
	}

	BOOL rtn = LLMediaMovieBase::load(urlIn);
	llinfos << "load returns " << int(rtn) << llendl;
	return rtn;
}
Example #26
void
ZBarFilter::init (std::shared_ptr<MediaPipeline> parent)
{
  element = gst_element_factory_make ("filterelement", NULL);

  g_object_set (element, "filter-factory", "zbar", NULL);
  g_object_ref (element);
  gst_bin_add (GST_BIN (parent->pipeline), element);
  gst_element_sync_state_with_parent (element);

  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (parent->pipeline) );
  GstElement *zbar;

  g_object_get (G_OBJECT (element), "filter", &zbar, NULL);

  this->zbar = zbar;
  g_object_set (G_OBJECT (zbar), "qos", FALSE, NULL);

  bus_handler_id = g_signal_connect (bus, "message", G_CALLBACK (zbar_receive_message), this);
  g_object_unref (bus);
  // There is no need to keep a reference to zbar because its lifecycle is the same as the filter's lifecycle
  g_object_unref (zbar);
}
Example #27
static void
kms_player_end_point_dispose (GObject * object)
{
  KmsPlayerEndPoint *self = KMS_PLAYER_END_POINT (object);

  g_clear_object (&self->priv->loop);

  if (self->priv->pipeline != NULL) {
    GstBus *bus;

    bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->pipeline));
    gst_bus_set_sync_handler (bus, NULL, NULL, NULL);
    g_object_unref (bus);

    gst_element_set_state (self->priv->pipeline, GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (self->priv->pipeline));
    self->priv->pipeline = NULL;
  }

  /* Clean up as much as possible; dispose may be called multiple times. */

  G_OBJECT_CLASS (kms_player_end_point_parent_class)->dispose (object);
}
void
mirageaudio_canceldecode(MirageAudio *ma)
{
    if (GST_IS_ELEMENT(ma->pipeline)) {

        GstState state;
        gst_element_get_state(ma->pipeline, &state, NULL, 100*GST_MSECOND);

        if (state != GST_STATE_NULL) {
            g_mutex_lock(ma->decoding_mutex);

            GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(ma->pipeline));
            GstMessage* eosmsg = gst_message_new_eos(GST_OBJECT(ma->pipeline));
            gst_bus_post(bus, eosmsg);
            g_print("libmirageaudio: EOS Message sent\n");
            gst_object_unref(bus);

            ma->invalidate = TRUE;

            g_mutex_unlock(ma->decoding_mutex);
        }
    }
}
Example #29
static void
start_source (const gchar * uri)
{
  GstElement *pipeline;
  GstElement *uridecodebin;
  GstBus *bus;

  pipeline = gst_pipeline_new (NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, master_bus_msg, pipeline);
  gst_object_unref (bus);

  uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
  g_object_set (uridecodebin, "uri", uri, NULL);
  g_signal_connect (uridecodebin, "pad-added", G_CALLBACK (on_pad_added),
      pipeline);
  g_signal_connect (uridecodebin, "autoplug-select",
      G_CALLBACK (on_autoplug_select), pipeline);

  gst_bin_add (GST_BIN (pipeline), uridecodebin);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
Example #30
int main(int argc, char **argv)
{
	GstElement *pipeline;
	GstElement *src;
	GstElement *decoder;
	GstElement *sink;
	GstBus *bus;

	if(argc < 2) {
		g_printf("Usage: %s <file>\n", argv[0]);
		exit(1);
	}

	gst_init(NULL, NULL);

	pipeline = gst_pipeline_new("pipeline");
	src = gst_element_factory_make("filesrc", "src");
	decoder = gst_element_factory_make("dtdrdec", "decoder");
	sink = gst_element_factory_make("alsasink", "sink");

	gst_bin_add_many(GST_BIN(pipeline), src, decoder, sink, NULL);
	gst_element_link_many(src, decoder, sink, NULL);

	g_object_set(src, "location", argv[1], NULL);
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus, bus_callback, NULL);
	gst_object_unref(bus);

	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	loop = g_main_loop_new(NULL, FALSE);
	g_main_loop_run(loop);

	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);

	exit(0);
}