Example #1
JAMediaPlayer::JAMediaPlayer()
{

    Gst::init();
    playbin = Gst::PlayBin2::create();
    //playbin.set_property("buffer-size", 50000)

    //Video out
    video = Gst::Bin::create("jamedia_video_pipeline");

    Glib::RefPtr<Gst::Element> convert =
        Gst::ElementFactory::create_element("ffmpegcolorspace", "conv");
    Glib::RefPtr<Gst::Element> rate =
        Gst::ElementFactory::create_element("videorate", "rate");
    videobalance = Gst::ElementFactory::create_element(
        "videobalance", "videobalance");
    gamma = Gst::ElementFactory::create_element("gamma", "gamma");
    videoflip = Gst::ElementFactory::create_element("videoflip", "videoflip");
    Glib::RefPtr<Gst::XvImageSink> pantalla = Gst::XvImageSink::create();

    rate->set_property("max-rate", 30);
    pantalla->property_force_aspect_ratio() = true;
    pantalla->set_sync(true);

    video->add(convert);
    video->add(rate);
    video->add(videobalance);
    video->add(gamma);
    video->add(videoflip);
    video->add(pantalla);

    convert->link(rate);
    rate->link(videobalance);
    videobalance->link(gamma);
    gamma->link(videoflip);
    videoflip->link(pantalla);

    // Expose the converter's sink pad as a ghost pad, so playbin can link
    // its video stream to this bin as a whole:
    Glib::RefPtr<Gst::GhostPad> ghost_pad =
        Gst::GhostPad::create(convert->get_static_pad("sink"), "sink");
    video->add_pad(ghost_pad);

    // Use the bin as playbin's video sink:
    playbin->property_video_sink() = video;

    Glib::RefPtr<Gst::Bus> bus = playbin->get_bus();
    bus->add_watch(sigc::mem_fun(*this, &JAMediaPlayer::on_bus_message));
    //bus->enable_sync_message_emission();
    //bus->signal_sync_message().connect(
    //  sigc::mem_fun(*this, &JAMediaPlayer::on_bus_message_sync));
}
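
The constructor installs a bus watch, but the handler itself is not part of this example. Below is a minimal sketch of what JAMediaPlayer::on_bus_message might look like; the handler name comes from the add_watch call above, while the body is an assumption that only reacts to end-of-stream and error messages.

// Hypothetical sketch; the real handler is not shown in this example.
bool JAMediaPlayer::on_bus_message(const Glib::RefPtr<Gst::Bus>& /* bus */,
    const Glib::RefPtr<Gst::Message>& message)
{
    switch (message->get_message_type())
    {
        case Gst::MESSAGE_EOS:
            // End of stream: stop playback.
            playbin->set_state(Gst::STATE_NULL);
            break;
        case Gst::MESSAGE_ERROR:
        {
            Glib::RefPtr<Gst::MessageError> error_message =
                Glib::RefPtr<Gst::MessageError>::cast_dynamic(message);
            if (error_message)
                std::cerr << "Error: " << error_message->parse().what()
                          << std::endl;
            playbin->set_state(Gst::STATE_NULL);
            break;
        }
        default:
            break;
    }
    return true; // keep the watch installed
}

Returning true keeps the watch attached, so the handler continues to receive messages for the lifetime of the pipeline.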
Example #2
void GstVideoServer::Init()
{
	using namespace Gst;
/*
 * gst-launch-0.10 -v v4l2src ! ffenc_mpeg4 bitrate=8388608 !\
 *		rtpmp4vpay mtu=1400 pt=96 ssrc=0 timestamp-offset=0 seqnum-offset=0 send-config=true !\
 *		udpsink host=127.0.0.1 port=5000
 */

	pipeline = Pipeline::create("video-server");

	source = ElementFactory::create_element("v4l2src");
	encoder = ElementFactory::create_element("ffenc_mpeg4");
	rtpenc = ElementFactory::create_element("rtpmp4vpay");
	sink = ElementFactory::create_element("udpsink");

	if (!pipeline || !source || !encoder || !rtpenc || !sink)
		throw std::exception();

	if (!m_DeviceName.empty())
		source->set_property("device", m_DeviceName);

	//encoder->set_property("bitrate", 58720);
	encoder->set_property("bitrate", 8388608);

	rtpenc->set_property("mtu", 1400);
	rtpenc->set_property("pt", 96);
	rtpenc->set_property("ssrc", 0);
	rtpenc->set_property("timestamp-offset", 0);
	rtpenc->set_property("seqnum-offset", 0);
	rtpenc->set_property("send-config", true);

	sink->set_property("host", m_HostIp);
	sink->set_property("port", m_Port);

	Glib::RefPtr<Gst::Bus> bus = pipeline->get_bus();
	bus->add_watch(sigc::mem_fun(dynamic_cast<GstPlayerBase&>(*this), &GstPlayerBase::on_bus_Message));

	pipeline->add(source)->add(encoder)->add(rtpenc)->add(sink);

	source->link(encoder)->link(rtpenc)->link(sink);
}
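
To view the stream this server sends, a receiver has to undo each step: depayload the RTP packets, decode the MPEG-4 video, and display it. The following is a sketch of such a client mirroring the payloader settings above; the caps string and the receiving-side element names (udpsrc, rtpmp4vdepay, ffdec_mpeg4, autovideosink) are assumptions based on the 0.10-era plugins the server uses, and since send-config=true the decoder configuration should also arrive in-band.

// Hypothetical receiver, the counterpart of the pipeline built in Init().
// Roughly equivalent to:
// gst-launch-0.10 udpsrc port=5000 caps="application/x-rtp,media=video,clock-rate=90000,encoding-name=MP4V-ES,payload=96" !\
//		rtpmp4vdepay ! ffdec_mpeg4 ! autovideosink
void run_receiver()
{
	using namespace Gst;

	Glib::RefPtr<Pipeline> pipeline = Pipeline::create("video-client");

	Glib::RefPtr<Element> source = ElementFactory::create_element("udpsrc");
	Glib::RefPtr<Element> rtpdec = ElementFactory::create_element("rtpmp4vdepay");
	Glib::RefPtr<Element> decoder = ElementFactory::create_element("ffdec_mpeg4");
	Glib::RefPtr<Element> sink = ElementFactory::create_element("autovideosink");

	if (!pipeline || !source || !rtpdec || !decoder || !sink)
		throw std::exception();

	source->set_property("port", 5000);
	// The server's payloader was configured with pt=96,
	// so the depayloader needs matching caps on the udpsrc:
	source->set_property("caps", Caps::create_from_string(
		"application/x-rtp,media=video,clock-rate=90000,"
		"encoding-name=MP4V-ES,payload=96"));

	pipeline->add(source)->add(rtpdec)->add(decoder)->add(sink);
	source->link(rtpdec)->link(decoder)->link(sink);

	pipeline->set_state(STATE_PLAYING);
}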
Example #3
int play_sound(std::string filename)
{
    // Initialize Gstreamermm:
    Gst::init();

    mainloop = Glib::MainLoop::create();

    // Create the pipeline:
    pipeline = Gst::Pipeline::create("audio-player");

    // Create the elements:

    // filesrc reads the file from disk:
    Glib::RefPtr<Gst::Element> source = Gst::ElementFactory::create_element("filesrc");
    if(!source)
	std::cerr << "filesrc element could not be created." << std::endl;

    // oggdemux parses the ogg streams into elementary streams (audio and video):
    Glib::RefPtr<Gst::Element> parser = Gst::ElementFactory::create_element("oggdemux");
    if(!parser)
	std::cerr << "oggdemux element could not be created." << std::endl;

    // vorbisdec decodes a vorbis (audio) stream:
    decoder = Gst::ElementFactory::create_element("vorbisdec");
    if(!decoder)
	std::cerr << "vorbisdec element could not be created." << std::endl;

    // audioconvert converts raw audio to a format which can be used by the next element
    Glib::RefPtr<Gst::Element> conv = Gst::ElementFactory::create_element("audioconvert");
    if(!conv)
	std::cerr << "audioconvert element could not be created." << std::endl;

    // Outputs sound to an ALSA audio device
    Glib::RefPtr<Gst::Element> sink = Gst::ElementFactory::create_element("alsasink");
    if(!sink)
	std::cerr << "alsasink element could not be created." << std::endl;

    //Check that the elements were created:
    if(!pipeline || !source || !parser || !decoder || !conv || !sink)
    {
	std::cerr << "One element could not be created" << std::endl;
	return 1;
    }

    Glib::RefPtr<Gst::Pad> pad = sink->get_static_pad("sink");
    if(pad)
	data_probe_id = pad->add_data_probe( sigc::ptr_fun(&on_sink_pad_have_data) );
    //std::cout << "sink data probe id = " << data_probe_id << std::endl;

    source->set_property("location", filename);

    // Get the bus from the pipeline,
    // and add a bus watch to the default main context with the default priority:
    Glib::RefPtr<Gst::Bus> bus = pipeline->get_bus();
    bus->add_watch( sigc::ptr_fun(&on_bus_message) );


    // Put all the elements in a pipeline:
    try
    {
	pipeline->add(source)->add(parser)->add(decoder)->add(conv)->add(sink);
    }
    catch(const Glib::Error& ex)
    {
	std::cerr << "Error while adding elements to the pipeline: " << ex.what() << std::endl;
	return 1;
    }

    // Link the elements together:
    try
    {
	source->link(parser);

	// We cannot link the parser and decoder yet,
	// because the parser uses dynamic pads.
	// So we do it later in a pad-added signal handler:
	parser->signal_pad_added().connect( sigc::ptr_fun(&on_parser_pad_added) );

	decoder->link(conv)->link(sink);
    }
    catch(const std::runtime_error& ex)
    {
	std::cout << "Exception while linking elements: " << ex.what() << std::endl;
    }

    // Call the on_timeout() function every 200 ms
    // to regularly print out the position of the stream:
    Glib::signal_timeout().connect(sigc::ptr_fun(&on_timeout), 200);

    // Now set the whole pipeline to playing and start the main loop:
    //std::cout << "Setting to PLAYING." << std::endl;
    pipeline->set_state(Gst::STATE_PLAYING);
    //std::cout << "Running." << std::endl;
    mainloop->run();

    // Clean up nicely:
    //std::cout << "Returned. Stopping playback." << std::endl;
    pipeline->set_state(Gst::STATE_NULL);

    return 0;
}
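
play_sound() connects a pad-added handler for the demuxer, but the handler itself is not shown. A sketch of on_parser_pad_added along the lines of the upstream gstreamermm ogg-player example, assuming decoder is the same file-scope variable used above:

// Sketch of the dynamic-pad handler connected above. oggdemux creates its
// source pads only after it has inspected the stream, so the link to the
// decoder must be made here rather than in play_sound():
void on_parser_pad_added(const Glib::RefPtr<Gst::Pad>& new_pad)
{
    // Link the demuxer's new source pad to the decoder's sink pad:
    Glib::RefPtr<Gst::Pad> sink_pad = decoder->get_static_pad("sink");
    Gst::PadLinkReturn ret = new_pad->link(sink_pad);

    if (ret != Gst::PAD_LINK_OK && ret != Gst::PAD_LINK_WAS_LINKED)
        std::cerr << "Linking of pads " << new_pad->get_name() << " and "
                  << sink_pad->get_name() << " failed." << std::endl;
}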