Example #1
int main(int argc, const char * argv[]) {
    // Register FFmpeg components and route FFmpeg log output to our callback
    // (both callbacks are sketched after this example).
    av_register_all();
    av_log_set_callback(ffmpegLog);

    // Install a common handler for the signals we want to react to.
    for (int sig : { SIGSEGV, SIGTERM, SIGINT, SIGHUP, SIGABRT }) {
        signal(sig, handleSignal);
    }

    // Expect exactly three arguments: server address, uuid and log file.
    if (argc != 4) {
        Err<<"invalid arguments, use: "<<argv[0]<<" tcp://srv:port uuid logFile.log"<<endl;
        return -1;
    }
    
    string  addr = argv[1];
    string  logf = argv[3];
    int64_t uuid = stoll(argv[2]); // stoll, so the 64-bit id is not truncated where long is 32 bits
    
    openLogFile(logf);

    // Write a visible start-up banner to the log.
    const char * stars = " *\n *\n *\n";
    Log<<stars<<" * Starting id: "<<uuid<<", pushing to: "<<addr<<endl<<stars<<endl;
    
    try {
        // Set up the main loop, point it at the push address and id, then run it.
        MainLoop loop;
        loop.init(ZAddr(addr), uuid);
        loop.run();
        return 0;
    } catch (const exception & ex) {
        Err<<"exception in main: "<<toStr(ex)<<endl;
    } catch (...) {
        Err<<"unknown exception in main"<<endl;
    }    
    return -1;
}
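
Example #1 installs two callbacks, handleSignal and ffmpegLog, whose definitions are not shown here. The following is a minimal illustrative sketch, not the project's actual code: it assumes a signal handler that restores the default action and re-raises, and an FFmpeg log callback matching the void(void*, int, const char*, va_list) signature expected by av_log_set_callback.

// Minimal sketch of the callbacks used above (illustrative, not the project's code).
#include <csignal>
#include <cstdarg>
#include <cstdio>
extern "C" {
#include <libavutil/log.h>
}

// Hypothetical signal handler: restore the default action and re-raise, so the
// process still terminates or aborts the way the signal normally would.
static void handleSignal(int sig) {
    signal(sig, SIG_DFL);
    raise(sig);
}

// Hypothetical FFmpeg log callback with the signature expected by
// av_log_set_callback: void(void*, int, const char*, va_list).
static void ffmpegLog(void * /*avcl*/, int level, const char * fmt, va_list args) {
    if (level > av_log_get_level())
        return;
    vfprintf(stderr, fmt, args);
}
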
Example #2
void run(const std::string & inFileName)
{
    // Create a main loop object.
    MainLoop loop;
    
    // Create the GStreamer elements
    ScopedObject<GstElement> pipeline(Pipeline::Create("audio-player"));
    ScopedObject<GstElement> source(Element::Create(pipeline, "filesrc", "file-source"));
    ScopedObject<GstElement> demuxer(Element::Create(pipeline, "oggdemux", "ogg-demuxer"));
    ScopedObject<GstElement> decoder(Element::Create(pipeline, "vorbisdec", "vorbis-decoder"));
    ScopedObject<GstElement> conv(Element::Create(pipeline, "audioconvert", "converter"));
    ScopedObject<GstElement> sink(Element::Create(pipeline, "autoaudiosink", "audio-output"));
    
    // Set the input filename on the source element
    g_object_set(G_OBJECT(source.get()), "location", inFileName.c_str(), NULL);
    
    // Link the first half of the pipeline: source -> demuxer
    Element::Link(source, demuxer);

    // Link the second half: decoder -> converter -> sink
    Element::Link(decoder, conv, sink);
    
    // Note that the demuxer is linked to the decoder dynamically.
    // The reason is that an Ogg container may hold several streams (for
    // example audio and video), and the demuxer creates its source pads at
    // run time, once it has detected the number and type of streams.
    // Therefore we connect a callback that is executed when the demuxer
    // emits its "pad-added" signal (a sketch of such a callback follows
    // this example).
    g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), decoder);
    
    // Add a bus message handler, passing the loop as an extra argument so the callback can stop it.
    ScopedBusListener busListener(pipeline.get(), boost::bind(&on_bus_message, _1, _2, &loop));

    // Set the pipeline to "playing" state
    std::cout << "Now playing: " << inFileName << std::endl;    
    Pipeline::SetState(pipeline, GST_STATE_PLAYING);
    
    // Start the event loop
    std::cout << "Running..." << std::endl;
    loop.run();
    
    // Out of the main loop, clean up nicely
    std::cout << "Returned, stopping playback" << std::endl;
    
    // Stop the pipeline
    Element::SetState(pipeline, GST_STATE_NULL);
    
    std::cout << "Deleting pipeline" << std::endl;
    // ScopedObject performs cleanup in its destructor.
}
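
The two callbacks referenced in this example, on_pad_added and on_bus_message, are defined elsewhere in the file. Below is a minimal sketch using the plain GStreamer C API; it assumes the decoder element is passed as the user-data pointer (as in the g_signal_connect call above), that the bus listener forwards the bus and message to the handler, and that MainLoop has a quit() method, which is an assumption about the wrapper rather than a documented API.

#include <gst/gst.h>
#include <iostream>

// Sketch only: link the demuxer's newly created source pad to the decoder's sink pad.
static void on_pad_added(GstElement * /*demuxer*/, GstPad * newPad, gpointer data)
{
    GstElement * decoder = static_cast<GstElement *>(data);
    GstPad * sinkPad = gst_element_get_static_pad(decoder, "sink");
    if (!gst_pad_is_linked(sinkPad)) {
        gst_pad_link(newPad, sinkPad);
    }
    gst_object_unref(sinkPad);
}

// Sketch only: stop the event loop on end-of-stream or on error.
static void on_bus_message(GstBus * /*bus*/, GstMessage * msg, MainLoop * loop)
{
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
            loop->quit();                       // assumed MainLoop method
            break;
        case GST_MESSAGE_ERROR: {
            GError * err = NULL;
            gchar * debug = NULL;
            gst_message_parse_error(msg, &err, &debug);
            std::cerr << "Error: " << (err ? err->message : "unknown") << std::endl;
            if (err)   g_error_free(err);
            if (debug) g_free(debug);
            loop->quit();                       // assumed MainLoop method
            break;
        }
        default:
            break;
    }
}
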