// Example #1
    // ---------------------------------------------------------------------
    // Audio CaptureRecorder Test
    //
    void runAVInputReaderRecorderTest()
    {
        PacketStream stream;

        // Create the Encoder Options
        av::EncoderOptions options;
        options.ofile = "audio_test.mp4";
        options.duration = 5; //time(0) +
        options.oformat = av::Format("AAC", "aac",
            av::AudioCodec("AAC", "aac", 2, 44100, 96000, "fltp"));
            //av::AudioCodec("MP3", "libmp3lame", 2, 44100, 128000, "s16p"));

        // Attach the Audio Capture
        av::AVInputReader::Ptr reader(new av::AVInputReader());
        reader->openAudioDevice(0,
            options.oformat.audio.channels,
            options.oformat.audio.sampleRate);
        reader->getEncoderFormat(options.iformat);

        // Attach the Audio Capture
        stream.attachSource<av::AVInputReader>(reader, true);

        // Attach the Audio Encoder
        auto encoder = new av::AVPacketEncoder(options);
        encoder->initialize();
        stream.attach(encoder, 5, true);

        stream.start();
        scy::pause();
        stream.stop();
    }
// Example #2
int main(int argc, char** argv)
{
    Logger::instance().add(new ConsoleChannel("debug", Level::Trace)); // Debug
    {
        // Create a PacketStream to pass packets
        // from device captures to the encoder
        PacketStream stream;

        av::EncoderOptions options;
        options.ofile = OUTPUT_FILENAME;
        options.oformat = OUTPUT_FORMAT;
        options.iformat.audio.enabled = false; // enabled if available
        options.iformat.video.enabled = false; // enabled if available

        // Create a device manager instance to enumerate system devices
        av::Device device;
        av::DeviceManager devman;

        // Create and attach the default video capture
        av::VideoCapture video;
        if (devman.getDefaultCamera(device)) {
            LInfo("Using video device: ", device.name)
            video.openVideo(device.id, { 640, 480 });
            video.getEncoderFormat(options.iformat);
            stream.attachSource(&video, false, true);
        }

        // Create and attach the default audio capture
        av::AudioCapture audio;
        if (devman.getDefaultMicrophone(device)) {
            LInfo("Using audio device: ", device.name)
            audio.openAudio(device.id, { 2, 44100 });
            audio.getEncoderFormat(options.iformat);
            stream.attachSource(&audio, false, true);
        }

        // Create and attach the multiplex encoder
        av::MultiplexPacketEncoder encoder(options);
        encoder.init();
        stream.attach(&encoder, 5, false);

        // Start the stream
        stream.start();

        // Keep recording until Ctrl-C is pressed
        LInfo("Recording video: ", OUTPUT_FILENAME)
        waitForShutdown([](void* opaque) {
            reinterpret_cast<PacketStream*>(opaque)->stop();
        }, &stream);
    }

    // Logger::destroy();
    return 0;
}
// Example #3
/// Creates an AVPacketEncoder for the given options and attaches it to
/// the stream at priority 5 (the stream takes ownership on success).
///
/// @param stream  the packet stream the encoder is attached to
/// @param options encoder configuration passed to the AVPacketEncoder
/// @return true if the encoder was created, initialized and attached;
///         false if any step threw a std::exception (logged via ErrorL)
bool TestPlugin::createEncoder(PacketStream& stream, av::EncoderOptions& options)
{	
	av::AVPacketEncoder* encoder = nullptr;
	try {
		// Create and initialize the encoder
		encoder = new av::AVPacketEncoder(options);
		encoder->initialize(); // may throw
		
		// Attach the encoder to the packet stream.
		// The PacketStream takes ownership of the encoder.
		stream.attach(encoder, 5, true);	
	}
	catch (const std::exception& exc) { // catch by const reference
		ErrorL << "Encoder initialization failed: " << exc.what() << endl;
		// delete on nullptr is well-defined, so no guard is needed.
		// NOTE(review): if attach() itself threw after taking ownership
		// this would double-free — assumed attach() is the last throwing
		// step before ownership transfer; confirm against PacketStream.
		delete encoder;
		encoder = nullptr;
	}
	return encoder != nullptr;
}
/// Assembles the full media pipeline on the given PacketStream:
/// capture sources -> async queue -> encoder -> optional framing ->
/// sync queue. Adapters are attached with increasing priority numbers
/// (3, 5, 10, 15, 20) which determine their order in the stream.
///
/// @param stream  the stream to populate with sources and processors
/// @param options streaming options (formats, captures, encoding/framing)
/// @param freeCaptures       NOTE(review): not referenced in this body —
///                           sources are attached with a literal `true`
///                           ownership flag; confirm intended usage.
/// @param attachPacketizers  NOTE(review): also not referenced here —
///                           framing is selected from options.framing.
/// @throws std::runtime_error for unsupported encoding/framing methods
void MediaServer::setupPacketStream(PacketStream& stream, const StreamingOptions& options, bool freeCaptures, bool attachPacketizers)
{
    DebugL << "Setup Packet Stream" << endl;

    // Attach capture sources

    // At least one of audio/video must be enabled for the pipeline to
    // produce output.
    assert(options.oformat.video.enabled || options.oformat.audio.enabled);
    if (options.oformat.video.enabled) {
        assert(options.videoCapture);

        //assert(dynamic_cast<av::VideoCapture*>(options.videoCapture.get()));
        //assert(dynamic_cast<av::ICapture*>(options.videoCapture.get()));

        //auto source = dynamic_cast<PacketSource*>(options.videoCapture.get());
        //assert(source);
        //if (!source) throw std::runtime_error("Cannot attach incompatible packet source.");

        stream.attachSource<av::VideoCapture>(options.videoCapture, true); //freeCaptures,
    }
    if (options.oformat.audio.enabled) {
        assert(options.audioCapture);
        stream.attachSource<av::AudioCapture>(options.audioCapture, true); //freeCaptures,
    }

    // Attach an FPS limiter to the stream
    //stream.attach(new FPSLimiter(5), 1, true);

    // Attach an async queue so we don't choke
    // the video capture while encoding.
    auto async = new AsyncPacketQueue(2048); //options.oformat.name == "MJPEG" ? 10 :
    stream.attach(async, 3, true);

    // Attach the video encoder at priority 5 (after the async queue).
    // NOTE(review): initialize() is deliberately commented out here,
    // unlike other call sites — presumably the encoder initializes
    // lazily inside the stream; confirm before re-enabling.
    auto encoder = new av::AVPacketEncoder(options);
    //encoder->initialize();
    stream.attach(encoder, 5, true);

    // Add format specific framings
    if (options.oformat.name == "MJPEG") {

        // Base64 encode the MJPEG stream for old browsers
        if (options.encoding.empty() ||
            options.encoding == "none" ||
            options.encoding == "None") {
            // no default encoding
        }
        else if (options.encoding == "Base64") {
            auto base64 = new Base64PacketEncoder();
            stream.attach(base64, 10, true);
        }
        else
            throw std::runtime_error("Unsupported encoding method: " + options.encoding);
    }
    else if (options.oformat.name == "FLV") {

        // Allow mid-stream flash client connection
        // FIXME: Broken in latest flash
        //auto injector = new FLVMetadataInjector(options.oformat);
        //stream.attach(injector, 10);
    }

    // Attach the HTTP output framing
    // (empty/none => no framing adapter; only chunked and multipart
    // are currently supported).
    IPacketizer* framing = nullptr;
    if (options.framing.empty() ||
        options.framing == "none" ||
        options.framing == "None")
        ; // intentionally empty: no framing adapter
        //framing = new http::StreamingAdapter("image/jpeg");

    else if (options.framing == "chunked")
        framing = new http::ChunkedAdapter("image/jpeg");

    else if (options.framing == "multipart")
        framing = new http::MultipartAdapter("image/jpeg", options.encoding == "Base64");    // false,

    else throw std::runtime_error("Unsupported framing method: " + options.framing);

    if (framing)
        stream.attach(framing, 15, true);

    // Attach a sync queue to synchronize output with the event loop
    auto sync = new SyncPacketQueue;
    stream.attach(sync, 20, true);
}