Example #1
0
bool VideoInput::captureFrame()
{
    // Return true if capture can continue, false if it must be stopped

    if (not decoder_)
        return false;

    const auto ret = decoder_->decode(getNewFrame());

    switch (ret) {
        case MediaDecoder::Status::ReadError:
        case MediaDecoder::Status::DecodeError:
            return false;

        // End of streamed file
        case MediaDecoder::Status::EOFError:
            createDecoder();
            return static_cast<bool>(decoder_);

        case MediaDecoder::Status::FrameFinished:
            publishFrame();

        // fallthrough: continue decoding
        case MediaDecoder::Status::Success:
        default:
            return true;
    }
}
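A minimal sketch of how the contract documented above (true = keep capturing, false = stop) might be consumed; the driver function and its name are assumptions, not from the source.

// Hypothetical driver (not from the source): keep capturing until
// captureFrame() reports that capture must stop.
void runCaptureLoop(VideoInput& input)
{
    while (input.captureFrame())
        ;
}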
Example #2
0
bool VideoInput::captureFrame()
{
    VideoPacket pkt;
    const auto ret = decoder_->decode(getNewFrame(), pkt);

    switch (ret) {
        case VideoDecoder::Status::FrameFinished:
            break;

        case VideoDecoder::Status::ReadError:
        case VideoDecoder::Status::DecodeError:
            loop_.stop();
            // fallthrough
        case VideoDecoder::Status::Success:
            return false;

        // Play in loop
        case VideoDecoder::Status::EOFError:
            deleteDecoder();
            createDecoder();
            return false;
    }

    publishFrame();
    return true;
}
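For context, a minimal sketch of the decoder interface both examples dispatch on; the Status enumerators appear above, but the class shape and the decode() signature are assumptions, not taken from the source.

class VideoFrame;
class VideoPacket;

// Hypothetical interface assumed by the examples above; only the Status
// enumerators come from them, the rest is illustrative.
class VideoDecoder {
public:
    enum class Status { Success, FrameFinished, EOFError, ReadError, DecodeError };
    virtual ~VideoDecoder() = default;
    virtual Status decode(VideoFrame& frame, VideoPacket& pkt) = 0; // assumed signature
};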
Example #3
0
int main(int argc, char** argv)
{
	bool connectionSuccessful = false;
	while(!connectionSuccessful)
	{
		try
		{
			spykee = new SpykeeManager(SPYKEE_USER, SPYKEE_PWD);
			connectionSuccessful = true;
		}
		catch (SpykeeException& e)
		{
			cerr << "Error connecting to the robot" << endl;
			cerr << "Make sure that Spykee is turned on and " <<
					"that your wireless network is working! " <<
					"(Retrying in 5 seconds...)" << endl;
			sleep(5);
		}
	}

	/* NOTE: the node is initialized here so that SIGINT can kill the application during
	 * the connection loop above (ROS installs a signal handler for SIGINT that should make
	 * ros::ok() return false, but that did not work reliably when used inside that loop).
	 */
	ros::init(argc, argv, "spykee");
	ros::NodeHandle ros_node;
	ros::Subscriber sub = ros_node.subscribe("spykee_motion", 1000, move);
	ros::Publisher img_pub = ros_node.advertise<sensor_msgs::CompressedImage>("spykee_camera", 3);
	ros::Publisher bat_pub = ros_node.advertise<std_msgs::Int8>("spykee_battery", 3);

	spykee->unplug();
	spykee->setLed(0, false); /* try to turn off the flash */
	spykee->setCameraStatus(true);

	/* the AsyncSpinner runs the spykee_motion subscriber callback in a background
	 * thread while the loop below polls the robot for camera and battery packets */
	ros::AsyncSpinner spinner(1);
	spinner.start();
	while (ros::ok())
	{
		spykee->readPacket();
		if(spykee->hasImage())
		{
			publishFrame(img_pub, spykee->readImage());
		}
		if(spykee->hasBattery())
		{
			publishBattery(bat_pub, spykee->getBatteryLevel());
		}
	}
	ros::waitForShutdown();
	delete spykee;
	return EXIT_SUCCESS;
}
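The publishFrame() and publishBattery() helpers are not shown; a plausible sketch follows, assuming SpykeeManager delivers the camera image as an already-encoded JPEG byte buffer (the helper names match the calls above, everything else is an assumption).

// Hypothetical implementations of the helpers used in main(); the Spykee
// image is assumed to arrive as raw JPEG bytes.
void publishFrame(ros::Publisher& pub, const std::vector<unsigned char>& jpeg)
{
	sensor_msgs::CompressedImage msg;
	msg.header.stamp = ros::Time::now();
	msg.format = "jpeg";
	msg.data = jpeg;                       // JPEG bytes straight from the robot
	pub.publish(msg);
}

void publishBattery(ros::Publisher& pub, int level)
{
	std_msgs::Int8 msg;
	msg.data = static_cast<int8_t>(level); // battery level as reported by the robot
	pub.publish(msg);
}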
Example #4
0
void
VideoMixer::process()
{
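    // Pace the mixer: if less than FRAME_DURATION has elapsed since the last
    // run, sleep for the remainder before composing the next output frame.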
    const auto now = std::chrono::system_clock::now();
    const auto diff = now - lastProcess_;
    const auto delay = FRAME_DURATION - diff;
    if (delay.count() > 0)
        std::this_thread::sleep_for(delay);
    lastProcess_ = now;

    VideoFrame& output = getNewFrame();
    try {
        output.reserve(VIDEO_PIXFMT_YUYV422, width_, height_);
    } catch (const std::bad_alloc& e) {
        RING_ERR("VideoFrame::reserve() failed");
        return;
    }

    yuv422_clear_to_black(output);

    {
        auto lock(rwMutex_.read());

        int i = 0;
        for (const auto& x : sources_) {
            /* thread stop pending? */
            if (!loop_.isRunning())
                return;

            // make rendered frame temporarily unavailable for update()
            // to avoid concurrent access.
            std::unique_ptr<VideoFrame> input;
            x->atomic_swap_render(input);

            if (input)
                render_frame(output, *input, i);

            x->atomic_swap_render(input);
            ++i;
        }
    }

    publishFrame();
}
Example #5
0
void FFmpegDecoderVideo::decodeLoop()
{
    FFmpegPacket packet;
    double pts;

    while (! m_exit)
    {
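        // Each pass: drain the current packet, honor any pause request,
        // then pop the next packet (or a flush) from the queue.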
        // Work on the current packet until we have decoded all of it

        while (m_bytes_remaining > 0)
        {
            // Save global PTS to be stored in m_frame via getBuffer()

            m_packet_pts = packet.packet.pts;

            // Decode video frame

            int frame_finished = 0;

            // We want to use the entire packet since some codecs will require extra information for decoding
            const int bytes_decoded = avcodec_decode_video2(m_context, m_frame.get(), &frame_finished, &(packet.packet));

            if (bytes_decoded < 0)
                throw std::runtime_error("avcodec_decode_video2() failed");

            m_bytes_remaining -= bytes_decoded;
            m_packet_data += bytes_decoded;

            // Publish the frame if we have decoded a complete frame
            if (frame_finished)
            {
                AVRational timebase;
                // Find out the frame pts
                if (m_frame->pts != int64_t(AV_NOPTS_VALUE))
                {
                    pts = m_frame->pts;
                    timebase = m_context->time_base;
                }
                else if (packet.packet.dts == int64_t(AV_NOPTS_VALUE) &&
                        m_frame->opaque != 0 &&
                        *reinterpret_cast<const int64_t*>(m_frame->opaque) != int64_t(AV_NOPTS_VALUE))
                {
                    pts = *reinterpret_cast<const int64_t*>(m_frame->opaque);
                    timebase = m_stream->time_base;
                }
                else if (packet.packet.dts != int64_t(AV_NOPTS_VALUE))
                {
                    pts = packet.packet.dts;
                    timebase = m_stream->time_base;
                }
                else
                {
                    pts = 0;
                    timebase = m_context->time_base;
                }

                // convert the chosen pts from time_base units to seconds
                pts *= av_q2d(timebase);

                const double synched_pts = m_clocks.videoSynchClock(m_frame.get(), av_q2d(timebase), pts);
                const double frame_delay = m_clocks.videoRefreshSchedule(synched_pts);

                publishFrame(frame_delay, m_clocks.audioDisabled());
            }
        }

        while(m_paused && !m_exit)
        {
            microSleep(10000);
        }

        // Get the next packet

        pts = 0;

        if (packet.valid())
            packet.clear();

        bool is_empty = true;
        packet = m_packets.timedPop(is_empty, 10);

        if (! is_empty)
        {
            if (packet.type == FFmpegPacket::PACKET_DATA)
            {
                m_bytes_remaining = packet.packet.size;
                m_packet_data = packet.packet.data;
            }
            else if (packet.type == FFmpegPacket::PACKET_FLUSH)
            {
                avcodec_flush_buffers(m_context);
            }
        }
    }
}