Example #1
0
FirewireVideo::FirewireVideo(
    unsigned deviceid,
    dc1394video_mode_t video_mode,
    float framerate,
    uint32_t width, uint32_t height,
    uint32_t left, uint32_t top,
    dc1394speed_t iso_speed,
    int dma_buffers, bool reset_at_boot
) :running(false)
{
    // Open the 1394 bus, resolve the deviceid'th camera to its GUID, and hand
    // off to the Format7 initialisation path (which supports an explicit ROI).
    // Throws VideoException on any bus/enumeration failure.
    d = dc1394_new ();
    if (!d)
        throw VideoException("Failed to get 1394 bus");

    err=dc1394_camera_enumerate (d, &list);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to enumerate cameras");

    // If the constructor throws, ~FirewireVideo never runs, so the
    // enumeration list must be released explicitly on every error path.
    if (list->num == 0) {
        dc1394_camera_free_list (list);
        throw VideoException("No cameras found");
    }

    if( deviceid >= list->num ) {
        dc1394_camera_free_list (list);
        throw VideoException("Invalid camera index");
    }

    const uint64_t guid = list->ids[deviceid].guid;

    dc1394_camera_free_list (list);

    init_format7_camera(guid,dma_buffers,iso_speed,video_mode,framerate,width,height,left,top, reset_at_boot);
}
Example #2
0
bool FirewireVideo::GrabNewest( unsigned char* image, bool wait )
{
    dc1394video_frame_t *f;
    err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &f);
    if( err != DC1394_SUCCESS)
        throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

    if( f ) {
        while( true )
        {
            dc1394video_frame_t *nf;
            err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &nf);
            if( err != DC1394_SUCCESS)
                throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

            if( nf )
            {
                err=dc1394_capture_enqueue(camera,f);
                f = nf;
            }else{
                break;
            }
        }
        memcpy(image,f->image,f->image_bytes);
        err=dc1394_capture_enqueue(camera,f);
        return true;
    }else if(wait){
        return GrabNext(image,true);
    }
    return false;
}
Example #3
0
UnpackVideo::UnpackVideo(std::unique_ptr<VideoInterface> &src_, VideoPixelFormat out_fmt)
    : src(std::move(src_)), size_bytes(0), buffer(0)
{
    // Wraps a single-channel source, exposing each of its streams re-packed
    // into out_fmt. Throws VideoException if src is null or either format is
    // outside the supported single-channel range.
    if( !src || out_fmt.channels != 1) {
        throw VideoException("UnpackVideo: Only supports single channel output.");
    }

    videoin.push_back(src.get());

    for(size_t s=0; s< src->Streams().size(); ++s) {
        const size_t w = src->Streams()[s].Width();
        const size_t h = src->Streams()[s].Height();

        // Check compatibility of formats: one channel, at most 16 bits/pixel.
        const VideoPixelFormat in_fmt = src->Streams()[s].PixFormat();
        if(in_fmt.channels > 1 || in_fmt.bpp > 16) {
            // Message now reflects both conditions actually being checked.
            throw VideoException("UnpackVideo: Only supports single channel input of at most 16 bits per pixel.");
        }

        // Stream pointers are byte offsets encoded relative to a null base.
        const size_t pitch = (w*out_fmt.bpp)/ 8;
        streams.push_back(pangolin::StreamInfo( out_fmt, w, h, pitch, (unsigned char*)0 + size_bytes ));
        size_bytes += h*pitch;
    }

    // NOTE(review): buffer is sized for the *input* (src->SizeBytes()) —
    // presumably it holds the raw source frame before unpacking; confirm
    // against the grab path.
    buffer = new unsigned char[src->SizeBytes()];
}
Example #4
0
FirewireVideo::FirewireVideo(
    unsigned deviceid,
    dc1394video_mode_t video_mode,
    dc1394framerate_t framerate,
    dc1394speed_t iso_speed,
    int dma_buffers
) :running(false),top(0),left(0)
{
    // Open the 1394 bus, resolve the deviceid'th camera to its GUID, and
    // initialise it in a standard (non-Format7) video mode.
    // Throws VideoException on any bus/enumeration failure.
    d = dc1394_new ();
    if (!d)
        throw VideoException("Failed to get 1394 bus");

    err=dc1394_camera_enumerate (d, &list);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to enumerate cameras");

    // If the constructor throws, ~FirewireVideo never runs, so the
    // enumeration list must be released explicitly on every error path.
    if (list->num == 0) {
        dc1394_camera_free_list (list);
        throw VideoException("No cameras found");
    }

    if( deviceid >= list->num ) {
        dc1394_camera_free_list (list);
        throw VideoException("Invalid camera index");
    }

    const uint64_t guid = list->ids[deviceid].guid;

    dc1394_camera_free_list (list);

    init_camera(guid,dma_buffers,iso_speed,video_mode,framerate);
}
Example #5
0
FirewireFrame FirewireVideo::GetNewest(bool wait)
{
    // Return the most recent frame, handing any older queued frames back to
    // the driver. Returns an empty FirewireFrame if none is available and
    // wait is false; otherwise blocks via GetNext.
    dc1394video_frame_t *frame;
    err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &frame);
    if( err != DC1394_SUCCESS)
        throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

    if( !frame ) {
        // Nothing pending: optionally fall back to a blocking get.
        return wait ? GetNext(true) : FirewireFrame(0);
    }

    // Drain the DMA queue so only the newest frame is retained.
    for(;;)
    {
        dc1394video_frame_t *newer;
        err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, &newer);
        if( err != DC1394_SUCCESS)
            throw VideoException("Could not capture frame", dc1394_error_get_string(err) );

        if( !newer )
            break;

        err=dc1394_capture_enqueue(camera,frame);
        frame = newer;
    }

    return FirewireFrame(frame);
}
VideoSplitter::VideoSplitter(VideoInterface *videoin, const std::vector<StreamInfo>& streams)
    : videoin(videoin), streams(streams)
{
    // Splits the single stream of videoin into the caller-described streams.
    if(videoin->Streams().size() != 1)
        throw VideoException("VideoSplitter input must have exactly one stream");

    // Every output stream must fit entirely inside the input stream.
    const size_t input_size = videoin->Streams()[0].SizeBytes();
    for(size_t s = 0; s < streams.size(); ++s) {
        const size_t stream_end = (size_t)streams[s].Offset() + streams[s].SizeBytes();
        if(input_size < stream_end)
            throw VideoException("VideoSplitter: stream extends past end of input");
    }
}
Example #7
0
VideoPixelFormat VideoFormatFromString(const std::string& format)
{
    // Linear scan of the format table, which is terminated by an entry
    // whose name is empty. Throws if the name is not recognised.
    int idx = 0;
    while( !SupportedVideoPixelFormats[idx].format.empty() ) {
        if( SupportedVideoPixelFormats[idx].format == format )
            return SupportedVideoPixelFormats[idx];
        ++idx;
    }
    throw VideoException("Unknown Format",format);
}
Example #8
0
void ImagesVideo::PopulateFilenames(const std::string& wildcard_path)
{
    // Expand "[glob1,glob2,...]" into one wildcard per channel and collect
    // the matching filenames for each. num_files becomes the minimum count
    // across channels; a channel with no matches is an error.
    const std::vector<std::string> wildcards = Expand(wildcard_path, '[', ']', ',');
    num_channels = wildcards.size();
    filenames.resize(num_channels);

    for(size_t c = 0; c < wildcards.size(); ++c) {
        const std::string expanded = PathExpand(wildcards[c]);
        FilesMatchingWildcard(expanded, filenames[c]);

        const int channel_count = (int)filenames[c].size();
        if(num_files < 0) {
            // First channel establishes the expected frame count.
            num_files = channel_count;
        }else{
            if( num_files != channel_count ) {
                std::cerr << "Warning: Video Channels have unequal number of files" << std::endl;
            }
            num_files = std::min(num_files, channel_count);
        }

        if(num_files == 0) {
            throw VideoException("No files found for wildcard '" + expanded + "'");
        }
    }

    // Resize empty frames vector to hold future images.
    loaded.resize(num_files);
}
Example #9
0
	void Retry(std::string desc)
	{
		// Record one retry; once maxRetries is exceeded, log a run-length
		// compressed summary of what the retries were spent on and give up.
		retryStack.push_back(desc);
		if(retryStack.size() <= maxRetries)
			return;

		FlogW("Maximum number of retries reached, they were spent on:");

		int repeat = 0;
		std::string prev = "";

		for(const auto& entry : retryStack){
			if(prev == entry){
				// Same message as before: just count it.
				repeat++;
			}else{
				// Flush the repeat count of the previous run, then print
				// the new message.
				if(repeat > 0)
					FlogW("(repeats " << repeat << " times)");

				FlogW(" * " << entry);
				repeat = 0;
			}

			prev = entry;
		}

		// Flush the trailing run, if any.
		if(repeat > 0)
				FlogW("(repeats " << repeat << " times)");

		throw VideoException(VideoException::ERetries);
	}
Example #10
0
// Wraps src, exposing each of its (bayer-coded) streams as an RGB24 stream
// debayered with the requested method. Throws VideoException if src is null.
DebayerVideo::DebayerVideo(VideoInterface* src, color_filter_t tile, bayer_method_t method)
    : size_bytes(0), buffer(0), tile(tile), method(method)
{
    if(!src) {
        throw VideoException("DebayerVideo: VideoInterface in must not be null");
    }
    videoin.push_back(src);

#ifndef HAVE_DC1394
    // Without dc1394 no real debayer routines are available: silently
    // override the caller's requested method with simple 2x2 downsampling.
    pango_print_warn("debayer: dc1394 unavailable for debayering. Using simple downsampling method instead.\n");
    this->method = BAYER_METHOD_DOWNSAMPLE;
#endif

    const pangolin::VideoPixelFormat rgb_format = pangolin::VideoFormatFromString("RGB24");
    for(size_t s=0; s< src->Streams().size(); ++s) {
        size_t w = src->Streams()[s].Width();
        size_t h = src->Streams()[s].Height();
        if(this->method==BAYER_METHOD_DOWNSAMPLE) {
            // Downsampling halves each output dimension.
            w = w/2;
            h = h/2;
        }
        // Stream pointers are byte offsets encoded relative to a null base.
        streams.push_back(pangolin::StreamInfo( rgb_format, w, h, w*rgb_format.bpp / 8, (unsigned char*)0 + size_bytes ));
        size_bytes += w*h*rgb_format.bpp / 8;
    }

    // NOTE(review): buffer is sized for the *input* (src->SizeBytes()), not
    // the RGB output (size_bytes) — presumably it holds the raw source frame
    // before conversion; confirm against the grab path.
    buffer = new unsigned char[src->SizeBytes()];
}
Example #11
0
void FirewireVideo::SetAutoGain(){

        dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_AUTO);
        if (err < 0) {
                throw VideoException("Could not set auto gain mode");
        }
}
Example #12
0
double FirewireVideo::bus_period_from_iso_speed(dc1394speed_t iso_speed)
{
  // Return the 1394 bus cycle period in seconds for a given iso speed.
  // The period halves each time the speed doubles (125us at S400).
  switch(iso_speed){
    case DC1394_ISO_SPEED_3200: return 15.625e-6;
    case DC1394_ISO_SPEED_1600: return 31.25e-6;
    case DC1394_ISO_SPEED_800:  return 62.5e-6;
    case DC1394_ISO_SPEED_400:  return 125e-6;
    case DC1394_ISO_SPEED_200:  return 250e-6;
    case DC1394_ISO_SPEED_100:  return 500e-6;
    default:
      throw VideoException("iso speed not valid");
  }
}
Example #13
0
// Factory: construct a video recorder matching the URI scheme ("pango", or
// "ffmpeg" when compiled in). The caller takes ownership of the returned
// pointer. Throws VideoException for unrecognised schemes.
VideoOutputInterface* OpenVideoOutput(const Uri& uri)
{
    VideoOutputInterface* recorder = 0;
    
    if(!uri.scheme.compare("pango"))
    {
        // Native pango log format; the URL is used directly as the filename.
        const std::string filename = uri.url;
        recorder = new PangoVideoOutput(filename);
    }else
#ifdef HAVE_FFMPEG    
    if(!uri.scheme.compare("ffmpeg") )
    {
        // Encoded output via ffmpeg; optional fps/bps URI options tune the
        // encoder (defaults: 60 fps, 20000*1024 bps).
        int desired_frame_rate = uri.Get("fps", 60);
        int desired_bit_rate = uri.Get("bps", 20000*1024);
        std::string filename = uri.url;

        // "unique_filename" option avoids clobbering an existing file.
        if(uri.Contains("unique_filename")) {        
            filename = MakeFilenameUnique(filename);
        }
        
        recorder = new FfmpegVideoOutput(filename, desired_frame_rate, desired_bit_rate);
    }else
#endif
    {
        throw VideoException("Unable to open recorder URI");
    }
    
    return recorder;
}
Example #14
0
void FirewireVideo::SetInternalTrigger() 
{
    dc1394error_t err = dc1394_external_trigger_set_power(camera, DC1394_OFF);
    if (err < 0) {
        throw VideoException("Could not set internal trigger mode");
    }
}
Example #15
0
void FirewireVideo::SetAutoShutterTime(){

	dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_AUTO);
	if (err < 0) {
		throw VideoException("Could not set auto shutter mode");
	}
}
Example #16
0
	void tick(bool includeOldAudio = false){
		bool success = false;

		StreamFrameMap streamFrames;

		streamFrames[videoStream] = Frame::CreateEmpty();
		streamFrames[audioStream] = Frame::CreateEmpty();

		while(!IsEof() && !success)
		{
			try
			{
				int audioQueueTargetSize = audioDevice->GetBlockSize() * 4;

				while(
					frameQueue.size() < (unsigned int)targetFrameQueueSize || 
					(hasAudioStream() && audioHandler->getAudioQueueSize() < audioQueueTargetSize))
				{
					if(frameQueue.size() >= (unsigned int)maxFrameQueueSize)
						break;
					
					bool frameDecoded = decodeFrame(streamFrames);

					if(!frameDecoded)
						throw VideoException(VideoException::EDecodingVideo);

					if(streamFrames[videoStream]->finished != 0){
						frameQueue.push(streamFrames[videoStream]->Clone());
						streamFrames[videoStream] = Frame::CreateEmpty();
					}
					
					if(streamFrames[audioStream]->finished != 0){
						// only enqueue audio that's newer than the current video time, 
						// eg. on seeking we might encounter audio that's older than the frames in the frame queue.
						if(streamFrames[audioStream]->GetSamples().size() > 0 && 
							(includeOldAudio || streamFrames[audioStream]->GetSamples()[0].ts >= timeHandler->GetTime()))
						{
							audioHandler->EnqueueAudio(streamFrames[audioStream]->GetSamples());
						}else{
							FlogD("skipping old audio samples: " << streamFrames[audioStream]->GetSamples().size());
						}
						streamFrames[audioStream] = Frame::CreateEmpty();
					}
				}

				// sync framequeue target size with number of frames needed for audio queue 
				if(targetFrameQueueSize < (int)frameQueue.size()){
					targetFrameQueueSize = std::max((int)frameQueue.size(), minFrameQueueSize);
				}
					
				success = true;
			}

			catch(VideoException e)
			{
				Retry(Str("Exception in tick: " << e.what()));
			}
		}
	}
Example #17
0
float FirewireVideo::GetGamma() const
{
    // Query the camera's absolute-valued gamma feature.
    float value = 0.0f;
    err = dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_GAMMA,&value);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to read gamma");

    return value;
}
Example #18
0
void FirewireVideo::SetShutterTime(float val){

	dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
	if (err < 0) {
		throw VideoException("Could not set manual shutter mode");
	}

	err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_ON);
	if (err < 0) {
          throw VideoException("Could not set absolute control for shutter");
        }

	err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, val);
	if (err < 0) {
		throw VideoException("Could not set shutter value");
	}
}
Example #19
0
void FirewireVideo::SetGain(float val){

        dc1394error_t err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
        if (err < 0) {
                throw VideoException("Could not set manual gain mode");
        }

        err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_GAIN, DC1394_ON);
        if (err < 0) {
          throw VideoException("Could not set absolute control for gain");
        }

        err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_GAIN, val);
        if (err < 0) {
                throw VideoException("Could not set gain value");
        }
}
Example #20
0
float FirewireVideo::GetShutterTime() const
{
    // Query the camera's absolute-valued (seconds) shutter feature.
    float value = 0.0f;
    err = dc1394_feature_get_absolute_value(camera,DC1394_FEATURE_SHUTTER,&value);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to read shutter");

    return value;
}
Example #21
0
void FirewireVideo::SetShutterTimeQuant(int shutter)
{
    // Set the shutter via its quantised (integer register) interface.
    // TODO: Set mode as well
    err = dc1394_feature_set_value(camera,DC1394_FEATURE_SHUTTER,shutter);
    if( err != DC1394_SUCCESS )
        throw VideoException("Failed to set shutter");
}
Example #22
0
	void updateBitmapBgr32(uint8_t* pixels, int w, int h)
	{
		// Scale/convert the current frame into the caller-provided w x h
		// RGB32 pixel buffer. Throws EScaling if no frame is set or the
		// picture cannot be wired up.
		if(currentFrame == 0){
			FlogE("Video::updateBitmapBgr32() called but currentFrame is unset");
			throw VideoException(VideoException::EScaling);
		}

		// Use the AV_PIX_FMT_* name (consistent with updateOverlay); the
		// legacy PIX_FMT_RGB32 alias is deprecated and removed in newer ffmpeg.
		AVPixelFormat fmt = AV_PIX_FMT_RGB32;

		AVPicture pict;
		int avret = avpicture_fill(&pict, pixels, fmt, w, h);
		
		if(avret < 0){
			FlogE("avpicture_fill returned " << avret);
			throw VideoException(VideoException::EScaling);
		}
		
		currentFrame->CopyScaled(&pict, w, h, fmt);
	}
Example #23
0
void FirewireVideo::Start()
{
    // Begin iso transmission; no-op if the camera is already running.
    if( running )
        return;

    err=dc1394_video_set_transmission(camera, DC1394_ON);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not start camera iso transmission");

    running = true;
}
Example #24
0
// Open and configure the camera with the given GUID in a standard (non-
// Format7) video mode, set up DMA capture, and start iso transmission.
// The call order (operation mode -> iso speed -> video mode -> framerate ->
// capture setup) follows the dc1394 setup sequence; do not reorder.
// Throws VideoException on any failure.
void FirewireVideo::init_camera(
    uint64_t guid, int dma_frames,
    dc1394speed_t iso_speed,
    dc1394video_mode_t video_mode,
    dc1394framerate_t framerate
    ) {

    // Format7 modes carry an ROI and must go through the other constructor.
    if(video_mode>=DC1394_VIDEO_MODE_FORMAT7_0)
      throw VideoException("format7 modes need to be initialized through the constructor that allows for specifying the roi");

    camera = dc1394_camera_new (d, guid);
    if (!camera)
        throw VideoException("Failed to initialize camera");

    // Attempt to stop camera if it is already running
    // (best effort: errors from the query are deliberately ignored).
    dc1394switch_t is_iso_on = DC1394_OFF;
    dc1394_video_get_transmission(camera, &is_iso_on);
    if (is_iso_on==DC1394_ON) {
        dc1394_video_set_transmission(camera, DC1394_OFF);
    }


    cout << "Using camera with GUID " << camera->guid << endl;

    //-----------------------------------------------------------------------
    //  setup capture
    //-----------------------------------------------------------------------

    // Speeds of S800 and above require 1394B operation mode.
    if( iso_speed >= DC1394_ISO_SPEED_800)
    {
        err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
        if( err != DC1394_SUCCESS )
            throw VideoException("Could not set DC1394_OPERATION_MODE_1394B");
    }

    err=dc1394_video_set_iso_speed(camera, iso_speed);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set iso speed");

    err=dc1394_video_set_mode(camera, video_mode);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set video mode");

    err=dc1394_video_set_framerate(camera, framerate);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set framerate");

    // Allocate the DMA ring of dma_frames buffers.
    err=dc1394_capture_setup(camera,dma_frames, DC1394_CAPTURE_FLAGS_DEFAULT);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not setup camera - check settings");

    //-----------------------------------------------------------------------
    //  initialise width and height from mode
    //-----------------------------------------------------------------------
    dc1394_get_image_size_from_video_mode(camera, video_mode, &width, &height);

    Start();
}
Example #25
0
int OpenNi2Video::AddDevice(const std::string& device_uri)
{
    const size_t dev_id = numDevices;
    openni::Status rc = devices[dev_id].open(device_uri.c_str());
    if (rc != openni::STATUS_OK) {
        throw VideoException( "OpenNI2: Couldn't open device.", openni::OpenNI::getExtendedError() );
    }
    ++numDevices;
    return dev_id;
}
Example #26
0
void FirewireVideo::Stop()
{
    // Halt iso transmission; no-op if the camera is not running.
    if( !running )
        return;

    err=dc1394_video_set_transmission(camera,DC1394_OFF);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not stop the camera");

    running = false;
}
Example #27
0
void UvcVideo::Start()
{
    uvc_error_t stream_err = uvc_stream_start(strm_, NULL, this, 0);
    
    if (stream_err != UVC_SUCCESS) {
        uvc_perror(stream_err, "uvc_stream_start");
        uvc_close(devh_);
        uvc_unref_device(dev_);
        throw VideoException("Unable to start streaming.");
    }
    
    if (frame_) {
        uvc_free_frame(frame_);
    }
    
    size_bytes = ctrl_.dwMaxVideoFrameSize;
    frame_ = uvc_allocate_frame(size_bytes);
    if(!frame_) {
        throw VideoException("Unable to allocate frame.");
    }
}
Example #28
0
	// Scale/convert the current frame into caller-provided YUYV422 overlay
	// planes (pixels/pitches). Throws EScaling if no frame is set or the
	// picture cannot be wired up.
	void updateOverlay(uint8_t** pixels, const uint16_t* pitches, int w, int h)
	{
		if(currentFrame == 0){
			FlogE("Video::updateOverlay() called but currentFrame is unset");
			throw VideoException(VideoException::EScaling);
		}

		// avpicture_fill with a NULL buffer only computes plane sizes and
		// linesizes; the data pointers are patched in below.
		AVPicture pict;
		int avret = avpicture_fill(&pict, NULL, AV_PIX_FMT_YUYV422, w, h);

		if(avret < 0){
			FlogE("avpicture_fill returned " << avret);
			throw VideoException(VideoException::EScaling);
		}

		// NOTE(review): three planes are wired up although YUYV422 is a
		// packed single-plane format — presumably only plane 0 matters to
		// CopyScaled; confirm against the overlay caller.
		for(int i = 0; i < 3; i++){
			pict.data[i] = pixels[i];
			pict.linesize[i] = pitches[i];
		}

		currentFrame->CopyScaled(&pict, w, h, AV_PIX_FMT_YUYV422);
	}
Example #29
0
VideoPixelFormat PleoraFormat(const PvGenEnum* pfmt)
{
    // Map a Pleora pixel-format name onto the equivalent pangolin format.
    const std::string spfmt = pfmt->ToString().GetAscii();

    if( spfmt == "Mono8" )   return VideoFormatFromString("GRAY8");
    if( spfmt == "Mono10p" ) return VideoFormatFromString("GRAY10");
    if( spfmt == "Mono12p" ) return VideoFormatFromString("GRAY12");

    throw VideoException("Unknown Pleora pixel format", spfmt);
}
Example #30
0
void FirewireVideo::SetExternalTrigger(dc1394trigger_mode_t mode, dc1394trigger_polarity_t polarity, dc1394trigger_source_t source)
{
    // Configure the external trigger (polarity, mode, source) and then
    // power it on; each step throws on failure.
    if (dc1394_external_trigger_set_polarity(camera, polarity) < 0) {
        throw VideoException("Could not set external trigger polarity");
    }

    if (dc1394_external_trigger_set_mode(camera, mode) < 0) {
        throw VideoException("Could not set external trigger mode");
    }

    if (dc1394_external_trigger_set_source(camera, source) < 0) {
        throw VideoException("Could not set external trigger source");
    }

    if (dc1394_external_trigger_set_power(camera, DC1394_ON) < 0) {
        throw VideoException("Could not set external trigger power");
    }
}