Example #1
static dc1394camera_t *open_camera(void)
{
	dc1394_t *d;
	dc1394camera_list_t * list;
	dc1394camera_t *camera;
	dc1394error_t err;

	d = dc1394_new();
	if (!d) {
		return NULL;
	}

	err = dc1394_camera_enumerate(d, &list);
	if (err != DC1394_SUCCESS) {
		dc1394_free(d);
		return NULL;
	}
	if (list->num == 0) {
		dc1394_camera_free_list(list);
		dc1394_free(d);
		return NULL;
	}

	camera = dc1394_camera_new(d, list->ids[0].guid);
	if (!camera) {
		dc1394_camera_free_list(list);
		dc1394_free(d);
		return NULL;
	}
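	/* The enumeration list was only needed to look up the first camera's GUID; release it now. */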
	dc1394_camera_free_list(list);

	printf("Using camera with GUID %"PRIx64"\n", camera->guid);
	return camera;
}
Example #2
/********************************
** Client is asking us to establish connection to the device
*********************************/
bool Cindi_iidc::Connect() {
    dc1394camera_list_t *list;
    dc1394error_t err;

    dc1394 = dc1394_new();
    if (!dc1394) {
        return false;
    }

    /* init bus and connect to the camera */
    err = dc1394_camera_enumerate(dc1394, &list);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Could not find DC1394 cameras!");
        return false;
    }
    if (!list->num) {
        IDMessage(getDeviceName(), "No DC1394 cameras found!");
        return false;
    }
    dcam = dc1394_camera_new(dc1394, list->ids[0].guid);
    if (!dcam) {
        IDMessage(getDeviceName(), "Unable to connect to camera!");
        return false;
    }
    dc1394_camera_free_list(list);

    /* Reset camera */
    err = dc1394_camera_reset(dcam);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to reset camera!");
        return false;
    }

    IDMessage(getDeviceName(), "indi-iidc connected successfully!");
    return true;
}
Example #3
void ofxLibdc::initCamera(int cameraNumber) {
	// create camera struct
	dc1394camera_list_t* list;
	dc1394_camera_enumerate(libdcContext, &list);
	if(list->num == 0) {
		ofLog(OF_LOG_ERROR, "No cameras found.");
		dc1394_camera_free_list(list);
		return;
	}
	camera = dc1394_camera_new(libdcContext, list->ids[cameraNumber].guid);
	if (!camera) {
		stringstream error;
		error << "Failed to initialize camera " << cameraNumber << " with guid " << list->ids[cameraNumber].guid;
		ofLog(OF_LOG_ERROR, error.str());
		dc1394_camera_free_list(list);
		return;
	} else {
		stringstream msg;
		msg << "Using camera with GUID " << camera->guid;
		ofLog(OF_LOG_VERBOSE, msg.str());
	}
	dc1394_camera_free_list(list);
	
	#ifdef TARGET_OSX
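	// Release any isochronous bandwidth and channels a previous (possibly crashed) process may still hold, so later capture setup does not fail for lack of bus resources.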
	dc1394_iso_release_bandwidth(camera, INT_MAX);
	for (int channel = 0; channel < 64; channel++)
		dc1394_iso_release_channel(camera, channel);
	#endif
	
	#ifdef TARGET_LINUX
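	// On Linux, resetting the bus achieves the same cleanup.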
	dc1394_reset_bus(camera);
	#endif
}
Example #4
vector<CameraInfo> CameraIIDC::getCameraList(){
    
    dc1394_t *context = dc1394_new();
    
    dc1394camera_list_t *camera_list;
    dc1394error_t err;
    err = dc1394_camera_enumerate(context, &camera_list);
    DC1394_WRN(err,"libdc1394: Failed to enumerate cameras!");

    vector<CameraInfo> ret;
    
    for (unsigned int i=0; i<camera_list->num; i++) {
        CameraInfo info;
        dc1394camera_t *cam;
        cam = dc1394_camera_new(context, camera_list->ids[i].guid);
        if (!cam)
            continue;

        //info.vendor = std::string(cam->vendor ? cam->vendor : "");
        info.vendor = "IIDC";
        info.model = string(cam->model ? cam->model : "");
        info.busID = (unsigned int)cam->guid;
        
        dc1394_camera_free(cam);
        ret.push_back(info);
    }
    
    dc1394_camera_free_list(camera_list);
    dc1394_free(context);
    
    return ret;
}
Example #5
bool CvCaptureCAM_DC1394_v2_CPP::open(int index)
{
    bool result = false;
    dc1394camera_list_t* cameraList = 0;
    dc1394error_t err;

    close();

    if (!dc1394.dc)
        goto _exit_;

    err = dc1394_camera_enumerate(dc1394.dc, &cameraList);
    if (err < 0 || !cameraList || (unsigned)index >= (unsigned)cameraList->num)
        goto _exit_;

    guid = cameraList->ids[index].guid;
    dcCam = dc1394_camera_new(dc1394.dc, guid);
    if (!dcCam)
        goto _exit_;

    cameraId = dcCam->vendor_id;
    //get all features
    if (dc1394_feature_get_all(dcCam,&feature_set) == DC1394_SUCCESS)
        result = true;
    else
        result = false;

_exit_:
    if (cameraList)
        dc1394_camera_free_list(cameraList);

    return result;
}
Example #6
void FirewireVideo::init_camera(
    uint64_t guid, int dma_frames,
    dc1394speed_t iso_speed,
    dc1394video_mode_t video_mode,
    dc1394framerate_t framerate
    ) {

    if(video_mode>=DC1394_VIDEO_MODE_FORMAT7_0)
      throw VideoException("format7 modes need to be initialized through the constructor that allows for specifying the roi");

    camera = dc1394_camera_new (d, guid);
    if (!camera)
        throw VideoException("Failed to initialize camera");

    // Attempt to stop camera if it is already running
    dc1394switch_t is_iso_on = DC1394_OFF;
    dc1394_video_get_transmission(camera, &is_iso_on);
    if (is_iso_on==DC1394_ON) {
        dc1394_video_set_transmission(camera, DC1394_OFF);
    }


    cout << "Using camera with GUID " << camera->guid << endl;

    //-----------------------------------------------------------------------
    //  setup capture
    //-----------------------------------------------------------------------

    if( iso_speed >= DC1394_ISO_SPEED_800)
    {
        err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
        if( err != DC1394_SUCCESS )
            throw VideoException("Could not set DC1394_OPERATION_MODE_1394B");
    }

    err=dc1394_video_set_iso_speed(camera, iso_speed);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set iso speed");

    err=dc1394_video_set_mode(camera, video_mode);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set video mode");

    err=dc1394_video_set_framerate(camera, framerate);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not set framerate");

    err=dc1394_capture_setup(camera,dma_frames, DC1394_CAPTURE_FLAGS_DEFAULT);
    if( err != DC1394_SUCCESS )
        throw VideoException("Could not setup camera - check settings");

    //-----------------------------------------------------------------------
    //  initialise width and height from mode
    //-----------------------------------------------------------------------
    dc1394_get_image_size_from_video_mode(camera, video_mode, &width, &height);

    Start();
}
Example #7
File: libFW.c Project: Linnara/yFirewire
int _setupCam(long yExpo, long yGain, long *sizex, long *sizey, long speed){
  dc1394camera_list_t * list;
  dc1394_t *d = dc1394_new ();
  if (!d)
    return -1;

  dc1394speed_t iso_speed;
   if(speed == 400) 
     iso_speed = DC1394_ISO_SPEED_400; 
   else if(speed == 800) 
     iso_speed = DC1394_ISO_SPEED_800; 
   else 
     return -1; 
   //iso_speed = DC1394_ISO_SPEED_400;

  dc1394_log_register_handler(DC1394_LOG_WARNING, NULL, NULL);
  err=dc1394_camera_enumerate (d, &list);
  DC1394_ERR_RTN(err,"Failed to enumerate cameras");
	
  if (list->num == 0) {
    dc1394_log_error("No cameras found");
    return -1;
  }
	
  camera = dc1394_camera_new (d, list->ids[0].guid);
  if (!camera) {
    dc1394_log_error("Failed to initialize camera with guid %lld", list->ids[0].guid);
    return -1;
  }
  dc1394_camera_free_list (list);
	
  printf("Using camera with GUID %lld\n", camera->guid);
	
  release_iso_and_bw();

  uint32_t expo = yExpo; 
  uint32_t gain = yGain;

  /*-----------------------------------------------------------------------
   *  setup capture
   *-----------------------------------------------------------------------*/
  err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not set operation mode");

  err=dc1394_video_set_iso_speed(camera, iso_speed);
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not set iso speed");

  _setVideoMode(0, sizex, sizey);

  err=dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, gain);
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not define gain");

  err=dc1394_feature_set_value(camera, DC1394_FEATURE_SHUTTER, expo);
  DC1394_ERR_CLN_RTN(err,_unsetupCam(),"Could not define shutter");

  return 0;
}
Example #8
int main(int argc, char *argv[]) {
	unsigned int i;
	dc1394_t * d;
	dc1394camera_list_t * list;
	dc1394error_t err;
	dc1394featureset_t features;

	d = dc1394_new();
	if(!d)
		return 1;
	err = dc1394_camera_enumerate(d, &list);
	DC1394_ERR_RTN(err,"Failed to enumerate cameras");

	if(list->num == 0) {
		dc1394_log_error("No cameras found");
		return 1;
	}

	for(i = 0; i < list->num; i++) {
		dc1394camera_t *camera = dc1394_camera_new(d, list->ids[i].guid);

		if(camera) {
			unsigned int j;
			dc1394video_modes_t modes;

			// Print hardware information.
			dc1394_camera_print_info(camera, stdout);

			// Print supported camera features.
			err = dc1394_feature_get_all(camera,&features);
			if(err != DC1394_SUCCESS) {
				dc1394_log_warning("Could not get feature set");
			} else {
				dc1394_feature_print_all(&features, stdout);
			}

			// Print a list of supported modes.
			printf("------ Supported Video Modes ------\n");

			err = dc1394_video_get_supported_modes(camera, &modes);
			DC1394_ERR_RTN(err,"Could not get list of modes");

			for(j = 0; j < modes.num; j++) {
				print_video_mode_info(camera, modes.modes[j]);
			}

			dc1394_camera_free(camera);
		}
	}
	dc1394_camera_free_list (list);
	dc1394_free (d);

	return 0;
}
Example #9
void ofxLibdc::setup(int cameraNumber) {	
	// create camera struct
	dc1394camera_list_t * list;
	dc1394_camera_enumerate (libdcContext, &list);
	if (list->num == 0) {
		ofLog(OF_LOG_ERROR, "No cameras found.");
		dc1394_camera_free_list(list);
		return;
	}
	camera = dc1394_camera_new (libdcContext, list->ids[cameraNumber].guid);
	if (!camera) {
		stringstream error;
		error << "Failed to initialize camera " << cameraNumber << " with guid " << list->ids[cameraNumber].guid;
		ofLog(OF_LOG_ERROR, error.str());
		dc1394_camera_free_list(list);
		return;
	} else {
		stringstream msg;
		msg << "Using camera with GUID " << camera->guid;
		ofLog(OF_LOG_VERBOSE, msg.str());
	}
	dc1394_camera_free_list(list);
	
	// select highest res mode:
	dc1394video_modes_t video_modes;
	dc1394_video_get_supported_modes(camera, &video_modes);
	dc1394video_mode_t video_mode;
	dc1394color_coding_t coding;
	for (int i = video_modes.num - 1; i >= 0; i--) {
		if (!dc1394_is_video_mode_scalable(video_modes.modes[i])) {
			dc1394_get_color_coding_from_video_mode(camera, video_modes.modes[i], &coding);
			if (coding == DC1394_COLOR_CODING_MONO8) {
				video_mode = video_modes.modes[i];
				break;
			}
		}
		if(i == 0) {
			ofLog(OF_LOG_ERROR, "Camera does not support DC1394_COLOR_CODING_MONO8.");
			return;
		}
	}
	dc1394_get_image_size_from_video_mode(camera, video_mode, &width, &height);
	 
	// get highest framerate
	dc1394framerates_t framerates;
	dc1394_video_get_supported_framerates(camera, video_mode, &framerates);
	dc1394framerate_t framerate = framerates.framerates[framerates.num - 1];
	
	// apply everything
	dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400);
	dc1394_video_set_mode(camera, video_mode);
	dc1394_video_set_framerate(camera, framerate);
	dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_DEFAULT);
	
	// print out features
	dc1394featureset_t features;
	dc1394_feature_get_all(camera, &features);
	dc1394_feature_print_all(&features, stdout);
}
Example #10
bool Camera::Init()
{
	// VideoCapture Interface
	cv_image_ = cvCreateImage(cvSize(640,480), IPL_DEPTH_8U, 3);
	
    // Initialize libdc1394
    if ((d = dc1394_new ()) == NULL)
        return false;

    // Find cameras
    err = dc1394_camera_enumerate(d, &list);
    DC1394_ERR_RTN(err, "Failed to enumerate cameras");

    // Verify that we have at least one camera
    if(list->num == 0)
    {
        dc1394_log_error("No cameras found");
        return false;
    }

    // Work with first camera found
    if ((camera = dc1394_camera_new (d, list->ids[0].guid)) == NULL)
    {
        dc1394_log_error("Failed to initialize camera with guid %llx", list->ids[0].guid);
        return false;
    }

    // Not using the list anymore, clean it up
    dc1394_camera_free_list(list);

    // Power ON
    dc1394_camera_set_power(camera, DC1394_ON);

    // Print camera info 
    dc1394_camera_print_info(camera, stdout);

    // Set video mode (must be selected before capture setup)
    dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_640x480_YUV422);

    // Setup capture
    err = dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_DEFAULT);

    // Start transmission
    err = dc1394_video_set_transmission(camera, DC1394_ON);

	// VideoCapture Interface
	init_ = true;

    return true;
}
Example #11
int list_cameras (dc1394_t * d, dc1394camera_list_t * list)
{
    uint32_t i;
    dc1394bool_t sff_available;
    dc1394camera_t * camera;

    for (i = 0; i < list->num; i++) {
        sff_available = DC1394_FALSE;
        camera = dc1394_camera_new (d, list->ids[i].guid);
        if (!camera)
            continue;
        dc1394_basler_sff_is_available (camera, &sff_available);

        printf ("%02d:0x%"PRIx64":%s:%s:%s\n", i, camera->guid,
                camera->vendor, camera->model, sff_available ? "SFF" : "NO SFF");
        dc1394_camera_free (camera);
    }
    return 0;
}
Example #12
int main(int argc, char *argv[])
{
    dc1394camera_t * camera;
    dc1394error_t err;
    dc1394video_frame_t * frame;
    dc1394_t * d;
    dc1394camera_list_t * list;

    d = dc1394_new ();                                                     /* Initialize libdc1394 */
    if (!d)
        return 1;

    err=dc1394_camera_enumerate (d, &list);                                /* Find cameras */
    DC1394_ERR_RTN(err,"Failed to enumerate cameras");

    if (list->num == 0) {                                                  /* Verify that we have at least one camera */
        dc1394_log_error("No cameras found");
        return 1;
    }

    camera = dc1394_camera_new (d, list->ids[0].guid);                     /* Work with first camera */
    if (!camera) {
        dc1394_log_error("Failed to initialize camera with guid %llx", list->ids[0].guid);
        return 1;
    }
    dc1394_camera_free_list (list);

    err=dc1394_capture_setup(camera, 4, DC1394_CAPTURE_FLAGS_DEFAULT);     /* Setup capture */

    err=dc1394_video_set_transmission(camera, DC1394_ON);                  /* Start transmission */
    
    err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);/* Capture */
    DC1394_ERR_RTN(err,"Problem getting an image");

    err=dc1394_capture_enqueue(camera, frame);                             /* Release the buffer */

    err=dc1394_video_set_transmission(camera, DC1394_OFF);                 /* Stop transmission */

    err=dc1394_capture_stop(camera);                                       /* Stop capture */

    printf("Hello World\n");                                               /* Hey, this is a HELLO WORLD program!! */

    dc1394_camera_free (camera);                                           /* cleanup and exit */
    dc1394_free (d);
    return 0;
}
Example #13
void Libdc1394SequenceGrabber::initCamera(uint64_t uid) 
{
	msg(osg::INFO) << "initCamera" << std::endl;

	dc1394camera_list_t * list;
	dc1394error_t err;

	err=dc1394_camera_enumerate (_context, &list);
	
	for(unsigned int i = 0; i < list->num; ++i) {
		if (!_camera && ((uid == 0) || (uid == list->ids[i].guid))) {
			_camera = dc1394_camera_new (_context, list->ids[i].guid);
			break;
		}
	}
	dc1394_camera_free_list(list);
	
	if (!_camera) {
		return;
	}
		
	if (_camera) {
		
		//dc1394_reset_bus(_camera);
		/*
		dc1394error_t err = dc1394_iso_release_bandwidth(_camera, INT_MAX);
        checkSuccess(err, "dc1394_iso_release_bandwidth failed");
		for (int channel = 0; channel < 64; ++channel) {
			err = dc1394_iso_release_channel(_camera, channel);
            //checkSuccess(err, "dc1394_iso_release_channel failed");
		}*/
	}
	

	_speed = DC1394_ISO_SPEED_400;
    
	/*
    if ((_camera->bmode_capable > 0)) {
        dc1394error_t err = dc1394_video_set_operation_mode(_camera, DC1394_OPERATION_MODE_1394B);
        checkSuccess(err,"Failed to set ISO Speed 800");
        _speed = (err!=DC1394_SUCCESS) ? DC1394_ISO_SPEED_400 : DC1394_ISO_SPEED_800;
    }
    */
}
Example #14
void FWCamera::resetBus()
{
#ifdef AVG_ENABLE_1394_2
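    // Open a temporary libdc1394 context, reset the bus through the first camera found, then tear everything down again.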
    dc1394_t* pDC1394 = dc1394_new();
    if (pDC1394 == 0) {
        return;
    }
    dc1394camera_list_t * pCameraList;
    int err=dc1394_camera_enumerate(pDC1394, &pCameraList);
    if (err == DC1394_SUCCESS) {
        if (pCameraList->num != 0) {
            dc1394camera_t * pCam = dc1394_camera_new(pDC1394, pCameraList->ids[0].guid);
            if (pCam) {
                dc1394_reset_bus(pCam);
                dc1394_camera_free(pCam);
            }
        }
        dc1394_camera_free_list(pCameraList);
    }
    dc1394_free(pDC1394);
#endif
}
Example #15
  /** Set up device connection to first camera on the bus.
   *
   *  @returns true if successful
   *
   *  Failure cleanup is sketchy, but that is OK because the node 
   *  terminates, anyway.
   */
  bool setup(void)
  {
    dc1394_t *dev = dc1394_new();
    if (dev == NULL)
      {
        ROS_FATAL("Failed to initialize dc1394_context.");
        return false;
      }

    // list all 1394 cameras attached to this computer
    dc1394camera_list_t *cameras;
    int err = dc1394_camera_enumerate(dev, &cameras);
    if (err != DC1394_SUCCESS)
      {
        ROS_FATAL("Could not get list of cameras");
        return false;
      }
    if (cameras->num == 0)
      {
        ROS_FATAL("No cameras found");
        return false;
      }

    // attach to first camera found
    ROS_INFO_STREAM("Connecting to first camera, GUID: "
                    << std::setw(16) << std::setfill('0') << std::hex
                    << cameras->ids[0].guid);
    camera_ = dc1394_camera_new(dev, cameras->ids[0].guid);
    if (!camera_)
      {
        ROS_FATAL("Failed to initialize camera.");
        return false;
      }

    dc1394_camera_free_list(cameras);
    return true;
  }
Example #16
File: pdp_dc1394.c Project: Angeldude/pd
void *pdp_dc1394_new(t_symbol *vdef)
{
    t_pdp_dc1394 *x = (t_pdp_dc1394 *)pd_new(pdp_dc1394_class);

    x->x_outlet0 = outlet_new(&x->x_obj, &s_anything);

    x->d = dc1394_new ();
    x->err=dc1394_camera_enumerate (x->d, &x->list);
    //DC1394_ERR_RTN(x->err,"Failed to enumerate cameras");
    //post("Failed to enumerate cameras");

    if (x->list->num == 0) {
        dc1394_log_error("No cameras found");
        return NULL;
    }

    x->camera = dc1394_camera_new (x->d, x->list->ids[0].guid);
    if (!x->camera) {
        //dc1394_log_error("Failed to initialize camera with guid %"PRIx64, list->ids[0].guid);
        return NULL;
    }
    dc1394_camera_free_list (x->list);

    //printf("Using camera with GUID %"PRIx64"\n", camera->guid);

    /*-----------------------------------------------------------------------
     *  get the best video mode and highest framerate. This can be skipped
     *  if you already know which mode/framerate you want...
     *-----------------------------------------------------------------------*/
    // get video modes:
    x->err=dc1394_video_get_supported_modes(x->camera,&x->video_modes);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Can't get video modes");

    // select highest res mode:
    for (x->i=x->video_modes.num-1;x->i>=0;x->i--) {
        if (!dc1394_is_video_mode_scalable(x->video_modes.modes[x->i])) {
            dc1394_get_color_coding_from_video_mode(x->camera,x->video_modes.modes[x->i], &x->coding);
            if (x->coding==DC1394_COLOR_CODING_MONO8) {
                x->video_mode=x->video_modes.modes[x->i];
		fprintf(stderr,"video_mode %d: %d\n",x->i,x->video_modes.modes[x->i]);
                break;
            }
        }
    }
    if (x->i < 0) {
        dc1394_log_error("Could not get a valid MONO8 mode");
        cleanup_and_exit(x->camera);
    }

    x->err=dc1394_get_color_coding_from_video_mode(x->camera, x->video_mode,&x->coding);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not get color coding");
    fprintf(stderr,"color_coding : %d\n",x->coding);

    // get highest framerate
    x->err=dc1394_video_get_supported_framerates(x->camera,x->video_mode,&x->framerates);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not get framrates");
    x->framerate=x->framerates.framerates[x->framerates.num-1];
    fprintf(stderr,"framerate : %d\n",x->framerate);

    /*-----------------------------------------------------------------------
     *  setup capture
     *-----------------------------------------------------------------------*/

    x->err=dc1394_video_set_iso_speed(x->camera, DC1394_ISO_SPEED_400);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not set iso speed");

    x->err=dc1394_video_set_mode(x->camera, x->video_mode);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not set video mode");

    x->err=dc1394_video_set_framerate(x->camera, x->framerate);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not set framerate");

    x->err=dc1394_capture_setup(x->camera,4, DC1394_CAPTURE_FLAGS_DEFAULT);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");

    /*-----------------------------------------------------------------------
     *  report camera's features
     *-----------------------------------------------------------------------*/
    x->err=dc1394_feature_get_all(x->camera,&x->features);
    if (x->err!=DC1394_SUCCESS) {
        dc1394_log_warning("Could not get feature set");
    }
    else {
        dc1394_feature_print_all(&x->features, stdout);
    }

    /*-----------------------------------------------------------------------
     *  have the camera start sending us data
     *-----------------------------------------------------------------------*/
    x->err=dc1394_video_set_transmission(x->camera, DC1394_ON);
    //DC1394_ERR_CLN_RTN(x->err,cleanup_and_exit(x->camera),"Could not start camera iso transmission");



    x->x_initialized = true;


    return (void *)x;
}
Example #17
bool ieee1394capture::init(RoboCompCamera::TCamParams &params_, RoboCompJointMotor::JointMotorPrx head_ , RoboCompDifferentialRobot::DifferentialRobotPrx base_ )
{
	params = params_;
	head = head_;
	base = base_;

	int32_t i;

	fps = (dc1394framerate_t)params.FPS;//15;
	res = (dc1394video_mode_t)0;

	switch (fps) {
	case 1: fps =  DC1394_FRAMERATE_1_875; break;
	case 3: fps =  DC1394_FRAMERATE_3_75; break;
	case 15: fps = DC1394_FRAMERATE_15; break;
	case 30: fps = DC1394_FRAMERATE_30; break;
	case 60: fps = DC1394_FRAMERATE_60; break;
	default: fps = DC1394_FRAMERATE_7_5; break;
	}

	switch (res) {
	case 1:
		res = DC1394_VIDEO_MODE_640x480_YUV411;
		device_width = 640;
		device_height = 480;
		break;
	case 2:
		res = DC1394_VIDEO_MODE_640x480_RGB8;
		device_width = 640;
		device_height = 480;
		break;
	default:
		res = DC1394_VIDEO_MODE_320x240_YUV422;
		device_width = 320;
		device_height = 240;
		break;
	}


	/// Get handle
	qDebug() << "ieee1394capture::init() -> Initializating first Firewire Card in the system...";
	if (!(d = dc1394_new()))
	{
		qDebug() << "ieee1394capture::init() -> Fatal error: Unable to aquire a handle to the Ieee1394 device";
		qDebug() << "Please check if the kernel modules `ieee1394',`raw1394' and `ohci1394' are loaded or if you have read/write access to /dev/raw1394 and to /dev/video1394-0 " ;
		return false;
	}
	CREATED_BUS = true;

	/// Create camera interfaces
	numCameras = 0;
	err = dc1394_camera_enumerate(d, &list);
	DC1394_ERR_RTN(err, "Failed to enumerate cameras");
	if (list->num == 0)
	{
		dc1394_log_error("No cameras found");
		return false;
	}
	numCameras = 0;
	for (uint32_t  di = 0; di < list->num; di++)
	{
		if (numCameras >= MAX_CAMERAS)
			break;
		cameras[numCameras] = dc1394_camera_new(d, list->ids[di].guid);
		if (!cameras[numCameras])
		{
			dc1394_log_warning("Failed to initialize camera with guid %llx", list->ids[di].guid);
			continue;
		}
		printf("Camera #%d\n", numCameras);
		numCameras++;
	}
	dc1394_camera_free_list(list);
	if ( numCameras < 1 )
	{
		qDebug() << "ieee1394capture::init() -> Fatal error: No cameras found in the bus! Called from ";
		cleanup();
		return false;
	}
	/// Check if one camera has become the root node
/*	for ( int n=0; n < numCameras; n++ )
	{
		if ( cameraNodeList[n] == numNodes )
		{
			qDebug() << "ieee1394capture::init() -> Fatal error: Sorry, your camera is the highest numbered node of the bus, and has therefore become the root node." ;
			cleanup();
			return false;
		}
	}*/
	CREATED_CAMS = true;

	/// Setup cameras for capture
	qDebug() << "ieee1394capture::init() -> Searching cameras with requested parameters:";
	printf("%s\n",params.mode.c_str());
	if (params.mode == "MODE_320x240_YUV422")
	{
		res = DC1394_VIDEO_MODE_320x240_YUV422;
		params.width = 320;
		params.height = 240;
	}
	else if (params.mode == "MODE_640x480_YUV422" )
	{
		res = DC1394_VIDEO_MODE_640x480_YUV422;
		params.width = 640;
		params.height = 480;
	}
	else if (params.mode == "MODE_640x480_RGB" )
	{
		res = DC1394_VIDEO_MODE_640x480_RGB8;
		params.width = 640;
		params.height = 480;
	}
	else if (params.mode == "MODE_640x480_YUV411")
	{
		res = DC1394_VIDEO_MODE_640x480_YUV411;
		params.width = 640;
		params.height = 480;
	}
	else if (params.mode == "MODE_640x480_MONO")
	{
		res = DC1394_VIDEO_MODE_640x480_MONO8;
		params.width = 640;
		params.height = 480;
	}
	else if (params.mode == "MODE_640x480_MONO16")
	{
		res = DC1394_VIDEO_MODE_640x480_MONO16;
		params.width = 640;
		params.height = 480;
	}
	else if (params.mode == "MODE_516x338_YUV422")
	{
		res = DC1394_VIDEO_MODE_FORMAT7_1;
		params.width = 516;
		params.height = 338;
	}
	else qFatal("ieee1394capture::init() -> Image Mode %s not available. Aborting...", params.mode.c_str());
	params.size = params.width*params.height;
	if (params.FPS!=15 and params.FPS!=30)
	{
		qWarning("ieee1394capture::init() -> Framerate %d not available. Aborting...", params.FPS );
		cleanup();
		return false;
	}

	dc1394format7modeset_t info;

	for (i = 0; i < numCameras; i++)
	{

	    if (params.mode == "MODE_516x338_YUV422")
	    {
		err = dc1394_format7_get_modeset(cameras[i], &info);

		for( int j=0;j<DC1394_VIDEO_MODE_FORMAT7_NUM;j++)
		{
		  qDebug() << info.mode[j].present;
		  qDebug() << info.mode[j].size_x;
		  qDebug() << info.mode[j].size_y;
		  qDebug() << info.mode[j].max_size_x;
		  qDebug() << info.mode[j].max_size_y;

		  qDebug() << info.mode[j].pos_x;
		  qDebug() << info.mode[j].pos_y;

		  qDebug() << info.mode[j].unit_size_x;
		  qDebug() << info.mode[j].unit_size_y;
		  qDebug() << info.mode[j].unit_pos_x;
		  qDebug() << info.mode[j].unit_pos_y;

		  qDebug() << info.mode[j].pixnum;

		  qDebug() << info.mode[j].packet_size; /* in bytes */
		  qDebug() << info.mode[j].unit_packet_size;
		  qDebug() << info.mode[j].max_packet_size;
		}
	    }


   	    release_iso_and_bw(i);

	    err = dc1394_video_set_mode(cameras[i], res);
	    DC1394_ERR_CLN_RTN(err, cleanup(), "Could not set video mode");

	    err = dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_400);
	    DC1394_ERR_CLN_RTN(err, cleanup(), "Could not set ISO speed");


	    //For format 7 modes only
	    if (params.mode == "MODE_516x338_YUV422")
	    {
	   //  uint32_t packet_size;

	    //  err=dc1394_format7_set_image_size(cameras[i], res, 514, 384);
	    //  DC1394_ERR_RTN(err,"Could not set image size");
	    //  err=dc1394_format7_get_recommended_packet_size(cameras[i], res, &packet_size);
	    //  DC1394_ERR_RTN(err,"Could not get format 7 recommended packet size");
	    //  err=dc1394_format7_set_roi(cameras[i], res, DC1394_COLOR_CODING_YUV422, packet_size, 0,0, 514, 384);
	    //  DC1394_ERR_RTN(err,"Could not set ROI");
	      qDebug() << "ya";
	    }

	    err = dc1394_video_set_framerate(cameras[i], fps);
	    DC1394_ERR_CLN_RTN(err, cleanup(), "Could not set framerate");

	    err = dc1394_capture_setup(cameras[i], NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
	    DC1394_ERR_CLN_RTN(err, cleanup(), "Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");

	    err = dc1394_video_set_transmission(cameras[i], DC1394_ON);
	    DC1394_ERR_CLN_RTN(err, cleanup(), "Could not start camera iso transmission");


	}
	fflush(stdout);
	qDebug() << "	ieee1394capture::init() -> Iso transmission started.";


	///Buffers de imagen
	qDebug() << "BUFFERS DE IMAGEN ------------------------------------------";
	for ( int i=0; i < numCameras; i++ )
	{
		AimgBuffer[i] = ( Ipp8u * ) ippsMalloc_8u ( params.size * 9 );
		BimgBuffer[i] = ( Ipp8u * ) ippsMalloc_8u ( params.size * 9 );
		img8u_lum[i] = AimgBuffer[i];
		img8u_YUV[i] = AimgBuffer[i]+params.size;
		localYRGBImgBufferPtr[i] = BimgBuffer[i];
		qDebug() << "Reservando" << params.size * 9 <<" para localYRGBImgBufferPtr["<<i<<"]";
		printf("(de %p a %p)\n", localYRGBImgBufferPtr[i], localYRGBImgBufferPtr[i]+(params.size*9-1));
	}


	planos[0]=BimgBuffer[0]+params.size*3;
	planos[1]=BimgBuffer[0]+ ( params.size*4 );
	planos[2]=BimgBuffer[0]+ ( params.size*5 );
	//img8u_aux = BimgBuffer[0]+(params.size*6);

	imgSize_ipp.width=params.width;
	imgSize_ipp.height=params.height;

	return true;

}
Example #18
int capture_setup(capture_t *cap, const char *conf_file)
{
  conf_param_t param = CONF_PARAM_INIT_VAL;
  int i, num_found;

  /* read configuration from a specified file */
  if (s_read_conf_file(conf_file, &param) < 1) {
    return CAPTURE_ERROR;
  }


  /* allocate memory */
  cap->cameras = (dc1394camera_t **)malloc(sizeof(dc1394camera_t *) * param.num_cameras);
  if (cap->cameras == NULL) {
    return CAPTURE_ERROR;
  }

  cap->num_active = 0;
  num_found = 0;
  for (i = 0; i < param.num_cameras; ++i) {
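    /* try to open each camera listed in the configuration; cameras that are not present are skipped */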
    dc1394error_t err;

    cap->cameras[num_found] = dc1394_camera_new(cap->dc1394_cxt, param.settings[i].uid);

    if (cap->cameras[num_found] == NULL) {
      if (cap->verbose) {
        fprintf(stderr, "camera 0x%0"PRIx64" not found.\n", param.settings[i].uid);
      }
      continue;
    }

    if (cap->verbose) {
      fprintf(stderr, "GUID[%d]: 0x%0"PRIx64"\n", i, cap->cameras[num_found]->guid);
    }

#if 0
    {
      uint32_t port = 42;
      dc1394_camera_get_linux_port(cap->cameras[num_found], &port);
      printf("port: %d\n", port);
    }
#endif

    /* set parameters */
    if (cap->verbose) {
      fprintf(stderr, "set parameter.\n");
    }
    s_set_parameters(cap, num_found, &param.settings[i]);

    /* set up a capture construct */
    if (cap->verbose) {
      fprintf(stderr, "set up a capture construct.\n");
    }

    err = dc1394_capture_setup(cap->cameras[num_found], cap->num_buffers, DC1394_CAPTURE_FLAGS_DEFAULT);
    DC1394_WRN(err, "Setup capture failed.");
    if (err != DC1394_SUCCESS) {
      continue;
    }
    
    /* set ready to capture */
    if (cap->verbose) {
      fprintf(stderr, "activate camera.\n");
    }
    s_activate_camera(cap, num_found);

    ++num_found;
  }
  cap->num_active = num_found;

  s_clear_conf_param(&param);

  return CAPTURE_SUCCESS;
}
Example #19
//--------------------------------------------------------------------
int of1394VideoGrabber::initGrabber()
{
	// moved from constructor
	int i;
	
	printf("Making new camera\n");
	
	d = dc1394_new ();
	
	err=dc1394_camera_enumerate (d, &list);
    DC1394_ERR_RTN(err,"Failed to enumerate cameras");
	
	if (list->num == 0) {
		if(bVerbose)printf("\n No cameras found\n ");
		return 1;
	}
	
	camera = dc1394_camera_new (d, list->ids[0].guid);
	if (!camera) {
		if(bVerbose)printf("\n Failed to initialize camera with guid %" PRIx64 "\n", list->ids[0].guid);
		return 1;
	}
	dc1394_camera_free_list (list);
	

	
	/*-----------------------------------------------------------------------
	 *  setup capture
	 *-----------------------------------------------------------------------*/
	

	

	dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_0);
	uint32_t f, f2;
	dc1394_format7_get_image_size(camera, DC1394_VIDEO_MODE_FORMAT7_0, &f, &f2);
	
	err=dc1394_capture_setup(camera,4, DC1394_CAPTURE_FLAGS_DEFAULT);
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera\n");

	
	
	/*-----------------------------------------------------------------------
	 *  have the camera start sending us data
	 *-----------------------------------------------------------------------*/
	err=dc1394_video_set_transmission(camera, DC1394_ON);
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not start camera iso transmission\n");

	err = dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_0, &width, &height);
	if(err != DC1394_SUCCESS){
		cout<<"get image size error: "<<err<<endl;
	}
	printf("Size from video mode = %i wide %i high\n", width, height);

	pixels	= new unsigned char[width*height*3]; // x3 for RGB
	pixels2	= new unsigned char[width*height*3]; // x3 for RGB

	tex.allocate(width,height,GL_LUMINANCE);

	err = dc1394_capture_dequeue( camera, DC1394_CAPTURE_POLICY_WAIT, &frame ) ;
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not capture a frame\n");

	memcpy( pixels, frame->image, width * height*3) ;
	
	dc1394_capture_enqueue( camera, frame ) ;


	
	ofSleepMillis(10);
	startThread(true, false);
	
	failedToInit = false;
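	// Fix exposure, brightness and shutter to manual control; leave the remaining features in automatic mode.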
	dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_MANUAL);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_GAMMA, DC1394_FEATURE_MODE_AUTO);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_AUTO);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_IRIS, DC1394_FEATURE_MODE_AUTO);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_WHITE_BALANCE, DC1394_FEATURE_MODE_AUTO);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_TEMPERATURE, DC1394_FEATURE_MODE_AUTO);
	dc1394_feature_set_mode(camera, DC1394_FEATURE_WHITE_SHADING, DC1394_FEATURE_MODE_AUTO);	
	dc1394_feature_set_mode(camera, DC1394_FEATURE_SATURATION, DC1394_FEATURE_MODE_AUTO);		
	dc1394_feature_set_mode(camera, DC1394_FEATURE_HUE, DC1394_FEATURE_MODE_AUTO);		
	
	//Print features
	dc1394_feature_get_all(camera, &features);
	dc1394_feature_print_all(&features, stdout);

	return 0;
}
Example #20
int main(int argc, char *argv[]) {
    fitsfile *fptr;
    long fpixel=1, nelements, naxes[2];
    dc1394camera_t *camera;
    int grab_n_frames;
    struct timeval start_time, end_time;
    int i, j, status;
    unsigned int max_height, max_width;
    uint64_t total_bytes = 0;
    unsigned int width, height;
    dc1394video_frame_t *frame=NULL;
    dc1394_t * d;
    dc1394camera_list_t * list;
    dc1394error_t err;
    char *filename;
    unsigned char *buffer;
    float *average;

    grab_n_frames = atoi(argv[1]);
    filename = argv[2];
    status = 0;
    width = 320;
    height = 240;
    naxes[0] = width;
    naxes[1] = height;
    nelements = naxes[0]*naxes[1];

    stderr = freopen("grab_cube.log", "w", stderr);

    d = dc1394_new();
    if (!d)
        return 1;
    err = dc1394_camera_enumerate(d, &list);
    DC1394_ERR_RTN(err, "Failed to enumerate cameras.");

    if (list->num == 0) {
        dc1394_log_error("No cameras found.");
        return 1;
    }

    camera = dc1394_camera_new (d, list->ids[0].guid);
    if (!camera) {
        dc1394_log_error("Failed to initialize camera with guid %"PRIx64,
                         list->ids[0].guid);
        return 1;
    }
    dc1394_camera_free_list(list);

    printf("Using camera with GUID %"PRIx64"\n", camera->guid);

    /*-----------------------------------------------------------------------
     *  setup capture for format 7
     *-----------------------------------------------------------------------*/
    // err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
    // DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot operate at 1394B");

    // libdc1394 doesn't work well with firewire800 yet so set to legacy 400 mode
    dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400);

    // configure camera for format7
    err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot choose format7_1");

    err = dc1394_format7_get_max_image_size (camera, DC1394_VIDEO_MODE_FORMAT7_1,
            &max_width, &max_height);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get max image size for format7_1");

    err = dc1394_format7_set_roi (camera,
                                  DC1394_VIDEO_MODE_FORMAT7_1,
                                  DC1394_COLOR_CODING_MONO8, // not sure why RAW8/16 don't work
                                  DC1394_USE_MAX_AVAIL,
                                  0, 0, // left, top
                                  width, height); // width, height
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set roi");

    // set the frame rate to absolute value in frames/sec
    err = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to manual");
    err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_FRAME_RATE, DC1394_TRUE);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to absolute mode");
    err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, 100.0);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate");

    // set the shutter speed to absolute value in seconds
    err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to manual");
    err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_TRUE);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to absolute mode");
    err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, 1.0e-2);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter");

    // set gain manually.  use relative value here in range 48 to 730.
    err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain to manual");
    err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 200);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain");

    // set brightness manually.  use relative value in range 0 to 1023.
    err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness to manual");
    err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 50);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness");

    err = dc1394_format7_get_total_bytes (camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes);
    DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get total bytes");

    // err = dc1394_feature_set_value (camera, DC1394_FEATURE_GAIN, 24);
    //DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error setting gain");

    err = dc1394_capture_setup(camera, 16, DC1394_CAPTURE_FLAGS_DEFAULT);
    DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error capturing");

    /*-----------------------------------------------------------------------
     *  have the camera start sending us data
     *-----------------------------------------------------------------------*/
    err = dc1394_video_set_transmission(camera, DC1394_ON);
    if (err != DC1394_SUCCESS) {
        dc1394_log_error("unable to start camera iso transmission");
        dc1394_capture_stop(camera);
        dc1394_camera_free(camera);
        exit(1);
    }

    /* allocate the buffers */
    if (!(buffer = malloc(nelements*sizeof(char)))) {
        printf("Couldn't Allocate Image Buffer\n");
        exit(-1);
    }

    if (!(average = calloc(nelements, sizeof(float)))) {
        printf("Couldn't Allocate Average Image Buffer\n");
        exit(-1);
    }

    // set up FITS image and capture
    fits_create_file(&fptr, filename, &status);
    dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1, &width, &height);
    fits_create_img(fptr, FLOAT_IMG, 2, naxes, &status);

    /*-----------------------------------------------------------------------
     *  capture frames and measure the time for this operation
     *-----------------------------------------------------------------------*/
    gettimeofday(&start_time, NULL);

    printf("Start capture:\n");

    for (i=0; i<grab_n_frames; ++i) {
        /*-----------------------------------------------------------------------
         *  capture one frame
         *-----------------------------------------------------------------------*/
        err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
        if (err != DC1394_SUCCESS) {
            dc1394_log_error("unable to capture");
            dc1394_capture_stop(camera);
            dc1394_camera_free(camera);
            exit(1);
        }

        memcpy(buffer, frame->image, nelements*sizeof(char));

        // release buffer
        dc1394_capture_enqueue(camera,frame);

        for (j=0; j<nelements; j++) {
            average[j] += (1.0/grab_n_frames)*(buffer[j]);
        }

    }

    gettimeofday(&end_time, NULL);
    printf("End capture.\n");

    /*-----------------------------------------------------------------------
     *  stop data transmission
     *-----------------------------------------------------------------------*/

    err = dc1394_video_set_transmission(camera, DC1394_OFF);
    DC1394_ERR_RTN(err,"couldn't stop the camera?");

    /*-----------------------------------------------------------------------
     *  save FITS image to disk
     *-----------------------------------------------------------------------*/
    fits_write_img(fptr, TFLOAT, fpixel, naxes[0]*naxes[1], average, &status);
    fits_close_file(fptr, &status);
    fits_report_error(stderr, status);
    free(buffer);
    free(average);

    printf("wrote: %s\n", filename);
    printf("Readout is %d bits/pixel.\n", frame->data_depth);

    /*-----------------------------------------------------------------------
     *  close camera, cleanup
     *-----------------------------------------------------------------------*/
    dc1394_capture_stop(camera);
    dc1394_video_set_transmission(camera, DC1394_OFF);
    dc1394_camera_free(camera);
    dc1394_free(d);
    return 0;
}
Example #21
File: ffmv_ccd.cpp Project: A-j-K/indi
/**************************************************************************************
** Client is asking us to establish connection to the device
***************************************************************************************/
bool FFMVCCD::Connect()
{
    dc1394camera_list_t *list;
    dc1394error_t err;
    bool supported;
    bool settings_valid;
    uint32_t val;
    dc1394format7mode_t fm7;
    dc1394feature_info_t feature;
    float min, max;

    dc1394 = dc1394_new();
    if (!dc1394) {
        return false;
    }

    err = dc1394_camera_enumerate(dc1394, &list);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Could not find DC1394 cameras!");
        return false;
    }
    if (!list->num) {
        IDMessage(getDeviceName(), "No DC1394 cameras found!");
        return false;
    }
    dcam = dc1394_camera_new(dc1394, list->ids[0].guid);
    if (!dcam) {
        IDMessage(getDeviceName(), "Unable to connect to camera!");
        return false;
    }
    dc1394_camera_free_list(list);

    /* Reset camera */
    err = dc1394_camera_reset(dcam);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to reset camera!");
        return false;
    }

    /* Set mode */
    err = dc1394_video_set_mode(dcam, DC1394_VIDEO_MODE_640x480_MONO16);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to connect to set videomode!");
        return false;
    }
    /* Disable Auto exposure control */
    err = dc1394_feature_set_power(dcam, DC1394_FEATURE_EXPOSURE, DC1394_OFF);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to disable auto exposure control");
        return false;
    }

    /* Set frame rate to the lowest possible */
    err = dc1394_video_set_framerate(dcam, DC1394_FRAMERATE_7_5);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to connect to set framerate!");
        return false;
    }
    /* Turn frame rate control off to enable extended exposure (subs of 512ms) */
    err = dc1394_feature_set_power(dcam, DC1394_FEATURE_FRAME_RATE, DC1394_OFF);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to disable framerate!");
        return false;
    }

    /* Get the longest possible exposure length */
    err = dc1394_feature_set_mode(dcam, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Failed to enable manual shutter control.");
    } 
    err = dc1394_feature_set_absolute_control(dcam, DC1394_FEATURE_SHUTTER, DC1394_ON);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Failed to enable absolute shutter control.");
    } 
    err = dc1394_feature_get_absolute_boundaries(dcam, DC1394_FEATURE_SHUTTER, &min, &max);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Could not get max shutter length");
    } else {
        max_exposure = max;
    }

    /* Set gain to max. By setting the register directly, we can achieve a
     * gain of 24 dB...compared to a gain of 12 dB which is reported as the
     * max
     */
    err = dc1394_set_control_register(dcam, 0x820, 0x40);
    //err = dc1394_set_control_register(dcam, 0x820, 0x7f);
    if (err != DC1394_SUCCESS) {
            return false;
    }
#if 0
    /* Set absolute gain to max */
    err = dc1394_feature_set_absolute_control(dcam, DC1394_FEATURE_GAIN, DC1394_ON);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Failed to enable ansolute gain control.");
    } 
    err = dc1394_feature_get_absolute_boundaries(dcam, DC1394_FEATURE_GAIN, &min, &max);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Could not get max gain value");
    } else {
        err = dc1394_feature_set_absolute_value(dcam, DC1394_FEATURE_GAIN, max);
        if (err != DC1394_SUCCESS) {
            IDMessage(getDeviceName(), "Could not set max gain value");
        }
    }
#endif

    /* Set brightness */
    err = dc1394_feature_set_mode(dcam, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Failed to enable manual brightness control.");
    } 
    err = dc1394_feature_set_absolute_control(dcam, DC1394_FEATURE_BRIGHTNESS, DC1394_ON);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Failed to enable ansolute brightness control.");
    } 
    err = dc1394_feature_set_absolute_value(dcam, DC1394_FEATURE_BRIGHTNESS, 1);
    if (err != DC1394_SUCCESS) {
            IDMessage(getDeviceName(), "Could not set max brightness value");
    }

    /* Turn gamma control off */
    err = dc1394_feature_set_absolute_value(dcam, DC1394_FEATURE_GAMMA, 1);
    if (err != DC1394_SUCCESS) {
            IDMessage(getDeviceName(), "Could not set gamma value");
    }
    err = dc1394_feature_set_power(dcam, DC1394_FEATURE_GAMMA, DC1394_OFF);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to disable gamma!");
        return false;
    }

    /* Turn off white balance */
    err = dc1394_feature_set_power(dcam, DC1394_FEATURE_WHITE_BALANCE, DC1394_OFF);
    if (err != DC1394_SUCCESS) {
        IDMessage(getDeviceName(), "Unable to disable white balance!");
        return false;
    }

    err=dc1394_capture_setup(dcam,10, DC1394_CAPTURE_FLAGS_DEFAULT);

    return true;
}
Example #22
    VideoIIDC1394::VideoIIDC1394(int source, uint8_t iso, uint8_t vid_mode, uint8_t f_rate, uint32_t w, uint32_t h)
    {
#if defined(DUNE_WITH_DC1394)

      std::cout << " Entered constructor " << std:: endl;

      width = w;

      height = h;

      iidc = dc1394_new();

      frame = NULL;

      std::cout << " Built Object " << std:: endl;

      dc1394error_t error_code;

      if (!iidc)
      {
        throw std::runtime_error("Failed to start IEEE 1394 IIDC interface");
      }

      error_code = dc1394_camera_enumerate(iidc, &list);

      std::cout << " Enumerated camera objects " << std:: endl;

      if (error_code < 0)
      {
        throw std::runtime_error("Failed to enumerate cameras");
      }

      std::cout << "Number of cameras found: " << list->num << std:: endl;

      camera = dc1394_camera_new(iidc, list->ids[0].guid);

      std::cout << " Built new camera object " << std:: endl;

      if (!camera)
      {
        throw std::runtime_error("Failed to initialize camera");
      }

      dc1394_camera_free_list(list);

      error_code = dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400);

      if (error_code < 0)
      {
        throw std::runtime_error("Failed to set ISO SPEED");
      }

      error_code = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_640x480_YUV422);
      if (error_code < 0)
      {
        throw std::runtime_error("Failed to set video mode");
      }

      error_code = dc1394_video_set_framerate(camera, DC1394_FRAMERATE_15);
      if (error_code < 0)
      {
        throw std::runtime_error("Failed to set framerate");
      }

      error_code = dc1394_capture_setup(camera, 8, DC1394_CAPTURE_FLAGS_DEFAULT);
      if (error_code < 0)
      {
        throw std::runtime_error("Failed to setup capture");
      }

      error_code = dc1394_video_set_transmission(camera, DC1394_ON);
      if (error_code < 0)
      {
        throw std::runtime_error("Could not start video capture");
      }

      converted_frame = new dc1394video_frame_t;
      std::memset(converted_frame, 0, sizeof(dc1394video_frame_t));

      std::cout << " Finished setting everything up " << std:: endl;

#else
      (void)source;
      (void)iso;
      (void)vid_mode;
      (void)f_rate;
      (void)w;
      (void)h;

      throw std::runtime_error("IIDC Firewire Video interface is not yet implemented in this system.");

#endif
    }
Example #23
bool Camera::open(int deviceIdx) {

    if (numCams < 1) {
        std::cerr << "Camera not found or could not be opened." << std::endl;
        return false;
    }

    camera = dc1394_camera_new(camDict, camList->ids[deviceIdx].guid);
    if (!camera) {
        dc1394_log_error("Failed to initialize camera with guid %.", camList->ids[deviceIdx].guid);
        return false;
    }

    /* reset camera to initial state with default values */
    resetCameraRegister();

    /* prepare camera for ringlight leds */
    configureOutputPins();
    configureClockDelay();
    configureClockDuration();
    configureResetDelay();
    configureResetDuration();

    /* set video mode */
    error = dc1394_video_set_mode(camera,  DC1394_VIDEO_MODE_640x480_MONO8);
    if (error != DC1394_SUCCESS) {
        std::cout << "Could not set video mode" << std::endl;
        cleanup(camera);
        return false;
    }

    /* camera parameter calibrated with FlyCapture2 */

    /* brightness */
    error = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL);
    error = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 0);

    /* exposure */
    error = dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_MANUAL);
    error = dc1394_feature_set_value(camera, DC1394_FEATURE_EXPOSURE, 149);

    /* gamma */
    error = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAMMA, DC1394_FEATURE_MODE_MANUAL);
    error = dc1394_feature_set_value(camera, DC1394_FEATURE_GAMMA, 1024);

    /* shutter */
    error = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
    error = dc1394_feature_set_value(camera, DC1394_FEATURE_SHUTTER, 61);

    /* gain */
    error = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
    error = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 740);

    /* frame rate */
    error = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL);
    error = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, FRAME_RATE);

    /* setup capture */
    error = dc1394_capture_setup(camera, numDMABuffers, DC1394_CAPTURE_FLAGS_DEFAULT);
    if (error != DC1394_SUCCESS) {
        std::cerr << "Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera" << std::endl;
        cleanup(camera);
        return false;
    }

    /* starting camera sending data */
    error = dc1394_video_set_transmission(camera, DC1394_ON);
    if (error != DC1394_SUCCESS) {
        std::cerr << "Could not start camera iso transmission" << std::endl;
        cleanup(camera);
        return false;
    }
    
    /* capture image with no LEDs to subtract ambient light */
    captureAmbientImage();
    
    /* set average image intensity used by ps process for adjustment */
    avgImgIntensity = cv::mean(ambientImage)[0];

    return true;
}
Example #24
 int main(int argc, char *argv[])
 {
     FILE* imagefile;
     dc1394camera_t *camera;
     unsigned int width, height;
     dc1394video_frame_t *frame=NULL;
     //dc1394featureset_t features;
     dc1394_t * d;
     dc1394camera_list_t * list;
     dc1394error_t err;
     int counter = 0;
     d = dc1394_new ();
     if (!d)
         return 1;
     err=dc1394_camera_enumerate (d, &list);
     DC1394_ERR_RTN(err,"Failed to enumerate cameras");

     if (list->num == 0) {
         dc1394_log_error("No cameras found");
         return 1;
     }

     camera = dc1394_camera_new (d, list->ids[0].guid);
     if (!camera) {
         dc1394_log_error("Failed to initialize camera with guid %llx", list->ids[0].guid);
         return 1;
     }
     dc1394_camera_free_list (list);

     printf("Using camera with GUID %" PRIx64 "\n", camera->guid);

     /*-----------------------------------------------------------------------
      *  setup capture
      *-----------------------------------------------------------------------*/

     err=dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400);
     DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set iso speed");

     err=dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_7);
     DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set video mode\n");

     err=dc1394_video_set_framerate(camera, DC1394_FRAMERATE_7_5);
     DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not set framerate\n");

     err=dc1394_capture_setup(camera,4, DC1394_CAPTURE_FLAGS_DEFAULT);
     DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera\n");

     /*-----------------------------------------------------------------------
      *  have the camera start sending us data
      *-----------------------------------------------------------------------*/
     err=dc1394_video_set_transmission(camera, DC1394_ON);
     DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not start camera iso transmission\n");
     for(;;){
         /*-----------------------------------------------------------------------
          *  capture one frame
          *-----------------------------------------------------------------------*/
         err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
         DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not capture a frame\n");

         dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_7, &width, &height);

         cv::Mat img = cv::Mat(height, width, CV_8U, frame->image).clone(); // copy the data before the buffer is re-enqueued below

         err = dc1394_capture_enqueue(camera, frame);
         frame=NULL;
         cv::imshow("Image", img);

         printf("Frame %d\n", counter);
         counter++;
         cv::waitKey(10);

      }
     /*-----------------------------------------------------------------------
      *  stop data transmission
      *-----------------------------------------------------------------------*/
     err=dc1394_video_set_transmission(camera,DC1394_OFF);
     DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera?\n");

     /*-----------------------------------------------------------------------
      *  close camera
      *-----------------------------------------------------------------------*/
     dc1394_capture_stop(camera);
     dc1394_camera_free(camera);
     dc1394_free (d);
     return 0;
 }
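The DC1394_ERR_CLN_RTN() calls above rely on a cleanup_and_exit() helper that is not reproduced here. A minimal sketch following the usual libdc1394 sample convention (an assumption, not necessarily this example's exact code):

static void cleanup_and_exit(dc1394camera_t *camera)
{
    /* stop transmission and DMA capture, then release the camera handle */
    dc1394_video_set_transmission(camera, DC1394_OFF);
    dc1394_capture_stop(camera);
    dc1394_camera_free(camera);
    exit(1);
}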
Example #25
0
int main(int argc, char *argv[])
{
    dc1394_t * d;
    dc1394camera_list_t * list;
    dc1394camera_t *camera;
    dc1394error_t err;

    d = dc1394_new ();

    if (!d)
        return 1;
    int cam=0;
    int num=-1;
    int chan;
    unsigned int val;
    do
    {
        err=dc1394_camera_enumerate (d, &list);
        DC1394_ERR_RTN(err,"Failed to enumerate cameras");

        if (list->num == 0) {
            dc1394_log_error("No cameras found");
            return 1;
        }
        else
        {
            num=list->num;
        }
        printf("Camera %d\n",cam);
        camera = dc1394_camera_new (d, list->ids[cam].guid);
        if (!camera) {
            dc1394_log_error("Failed to initialize camera with guid %llx", list->ids[cam].guid);
            return 1;
        }
        dc1394_camera_free_list (list);

        printf("Using camera with GUID %"PRIx64"\n", camera->guid);

        printf ("Reseting bus...\n");
        if (dc1394_reset_bus (camera) != DC1394_SUCCESS)
            printf ("Warning: reset reported error\n");

        dc1394_capture_stop(camera);
        dc1394_iso_release_all(camera);
        if ( dc1394_video_get_bandwidth_usage(camera,&val) == DC1394_SUCCESS
                && dc1394_iso_release_bandwidth(camera,val) == DC1394_SUCCESS)
        {
            printf("Freed %d Bandwidth\n",val);
        }

        if ( dc1394_video_get_iso_channel(camera,&val) == DC1394_SUCCESS
                && dc1394_iso_release_channel(camera,val) == DC1394_SUCCESS)
        {
            printf("Freed ISO %d\n",val);
        }
        dc1394_video_set_transmission(camera, DC1394_OFF);

        //dc1394_iso_release_bandwidth(camera, 10000000);
        /*dc1394_iso_release_bandwidth(camera, INT_MAX);
        for(chan=0;chan<64;chan++)
        {
            dc1394_iso_release_channel(camera,chan);
        }*/
        dc1394_camera_free (camera);
        cam++;
    }
    while(cam<num);

    dc1394_free (d);

    return 0;
}
Example #26
0
int main(int argc, char *argv[])
{
    // parse configuration file
    // get input arguments

    OpenCvStereoConfig stereo_config;
    string config_file = "";

    ConciseArgs parser(argc, argv);
    parser.add(config_file, "c", "config", "Configuration file containing camera GUIDs, etc.", true);
    parser.add(left_camera_mode, "l", "left-camera", "Calibrate just the left camera.");
    parser.add(right_camera_mode, "r", "right-camera", "Calibrate just the right camera.");
    parser.add(force_brightness, "b", "brightness", "set brightness to this level");
    parser.add(force_exposure, "e", "exposure", "set exposure to this level");
    parser.parse();

    // parse the config file
    if (ParseConfigFile(config_file, &stereo_config) != true)
    {
        fprintf(stderr, "Failed to parse configuration file, quitting.\n");
        return -1;
    }

    if (left_camera_mode || right_camera_mode)
    {
        stereo_mode = false;
    }

    uint64 guid = stereo_config.guidLeft;
    uint64 guid2 = stereo_config.guidRight;


    dc1394_t        *d;
    dc1394camera_t  *camera;
    dc1394error_t   err;

    Mat frame_array_left[MAX_FRAMES];
    Mat frame_array_right[MAX_FRAMES];

    int numFrames = 0;

    // ----- cam 2 -----
    dc1394_t        *d2;
    dc1394camera_t  *camera2;
    dc1394error_t   err2;

    d = dc1394_new ();
    if (!d)
        g_critical("Could not create dc1394 context");

    d2 = dc1394_new ();
    if (!d2)
        g_critical("Could not create dc1394 context for camera 2");

    camera = dc1394_camera_new (d, guid);
    if (!camera)
        g_critical("Could not create dc1394 camera");

    camera2 = dc1394_camera_new (d2, guid2);
    if (!camera2)
        g_critical("Could not create dc1394 camera for camera 2");

    // setup
    err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera");

    err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1);
    DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2");

    // enable auto-exposure
    // turn on the auto exposure feature
    err = dc1394_feature_set_power(camera, DC1394_FEATURE_EXPOSURE, DC1394_ON);
    DC1394_ERR_RTN(err,"Could not turn on the exposure feature");

    err = dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO);
    DC1394_ERR_RTN(err,"Could not turn on Auto-exposure");

    // enable auto-exposure
    // turn on the auto exposure feature
    err = dc1394_feature_set_power(camera2, DC1394_FEATURE_EXPOSURE, DC1394_ON);
    DC1394_ERR_RTN(err,"Could not turn on the exposure feature for cam2");

    err = dc1394_feature_set_mode(camera2, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO);
    DC1394_ERR_RTN(err,"Could not turn on Auto-exposure for cam2");

    // enable camera
    err = dc1394_video_set_transmission(camera, DC1394_ON);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");
    err2 = dc1394_video_set_transmission(camera2, DC1394_ON);
    DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2");

    if (left_camera_mode || stereo_mode)
    {
        InitBrightnessSettings(camera, camera2);
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
    } else {
        // use the right camera as the master for brightness
        // since we're calibrating that one
        InitBrightnessSettings(camera2, camera);
        MatchBrightnessSettings(camera2, camera, true);
    }

    // make opencv windows
    if (left_camera_mode || stereo_mode)
    {
    	namedWindow("Input Left", CV_WINDOW_AUTOSIZE);
    	moveWindow("Input Left", 100, 100);
    }

    if (right_camera_mode || stereo_mode)
    {
    	namedWindow("Input Right", CV_WINDOW_AUTOSIZE);
    	moveWindow("Input Right", 478, 100);
    }


    CvSize size;

    Mat cornersL, cornersR;

    int i;

    while (numFrames < MAX_FRAMES) {

        Mat chessL, chessR;

        // each loop dump a bunch of frames to clear the buffer
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
        for (i=0;i<10;i++)
        {
            if (left_camera_mode || stereo_mode)
            {
                chessL = GetFrameFormat7(camera);
            }

            if (right_camera_mode || stereo_mode)
            {
                chessR = GetFrameFormat7(camera2);
            }
        }

        // copy the images for drawing/display
        size = chessL.size();
        Mat chessLc;
        chessLc.create(size, CV_32FC3);
        Mat chessRc;
        chessRc.create(size, CV_32FC3);

        // attempt checkerboard matching
        bool foundPattern = true; // set to true so we can do an OR
                                  // later if we're only using one
                                  // camera

        if (left_camera_mode || stereo_mode)
        {
            foundPattern = findChessboardCorners(chessL, Size(CHESS_X, CHESS_Y), cornersL);
        }

        if (right_camera_mode || stereo_mode)
        {
            foundPattern = foundPattern & findChessboardCorners(chessR, Size(CHESS_X, CHESS_Y), cornersR);
        }

        if (left_camera_mode || stereo_mode)
        {
            cvtColor( chessL, chessLc, CV_GRAY2BGR );
            drawChessboardCorners(chessLc, Size(CHESS_X, CHESS_Y), cornersL, foundPattern);
            imshow("Input Left", chessLc);
        }

        if (right_camera_mode || stereo_mode)
        {
            cvtColor(chessR, chessRc, CV_GRAY2BGR);
            drawChessboardCorners(chessRc, Size(CHESS_X, CHESS_Y), cornersR, foundPattern);

            imshow("Input Right", chessRc);
        }


		// key codes:
		// page up: 65365
		// page down: 65366
		// b: 98
		char key = waitKey();
		//printf("%d\n", (int)key);
		if (key == 98)
		{
		    break;
		} else if (key == 86){
		    if (foundPattern)
		    {
		        // this was a good one -- save it
		        frame_array_left[numFrames] = chessL;
                frame_array_right[numFrames] = chessR;

                // give the user some guidance on the number
                // of frames they should be using
                if (stereo_mode)
                {
                    printf("Saved frame %d / about 10\n", numFrames);
                } else {
                    printf("Saved frame %d / about 20-30\n", numFrames);
                }

                numFrames ++;
            } else {
                printf("Not saving frame since did not find a checkboard.\n");
            }
		} else if (key == 'W') {
            force_brightness +=20;
            cout << "Brightness: " << force_brightness << "\n";
        } else if (key == 'w') {
            force_brightness -=20;
            cout << "Brightness: " << force_brightness << "\n";
        } else if (key == 'E') {
            force_exposure +=20;
            cout << "Exposure: " << force_exposure << "\n";
        } else if (key == 'e') {
            force_exposure -=20;
            cout << "Exposure: " << force_exposure << "\n";
        }
	}

    printf("\n\n");

    // clear out the calibration directory
    printf("Deleting old images...\nrm calibrationImages/*.ppm\n");
    int retval = system("rm calibrationImages/*.ppm");
    if (retval != 0) {
        printf("Warning: Deleting images may have failed.\n");
    }
    printf("done.\n");

    char filename[1000];

    for (i=0;i<numFrames;i++)
    {
        if (left_camera_mode || stereo_mode)
        {
            sprintf(filename, "calibrationImages/cam1-%05d.ppm", i+1);
            imwrite(filename, frame_array_left[i]);
        }

        if (right_camera_mode || stereo_mode)
        {
            sprintf(filename, "calibrationImages/cam2-%05d.ppm", i+1);
            imwrite(filename, frame_array_right[i]);
        }

        printf("Writing frame %d\n", i);
    }

    printf("\n\n");

    destroyWindow("Input Left");
    destroyWindow("Input Right");

    // stop data transmission
    err = dc1394_video_set_transmission(camera, DC1394_OFF);
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera");

    err2 = dc1394_video_set_transmission(camera2, DC1394_OFF);
    DC1394_ERR_CLN_RTN(err2,cleanup_and_exit(camera2),"Could not stop the camera 2");

    // close camera
    cleanup_and_exit(camera);
    cleanup_and_exit(camera2);
    dc1394_free (d);
    dc1394_free (d2);

    return 0;
}
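This example (like example #28 below) grabs frames through a GetFrameFormat7() helper that is defined elsewhere in the codebase. A plausible sketch, assuming 8-bit mono Format7 frames (hypothetical; the real helper may differ):

Mat GetFrameFormat7(dc1394camera_t *camera)
{
    dc1394video_frame_t *frame = NULL;
    if (dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame) != DC1394_SUCCESS || !frame) {
        return Mat();
    }

    // wrap the DMA buffer without copying (frame->size is {width, height}),
    // then clone so the buffer can go straight back to the ring
    Mat wrapped(frame->size[1], frame->size[0], CV_8UC1, frame->image);
    Mat img = wrapped.clone();

    dc1394_capture_enqueue(camera, frame);
    return img;
}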
Example #27
0
int main(int argc,char *argv[])
{
  XEvent xev;
  XGCValues xgcv;
  long background=0x010203;
  int i, j;
  dc1394_t * d;
  dc1394camera_list_t * list;

  get_options(argc,argv);
  /* process options */
  switch(fps) {
  case 1: fps =        DC1394_FRAMERATE_1_875; break;
  case 3: fps =        DC1394_FRAMERATE_3_75; break;
  case 15: fps = DC1394_FRAMERATE_15; break;
  case 30: fps = DC1394_FRAMERATE_30; break;
  case 60: fps = DC1394_FRAMERATE_60; break;
  default: fps = DC1394_FRAMERATE_7_5; break;
  }
  switch(res) {
  case 1:
    res = DC1394_VIDEO_MODE_640x480_YUV411;
    device_width=640;
    device_height=480;
    format=XV_YUY2;
    break;
  case 2:
    res = DC1394_VIDEO_MODE_640x480_RGB8;
    device_width=640;
    device_height=480;
    format=XV_YUY2;
    break;
  case 3:
    res = DC1394_VIDEO_MODE_800x600_YUV422;
    device_width=800;
    device_height=600;
    format=XV_UYVY;
    break;
  default:
    res = DC1394_VIDEO_MODE_320x240_YUV422;
    device_width=320;
    device_height=240;
    format=XV_UYVY;
    break;
  }

  dc1394error_t err;

  d = dc1394_new ();
  if (!d)
    return 1;
  err=dc1394_camera_enumerate (d, &list);
  DC1394_ERR_RTN(err,"Failed to enumerate cameras");

  if (list->num == 0) {
    dc1394_log_error("No cameras found");
    return 1;
  }

  j = 0;
  for (i = 0; i < list->num; i++) {
    if (j >= MAX_CAMERAS)
      break;
    cameras[j] = dc1394_camera_new (d, list->ids[i].guid);
    if (!cameras[j]) {
      dc1394_log_warning("Failed to initialize camera with guid %llx", list->ids[i].guid);
      continue;
    }
    j++;
  }
  numCameras = j;
  dc1394_camera_free_list (list);

  if (numCameras == 0) {
    dc1394_log_error("No cameras found");
    exit (1);
  }

  /* setup cameras for capture */
  for (i = 0; i < numCameras; i++) {
      
    //err=dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_400);
    //dc1394_video_set_operation_mode(my_camera_ptr[i], DC1394_OPERATION_MODE_1394B);
    err= dc1394_video_set_operation_mode(cameras[i], DC1394_OPERATION_MODE_1394B);
    err=dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_800);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set ISO speed");

    err=dc1394_video_set_mode(cameras[i], res);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set video mode");

    err=dc1394_video_set_framerate(cameras[i], fps);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set framerate");

    err=dc1394_capture_setup(cameras[i],NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");

    err=dc1394_video_set_transmission(cameras[i], DC1394_ON);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not start camera iso transmission");

    // Camera settings
    err = dc1394_feature_set_value(cameras[i],DC1394_FEATURE_SHUTTER,1400);
    err = dc1394_feature_set_value(cameras[i],DC1394_FEATURE_BRIGHTNESS,800);
    err = dc1394_feature_set_value(cameras[i],DC1394_FEATURE_EXPOSURE,150);
    err = dc1394_feature_whitebalance_set_value(cameras[i],500,400);

  }

  fflush(stdout);
  if (numCameras < 1) {
    perror("no cameras found :(\n");
    cleanup();
    exit(-1);
  }

  switch(format){
  case XV_YV12:
    set_frame_length(device_width*device_height*3/2, numCameras);
    break;
  case XV_YUY2:
  case XV_UYVY:
    set_frame_length(device_width*device_height*2, numCameras);
    break;
  default:
    dc1394_log_error("Unknown format set (internal error)");
    exit(255);
  }

  /* make the window */
  display=XOpenDisplay(getenv("DISPLAY"));
  if(display==NULL) {
    dc1394_log_error("Could not open display \"%s\"",getenv("DISPLAY"));
    cleanup();
    exit(-1);
  }

  QueryXv();

  if ( adaptor < 0 ) {
    cleanup();
    exit(-1);
  }

  width=device_width;
  height=device_height * numCameras;
  //width=device_width * numCameras;
  //height=device_height;

  window=XCreateSimpleWindow(display,DefaultRootWindow(display),0,0,width,height,0,
			     WhitePixel(display,DefaultScreen(display)),
			     background);

  XSelectInput(display,window,StructureNotifyMask|KeyPressMask);
  XMapWindow(display,window);
  connection=ConnectionNumber(display);

  gc=XCreateGC(display,window,0,&xgcv);

  /* main event loop */
  while(1){

    for (i = 0; i < numCameras; i++) {
      if (dc1394_capture_dequeue(cameras[i], DC1394_CAPTURE_POLICY_WAIT, &frames[i])!=DC1394_SUCCESS)
	dc1394_log_error("Failed to capture from camera %d", i);
    }

    display_frames();
    XFlush(display);

    while(XPending(display)>0){
      XNextEvent(display,&xev);
      switch(xev.type){
      case ConfigureNotify:
	width=xev.xconfigure.width;
	height=xev.xconfigure.height;
	display_frames();
	break;
      case KeyPress:
	switch(XKeycodeToKeysym(display,xev.xkey.keycode,0)){
	case XK_q:
	case XK_Q:
	  cleanup();
	  exit(0);
	  break;
	case XK_comma:
	case XK_less:
	  //width=width/2;
	  //height=height/2;
	  width--;
	  XResizeWindow(display,window,width,height);
	  display_frames();
	  break;
	case XK_period:
	case XK_greater:
	  //width=2*width;
	  //height=2*height;
	  width++;
	  XResizeWindow(display,window,width,height);
	  display_frames();
	  break;
	case XK_0:
	  freeze = !freeze;
	  break;
	case XK_1:
	  fps =        DC1394_FRAMERATE_1_875;
	  for (i = 0; i < numCameras; i++)
	    dc1394_video_set_framerate(cameras[i], fps);
	  break;
	case XK_2:
	  fps =        DC1394_FRAMERATE_3_75;
	  for (i = 0; i < numCameras; i++)
	    dc1394_video_set_framerate(cameras[i], fps);
	  break;
	case XK_4:
	  fps = DC1394_FRAMERATE_15;
	  for (i = 0; i < numCameras; i++)
	    dc1394_video_set_framerate(cameras[i], fps);
	  break;
	case XK_5:
	  fps = DC1394_FRAMERATE_30;
	  for (i = 0; i < numCameras; i++)
	    dc1394_video_set_framerate(cameras[i], fps);
	  break;
	case XK_3:
	  fps = DC1394_FRAMERATE_7_5;
	  for (i = 0; i < numCameras; i++)
	    dc1394_video_set_framerate(cameras[i], fps);
	  break;
	}
	break;
      }
    } /* XPending */

    for (i = 0; i < numCameras; i++) {
      if (frames[i])
	dc1394_capture_enqueue (cameras[i], frames[i]);
    }

  } /* while not interrupted */

  exit(0);
}
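The cleanup() routine used by the error macros in this example is not shown. A rough sketch of the teardown it presumably performs on the globals used here (an assumption):

void cleanup(void)
{
    int i;
    for (i = 0; i < numCameras; i++) {
        /* stop streaming and release each camera that was opened */
        dc1394_video_set_transmission(cameras[i], DC1394_OFF);
        dc1394_capture_stop(cameras[i]);
        dc1394_camera_free(cameras[i]);
    }
    if (display != NULL)
        XCloseDisplay(display);
}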
Example #28
0
int main(int argc, char *argv[])
{
    // get input arguments

    string configFile = "";
    string video_file_left = "", video_file_right = "", video_directory = "";
    int starting_frame_number = 0;
    bool enable_gamma = false;
    float random_results = -1.0;

    int last_frame_number = -1;

    int last_playback_frame_number = -2;

    ConciseArgs parser(argc, argv);
    parser.add(configFile, "c", "config", "Configuration file containing camera GUIDs, etc.", true);
    parser.add(show_display, "d", "show-dispaly", "Enable for visual debugging display. Will reduce framerate significantly.");
    parser.add(show_display_wait, "w", "show-display-wait", "Optional argument to decrease framerate for lower network traffic when forwarding the display.");
    parser.add(show_unrectified, "u", "show-unrectified", "When displaying images, do not apply rectification.");
    parser.add(disable_stereo, "s", "disable-stereo", "Disable online stereo processing.");
    parser.add(force_brightness, "b", "force-brightness", "Force a brightness setting.");
    parser.add(force_exposure, "e", "force-exposure", "Force an exposure setting.");
    parser.add(quiet_mode, "q", "quiet", "Reduce text output.");
    parser.add(video_file_left, "l", "video-file-left", "Do not use cameras, instead use this video file (also requires a right video file).");
    parser.add(video_file_right, "t", "video-file-right", "Right video file, only for use with the -l option.");
    parser.add(video_directory, "i", "video-directory", "Directory to search for videos in (for playback).");
    parser.add(starting_frame_number, "f", "starting-frame", "Frame to start at when playing back videos.");
    parser.add(display_hud, "v", "hud", "Overlay HUD on display images.");
    parser.add(record_hud, "x", "record-hud", "Record the HUD display.");
    parser.add(file_frame_skip, "p", "skip", "Number of frames skipped in recording (for playback).");
    parser.add(enable_gamma, "g", "enable-gamma", "Turn gamma on for both cameras.");
    parser.add(random_results, "R", "random-results", "Number of random points to produce per frame.  Can be a float in which case we'll take a random sample to decide if to produce the last one.  Disables real stereo processing.  Only for debugging / analysis!");
    parser.add(publish_all_images, "P", "publish-all-images", "Publish all images to LCM");
    parser.parse();

    // parse the config file
    if (ParseConfigFile(configFile, &stereoConfig) != true)
    {
        fprintf(stderr, "Failed to parse configuration file, quitting.\n");
        return -1;
    }

    if (video_file_left.length() > 0
        && video_file_right.length() <= 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a left one.)\n");

        return -1;
    }

     if (video_file_left.length() <= 0
        && video_file_right.length() > 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a right one.)\n");

        return -1;
    }

    recording_manager.Init(stereoConfig);

    // attempt to load video files / directories
    if (video_file_left.length() > 0) {
        if (recording_manager.LoadVideoFiles(video_file_left, video_file_right) != true) {
            // don't have videos, bail out.
            return -1;
        }
    }

    if (video_directory.length() > 0) {
        if (recording_manager.SetPlaybackVideoDirectory(video_directory) != true) {
            // bail
            return -1;
        }
    }

    recording_manager.SetQuietMode(quiet_mode);
    recording_manager.SetPlaybackFrameNumber(starting_frame_number);



    uint64 guid = stereoConfig.guidLeft;
    uint64 guid2 = stereoConfig.guidRight;

    // start up LCM
    lcm_t * lcm;
    lcm = lcm_create (stereoConfig.lcmUrl.c_str());


    unsigned long elapsed;

    Hud hud;


    // --- setup control-c handling ---
    struct sigaction sigIntHandler;

    sigIntHandler.sa_handler = control_c_handler;
    sigemptyset(&sigIntHandler.sa_mask);
    sigIntHandler.sa_flags = 0;

    sigaction(SIGINT, &sigIntHandler, NULL);
    // --- end ctrl-c handling code ---

    dc1394error_t   err;
    dc1394error_t   err2;


    // tell opencv to use only one core so that we can manage our
    // own threading without a fight
    setNumThreads(1);

    if (recording_manager.UsingLiveCameras()) {
        d = dc1394_new ();
        if (!d)
            cerr << "Could not create dc1394 context" << endl;

        d2 = dc1394_new ();
        if (!d2)
            cerr << "Could not create dc1394 context for camera 2" << endl;

        camera = dc1394_camera_new (d, guid);
        if (!camera)
        {
            cerr << "Could not create dc1394 camera... quitting." << endl;
            exit(1);
        }

        camera2 = dc1394_camera_new (d2, guid2);
        if (!camera2)
        {
            cerr << "Could not create dc1394 camera for camera 2... quitting." << endl;
            exit(1);
        }

        // reset the bus
        dc1394_reset_bus(camera);
        dc1394_reset_bus(camera2);

        // setup
        err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera");

        err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2");

        // enable camera
        err = dc1394_video_set_transmission(camera, DC1394_ON);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");
        err2 = dc1394_video_set_transmission(camera2, DC1394_ON);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2");

        InitBrightnessSettings(camera, camera2, enable_gamma);
    }

    if (show_display) {

        namedWindow("Input", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Input2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Stereo", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Left Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Right Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Debug 1", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Debug 2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);



        setMouseCallback("Input", onMouse); // for drawing disparity lines
        setMouseCallback("Stereo", onMouseStereo, &hud); // for drawing disparity lines

        moveWindow("Input", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 100);
        moveWindow("Stereo", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 370);
        moveWindow("Input2", stereoConfig.displayOffsetX + 478, stereoConfig.displayOffsetY + 100);
        moveWindow("Left Block", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 100);
        moveWindow("Right Block", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 100);

        moveWindow("Debug 1", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 670);
        moveWindow("Debug 2", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 670);

    } // show display

    if (show_display || publish_all_images) {
        // if a channel exists, subscribe to it
        if (stereoConfig.stereo_replay_channel.length() > 0) {
            stereo_replay_sub = lcmt_stereo_subscribe(lcm, stereoConfig.stereo_replay_channel.c_str(), &stereo_replay_handler, &hud);
        }

        if (stereoConfig.pose_channel.length() > 0) {
            mav_pose_t_sub = mav_pose_t_subscribe(lcm, stereoConfig.pose_channel.c_str(), &mav_pose_t_handler, &hud);
        }

        if (stereoConfig.gps_channel.length() > 0) {
            mav_gps_data_t_sub = mav_gps_data_t_subscribe(lcm, stereoConfig.gps_channel.c_str(), &mav_gps_data_t_handler, &hud);
        }

        if (stereoConfig.baro_airspeed_channel.length() > 0) {
            baro_airspeed_sub = lcmt_baro_airspeed_subscribe(lcm, stereoConfig.baro_airspeed_channel.c_str(), &baro_airspeed_handler, &hud);
        }

        if (stereoConfig.servo_out_channel.length() > 0) {
            servo_out_sub = lcmt_deltawing_u_subscribe(lcm, stereoConfig.servo_out_channel.c_str(), &servo_out_handler, &hud);
        }

        if (stereoConfig.battery_status_channel.length() > 0) {
            battery_status_sub = lcmt_battery_status_subscribe(lcm, stereoConfig.battery_status_channel.c_str(), &battery_status_handler, &hud);
        }

        if (stereoConfig.cpu_info_channel1.length() > 0) {
            cpu_info_sub1 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel1.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub2 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel2.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub3 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel3.c_str(), &cpu_info_handler, &recording_manager);
        }

        if (stereoConfig.log_size_channel1.length() > 0) {
            log_size_sub1 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel1.c_str(), &log_size_handler, &hud);
            log_size_sub2 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel2.c_str(), &log_size_handler, &hud);
            log_size_sub3 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel3.c_str(), &log_size_handler, &hud);
        }

    } // end show_display || publish_all_images

    // load calibration
    OpenCvStereoCalibration stereoCalibration;

    if (LoadCalibration(stereoConfig.calibrationDir, &stereoCalibration) != true)
    {
        cerr << "Error: failed to read calibration files. Quitting." << endl;
        return -1;
    }

    int inf_disparity_tester, disparity_tester;
    disparity_tester = GetDisparityForDistance(10, stereoCalibration, &inf_disparity_tester);

    std::cout << "computed disparity is = " << disparity_tester << ", inf disparity = " << inf_disparity_tester << std::endl;

    // subscribe to the stereo control channel
    stereo_control_sub = lcmt_stereo_control_subscribe(lcm, stereoConfig.stereoControlChannel.c_str(), &lcm_stereo_control_handler, NULL);


    Mat imgDisp;
    Mat imgDisp2;

    // initialize default parameters
    //PushbroomStereoState state; // HACK

    state.disparity = stereoConfig.disparity;
    state.zero_dist_disparity = stereoConfig.infiniteDisparity;
    state.sobelLimit = stereoConfig.interestOperatorLimit;
    state.horizontalInvarianceMultiplier = stereoConfig.horizontalInvarianceMultiplier;
    state.blockSize = stereoConfig.blockSize;
    state.random_results = random_results;
    state.check_horizontal_invariance = true;

    if (state.blockSize > 10 || state.blockSize < 1)
    {
        fprintf(stderr, "Warning: block size is very large "
            "or small (%d).  Expect trouble.\n", state.blockSize);
    }

    state.sadThreshold = stereoConfig.sadThreshold;

    state.mapxL = stereoCalibration.mx1fp;
    state.mapxR = stereoCalibration.mx2fp;
    state.Q = stereoCalibration.qMat;
    state.show_display = show_display;

    state.lastValidPixelRow = stereoConfig.lastValidPixelRow;

    Mat matL, matR;
    bool quit = false;

    if (recording_manager.UsingLiveCameras()) {
        matL = GetFrameFormat7(camera);
        matR = GetFrameFormat7(camera2);

        if (recording_manager.InitRecording(matL, matR) != true) {
            // failed to init recording, things are going bad.  bail.
            return -1;
        }

        // before we start, turn the cameras on and set the brightness and exposure
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);

        // grab a few frames and send them over LCM for the user
        // to verify that everything is working
        if (!show_display && !publish_all_images) {
            printf("Sending init images over LCM... ");
            fflush(stdout);

            for (int i = 0; i < 5; i++) {

                matL = GetFrameFormat7(camera);
                SendImageOverLcm(lcm, "stereo_image_left", matL, 50);

                matR = GetFrameFormat7(camera2);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 50);

                // don't send these too fast, otherwise we'll flood the ethernet link
                // and not actually be helpful

                // wait one second
                printf(".");
                fflush(stdout);

                sleep(1);
            }
            printf(" done.\n");
        }

    } // recording_manager.UsingLiveCameras()

    // spool up worker threads
    PushbroomStereo pushbroom_stereo;

    // start the framerate clock
    struct timeval start, now;
    gettimeofday( &start, NULL );

    while (quit == false) {

        // get the frames from the camera
        if (recording_manager.UsingLiveCameras()) {
            // we would like to match brightness every frame
            // but that would really hurt our framerate
            // match brightness every 10 frames instead
            if (numFrames % MATCH_BRIGHTNESS_EVERY_N_FRAMES == 0)
            {
                MatchBrightnessSettings(camera, camera2);
            }

            // capture images from the cameras
            matL = GetFrameFormat7(camera);
            matR = GetFrameFormat7(camera2);

            // record video
            recording_manager.AddFrames(matL, matR);


        } else {
            // using a video file -- get the next frame
            recording_manager.GetFrames(matL, matR);
        }

        cv::vector<Point3f> pointVector3d;
        cv::vector<uchar> pointColors;
        cv::vector<Point3i> pointVector2d; // for display
        cv::vector<Point3i> pointVector2d_inf; // for display

        // do the main stereo processing
        if (disable_stereo != true) {

            gettimeofday( &now, NULL );
            double before = now.tv_usec + now.tv_sec * 1000 * 1000;

            pushbroom_stereo.ProcessImages(matL, matR, &pointVector3d, &pointColors, &pointVector2d, state);

            gettimeofday( &now, NULL );
            double after = now.tv_usec + now.tv_sec * 1000 * 1000;

            timer_sum += after-before;
            timer_count ++;

        }

        // build an LCM message for the stereo data
        lcmt_stereo msg;


        if (recording_manager.UsingLiveCameras() || stereo_lcm_msg == NULL) {
            msg.timestamp = getTimestampNow();
        } else {
            // if we are replaying videos, preserve the timestamp of the original video
            msg.timestamp = stereo_lcm_msg->timestamp;

        }


        msg.number_of_points = (int)pointVector3d.size();

        float x[msg.number_of_points];
        float y[msg.number_of_points];
        float z[msg.number_of_points];
        uchar grey[msg.number_of_points];

        for (unsigned int i=0;i<pointVector3d.size();i++) {

            x[i] = pointVector3d[i].x / stereoConfig.calibrationUnitConversion;
            y[i] = pointVector3d[i].y / stereoConfig.calibrationUnitConversion;
            z[i] = pointVector3d[i].z / stereoConfig.calibrationUnitConversion;
            grey[i] = pointColors[i];
        }

        msg.x = x;
        msg.y = y;
        msg.z = z;
        msg.grey = grey;
        msg.frame_number = recording_manager.GetFrameNumber();

        if (recording_manager.UsingLiveCameras()) {
            msg.frame_number = msg.frame_number - 1;  // minus one since recording manager has
                                                      // already recorded this frame (above in
                                                      // AddFrames) but we haven't made a message
                                                      // for it yet
        }


        msg.video_number = recording_manager.GetRecVideoNumber();

        // publish the LCM message
        if (last_frame_number != msg.frame_number) {
            lcmt_stereo_publish(lcm, "stereo", &msg);
            last_frame_number = msg.frame_number;
        }

        if (publish_all_images) {
            if (recording_manager.GetFrameNumber() != last_playback_frame_number) {
                SendImageOverLcm(lcm, "stereo_image_left", matL, 80);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 80);

                last_playback_frame_number = recording_manager.GetFrameNumber();
            }

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        }

        Mat matDisp, remapL, remapR;

        if (show_display) {
            // we remap again here because we're just in display
            Mat remapLtemp(matL.rows, matL.cols, matL.depth());
            Mat remapRtemp(matR.rows, matR.cols, matR.depth());

            remapL = remapLtemp;
            remapR = remapRtemp;

            remap(matL, remapL, stereoCalibration.mx1fp, Mat(), INTER_NEAREST);
            remap(matR, remapR, stereoCalibration.mx2fp, Mat(), INTER_NEAREST);

            remapL.copyTo(matDisp);

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        } // end show_display


        if (show_display) {

            for (unsigned int i=0;i<pointVector2d.size();i++) {
                int x2 = pointVector2d[i].x;
                int y2 = pointVector2d[i].y;
                //int sad = pointVector2d[i].z;
                rectangle(matDisp, Point(x2,y2), Point(x2+state.blockSize, y2+state.blockSize), 0,  CV_FILLED);
                rectangle(matDisp, Point(x2+1,y2+1), Point(x2+state.blockSize-1, y2-1+state.blockSize), 255);

            }

            // draw pixel blocks
            if (lineLeftImgPosition >= 0 && lineLeftImgPositionY > 1) {
                DisplayPixelBlocks(remapL, remapR, lineLeftImgPosition - state.blockSize/2, lineLeftImgPositionY - state.blockSize/2, state, &pushbroom_stereo);
            }

            // draw a line for the user to show disparity
            DrawLines(remapL, remapR, matDisp, lineLeftImgPosition, lineLeftImgPositionY, state.disparity, state.zero_dist_disparity);


            if (visualize_stereo_hits == true && stereo_lcm_msg != NULL) {

                // transform the points from 3D space back onto the image's 2D space
                vector<Point3f> lcm_points;
                Get3DPointsFromStereoMsg(stereo_lcm_msg, &lcm_points);

                // draw the points on the unrectified image (to see these
                // you must pass the -u flag)
                Draw3DPointsOnImage(matL, &lcm_points, stereoCalibration.M1, stereoCalibration.D1, stereoCalibration.R1, 128);

            }

            if (show_unrectified == false) {

                imshow("Input", remapL);
                imshow("Input2", remapR);
            } else {
                imshow("Input", matL);
                imshow("Input2", matR);
            }


            if (display_hud) {
                Mat with_hud;

                recording_manager.SetHudNumbers(&hud);

                hud.DrawHud(matDisp, with_hud);

                if (record_hud) {
                    // put this frame into the HUD recording
                    recording_manager.RecFrameHud(with_hud);

                }

                imshow("Stereo", with_hud);
            } else {
                imshow("Stereo", matDisp);
            }


            char key = waitKey(show_display_wait);

            if (key != 255 && key != -1)
            {
                cout << endl << key << endl;
            }

            switch (key)
            {
                case 'T':
                    state.disparity --;
                    break;
                case 'R':
                    state.disparity ++;
                    break;

                case 'w':
                    state.sobelLimit += 10;
                    break;

                case 's':
                    state.sobelLimit -= 10;
                    break;

                case 'd':
                    state.horizontalInvarianceMultiplier -= 0.1;
                    break;

                case 'D':
                    state.horizontalInvarianceMultiplier += 0.1;
                    break;

                case 'g':
                    state.blockSize ++;
                    break;

                case 'b':
                    state.blockSize --;
                    if (state.blockSize < 1) {
                        state.blockSize = 1;
                    }
                    break;

                case 'Y':
                    state.sadThreshold += 50;
                    break;

                case 'y':
                    state.sadThreshold ++;
                    break;

                case 'h':
                    state.sadThreshold --;
                    break;

                case 'H':
                    state.sadThreshold -= 50;
                    break;

                case 'm':
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '1':
                    force_brightness --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '2':
                    force_brightness ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '3':
                    force_exposure --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '4':
                    force_exposure ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '5':
                    // to show SAD boxes
                    state.sobelLimit = 0;
                    state.sadThreshold = 255;
                    break;

                case 'I':
                    state.check_horizontal_invariance = !state.check_horizontal_invariance;
                    break;

                case '.':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 1);
                    break;

                case ',':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 1);
                    break;

                case '>':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 50);
                    break;

                case '<':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 50);
                    break;

                //case 'k':
                //    state.zero_dist_disparity ++;
                 //   break;

                case 'l':
                    state.zero_dist_disparity --;
                    break;

                case 'o':
                    inf_sad_add --;
                    break;

                case 'p':
                    inf_sad_add ++;
                    break;

                case '[':
                    y_offset --;
                    if (y_offset < 0) {
                        y_offset = 0;
                    }
                    break;

                case ']':
                    y_offset ++;
                    break;

                case 'v':
                    display_hud = !display_hud;
                    break;

                case 'c':
                    hud.SetClutterLevel(hud.GetClutterLevel() + 1);
                    break;

                case 'C':
                    hud.SetClutterLevel(hud.GetClutterLevel() - 1);
                    break;

                case '}':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() + 1);
                    break;
                case '{':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() - 1);
                    break;

                case 'S':
                    // take a screen cap of the left and right images
                    // useful for putting into a stereo tuner
                    printf("\nWriting left.ppm...");
                    imwrite("left.ppm", remapL);

                    printf("\nWriting right.ppm...");
                    imwrite("right.ppm", remapR);

                    printf("\ndone.");
                    break;

                case 'V':
                    // record the HUD
                    record_hud = true;
                    recording_manager.RestartRecHud();
                    break;

                    /*
                case 'j':
                    state.debugJ --;
                    break;

                case 'J':
                    state.debugJ ++;
                    break;

                case 'i':
                    state.debugI --;
                    break;

                case 'I':
                    state.debugI ++;
                    break;

                case 'k':
                    state.debugDisparity --;
                    break;

                case 'K':
                    state.debugDisparity ++;
                    break;

                    */

                case 'q':
                    quit = true;
                    break;
            }

            if (key != 255 && key != -1)
            {
                cout << "sadThreshold = " << state.sadThreshold << endl;
                cout << "sobelLimit = " << state.sobelLimit << endl;
                cout << "horizontalInvarianceMultiplier = " << state.horizontalInvarianceMultiplier << endl;
                cout << "brightness: " << force_brightness << endl;
                cout << "exposure: " << force_exposure << endl;
                cout << "disparity = " << state.disparity << endl;
                cout << "inf_disparity = " << state.zero_dist_disparity << endl;
                cout << "inf_sad_add = " << inf_sad_add << endl;
                cout << "blockSize = " << state.blockSize << endl;
                cout << "frame_number = " << recording_manager.GetFrameNumber() << endl;
                cout << "y offset = " << y_offset << endl;
                cout << "PitchRangeOfLens = " << hud.GetPitchRangeOfLens() << endl;
            }
        } // end show_display

        numFrames ++;

        // check for new LCM messages
        NonBlockingLcm(lcm);

        if (quiet_mode == false || numFrames % 100 == 0) {
            // compute framerate
            gettimeofday( &now, NULL );

            elapsed = (now.tv_usec / 1000 + now.tv_sec * 1000) -
            (start.tv_usec / 1000 + start.tv_sec * 1000);

            printf("\r%d frames (%lu ms) - %4.1f fps | %4.1f ms/frame, stereo: %f", numFrames, elapsed, (float)numFrames/elapsed * 1000, elapsed/(float)numFrames, timer_sum/(double)timer_count);
            fflush(stdout);
        }


    } // end main while loop

    printf("\n\n");

    destroyWindow("Input");
    destroyWindow("Input2");
    destroyWindow("Stereo");

    // close camera
    if (recording_manager.UsingLiveCameras()) {
        StopCapture(d, camera);
        StopCapture(d2, camera2);
    }

    return 0;
}
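StopCapture() is defined elsewhere in this codebase; a minimal sketch of the shutdown it presumably performs (an assumption):

void StopCapture(dc1394_t *dcContext, dc1394camera_t *camera)
{
    /* stop ISO transmission and DMA capture, then release the camera and its context */
    dc1394_video_set_transmission(camera, DC1394_OFF);
    dc1394_capture_stop(camera);
    dc1394_camera_free(camera);
    dc1394_free(dcContext);
}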
Example #29
0
int main(int argc, char *argv[])
{
  fitsfile *fptr;
  long fpixel=1, nelements, naxes[3];
  dc1394camera_t *camera;
  int grab_n_frames;
  struct timeval start_time, end_time;
  time_t start_sec, end_sec;
  suseconds_t start_usec, end_usec;
  float elapsed_time, fps;
  int i, status = 0;   /* cfitsio requires status to be initialized to zero */
  unsigned int min_bytes, max_bytes, max_height, max_width;
  unsigned int actual_bytes;
  uint64_t total_bytes = 0;
  unsigned int width, height;
  dc1394video_frame_t *frame=NULL;
  dc1394_t * d;
  dc1394camera_list_t * list;
  dc1394error_t err;
  char *filename;
  
  if (argc < 3) {
    fprintf(stderr, "Usage: %s <num_frames> <output_fits_file>\n", argv[0]);
    return 1;
  }
  grab_n_frames = atoi(argv[1]);
  filename = argv[2];
  
  width = 320;
  height = 240;
  naxes[0] = width;
  naxes[1] = height;
  naxes[2] = grab_n_frames;
  
  nelements = naxes[0]*naxes[1]*naxes[2];
  
  freopen("grab_cube.log", "w", stderr);   /* redirect stderr to a log file; freopen reassociates the stream in place */
  
  d = dc1394_new ();
  if (!d)
    return 1;
  err=dc1394_camera_enumerate (d, &list);
  DC1394_ERR_RTN(err,"Failed to enumerate cameras");
  
  if (list->num == 0) {
    dc1394_log_error("No cameras found");
    return 1;
  }
  
  camera = dc1394_camera_new (d, list->ids[0].guid);
  if (!camera) {
    dc1394_log_error("Failed to initialize camera with guid %"PRIx64,
                     list->ids[0].guid);
    return 1;
  }
  dc1394_camera_free_list (list);
  
  printf("Using camera with GUID %"PRIx64"\n", camera->guid);
  
  /*-----------------------------------------------------------------------
   *  setup capture for format 7
   *-----------------------------------------------------------------------*/
  // err=dc1394_video_set_operation_mode(camera, DC1394_OPERATION_MODE_1394B);
  // DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot operate at 1394B");
  
  // libdc1394 doesn't work well with firewire800 yet so set to legacy 400 mode
  dc1394_video_set_iso_speed(camera, DC1394_ISO_SPEED_400);
  
  // configure camera for format7
  err = dc1394_video_set_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot choose format7_0");
  printf ("I: video mode is format7_0\n");
  
  err = dc1394_format7_get_max_image_size (camera, DC1394_VIDEO_MODE_FORMAT7_1, 
                                           &max_width, &max_height);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get max image size for format7_0");
  printf ("I: max image size is: height = %d, width = %d\n", max_height, max_width);
  printf ("I: current image size is: height = %d, width = %d\n", height, width);
  
  err = dc1394_format7_set_roi (camera, 
                                DC1394_VIDEO_MODE_FORMAT7_1, 
                                DC1394_COLOR_CODING_MONO16, // not sure why RAW8/16 don't work
                                DC1394_USE_MAX_AVAIL, 
                                0, 0, // left, top
                                width, height); // width, height
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set roi");
  printf ("I: roi is (0, 0) - (%d, %d)\n", width, height);
  
  // set the frame rate to absolute value in frames/sec
  err = dc1394_feature_set_mode(camera, DC1394_FEATURE_FRAME_RATE, DC1394_FEATURE_MODE_MANUAL);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to manual");
  err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_FRAME_RATE, DC1394_TRUE);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate to absolute mode");
  err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_FRAME_RATE, 330.0);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set framerate");
  printf("I: framerate is %f fps\n", 330.0);
  
  // set the shutter speed to absolute value in seconds 
  err = dc1394_feature_set_mode(camera, DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to manual");
  err = dc1394_feature_set_absolute_control(camera, DC1394_FEATURE_SHUTTER, DC1394_TRUE);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter to absolute mode");
  err = dc1394_feature_set_absolute_value(camera, DC1394_FEATURE_SHUTTER, 3.0e-3);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set shutter");
  printf("I: exptime is %f s\n", 3.0e-3);
  
  // set gain manually.  use relative value here in range 48 to 730. 
  err = dc1394_feature_set_mode(camera, DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain to manual");
  err = dc1394_feature_set_value(camera, DC1394_FEATURE_GAIN, 400);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set gain");
  printf ("I: gain is %d\n", 400);
  
  // set brightness manually.  use relative value in range 0 to 1023.
  err = dc1394_feature_set_mode(camera, DC1394_FEATURE_BRIGHTNESS, DC1394_FEATURE_MODE_MANUAL);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness to manual");
  err = dc1394_feature_set_value(camera, DC1394_FEATURE_BRIGHTNESS, 100);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot set brightness");
  printf ("I: brightness is %d\n", 100);

  err = dc1394_format7_get_total_bytes (camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes);
  DC1394_ERR_CLN_RTN(err,dc1394_camera_free (camera),"cannot get total bytes");
  printf ("I: total bytes is %"PRIu64" before SFF enabled\n", total_bytes);
  
  err=dc1394_capture_setup(camera, 16, DC1394_CAPTURE_FLAGS_DEFAULT);
  DC1394_ERR_CLN_RTN(err, dc1394_camera_free(camera), "Error capturing");
  
  /*-----------------------------------------------------------------------
   *  print allowed and used packet size
   *-----------------------------------------------------------------------*/
  err=dc1394_format7_get_packet_parameters(camera, DC1394_VIDEO_MODE_FORMAT7_1, &min_bytes, &max_bytes);
  
  DC1394_ERR_RTN(err,"Packet para inq error");
  printf( "camera reports allowed packet size from %d - %d bytes\n", min_bytes, max_bytes);
  
  err=dc1394_format7_get_packet_size(camera, DC1394_VIDEO_MODE_FORMAT7_1, &actual_bytes);
  DC1394_ERR_RTN(err,"dc1394_format7_get_packet_size error");
  printf( "camera reports actual packet size = %d bytes\n", actual_bytes);
  
  err=dc1394_format7_get_total_bytes(camera, DC1394_VIDEO_MODE_FORMAT7_1, &total_bytes);
  DC1394_ERR_RTN(err,"dc1394_query_format7_total_bytes error");
  printf( "camera reports total bytes per frame = %"PRId64" bytes\n",
         total_bytes);
  
  /*-----------------------------------------------------------------------
   *  have the camera start sending us data
   *-----------------------------------------------------------------------*/
  err=dc1394_video_set_transmission(camera,DC1394_ON);
  if (err!=DC1394_SUCCESS) {
    dc1394_log_error("unable to start camera iso transmission");
    dc1394_capture_stop(camera);
    dc1394_camera_free(camera);
    exit(1);
  }
  
  // set up FITS image and capture
  fits_create_file(&fptr, filename, &status);
  dc1394_get_image_size_from_video_mode(camera, DC1394_VIDEO_MODE_FORMAT7_1, &width, &height);
  fits_create_img(fptr, USHORT_IMG, 3, naxes, &status);
  
  /*-----------------------------------------------------------------------
   *  capture frames and measure the time for this operation
   *-----------------------------------------------------------------------*/
  gettimeofday(&start_time, NULL);
  
  printf("Start capture:\n");
  
  for( i = 0; i < grab_n_frames; ++i) {
    /*-----------------------------------------------------------------------
     *  capture one frame
     *-----------------------------------------------------------------------*/
    err=dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
    if (err!=DC1394_SUCCESS) {
      dc1394_log_error("unable to capture");
      dc1394_capture_stop(camera);
      dc1394_camera_free(camera);
      exit(1);
    }
    
    // attempt to preallocate image array and write to memory before dumping to disk.
    // turns out to be slow due to large size of images. cfitsio buffering is far
    // more efficient.
    
    //memcpy(im_buffer+2*i*naxes[0]*naxes[1], 
    //frame->image-1, 
    //naxes[0]*naxes[1]*sizeof(short));
    
    // just writing each frame to the FITS file goes pretty fast
    fits_write_img(fptr, 
                   TUSHORT, 
                   fpixel+i*naxes[0]*naxes[1], 
                   naxes[0]*naxes[1], 
                   frame->image-1, 
                   &status);
    
    // release buffer
    dc1394_capture_enqueue(camera,frame);
  }
  
  gettimeofday(&end_time, NULL);
  printf("End capture.\n");
  
  /*-----------------------------------------------------------------------
   *  stop data transmission
   *-----------------------------------------------------------------------*/
  start_sec = start_time.tv_sec;
  start_usec = start_time.tv_usec;
  end_sec = end_time.tv_sec;
  end_usec = end_time.tv_usec;
  
  elapsed_time = (float)((end_sec + 1.0e-6*end_usec) - (start_sec + 1.0e-6*start_usec));
  fps = grab_n_frames/elapsed_time;
  printf("Elapsed time = %g seconds.\n", elapsed_time);
  printf("Framerate = %g fps.\n", fps);
  
  err=dc1394_video_set_transmission(camera,DC1394_OFF);
  DC1394_ERR_RTN(err,"couldn't stop the camera?");
  
  /*-----------------------------------------------------------------------
   *  save FITS image to disk
   *-----------------------------------------------------------------------*/
  //fits_write_img(fptr, TUSHORT, fpixel, naxes[0]*naxes[1]*naxes[2], im_buffer, &status);
  fits_close_file(fptr, &status);
  fits_report_error(stderr, status);
  //free(im_buffer);
  
  printf("wrote: %s\n", filename);
  printf("Image is %d bits/pixel.\n", frame->data_depth);
  
  /*-----------------------------------------------------------------------
   *  close camera, cleanup
   *-----------------------------------------------------------------------*/
  dc1394_capture_stop(camera);
  dc1394_video_set_transmission(camera, DC1394_OFF);
  dc1394_camera_free(camera);
  dc1394_free (d);
  return 0;
}
Example #30
0
int main(int argc, char *argv[]) 
{
  // assumes your camera can output 640x480 with 8-bit monochrome
  video_mode = DC1394_VIDEO_MODE_640x480_MONO8;

  d = dc1394_new();
  if (!d)
    return 1;
  err=dc1394_camera_enumerate (d, &list);
  DC1394_ERR_RTN(err,"Failed to enumerate cameras");

  if (list->num == 0) {
    dc1394_log_error("No cameras found");
    return 1;
  }

  // use two counters so that the cameras array does not contain gaps in the case of errors
  j = 0;
  for (i = 0; i < list->num; i++) {
    if (j >= MAX_CAMERAS)
      break;
    cameras[j] = dc1394_camera_new (d, list->ids[i].guid);
    if (!cameras[j]) {
      dc1394_log_warning("Failed to initialize camera with guid %llx", list->ids[i].guid);
      continue;
    }
    j++;
  }
  numCameras = j;
  dc1394_camera_free_list (list);

  if (numCameras == 0) {
    dc1394_log_error("No cameras found");
    exit (1);
  }

  // setup cameras for capture
  for (i = 0; i < numCameras; i++) {
    err=dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_800);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set ISO speed");
    
    err=dc1394_video_set_mode(cameras[i], video_mode);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not set video mode");
    
    err=dc1394_capture_setup(cameras[i],NUM_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not setup camera-\nmake sure that the video mode and framerate are\nsupported by your camera");

    err=dc1394_get_image_size_from_video_mode(cameras[i], video_mode, &video_mode_width, &video_mode_height);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not query video mode width and height");

    err=dc1394_video_set_one_shot(cameras[i], DC1394_ON);
    DC1394_ERR_CLN_RTN(err,cleanup(),"Could not use one shot mode");
  }

    fflush(stdout);
    if (numCameras < 1) {
        perror("no cameras found :(\n");
        cleanup();
        exit(-1);
    }

    for (i = 0; i < numCameras; i++) {
      if (dc1394_capture_dequeue(cameras[i], DC1394_CAPTURE_POLICY_WAIT, &frames[i])!=DC1394_SUCCESS)
	dc1394_log_error("Failed to capture from camera %d", i);

      // save image as '[GUID].pgm'
      char filename[256];
      sprintf(filename, "%" PRIu64 "%s",list->ids[i].guid,IMAGE_FILE_EXTENSION);
      imagefile=fopen(filename, "w");

      if( imagefile == NULL) {
	  dc1394_log_error("Can't create %s", filename);
	}
  
      // adding the pgm file header
      fprintf(imagefile,"P5\n%u %u 255\n", video_mode_width, video_mode_height);

      // writing to the file
      fwrite((const char *)frames[i]->image, 1, \
	     video_mode_width * video_mode_height, imagefile);
      fclose(imagefile);                                    
    }

  // exit cleanly
  cleanup();
  return(0);
}