Example #1
void Camera::start(FrameStartTriggerMode fmode, AcquisitionMode amode)
{
  //assert( FSTmode_ == None && fmode != None );
  ///@todo verify this assert again
  //assert( fmode == SyncIn1 || fmode == SyncIn2 || fmode == Software || !userCallback_.empty() );
  
  // start the driver's capture stream
  CHECK_ERR( PvCaptureStart(handle_), "Could not start capture");

  if (fmode == Freerun || fmode == SyncIn1 || fmode == SyncIn2 ||  fmode == FixedRate)
    for (unsigned int i = 0; i < bufferSize_; ++i)
      PvCaptureQueueFrame(handle_, frames_ + i, Camera::frameDone);

  // set acquisition and trigger modes, then start acquisition
  try {
    ///@todo take this one also as an argument
    CHECK_ERR( PvAttrEnumSet(handle_, "AcquisitionMode", acquisitionModes[amode]),
               "Could not set acquisition mode" );
    CHECK_ERR( PvAttrEnumSet(handle_, "FrameStartTriggerMode", triggerModes[fmode]),
               "Could not set trigger mode" );
    CHECK_ERR( PvCommandRun(handle_, "AcquisitionStart"),
               "Could not start acquisition" );
  } 
  catch (ProsilicaException& e) {
    PvCaptureEnd(handle_); // reset to non capture mode
    throw; // rethrow
  }
  FSTmode_ = fmode;
  Amode_ = amode;
}
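Example #1 relies on a few helpers from the same ROS prosilica driver that are not reproduced in this listing: a CHECK_ERR macro that turns a non-success tPvErr into the ProsilicaException caught above, and the triggerModes / acquisitionModes tables indexed by fmode and amode. A rough sketch of what they are assumed to look like (names, ordering, and the exception constructor are guesses, not the driver's actual code):

// hypothetical helpers assumed by Camera::start()
static const char* triggerModes[]     = {"Freerun", "SyncIn1", "SyncIn2", "FixedRate", "Software"};
static const char* acquisitionModes[] = {"Continuous", "SingleFrame", "MultiFrame", "Recorder"};

#define CHECK_ERR(fnc, msg)                  \
do {                                         \
  tPvErr err = (fnc);                        \
  if (err != ePvErrSuccess)                  \
    throw ProsilicaException(err, msg);      \
} while (false)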
Example #2
bool CvCaptureCAM_PvAPI::startCapture()
{
    // Start the camera
    PvCaptureStart(Camera.Handle);

    // Set the camera to capture continuously
    if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
    {
        fprintf(stderr,"Could not set PvAPI Acquisition Mode\n");
        return false;
    }

    if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
    {
        fprintf(stderr,"Could not start PvAPI acquisition\n");
        return false;
    }

    if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
    {
        fprintf(stderr,"Error setting PvAPI trigger to \"Freerun\"");
        return false;
    }

    return true;
}
Example #3
	bool OmniCamera::connect(void) {
		if(isConnected())
			return true;

		tPvCameraInfoEx info[1];
		 unsigned long frameSize = 0;

		//PvLinkCallbackRegister(CameraEventCB,ePvLinkAdd,NULL);
        //PvLinkCallbackRegister(CameraEventCB,ePvLinkRemove,NULL);        

		if(!PvInitialize()) {

			while(!PvCameraCount()) 
				Sleep(250);


			unsigned long numCameras = PvCameraListEx(info, 1, NULL, sizeof(tPvCameraInfoEx));
								
			if ((numCameras == 1) && (info[0].PermittedAccess & ePvAccessMaster)) {
				
				_camera.UID = info[0].UniqueId;

				if(!PvCameraOpen(_camera.UID, ePvAccessMaster, &(_camera.handle))) {

					if(!PvAttrUint32Get(_camera.handle,"TotalBytesPerFrame",&frameSize)) {

						_camera.frame.ImageBuffer = new char[frameSize];
						
						unsigned long format=0;
						PvAttrEnumSet(_camera.handle,"PixelFormat","Bayer8");
						char text[100];
						PvAttrEnumGet(_camera.handle,"PixelFormat",text,sizeof(text),&format);
						printf("format %lu %s\n",format,text);
						if(_camera.frame.ImageBuffer) {
							_camera.frame.ImageBufferSize = frameSize;
									
							PvAttrUint32Get(_camera.handle,"Width",&_width);
							PvAttrUint32Get(_camera.handle,"Height",&_height);
							PvCaptureStart(_camera.handle);
							PvAttrEnumSet(_camera.handle, "AcquisitionMode", "Continuous");
							PvCommandRun(_camera.handle, "AcquisitionStart");

							_connected = true;
							return true;
						}
					}
				}
			}				
		}
		_connected = false;
		return false;
	}
Example #4
bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
{
    switch ( property_id )
    {
    /*  TODO: Camera works, but IplImage must be modified for the new size
    case CV_CAP_PROP_FRAME_WIDTH:
        PvAttrUint32Set(Camera.Handle, "Width", (tPvUint32)value);
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        PvAttrUint32Set(Camera.Handle, "Height", (tPvUint32)value);
        break;
    */
    case CV_CAP_PROP_MONOCROME:
        if (value==1) {
            char pixelFormat[256];
            PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
            if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0) {
                monocrome=true;
            }
            else
                return false;
        }
        else
            monocrome=false;
        break;
    case CV_CAP_PROP_EXPOSURE:
        if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        if (value==-1) {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
        else {
            std::string ip=cv::format("%d.%d.%d.%d", ((int)value>>24)&255, ((int)value>>16)&255, ((int)value>>8)&255, (int)value&255);
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                break;
            else
                return false;
        }
    default:
        return false;
    }
    return true;
}
Example #5
// setup and start streaming
// return value: true == success, false == fail
bool CameraStart()
{
    tPvErr errCode;
	bool failed = false;

    // NOTE: This call sets camera PacketSize to largest sized test packet, up to 8228, that doesn't fail
	// on network card. Some MS VISTA network card drivers become unresponsive if test packet fails. 
	// Use PvAttrUint32Set(handle, "PacketSize", MaxAllowablePacketSize) instead. See network card properties
	// for max allowable PacketSize/MTU/JumboFrameSize. 
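	// A minimal sketch of that explicit alternative (the 1500 here is only an assumption standing
	// in for the adapter's real MTU / jumbo-frame setting):
	//   tPvUint32 maxAllowablePacketSize = 1500;
	//   PvAttrUint32Set(GCamera.Handle, "PacketSize", maxAllowablePacketSize);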
	if((errCode = PvCaptureAdjustPacketSize(GCamera.Handle,8228)) != ePvErrSuccess)
	{
		printf("CameraStart: PvCaptureAdjustPacketSize err: %u\n", errCode);
		return false;
	}

    // start driver capture stream 
	if((errCode = PvCaptureStart(GCamera.Handle)) != ePvErrSuccess)
	{
		printf("CameraStart: PvCaptureStart err: %u\n", errCode);
		return false;
	}

    // queue frames with FrameDoneCB callback function. Each frame can use a unique callback function
	// or, as in this case, the same callback function.
	for(int i=0;i<FRAMESCOUNT && !failed;i++)
	{           
		if((errCode = PvCaptureQueueFrame(GCamera.Handle,&(GCamera.Frames[i]),FrameDoneCB)) != ePvErrSuccess)
		{
			printf("CameraStart: PvCaptureQueueFrame err: %u\n", errCode);
			failed = true;
		}
	}

	if (failed)
	{
		// clear any queued frames and release the capture stream before bailing out
		PvCaptureQueueClear(GCamera.Handle);
		PvCaptureEnd(GCamera.Handle);
		return false;
	}
		
	// set the camera to 5 FPS, continuous mode, and start camera receiving triggers					
	if((PvAttrFloat32Set(GCamera.Handle,"FrameRate",5) != ePvErrSuccess) ||
		(PvAttrEnumSet(GCamera.Handle,"FrameStartTriggerMode","FixedRate") != ePvErrSuccess) ||
		(PvAttrEnumSet(GCamera.Handle,"AcquisitionMode","Continuous") != ePvErrSuccess) ||
		(PvCommandRun(GCamera.Handle,"AcquisitionStart") != ePvErrSuccess))
	{
		printf("CameraStart: failed to set camera attributes\n");
		// clear queued frames and stop the driver capture stream before bailing out
		PvCaptureQueueClear(GCamera.Handle);
		PvCaptureEnd(GCamera.Handle);
		return false;
	}

	return true;
}
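This CameraStart() queues frames with a FrameDoneCB callback that is not part of the listing. A minimal sketch of such a callback, assuming the usual PvAPI pattern of re-queueing every frame that was not cancelled, and the same GCamera global used above, might be:

// hedged sketch only -- the real FrameDoneCB presumably processes pFrame->ImageBuffer first,
// and a calling-convention macro (e.g. _STDCALL/PVDECL) may be required on Windows builds
void FrameDoneCB(tPvFrame* pFrame)
{
    // frames returned with ePvErrCancelled come from PvCaptureQueueClear; don't requeue those
    if (pFrame->Status != ePvErrCancelled)
        PvCaptureQueueFrame(GCamera.Handle, pFrame, FrameDoneCB);
}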
Example #6
// define pixel format
void cameraSetPixelFormat(tCamera* camera, string format) {
  tPvErr err = PvAttrEnumSet(camera->Handle, "PixelFormat", format.c_str());
  if (err != ePvErrSuccess){
    stringstream buf;
    buf << "failed to define pixel format : " << cameraGetError(err) << endl;
    throw buf.str();
  }
}
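cameraSetPixelFormat() calls a cameraGetError() helper that this listing omits. A plausible sketch (purely an assumption; the real helper may wrap the SDK differently) is a small switch over the tPvErr codes seen in these examples:

// hypothetical error-to-text helper assumed by the snippet above
string cameraGetError(tPvErr err) {
  switch (err) {
    case ePvErrSuccess:      return "no error";
    case ePvErrBadParameter: return "bad parameter";
    case ePvErrUnplugged:    return "camera was unplugged";
    case ePvErrOutOfRange:   return "value is out of range";
    default:                 return "unknown PvAPI error";
  }
}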
Example #7
// setup and start streaming
// return value: true == success, false == fail
bool CameraStart()
{
    tPvErr errCode;

    // NOTE: This call sets camera PacketSize to largest sized test packet, up to 8228, that doesn't fail
	// on network card. Some MS VISTA network card drivers become unresponsive if test packet fails. 
	// Use PvAttrUint32Set(handle, "PacketSize", MaxAllowablePacketSize) instead. See network card properties
	// for max allowable PacketSize/MTU/JumboFrameSize. 
	if((errCode = PvCaptureAdjustPacketSize(GCamera.Handle,8228)) != ePvErrSuccess)
	{
		printf("CameraStart: PvCaptureAdjustPacketSize err: %u\n", errCode);
		return false;
	}

    // start driver capture stream 
	if((errCode = PvCaptureStart(GCamera.Handle)) != ePvErrSuccess)
	{
		printf("CameraStart: PvCaptureStart err: %u\n", errCode);
		return false;
	}
	
    // queue frame
	if((errCode = PvCaptureQueueFrame(GCamera.Handle,&(GCamera.Frame),NULL)) != ePvErrSuccess)
	{
		printf("CameraStart: PvCaptureQueueFrame err: %u\n", errCode);
		// stop driver capture stream
		PvCaptureEnd(GCamera.Handle);
		return false;
	}
		
	// set the camera in hardware trigger, continuous mode, and start camera receiving triggers
	if((PvAttrEnumSet(GCamera.Handle,"FrameStartTriggerMode","SyncIn1") != ePvErrSuccess) ||
		(PvAttrEnumSet(GCamera.Handle,"AcquisitionMode","Continuous") != ePvErrSuccess) ||
		(PvCommandRun(GCamera.Handle,"AcquisitionStart") != ePvErrSuccess))
	{		
		printf("CameraStart: failed to set camera attributes\n");
		// clear queued frame
		PvCaptureQueueClear(GCamera.Handle);
		// stop driver capture stream
		PvCaptureEnd(GCamera.Handle);
		return false;
	}	

	return true;
}
Example #8
void Camera::setExposure(unsigned int val, AutoSetting isauto)
{
  CHECK_ERR( PvAttrEnumSet(handle_, "ExposureMode", autoValues[isauto]),
             "Couldn't set exposure mode" );

  if (isauto == Manual)
    CHECK_ERR( PvAttrUint32Set(handle_, "ExposureValue", val),
               "Couldn't set exposure value" );
}
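setExposure(), like setGain() and setWhiteBalance() in Examples #11 and #12, indexes an autoValues table that is not shown here. Judging from the enum strings PvAPI expects for these attributes, it is presumably something like the following (an assumption mirroring the AutoSetting enum used above):

// hypothetical lookup: AutoSetting { Manual, Auto, AutoOnce } -> PvAPI enum strings
static const char* autoValues[] = {"Manual", "Auto", "AutoOnce"};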
Example #9
// setup and start streaming
void cameraStart(tCamera* camera, tPvUint32 packetSize) {
  cameraSetPixelFormat(camera, "Mono16");
  cameraSetExpo(camera, 1000);

  //tPvUint32 ROI[4]={0,0,493,659};
  //cameraSetROI(camera,ROI);

  // Auto adjust the packet size to max supported by the network, up to a max of 8228.
  // NOTE: In Vista, if the packet size on the network card is set lower than 8228,
  //       this call may break the network card's driver. See release notes.
  //

  tPvErr err = PvCaptureAdjustPacketSize(camera->Handle,packetSize);
  //err = PvAttrUint32Set(camera->Handle, "PacketSize", 1500);
  if (err != ePvErrSuccess){
    stringstream buf;
    buf << "failed to adjust packet size : " << cameraGetError(err) << endl;
    throw buf.str();
  }

  unsigned long FrameSize = 0;
  err = PvAttrUint32Get(camera->Handle, "TotalBytesPerFrame", &FrameSize);
  if (err != ePvErrSuccess){
    stringstream buf;
    buf << "failed to get TotalBytesPerFrame : " << cameraGetError(err) << endl;
    throw buf.str();
  }

  // allocate the buffer for the single frame we need
  camera->Frame.Context[0] = camera;
  camera->Frame.ImageBuffer = new char[FrameSize];
  if (camera->Frame.ImageBuffer)
    camera->Frame.ImageBufferSize = FrameSize;
  else
    throw "failed allocate Frame.ImageBuffer";


  // put the camera in capture mode and start continuous acquisition
  if (!PvCaptureStart(camera->Handle)) {
    // set the frame start trigger mode ("FixedRate" / "Freerun")
    if (!PvAttrEnumSet(camera->Handle, "FrameStartTriggerMode", "Freerun")) {
      // and start the acquisition
      if (PvCommandRun(camera->Handle, "AcquisitionStart")) {
        // if that fails, reset the camera to non-capture mode
        PvCaptureEnd(camera->Handle);
        throw "failed to start continuous acquisition";
      }
    } else
      throw "failed to set the frame start trigger mode";
  } else
    throw "failed to put the camera in capture mode";
}
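cameraStart() allocates camera->Frame but never queues it, so the actual grab presumably happens elsewhere. A minimal synchronous grab step, assuming the usual PvAPI queue-and-wait pattern (the infinite timeout constant comes from PvApi.h), could look like:

// queue the single pre-allocated frame and block until it completes
if (!PvCaptureQueueFrame(camera->Handle, &camera->Frame, NULL))
  PvCaptureWaitForFrameDone(camera->Handle, &camera->Frame, PVINFINITE);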
Example #10
// setup and start streaming
bool CameraStart(tCamera* Camera)
{
    unsigned long FrameSize = 0;

    // Auto adjust the packet size to max supported by the network, up to a max of 8228.
    // NOTE: In Vista, if the packet size on the network card is set lower than 8228,
    //       this call may break the network card's driver. See release notes.
    //
    //PvCaptureAdjustPacketSize(Camera->Handle,8228);

    // how big should the frame buffers be?
    if(!PvAttrUint32Get(Camera->Handle,"TotalBytesPerFrame",&FrameSize))
    {
        bool failed = false;

        // allocate the buffer for the single frame we need
        Camera->Frame.Context[0]  = Camera;
        Camera->Frame.ImageBuffer = new char[FrameSize];
        if(Camera->Frame.ImageBuffer)
            Camera->Frame.ImageBufferSize = FrameSize;
        else
            failed = true;

        if(!failed)
        {
            // put the camera in capture mode
            if(!PvCaptureStart(Camera->Handle))
            {
                // set the camera to free-run trigger mode
                if(!PvAttrEnumSet(Camera->Handle,"FrameStartTriggerMode","Freerun"))
                {
                    // and start the acquisition
                    if(PvCommandRun(Camera->Handle,"AcquisitionStart"))
                    {
                        // if that fails, reset the camera to non-capture mode
                        PvCaptureEnd(Camera->Handle);
                        return false;
                    }
                    else
                        return true;
                }
                else
                    return false;
            }
            else
                return false;
        }
        else
            return false;
    }
    else
        return false;
}
Example #11
void Camera::setGain(unsigned int val, AutoSetting isauto)
{
  /// @todo Here and in setWhiteBalance, would be better to split off setGainMode etc.
  /// I didn't take into account there are cameras that don't support auto gain, auto white balance.
  if (PvAttrIsAvailable(handle_, "GainMode") == ePvErrSuccess)
  {
    CHECK_ERR( PvAttrEnumSet(handle_, "GainMode", autoValues[isauto]),
               "Couldn't set gain mode" );
  }

  if (isauto == Manual)
    CHECK_ERR( PvAttrUint32Set(handle_, "GainValue", val),
               "Couldn't set gain value" );
}
Example #12
void Camera::setWhiteBalance(unsigned int blue, unsigned int red, AutoSetting isauto)
{
  if (PvAttrIsAvailable(handle_, "WhitebalMode") == ePvErrSuccess)
  {
    CHECK_ERR( PvAttrEnumSet(handle_, "WhitebalMode", autoValues[isauto]),
               "Couldn't set white balance mode" );
  }

  if (isauto == Manual && PvAttrIsAvailable(handle_, "WhitebalValueBlue") == ePvErrSuccess)
  {
    CHECK_ERR( PvAttrUint32Set(handle_, "WhitebalValueBlue", blue),
               "Couldn't set white balance blue value" );
    CHECK_ERR( PvAttrUint32Set(handle_, "WhitebalValueRed", red),
               "Couldn't set white balance red value" );
  }
}
Example #13
// setup and start streaming
bool CameraStart(tCamera* Camera)
{
    unsigned long FrameSize = 0;

    // how big should the frame buffers be?
    if(!PvAttrUint32Get(Camera->Handle,"TotalBytesPerFrame",&FrameSize))
    {
        bool failed = false;

        // allocate the buffer for the single frame we need
        Camera->Frame.Context[0]  = Camera;
        Camera->Frame.ImageBuffer = new char[FrameSize];
        if(Camera->Frame.ImageBuffer)
            Camera->Frame.ImageBufferSize = FrameSize;
        else
            failed = true;

        if(!failed)
        {
            // put the camera in capture mode
            if(!PvCaptureStart(Camera->Handle))
            {
                // set the camera to free-run trigger mode
                if(!PvAttrEnumSet(Camera->Handle,"FrameStartTriggerMode","Freerun"))
                {
                    // and start the acquisition
                    if(PvCommandRun(Camera->Handle,"AcquisitionStart"))
                    {
                        // if that fails, reset the camera to non-capture mode
                        PvCaptureEnd(Camera->Handle);
                        return false;
                    }
                    else
                        return true;
                }
                else
                    return false;
            }
            else
                return false;
        }
        else
            return false;
    }
    else
        return false;
}
Example #14
//
// idlPvAttrEnumSet
//
// Set the value of an enumerated attribute
//
// command line arguments
// argv[0]: IN/FLAG debug
// argv[1]: IN camera index
// argv[2]: IN attribute name
// argv[3]: IN attribute value
int idlPvAttrEnumSet (int argc, char *argv[])
{
  unsigned long n;
  unsigned long err;
  IDL_STRING *name;
  IDL_STRING *value;

  debug = *(IDL_INT *) argv[0];
  n = *(unsigned long *) argv[1];
  name = (IDL_STRING *) argv[2];
  value = (IDL_STRING *) argv[3];

  CHECKINDEX(n);

  err = PvAttrEnumSet(camera[n],
		      (const char *) IDL_STRING_STR(name),
		      (const char *) IDL_STRING_STR(value));

  return idlPvErrCode(err);
}
Example #15
// setup and start streaming
bool CameraStart()
{
    unsigned long FrameSize = 0;

    // how big should the frame buffers be?
    if(!PvAttrUint32Get(GCamera.Handle,"TotalBytesPerFrame",&FrameSize))
    {
        bool failed = false;

        // allocate the buffer for the single frame we need
        GCamera.Frame.ImageBuffer = new char[FrameSize];
        if(GCamera.Frame.ImageBuffer)
            GCamera.Frame.ImageBufferSize = FrameSize;
        else
            failed = true;

        if(!failed)
        {
            // put the camera in capture mode
            if(!PvCaptureStart(GCamera.Handle))
            {
                // set the camera to hardware trigger mode (SyncIn1) and start acquisition
                if(PvAttrEnumSet(GCamera.Handle,"FrameStartTriggerMode","SyncIn1") ||
                   PvCommandRun(GCamera.Handle,"AcquisitionStart"))
                {
                    // if that fails, reset the camera to non-capture mode
                    PvCaptureEnd(GCamera.Handle);
                    return false;
                }
                else
                    return true;
            }
            else
                return false;
        }
        else
            return false;
    }
    else
        return false;
}
Example #16
//--------------------------------------------------------------------
bool ofxVideoGrabberPvAPI::initGrabber(int w, int h, bool setUseTexture){

    width = w;
    height = h;
    tPvErr ret;   //PvAPI return codes
	bUseTexture = setUseTexture;
    memset( &cameraUID, 0, sizeof(cameraUID) );
    memset( &cameraHandle, 0, sizeof(cameraHandle) );
    memset( &cameraFrame, 0, sizeof(cameraFrame) );
    
    
    //---------------------------------- 1 - open the sequence grabber
    // lazy initialization of the Prosilica API
    if( !bPvApiInitiated ){
        ret = PvInitialize();
        if( ret == ePvErrSuccess ) {
            ofLog(OF_LOG_VERBOSE, "PvAPI initialized");
        } else {
            ofLog(OF_LOG_ERROR, "unable to initialize PvAPI");
            return false;
        }
        
        bPvApiInitiated = true;
    }
            
    //---------------------------------- 3 - buffer allocation
    // Create a buffer big enough to hold the video data,
    // make sure the pointer is 32-byte aligned.
    // also the rgb image that people will grab
    
    pixels = new unsigned char[width*height];
    
    // check for any cameras plugged in
    int waitIterations = 0;
    while( PvCameraCount() < 1 ) {
        ofSleepMillis(250);
        waitIterations++;
        
        if( waitIterations > 8 ) {
            ofLog(OF_LOG_ERROR, "error: no camera found");
            return false;        
        }
    }


    //---------------------------------- 4 - device selection
    
    tPvUint32 count, connected;
    tPvCameraInfo list[maxConcurrentCams];

    count = PvCameraList( list, maxConcurrentCams, &connected );
    if(count >= 1) {
        bool bSelectedDevicePresent = false;
        if(bChooseDevice) {
            //check if selected device is available
            for( int i=0; i<count; ++i) {
                if( deviceID == list[i].UniqueId ) {
                    bSelectedDevicePresent = true;
                    cameraUID = list[i].UniqueId;
                }
            }
        }
        
        if( !bSelectedDevicePresent ){
            cameraUID = list[0].UniqueId;
            ofLog(OF_LOG_NOTICE, "cannot find selected camera -> defaulting to first available");
            ofLog(OF_LOG_VERBOSE, "there is currently an arbitrary hard limit of %i concurrent cams", maxConcurrentCams);
        }        
    } else {
        ofLog(OF_LOG_ERROR, "no cameras available");
        return false;     
    }
        
    
    //---------------------------------- 5 - final initialization steps
    
    ret = PvCameraOpen( cameraUID, ePvAccessMaster, &cameraHandle );
    if( ret == ePvErrSuccess ){
        ofLog(OF_LOG_VERBOSE, "camera opened");
    } else {
        if( ret == ePvErrAccessDenied ) {
            ofLog(OF_LOG_ERROR, "camera access denied, probably already in use");
        }
        ofLog(OF_LOG_ERROR, "failed to open camera");
        return false;     
    }


    unsigned long FrameSize = 0;
    ret = PvAttrUint32Get( cameraHandle, "TotalBytesPerFrame", &FrameSize );
    if( ret == ePvErrSuccess ){
        // allocate the buffer for the single frame we need
        cameraFrame.ImageBuffer = new char[FrameSize];
        cameraFrame.ImageBufferSize = FrameSize;    
        ofLog(OF_LOG_VERBOSE, "camera asked for TotalBytesPerFrame");
    } else { 
        ofLog(OF_LOG_ERROR, "failed to allocate capture buffer");
        return false;    
    }    
    
    
    ret = PvCaptureStart(cameraHandle);
    if( ret == ePvErrSuccess ){
        ofLog(OF_LOG_VERBOSE, "camera set to capture mode");
    } else { 
        if( ret == ePvErrUnplugged ){
            ofLog(OF_LOG_ERROR, "cannot start capture, camera was unplugged");
        }
        ofLog(OF_LOG_ERROR, "cannot set to capture mode");
        return false;    
    }
    
    
    ret = PvAttrEnumSet(cameraHandle,"FrameStartTriggerMode","Freerun");
    if( ret == ePvErrSuccess ){
        ofLog(OF_LOG_VERBOSE, "camera set to continuous mode");
    } else {
        ofLog(OF_LOG_ERROR, "cannot set to continuous mode");
        return false;    
    }    
    
    
    ret = PvCommandRun(cameraHandle,"AcquisitionStart");
    if( ret == ePvErrSuccess ){
        ofLog(OF_LOG_VERBOSE, "camera continuous acquisition started");
    } else {
        // if that fail, we reset the camera to non capture mode
        PvCaptureEnd(cameraHandle) ;
        ofLog(OF_LOG_ERROR, "cannot start continuous acquisition");
        return false;    
    }
    
    bGrabberInited = true;
    //loadSettings();    
    ofLog(OF_LOG_NOTICE,"camera is ready now");  
                
                
    //---------------------------------- 6 - setup texture if needed

    if (bUseTexture){
        // create the texture, set the pixels to black and
        // upload them to the texture (so we at least see black until the callback delivers a frame)
        tex.allocate(width,height,GL_LUMINANCE);
        memset(pixels, 0, width*height);
        tex.loadData(pixels, width, height, GL_LUMINANCE);
    }

    // we are done
    return true;
}
Example #17
// setup event channel
// return value: true == success, false == fail
bool EventSetup()
{
	unsigned long EventBitmask;
	tPvErr errCode;
	
	// check if events supported with this camera firmware
	if (PvAttrExists(GCamera.Handle,"EventsEnable1") == ePvErrNotFound)
	{
        printf("This camera does not support event notifications.\n");
        return false;
	}
	
	//Clear all events
	//EventsEnable1 is a bitmask of all events. Bits correspond to last two digits of EventId.
	// e.g: Bit 1 is EventAcquisitionStart, Bit 2 is EventAcquisitionEnd, Bit 10 is EventSyncIn1Rise. 
    if ((errCode = PvAttrUint32Set(GCamera.Handle,"EventsEnable1",0)) != ePvErrSuccess)
	{
		printf("Set EventsEnable1 err: %u\n", errCode);
		return false;
	}
            
	//Set individual events (could do in one step with EventsEnable1).
	if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventSelector","AcquisitionStart")) != ePvErrSuccess)
	{
		printf("Set EventsSelector err: %u\n", errCode);
		return false;
	}
    if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventNotification","On")) != ePvErrSuccess)
	{
		printf("Set EventsNotification err: %u\n", errCode);
		return false;
	}

	if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventSelector","AcquisitionEnd")) != ePvErrSuccess)
	{
		printf("Set EventsSelector err: %u\n", errCode);
		return false;
	}
    if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventNotification","On")) != ePvErrSuccess)
	{
		printf("Set EventsNotification err: %u\n", errCode);
		return false;
	}

	if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventSelector","FrameTrigger")) != ePvErrSuccess)
	{
		printf("Set EventsSelector err: %u\n", errCode);
		return false;
	}
    if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventNotification","On")) != ePvErrSuccess)
	{
		printf("Set EventsNotification err: %u\n", errCode);
		return false;
	}
	
	//Get and print bitmask
	PvAttrUint32Get(GCamera.Handle,"EventsEnable1", &EventBitmask);
	printf("Events set. EventsEnable1 bitmask: %lu\n", EventBitmask);

    //register callback function
	if ((errCode = PvCameraEventCallbackRegister(GCamera.Handle,F_CameraEventCallback,NULL)) != ePvErrSuccess)
    {
		printf("PvCameraEventCallbackRegister err: %u\n", errCode);
        return false;
    }     
	return true;
}
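EventSetup() registers F_CameraEventCallback, which is not included in this listing. Going by the PvAPI SDK's camera-events sample, the callback has roughly the shape sketched below; treat the exact signature and the tPvCameraEvent fields as assumptions to be checked against PvApi.h:

// hedged sketch of the registered event callback (a calling-convention macro such as
// _STDCALL/PVDECL may be required on Windows builds)
void F_CameraEventCallback(void* Context, tPvHandle Camera,
                           const tPvCameraEvent* EventList, unsigned long EventListLength)
{
    // report each event delivered in this batch
    for (unsigned long i = 0; i < EventListLength; i++)
        printf("Camera event received, EventId: %u\n", (unsigned int)EventList[i].EventId);
}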
Example #18
bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
{
    tPvErr error;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
    {
        tPvUint32 currHeight;

        PvAttrUint32Get(Camera.Handle, "Height", &currHeight);

        stopCapture();
        // Reallocate Frames
        if (!resizeCaptureFrame(value, currHeight))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_FRAME_HEIGHT:
    {
        tPvUint32 currWidth;

        PvAttrUint32Get(Camera.Handle, "Width", &currWidth);

        stopCapture();

        // Reallocate Frames
        if (!resizeCaptureFrame(currWidth, value))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_MONOCROME:
        if (value==1)
        {
            char pixelFormat[256];
            PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
            if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0)
            {
                monocrome=true;
            }
            else
                return false;
        }
        else
            monocrome=false;
        break;
    case CV_CAP_PROP_EXPOSURE:
        if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        if (value==-1)
        {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
        else
        {
            cv::String ip=cv::format("%d.%d.%d.%d", ((unsigned int)value>>24)&255, ((unsigned int)value>>16)&255, ((unsigned int)value>>8)&255, (unsigned int)value&255);
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                break;
            else
                return false;
        }
    case CV_CAP_PROP_GAIN:
        if (PvAttrUint32Set(Camera.Handle,"GainValue",(tPvUint32)value)!=ePvErrSuccess)
        {
            return false;
        }
        break;
    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
        if (value==0)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
        else if (value==1)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn1");
        else if (value==2)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn2");
        else if (value==3)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "FixedRate");
        else if (value==4)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Software");
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
        if (value >= 1 && value <= 8)
            error = PvAttrUint32Set(Camera.Handle, "DecimationHorizontal", value);
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
        if (value >= 1 && value <= 8)
            error = PvAttrUint32Set(Camera.Handle, "DecimationVertical", value);
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_BINNINGX:
        error = PvAttrUint32Set(Camera.Handle, "BinningX", value);
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_BINNINGY:
        error = PvAttrUint32Set(Camera.Handle, "BinningY", value);
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    default:
        return false;
    }
    return true;
}
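From the application side these property cases are reached through VideoCapture::set. A hypothetical call sequence, using the same legacy constant names that appear in the switch above (3 selects "FixedRate" per the trigger-mode case):

// open the first PvAPI camera and push a few properties through setProperty()
cv::VideoCapture cap(0 + CV_CAP_PVAPI);
cap.set(CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE, 3); // -> "FixedRate"
cap.set(CV_CAP_PROP_EXPOSURE, 10000);                // forwarded to "ExposureValue"
cap.set(CV_CAP_PROP_GAIN, 10);                       // forwarded to "GainValue"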
Example #19
// Initialize camera input
bool CvCaptureCAM_PvAPI::open( int index )
{
    tPvCameraInfo cameraList[MAX_CAMERAS];

    tPvCameraInfo  camInfo;
    tPvIpSettings ipSettings;


    if (PvInitialize()) {
    }
    //return false;

    Sleep(1000);

    //close();

    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);

    if (numCameras <= 0 || index >= numCameras)
        return false;

    Camera.UID = cameraList[index].UniqueId;

    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings))
    {
        /*
        struct in_addr addr;
        addr.s_addr = ipSettings.CurrentIpAddress;
        printf("Current address:\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpSubnet;
        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpGateway;
        printf("Current gateway:\t%s\n",inet_ntoa(addr));
        */
    }
    else
    {
        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
        return false;
    }


    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
    {
        tPvUint32 frameWidth, frameHeight;
        unsigned long maxSize;

        // By Default, try to set the pixel format to Mono8.  This can be changed later
        // via calls to setProperty. Some colour cameras (i.e. the Manta line) have a default
        // image mode of Bayer8, which is currently unsupported, so Mono8 is a safe bet for
        // startup.

        monocrome = (PvAttrEnumSet(Camera.Handle, "PixelFormat", "Mono8") == ePvErrSuccess);

        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);

        // Determine the maximum packet size supported by the system (ethernet adapter)
        // and then configure the camera to use this value.  If the system's NIC only supports
        // an MTU of 1500 or lower, this will automatically configure an MTU of 1500.
        // 8228 is the optimal size described by the API in order to enable jumbo frames

        maxSize = 8228;
        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
            return false;

        resizeCaptureFrame(frameWidth, frameHeight);

        return startCapture();

    }
    fprintf(stderr,"Error cannot open camera\n");
    return false;

}
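Examples #18, #19 and #22 call a resizeCaptureFrame() helper that this listing omits. Its presumed job, inferred only from how it is used (push the new geometry to the camera, then rebuild the frame buffers), might be sketched as:

// hedged sketch only -- the real helper also handles Mono8/Mono16 formats and frees old buffers
bool CvCaptureCAM_PvAPI::resizeCaptureFrame(int frameWidth, int frameHeight)
{
    tPvUint32 frameSize = 0;

    // push the new geometry to the camera
    if (PvAttrUint32Set(Camera.Handle, "Width",  (tPvUint32)frameWidth)  != ePvErrSuccess ||
        PvAttrUint32Set(Camera.Handle, "Height", (tPvUint32)frameHeight) != ePvErrSuccess)
        return false;

    // re-read the frame size and point the capture frame at a fresh image buffer
    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
    frame = cvCreateImage(cvSize(frameWidth, frameHeight), IPL_DEPTH_8U, 3);
    Camera.Frame.ImageBufferSize = frameSize;
    Camera.Frame.ImageBuffer = frame->imageData;
    return true;
}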
Example #20
// Initialize camera input
bool CvCaptureCAM_PvAPI::open( int index )
{
    tPvCameraInfo cameraList[MAX_CAMERAS];
    
    tPvCameraInfo  camInfo;
    tPvIpSettings ipSettings;

 
    if (PvInitialize()) {
    }
        //return false;
        
    Sleep(1000);

    //close();
    
    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);

    if (numCameras <= 0 || index >= numCameras)
        return false;

    Camera.UID = cameraList[index].UniqueId;    

    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings)) {
        /*
        struct in_addr addr;
        addr.s_addr = ipSettings.CurrentIpAddress;
        printf("Current address:\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpSubnet;
        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpGateway;
        printf("Current gateway:\t%s\n",inet_ntoa(addr));
        */
    }
    else {
        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
        return false;
    }


    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
    {
    
        // Set Pixel Format to Bgr24 to follow conventions
        /*Errcode = PvAttrEnumSet(Camera.Handle, "PixelFormat", "Bgr24");
        if (Errcode != ePvErrSuccess)
        {
            fprintf(stderr, "PvAPI: couldn't set PixelFormat to Bgr24\n");
            return NULL;
        }
        */
        tPvUint32 frameWidth, frameHeight, frameSize;
        unsigned long maxSize;
        char pixelFormat[256];
        PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
        PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
        maxSize = 8228;
        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
            return false;
        if (strcmp(pixelFormat, "Mono8")==0) {
            grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
            grayframe->widthStep = (int)frameWidth;
            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
            frame->widthStep = (int)frameWidth*3;
            Camera.Frame.ImageBufferSize = frameSize;
            Camera.Frame.ImageBuffer = grayframe->imageData;
        }
        else if (strcmp(pixelFormat, "Mono16")==0) {
            grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
            grayframe->widthStep = (int)frameWidth;
            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 3);
            frame->widthStep = (int)frameWidth*3;
            Camera.Frame.ImageBufferSize = frameSize;
            Camera.Frame.ImageBuffer = grayframe->imageData;
        }
        else if (strcmp(pixelFormat, "Bgr24")==0) {
            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
            frame->widthStep = (int)frameWidth*3;
            Camera.Frame.ImageBufferSize = frameSize;
            Camera.Frame.ImageBuffer = frame->imageData;
        }
        else
            return false;
        // Start the camera
        PvCaptureStart(Camera.Handle);

        // Set the camera to capture continuously
        if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
        {
            fprintf(stderr,"Could not set Prosilica Acquisition Mode\n");
            return false;
        }
        
        if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
        {
            fprintf(stderr,"Could not start Prosilica acquisition\n");
            return false;
        }
        
        if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
        {
            fprintf(stderr,"Error setting Prosilica trigger to \"Freerun\"");
            return false;
        }
        
        return true;
    }
    fprintf(stderr,"Error cannot open camera\n");
    return false;

}
Example #21
void Camera::setAttributeEnum(const std::string &name, const std::string &value)
{
  std::string err_msg = "Couldn't set attribute " + name;
  CHECK_ERR( PvAttrEnumSet(handle_, name.c_str(), value.c_str()),
             err_msg.c_str());
}
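A hypothetical call site (the camera object name and the attribute/value pair are chosen only for illustration):

// e.g. switch to fixed-rate triggering through the generic enum setter
cam.setAttributeEnum("FrameStartTriggerMode", "FixedRate");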
Example #22
bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
{
    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
    {
        tPvUint32 currHeight;

        PvAttrUint32Get(Camera.Handle, "Height", &currHeight);

        stopCapture();
        // Reallocate Frames
        if (!resizeCaptureFrame(value, currHeight))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_FRAME_HEIGHT:
    {
        tPvUint32 currWidth;

        PvAttrUint32Get(Camera.Handle, "Width", &currWidth);

        stopCapture();

        // Reallocate Frames
        if (!resizeCaptureFrame(currWidth, value))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_MONOCROME:
        if (value==1)
        {
            char pixelFormat[256];
            PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
            if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0)
            {
                monocrome=true;
            }
            else
                return false;
        }
        else
            monocrome=false;
        break;
    case CV_CAP_PROP_EXPOSURE:
        if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        if (value==-1)
        {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
        else
        {
            cv::String ip=cv::format("%d.%d.%d.%d", ((int)value>>24)&255, ((int)value>>16)&255, ((int)value>>8)&255, (int)value&255);
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                break;
            else
                return false;
        }
    case CV_CAP_PROP_GAIN:
        if (PvAttrUint32Set(Camera.Handle,"GainValue",(tPvUint32)value)!=ePvErrSuccess)
        {
            return false;
        }
        break;
    default:
        return false;
    }
    return true;
}
Example #23
bool CameraGigE::onInit() {
	LOG(LTRACE) << "CameraGigE::initialize\n";

	h_onTrigger.setup(this, &CameraGigE::onTrigger);
	registerHandler("onTrigger", &h_onTrigger);

	newImage = registerEvent("newImage");
	endOfSequence = registerEvent("endOfSequence");

	registerStream("out_img", &out_img);

	if (!props.address.empty()) {
		unsigned long ip = inet_addr(props.address.c_str());

		if (PvCameraOpenByAddr(ip, ePvAccessMaster, &cHandle) != ePvErrSuccess) {
			LOG(LERROR) << "Unable to open camera on address "
					<< props.address << " \n";
			return false;
		}

	} else if (props.uid != 0) {
		if (PvCameraOpen(props.uid, ePvAccessMaster, &cHandle) != ePvErrSuccess) {
			LOG(LERROR) << "Unable to open camera with uid " << props.uid
					<< " \n";
			return false;
		}
	} else {
		return false;
	}

	// Set parameters
	tPvErr err;
	///	 Exposure
	if (!props.exposureMode.empty()) {
		if ((err = PvAttrEnumSet(cHandle, "ExposureMode",
				props.exposureMode.c_str())) == ePvErrSuccess) {
			if (props.exposureMode == "Manual") {
				if ((err = PvAttrUint32Set(cHandle, "ExposureValue",
						props.exposureValue / 1000000.0)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "ExposureValue", &min, &max);
						LOG(LWARNING) << "ExposureValue : "
								<< props.exposureValue
								<< " is out of range, valid range [ "
								<< (double) min / 1000000.0 << " , "
								<< (double) max / 1000000.0 << " ]\n";
					}
				}
			}
		} else {
			LOG(LWARNING) << "Unable to set ExposureMode \n";
		}
	}
	/// Gain
	if (!props.gainMode.empty()) {
		if ((err = PvAttrEnumSet(cHandle, "GainMode", props.gainMode.c_str()))
				== ePvErrSuccess) {
			if (props.gainMode == "Manual") {
				if ((err = PvAttrUint32Set(cHandle, "GainValue",
						props.gainValue)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "GainValue", &min, &max);
						LOG(LWARNING) << "GainValue : " << props.gainValue
								<< " is out of range, valid range [ "
								<< (double) min << " , " << (double) max
								<< " ]\n";
					}
				}
			}
		} else {
			LOG(LWARNING) << "Unable to set GainMode \n";
		}
	}
	///	White Balance
	if (!props.whitebalMode.empty()) {
		if ((err = PvAttrEnumSet(cHandle, "WhitebalMode",
				props.whitebalMode.c_str())) == ePvErrSuccess) {
			if (props.whitebalMode == "Manual") {
				if ((err = PvAttrUint32Set(cHandle, "WhitebalValueRed",
						props.whitebalValueRed)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "WhitebalValueRed", &min,
								&max);
						LOG(LWARNING) << "WhitebalValueRed : "
								<< props.whitebalValueRed
								<< " is out of range, valid range [ "
								<< (double) min << " , " << (double) max
								<< " ]\n";
					}
				}

				if ((err = PvAttrUint32Set(cHandle, "WhitebalValueBlue",
						props.whitebalValueBlue)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "WhitebalValueBlue", &min,
								&max);
						LOG(LWARNING) << "WhitebalValueBlue : "
								<< props.whitebalValueBlue
								<< " is out of range, valid range [ "
								<< (double) min << " , " << (double) max
								<< " ]\n";
					}
				}
			}
		} else {
			LOG(LWARNING) << "Unable to set WhitebalMode " << err << "\n";
		}
	}

	if ((err = PvAttrEnumSet(cHandle, "MirrorX", props.mirrorX ? "On" : "Off"))
			!= ePvErrSuccess) {

	}

	if ((err = PvAttrEnumSet(cHandle, "PixelFormat", props.pixelFormat.c_str()))
			!= ePvErrSuccess) {
		LOG(LERROR) << "Unable to set pixelformat " << err;
	}

	if ((err = PvAttrUint32Set(cHandle, "Height", props.height))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "Height", &min, &max);
			LOG(LWARNING) << "Height : " << props.height
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "Width", props.width)) != ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "Width", &min, &max);
			LOG(LWARNING) << "Width : " << props.width
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "RegionX", props.regionX))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "RegionX", &min, &max);
			LOG(LWARNING) << "RegionX : " << props.regionX
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "RegionY", props.regionY))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "RegionY", &min, &max);
			LOG(LWARNING) << "RegionY : " << props.regionY
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "BinningX", props.binningX))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "BinningX", &min, &max);
			LOG(LWARNING) << "BinningX : " << props.binningX
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "BinningY", props.binningY))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "BinningY", &min, &max);
			LOG(LWARNING) << "BinningY : " << props.binningY
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}
	// ----------------

	PvAttrEnumSet(cHandle, "FrameStartTriggerMode", "Freerun");

	unsigned long frameSize = 0;

	if (PvAttrUint32Get(cHandle, "TotalBytesPerFrame", &frameSize)
			!= ePvErrSuccess) {
		return false;
	}

	frame.ImageBuffer = new char[frameSize];
	frame.ImageBufferSize = frameSize;

	return true;
}
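onInit() stops short of streaming: the capture stream is never started and the allocated frame is never queued, so a later handler presumably does that. A minimal sketch of such a step, reusing cHandle and frame from above (the method name is hypothetical), could be:

bool CameraGigE::onStart() {
	// start the driver capture stream, queue the single frame and begin acquisition
	if (PvCaptureStart(cHandle) != ePvErrSuccess)
		return false;
	if (PvCaptureQueueFrame(cHandle, &frame, NULL) != ePvErrSuccess ||
	    PvAttrEnumSet(cHandle, "AcquisitionMode", "Continuous") != ePvErrSuccess ||
	    PvCommandRun(cHandle, "AcquisitionStart") != ePvErrSuccess) {
		// undo the partial setup
		PvCaptureQueueClear(cHandle);
		PvCaptureEnd(cHandle);
		return false;
	}
	return true;
}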
Example #24
// Initialize camera input
bool CvCaptureCAM_PvAPI::open( int index )
{

    tPvCameraInfo cameraInfo[MAX_CAMERAS];
    if (PvInitialize())
        return false;
        
    usleep(250000);
    
    //close();
    int numCameras = PvCameraList(cameraInfo, MAX_CAMERAS, NULL);
    if (numCameras <= 0 || index >= numCameras)
        return false;

    Camera.UID = cameraInfo[index].UniqueId;
    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
    {
    

        tPvErr Errcode;

        // Set Pixel Format to Bgr24 to follow conventions
        Errcode = PvAttrEnumSet(Camera.Handle, "PixelFormat", "Bgr24");
        if (Errcode != ePvErrSuccess)
        {
            fprintf(stderr, "PvAPI: couldn't set PixelFormat to Bgr24\n");
            return false;
        }

        tPvUint32 frameWidth, frameHeight, frameSize;

        PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
        

        // Create an image (24 bits RGB Color image)
        frame = cvCreateImage(cvSize(frameWidth, frameHeight), IPL_DEPTH_8U, 3);
        frame->widthStep = frameWidth*3;

        Camera.Frame.ImageBufferSize = frameSize;
        Camera.Frame.ImageBuffer = frame->imageData;

        // Start the camera
        PvCaptureStart(Camera.Handle);

        // Set the camera to capture continuously
        if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
        {
            fprintf(stderr,"Could not set Prosilica Acquisition Mode\n");
            return false;
        }
        
        if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
        {
            fprintf(stderr,"Could not start Prosilica acquisition\n");
            return false;
        }
        
        if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
        {
            fprintf(stderr,"Error setting Prosilica trigger to \"Freerun\"");
            return false;
        }
        
        return true;
    }
    return false;

}
Example #25
// set the value of a given attribute from a value encoded in a string
bool String2Value(tPvHandle aCamera,const char* aLabel,tPvDatatype aType,char* aValue)
{
    switch(aType)
    {           
        case ePvDatatypeString:
        {   
            if(!PvAttrStringSet(aCamera,aLabel,aValue))
                return true;
            else
                return false;     
        }
        case ePvDatatypeEnum:
        {            
            if(!PvAttrEnumSet(aCamera,aLabel,aValue))
                return true;
            else
                return false;
        }
        case ePvDatatypeUint32:
        {
            tPvUint32 lValue = atol(aValue);
            tPvUint32 lMin,lMax;
            
            if(!PvAttrRangeUint32(aCamera,aLabel,&lMin,&lMax))
            {
                if(lMin > lValue)
                    lValue = lMin;
                else if(lMax < lValue)
                    lValue = lMax;

                if(!PvAttrUint32Set(aCamera,aLabel,lValue))
                    return true;
                else
                    return false;
            }
            else
                return false;
        }
        case ePvDatatypeFloat32:
        {
            tPvFloat32 lValue = (tPvFloat32)atof(aValue);
            tPvFloat32 lMin,lMax;
            
            if(!PvAttrRangeFloat32(aCamera,aLabel,&lMin,&lMax))
            {
                if(lMin > lValue)
                    lValue = lMin;
                else if(lMax < lValue)
                    lValue = lMax;

                if(!PvAttrFloat32Set(aCamera,aLabel,lValue))
                    return true;
                else
                    return false;
            }
            else
                return false;
        }
        default:
            return false;
    }       
}
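String2Value() needs the attribute's datatype, which the caller presumably looks up first. A short sketch of that dispatch, assuming the standard PvAttrInfo query from PvApi.h (the attribute name and value are arbitrary examples):

// look up the datatype of "ExposureValue", then set it from a string
tPvAttributeInfo info;
char value[] = "15000";
if (PvAttrInfo(aCamera, "ExposureValue", &info) == ePvErrSuccess)
    String2Value(aCamera, "ExposureValue", info.Datatype, value);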