Example #1
// thread entry point: __stdcall signature on Windows, pthread signature elsewhere
#ifdef _WINDOWS
unsigned long __stdcall ThreadFunc(void *pContext)
#else
void *ThreadFunc(void *pContext)
#endif
{
    unsigned long FrameCompleted, FrameDropped, PacketReceived, PacketMissed;
    float FrameRate;
    tPvErr Err;
    
    //StatFramesCompleted increments when a queued frame returns with tPvFrame.Status = ePvErrSuccess
	//StatFramesDropped increments when a queued frame returns with tPvFrame.Status = ePvErrDataMissing
	//In a situation where a camera returns a frame, but there is no frame queued for it, THESE
	//STATS DO NOT INCREMENT. tPvFrame.FrameCount increments regardless of host queuing, and is a better measure
	//for what frame is being returned from the camera. See CameraStart, where we check this parameter,
	//for the case where a frame is returned from camera with no frame queued on host.
	while(!GCamera.Abort &&
          ((Err = PvAttrUint32Get(GCamera.Handle,"StatFramesCompleted",&FrameCompleted)) == ePvErrSuccess) &&
          ((Err = PvAttrUint32Get(GCamera.Handle,"StatFramesDropped",&FrameDropped)) == ePvErrSuccess) &&
		  ((Err = PvAttrUint32Get(GCamera.Handle,"StatPacketsMissed",&PacketMissed)) == ePvErrSuccess) &&
		  ((Err = PvAttrUint32Get(GCamera.Handle,"StatPacketsReceived",&PacketReceived)) == ePvErrSuccess) &&
          ((Err = PvAttrFloat32Get(GCamera.Handle,"StatFrameRate",&FrameRate)) == ePvErrSuccess))
    {
        printf("FrmCmp : %5lu  FrmDrp : %5lu PckCmp : %5lu PckMss : %5lu FrmRt : %5.2f\r", FrameCompleted, FrameDropped, PacketReceived, PacketMissed, FrameRate);
		Sleep(20);
	}

    return 0;
}
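A hedged sketch of how this stats loop might be started; the POSIX thread variant is assumed here, and aborting/joining is left to the caller as in the original sample.

// Hedged usage sketch (POSIX build assumed): run the stats loop on its own thread.
#include <pthread.h>

void SpawnStatsThread(pthread_t* aThread)
{
    // ThreadFunc polls the Stat* attributes until GCamera.Abort is set
    pthread_create(aThread, NULL, ThreadFunc, NULL);
}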
Example #2
void cameraGetROI(tCamera* camera, tPvUint32 *ROI) {
  if(PvAttrUint32Get(camera->Handle, "RegionX", &ROI[0]))
    throw "failed to get RegionX parameter";
  if(PvAttrUint32Get(camera->Handle, "RegionY", &ROI[1]))
    throw "failed to get RegionY parameter";
  if(PvAttrUint32Get(camera->Handle, "Width",   &ROI[2]))
    throw "failed to get Width parameter";
  if(PvAttrUint32Get(camera->Handle, "Height",  &ROI[3]))
    throw "failed to get Height parameter";
}
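A hedged usage sketch of the helper above; the tCamera struct, an already-opened camera, and the throw-a-C-string error convention are assumptions carried over from the surrounding examples.

// Usage sketch (assumes an opened tCamera, as in the other examples on this page).
void printRoi(tCamera* camera) {
  tPvUint32 roi[4];                 // RegionX, RegionY, Width, Height
  try {
    cameraGetROI(camera, roi);
    printf("ROI: x=%lu y=%lu w=%lu h=%lu\n",
           (unsigned long)roi[0], (unsigned long)roi[1],
           (unsigned long)roi[2], (unsigned long)roi[3]);
  } catch (const char* msg) {
    fprintf(stderr, "%s\n", msg);
  }
}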
Example #3
	bool OmniCamera::connect(void) {
		if(isConnected())
			return true;

		tPvCameraInfoEx info[1];
		 unsigned long frameSize = 0;

		//PvLinkCallbackRegister(CameraEventCB,ePvLinkAdd,NULL);
        //PvLinkCallbackRegister(CameraEventCB,ePvLinkRemove,NULL);        

		if(!PvInitialize()) {

			while(!PvCameraCount()) 
				Sleep(250);


			unsigned long numCameras = PvCameraListEx(info, 1, NULL, sizeof(tPvCameraInfoEx));
								
			if ((numCameras == 1) && (info[0].PermittedAccess & ePvAccessMaster)) {
				
				_camera.UID = info[0].UniqueId;

				if(!PvCameraOpen(_camera.UID, ePvAccessMaster, &(_camera.handle))) {

					if(!PvAttrUint32Get(_camera.handle,"TotalBytesPerFrame",&frameSize)) {

						_camera.frame.ImageBuffer = new char[frameSize];
						
						unsigned long format=0;
						PvAttrEnumSet(_camera.handle,"PixelFormat","Bayer8");
						char text[100];
						PvAttrEnumGet(_camera.handle,"PixelFormat",text,sizeof(text),&format);
						printf("format %d %s\n",format,text);
						if(_camera.frame.ImageBuffer) {
							_camera.frame.ImageBufferSize = frameSize;
									
							PvAttrUint32Get(_camera.handle,"Width",&_width);
							PvAttrUint32Get(_camera.handle,"Height",&_height);
							PvCaptureStart(_camera.handle);
							PvAttrEnumSet(_camera.handle, "AcquisitionMode", "Continuous");
							PvCommandRun(_camera.handle, "AcquisitionStart");

							_connected = true;
							return true;
						}
					}
				}
			}				
		}
		_connected = false;
		return false;
	}
Example #4
double CvCaptureCAM_PvAPI::getProperty( int property_id )
{
    tPvUint32 nTemp;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        PvAttrUint32Get(Camera.Handle, "Width", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FRAME_HEIGHT:
        PvAttrUint32Get(Camera.Handle, "Height", &nTemp);
        return (double)nTemp;
    }
    return -1.0;
}
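For context, a hedged sketch of how this getter is normally reached from user code through the OpenCV capture API; it assumes an OpenCV build with the PvAPI backend (CV_CAP_PVAPI) enabled.

// Usage sketch (assumes OpenCV built with PvAPI support).
#include <opencv2/highgui/highgui.hpp>
#include <cstdio>

int main()
{
    cv::VideoCapture cap(0 + CV_CAP_PVAPI);       // dispatches to CvCaptureCAM_PvAPI
    if (!cap.isOpened())
        return 1;
    double w = cap.get(CV_CAP_PROP_FRAME_WIDTH);  // ends up in getProperty() above
    double h = cap.get(CV_CAP_PROP_FRAME_HEIGHT);
    printf("frame size: %.0f x %.0f\n", w, h);
    return 0;
}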
Example #5
void Camera::setup()
{
  // adjust packet size according to the current network capacity
  tPvUint32 maxPacketSize = 9000;
  PvCaptureAdjustPacketSize(handle_, maxPacketSize);

  // set data rate to the max
  unsigned long max_data_rate = getMaxDataRate();
  if (max_data_rate < GIGE_MAX_DATA_RATE) {
    ROS_WARN("Detected max data rate is %lu bytes/s, typical maximum data rate for a "
             "GigE port is %lu bytes/s. Are you using a GigE network card and cable?\n",
             max_data_rate, GIGE_MAX_DATA_RATE);
  }
  setAttribute("StreamBytesPerSecond", max_data_rate);

  // capture whole frame by default
  setBinning();
  setRoiToWholeFrame();
  
  // query for attributes (TODO: more)
  CHECK_ERR( PvAttrUint32Get(handle_, "TotalBytesPerFrame", &frameSize_),
             "Unable to retrieve frame size" );
  
  // allocate frame buffers
  frames_ = new tPvFrame[bufferSize_];
  memset(frames_, 0, sizeof(tPvFrame) * bufferSize_);
  for (unsigned int i = 0; i < bufferSize_; ++i)
  {
    frames_[i].ImageBuffer = new char[frameSize_];
    frames_[i].ImageBufferSize = frameSize_;
    frames_[i].Context[0] = (void*)this; // for frameDone callback
  }
}
Example #6
void Camera::getAttribute(const std::string &name, tPvUint32 &value)
{
  std::string err_msg = "Couldn't get attribute " + name;
  CHECK_ERR( PvAttrUint32Get(handle_, name.c_str(), &value),
             err_msg.c_str() );
}
Example #7
unsigned long Camera::guid()
{
  unsigned long id;
  CHECK_ERR( PvAttrUint32Get(handle_, "UniqueId", &id),
             "Couldn't retrieve unique id" );
  return id;
}
Example #8
// open camera, allocate memory
// return value: true == success, false == fail
bool CameraSetup()
{
    tPvErr errCode;
	unsigned long FrameSize = 0;

	//open camera
	if ((errCode = PvCameraOpen(GCamera.UID,ePvAccessMaster,&(GCamera.Handle))) != ePvErrSuccess)
	{
		if (errCode == ePvErrAccessDenied)
			printf("PvCameraOpen returned ePvErrAccessDenied:\nCamera already open as Master, or camera wasn't properly closed and still waiting to HeartbeatTimeout.");
		else
			printf("PvCameraOpen err: %u\n", errCode);
		return false;
	}

	// Calculate frame buffer size
    if((errCode = PvAttrUint32Get(GCamera.Handle,"TotalBytesPerFrame",&FrameSize)) != ePvErrSuccess)
	{
		printf("CameraSetup: Get TotalBytesPerFrame err: %u\n", errCode);
		return false;
	}

    // allocate image buffer
    GCamera.Frame.ImageBuffer = new char[FrameSize];
    if(!GCamera.Frame.ImageBuffer)
	{
		printf("CameraSetup: Failed to allocate buffers.\n");
		return false;
	}
	GCamera.Frame.ImageBufferSize = FrameSize;

	return true;
}
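A minimal cleanup counterpart is sketched below; it is an assumption (not part of the listed sample) and only undoes what CameraSetup allocated, using the same GCamera global and PvCameraClose.

// Hedged sketch: release what CameraSetup allocated (assumes the same GCamera global).
void CameraUnsetup()
{
    if (GCamera.Handle)
    {
        PvCameraClose(GCamera.Handle);
        GCamera.Handle = NULL;
    }
    delete [] (char*)GCamera.Frame.ImageBuffer;
    GCamera.Frame.ImageBuffer = NULL;
    GCamera.Frame.ImageBufferSize = 0;
}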
Example #9
// setup and start streaming
void cameraStart(tCamera* camera, tPvUint32 packetSize) {
  cameraSetPixelFormat(camera, "Mono16");
  cameraSetExpo(camera, 1000);

  //tPvUint32 ROI[4]={0,0,493,659};
  //cameraSetROI(camera,ROI);

  // Auto adjust the packet size to max supported by the network, up to a max of 8228.
  // NOTE: In Vista, if the packet size on the network card is set lower than 8228,
  //       this call may break the network card's driver. See release notes.
  //

  tPvErr err = PvCaptureAdjustPacketSize(camera->Handle,packetSize);
  //err = PvAttrUint32Set(camera->Handle, "PacketSize", 1500);
  if (err != ePvErrSuccess){
    stringstream buf;
    buf << "failed to adjust packet size : " << cameraGetError(err) << endl;
    throw buf.str();
  }

  unsigned long FrameSize = 0;
  err = PvAttrUint32Get(camera->Handle, "TotalBytesPerFrame", &FrameSize);
  if (err != ePvErrSuccess){
    stringstream buf;
    buf << "failed to get TotalBytesPerFrame : " << cameraGetError(err) << endl;
    throw buf.str();
  }

  // allocate the buffer for the single frame we need
  camera->Frame.Context[0] = camera;
  camera->Frame.ImageBuffer = new char[FrameSize];
  if (camera->Frame.ImageBuffer)
    camera->Frame.ImageBufferSize = FrameSize;
  else
    throw "failed allocate Frame.ImageBuffer";


  // start streaming (err is still ePvErrSuccess at this point)
  if (!err) {
    // put the camera in capture mode
    if (!PvCaptureStart(camera->Handle)) {
      // set the frame trigger to free-run
      if (!PvAttrEnumSet(camera->Handle, "FrameStartTriggerMode",
                         "Freerun")) { //"FixedRate" / "Freerun"
        // and start continuous acquisition
        if (PvCommandRun(camera->Handle, "AcquisitionStart")) {
          // if that fails, reset the camera to non-capture mode
          PvCaptureEnd(camera->Handle);
          throw "failed to start continuous acquisition";
        }
      } else
        throw "failed to set the camera in continuous acquisition mode";
    } else
      throw "failed to put the camera in capture mode";
  } else
    throw "failed to get TotalBytesPerFrame parameter";
}
Example #10
double CvCaptureCAM_PvAPI::getProperty( int property_id )
{
    tPvUint32 nTemp;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        PvAttrUint32Get(Camera.Handle, "Width", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FRAME_HEIGHT:
        PvAttrUint32Get(Camera.Handle, "Height", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_EXPOSURE:
        PvAttrUint32Get(Camera.Handle,"ExposureValue",&nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FPS:
        tPvFloat32 nfTemp;
        PvAttrFloat32Get(Camera.Handle, "StatFrameRate", &nfTemp);
        return (double)nfTemp;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        char mEnable[4];   // room for "Off"/"On" plus terminator
        char mIp[16];      // room for a full dotted-quad address
        PvAttrEnumGet(Camera.Handle,"MulticastEnable",mEnable,sizeof(mEnable),NULL);
        if (strcmp(mEnable, "Off") == 0)
        {
            return -1;
        }
        else
        {
            long int ip;
            int a,b,c,d;
            PvAttrStringGet(Camera.Handle, "MulticastIPAddress",mIp,sizeof(mIp),NULL);
            sscanf(mIp, "%d.%d.%d.%d", &a, &b, &c, &d); ip = ((a*256 + b)*256 + c)*256 + d;
            return (double)ip;
        }
    case CV_CAP_PROP_GAIN:
        PvAttrUint32Get(Camera.Handle, "GainValue", &nTemp);
        return (double)nTemp;
    }
    return -1.0;
}
Example #11
// setup and start streaming
bool CameraStart(tCamera* Camera)
{
    unsigned long FrameSize = 0;

    // Auto adjust the packet size to max supported by the network, up to a max of 8228.
    // NOTE: In Vista, if the packet size on the network card is set lower than 8228,
    //       this call may break the network card's driver. See release notes.
    //
    //PvCaptureAdjustPacketSize(Camera->Handle,8228);

    // how big should the frame buffers be?
    if(!PvAttrUint32Get(Camera->Handle,"TotalBytesPerFrame",&FrameSize))
    {
        bool failed = false;

        // allocate the buffer for the single frame we need
        Camera->Frame.Context[0]  = Camera;
        Camera->Frame.ImageBuffer = new char[FrameSize];
        if(Camera->Frame.ImageBuffer)
            Camera->Frame.ImageBufferSize = FrameSize;
        else
            failed = true;

        if(!failed)
        {
            // put the camera in capture mode
            if(!PvCaptureStart(Camera->Handle))
            {
                // set the camera in continuous acquisition mode
                if(!PvAttrEnumSet(Camera->Handle,"FrameStartTriggerMode","Freerun"))
                {
                    // and set the acquisition mode into continuous
                    if(PvCommandRun(Camera->Handle,"AcquisitionStart"))
                    {
                        // if that fails, we reset the camera to non-capture mode
                        PvCaptureEnd(Camera->Handle) ;
                        return false;
                    }
                    else
                        return true;
                }
                else
                    return false;
            }
            else
                return false;
        }
        else
            return false;
    }
    else
        return false;
}
Example #12
// setup and start streaming
bool CameraStart(tCamera* Camera)
{
    unsigned long FrameSize = 0;

    // how big should the frame buffers be?
    if(!PvAttrUint32Get(Camera->Handle,"TotalBytesPerFrame",&FrameSize))
    {
        bool failed = false;

        // allocate the buffer for the single frame we need
        Camera->Frame.Context[0]  = Camera;
        Camera->Frame.ImageBuffer = new char[FrameSize];
        if(Camera->Frame.ImageBuffer)
            Camera->Frame.ImageBufferSize = FrameSize;
        else
            failed = true;

        if(!failed)
        {
            // put the camera in capture mode
            if(!PvCaptureStart(Camera->Handle))
            {
                // set the camera in continuous acquisition mode
                if(!PvAttrEnumSet(Camera->Handle,"FrameStartTriggerMode","Freerun"))
                {
                    // and set the acquisition mode into continuous
                    if(PvCommandRun(Camera->Handle,"AcquisitionStart"))
                    {
                        // if that fails, we reset the camera to non-capture mode
                        PvCaptureEnd(Camera->Handle);
                        return false;
                    }
                    else
                        return true;
                }
                else
                    return false;
            }
            else
                return false;
        }
        else
            return false;
    }
    else
        return false;
}
Example #13
// encode the value of a given attribute in a string
bool Value2String(tPvHandle aCamera,const char* aLabel,tPvDatatype aType,char* aString,unsigned long aLength)
{
    switch(aType)
    {
        case ePvDatatypeString:
        {
            if(!PvAttrStringGet(aCamera,aLabel,aString,aLength,NULL))
                return true;
            else
                return false;
        }
        case ePvDatatypeEnum:
        {
            if(!PvAttrEnumGet(aCamera,aLabel,aString,aLength,NULL))
                return true;
            else
                return false;
        }
        case ePvDatatypeUint32:
        {
            tPvUint32 lValue;

            if(!PvAttrUint32Get(aCamera,aLabel,&lValue))
            {
                sprintf(aString,"%lu",lValue);
                return true;
            }
            else
                return false;

        }
        case ePvDatatypeFloat32:
        {
            tPvFloat32 lValue;

            if(!PvAttrFloat32Get(aCamera,aLabel,&lValue))
            {
                sprintf(aString,"%g",lValue);
                return true;
            }
            else
                return false;
        }
        default:
            return false;
    }
}
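A hedged usage sketch: the attribute's datatype is looked up with PvAttrInfo (as the QueryAttribute examples further down do) and then rendered by Value2String; aCamera is assumed to be an already-open handle.

// Usage sketch (assumes an open tPvHandle named aCamera).
void PrintAttribute(tPvHandle aCamera, const char* aLabel)
{
    tPvAttributeInfo lInfo;
    char lText[128];

    if (!PvAttrInfo(aCamera, aLabel, &lInfo) &&
        Value2String(aCamera, aLabel, lInfo.Datatype, lText, sizeof(lText)))
        printf("%s = %s\n", aLabel, lText);
    else
        printf("%s = ERROR!\n", aLabel);
}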
Example #14
void CameraAdjust(tPvHandle Handle)
{
    unsigned long MaxSize = 16456;

    printf("adjusting ...\n");
  
    if(!PvCaptureAdjustPacketSize(Handle,MaxSize))
    {
        unsigned long Size;

        PvAttrUint32Get(Handle,"PacketSize",&Size);   

        printf("the best packet size is %lu bytes\n",Size);
    }
    else
        printf("sorry, there was an error while trying to adjust the packet size\n");

}
Example #15
// snap and save a frame from the camera
tPvFrame *cameraSnap(tCamera* camera, int nbSnap) {
  unsigned long FrameSize = 0;
  PvAttrUint32Get(camera->Handle, "TotalBytesPerFrame", &FrameSize);

  tPvFrame *frames = new tPvFrame[nbSnap];
  for(int i=0; i<nbSnap; i++) {
    // allocate the buffer for the single frame we need
    frames[i].Context[0] = camera;
    frames[i].ImageBuffer = new char[FrameSize];
    if (frames[i].ImageBuffer)
      frames[i].ImageBufferSize = FrameSize;
    else
      throw "failed allocate Frame.ImageBuffer";
    
  }
  /*
  cerr<<"Warming up !\n";
  PvCaptureQueueFrame(camera->Handle, &(camera->Frame), NULL);
  while(camera->Frame.Status != ePvErrSuccess)
    PvCaptureQueueFrame(camera->Handle, &(camera->Frame), NULL);
  */

  struct timeval myTVstart, myTVend;
  gettimeofday (&myTVstart, NULL);
  for(int i=0; i<nbSnap; i++) {
    if (!PvCaptureQueueFrame(camera->Handle, &(frames[i]), NULL)) {
      int index = 0;
      while ((PvCaptureWaitForFrameDone(camera->Handle, &(frames[i]), 10) == ePvErrTimeout)
             && (index < 100)) {
        index++;
      }
      if (frames[i].Status != ePvErrSuccess)
        cout << "the frame failed to be captured : " << cameraGetError(frames[i].Status) << endl;
    } else
      cout << "failed to enqueue the frame" << endl;
  }
  gettimeofday (&myTVend, NULL);
  double duration = my_difftime (&myTVstart, &myTVend)/1000000.;

  cout << "Acuisition tooks " << duration*1000 << " mseconds, (" << (double)nbSnap/duration << " Hz)" << endl;
  return frames;
}
Example #16
//
// idlPvAttrUint32Get
//
// Get the value of a Uint32 attribute
//
// command line arguments
// argv[0]: IN/FLAG debug
// argv[1]: IN camera index
// argv[2]: IN attribute name
// argv[3]: OUT attribute value
int idlPvAttrUint32Get (int argc, char *argv[])
{
  unsigned long n;
  unsigned long err;
  IDL_STRING *name;
  unsigned long *value;

  debug = *(IDL_INT *) argv[0];
  n = *(unsigned long *) argv[1];
  name = (IDL_STRING *) argv[2];
  value = (unsigned long *) argv[3];

  CHECKINDEX(n);

  err = PvAttrUint32Get(camera[n],
			(const char *) IDL_STRING_STR(name),
			(tPvUint32 *) value);

  return idlPvErrCode(err);
}
Example #17
// setup and start streaming
bool CameraStart()
{
    unsigned long FrameSize = 0;

    // how big should the frame buffers be?
    if(!PvAttrUint32Get(GCamera.Handle,"TotalBytesPerFrame",&FrameSize))
    {
        bool failed = false;

        // allocate the buffer for the single frame we need
        GCamera.Frame.ImageBuffer = new char[FrameSize];
        if(GCamera.Frame.ImageBuffer)
            GCamera.Frame.ImageBufferSize = FrameSize;
        else
            failed = true;

        if(!failed)
        {
            // put the camera in capture mode
            if(!PvCaptureStart(GCamera.Handle))
            {
                // and set the acquisition mode into continuous and software trigger mode
                if(PvAttrEnumSet(GCamera.Handle,"FrameStartTriggerMode","SyncIn1") ||
                   PvCommandRun(GCamera.Handle,"AcquisitionStart"))
                {
                    // if that fail, we reset the camera to non capture mode
                    PvCaptureEnd(GCamera.Handle) ;
                    return false;
                }
                else
                    return true;
            }
            else
                return false;
        }
        else
            return false;
    }
    else
        return false;
}
Example #18
// open camera, allocate memory
// return value: true == success, false == fail
bool CameraSetup()
{
    tPvErr errCode;
	bool failed = false;
	unsigned long FrameSize = 0;

	// open camera
	if ((errCode = PvCameraOpen(GCamera.UID,ePvAccessMaster,&(GCamera.Handle))) != ePvErrSuccess)
	{
		if (errCode == ePvErrAccessDenied)
			printf("PvCameraOpen returned ePvErrAccessDenied:\nCamera already open, or not properly closed.\n");
		else
			printf("PvCameraOpen err: %u\n", errCode);
		return false;
	}

	// Calculate frame buffer size
    if((errCode = PvAttrUint32Get(GCamera.Handle,"TotalBytesPerFrame",&FrameSize)) != ePvErrSuccess)
	{
		printf("CameraSetup: Get TotalBytesPerFrame err: %u\n", errCode);
		return false;
	}

	// allocate the frame buffers
    for(int i=0;i<FRAMESCOUNT && !failed;i++)
    {
        GCamera.Frames[i].ImageBuffer = new char[FrameSize];
        if(GCamera.Frames[i].ImageBuffer)
        {
			GCamera.Frames[i].ImageBufferSize = FrameSize;
		}
        else
		{
			printf("CameraSetup: Failed to allocate buffers.\n");
			failed = true;
		}
    }

	return !failed;
}
Example #19
/*
 * Method:    OpenCamera()
 * Purpose:   open a given camera
 * Comments:  none
 */
tPvHandle CMainWindow::OpenCamera(unsigned long aUID,bool& aMaster)
{
    tPvHandle lHandle = NULL;

    if(PvCameraOpen(aUID,ePvAccessMaster,&lHandle))
    {
        if(!PvCameraOpen(aUID,ePvAccessMonitor,&lHandle))
            aMaster = false;
    }
    else
    {
        tPvUint32 lMaxSize = 8228;

        // get the last packet size set on the camera
        PvAttrUint32Get(lHandle,"PacketSize",&lMaxSize);
        // adjust the packet size according to the current network capacity
        PvCaptureAdjustPacketSize(lHandle,lMaxSize);

        aMaster = true;
    }

    return lHandle;
}
Example #20
// display info on a given attribute of the camera
void QueryAttribute(const char* aLabel)
{
    tPvAttributeInfo lInfo;

    if(!PvAttrInfo(GCamera.Handle,aLabel,&lInfo))
    {
        char lFlags[5];

        memset(lFlags,' ',sizeof(char) * 4);

        if(lInfo.Flags & ePvFlagRead)
            lFlags[0] = 'r';
        if(lInfo.Flags & ePvFlagWrite)
            lFlags[1] = 'w';
        if(lInfo.Flags & ePvFlagVolatile)
            lFlags[2] = 'v';
        if(lInfo.Flags & ePvFlagConst)
            lFlags[3] = 'c';
        lFlags[4] = '\0';

        printf("%30s (%30s) [%7s][%s]",aLabel,lInfo.Category,DatatypeToString(lInfo.Datatype),lFlags); 
        
        switch(lInfo.Datatype)
        {           
            case ePvDatatypeString:
            {
                char lValue[128];

                // we assume here that any string value will be less than 128 characters
                // long, which may not be the case
                
                if(!PvAttrStringGet(GCamera.Handle,aLabel,lValue,128,NULL))
                    printf(" = %s\n",lValue);
                else
                    printf(" = ERROR!\n");

                break;                
            }
            case ePvDatatypeEnum:
            {
                char lValue[128];

                // we assume here that any string value will be less than 128 characters
                // long, which may not be the case
                
                if(!PvAttrEnumGet(GCamera.Handle,aLabel,lValue,128,NULL))
                    printf(" = %s\n",lValue);
                else
                    printf(" = ERROR!\n");
                break;
            }
            case ePvDatatypeUint32:
            {
                tPvUint32 lValue;
                
                if(!PvAttrUint32Get(GCamera.Handle,aLabel,&lValue))
                    printf(" = %lu\n",lValue);
                else
                    printf(" = ERROR!\n");
                break;
            }
            case ePvDatatypeFloat32:
            {
                tPvFloat32 lValue;
                
                if(!PvAttrFloat32Get(GCamera.Handle,aLabel,&lValue))
                    printf(" = %f\n",lValue);
                else
                    printf(" = ERROR!\n");
                break;
            }
            default:
                printf("\n");
        }
    }
}
Example #21
// display info on a given attribute of the camera
void QueryAttribute(const char* aLabel)
{
    tPvAttributeInfo lInfo;

    if(PvAttrInfo(GCamera.Handle,aLabel,&lInfo) == ePvErrSuccess)
    {
        char lFlags[5];

        memset(lFlags,' ',sizeof(char) * 4);

        if(lInfo.Flags & ePvFlagRead)
            lFlags[0] = 'r';
        if(lInfo.Flags & ePvFlagWrite)
            lFlags[1] = 'w';
        if(lInfo.Flags & ePvFlagVolatile)
            lFlags[2] = 'v';
        if(lInfo.Flags & ePvFlagConst)
            lFlags[3] = 'c';
        lFlags[4] = '\0';

	//	printf("%30s (%30s) [%7s]{%s}",aLabel,lInfo.Category,DatatypeToString(lInfo.Datatype),lFlags); 
    //    printf("%s/%s = %s [%s]{%s}\n",lInfo.Category,aLabel,lValue,DatatypeToString(lInfo.Datatype),lFlags); 

        switch(lInfo.Datatype)
        {           
            case ePvDatatypeString:
            {
                char lValue[128];

                // we assume here that any string value will be less than 128 characters
                // long, which may not be the case
                
                if(PvAttrStringGet(GCamera.Handle,aLabel,lValue,128,NULL) == ePvErrSuccess)
                    printf("%s/%s = %s [%s,%s]\n",lInfo.Category,aLabel,lValue,DatatypeToString(lInfo.Datatype),lFlags); 
                else
                    printf("ERROR!\n");

                break;                
            }
            case ePvDatatypeEnum:
            {
                char lValue[128];

                // we assume here that any string value will be less than 128 characters
                // long, which may not be the case
                
                if(PvAttrEnumGet(GCamera.Handle,aLabel,lValue,128,NULL) == ePvErrSuccess)
                    printf("%s/%s = %s [%s,%s]\n",lInfo.Category,aLabel,lValue,DatatypeToString(lInfo.Datatype),lFlags); 
                else
                    printf("ERROR!\n");
                break;
            }
            case ePvDatatypeUint32:
            {
                tPvUint32 lValue;
                
                if(PvAttrUint32Get(GCamera.Handle,aLabel,&lValue) == ePvErrSuccess)
                    printf("%s/%s = %lu [%s,%s]\n",lInfo.Category,aLabel,lValue,DatatypeToString(lInfo.Datatype),lFlags); 
                else
                    printf("ERROR!\n");
                break;
            }
            case ePvDatatypeInt64:
            {
                tPvInt64 lValue;
                
                if(PvAttrInt64Get(GCamera.Handle,aLabel,&lValue) == ePvErrSuccess)
                    printf("%s/%s = %lld [%s,%s]\n",lInfo.Category,aLabel,lValue,DatatypeToString(lInfo.Datatype),lFlags); 
                else
                    printf("ERROR!\n");
                break;
            }            
            case ePvDatatypeFloat32:
            {
                tPvFloat32 lValue;
                
                if(PvAttrFloat32Get(GCamera.Handle,aLabel,&lValue) == ePvErrSuccess)
                    printf("%s/%s = %f [%s,%s]\n",lInfo.Category,aLabel,lValue,DatatypeToString(lInfo.Datatype),lFlags); 
                else
                    printf("ERROR!\n");
                break;
            }
            case ePvDatatypeBoolean:
            {
                tPvBoolean lValue;
                
                if(PvAttrBooleanGet(GCamera.Handle,aLabel,&lValue) == ePvErrSuccess)
                    printf("%s/%s = %s [%s,%s]\n",lInfo.Category,aLabel,lValue ? "true" : "false",DatatypeToString(lInfo.Datatype),lFlags); 
                else
                    printf("ERROR!\n");                   
                break;
            }
            default:
                //command
				printf("%s/%s [%s,%s]\n",lInfo.Category,aLabel,DatatypeToString(lInfo.Datatype),lFlags);
        }
    }
}
Example #22
double CvCaptureCAM_PvAPI::getProperty( int property_id ) const
{
    tPvUint32 nTemp;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        PvAttrUint32Get(Camera.Handle, "Width", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FRAME_HEIGHT:
        PvAttrUint32Get(Camera.Handle, "Height", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_MONOCROME:
        if (monocrome)
          return 1;
        else
          return 0;
    case CV_CAP_PROP_EXPOSURE:
        PvAttrUint32Get(Camera.Handle,"ExposureValue",&nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FPS:
        tPvFloat32 nfTemp;
        PvAttrFloat32Get(Camera.Handle, "StatFrameRate", &nfTemp);
        return (double)nfTemp;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        char mEnable[4];   // room for "Off"/"On" plus terminator
        char mIp[16];      // room for a full dotted-quad address
        PvAttrEnumGet(Camera.Handle,"MulticastEnable",mEnable,sizeof(mEnable),NULL);
        if (strcmp(mEnable, "Off") == 0)
        {
            return -1;
        }
        else
        {
            long int ip;
            int a,b,c,d;
            PvAttrStringGet(Camera.Handle, "MulticastIPAddress",mIp,sizeof(mIp),NULL);
            sscanf(mIp, "%d.%d.%d.%d", &a, &b, &c, &d); ip = ((a*256 + b)*256 + c)*256 + d;
            return (double)ip;
        }
    case CV_CAP_PROP_GAIN:
        PvAttrUint32Get(Camera.Handle, "GainValue", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
        char triggerMode[256];
        PvAttrEnumGet(Camera.Handle, "FrameStartTriggerMode", triggerMode, 256, NULL);
        if (strcmp(triggerMode, "Freerun")==0)
            return 0.0;
        else if (strcmp(triggerMode, "SyncIn1")==0)
            return 1.0;
        else if (strcmp(triggerMode, "SyncIn2")==0)
            return 2.0;
        else if (strcmp(triggerMode, "FixedRate")==0)
            return 3.0;
        else if (strcmp(triggerMode, "Software")==0)
            return 4.0;
        else
            return -1.0;
    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
        PvAttrUint32Get(Camera.Handle, "DecimationHorizontal", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
        PvAttrUint32Get(Camera.Handle, "DecimationVertical", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_BINNINGX:
        PvAttrUint32Get(Camera.Handle,"BinningX",&nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_BINNINGY:
        PvAttrUint32Get(Camera.Handle,"BinningY",&nTemp);
        return (double)nTemp;
    }
    return -1.0;
}
Example #23
bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
{
    tPvErr error;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
    {
        tPvUint32 currHeight;

        PvAttrUint32Get(Camera.Handle, "Height", &currHeight);

        stopCapture();
        // Reallocate Frames
        if (!resizeCaptureFrame(value, currHeight))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_FRAME_HEIGHT:
    {
        tPvUint32 currWidth;

        PvAttrUint32Get(Camera.Handle, "Width", &currWidth);

        stopCapture();

        // Reallocate Frames
        if (!resizeCaptureFrame(currWidth, value))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_MONOCROME:
        if (value==1)
        {
            char pixelFormat[256];
            PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
            if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0)
            {
                monocrome=true;
            }
            else
                return false;
        }
        else
            monocrome=false;
        break;
    case CV_CAP_PROP_EXPOSURE:
        if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        if (value==-1)
        {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
        else
        {
            cv::String ip=cv::format("%d.%d.%d.%d", ((unsigned int)value>>24)&255, ((unsigned int)value>>16)&255, ((unsigned int)value>>8)&255, (unsigned int)value&255);
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                break;
            else
                return false;
        }
    case CV_CAP_PROP_GAIN:
        if (PvAttrUint32Set(Camera.Handle,"GainValue",(tPvUint32)value)!=ePvErrSuccess)
        {
            return false;
        }
        break;
    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
        if (value==0)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
        else if (value==1)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn1");
        else if (value==2)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn2");
        else if (value==3)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "FixedRate");
        else if (value==4)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Software");
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
        if (value >= 1 && value <= 8)
            error = PvAttrUint32Set(Camera.Handle, "DecimationHorizontal", value);
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
        if (value >= 1 && value <= 8)
            error = PvAttrUint32Set(Camera.Handle, "DecimationVertical", value);
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_BINNINGX:
        error = PvAttrUint32Set(Camera.Handle, "BinningX", value);
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_BINNINGY:
        error = PvAttrUint32Set(Camera.Handle, "BinningY", value);
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    default:
        return false;
    }
    return true;
}
Example #24
// Initialize camera input
bool CvCaptureCAM_PvAPI::open( int index )
{
    tPvCameraInfo cameraList[MAX_CAMERAS];
    
    tPvCameraInfo  camInfo;
    tPvIpSettings ipSettings;

 
    if (PvInitialize()) {
    }
        //return false;
        
    Sleep(1000);

    //close();
    
    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);

    if (numCameras <= 0 || index >= numCameras)
        return false;

    Camera.UID = cameraList[index].UniqueId;    

    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings)) {
		/*
		struct in_addr addr;
		addr.s_addr = ipSettings.CurrentIpAddress;
		printf("Current address:\t%s\n",inet_ntoa(addr));
		addr.s_addr = ipSettings.CurrentIpSubnet;
		printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
		addr.s_addr = ipSettings.CurrentIpGateway;
		printf("Current gateway:\t%s\n",inet_ntoa(addr));
		*/
	}	
	else {
		fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
		return false;
	}	


    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
    {
    
        //Set Pixel Format to BRG24 to follow conventions 
        /*Errcode = PvAttrEnumSet(Camera.Handle, "PixelFormat", "Bgr24");
        if (Errcode != ePvErrSuccess)
        {
            fprintf(stderr, "PvAPI: couldn't set PixelFormat to Bgr24\n");
            return NULL;
        }
        */
        tPvUint32 frameWidth, frameHeight, frameSize;
        unsigned long maxSize;
		char pixelFormat[256];
        PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
        PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
        maxSize = 8228;
        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
			return false;
        if (strcmp(pixelFormat, "Mono8")==0) {
				grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
				grayframe->widthStep = (int)frameWidth;
				frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
				frame->widthStep = (int)frameWidth*3;		 
				Camera.Frame.ImageBufferSize = frameSize;
				Camera.Frame.ImageBuffer = grayframe->imageData;   
		}	    
		else if (strcmp(pixelFormat, "Mono16")==0) {
				grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
				grayframe->widthStep = (int)frameWidth*2; // 2 bytes per pixel for Mono16
				frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 3);
				frame->widthStep = (int)frameWidth*6; // 3 channels x 2 bytes for 16-bit data
				Camera.Frame.ImageBufferSize = frameSize;
				Camera.Frame.ImageBuffer = grayframe->imageData;
		}	  
		else if	(strcmp(pixelFormat, "Bgr24")==0) {
				frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
				frame->widthStep = (int)frameWidth*3;
				Camera.Frame.ImageBufferSize = frameSize;
				Camera.Frame.ImageBuffer = frame->imageData;
		}		
		else
				return false;
        // Start the camera
        PvCaptureStart(Camera.Handle);

        // Set the camera to capture continuously
        if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
        {
            fprintf(stderr,"Could not set Prosilica Acquisition Mode\n");
            return false;
        }
        
        if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
        {
            fprintf(stderr,"Could not start Prosilica acquisition\n");
            return false;
        }
        
        if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
        {
            fprintf(stderr,"Error setting Prosilica trigger to \"Freerun\"");
            return false;
        }
        
        return true;
    }
    fprintf(stderr,"Error cannot open camera\n");
    return false;

}
Example #25
// setup event channel
// return value: true == success, false == fail
bool EventSetup()
{
	unsigned long EventBitmask;
	tPvErr errCode;
	
	// check if events supported with this camera firmware
	if (PvAttrExists(GCamera.Handle,"EventsEnable1") == ePvErrNotFound)
	{
        printf("This camera does not support event notifications.\n");
        return false;
	}
	
	//Clear all events
	//EventsEnable1 is a bitmask of all events. Bits correspond to last two digits of EventId.
	// e.g: Bit 1 is EventAcquisitionStart, Bit 2 is EventAcquisitionEnd, Bit 10 is EventSyncIn1Rise. 
    if ((errCode = PvAttrUint32Set(GCamera.Handle,"EventsEnable1",0)) != ePvErrSuccess)
	{
		printf("Set EventsEnable1 err: %u\n", errCode);
		return false;
	}
            
	//Set individual events (could do in one step with EventsEnable1).
	if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventSelector","AcquisitionStart")) != ePvErrSuccess)
	{
		printf("Set EventsSelector err: %u\n", errCode);
		return false;
	}
    if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventNotification","On")) != ePvErrSuccess)
	{
		printf("Set EventsNotification err: %u\n", errCode);
		return false;
	}

	if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventSelector","AcquisitionEnd")) != ePvErrSuccess)
	{
		printf("Set EventsSelector err: %u\n", errCode);
		return false;
	}
    if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventNotification","On")) != ePvErrSuccess)
	{
		printf("Set EventsNotification err: %u\n", errCode);
		return false;
	}

	if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventSelector","FrameTrigger")) != ePvErrSuccess)
	{
		printf("Set EventsSelector err: %u\n", errCode);
		return false;
	}
    if ((errCode = PvAttrEnumSet(GCamera.Handle,"EventNotification","On")) != ePvErrSuccess)
	{
		printf("Set EventsNotification err: %u\n", errCode);
		return false;
	}
	
	//Get and print bitmask
	PvAttrUint32Get(GCamera.Handle,"EventsEnable1", &EventBitmask);
	printf("Events set. EventsEnable1 bitmask: %u\n", EventBitmask);

    //register callback function
	if ((errCode = PvCameraEventCallbackRegister(GCamera.Handle,F_CameraEventCallback,NULL)) != ePvErrSuccess)
    {
		printf("PvCameraEventCallbackRegister err: %u\n", errCode);
        return false;
    }     
	return true;
}
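A minimal teardown counterpart is sketched below; it is an assumption (not part of the listed sample) and only reuses calls already shown here, clearing the EventsEnable1 bitmask to switch every event notification back off.

// Hedged sketch: disable all camera events again (assumes the same GCamera global).
bool EventUnsetup()
{
	tPvErr errCode;

	// writing 0 to the EventsEnable1 bitmask turns every event notification off
	if ((errCode = PvAttrUint32Set(GCamera.Handle,"EventsEnable1",0)) != ePvErrSuccess)
	{
		printf("Clear EventsEnable1 err: %u\n", errCode);
		return false;
	}
	return true;
}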
Example #26
// Initialize camera input
bool CvCaptureCAM_PvAPI::open( int index )
{
    tPvCameraInfo cameraList[MAX_CAMERAS];

    tPvCameraInfo  camInfo;
    tPvIpSettings ipSettings;


    if (PvInitialize()) {
    }
    //return false;

    Sleep(1000);

    //close();

    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);

    if (numCameras <= 0 || index >= numCameras)
        return false;

    Camera.UID = cameraList[index].UniqueId;

    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings))
    {
        /*
        struct in_addr addr;
        addr.s_addr = ipSettings.CurrentIpAddress;
        printf("Current address:\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpSubnet;
        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpGateway;
        printf("Current gateway:\t%s\n",inet_ntoa(addr));
        */
    }
    else
    {
        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
        return false;
    }


    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
    {
        tPvUint32 frameWidth, frameHeight;
        unsigned long maxSize;

        // By Default, try to set the pixel format to Mono8.  This can be changed later
        // via calls to setProperty. Some colour cameras (i.e. the Manta line) have a default
        // image mode of Bayer8, which is currently unsupported, so Mono8 is a safe bet for
        // startup.

        monocrome = (PvAttrEnumSet(Camera.Handle, "PixelFormat", "Mono8") == ePvErrSuccess);

        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);

        // Determine the maximum packet size supported by the system (ethernet adapter)
        // and then configure the camera to use this value.  If the system's NIC only supports
        // an MTU of 1500 or lower, this will automatically configure an MTU of 1500.
        // 8228 is the optimal size described by the API in order to enable jumbo frames

        maxSize = 8228;
        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
            return false;

        resizeCaptureFrame(frameWidth, frameHeight);

        return startCapture();

    }
    fprintf(stderr,"Error cannot open camera\n");
    return false;

}
Example #27
bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
{
    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
    {
        tPvUint32 currHeight;

        PvAttrUint32Get(Camera.Handle, "Height", &currHeight);

        stopCapture();
        // Reallocate Frames
        if (!resizeCaptureFrame(value, currHeight))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_FRAME_HEIGHT:
    {
        tPvUint32 currWidth;

        PvAttrUint32Get(Camera.Handle, "Width", &currWidth);

        stopCapture();

        // Reallocate Frames
        if (!resizeCaptureFrame(currWidth, value))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_MONOCROME:
        if (value==1)
        {
            char pixelFormat[256];
            PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
            if ((strcmp(pixelFormat, "Mono8")==0) || strcmp(pixelFormat, "Mono16")==0)
            {
                monocrome=true;
            }
            else
                return false;
        }
        else
            monocrome=false;
        break;
    case CV_CAP_PROP_EXPOSURE:
        if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        if (value==-1)
        {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
        else
        {
            cv::String ip=cv::format("%d.%d.%d.%d", ((int)value>>24)&255, ((int)value>>16)&255, ((int)value>>8)&255, (int)value&255);
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                break;
            else
                return false;
        }
    case CV_CAP_PROP_GAIN:
        if (PvAttrUint32Set(Camera.Handle,"GainValue",(tPvUint32)value)!=ePvErrSuccess)
        {
            return false;
        }
        break;
    default:
        return false;
    }
    return true;
}
Example #28
bool CvCaptureCAM_PvAPI::resizeCaptureFrame (int frameWidth, int frameHeight)
{
    char pixelFormat[256];
    tPvUint32 frameSize;
    tPvUint32 sensorHeight;
    tPvUint32 sensorWidth;


    if (grayframe)
    {
        cvReleaseImage(&grayframe);
        grayframe = NULL;
    }

    if (frame)
    {
        cvReleaseImage(&frame);
        frame = NULL;
    }

    if (PvAttrUint32Get(Camera.Handle, "SensorWidth", &sensorWidth) != ePvErrSuccess)
    {
        return false;
    }

    if (PvAttrUint32Get(Camera.Handle, "SensorHeight", &sensorHeight) != ePvErrSuccess)
    {
        return false;
    }

    // Cap out of bounds widths to the max supported by the sensor
    if ((frameWidth < 0) || ((tPvUint32)frameWidth > sensorWidth))
    {
        frameWidth = sensorWidth;
    }

    if ((frameHeight < 0) || ((tPvUint32)frameHeight > sensorHeight))
    {
        frameHeight = sensorHeight;
    }


    if (PvAttrUint32Set(Camera.Handle, "Height", frameHeight) != ePvErrSuccess)
    {
        return false;
    }

    if (PvAttrUint32Set(Camera.Handle, "Width", frameWidth) != ePvErrSuccess)
    {
        return false;
    }

    PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);


    if (strcmp(pixelFormat, "Mono8")==0)
    {
        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
        grayframe->widthStep = (int)frameWidth;
        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
        frame->widthStep = (int)frameWidth*3;
        Camera.Frame.ImageBufferSize = frameSize;
        Camera.Frame.ImageBuffer = grayframe->imageData;
    }
    else if (strcmp(pixelFormat, "Mono16")==0)
    {
        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
        grayframe->widthStep = (int)frameWidth*2; // 2 bytes per pixel for Mono16
        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 3);
        frame->widthStep = (int)frameWidth*6; // 3 channels x 2 bytes for 16-bit data
        Camera.Frame.ImageBufferSize = frameSize;
        Camera.Frame.ImageBuffer = grayframe->imageData;
    }
    else if (strcmp(pixelFormat, "Bgr24")==0)
    {
        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
        frame->widthStep = (int)frameWidth*3;
        Camera.Frame.ImageBufferSize = frameSize;
        Camera.Frame.ImageBuffer = frame->imageData;
    }
    else
        return false;

    return true;
}
Example #29
void cameraGetExpo(tCamera* camera, tPvUint32 *expo) {
  if (PvAttrUint32Get(camera->Handle, "ExposureValue", expo)) 
    throw "failed to get ExposureValue parameter";
}
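Example #9 calls a matching setter that is not listed on this page; a minimal sketch of what it could look like, assuming the same throw-a-C-string convention, is:

// Hedged sketch of the setter counterpart (assumed, not part of the listed code).
void cameraSetExpo(tCamera* camera, tPvUint32 expo) {
  if (PvAttrUint32Set(camera->Handle, "ExposureValue", expo))
    throw "failed to set ExposureValue parameter";
}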
Example #30
bool CameraGigE::onInit() {
	LOG(LTRACE) << "CameraGigE::initialize\n";

	h_onTrigger.setup(this, &CameraGigE::onTrigger);
	registerHandler("onTrigger", &h_onTrigger);

	newImage = registerEvent("newImage");
	endOfSequence = registerEvent("endOfSequence");

	registerStream("out_img", &out_img);

	if (!props.address.empty()) {
		unsigned long ip = inet_addr(props.address.c_str());

		if (PvCameraOpenByAddr(ip, ePvAccessMaster, &cHandle) != ePvErrSuccess) {
			LOG(LERROR) << "Unable to open camera on adress "
					<< props.address << " \n";
			return false;
		}

	} else if (props.uid != 0) {
		if (PvCameraOpen(props.uid, ePvAccessMaster, &cHandle) != ePvErrSuccess) {
			LOG(LERROR) << "Unable to open camera with uid " << props.uid
					<< " \n";
			return false;
		}
	} else {
		return false;
	}

	// Set parameters
	tPvErr err;
	///	 Exposure
	if (!props.exposureMode.empty()) {
		if ((err = PvAttrEnumSet(cHandle, "ExposureMode",
				props.exposureMode.c_str())) == ePvErrSuccess) {
			if (props.exposureMode == "Manual") {
				if ((err = PvAttrUint32Set(cHandle, "ExposureValue",
						props.exposureValue / 1000000.0)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "ExposureValue", &min, &max);
						LOG(LWARNING) << "ExposureValue : "
								<< props.exposureValue
								<< " is out of range, valid range [ "
								<< (double) min / 1000000.0 << " , "
								<< (double) max / 1000000.0 << " ]\n";
					}
				}
			}
		} else {
			LOG(LWARNING) << "Unable to set ExposureMode \n";
		}
	}
	/// Gain
	if (!props.gainMode.empty()) {
		if ((err = PvAttrEnumSet(cHandle, "GainMode", props.gainMode.c_str()))
				== ePvErrSuccess) {
			if (props.gainMode == "Manual") {
				if ((err = PvAttrUint32Set(cHandle, "gainValue",
						props.gainValue)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "GainValue", &min, &max);
						LOG(LWARNING) << "GainValue : " << props.gainValue
								<< " is out of range, valid range [ "
								<< (double) min << " , " << (double) max
								<< " ]\n";
					}
				}
			}
		} else {
			LOG(LWARNING) << "Unable to set GainMode \n";
		}
	}
	///	White Balance
	if (!props.whitebalMode.empty()) {
		if ((err = PvAttrEnumSet(cHandle, "WhitebalMode",
				props.gainMode.c_str())) == ePvErrSuccess) {
			if (props.whitebalMode == "Manual") {
				if ((err = PvAttrUint32Set(cHandle, "WhitebalValueRed",
						props.whitebalValueRed)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "WhitebalValueRed", &min,
								&max);
						LOG(LWARNING) << "WhitebalValueRed : "
								<< props.whitebalValueRed
								<< " is out of range, valid range [ "
								<< (double) min << " , " << (double) max
								<< " ]\n";
					}
				}

				if ((err = PvAttrUint32Set(cHandle, "WhitebalValueBlue",
						props.whitebalValueBlue)) != ePvErrSuccess) {
					if (err == ePvErrOutOfRange) {
						tPvUint32 min, max;
						PvAttrRangeUint32(cHandle, "WhitebalValueBlue", &min,
								&max);
						LOG(LWARNING) << "WhitebalValueBlue : "
								<< props.whitebalValueBlue
								<< " is out of range, valid range [ "
								<< (double) min << " , " << (double) max
								<< " ]\n";
					}
				}
			}
		} else {
			LOG(LWARNING) << "Unable to set WhitebalMode" << err << "\n";
		}
	}

	if ((err = PvAttrEnumSet(cHandle, "MirrorX", props.mirrorX ? "On" : "Off"))
			!= ePvErrSuccess) {

	}

	if ((err = PvAttrEnumSet(cHandle, "PixelFormat", props.pixelFormat.c_str()))
			!= ePvErrSuccess) {
		LOG(LERROR) << "Unable to set pixelformat " << err;
	}

	if ((err = PvAttrUint32Set(cHandle, "Height", props.height))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "Height", &min, &max);
			LOG(LWARNING) << "Height : " << props.height
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "Width", props.width)) != ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "Width", &min, &max);
			LOG(LWARNING) << "Width : " << props.width
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "RegionX", props.regionX))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "RegionX", &min, &max);
			LOG(LWARNING) << "RegionX : " << props.regionX
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "RegionY", props.regionY))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "RegionY", &min, &max);
			LOG(LWARNING) << "RegionY : " << props.regionY
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "BinningX", props.binningX))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "BinningX", &min, &max);
			LOG(LWARNING) << "BinningX : " << props.binningX
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}

	if ((err = PvAttrUint32Set(cHandle, "BinningY", props.binningY))
			!= ePvErrSuccess) {
		if (err == ePvErrOutOfRange) {
			tPvUint32 min, max;
			PvAttrRangeUint32(cHandle, "BinningY", &min, &max);
			LOG(LWARNING) << "BinningY : " << props.binningY
					<< " is out of range, valid range [ " << (double) min
					<< " , " << (double) max << " ]\n";
		}
	}
	// ----------------

	PvAttrEnumSet(cHandle, "FrameStartTriggerMode", "Freerun");

	unsigned long frameSize = 0;

	if (PvAttrUint32Get(cHandle, "TotalBytesPerFrame", &frameSize)
			!= ePvErrSuccess) {
		return false;
	}

	frame.ImageBuffer = new char[frameSize];
	frame.ImageBufferSize = frameSize;

	return true;
}