/*
 * Do the actual capturing.
 * Called from inside the acquisition thread.
 */
int ThorlabsUSBCam::ThreadRun (void)
{
   MM::MMTime startFrame = GetCurrentMMTime();
   DWORD dwRet = WaitForSingleObject(hEvent, 2000);
   if (dwRet == WAIT_TIMEOUT)
   {
      return ERR_THORCAM_LIVE_TIMEOUT;
   }
   else if (dwRet == WAIT_OBJECT_0)
   {
      memcpy(img_.GetPixelsRW(),
             cameraBuf,
             img_.Width()*img_.Height()*img_.Depth());

      int ret = InsertImage();

      MM::MMTime frameInterval = GetCurrentMMTime() - startFrame;
      if (frameInterval.getMsec() > 0.0)
         framesPerSecond = 1000.0 / frameInterval.getMsec();

      return ret;
   }
   else
   {
      ostringstream os;
      os << "Unknown event status " << dwRet;
      LogMessage(os.str());
      return ERR_THORCAM_LIVE_UNKNOWN_EVENT;
   }
}
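
/*
 * For context (a sketch, not part of this adapter's source): ThreadRun() is
 * normally called once per frame by the adapter's sequence-acquisition
 * thread. A hypothetical caller loop could look like the one below;
 * MySequenceThread, camera_, numImages_ and IsStopped() are assumed names,
 * not the actual API of this adapter.
 */
int MySequenceThread::svc()
{
   int ret = DEVICE_OK;
   long imageCounter = 0;
   // capture one frame per iteration until stopped, finished, or an error occurs
   while (ret == DEVICE_OK && !IsStopped() && imageCounter < numImages_)
   {
      ret = camera_->ThreadRun();
      imageCounter++;
   }
   return ret;
}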
Example #2
bool VariLC::Busy()
{
    if (delay.getMsec() > 0.0) {
        MM::MMTime interval = GetCurrentMMTime() - changedTime_;
        if (interval.getMsec() < delay.getMsec() ) {
            return true;
        }
    }

    return false;
}
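
/*
 * Sketch only: Busy() above assumes changedTime_ is stamped whenever the
 * device state changes. A hypothetical property handler (OnWavelength and
 * SendVariLCCommand() are assumed names, not the adapter's actual API) would
 * record the time right after the command is sent, so Busy() reports true
 * for 'delay' milliseconds:
 */
int VariLC::OnWavelength(MM::PropertyBase* pProp, MM::ActionType eAct)
{
   if (eAct == MM::AfterSet)
   {
      double value;
      pProp->Get(value);
      int ret = SendVariLCCommand(value);   // hypothetical serial write
      if (ret != DEVICE_OK)
         return ret;
      changedTime_ = GetCurrentMMTime();    // Busy() measures the delay from here
   }
   return DEVICE_OK;
}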
Example #3
bool FilterWheel::Busy()
{
   MM::MMTime elapsed = GetCurrentMMTime() - lastMoveTime_;
   long msPerPosition;
   switch (speed_) {
   case 3:
      msPerPosition = 40;
      break;
   case 2:
      msPerPosition = 66;
      break;
   case 1:
      msPerPosition = 100;
      break;
   case 0:
   default:
      msPerPosition = 400;
      break;
   }
   long waitTimeMs = (long) (posMoved_ * msPerPosition + GetDelayMs());
   return elapsed.getMsec() < waitTimeMs;
}
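
/*
 * Sketch only: the wait computed above assumes posMoved_ and lastMoveTime_
 * are updated by the move command. A hypothetical SetPosition() is shown
 * below; currentPos_ and SendWheelCommand() are assumed names, not the
 * adapter's actual API.
 */
int FilterWheel::SetPosition(long pos)
{
   posMoved_ = labs(pos - currentPos_);   // positions the wheel has to travel
   lastMoveTime_ = GetCurrentMMTime();    // Busy() measures elapsed time from here
   currentPos_ = pos;
   return SendWheelCommand(pos);          // hypothetical hardware call
}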
Example #4
int BitFlowCamera::LiveThread::svc()
{
   stopRunning_ = false;
   running_ = true;
   imageCounter_ = 0;

   // put the hardware into a continuous acquisition state
   while (true)  {
      if (stopRunning_)
         break;

      int ret = cam_->SnapImage();

      if (ret != DEVICE_OK) {
         char txt[1000];
         sprintf(txt, "BitFlow live thread: SnapImage() error %d", ret);
         cam_->GetCoreCallback()->LogMessage(cam_, txt, false);
         break;
      }

      char label[MM::MaxStrLength];
      cam_->GetLabel(label);

      MM::MMTime timestamp = cam_->GetCurrentMMTime();
      Metadata md;

      MetadataSingleTag mstStartTime(MM::g_Keyword_Metadata_StartTime, label, true);
      mstStartTime.SetValue(CDeviceUtils::ConvertToString(cam_->startTime_.getMsec()));
      md.SetTag(mstStartTime);

      MetadataSingleTag mstElapsed(MM::g_Keyword_Elapsed_Time_ms, label, true);
      MM::MMTime elapsed = timestamp - cam_->startTime_;
      mstElapsed.SetValue(CDeviceUtils::ConvertToString(elapsed.getMsec()));
      md.SetTag(mstElapsed);

      MetadataSingleTag mstCount(MM::g_Keyword_Metadata_ImageNumber, label, true);
      mstCount.SetValue(CDeviceUtils::ConvertToString(imageCounter_));
      md.SetTag(mstCount);

      // insert all channels
      for (unsigned i = 0; i < cam_->GetNumberOfChannels(); i++)
      {
         char buf[MM::MaxStrLength];
         MetadataSingleTag mstChannel(MM::g_Keyword_CameraChannelIndex, label, true);
         snprintf(buf, MM::MaxStrLength, "%u", i);
         mstChannel.SetValue(buf);
         md.SetTag(mstChannel);

         MetadataSingleTag mstChannelName(MM::g_Keyword_CameraChannelName, label, true);
         cam_->GetChannelName(i, buf);
         mstChannelName.SetValue(buf);
         md.SetTag(mstChannelName);

         ret = cam_->GetCoreCallback()->InsertImage(cam_, cam_->GetImageBuffer(i),
            cam_->GetImageWidth(),
            cam_->GetImageHeight(),
            cam_->GetImageBytesPerPixel(),
            md.Serialize().c_str());
         if (ret == DEVICE_BUFFER_OVERFLOW) {
            // the circular buffer is full: clear it and retry the insert once
            cam_->GetCoreCallback()->ClearImageBuffer(cam_);
            cam_->GetCoreCallback()->InsertImage(cam_, cam_->GetImageBuffer(i),
               cam_->GetImageWidth(),
               cam_->GetImageHeight(),
               cam_->GetImageBytesPerPixel(),
               md.Serialize().c_str());
         }
         else if (ret != DEVICE_OK) {
            cam_->GetCoreCallback()->LogMessage(cam_, "BitFlow thread: error inserting image", false);
            break;
         }
      }

      imageCounter_++;
      if (numImages_ >= 0 && imageCounter_ >= numImages_) {
         cam_->bfDev_.StopContinuousAcq();
         break;
      }
   }
   running_ = false;
   return 0;
}
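
/*
 * Sketch only (not from the original adapter): the stopRunning_ / running_
 * flags suggest a Stop() helper along these lines, which asks the loop in
 * svc() to exit and then waits for it to finish:
 */
void BitFlowCamera::LiveThread::Stop()
{
   stopRunning_ = true;            // request the acquisition loop to exit
   while (running_)
      CDeviceUtils::SleepMs(10);   // poll until svc() clears running_
}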
/**
* Inserts a multi-channel frame into the buffer.
*/
bool CircularBuffer::InsertMultiChannel(const unsigned char* pixArray, unsigned numChannels, unsigned width, unsigned height, unsigned byteDepth, const Metadata* pMd) throw (CMMError)
{
   MMThreadGuard guard(g_insertLock);

   static unsigned long previousTicks = 0;
   bool notOverflowed;
   ImgBuffer* pImg;
   unsigned long singleChannelSize = (unsigned long)width * height * byteDepth;

   {
      MMThreadGuard guard(g_bufferLock);

      if (previousTicks > 0)
         estimatedIntervalMs_ = GetClockTicksMs() - previousTicks;
      else
         estimatedIntervalMs_ = 0;

      // check image dimensions
      if (width != width_ || height != height_ || byteDepth != pixDepth_)
         throw CMMError("Incompatible image dimensions in the circular buffer", MMERR_CircularBufferIncompatibleImage);

      notOverflowed = (long)frameArray_.size() - (insertIndex_ - saveIndex_) > 0;
      if (!notOverflowed) {
         // buffer overflow
         overflow_ = true;
         return false;
      }
   }

   for (unsigned i=0; i<numChannels; i++)
   {
      Metadata md;
      {
         MMThreadGuard guard(g_bufferLock);
         // check if the requested (channel, slice) combination exists
         // we assume that all buffers are pre-allocated
         pImg = frameArray_[insertIndex_ % frameArray_.size()].FindImage(i, 0);
         if (!pImg)
            return false;

         if (pMd)
         {
            // TODO: the same metadata is inserted for each channel ???
            // Perhaps we need to add specific tags to each channel
            md = *pMd;
         }

         std::string cameraName = md.GetSingleTag("Camera").GetValue();
         if (imageNumbers_.end() == imageNumbers_.find(cameraName))
         {
            imageNumbers_[cameraName] = 0;
         }

         // insert image number. 
         md.put(MM::g_Keyword_Metadata_ImageNumber, CDeviceUtils::ConvertToString(imageNumbers_[cameraName]));
         ++imageNumbers_[cameraName];
      }

      if (!md.HasTag(MM::g_Keyword_Elapsed_Time_ms))
      {
         // if time tag was not supplied by the camera insert current timestamp
         MM::MMTime timestamp = GetMMTimeNow();
         md.PutImageTag(MM::g_Keyword_Elapsed_Time_ms, CDeviceUtils::ConvertToString(timestamp.getMsec()));
      }

      md.PutImageTag("Width",width);
      md.PutImageTag("Height",height);
      if (byteDepth == 1)
         md.PutImageTag("PixelType","GRAY8");
      else if (byteDepth == 2)
         md.PutImageTag("PixelType","GRAY16");
      else if (byteDepth == 4)
         md.PutImageTag("PixelType","RGB32");
      else if (byteDepth == 8)
         md.PutImageTag("PixelType","RGB64");
      else                          
         md.PutImageTag("PixelType","Unknown"); 

      pImg->SetMetadata(md);
      pImg->SetPixels(pixArray + i*singleChannelSize);
   }

   {
      MMThreadGuard guard(g_bufferLock);

      imageCounter_++;
      insertIndex_++;
      if ((insertIndex_ - (long)frameArray_.size()) > adjustThreshold && (saveIndex_- (long)frameArray_.size()) > adjustThreshold)
      {
         // adjust buffer indices to avoid overflowing integer size
         insertIndex_ -= adjustThreshold;
         saveIndex_ -= adjustThreshold;
      }
   }

   previousTicks = GetClockTicksMs();

   return true;
}
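
/*
 * Sketch only: a plain single-channel insert can be expressed as the
 * one-channel case of the method above (this wrapper is an assumption about
 * how the surrounding CircularBuffer code would use InsertMultiChannel):
 */
bool CircularBuffer::InsertImage(const unsigned char* pixArray, unsigned width, unsigned height,
                                 unsigned byteDepth, const Metadata* pMd) throw (CMMError)
{
   return InsertMultiChannel(pixArray, 1, width, height, byteDepth, pMd);
}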