BitmapPtr CMUCamera::getImage(bool bWait)
{
    if (bWait) {
        unsigned rc = WaitForSingleObject(m_pCamera->GetFrameEvent(), INFINITE);
        AVG_ASSERT(rc == WAIT_OBJECT_0);
    } else {
        unsigned rc = WaitForSingleObject(m_pCamera->GetFrameEvent(), 0);
        if (rc == WAIT_TIMEOUT) {
            // No frame yet
            return BitmapPtr();
        }
        AVG_ASSERT(rc == WAIT_OBJECT_0);
    }
    int rc2 = m_pCamera->AcquireImageEx(FALSE, NULL);
    if (rc2 != CAM_SUCCESS) {
        throw Exception(AVG_ERR_CAMERA_NONFATAL,
                "CMUCamera: Could not acquire image from camera. " +
                CMUErrorToString(rc2));
    }
    unsigned long captureBufferLength;
    unsigned char* pCaptureBuffer = m_pCamera->GetRawData(&captureBufferLength);

    BitmapPtr pCamBmp(new Bitmap(getImgSize(), getCamPF(), pCaptureBuffer,
            captureBufferLength / getImgSize().y, false, "TempCameraBmp"));
    return convertCamFrameToDestPF(pCamBmp);
}
int main(int argc, char** args)
{
    RGB *irgb;
    int IW, IH;
    //args[1] = "img\\000.bmp";

    // Read the image dimensions, then load the pixel data
    getImgSize(args[1], &IW, &IH);
    irgb = (RGB*)malloc(IW * IH * 3);
    bmpIn(args[1], &IW, &IH, irgb);

    // Sum the R, G and B channels over the whole image
    unsigned long r = 0;
    unsigned long g = 0;
    unsigned long b = 0;
    for (int x = 0; x < IW; x++) {
        for (int y = 0; y < IH; y++) {
            RGB grgb = getColor(irgb, IW, x, y);
            r += grgb.R;
            g += grgb.G;
            b += grgb.B;
        }
    }

    // Print the per-channel averages and a rename command that encodes them in the file name
    printf(":Input image:%s\n", args[1]);
    printf(":Average RGB:%lu %lu %lu\n", r / (IW * IH), g / (IW * IH), b / (IW * IH));
    printf("ren %s %03lu_%03lu_%03lu.bmp\n", args[1],
            r / (IW * IH), g / (IW * IH), b / (IW * IH));
    printf(":------------------------------------------------------------\n");
    //bmpOut("FHD_OUT.bmp", IW, IH, irgb);

    free(irgb);
    return 0;
}
void SegmentImage::getSegmentedImage(QVector<QImage> &segmentedQImages, QImage& contourQImage)
{
    // Returns segmented and contour images using references
    int rows = getImgRows();
    int cols = getImgCols();
    int nSeeds = getSeedNumber();
    Mat segmentationMatrix = getSegmentationMatrix();
    QVector<Mat> segmentedImages; // Will contain two segmented images: background and foreground

    // Initialize both segmented images by setting the background to white
    for (int i = 0; i < nSeeds; i++) {
        segmentedImages.append(Mat(getImgSize(), getImgType()));
        segmentedImages[i].setTo(cv::Scalar(255, 255, 255));
    }

    // If the wrong number of seeds was provided, return blank images
    if (nSeeds != 2) {
        segmentedQImages.resize(2);
        segmentedQImages[0] = QImage(cols, rows, QImage::Format_RGB16); // QImage expects (width, height)
        segmentedQImages[0].fill(Qt::white);
        segmentedQImages[1] = QImage(cols, rows, QImage::Format_RGB16);
        segmentedQImages[1].fill(Qt::white);
        contourQImage = QImage(cols, rows, QImage::Format_RGB16);
        contourQImage.fill(Qt::white);
        return;
    }

    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            // Seed value #1 is 1 and #2 is 2,
            // so a threshold of 1.5 separates foreground from background
            if (segmentationMatrix.at<double>(i, j) >= 1.5) {
                segmentedImages[0].at<Vec3b>(i, j) = getIntensity(i, j);
            } else {
                segmentedImages[1].at<Vec3b>(i, j) = getIntensity(i, j);
            }
        }
    }

    // Find contours and return the image with drawn contours
    Mat contourIm;
    getImageWithContour(getOrigImage(), contourIm); // Contour image returned via reference

    // Save the two segmented images (one for background, one for foreground) into segmentedQImages
    segmentedQImages.clear();
    for (int i = 0; i < nSeeds; i++) {
        segmentedQImages.push_back(cvMatToQImage(segmentedImages[i])); // Convert to QImage and store
    }

    // Convert the contour image into a QImage
    contourQImage = cvMatToQImage(contourIm);
    // All three images are returned through references
}
BitmapPtr FWCamera::getImage(bool bWait)
{
#ifdef AVG_ENABLE_1394_2
    bool bGotFrame = false;
    unsigned char * pCaptureBuffer = 0;
    dc1394video_frame_t * pFrame;
    dc1394error_t err;
    if (bWait) {
        err = dc1394_capture_dequeue(m_pCamera, DC1394_CAPTURE_POLICY_WAIT, &pFrame);
    } else {
        err = dc1394_capture_dequeue(m_pCamera, DC1394_CAPTURE_POLICY_POLL, &pFrame);
    }
    if (err == DC1394_SUCCESS && pFrame) {
        bGotFrame = true;
        pCaptureBuffer = pFrame->image;
    }
    if (bGotFrame) {
        int lineLen;
        if (getCamPF() == YCbCr411) {
            lineLen = getImgSize().x*1.5;
        } else {
            lineLen = getImgSize().x*getBytesPerPixel(getCamPF());
        }
        BitmapPtr pCamBmp(new Bitmap(getImgSize(), getCamPF(), pCaptureBuffer, lineLen,
                false, "TempCameraBmp"));
        BitmapPtr pDestBmp = convertCamFrameToDestPF(pCamBmp);
//        cerr << "CamBmp: " << pCamBmp->getPixelFormat() << ", DestBmp: "
//                << pDestBmp->getPixelFormat() << endl;
        dc1394_capture_enqueue(m_pCamera, pFrame);
        return pDestBmp;
    } else {
        return BitmapPtr();
    }
#else
    return BitmapPtr();
#endif
}
CMUCamera::CMUCamera(long long guid, bool bFW800, IntPoint size, PixelFormat camPF,
        PixelFormat destPF, float frameRate)
    : Camera(camPF, destPF, size, frameRate),
      m_WhitebalanceU(-1),
      m_WhitebalanceV(-1),
      m_pCamera(0)
{
    m_pCamera = new C1394Camera();
    int err;
    unsigned long videoFormat, videoMode;
    getVideoFormatAndMode(getImgSize(), getCamPF(), &videoFormat, &videoMode);

    // Find and open camera
    if (m_pCamera->RefreshCameraList() <= 0) {
        throw Exception(AVG_ERR_CAMERA_NONFATAL, "No Firewire cameras found");
    }
    int i = getCamIndex(guid);
    err = m_pCamera->SelectCamera(i);
    err = m_pCamera->InitCamera(TRUE);
    AVG_ASSERT(err == CAM_SUCCESS);

    if (bFW800) {
        m_pCamera->Set1394b(true);
    }

    // Setup video format and rate
    err = m_pCamera->SetVideoFormat(videoFormat);
    checkCMUError(err, AVG_ERR_CAMERA_NONFATAL,
            string("CMUCamera: Error setting video format ") + toString(videoFormat) +
            ", mode: " + toString(videoMode));
    err = m_pCamera->SetVideoMode(videoMode);
    checkCMUError(err, AVG_ERR_CAMERA_NONFATAL,
            string("CMUCamera: Error setting video mode ") + toString(videoMode) +
            ", format: " + toString(videoFormat));
    err = m_pCamera->SetVideoFrameRate(getFrameRateConst(getFrameRate()));
    checkCMUError(err, AVG_ERR_CAMERA_NONFATAL, "Error setting frame rate");

    // Start capturing images
    err = m_pCamera->StartImageAcquisition();
    if (err != CAM_SUCCESS) {
        throw Exception(AVG_ERR_CAMERA_NONFATAL,
                "CMUCamera: Could not start image acquisition. " + CMUErrorToString(err));
    }

    // Set camera features
    for (FeatureMap::iterator it = m_Features.begin(); it != m_Features.end(); it++) {
        setFeature(it->first, it->second, true);
    }
    setWhitebalance(m_WhitebalanceU, m_WhitebalanceV, true);

    if (camPF == BAYER8) {
        char sModel[256], sVendor[256];
        m_pCamera->GetCameraName(sModel, 256);
        m_pCamera->GetCameraVendor(sVendor, 256);
        if (strcmp(sModel, "DFx 31BF03") == 0) {
            AVG_TRACE(Logger::CONFIG,
                    "Applying bayer pattern fixup for IS DFx31BF03 camera");
            setCamPF(BAYER8_GRBG);
        } else if (strcmp(sVendor, "Point Grey Research") == 0) {
            AVG_TRACE(Logger::CONFIG,
                    "Applying bayer pattern fixup for PointGrey cameras");
            enablePtGreyBayer();
        }
    }
}