bool CvCaptureCAM_XIMEA::grabFrame() { image.size = sizeof(XI_IMG); int mvret = xiGetImage( hmv, timeout, &image); if(mvret == MM40_ACQUISITION_STOPED) { xiStartAcquisition(hmv); mvret = xiGetImage(hmv, timeout, &image); } if(mvret != XI_OK) { errMsg("Error during GetImage", mvret); return false; } return true; }
CameraFrame CameraXIMEA::getFrame()
{
    // Single image buffer. Zero-initialize the whole struct so bp/bp_size
    // start out NULL/0 — previously only `size` was set and the rest was
    // stack garbage, which the API may inspect depending on buffer policy
    // (the commented-out `image.bp = NULL` lines hinted at this).
    XI_IMG image;
    memset(&image, 0, sizeof(image));
    image.size = sizeof(XI_IMG); // must be initialized

    if (triggerMode == triggerModeSoftware) {
        // Fire software trigger so the camera exposes a frame now.
        // NOTE(review): value 0 is passed here while other code uses 1 —
        // confirm which value the installed xiAPI version expects.
        stat = xiSetParamInt(camera, XI_PRM_TRG_SOFTWARE, 0);
        HandleResult(stat, "xiSetParam (XI_PRM_TRG_SOFTWARE)");
    }

    // Retrieve image from camera (1 s timeout). This single call replaces
    // the identical call duplicated in both branches of the original.
    stat = xiGetImage(camera, 1000, &image);
    HandleResult(stat, "xiGetImage");

    // Wrap the driver-owned buffer in a CameraFrame descriptor.
    CameraFrame frame;
    frame.height = image.height;
    frame.width = image.width;
    frame.memory = (unsigned char*)image.bp;
    // NOTE(review): only the microsecond part is kept; tsSec is dropped —
    // confirm callers expect a sub-second timestamp.
    frame.timeStamp = image.tsUSec;
    frame.sizeBytes = image.bp_size;
    frame.flags = image.GPI_level;
    return frame;
}
bool CvCaptureCAM_XIMEA::grabFrame() { int mvret = XI_OK; image.size = sizeof(XI_IMG); if((mvret = xiGetImage( hmv, timeout, &image)) != XI_OK) { errMsg("Error during GetImage", mvret); return false; } return true; }
int _tmain(int argc, _TCHAR* argv[]) { // image buffer XI_IMG image; memset(&image,0,sizeof(image)); image.size = sizeof(XI_IMG); // Sample for XIMEA API V4.05 HANDLE xiH = NULL; XI_RETURN stat = XI_OK; // Retrieving a handle to the camera device printf("Opening first camera...\n"); stat = xiOpenDevice(0, &xiH); HandleResult(stat,"xiOpenDevice"); // Setting "exposure" parameter (10ms=10000us) stat = xiSetParamInt(xiH, XI_PRM_EXPOSURE, 10000); HandleResult(stat,"xiSetParam (exposure set)"); // Note: // The default parameters of each camera might be different in different API versions // In order to ensure that your application will have camera in expected state, // please set all parameters expected by your application to required value. printf("Starting acquisition...\n"); stat = xiStartAcquisition(xiH); HandleResult(stat,"xiStartAcquisition"); #define EXPECTED_IMAGES 10 for (int images=0;images < EXPECTED_IMAGES;images++) { // getting image from camera stat = xiGetImage(xiH, 5000, &image); HandleResult(stat,"xiGetImage"); unsigned char pixel = *(unsigned char*)image.bp; printf("Image %d (%dx%d) received from camera. First pixel value: %d\n", images, (int)image.width, (int)image.height, pixel); } printf("Stopping acquisition...\n"); xiStopAcquisition(xiH); xiCloseDevice(xiH); finish: printf("Done\n"); #ifdef WIN32 Sleep(2000); #endif return 0; }
bool CaptureCAM_XIMEA::grabFrame() { memset(&image, 0, sizeof(XI_IMG)); image.size = sizeof(XI_IMG); // image.width = width; // image.height = height; // image.AbsoluteOffsetX= xoffset; // image.AbsoluteOffsetY= yoffset; if(trigger == false){ xiSetParamInt(hmv, XI_PRM_TRG_SOFTWARE, 1); } int stat = xiGetImage( hmv, timeout, &image); if(stat == MM40_ACQUISITION_STOPED) { xiStartAcquisition(hmv); stat = xiGetImage(hmv, timeout, &image); } if(stat != XI_OK) { errMsg("Error during GetImage", stat); return false; } return true; }
// Grab one raw frame from the camera and expose it via the reusable
// frameRaw descriptor. Returns NULL if the camera is not open or the
// grab fails; the returned pointer refers to member state and is only
// valid until the next call.
Camera::FrameRaw* Camera::getFrame()
{
    if (!opened) {
        return NULL;
    }
    // Check the API result instead of ignoring it: `image` is a member, so
    // after a failed call image.bp could still hold a stale non-NULL pointer
    // from a previous frame and the bp==NULL test alone would pass stale data.
    if (xiGetImage(device, 100, &image) != XI_OK) {
        return NULL;
    }
    if (image.bp == NULL) {
        return NULL;
    }
    frameRaw.data = (unsigned char*)image.bp;
    frameRaw.size = image.bp_size;
    frameRaw.number = image.nframe;
    frameRaw.width = image.width;
    frameRaw.height = image.height;
    // Combine the seconds + microseconds fields into one double timestamp.
    frameRaw.timestamp = (double)image.tsSec + (double)image.tsUSec / 1000000.0;
    // A frame is "fresh" only if its number advanced since the last call.
    frameRaw.fresh = frameRaw.number != lastFrameNumber;
    lastFrameNumber = frameRaw.number;
    return &frameRaw;
}
// Grab one raw Bayer frame, debayer it to planar I420 via libyuv, then
// interleave into the YUYV buffer. Returns NULL if the camera is not open
// or the grab fails; the returned pointer refers to member state and is
// only valid until the next call.
Camera::FrameYUYV* Camera::getFrameYUYV()
{
    if (!opened) {
        return NULL;
    }
    // Check the API result instead of ignoring it: `image` is a member, so
    // after a failed call image.bp could still hold a stale non-NULL pointer
    // from a previous frame and the bp==NULL test alone would pass stale data.
    if (xiGetImage(device, 100, &image) != XI_OK) {
        return NULL;
    }
    if (image.bp == NULL) {
        return NULL;
    }
    frameYUV.data = (unsigned char*)image.bp;
    frameYUV.size = image.bp_size;
    frameYUV.number = image.nframe;
    frameYUV.width = image.width;
    frameYUV.height = image.height;
    frameYUV.timestamp = (double)image.tsSec + (double)image.tsUSec / 1000000.0;
    frameYUV.fresh = frameYUV.number != lastFrameNumber;

    if (!yuvInitialized) {
        // Lazily allocate the planar I420 buffers and the interleaved YUYV
        // buffer once, sized from the first frame's dimensions.
        frameYUV.strideY = frameYUV.width;
        frameYUV.strideU = (frameYUV.width + 1) / 2;
        frameYUV.strideV = (frameYUV.width + 1) / 2;
        frameYUV.dataY = new uint8[frameYUV.width * frameYUV.height];
        frameYUV.dataU = new uint8[(frameYUV.width / 2) * (frameYUV.height / 2)];
        frameYUV.dataV = new uint8[(frameYUV.width / 2) * (frameYUV.height / 2)];
        // NOTE(review): YUYV is 2 bytes/pixel, so width*height*2 would
        // suffice — *3 over-allocates (harmless but wasteful).
        frameYUV.dataYUYV = new uint8[frameYUV.width * frameYUV.height * 3];
        yuvInitialized = true;
    }
    lastFrameNumber = frameYUV.number;

    // Debayer the raw RGGB sensor data into planar I420.
    libyuv::BayerRGGBToI420(
        frameYUV.data, frameYUV.width,
        frameYUV.dataY, frameYUV.strideY,
        frameYUV.dataU, frameYUV.strideU,
        frameYUV.dataV, frameYUV.strideV,
        frameYUV.width, frameYUV.height
    );

    // Interleave I420 -> YUYV. For each chroma (U,V) sample, emit the 2x2
    // block of destination pixels that shares it: two pixels on the current
    // row (Y0 U Y1 V) and two on the row directly below, reusing the same
    // U/V values.
    int dstIndex = 0;
    int yIndex = 0;
    int dstStride = frameYUV.width * 2;   // YUYV: 2 bytes per pixel
    int yStride = frameYUV.width;
    int row = 0;
    int pixelsInRow = 0;
    int elementCount = (frameYUV.width / 2) * (frameYUV.height / 2);
    for (int uvIndex = 0; uvIndex < elementCount; uvIndex++) {
        // Top row of the 2x2 block.
        frameYUV.dataYUYV[dstIndex] = frameYUV.dataY[yIndex];
        frameYUV.dataYUYV[dstIndex + 1] = frameYUV.dataU[uvIndex];
        frameYUV.dataYUYV[dstIndex + 2] = frameYUV.dataY[yIndex + 1];
        frameYUV.dataYUYV[dstIndex + 3] = frameYUV.dataV[uvIndex];
        // Bottom row of the block shares the same chroma sample.
        frameYUV.dataYUYV[dstIndex + dstStride] = frameYUV.dataY[yIndex + yStride];
        frameYUV.dataYUYV[dstIndex + dstStride + 1] = frameYUV.dataU[uvIndex];
        frameYUV.dataYUYV[dstIndex + dstStride + 2] = frameYUV.dataY[yIndex + yStride + 1];
        frameYUV.dataYUYV[dstIndex + dstStride + 3] = frameYUV.dataV[uvIndex];
        dstIndex += 4;
        yIndex += 2;
        pixelsInRow += 2;
        if (pixelsInRow == frameYUV.width) {
            // Finished a pair of pixel rows; jump both cursors two rows down
            // (the bottom row of each block was already written above).
            row += 2;
            yIndex = row * yStride;
            dstIndex = row * dstStride;
            pixelsInRow = 0;
        }
    }

    return &frameYUV;
}