/** * Download image from FireFly */ void FFMVCCD::grabImage() { dc1394error_t err; dc1394video_frame_t *frame; uint32_t uheight, uwidth; int sub; uint16_t val; struct timeval start, end; // Let's get a pointer to the frame buffer uint8_t * image = PrimaryCCD.getFrameBuffer(); // Get width and height int width = PrimaryCCD.getSubW() / PrimaryCCD.getBinX(); int height = PrimaryCCD.getSubH() / PrimaryCCD.getBinY(); memset(image, 0, PrimaryCCD.getFrameBufferSize()); /*----------------------------------------------------------------------- * stop data transmission *-----------------------------------------------------------------------*/ gettimeofday(&start, NULL); for (sub = 0; sub < sub_count; ++sub) { IDMessage(getDeviceName(), "Getting sub %d of %d", sub, sub_count); err=dc1394_capture_dequeue(dcam, DC1394_CAPTURE_POLICY_WAIT, &frame); if (err != DC1394_SUCCESS) { IDMessage(getDeviceName(), "Could not capture frame"); } dc1394_get_image_size_from_video_mode(dcam,DC1394_VIDEO_MODE_640x480_MONO16, &uwidth, &uheight); if (DC1394_TRUE == dc1394_capture_is_frame_corrupt(dcam, frame)) { IDMessage(getDeviceName(), "Corrupt frame!"); continue; } // Fill buffer with random pattern for (int i=0; i < height ; i++) { for (int j=0; j < width; j++) { /* Detect unsigned overflow */ val = ((uint16_t *) image)[i*width+j] + ntohs(((uint16_t*) (frame->image))[i*width+j]); if (val > ((uint16_t *) image)[i*width+j]) { ((uint16_t *) image)[i*width+j] = val; } else { ((uint16_t *) image)[i*width+j] = 0xFFFF; } } } dc1394_capture_enqueue(dcam, frame); } err=dc1394_video_set_transmission(dcam,DC1394_OFF); IDMessage(getDeviceName(), "Download complete."); gettimeofday(&end, NULL); IDMessage(getDeviceName(), "Download took %d uS", (int) ((end.tv_sec - start.tv_sec) * 1000000 + (end.tv_usec - start.tv_usec))); // Let INDI::CCD know we're done filling the image buffer ExposureComplete(&PrimaryCCD); }
// Grab one frame (or a stereo pair) from the DC1394 camera into img[].
// Lazily starts capture on first call; dequeues a raw frame, optionally
// deinterlaces a stereo pair, converts/debayers color data, and copies
// (or rectifies, for VIDERE stereo) into the per-image IplImage buffers.
// Returns true on success. On any failure, control jumps to _exit_ where
// the dequeued DMA buffer is re-enqueued and temporaries are freed.
// NOTE(review): code left token-identical; only comments added — the
// goto-based cleanup and conditional ownership handoff of dcFrame are
// too order-sensitive for a safe restyle.
bool CvCaptureCAM_DC1394_v2_CPP::grabFrame()
{
    dc1394capture_policy_t policy = DC1394_CAPTURE_POLICY_WAIT;
    bool code = false, isColor;
    dc1394video_frame_t *dcFrame = 0, *fs = 0;
    int i, nch;

    // Lazily start capture on the first grab.
    if (!dcCam || (!started && !startCapture()))
        return false;

    dc1394_capture_dequeue(dcCam, policy, &dcFrame);

    if (!dcFrame)
        return false;

    // Drop corrupt frames (the frames_behind check was disabled upstream).
    if (/*dcFrame->frames_behind > 1 ||*/ dc1394_capture_is_frame_corrupt(dcCam, dcFrame) == DC1394_TRUE)
    {
        goto _exit_;
    }

    // Any coding other than plain mono is treated as color.
    isColor = dcFrame->color_coding != DC1394_COLOR_CODING_MONO8 &&
              dcFrame->color_coding != DC1394_COLOR_CODING_MONO16 &&
              dcFrame->color_coding != DC1394_COLOR_CODING_MONO16S;

    if (nimages == 2)
    {
        // Stereo camera: deinterlace the interleaved pair into fs;
        // fs->image is allocated by libdc1394 and freed at _exit_.
        fs = (dc1394video_frame_t*)calloc(1, sizeof(*fs));

        //dc1394_deinterlace_stereo_frames(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);
        dc1394_deinterlace_stereo_frames_fixed(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);

        dc1394_capture_enqueue(dcCam, dcFrame); // release the captured frame as soon as possible
        dcFrame = 0;
        if (!fs->image)
            goto _exit_;
        isColor = colorStereo;
    }

    nch = isColor ? 3 : 1;

    for (i = 0; i < nimages; i++)
    {
        IplImage fhdr;
        // Local copy of the frame header, repointed at the i-th sub-image
        // of the (possibly deinterlaced) buffer; the pixel data is shared.
        dc1394video_frame_t f = fs ? *fs : *dcFrame, *fc = &f;
        f.size[1] /= nimages;
        f.image += f.size[0] * f.size[1] * i; // TODO: make it more universal
        if (isColor)
        {
            if (!frameC)
                frameC = (dc1394video_frame_t*)calloc(1, sizeof(*frameC));
            frameC->color_coding = nch == 3 ? DC1394_COLOR_CODING_RGB8 : DC1394_COLOR_CODING_MONO8;
            if (nimages == 1)
            {
                // Single image: full format conversion, then release the raw frame.
                dc1394_convert_frames(&f, frameC);
                dc1394_capture_enqueue(dcCam, dcFrame);
                dcFrame = 0;
            }
            else
            {
                // Stereo pair: debayer each half with the configured filter.
                f.color_filter = bayerFilter;
                dc1394_debayer_frames(&f, frameC, bayer);
            }
            fc = frameC;
        }
        if (!img[i])
            img[i] = cvCreateImage(cvSize(fc->size[0], fc->size[1]), 8, nch);
        // Wrap the converted pixels in a borrowed IplImage header (no copy).
        cvInitImageHeader(&fhdr, cvSize(fc->size[0], fc->size[1]), 8, nch);
        cvSetData(&fhdr, fc->image, fc->size[0]*nch);

        // Swap R&B channels:
        if (nch==3)
            cvConvertImage(&fhdr,&fhdr,CV_CVTIMG_SWAP_RB);

        if( rectify && cameraId == VIDERE && nimages == 2 )
        {
            // Lazily (re)build the rectification maps whenever the
            // output image size changes.
            if( !maps[0][0] || maps[0][0]->width != img[i]->width ||
                maps[0][0]->height != img[i]->height )
            {
                CvSize size = cvGetSize(img[i]);
                cvReleaseImage(&maps[0][0]);
                cvReleaseImage(&maps[0][1]);
                cvReleaseImage(&maps[1][0]);
                cvReleaseImage(&maps[1][1]);
                maps[0][0] = cvCreateImage(size, IPL_DEPTH_16S, 2);
                maps[0][1] = cvCreateImage(size, IPL_DEPTH_16S, 1);
                maps[1][0] = cvCreateImage(size, IPL_DEPTH_16S, 2);
                maps[1][1] = cvCreateImage(size, IPL_DEPTH_16S, 1);
                char buf[4*4096];
                // Disable rectification for good if the camera's
                // calibration info cannot be read or parsed.
                if( getVidereCalibrationInfo( buf, (int)sizeof(buf) ) &&
                    initVidereRectifyMaps( buf, maps[0], maps[1] ))
                    ;
                else
                    rectify = false;
            }
            cvRemap(&fhdr, img[i], maps[i][0], maps[i][1]);
        }
        else
            cvCopy(&fhdr, img[i]);
    }

    code = true;

_exit_:
    // Cleanup: always return the DMA buffer to the driver (if still held)
    // and free the temporary deinterlace frame.
    if (dcFrame)
        dc1394_capture_enqueue(dcCam, dcFrame);
    if (fs)
    {
        if (fs->image)
            free(fs->image);
        free(fs);
    }

    return code;
}