void App::process() { if (sources.size() == 1) { pairSources.push_back(PairFrameSource::get(sources[0], 2)); } else if (sources.size() == 2) { pairSources.push_back(PairFrameSource::get(sources[0], sources[1])); } else { cout << "Loading default frames source...\n"; pairSources.push_back(PairFrameSource::get(new VideoSource("data/pedestrian_detect/mitsubishi.avi"), 2)); pairSources.push_back(PairFrameSource::get(new VideoSource("data/optical_flow/plush1_720p_10s.m2v"), 2)); pairSources.push_back(PairFrameSource::get(new VideoSource("data/stereo_matching/8sec_Toys_Kirill_1920x1080_xvid_L.avi"), 2)); } Mat frame0, frame1; Mat gray; GpuMat d_frame0, d_frame1; GpuMat d_gray; Mat u, v; GpuMat d_u, d_v; GpuMat d_prevPts; GpuMat d_nextPts; GpuMat d_status; vector<Point2f> prevPts; vector<Point2f> nextPts; vector<uchar> status; GoodFeaturesToTrackDetector_GPU detector(4000, 0.01, 10.0); PyrLKOpticalFlow lk; while (!exited) { int64 start = getTickCount(); pairSources[curSource]->next(frame0, frame1); d_frame0.upload(frame0); d_frame1.upload(frame1); double proc_fps; if (useGPU) { cvtColor(d_frame0, d_gray, COLOR_BGR2GRAY); int64 proc_start = getTickCount(); detector(d_gray, d_prevPts); lk.sparse(d_frame0, d_frame1, d_prevPts, d_nextPts, d_status); proc_fps = getTickFrequency() / (getTickCount() - proc_start); download(d_prevPts, prevPts); download(d_nextPts, nextPts); download(d_status, status); } else { cvtColor(frame0, gray, COLOR_BGR2GRAY); int64 proc_start = getTickCount(); goodFeaturesToTrack(gray, prevPts, detector.maxCorners, detector.qualityLevel, detector.minDistance); calcOpticalFlowPyrLK(frame0, frame1, prevPts, nextPts, status, noArray()); proc_fps = getTickFrequency() / (getTickCount() - proc_start); } Mat img_to_show = frame0.clone(); drawArrows(img_to_show, prevPts, nextPts, status, Scalar(255, 0, 0)); double total_fps = getTickFrequency() / (getTickCount() - start); displayState(img_to_show, proc_fps, total_fps); imshow("demo_sparse_optical_flow", 
img_to_show); processKey(waitKey(3) & 0xff); } }
// ROS callback for incoming camera images: undistorts the frame, detects new
// corners per image subregion on the GPU (OCL), tracks the existing feature
// set into the current frame with sparse PyrLK optical flow, compacts the
// surviving features in place, and publishes them as a PCL point cloud
// (plus a downsampled preview image every showSkipNum-th frame).
// Heavily stateful: operates on file-scope globals (image0/1, feature
// buffers, counters) and ping-pongs between two frame slots.
void imageDataHandler(const sensor_msgs::Image::ConstPtr& imageData) {
  timeLast = timeCur;
  // 0.1163 s subtracted from the header stamp — presumably a fixed camera
  // capture/transport latency offset; TODO confirm against the sensor setup.
  timeCur = imageData->header.stamp.toSec() - 0.1163;

  cv_bridge::CvImageConstPtr imageDataCv = cv_bridge::toCvShare(imageData, "mono8");

  // First frame: just undistort into slot 0 and wait for a second frame.
  if (!systemInited) {
    remap(imageDataCv->image, image0, mapx, mapy, CV_INTER_LINEAR);
    oclImage0 = oclMat(image0);
    systemInited = true;
    return;
  }

  Mat imageLast, imageCur;
  oclMat oclImageLast, oclImageCur;

  // Ping-pong between the two frame slots: the slot written on the previous
  // callback becomes "last", the freshly undistorted frame becomes "cur".
  if (isOddFrame) {
    remap(imageDataCv->image, image1, mapx, mapy, CV_INTER_LINEAR);
    oclImage1 = oclMat(image1);
    imageLast = image0;
    imageCur = image1;
    oclImageLast = oclImage0;
    oclImageCur = oclImage1;
  } else {
    remap(imageDataCv->image, image0, mapx, mapy, CV_INTER_LINEAR);
    oclImage0 = oclMat(image0);
    imageLast = image1;
    imageCur = image0;
    oclImageLast = oclImage1;
    oclImageCur = oclImage0;
  }
  isOddFrame = !isOddFrame;

  // Downsample the previous frame for display and compute its Harris
  // response; harrisLast is later used to reject weak new corners.
  resize(oclImageLast, oclImageShow, showSize);
  oclImageShow.download(imageShow);
  cornerHarris(oclImageShow, oclHarrisLast, 2, 3, 0.04);
  oclHarrisLast.download(harrisLast);

  // Swap the feature and image-point buffers: last <-> cur (pointer swap,
  // no copying). featuresLast now holds the features to track forward.
  vector<Point2f> *featuresTemp = featuresLast;
  featuresLast = featuresCur;
  featuresCur = featuresTemp;

  pcl::PointCloud<ImagePoint>::Ptr imagePointsTemp = imagePointsLast;
  imagePointsLast = imagePointsCur;
  imagePointsCur = imagePointsTemp;
  imagePointsCur->clear();

  // Feature count before any new detections this frame; used below to tell
  // newly detected features (i >= recordFeatureNum) from tracked ones.
  int recordFeatureNum = totalFeatureNum;

  // Only run (expensive) corner detection every (detectionSkipNum+1)-th frame.
  detectionCount = (detectionCount + 1) % (detectionSkipNum + 1);
  if (detectionCount == detectionSkipNum) {
    oclMat oclFeaturesSub;
    for (int i = 0; i < ySubregionNum; i++) {
      for (int j = 0; j < xSubregionNum; j++) {
        int ind = xSubregionNum * i + j;
        // Top up each subregion toward maxFeatureNumPerSubregion, but only
        // when it is missing more than 20% of its quota.
        int numToFind = maxFeatureNumPerSubregion - subregionFeatureNum[ind];
        if (numToFind > maxFeatureNumPerSubregion / 5.0) {
          int subregionLeft = xBoundary + (int)(subregionWidth * j);
          int subregionTop = yBoundary + (int)(subregionHeight * i);
          Rect subregion = Rect(subregionLeft, subregionTop, (int)subregionWidth, (int)subregionHeight);

          oclFeatureDetector.maxCorners = numToFind;
          oclFeatureDetector(oclImageLast(subregion), oclFeaturesSub);
          if (oclFeaturesSub.cols > 0) {
            oclFeatureDetector.downloadPoints(oclFeaturesSub, featuresSub);
            numToFind = featuresSub.size();
          } else {
            numToFind = 0;
          }

          int numFound = 0;
          for(int k = 0; k < numToFind; k++) {
            // Convert subregion-local coords back to full-image coords.
            featuresSub[k].x += subregionLeft;
            featuresSub[k].y += subregionTop;

            // Index into the downsampled Harris map (showDSRate = full/show
            // scale factor — inferred from usage; confirm with its definition).
            int xInd = (featuresSub[k].x + 0.5) / showDSRate;
            int yInd = (featuresSub[k].y + 0.5) / showDSRate;

            // Keep only corners with a non-negligible Harris response,
            // assigning each a globally unique, ever-increasing ID.
            if (harrisLast.at<float>(yInd, xInd) > 1e-7) {
              featuresLast->push_back(featuresSub[k]);
              featuresInd.push_back(featuresIndFromStart);

              numFound++;
              featuresIndFromStart++;
            }
          }
          totalFeatureNum += numFound;
          subregionFeatureNum[ind] += numFound;
        }
      }
    }
  }

  // Track all features from the last frame into the current one (OCL PyrLK).
  if (totalFeatureNum > 0) {
    // Wrap the vector's storage as a 1xN CV_32FC2 Mat without copying.
    Mat featuresLastMat(1, totalFeatureNum, CV_32FC2, (void*)&(*featuresLast)[0]);
    oclMat oclFeaturesLast(featuresLastMat);
    oclMat oclFeaturesCur, oclFeaturesStatus;

    oclOpticalFlow.sparse(oclImageLast, oclImageCur, oclFeaturesLast, oclFeaturesCur, oclFeaturesStatus);

    if (oclFeaturesCur.cols > 0 && oclFeaturesCur.cols == oclFeaturesStatus.cols) {
      download(oclFeaturesCur, *featuresCur);
      download(oclFeaturesStatus, featuresStatus);
      totalFeatureNum = featuresCur->size();
    } else {
      // Tracking failed entirely; drop all features for this frame.
      totalFeatureNum = 0;
    }
  }

  // Reset per-subregion counts; they are recomputed during compaction below.
  for (int i = 0; i < totalSubregionNum; i++) {
    subregionFeatureNum[i] = 0;
  }

  ImagePoint point;
  int featureCount = 0;
  // Compact the surviving features in place: write slot featureCount while
  // reading slot i (featureCount <= i always, so this is safe).
  for (int i = 0; i < totalFeatureNum; i++) {
    double trackDis = sqrt(((*featuresLast)[i].x - (*featuresCur)[i].x)
                    * ((*featuresLast)[i].x - (*featuresCur)[i].x) + ((*featuresLast)[i].y - (*featuresCur)[i].y)
                    * ((*featuresLast)[i].y - (*featuresCur)[i].y));

    // Keep a feature only if: it did not jump farther than maxTrackDis, its
    // new position stays inside the image border margins, and PyrLK reported
    // a successful track for it.
    if (!(trackDis > maxTrackDis || (*featuresCur)[i].x < xBoundary ||
      (*featuresCur)[i].x > imageWidth - xBoundary || (*featuresCur)[i].y < yBoundary ||
      (*featuresCur)[i].y > imageHeight - yBoundary) && featuresStatus[i]) {

      // Re-bin by the feature's position in the LAST frame and enforce the
      // per-subregion cap so no region dominates the feature budget.
      int xInd = (int)(((*featuresLast)[i].x - xBoundary) / subregionWidth);
      int yInd = (int)(((*featuresLast)[i].y - yBoundary) / subregionHeight);
      int ind = xSubregionNum * yInd + xInd;

      if (subregionFeatureNum[ind] < maxFeatureNumPerSubregion) {
        (*featuresCur)[featureCount] = (*featuresCur)[i];
        (*featuresLast)[featureCount] = (*featuresLast)[i];
        featuresInd[featureCount] = featuresInd[i];

        // Normalized (negated) image-plane coordinates using intrinsics
        // kImage: fx=kImage[0], cx=kImage[2], fy=kImage[4], cy=kImage[5]
        // (row-major 3x3 K — inferred from indexing; confirm upstream).
        point.u = -((*featuresCur)[featureCount].x - kImage[2]) / kImage[0];
        point.v = -((*featuresCur)[featureCount].y - kImage[5]) / kImage[4];
        point.ind = featuresInd[featureCount];
        imagePointsCur->push_back(point);

        // Features detected only this frame (i >= recordFeatureNum) have no
        // entry in imagePointsLast yet — add their last-frame position too.
        if (i >= recordFeatureNum) {
          point.u = -((*featuresLast)[featureCount].x - kImage[2]) / kImage[0];
          point.v = -((*featuresLast)[featureCount].y - kImage[5]) / kImage[4];
          imagePointsLast->push_back(point);
        }

        featureCount++;
        subregionFeatureNum[ind]++;
      }
    }
  }
  // Shrink all parallel arrays to the compacted size.
  totalFeatureNum = featureCount;
  featuresCur->resize(totalFeatureNum);
  featuresLast->resize(totalFeatureNum);
  featuresInd.resize(totalFeatureNum);

  // Publish the previous frame's feature points, stamped with its time.
  sensor_msgs::PointCloud2 imagePointsLast2;
  pcl::toROSMsg(*imagePointsLast, imagePointsLast2);
  imagePointsLast2.header.stamp = ros::Time().fromSec(timeLast);
  imagePointsLastPubPointer->publish(imagePointsLast2);

  // Publish the downsampled preview image every (showSkipNum+1)-th frame.
  showCount = (showCount + 1) % (showSkipNum + 1);
  if (showCount == showSkipNum) {
    bridge.image = imageShow;
    bridge.encoding = "mono8";
    sensor_msgs::Image::Ptr imageShowPointer = bridge.toImageMsg();
    imageShowPubPointer->publish(imageShowPointer);
  }
}