void checkSelfMultiplicationFactor() { QFETCH(float, xO); QFETCH(float, yO); QFETCH(float, zO); QFETCH(float, wO); QFETCH(float, factor); QFETCH(float, xR); QFETCH(float, yR); QFETCH(float, zR); QFETCH(float, wR); // GIVEN QVector4D vo(xO, yO, zO, wO); // WHEN vo *= factor; // THEN QCOMPARE(vo.x(), xR); QCOMPARE(vo.y(), yR); QCOMPARE(vo.z(), zR); QCOMPARE(vo.w(), wR); }
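The QFETCH-based vector tests in this section (checkSelfMultiplicationFactor here, checkSelfAddition further down) rely on Qt Test's data-driven mechanism: each test slot needs a companion _data() slot that registers columns matching the QFETCH names and supplies rows. A minimal sketch of such a companion slot, with illustrative row values only, could look like this:
void checkSelfMultiplicationFactor_data()
{
    QTest::addColumn<float>("xO");
    QTest::addColumn<float>("yO");
    QTest::addColumn<float>("zO");
    QTest::addColumn<float>("wO");
    QTest::addColumn<float>("factor");
    QTest::addColumn<float>("xR");
    QTest::addColumn<float>("yR");
    QTest::addColumn<float>("zR");
    QTest::addColumn<float>("wR");
    // one illustrative row: (1, 2, 3, 4) scaled by 2
    QTest::newRow("scaleBy2") << 1.0f << 2.0f << 3.0f << 4.0f
                              << 2.0f
                              << 2.0f << 4.0f << 6.0f << 8.0f;
}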
void BedMerge::ReportMergedScores(const vector<string> &scores) { // set up a VectorOps instance for the list of scores. // VectorOps methods are used for each possible operation. VectorOps vo(scores); std::stringstream buffer; if (scores.size() > 0) { if (_scoreOp == "sum") buffer << setprecision (PRECISION) << vo.GetSum(); else if (_scoreOp == "min") buffer << setprecision (PRECISION) << vo.GetMin(); else if (_scoreOp == "max") buffer << setprecision (PRECISION) << vo.GetMax(); else if (_scoreOp == "mean") buffer << setprecision (PRECISION) << vo.GetMean(); else if (_scoreOp == "median") buffer << setprecision (PRECISION) << vo.GetMedian(); else if (_scoreOp == "mode") buffer << setprecision (PRECISION) << vo.GetMode(); else if (_scoreOp == "antimode") buffer << setprecision (PRECISION) << vo.GetAntiMode(); else if (_scoreOp == "collapse") buffer << setprecision (PRECISION) << vo.GetCollapse(_delimiter); cout << "\t" << buffer.str(); } else { cerr << endl << "*****" << endl << "*****ERROR: No scores found to report for the -scores option. Exiting." << endl << "*****" << endl; exit(1); } }
Answer CoCoAGBModule<Settings>::checkCore() { if (Settings::always_return_unknown) return Answer::UNKNOWN; if (mGBPolys.empty()) return Answer::UNKNOWN; if (mLastBasis.empty()) { std::vector<Poly> polys; for (const auto& p: mGBPolys) { polys.emplace_back(p.second); }; try { VariableOrdering vo(polys); carl::CoCoAAdaptor<Poly> cocoa(polys); cocoa.resetVariableOrdering(vo.getOrdering()); SMTRAT_LOG_DEBUG("smtrat.cocoagb", "Ordering: " << vo.getOrdering()); SMTRAT_LOG_DEBUG("smtrat.cocoagb", "Computing GB of " << polys); mLastBasis = cocoa.GBasis(polys); SMTRAT_LOG_DEBUG("smtrat.cocoagb", "-> " << mLastBasis); } catch (const CoCoA::ErrorInfo& e) { std::cerr << e << std::endl; } } else { SMTRAT_LOG_DEBUG("smtrat.cocoagb", "Reusing basis from last call."); } if (mLastBasis.size() == 1 && carl::isOne(mLastBasis[0])) { SMTRAT_LOG_DEBUG("smtrat.cocoagb", "Returning UNSAT"); generateTrivialInfeasibleSubset(); return Answer::UNSAT; } SMTRAT_LOG_DEBUG("smtrat.cocoagb", "Returning Unknown"); return Answer::UNKNOWN; }
void checkSelfAddition() { QFETCH(float, xO); QFETCH(float, yO); QFETCH(float, zO); QFETCH(float, wO); QFETCH(float, xA); QFETCH(float, yA); QFETCH(float, zA); QFETCH(float, wA); QFETCH(float, xR); QFETCH(float, yR); QFETCH(float, zR); QFETCH(float, wR); // GIVEN QVector4D vo(xO, yO, zO, wO); // WHEN vo += QVector4D(xA, yA, zA, wA); // THEN QCOMPARE(vo.x(), xR); QCOMPARE(vo.y(), yR); QCOMPARE(vo.z(), zR); QCOMPARE(vo.w(), wR); }
int main(int argc, char** argv) { ros::init(argc, argv, "VisualOdometry"); ros::NodeHandle nh; ros::NodeHandle nh_private("~"); ccny_rgbd::VisualOdometry vo(nh, nh_private); ros::spin(); return 0; }
node* module::_create_node(const string &node_name, Node_type node_type, cell* cell_ptr, module* module_ptr) { int cell_input_num = (node_type == TYPE_CELL) ? cell_ptr->input_num : module_ptr->get_input_num(); int cell_output_num = (node_type == TYPE_CELL) ? 1 : module_ptr->get_output_num(); vector<net*> vi(cell_input_num, NULL); vector<net*> vo(cell_output_num, NULL); return new node(node_name, this, node_type, cell_ptr, module_ptr, vi, vo); }
void trainCard(const Image<PixRGB<byte> > &img, const std::string &cardName) { rutz::shared_ptr<VisualObject> vo(new VisualObject(cardName, "NULL", img, Point2D<int>(-1,-1), std::vector<double>(), std::vector< rutz::shared_ptr<Keypoint> >(), USECOLOR)); itsObjectDB.addObject(vo, false); itsObjectDB.saveTo("cards.vdb"); }
void zoom() { int x,y; double nx,ny,np,dx,dy,r,theta,phi; for (x=0;x<X_RES;x++) for(y=0;y<Y_RES;y++) { dx=(0.5+(double)(x-(X_RES/2)))/MAG; dy=(0.5+(double)(y-(Y_RES/2)))/MAG; r=sqrt(dx*dx+dy*dy); theta=atan2(dy,dx); phi=theta+TWIST; ny=r*sin(phi); nx=r*cos(phi); nx=nx+(double)(X_RES/2+X_OFF); ny=ny+(double)(Y_RES/2+Y_OFF); // printf("x: %d nx: %f y: %d ny: %f\n",x,nx,y,ny); np=0; np+=vo((int)nx+1,(int)ny+1) *(nx-(double)((int)nx))*(ny-(double)((int)ny)); //top-right pixel np+=vo((int)nx,(int)ny+1) *(PIXW-(nx-(double)((int)nx)))*(ny-(double)((int)ny)); //top-left pixel np+=vo((int)nx+1,(int)ny) *(nx-(double)((int)nx))*(PIXW-(ny-(double)((int)ny))); //bot-right pixel np+=vo((int)nx,(int)ny) *(PIXW-(double)(nx-(double)((int)nx)))*(PIXW-(ny-(double)((int)ny))); //bot-left pixel np*=1.0/(PIXW*PIXW); //compensates for size of pixel otherwise 'losing' light from the feedback newpic[x][y]=(int)np; // printf("np: %f\n",np); } }
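The four vo(...) terms in zoom() weight the four neighbouring source pixels by their overlap with the sample point at (nx, ny), i.e. standard bilinear interpolation with a pixel width of PIXW. A standalone sketch of the same weighting for a unit pixel width, using a hypothetical src accessor in place of vo, is:
#include <functional>

// Hypothetical illustration of the bilinear weighting used in zoom(),
// assuming PIXW == 1. 'src' stands in for the vo() pixel accessor; fx and fy
// are the fractional offsets of the sample point within the cell whose
// lower corner is (ix, iy).
double bilinearSample(const std::function<double(int, int)>& src,
                      int ix, int iy, double fx, double fy)
{
    return src(ix,     iy    ) * (1.0 - fx) * (1.0 - fy)
         + src(ix + 1, iy    ) * fx         * (1.0 - fy)
         + src(ix,     iy + 1) * (1.0 - fx) * fy
         + src(ix + 1, iy + 1) * fx         * fy;
}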
void VO2Controller::render(HGE * hge) { Mover::Driver::render(hge); float size = 5 * mover->getMaster()->getSphereSize(); Pose pose = mover->getGlobalPose(); for(auto it=obstacles.begin();it!=obstacles.end();++it) { VelocityObstacle vo(pose.getPosition(),size,it->first.getPosition(0),it->second,it->first.velocity); drawVO(hge,vo, size, 10); } drawRays(hge, pose.getPosition(), rays); // // //for(auto it=segments.begin();it!=segments.end();++it) // // drawArc(object->position,object->maxVelocity*1.1,*it); }
// /// Gets the x- and y- extents of the viewport, equalizes the logical and screen /// points, and resets the x- and y- extents of the viewport. // void TPrintPreviewDC::ReOrg() { // Get the viewport origin of the printer DC and transform it into // screen device units. It is assumed that the viewport extents of // the screen DC represent the whole previewed page. TPoint pvo = PrnDC.GetViewportOrg(); TSize page = GetPageSizeInPixels(PrnDC); TSize ve; ::GetViewportExtEx(GetHDC(), &ve); // screen extents TPoint vo(MulDiv(pvo.x, ve.cx, page.cx), MulDiv(pvo.y, ve.cy, page.cy)); // Use the same logical origin as the printout. TPoint wo = PrnDC.GetWindowOrg(); // Set the origins. ::SetWindowOrgEx(GetHDC(), wo.x, wo.y, 0); ::SetViewportOrgEx(GetHDC(), vo.x, vo.y, 0); }
std::string matchObject(Image<PixRGB<byte> > &ima){ //find object in the database std::vector< rutz::shared_ptr<VisualObjectMatch> > matches; rutz::shared_ptr<VisualObject> vo(new VisualObject("PIC", "PIC", ima, Point2D<int>(-1,-1), std::vector<float>(), std::vector< rutz::shared_ptr<Keypoint> >(), USECOLOR)); const uint nmatches = vdb.getObjectMatches(vo, matches, VOMA_SIMPLE, 10000U, //max objs to return 0.5F, //keypoint distance score default 0.5F 0.5F, //affine distance score default 0.5F 1.0F, //minscore default 1.0F 3U, //min # of keypoint match 100U, //keypoint selection threshold false //sort by preattentive ); std::string objName; //LINFO("Found %i", nmatches); if (nmatches > 0 ){ rutz::shared_ptr<VisualObject> obj; //so we will have a ref to the last matched obj rutz::shared_ptr<VisualObjectMatch> vom; //for(unsigned int i=0; i< nmatches; i++){ for(unsigned int i=0; i< 1; i++){ vom = matches[i]; obj = vom->getVoTest(); // LINFO("### Object match with '%s' score=%f ID:%i", // obj->getName().c_str(), vom->getScore(), objId); objName = obj->getName(); } } return objName; }
std::string recCard(const Image<PixRGB<byte> > &img) { std::string cardName; std::vector< rutz::shared_ptr<VisualObjectMatch> > matches; rutz::shared_ptr<VisualObject> vo(new VisualObject("PIC", "PIC", img, Point2D<int>(-1,-1), std::vector<double>(), std::vector< rutz::shared_ptr<Keypoint> >(), USECOLOR)); const uint nm = itsObjectDB.getObjectMatches(vo, matches, VOMA_SIMPLE, 100U, //max objs to return 0.5F, //keypoint distance score default 0.5F 0.5F, //affine distance score default 0.5F 1.0F, //minscore default 1.0F 3U, //min # of keypoint match 100U, //keypoint selection threshold false //sort by preattentive ); LINFO("Found %i", nm); if (nm > 0) { cardName = matches[0]->getVoTest()->getName(); LINFO("***** %u object recognition match(es) *****", nm); for (uint i = 0 ; i < nm; i ++) LINFO(" Match with '%s' [score = %f]", matches[i]->getVoTest()->getName().c_str(), matches[i]->getScore()); } else LINFO("***** Could not identify attended object! *****"); return cardName; }
template<class T> void do_test() { std::cout << "Size\ttable (c/e)\ttable (s)\tvector (c/e)\tvector (s)"; std::cout << "\tOvec (c/e)\tvOvec (s) \tG(c/e)\tG(s)\tOG(c/e)\tOG(s)\n"; for(int N=1;N<=4096;N*=2) { std::cout.precision(3); std::cout << N << "\t"; table_test<T> tt(N,N,-.28319, .28319); nt2::unit::benchmark_result<nt2::details::cycles_t> dv; nt2::unit::perform_benchmark( tt, 1., dv); nt2::unit::benchmark_result<double> tv; nt2::unit::perform_benchmark( tt, 1., tv); std::cout << std::scientific << dv.median/(double)(N*N) << "\t"; std::cout << std::scientific << tv.median << "\t"; vector_test<T> vv(N,N,-.28319, .28319); nt2::unit::benchmark_result<nt2::details::cycles_t> dw; nt2::unit::perform_benchmark( vv, 1., dw); nt2::unit::benchmark_result<double> tw; nt2::unit::perform_benchmark( vv, 1., tw); std::cout << std::scientific << dw.median/(double)(N*N) << "\t"; std::cout << std::scientific << tw.median << "\t"; vector_omp_test<T> vo(N,N,-.28319, .28319); nt2::unit::benchmark_result<nt2::details::cycles_t> dow; nt2::unit::perform_benchmark( vo, 1., dow); nt2::unit::benchmark_result<double> tow; nt2::unit::perform_benchmark( vo, 1., tow); std::cout << std::scientific << dow.median/(double)(N*N) << "\t"; std::cout << std::scientific << tow.median << "\t"; std::cout << std::fixed << (double)dw.median/dv.median << "\t"; std::cout << std::fixed << (double)tw.median/tv.median << "\t"; std::cout << std::fixed << (double)dow.median/dv.median << "\t"; std::cout << std::fixed << (double)tow.median/tv.median << "\n"; } }
string BedMap::MapHits(const BED &a, const vector<BED> &hits) { ostringstream output; if (hits.size() == 0) return _nullValue; ExtractColumnFromHits(hits); VectorOps vo(_column_vec); if (_operation == "sum") output << setprecision (PRECISION) << vo.GetSum(); else if (_operation == "mean") output << setprecision (PRECISION) << vo.GetMean(); else if (_operation == "median") output << setprecision (PRECISION) << vo.GetMedian(); else if (_operation == "min") output << setprecision (PRECISION) << vo.GetMin(); else if (_operation == "max") output << setprecision (PRECISION) << vo.GetMax(); else if (_operation == "mode") output << vo.GetMode(); else if (_operation == "antimode") output << vo.GetAntiMode(); else if (_operation == "count") output << setprecision (PRECISION) << vo.GetCount(); else if (_operation == "count_distinct") output << setprecision (PRECISION) << vo.GetCountDistinct(); else if (_operation == "collapse") output << vo.GetCollapse(); else if (_operation == "distinct") output << vo.GetDistinct(); else { cerr << "ERROR: " << _operation << " is an unrecognized operation\n"; exit(1); } _column_vec.clear(); return output.str(); }
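ReportMergedScores and MapHits both pick a VectorOps summary with a long if/else chain over the operation name. As a hedged design sketch (hypothetical names, not the actual bedtools API), the same dispatch can be kept in a lookup table, which leaves unknown operations with a single failure path:
#include <functional>
#include <map>
#include <stdexcept>
#include <string>

// Hypothetical stand-in for the VectorOps summary interface.
struct Summary { double sum, mean, median; };

// Table-driven dispatch: operation name -> accessor.
double applyOperation(const std::string& op, const Summary& v)
{
    static const std::map<std::string, std::function<double(const Summary&)>> ops = {
        { "sum",    [](const Summary& s) { return s.sum;    } },
        { "mean",   [](const Summary& s) { return s.mean;   } },
        { "median", [](const Summary& s) { return s.median; } },
    };
    const auto it = ops.find(op);
    if (it == ops.end()) throw std::runtime_error("unrecognized operation: " + op);
    return it->second(v);
}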
void operator()( const char* name )const { vo(name,(val.*member)); }
/* XWinManaged xwin(Dims(WIDTH,HEIGHT*2), 1, 1, "Test SIFT"); rutz::shared_ptr<VisualObject> objTop, objBottom; void showObjs(rutz::shared_ptr<VisualObject> obj1, rutz::shared_ptr<VisualObject> obj2){ //return ; Image<PixRGB<byte> > keyIma = rescale(obj1->getKeypointImage(), WIDTH, HEIGHT); objTop = obj1; if (obj2.is_valid()){ keyIma = concatY(keyIma, rescale(obj2->getKeypointImage(), WIDTH, HEIGHT)); objBottom = obj2; } xwin.drawImage(keyIma); } void showKeypoint(rutz::shared_ptr<VisualObject> obj, int keypi, Keypoint::CHANNEL channel = Keypoint::ORI){ char winTitle[255]; switch(channel){ case Keypoint::ORI: sprintf(winTitle, "Keypoint view (Channel ORI)"); break; case Keypoint::COL: sprintf(winTitle, "Keypoint view (Channel COL)"); break; default: sprintf(winTitle, "Keypoint view (Channel )"); break; } rutz::shared_ptr<Keypoint> keyp = obj->getKeypoint(keypi); float x = keyp->getX(); float y = keyp->getY(); float s = keyp->getS(); float o = keyp->getO(); float m = keyp->getM(); uint FVlength = keyp->getFVlength(channel); if (FVlength<=0) return; //dont show the Keypoint if we dont have a FV XWinManaged *xwinKey = new XWinManaged(Dims(WIDTH*2,HEIGHT), -1, -1, winTitle); //draw the circle around the keypoint const float sigma = 1.6F * powf(2.0F, s / float(6 - 3)); const float sig = 1.5F * sigma; const int rad = int(3.0F * sig); Image<PixRGB<byte> > img = obj->getImage(); Point2D<int> loc(int(x + 0.5F), int(y + 0.5F)); drawCircle(img, loc, rad, PixRGB<byte>(255, 0, 0)); drawDisk(img, loc, 2, PixRGB<byte>(255,0,0)); s=s*5.0F; //mag for scale if (s > 0.0f) drawLine(img, loc, Point2D<int>(int(x + s * cosf(o) + 0.5F), int(y + s * sinf(o) + 0.5F)), PixRGB<byte>(255, 0, 0)); char info[255]; sprintf(info, "(%0.2f,%0.2f) s=%0.2f o=%0.2f m=%0.2f", x, y, s, o, m); writeText(img, Point2D<int>(0, HEIGHT-20), info, PixRGB<byte>(255), PixRGB<byte>(127)); //draw the vectors from the features vectors Image<PixRGB<byte> > fvDisp(WIDTH, HEIGHT, NO_INIT); fvDisp.clear(PixRGB<byte>(255, 255, 255)); int xBins = int((float)WIDTH/4); int yBins = int((float)HEIGHT/4); drawGrid(fvDisp, xBins, yBins, 1, 1, PixRGB<byte>(0, 0, 0)); switch (channel){ case Keypoint::ORI: for (int xx=0; xx<4; xx++){ for (int yy=0; yy<4; yy++){ for (int oo=0; oo<8; oo++){ Point2D<int> loc(xBins/2+(xBins*xx), yBins/2+(yBins*yy)); byte mag = keyp->getFVelement(xx*32+yy*8+oo, channel); mag = mag/4; drawDisk(fvDisp, loc, 2, PixRGB<byte>(255, 0, 0)); drawLine(fvDisp, loc, Point2D<int>(int(loc.i + mag*cosf(oo*M_PI/4)), int(loc.j + mag*sinf(oo*M_PI/4))), PixRGB<byte>(255, 0, 0)); } } } break; case Keypoint::COL: for (int xx=0; xx<4; xx++){ for (int yy=0; yy<4; yy++){ for (int cc=0; cc<3; cc++){ Point2D<int> loc(xBins/2+(xBins*xx), yBins/2+(yBins*yy)); byte mag = keyp->getFVelement(xx*12+yy*3+cc, channel); mag = mag/4; drawDisk(fvDisp, loc, 2, PixRGB<byte>(255, 0, 0)); drawLine(fvDisp, loc, Point2D<int>(int(loc.i + mag*cosf(-1*cc*M_PI/2)), int(loc.j + mag*sinf(-1*cc*M_PI/2))), PixRGB<byte>(255, 0, 0)); } } } break; default: break; } Image<PixRGB<byte> > disp = img; disp = concatX(disp, fvDisp); xwinKey->drawImage(disp); while(!xwinKey->pressedCloseButton()){ usleep(100); } delete xwinKey; } void analizeImage(){ int key = -1; while(key != 24){ // q to quit window key = xwin.getLastKeyPress(); Point2D<int> point = xwin.getLastMouseClick(); if (point.i > -1 && point.j > -1){ //get the right object rutz::shared_ptr<VisualObject> obj; if (point.j < HEIGHT){ obj = objTop; } else { obj = objBottom; point.j = point.j - HEIGHT; } LINFO("ClickInfo: key = %i, p=%i,%i", key, point.i, point.j); //find the keypoint for(uint i=0; i<obj->numKeypoints(); i++){ rutz::shared_ptr<Keypoint> keyp = obj->getKeypoint(i); float x = keyp->getX(); float y = keyp->getY(); if ( (point.i < (int)x + 5 && point.i > (int)x - 5) && (point.j < (int)y + 5 && point.j > (int)y - 5)){ showKeypoint(obj, i, Keypoint::ORI); showKeypoint(obj, i, Keypoint::COL); } } } } } */
int main(const int argc, const char **argv) { MYLOGVERB = LOG_INFO; ModelManager manager("Test SIFT"); nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager)); manager.addSubComponent(ifs); nub::ref<OutputFrameSeries> ofs(new OutputFrameSeries(manager)); manager.addSubComponent(ofs); if (manager.parseCommandLine( (const int)argc, (const char**)argv, "<database file> <trainingLabel>", 2, 2) == false) return 0; manager.start(); Timer masterclock; // master clock for simulations Timer timer; const char *vdbFile = manager.getExtraArg(0).c_str(); const char *trainingLabel = manager.getExtraArg(1).c_str(); int numMatches = 0; //the number of correct matches int totalObjects = 0; //the number of objects presented to the network int uObjId = 0; //a unique obj id for sift bool train = false; //load the database file // if (!train) vdb.loadFrom(std::string(vdbFile)); while(1) { Image< PixRGB<byte> > inputImg; const FrameState is = ifs->updateNext(); if (is == FRAME_COMPLETE) break; //grab the images GenericFrame input = ifs->readFrame(); if (!input.initialized()) break; inputImg = input.asRgb(); totalObjects++; ofs->writeRGB(inputImg, "Input", FrameInfo("Input", SRC_POS)); if (train) { //add the object to the database char objName[255]; sprintf(objName, "%s_%i", trainingLabel, uObjId); uObjId++; rutz::shared_ptr<VisualObject> vo(new VisualObject(objName, "NULL", inputImg, Point2D<int>(-1,-1), std::vector<float>(), std::vector< rutz::shared_ptr<Keypoint> >(), USECOLOR)); vdb.addObject(vo); } else { //get the object classification std::string objName; std::string tmpName = matchObject(inputImg); int i = tmpName.find("_"); objName.assign(tmpName, 0, i); LINFO("Object name %s", objName.c_str()); printf("%i %s\n", ifs->frame(), objName.c_str()); if (objName == trainingLabel) numMatches++; //printf("objid %i:class %i:rate=%0.2f\n", // objData.description.c_str(), objData.id, cls, // (float)numMatches/(float)totalObjects); } } if (train) { printf("Trained on %i objects\n", totalObjects); printf("Object in db %i\n" , vdb.numObjects()); vdb.saveTo(std::string(vdbFile)); } else { printf("Classification Rate: %i/%i %0.2f\n", numMatches, totalObjects, (float)numMatches/(float)totalObjects); } }
int main(const int iArgc, const char** iArgv) { std::string rootDir; ConciseArgs opt(iArgc, (char**)iArgv); opt.add(rootDir, "r", "root_dir", "input root directory"); opt.parse(); // set up vo std::shared_ptr<drc::BotWrapper> botWrapper(new drc::BotWrapper()); std::shared_ptr<drc::LcmWrapper> lcmWrapper(new drc::LcmWrapper(botWrapper->getLcm())); auto boostLcm = lcmWrapper->getBoost(); auto config = new voconfig::KmclConfiguration(botWrapper->getBotParam(), "CAMERA"); boost::shared_ptr<fovis::StereoCalibration> calib(config->load_stereo_calibration()); FoVision vo(boostLcm, calib); // find file timestamps std::ifstream ifs(rootDir + "/cam_poses.txt"); std::vector<int64_t> times; std::string line; while (std::getline(ifs,line)) { std::istringstream iss(line); int64_t utime; iss >> utime; times.push_back(utime); } // iterate std::string poseFileName = rootDir + "/fovis_poses.txt"; std::ofstream ofs(poseFileName); ofs << std::setprecision(15); for (auto utime : times) { std::string fileName; std::ostringstream oss; // read image oss << rootDir << "/color_" << utime << ".png"; cv::Mat img = cv::imread(oss.str()); cv::cvtColor(img,img,CV_RGB2GRAY); // read disparity oss.str(""); oss.clear(); oss << rootDir << "/disp_" << utime << ".float"; std::ifstream ifs(oss.str(), std::ios::binary); int width, height; ifs.read((char*)&width, sizeof(width)); ifs.read((char*)&height, sizeof(height)); std::vector<float> vals(width*height); ifs.read((char*)vals.data(), width*height*sizeof(float)); ifs.close(); cv::Mat disp(height,width,CV_32FC1,vals.data()); // do fovis vo.doOdometry(img.data, (float*)disp.data, utime); Eigen::Isometry3d delta; auto worldToCamera = Eigen::Isometry3d::Identity(); Eigen::MatrixXd cov; fovis::MotionEstimateStatusCode status; vo.getMotion(delta, cov, status); worldToCamera = worldToCamera*delta; vo.fovis_stats(); // write pose auto cameraPose = worldToCamera.inverse(); auto& m = cameraPose; ofs << utime << " " << m(0,0) << " " << m(0,1) << " " << m(0,2) << " " << m(0,3) << " " << m(1,0) << " " << m(1,1) << " " << m(1,2) << " " << m(1,3) << " " << m(2,0) << " " << m(2,1) << " " << m(2,2) << " " << m(2,3) << std::endl; } ofs.close(); return 1; }
void ImageProcessing::SegmentColours( FrameBuffer * frame, FrameBuffer * outFrame, unsigned int threshold, unsigned int minLength, unsigned int minSize, unsigned int subSample, ColourDefinition const & target, RawPixel const & mark, std::vector<VisionObject> & results ) { FrameBufferIterator it( frame ); FrameBufferIterator oit( outFrame ); Pixel cPixel; RawPixel oPixel; // unsigned int len; FloodFillState state; unsigned int count; for( unsigned int row = 0; row < frame->height; row = row + subSample ) { it.goPosition(row, subSample); oit.goPosition(row, subSample); count = 0; for( unsigned int col = subSample; col < frame->width; col = col + subSample, it.goRight( subSample ),oit.goRight( subSample ) ) { oit.getPixel( & oPixel ); if ( oPixel == RawPixel( 0, 0, 0 ) ) { it.getPixel( & cPixel ); if ( target.isMatch( cPixel ) ) { count++; } else { count = 0; } if ( count >= minLength ) { state.initialize(); doFloodFill( frame, outFrame, Point( col, row), cPixel, threshold, & target, subSample, & state ); #ifdef XX_DEBUG if ( state.size() > minSize ) { std::cout << "Flood fill returns size " << state.size() << std::endl; } #endif if ( state.size() > minSize ) { unsigned int tlx = state.bBox().topLeft().x(); unsigned int tly = state.bBox().topLeft().y(); unsigned int brx = state.bBox().bottomRight().x(); unsigned int bry = state.bBox().bottomRight().y(); drawBresenhamLine( outFrame, tlx, tly, tlx, bry, mark ); drawBresenhamLine( outFrame, tlx, bry, brx, bry, mark ); drawBresenhamLine( outFrame, brx, bry, brx, tly, mark ); drawBresenhamLine( outFrame, brx, tly, tlx, tly, mark ); drawBresenhamLine( frame, tlx, tly, tlx, bry, mark ); drawBresenhamLine( frame, tlx, bry, brx, bry, mark ); drawBresenhamLine( frame, brx, bry, brx, tly, mark ); drawBresenhamLine( frame, brx, tly, tlx, tly, mark ); // swapColours( outFrame, 0, state.bBox(), 1, ColourDefinition( Pixel(colour), Pixel(colour) ), state.averageColour() ); VisionObject vo( target.name, state.size(), state.x(), state.y(), state.averageColour(), state.bBox() ); std::vector<VisionObject>::iterator i; for( i = results.begin(); i != results.end(); ++i) { if ( (*i).size < vo.size ) { break; } } results.insert(i, vo ); } count = 0; } } else { count = 0; } } } }
int main(const int argc, const char **argv) { MYLOGVERB = LOG_INFO; mgr = new ModelManager("Test ObjRec"); if (mgr->parseCommandLine( (const int)argc, (const char**)argv, "<vdb file> <server ip>", 2, 2) == false) return 1; mgr->start(); // catch signals and redirect them to terminate for clean exit: signal(SIGHUP, terminateProc); signal(SIGINT, terminateProc); signal(SIGQUIT, terminateProc); signal(SIGTERM, terminateProc); signal(SIGALRM, terminateProc); //get command line options const char *vdbFile = mgr->getExtraArg(0).c_str(); const char *server_ip = mgr->getExtraArg(1).c_str(); bool train = false; LINFO("Loading db from %s\n", vdbFile); //vdb.loadFrom(std::string(vdbFile)); xwin = new XWinManaged(Dims(256,256), -1, -1, "ILab Robot Head Demo"); labelServer = nv2_label_server_create(9930, server_ip, 9931); nv2_label_server_set_verbosity(labelServer,1); //allow warnings int send_interval = 1; while(!terminate) { Point2D clickLoc = xwin->getLastMouseClick(); if (clickLoc.isValid()) train = !train; struct nv2_image_patch p; const enum nv2_image_patch_result res = nv2_label_server_get_current_patch(labelServer, &p); std::string objName; if (res == NV2_IMAGE_PATCH_END) { fprintf(stdout, "ok, quitting\n"); break; } else if (res == NV2_IMAGE_PATCH_NONE) { usleep(10000); continue; } else if (res == NV2_IMAGE_PATCH_VALID && p.type == NV2_PIXEL_TYPE_RGB24) { Image<PixRGB<byte> > img(p.width, p.height, NO_INIT); memcpy(img.getArrayPtr(), p.data, p.width*p.height*3); Image<PixRGB<byte> > inputImg = rescale(img, 256, 256); std::string objName = matchObject(inputImg); Image<PixRGB<byte> > disp(320, 240, ZEROS); xwin->drawImage(inputImg); if (objName == "nomatch") { if (train) { printf("Is this %s\n", objName.c_str()); std::string tmp; std::getline(std::cin, tmp); if (tmp == "exit") break; if (tmp == "no") { printf("Can you tell me what this is?\n"); std::getline(std::cin, objName); rutz::shared_ptr<VisualObject> vo(new VisualObject(objName.c_str(), "NULL", inputImg, Point2D(-1,-1), std::vector<double>(), std::vector< rutz::shared_ptr<Keypoint> >(), USECOLOR)); vdb.addObject(vo); vdb.saveTo(vdbFile); } } } else { printf("Object is %s\n", objName.c_str()); struct nv2_patch_label l; l.protocol_version = NV2_LABEL_PROTOCOL_VERSION; l.patch_id = p.id; snprintf(l.source, sizeof(l.source), "%s", "ObjRec"); snprintf(l.name, sizeof(l.name), "%s", // (%ux%u #%u)", objName.c_str()); //(unsigned int) p.width, //(unsigned int) p.height, //(unsigned int) p.id); snprintf(l.extra_info, sizeof(l.extra_info), "auxiliary information"); if (l.patch_id % send_interval == 0) { nv2_label_server_send_label(labelServer, &l); fprintf(stdout, "sent label '%s (%s)'\n", l.name, l.extra_info); } else { fprintf(stdout, "DROPPED label '%s (%s)'\n", l.name, l.extra_info); } } nv2_image_patch_destroy(&p); } } nv2_label_server_destroy(labelServer); }
std::string matchObject(Image<PixRGB<byte> > &ima) { //find object in the database std::vector< rutz::shared_ptr<VisualObjectMatch> > matches; rutz::shared_ptr<VisualObject> vo(new VisualObject("PIC", "PIC", ima, Point2D(-1,-1), std::vector<double>(), std::vector< rutz::shared_ptr<Keypoint> >(), USECOLOR)); const uint nmatches = vdb.getObjectMatches(vo, matches, VOMA_SIMPLE, 5U, //max objs to return 0.5F, //keypoint distance score default 0.5F 0.5F, //affine distance score default 0.5F 1.0F, //minscore default 1.0F 3U, //min # of keypoint match 6U, //keypoint selection thershold false //sort by preattentive ); LINFO("Found %i", nmatches); float score = 0, avgScore = 0, affineAvgDist = 0; int nkeyp = 0; int objId = -1; if (nmatches > 0 ){ rutz::shared_ptr<VisualObject> obj; //so we will have a ref to the last matches obj rutz::shared_ptr<VisualObjectMatch> vom; //for(unsigned int i=0; i< nmatches; i++){ for(unsigned int i=0; i< 1; i++){ vom = matches[i]; obj = vom->getVoTest(); score = vom->getScore(); nkeyp = vom->size(); avgScore = vom->getKeypointAvgDist(); affineAvgDist = vom->getAffineAvgDist(); objId = atoi(obj->getName().c_str()+3); return obj->getName(); LINFO("### Object match with '%s' score=%f ID:%i", obj->getName().c_str(), vom->getScore(), objId); //calculate the actual distance (location of keypoints) between //keypoints. If the same patch was found, then the distance should //be close to 0 double dist = 0; for (int keyp=0; keyp<nkeyp; keyp++){ const KeypointMatch kpm = vom->getKeypointMatch(keyp); float refX = kpm.refkp->getX(); float refY = kpm.refkp->getY(); float tstX = kpm.tstkp->getX(); float tstY = kpm.tstkp->getY(); dist += (refX-tstX) * (refX-tstX); dist += (refY-tstY) * (refY-tstY); } // printf("%i:%s %i %f %i %f %f %f\n", objNum, obj->getName().c_str(), // nmatches, score, nkeyp, avgScore, affineAvgDist, sqrt(dist)); //analizeImage(); } } return std::string("nomatch"); }
/*! Load a database, an image, and find best matches. */ int main(const int argc, const char **argv) { MYLOGVERB = LOG_INFO; // check command-line args: if (argc < 3 || argc > 4) LFATAL("USAGE: app-match-SIFT-database <dbname.vdb> <image.png> " "[<fused.png>]"); // load the database: VisualObjectDB vdb; if (vdb.loadFrom(argv[1]) == false) LFATAL("Cannot operate without a valid database."); // get input image: Image< PixRGB<byte> > colim = Raster::ReadRGB(argv[2]); // create visual object and extract keypoints: rutz::shared_ptr<VisualObject> vo(new VisualObject(argv[2], argv[2], colim)); // get the matching objects: std::vector< rutz::shared_ptr<VisualObjectMatch> > matches; const uint nmatches = vdb.getObjectMatches(vo, matches, VOMA_KDTREEBBF); // prepare the fused image: Image< PixRGB<byte> > mimg; std::vector<Point2D<int> > tl, tr, br, bl; // if no match, forget it: if (nmatches == 0U) LINFO("### No matching object found."); else { // let the user know about the matches: for (uint i = 0; i < nmatches; i ++) { rutz::shared_ptr<VisualObjectMatch> vom = matches[i]; rutz::shared_ptr<VisualObject> obj = vom->getVoTest(); LINFO("### Object match with '%s' score=%f", obj->getName().c_str(), vom->getScore()); // add to our fused image if desired: if (argc > 3) { mimg = vom->getTransfTestImage(mimg); // also keep track of the corners of the test image, for // later drawing: Point2D<int> ptl, ptr, pbr, pbl; vom->getTransfTestOutline(ptl, ptr, pbr, pbl); tl.push_back(ptl); tr.push_back(ptr); br.push_back(pbr); bl.push_back(pbl); } } // do a final mix between given image and matches: if (mimg.initialized()) { mimg = Image<PixRGB<byte> >(mimg * 0.5F + colim * 0.5F); // finally draw all the object outlines: PixRGB<byte> col(255, 255, 0); for (uint i = 0; i < tl.size(); i ++) { drawLine(mimg, tl[i], tr[i], col, 1); drawLine(mimg, tr[i], br[i], col, 1); drawLine(mimg, br[i], bl[i], col, 1); drawLine(mimg, bl[i], tl[i], col, 1); } } } // save result image if desired: if (argc > 3) { if (mimg.initialized() == false) mimg = Image< PixRGB<byte> >(colim * 0.5F); Raster::WriteRGB(mimg, std::string(argv[3])); } return 0; }
// ###################################################################### void Beobot2_GistSalLocalizerWorkerI::updateMessage (const RobotSimEvents::EventMessagePtr& eMsg, const Ice::Current&) { // Get a gist-sal message if(eMsg->ice_isA("::BeobotEvents::LandmarkSearchQueueMessage")) { BeobotEvents::LandmarkSearchQueueMessagePtr lsqMsg = BeobotEvents::LandmarkSearchQueueMessagePtr::dynamicCast(eMsg); //Get the current request ID int currRequestID = lsqMsg->RequestID; itsInputFnum = currRequestID; LINFO("Got an lsqMessage with Request ID = %d", currRequestID); // get the inputImage its_input_info_mutex.lock(); itsInputImage = Ice2Image<PixRGB<byte> >(lsqMsg->currIma); //itsInputWin->setTitle(sformat("WM: %d",itsInputFnum).c_str()); //itsInputWin->drawImage(itsInputImage, 0, 0); // get the salient region information itsInputVO.clear(); itsVOKeypointsComputed.clear(); itsInputObjOffset.clear(); uint inputSize = lsqMsg->salientRegions.size(); for(uint i = 0; i < inputSize; i++) { BeobotEvents::SalientRegion salReg = lsqMsg->salientRegions[i]; LDEBUG("W[%4d] sp[%4d,%4d] rect[%4d,%4d,%4d,%4d]", i, salReg.salpt.i, salReg.salpt.j, salReg.objRect.tl.i, salReg.objRect.tl.j, salReg.objRect.br.i, salReg.objRect.br.j); // print the pre-attentive feature vector std::vector<float> features; uint fsize = salReg.salFeatures.size(); for(uint j = 0; j < fsize; j++) { features.push_back(salReg.salFeatures[j]); LDEBUG("[%4d]:%7f", j, salReg.salFeatures[j]); } Point2D<int> salpt(salReg.salpt.i, salReg.salpt.j); Point2D<int> offset( salReg.objRect.tl.i, salReg.objRect.tl.j); Rectangle rect = Rectangle::tlbrO (salReg.objRect.tl.j, salReg.objRect.tl.i, salReg.objRect.br.j, salReg.objRect.br.i); // create a visual object for the salient region Image<PixRGB<byte> > objImg = crop(itsInputImage, rect); std::string testRunFPrefix("testRunFPrefix"); std::string iname("iname"); std::string saveFilePath("saveFilePath"); std::string iName(sformat("%s_SAL_%07d_%02d", testRunFPrefix.c_str(), currRequestID, i)); std::string ifName = iName + std::string(".png"); ifName = saveFilePath + ifName; rutz::shared_ptr<VisualObject> vo(new VisualObject (iName, ifName, objImg, salpt - offset, features, std::vector< rutz::shared_ptr<Keypoint> >(), false, false)); itsInputVO.push_back(vo); itsVOKeypointsComputed.push_back(false); itsInputObjOffset.push_back(offset); LDEBUG("[%d] image[%d]: %s sal:[%d,%d] offset:[%d,%d]", currRequestID, i, iName.c_str(), (salpt - offset).i, (salpt - offset).j, offset.i, offset.j); } its_input_info_mutex.unlock(); its_results_mutex.lock(); itsMatchFound.clear(); itsVOmatch.clear(); itsVOmatch.resize(inputSize); itsLmkMatch.clear(); itsLmkMatch.resize(inputSize); itsSegNumMatch.clear(); itsSegNumMatch.resize(inputSize); itsLenTravMatch.clear(); itsLenTravMatch.resize(inputSize); itsNumObjectSearch.clear(); itsNumObjectSearch.resize(inputSize); for(uint i = 0; i < inputSize; i++) itsMatchFound.push_back(false); for(uint i = 0; i < inputSize; i++) itsNumObjectSearch[i] = 0; itsNumJobsProcessed = 0; its_results_mutex.unlock(); // fill the job queue its_job_queue_mutex.lock(); itsJobQueue.clear(); uint njobs = lsqMsg->jobs.size(); for(uint i = 0; i < njobs; i++) { BeobotEvents::LandmarkSearchJob tempJob = lsqMsg->jobs[i]; itsJobQueue.push_back (GSlocJobData(tempJob.inputSalRegID, tempJob.dbSegNum, tempJob.dbLmkNum, tempJob.dbVOStart, tempJob.dbVOEnd)); } // print the job queue std::list<GSlocJobData>::iterator itr = itsJobQueue.begin(); uint count = 0; while (itr != itsJobQueue.end()) { LDEBUG("[%5d] match obj[%d] lDB[%3d][%3d]:[%3d,%3d]", count, (*itr).objNum, (*itr).segNum, (*itr).lmkNum, (*itr).voStartNum,(*itr).voEndNum); itr++; count++; } its_job_queue_mutex.unlock(); } // Got a landmark match result - stop searching for that salient region else if(eMsg->ice_isA("::BeobotEvents::LandmarkMatchResultMessage")) { BeobotEvents::LandmarkMatchResultMessagePtr lmrMsg = BeobotEvents::LandmarkMatchResultMessagePtr::dynamicCast(eMsg); //Get the current request ID //int currRequestID = gistSalMsg->RequestID; BeobotEvents::LandmarkSearchJob tempJob = lmrMsg->matchInfo; LINFO("Got an lmrMessage"); LINFO("LMR -> found match[%d]: with itsLandmarkDB[%d][%d]", tempJob.inputSalRegID, tempJob.dbSegNum, tempJob.dbLmkNum); its_results_mutex.lock(); if(!itsMatchFound[tempJob.inputSalRegID]) { itsMatchFound[tempJob.inputSalRegID] = true; itsSegNumMatch[tempJob.inputSalRegID] = lmrMsg->segNumMatch; itsLenTravMatch[tempJob.inputSalRegID] = lmrMsg->lenTravMatch; } its_results_mutex.unlock(); } else if(eMsg->ice_isA("::BeobotEvents::CancelSearchMessage")) { its_job_queue_mutex.lock(); itsEmptyQueue = true; its_job_queue_mutex.unlock(); its_results_mutex.lock(); LINFO("CancelSearchMessage: %d processed here", itsNumJobsProcessed); its_results_mutex.unlock(); } }