int main(int argc, char* argv[]) {
  Eigen::internal::setNbThreads(3); // cap Eigen's internal parallelism at 3 threads

  Parser parser;
  GeneralConfig::scale = 10;
  parser.addGroup(TrackingConfig());
  parser.addGroup(RecordingConfig());
  parser.addGroup(SceneConfig());
  parser.addGroup(GeneralConfig());
  parser.addGroup(BulletConfig());
  parser.read(argc, argv);

  initComm();

  TowelTracker2 tv;
  extern bool LIVE;
  if (LIVE) tv.runOnline();
  else tv.runOffline();
}
int main(int argc, char *argv[]) {
  GeneralConfig::scale = 20.;
  ViewerConfig::cameraHomePosition = btVector3(100, 0, 100);
  BulletConfig::dt = BulletConfig::internalTimeStep = 0.01;
  BulletConfig::maxSubSteps = 0;

  Parser parser;
  parser.addGroup(GeneralConfig());
  parser.addGroup(BulletConfig());
  parser.addGroup(SceneConfig());
  parser.addGroup(FlatteningConfig());
  parser.read(argc, argv);

  CustomScene().run();
  return 0;
}
int main(int argc, char *argv[]) {
  ViewerConfig::cameraHomePosition = btVector3(100, 0, 100);
  BulletConfig::dt = BulletConfig::internalTimeStep = 0.002;
  BulletConfig::maxSubSteps = 0;
  SceneConfig::enableIK = false;
  SceneConfig::enableHaptics = false;
  SceneConfig::enableRobot = false;
  GeneralConfig::scale = 20;

  Parser parser;
  parser.addGroup(GeneralConfig());
  parser.addGroup(BulletConfig());
  parser.addGroup(SceneConfig());
  parser.read(argc, argv);

  CustomScene().run();
  return 0;
}
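// Note on the two scene setups above: both set BulletConfig::maxSubSteps = 0 and make dt equal
// to internalTimeStep. In the standard Bullet API, stepping a btDiscreteDynamicsWorld with
// maxSubSteps == 0 advances the world by exactly the passed dt (variable-timestep mode) instead
// of subdividing it into fixed internal substeps, roughly:
//
//   dynamicsWorld->stepSimulation(BulletConfig::dt, BulletConfig::maxSubSteps,
//                                 BulletConfig::internalTimeStep);
//
// How the Scene class forwards these config values to Bullet is an assumption here; only the
// btDynamicsWorld::stepSimulation semantics above are standard Bullet behavior.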
int main(int argc, char* argv[]) {
  Parser parser;
  GeneralConfig::scale = 10;
  parser.addGroup(TrackingConfig());
  parser.addGroup(RecordingConfig());
  parser.addGroup(SceneConfig());
  parser.addGroup(GeneralConfig());
  parser.addGroup(BulletConfig());
  parser.read(argc, argv);

  // choose the tracker implementation from the objType setting
  Tracker* TrackerSystem;
  if (TrackingConfig::objType == "towel") TrackerSystem = new TowelTracker();
  else if (TrackingConfig::objType == "rope") TrackerSystem = new RopeTracker();
  else throw std::runtime_error("invalid objType");

  extern bool LIVE;
  if (LIVE) TrackerSystem->runOnline();
  else TrackerSystem->runOffline();
}
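// The listing above dispatches on TrackingConfig::objType and then drives the chosen object
// through a Tracker*. A minimal sketch of the interface implied by those call sites follows;
// the project's actual Tracker base class may differ, so treat the declarations as inferred,
// not authoritative.
class Tracker {
public:
  virtual void runOnline() = 0;   // track against live sensor input
  virtual void runOffline() = 0;  // track against recorded logs
  virtual ~Tracker() {}
};
// TowelTracker and RopeTracker would then derive from Tracker and override both methods,
// which is what makes the TrackerSystem->runOnline()/runOffline() dispatch work.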
int main(int argc, char* argv[]) {
  Eigen::internal::setNbThreads(2); // cap Eigen's internal parallelism at 2 threads

  Parser parser;
  GeneralConfig::scale = 10;
  parser.addGroup(TrackingConfig());
  parser.addGroup(SceneConfig());
  parser.addGroup(GeneralConfig());
  parser.addGroup(BulletConfig());
  parser.read(argc, argv);

  initComm();

  DefaultSingleHypRobotAndRopeTracker TrackerSystem;
  extern bool LIVE;
  if (LIVE) TrackerSystem.runOnline();
  else TrackerSystem.runOffline();
}
int main(int argc, char *argv[]) {
  // command line options
  GeneralConfig::scale = 10;
  SceneConfig::enableIK = SceneConfig::enableHaptics = false;
  SceneConfig::enableRobot = true;
  Parser parser;
  parser.addGroup(TrackingConfig());
  parser.addGroup(RecordingConfig());
  parser.addGroup(GeneralConfig());
  parser.addGroup(BulletConfig());
  parser.addGroup(SceneConfig());
  parser.read(argc, argv);

  // comm stuff
  initComm();
  FileSubscriber pcSub("kinect", "pcd");
  CloudMessage cloudMsg;
  FileSubscriber ropeSub("rope_pts", "pcd");
  CloudMessage ropeMsg;
  FileSubscriber labelSub("labels", "png");
  ImageMessage labelMsg;
  FileSubscriber endSub("rope_ends", "txt");
  VecVecMessage<float> endMsg;
  FileSubscriber jointSub("joint_states", "txt");
  Retimer<VectorMessage<double> > retimer(&jointSub);

  Scene scene;
  PR2Manager pr2m(scene);
  MonitorForGrabbing lMonitor(pr2m.pr2->robot->GetManipulators()[5], scene.env->bullet->dynamicsWorld);
  MonitorForGrabbing rMonitor(pr2m.pr2->robot->GetManipulators()[7], scene.env->bullet->dynamicsWorld);

  vector<double> firstJoints = doubleVecFromFile(filePath("data000000000000.txt", "joint_states").string());
  ValuesInds vi = getValuesInds(firstJoints);
  pr2m.pr2->setDOFValues(vi.second, vi.first);

  // get kinect transform
  KinectTrans kinectTrans(pr2m.pr2->robot);
  kinectTrans.calibrate(btTransform(btQuaternion(0.669785, -0.668418, 0.222562, -0.234671),
                                    btVector3(0.263565, -0.038203, 1.762524)));
  CoordinateTransformer CT(kinectTrans.getKinectTrans());

  // load table
  vector<btVector3> tableCornersCam = toBulletVectors(floatMatFromFile(onceFile("table_corners.txt").string()));
  vector<btVector3> tableCornersWorld = CT.toWorldFromCamN(tableCornersCam);
  BulletObject::Ptr table = makeTable(tableCornersWorld, .1*GeneralConfig::scale);
  table->setColor(1, 1, 1, .25);

  // load rope
  vector<btVector3> ropePtsCam = toBulletVectors(floatMatFromFile(onceFile("init_rope.txt").string()));
  CapsuleRope::Ptr rope(new CapsuleRope(CT.toWorldFromCamN(ropePtsCam), .0075*METERS));

  // plots
  PointCloudPlot::Ptr kinectPts(new PointCloudPlot(2));
  CorrPlots corrPlots;

  // setup scene
  if (TrackingConfig::showKinect) scene.env->add(kinectPts);
  scene.env->add(rope);
  scene.env->add(table);
  if (TrackingConfig::showLines) scene.env->add(corrPlots.m_lines);
  lMonitor.setBodies(rope->children);
  rMonitor.setBodies(rope->children);

  // recording
  ScreenRecorder* rec = NULL;
  if (RecordingConfig::record != DONT_RECORD) {
    rec = new ScreenRecorder(scene.viewer);
  }

  // end tracker
  vector<RigidBodyPtr> rope_ends;
  rope_ends.push_back(rope->bodies[0]);
  rope_ends.push_back(rope->bodies[rope->bodies.size()-1]);
  MultiPointTrackerRigid endTracker(rope_ends, scene.env->bullet->dynamicsWorld);
  TrackerPlotter trackerPlotter(endTracker);
  //scene.env->add(trackerPlotter.m_fakeObjects[0]);
  //scene.env->add(trackerPlotter.m_fakeObjects[1]);

  scene.startViewer();
  scene.setSyncTime(true);
  scene.idle(true);

  vector<double> oldvals, newvals;
  int count = 0;

  while (pcSub.recv(cloudMsg)) {
    ENSURE(ropeSub.recv(ropeMsg));
    vector<btVector3> obsPts = CT.toWorldFromCamN(toBulletVectors(ropeMsg.m_data));
    ENSURE(labelSub.recv(labelMsg));
    cv::Mat labels = toSingleChannel(labelMsg.m_data);
    ENSURE(endSub.recv(endMsg));
    vector<btVector3> newEnds = CT.toWorldFromCamN(toBulletVectors(endMsg.m_data));
    endTracker.update(newEnds);
    trackerPlotter.update();

    ColorCloudPtr cloudCam = cloudMsg.m_data;
    ColorCloudPtr cloudWorld(new ColorCloud());
    pcl::transformPointCloud(*cloudCam, *cloudWorld, CT.worldFromCamEigen);
    kinectPts->setPoints1(cloudWorld);
    cout << "loaded cloud " << count << endl;
    count++;

    VectorMessage<double>* jointMsgPtr = retimer.msgAt(cloudMsg.getTime());
    vector<double> currentJoints = jointMsgPtr->m_data;
    ValuesInds vi = getValuesInds(currentJoints);
    newvals = vi.first;
    if (oldvals.size() == 0) {
      cout << "first one" << endl;
      oldvals = newvals;
    }
    CT.reset(kinectTrans.getKinectTrans());

    cv::Mat ropeMask = toSingleChannel(labels) == 1;

    for (int iter = 0; iter < TrackingConfig::nIter; iter++) {
      cout << "iteration " << iter << endl;
      // step the simulated robot from the previous toward the current joint configuration
      pr2m.pr2->setDOFValues(vi.second, interpolateBetween(oldvals, newvals, (iter+0.00001)/TrackingConfig::nIter));

      // estimate per-node visibility, compute soft correspondences to the observed points,
      // then nudge the simulated rope toward them with corrective impulses
      vector<btVector3> estPts = rope->getNodes();
      Eigen::MatrixXf ropePtsCam = toEigenMatrix(CT.toCamFromWorldN(estPts));
      vector<float> pVis = calcVisibility(rope->bodies, scene.env->bullet->dynamicsWorld, CT.worldFromCamUnscaled.getOrigin()*METERS, TrackingConfig::sigA*METERS, TrackingConfig::nSamples);
      colorByVisibility(rope, pVis);
      SparseArray corr = toSparseArray(calcCorrProb(toEigenMatrix(estPts), toEigenMatrix(obsPts), toVectorXf(pVis), TrackingConfig::sigB*METERS, TrackingConfig::outlierParam), TrackingConfig::cutoff);
      corrPlots.update(estPts, obsPts, corr);
      vector<btVector3> impulses = calcImpulsesSimple(estPts, obsPts, corr, TrackingConfig::impulseSize);
      applyImpulses(impulses, rope);

      if (RecordingConfig::record == EVERY_ITERATION ||
          (RecordingConfig::record == FINAL_ITERATION && iter == TrackingConfig::nIter-1))
        rec->snapshot();

      lMonitor.update();
      rMonitor.update();
      scene.step(DT);
    }
    oldvals = newvals;
  }
}
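// In the tracking loop above, the robot pose is blended from the previous to the current joint
// configuration with interpolateBetween(oldvals, newvals, t), t in (0, 1]. A plausible
// implementation is simple per-element linear interpolation; this is only a sketch inferred
// from the call site, not necessarily the project's actual helper.
#include <vector>

std::vector<double> interpolateBetween(const std::vector<double>& from,
                                       const std::vector<double>& to,
                                       double t) {
  std::vector<double> out(from.size());
  for (size_t i = 0; i < from.size(); ++i)
    out[i] = (1.0 - t) * from[i] + t * to[i];  // convex combination of the two joint vectors
  return out;
}
// With nIter iterations per frame, t sweeps from just above 0 to 1, so the simulated robot
// reaches the newly observed joint state on the final iteration.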