/**
 * Loads a dictionary of (query, trajectory) samples from baseFolder.
 * If no serialized dmp files are present yet, a JointDMPLearner is used to fit
 * one dmp per trajectory (with the given az / bz dynamics constants) and the
 * result is serialized next to the raw data; otherwise the previously stored
 * dmps are simply mapped to their query files.
 *
 * @param baseFolder folder containing "query*", "traj*" (and optionally "dmp*") files
 * @param az dmp dynamics constant passed through to the learner
 * @param bz dmp dynamics constant passed through to the learner
 */
DictionaryTrajectory::DictionaryTrajectory(std::string baseFolder, double az, double bz) : Trajectory() {

    this->baseFolder = baseFolder;
    vector<string> files = getFilesInDirectory(baseFolder);
    queryFiles = sortPrefix(files, "query");
    trajFiles = sortPrefix(files, "traj");
    dmpFiles = sortPrefix(files, "dmp");

    if(dmpFiles.size() == 0) {

        // no serialized dmps yet --> learn them from the raw trajectories
        queryPoints = mapFiles(queryFiles, trajFiles, "query", "traj");
        KUKADU_SHARED_PTR<JointDMPLearner> dmpLearner;
        vector<mat> jointsVec;

        // first pass: load every trajectory and determine the longest duration
        // (column 0 holds time, so the last row's time stamp is the duration)
        double tMax = 0.0;
        for(size_t i = 0; i < queryPoints.size(); ++i) {
            mat joints = readMovements((string(baseFolder) + string(queryPoints.at(i).getFileDataPath())).c_str());
            // first column is time, the remaining columns are the joints
            degOfFreedom = joints.n_cols - 1;
            queryPoints.at(i).setQueryPoint(readQuery(string(baseFolder) + string(queryPoints.at(i).getFileQueryPath())));
            jointsVec.push_back(joints);
            double currentTMax = joints(joints.n_rows - 1, 0);
            if(tMax < currentTMax)
                tMax = currentTMax;
        }

        // second pass: pad every trajectory to tMax, fit a dmp and serialize it
        for(size_t i = 0; i < jointsVec.size(); ++i) {
            QueryPoint currentQueryPoint = queryPoints.at(i);
            mat joints = fillTrajectoryMatrix(jointsVec.at(i), tMax);
            dmpLearner = KUKADU_SHARED_PTR<JointDMPLearner>(new JointDMPLearner(az, bz, joints));
            KUKADU_SHARED_PTR<Dmp> learnedDmps = dmpLearner->fitTrajectories();
            learnedDmps->serialize(baseFolder + currentQueryPoint.getFileDmpPath());
            queryPoints.at(i).setDmp(learnedDmps);
            startingPos = learnedDmps->getY0();
            cout << "(DMPGeneralizer) goals for query point [" << currentQueryPoint.getQueryPoint().t() << "]" << endl << "\t [";
            // bug fix: report the goals of the freshly learned dmp.
            // currentQueryPoint is a by-value copy taken BEFORE setDmp() was
            // applied to queryPoints.at(i), so its own dmp pointer is still the
            // stale (possibly empty) one and must not be dereferenced here.
            cout << learnedDmps->getG().t() << "]" << endl;
            // release the learner before the next iteration
            dmpLearner = KUKADU_SHARED_PTR<JointDMPLearner>();
        }

    } else {
        // dmps were learned in a previous run --> just map the files together
        queryPoints = mapFiles(queryFiles, trajFiles, dmpFiles, "query", "traj", "dmp");
    }

    // NOTE(review): at() throws std::out_of_range if the folder contained no
    // query files at all -- same behavior as the original code
    degOfFreedom = queryPoints.at(0).getDmp()->getDegreesOfFreedom();

}
bool XmlQueryReader::read( const QString &xmlData ) { addData( xmlData ); int queryCount = 0; while( !atEnd() ) { readNext(); if( isStartElement() ) { //we expect exactly one query definition in the xml data. //so fail if we find more than one if( name() == "query" ) { if( attributes().value( "version" ) == "1.0" ) { queryCount++; readQuery(); } } } } return queryCount == 1 && !error(); }
/**
 * Matches query, trajectory and dmp files that share a common filename suffix
 * (the part remaining after stripping the respective prefix) and bundles every
 * matched triple into a QueryPoint whose dmp is deserialized from disk.
 * The member startingPos is taken from the dmp matched to the first query file.
 */
vector<QueryPoint> DictionaryTrajectory::mapFiles(vector<string> queryFiles, vector<string> trajFiles, vector<string> dmpFiles, string prefix1, string prefix2, string prefix3) {

    vector<QueryPoint> mappedPoints;

    const int queryPrefixLen = prefix1.size();
    const int trajPrefixLen = prefix2.size();
    const int dmpPrefixLen = prefix3.size();

    for(int qIdx = 0; qIdx < (int) queryFiles.size(); ++qIdx) {

        string queryFile = queryFiles.at(qIdx);
        // suffix after the prefix (the count is clamped to the string end)
        string querySuffix = queryFile.substr(queryPrefixLen, queryFile.size() - 1);

        for(int tIdx = 0; tIdx < (int) trajFiles.size(); ++tIdx) {

            string trajFile = trajFiles.at(tIdx);
            string trajSuffix = trajFile.substr(trajPrefixLen, trajFile.size() - 1);
            if(querySuffix.compare(trajSuffix))
                continue;

            for(int dIdx = 0; dIdx < (int) dmpFiles.size(); ++dIdx) {

                string dmpFile = dmpFiles.at(dIdx);
                string dmpSuffix = dmpFile.substr(dmpPrefixLen, dmpFile.size() - 1);
                if(dmpSuffix.compare(querySuffix))
                    continue;

                // all three suffixes agree --> load dmp from file
                QueryPoint matched(queryFiles.at(qIdx), trajFiles.at(tIdx), prefix3 + trajSuffix,
                        KUKADU_SHARED_PTR<Dmp>(new JointDmp(baseFolder + prefix3 + trajSuffix)), vec());
                matched.setQueryPoint(readQuery(string(baseFolder) + string(matched.getFileQueryPath())));
                mappedPoints.push_back(matched);

                // remember the start position of the very first query file
                if(qIdx == 0)
                    startingPos = matched.getDmp()->getY0();

            }

        }

    }

    return mappedPoints;

}
// Run readQuery() once for every pair in the test set; BOOST_FOREACH copies
// each element into q per iteration.
// NOTE(review): intPair and testSTPointPairs are declared outside this chunk —
// presumably a pair-of-ints typedef and the collected test point pairs; verify
// against the enclosing function.
BOOST_FOREACH(intPair q, testSTPointPairs ) { readQuery(q); }