void TransportMgr::GeneratePath(GameObjectTemplate const* goInfo, TransportTemplate* transport)
{
    uint32 pathId = goInfo->moTransport.taxiPathId;
    TaxiPathNodeList const& path = sTaxiPathNodesByPath[pathId];
    std::vector<KeyFrame>& keyFrames = transport->keyFrames;
    Movement::PointsArray splinePath, allPoints;
    bool mapChange = false;
    for (size_t i = 0; i < path.size(); ++i)
        allPoints.push_back(G3D::Vector3(path[i].x, path[i].y, path[i].z));

    // Add extra points to allow derivative calculations for all path nodes
    allPoints.insert(allPoints.begin(), allPoints.front().lerp(allPoints[1], -0.2f));
    allPoints.push_back(allPoints.back().lerp(allPoints[allPoints.size() - 2], -0.2f));
    allPoints.push_back(allPoints.back().lerp(allPoints[allPoints.size() - 2], -1.0f));

    SplineRawInitializer initer(allPoints);
    TransportSpline orientationSpline;
    orientationSpline.init_spline_custom(initer);
    orientationSpline.initLengths();

    for (size_t i = 0; i < path.size(); ++i)
    {
        if (!mapChange)
        {
            TaxiPathNodeEntry const& node_i = path[i];
            if (i != path.size() - 1 && (node_i.actionFlag & 1 || node_i.mapid != path[i + 1].mapid))
            {
                keyFrames.back().Teleport = true;
                mapChange = true;
            }
            else
            {
                KeyFrame k(node_i);
                G3D::Vector3 h;
                orientationSpline.evaluate_derivative(i + 1, 0.0f, h);
                k.InitialOrientation = Position::NormalizeOrientation(atan2(h.y, h.x) + M_PI);

                keyFrames.push_back(k);
                splinePath.push_back(G3D::Vector3(node_i.x, node_i.y, node_i.z));
                transport->mapsUsed.insert(k.Node->mapid);
            }
        }
        else
            mapChange = false;
    }

    if (splinePath.size() >= 2)
    {
        // Remove special catmull-rom spline points
        if (!keyFrames.front().IsStopFrame() && !keyFrames.front().Node->arrivalEventID && !keyFrames.front().Node->departureEventID)
        {
            splinePath.erase(splinePath.begin());
            keyFrames.erase(keyFrames.begin());
        }
        if (!keyFrames.back().IsStopFrame() && !keyFrames.back().Node->arrivalEventID && !keyFrames.back().Node->departureEventID)
        {
            splinePath.pop_back();
            keyFrames.pop_back();
        }
    }

    ASSERT(!keyFrames.empty());

    if (transport->mapsUsed.size() > 1)
    {
        for (std::set<uint32>::const_iterator itr = transport->mapsUsed.begin(); itr != transport->mapsUsed.end(); ++itr)
            ASSERT(!sMapStore.LookupEntry(*itr)->Instanceable());

        transport->inInstance = false;
    }
    else
        transport->inInstance = sMapStore.LookupEntry(*transport->mapsUsed.begin())->Instanceable();

    // last to first is always "teleport", even for closed paths
    keyFrames.back().Teleport = true;

    const float speed = float(goInfo->moTransport.moveSpeed);
    const float accel = float(goInfo->moTransport.accelRate);
    const float accel_dist = 0.5f * speed * speed / accel;

    transport->accelTime = speed / accel;
    transport->accelDist = accel_dist;

    int32 firstStop = -1;
    int32 lastStop = -1;

    // first cell is arrived at by teleportation :S
    keyFrames[0].DistFromPrev = 0;
    keyFrames[0].Index = 1;
    if (keyFrames[0].IsStopFrame())
    {
        firstStop = 0;
        lastStop = 0;
    }

    // find the rest of the distances between key points
    // Every path segment has its own spline
    size_t start = 0;
    for (size_t i = 1; i < keyFrames.size(); ++i)
    {
        if (keyFrames[i - 1].Teleport || i + 1 == keyFrames.size())
        {
            size_t extra = !keyFrames[i - 1].Teleport ? 1 : 0;
            TransportSpline* spline = new TransportSpline();
            spline->init_spline(&splinePath[start], i - start + extra, Movement::SplineBase::ModeCatmullrom);
            spline->initLengths();
            for (size_t j = start; j < i + extra; ++j)
            {
                keyFrames[j].Index = j - start + 1;
                keyFrames[j].DistFromPrev = spline->length(j - start, j + 1 - start);
                if (j > 0)
                    keyFrames[j - 1].NextDistFromPrev = keyFrames[j].DistFromPrev;
                keyFrames[j].Spline = spline;
            }

            if (keyFrames[i - 1].Teleport)
            {
                keyFrames[i].Index = i - start + 1;
                keyFrames[i].DistFromPrev = 0.0f;
                keyFrames[i - 1].NextDistFromPrev = 0.0f;
                keyFrames[i].Spline = spline;
            }

            start = i;
        }

        if (keyFrames[i].IsStopFrame())
        {
            // remember first stop frame
            if (firstStop == -1)
                firstStop = i;
            lastStop = i;
        }
    }

    keyFrames.back().NextDistFromPrev = keyFrames.front().DistFromPrev;

    if (firstStop == -1 || lastStop == -1)
        firstStop = lastStop = 0;

    // at stopping keyframes, we define distSinceStop == 0,
    // and distUntilStop is to the next stopping keyframe.
    // this is required to properly handle cases of two stopping frames in a row (yes they do exist)
    float tmpDist = 0.0f;
    for (size_t i = 0; i < keyFrames.size(); ++i)
    {
        int32 j = (i + lastStop) % keyFrames.size();
        if (keyFrames[j].IsStopFrame() || j == lastStop)
            tmpDist = 0.0f;
        else
            tmpDist += keyFrames[j].DistFromPrev;
        keyFrames[j].DistSinceStop = tmpDist;
    }

    tmpDist = 0.0f;
    for (int32 i = int32(keyFrames.size()) - 1; i >= 0; i--)
    {
        int32 j = (i + firstStop) % keyFrames.size();
        tmpDist += keyFrames[(j + 1) % keyFrames.size()].DistFromPrev;
        keyFrames[j].DistUntilStop = tmpDist;
        if (keyFrames[j].IsStopFrame() || j == firstStop)
            tmpDist = 0.0f;
    }

    for (size_t i = 0; i < keyFrames.size(); ++i)
    {
        float total_dist = keyFrames[i].DistSinceStop + keyFrames[i].DistUntilStop;
        if (total_dist < 2 * accel_dist) // won't reach full speed
        {
            if (keyFrames[i].DistSinceStop < keyFrames[i].DistUntilStop) // is still accelerating
            {
                // calculate accel+brake time for this short segment
                float segment_time = 2.0f * sqrt((keyFrames[i].DistUntilStop + keyFrames[i].DistSinceStop) / accel);
                // subtract acceleration time
                keyFrames[i].TimeTo = segment_time - sqrt(2 * keyFrames[i].DistSinceStop / accel);
            }
            else // slowing down
                keyFrames[i].TimeTo = sqrt(2 * keyFrames[i].DistUntilStop / accel);
        }
        else if (keyFrames[i].DistSinceStop < accel_dist) // still accelerating (but will reach full speed)
        {
            // calculate accel + cruise + brake time for this long segment
            float segment_time = (keyFrames[i].DistUntilStop + keyFrames[i].DistSinceStop) / speed + (speed / accel);
            // subtract acceleration time
            keyFrames[i].TimeTo = segment_time - sqrt(2 * keyFrames[i].DistSinceStop / accel);
        }
        else if (keyFrames[i].DistUntilStop < accel_dist) // already slowing down (but reached full speed)
            keyFrames[i].TimeTo = sqrt(2 * keyFrames[i].DistUntilStop / accel);
        else // at full speed
            keyFrames[i].TimeTo = (keyFrames[i].DistUntilStop / speed) + (0.5f * speed / accel);
    }

    // calculate tFrom times from tTo times
    float segmentTime = 0.0f;
    for (size_t i = 0; i < keyFrames.size(); ++i)
    {
        int32 j = (i + lastStop) % keyFrames.size();
        if (keyFrames[j].IsStopFrame() || j == lastStop)
            segmentTime = keyFrames[j].TimeTo;
        keyFrames[j].TimeFrom = segmentTime - keyFrames[j].TimeTo;
    }

    // calculate path times
    keyFrames[0].ArriveTime = 0;
    float curPathTime = 0.0f;
    if (keyFrames[0].IsStopFrame())
    {
        curPathTime = float(keyFrames[0].Node->delay);
        keyFrames[0].DepartureTime = uint32(curPathTime * IN_MILLISECONDS);
    }

    for (size_t i = 1; i < keyFrames.size(); ++i)
    {
        curPathTime += keyFrames[i - 1].TimeTo;
        if (keyFrames[i].IsStopFrame())
        {
            keyFrames[i].ArriveTime = uint32(curPathTime * IN_MILLISECONDS);
            keyFrames[i - 1].NextArriveTime = keyFrames[i].ArriveTime;
            curPathTime += float(keyFrames[i].Node->delay);
            keyFrames[i].DepartureTime = uint32(curPathTime * IN_MILLISECONDS);
        }
        else
        {
            curPathTime -= keyFrames[i].TimeTo;
            keyFrames[i].ArriveTime = uint32(curPathTime * IN_MILLISECONDS);
            keyFrames[i - 1].NextArriveTime = keyFrames[i].ArriveTime;
            keyFrames[i].DepartureTime = keyFrames[i].ArriveTime;
        }
    }

    keyFrames.back().NextArriveTime = keyFrames.back().DepartureTime;

    transport->pathTime = keyFrames.back().DepartureTime;
}
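/*
 * A minimal standalone sketch of the trapezoidal speed profile that the
 * TimeTo computation above encodes. This is not TrinityCore code: the
 * function name timeToNextStop and the use of plain doubles are assumptions
 * for illustration. Given the distance already travelled since the last stop
 * (distSinceStop), the distance remaining to the next stop (distUntilStop),
 * a cruise speed v and a constant acceleration a, it returns the remaining
 * travel time, mirroring the four branches above.
 */
#include <cmath>

double timeToNextStop(double distSinceStop, double distUntilStop, double v, double a)
{
    const double accelDist = 0.5 * v * v / a;        // distance needed to reach speed v
    const double total = distSinceStop + distUntilStop;

    if (total < 2.0 * accelDist)                     // segment too short to reach full speed
    {
        if (distSinceStop < distUntilStop)           // still accelerating
        {
            // time for the whole accelerate-then-brake triangle,
            // minus the time already spent accelerating
            double segmentTime = 2.0 * std::sqrt(total / a);
            return segmentTime - std::sqrt(2.0 * distSinceStop / a);
        }
        return std::sqrt(2.0 * distUntilStop / a);   // already braking
    }
    if (distSinceStop < accelDist)                   // accelerating, will reach v
    {
        // accel + cruise + brake time, minus the elapsed acceleration time
        double segmentTime = total / v + v / a;
        return segmentTime - std::sqrt(2.0 * distSinceStop / a);
    }
    if (distUntilStop < accelDist)                   // braking down from full speed
        return std::sqrt(2.0 * distUntilStop / a);
    return distUntilStop / v + 0.5 * v / a;          // cruising, braking phase still ahead
}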
bool PNGFormat::ReadMolecule(OBBase* pOb, OBConversion* pConv)
{
    istream& ifs = *pConv->GetInStream();
    if (pConv->IsFirstInput())
    {
        _count = 0;
        _hasInputPngFile = true;
    }
    const char pngheader[] = {-119, 80, 78, 71, 13, 10, 26, 10, 0};
    char readbytes[9];
    ifs.read(readbytes, 8);

    if (!equal(pngheader, pngheader + 8, readbytes))
    {
        obErrorLog.ThrowError("PNG Format", "Not a PNG file", obError);
        return false;
    }

    // Loop through all the chunks
    while (ifs)
    {
        unsigned int len = Read32(ifs);
        ifs.read(readbytes, 4);
        string chunkid(readbytes, readbytes + 4);
        if (chunkid == "IEND")
        {
            bytesToIEND = ifs.tellg();
            bytesToIEND -= 8;
            break;
        }
        streampos pos = ifs.tellg();
        const char* altid = pConv->IsOption("y", OBConversion::INOPTIONS);
        if (chunkid == "tEXt" || chunkid == "zTXt" || (altid && chunkid == altid))
        {
            string keyword;
            getline(ifs, keyword, '\0');
            unsigned int datalength = len - keyword.size() - 1;

            // remove "file" from end of keyword
            transform(keyword.begin(), keyword.end(), keyword.begin(), ::tolower);
            string::size_type pos = keyword.find("file");
            if (pos != string::npos)
                keyword.erase(pos);

            OBFormat* pFormat = OBConversion::FindFormat(keyword.c_str());
            if (pFormat)
            {
                // We have found embedded text that we need to extract
                stringstream ss;
                if (chunkid[0] != 'z')
                {
                    // Copy it to a stringstream
                    istreambuf_iterator<char> initer(ifs);
                    ostreambuf_iterator<char> outiter(ss);
                    for (unsigned int i = 0; i < datalength; ++i)
                        *outiter++ = *initer++;
                }
                else
                {
                    // Needs to be uncompressed first
                    Bytef* pCompTxt = new Bytef[datalength];
                    ifs.read((char*)pCompTxt, datalength);
                    --datalength; // for compression method byte
                    // zlib's uncompress() requires the destination length to hold the
                    // buffer size on entry; guess the uncompressed length. NASTY!
                    uLongf uncompLen = datalength * 6;
                    Bytef* pUncTxt = new Bytef[uncompLen + 1]; // +1 for the NUL terminator below
                    if (*pCompTxt != 0 /*compression method*/
                        || uncompress(pUncTxt, &uncompLen, pCompTxt + 1, datalength) != Z_OK)
                    {
                        obErrorLog.ThrowError("PNG Format", "Errors in decompression", obError);
                        delete[] pUncTxt;
                        delete[] pCompTxt;
                        return false;
                    }
                    pUncTxt[uncompLen] = '\0';
                    ss.str((char*)pUncTxt);
                    delete[] pUncTxt;
                    delete[] pCompTxt;
                }

                // Use a new OBConversion object to convert embedded text
                OBConversion conv2(&ss, pConv->GetOutStream());
                conv2.CopyOptions(pConv);
                conv2.SetInAndOutFormats(pFormat, pConv->GetOutFormat());
                _count += conv2.Convert();

                ifs.ignore(4); // CRC
                continue;      // already at the end of the chunk
            }
        }
        // Move to end of chunk
        ifs.seekg(pos);
        ifs.ignore(len + 4); // data + CRC
    }

    // if we will be writing a png file, read and save the whole input file.
    CopyOfInput.clear();
    if (pConv->GetOutFormat() == this)
    {
        ifs.seekg(0);
        copy(istreambuf_iterator<char>(ifs), istreambuf_iterator<char>(), back_inserter(CopyOfInput));
    }

    if (pConv->IsLastFile() && _count > 0)
    {
        pConv->ReportNumberConverted(_count); // report the number of chemical objects
        pConv->SetOutFormat(this);            // so that number of files is reported as "PNG_files"
    }
    return true;
}
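/*
 * A minimal sketch of the PNG chunk layout that the reader above walks.
 * Every chunk is: 4-byte big-endian data length, 4-byte ASCII chunk type,
 * <length> data bytes, 4-byte CRC. The readBigEndian32 helper below is one
 * plausible implementation of the Read32 call used above; the real OpenBabel
 * helper may differ. listChunks is a hypothetical utility that just prints
 * each chunk type and length until IEND.
 */
#include <iostream>
#include <istream>
#include <string>

static unsigned int readBigEndian32(std::istream& ifs)
{
    unsigned char b[4];
    ifs.read(reinterpret_cast<char*>(b), 4);
    return (static_cast<unsigned int>(b[0]) << 24) | (b[1] << 16) | (b[2] << 8) | b[3];
}

static void listChunks(std::istream& ifs)
{
    ifs.ignore(8); // skip the 8-byte PNG signature
    while (ifs)
    {
        unsigned int len = readBigEndian32(ifs);
        char type[5] = {0};
        ifs.read(type, 4);
        if (!ifs)
            break;
        std::cout << type << " (" << len << " bytes)\n";
        if (std::string(type) == "IEND")
            break;
        ifs.ignore(len + 4); // skip data + CRC
    }
}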
int main(int n_args, char** args)
{
  init_logging();
  isage::util::print_pstats();

  int num_topics;
  int num_epochs_;
  isage::wtm::LDAVStrategy strategy;

  std::string output_usage_name;
  std::string heldout_output_usage_name;

  po::variables_map vm;
  {
    po::options_description desc("Allowed options");
    desc.add_options()
      ("help", "produce help message")
      ("vocab-size", po::value<int>()->default_value(10),
       "number of vocab words (default: 10)")
      ("words-per-doc", po::value<int>()->default_value(10),
       "number of words per document (default: 10)")
      ("bias", po::value<double>()->default_value(.8),
       "Bernoulli parameter p for how to partition the vocab words (default: 0.8)")
      ("num-docs", po::value<int>()->default_value(10),
       "number of documents to generate (default: 10)")
      //////////////////////////
      ("topics", po::value<int>(&num_topics)->default_value(10),
       "number of topics to use")
      ("train-epochs", po::value<int>(&num_epochs_)->default_value(5),
       "number of epochs to run")
      ("em-iterations", po::value<int>(&(strategy.num_learn_iters))->default_value(100),
       "number of EM iterations to run")
      ("e-steps", po::value<int>(&(strategy.num_e_iters))->default_value(25),
       "number of iterations to perform, per E-step")
      ("m-steps", po::value<int>(&(strategy.num_m_iters))->default_value(1),
       "number of iterations to perform, per M-step")
      ("update-hypers", po::value<int>(&(strategy.hyper_update_iter))->default_value(-1),
       "how often to update the hyperparameters (default: -1 == never update)")
      ("update-model-interval", po::value<int>(&(strategy.update_model_every))->default_value(5),
       "update the model every [some] number of EM steps (default: 5)")
      ("print-topics-every", po::value<int>(&(strategy.print_topics_every))->default_value(5),
       "print topics every [some] number of EM steps (default: 5)")
      ("print-usage-every", po::value<int>(&(strategy.print_usage_every))->default_value(5),
       "print topic usage every [some] number of EM steps (default: 5)")
      ("top-k", po::value<int>(&(strategy.print_topics_k))->default_value(10),
       "number of words per topic to print (default: 10)")
      ("em-verbosity", po::value<int>(&(strategy.em_verbosity))->default_value(1),
       "how verbose EM output should be (default: 1; higher == more verbose)")
      ("eta-density-threshold", po::value<double>(&(strategy.eta_density_threshold))->default_value(1E-4),
       "the threshold t for counting the number of eta parameters above t (default: 1E-4)")
      ////////////////////////////////
      ("topic-usage-file", po::value<std::string>(&output_usage_name)->default_value("-"),
       "filename to write topic usage to (default: - (to console))")
      ("heldout-topic-usage-file", po::value<std::string>(&heldout_output_usage_name)->default_value("-"),
       "filename to write heldout topic usage to (default: - (to console))")
      ("inferencer-serialization", po::value<std::string>(),
       "filename to serialize inference state to")
      ("serialized-inferencer", po::value<std::string>(),
       "filename to READ serialized inference state from")
      ////////////////////////////////
      ;

    po::store(po::parse_command_line(n_args, args, desc), vm);
    if (vm.count("help"))
    {
      ERROR << desc << "\n";
      return 1;
    }
    po::notify(vm);
  }

  typedef std::string string;
  typedef string VocabType;
  typedef isage::wtm::Vocabulary<VocabType> SVocab;
  typedef double CountType;
  typedef isage::wtm::Document<VocabType, CountType> Doc;
  typedef isage::wtm::Corpus<Doc> Corpus;
  typedef std::vector<double> TopicType;
  typedef isage::wtm::DiscreteLDA<VocabType, std::vector<double> > Model;
  typedef isage::wtm::DiscreteVariational<Doc, VocabType, TopicType> Variational;

  isage::util::SmartWriter usage_outer(output_usage_name);
  isage::util::SmartWriter assign_outer("assignments");

  Variational* var_inf = NULL;

  SVocab word_vocab("__OOV__");
  for (int wi = 1; wi <= vm["vocab-size"].as<int>(); ++wi)
    word_vocab.make_word("word_" + std::to_string(wi));

  Corpus corpus("train_corpus");
  corpus.generate(vm["num-docs"].as<int>(),
                  vm["words-per-doc"].as<int>(),
                  vm["bias"].as<double>(),
                  word_vocab);
  int num_words_total = get_num_tokens(corpus);
  INFO << "Number of documents: " << corpus.num_docs();
  INFO << "Number of word tokens total: " << num_words_total;
  INFO << "Number of vocab types: " << word_vocab.num_words();

  isage::wtm::SymmetricHyperparams shp;
  shp.h_theta = 1.0 / (double)num_topics;
  shp.h_word = 0.1;
  INFO << "Creating model with " << num_topics << " topics";
  Model dm(num_topics, &shp, &word_vocab);
  INFO << "Done creating model.";

  var_inf = new Variational(&dm, &corpus, &word_vocab);
  isage::wtm::UniformHyperSeedWeightedInitializer initer(num_topics, corpus.num_docs(),
                                                         (double)num_words_total / (double)corpus.num_docs());
  var_inf->init(initer);

  for (int epoch = 0; epoch < num_epochs_; ++epoch)
  {
    INFO << "Starting learning epoch " << epoch;
    var_inf->learn(strategy, epoch, usage_outer, assign_outer);
    INFO << "Done with inference in epoch " << epoch;

    // // create and open a character archive for output
    // if (vm.count("inferencer-serialization"))
    // {
    //   std::string sfname = vm["inferencer-serialization"].as<std::string>() +
    //     ".iteration" + std::to_string(1 + epoch);
    //   std::ofstream ofs(sfname, std::ios::out | std::ios::binary);
    //   boost::iostreams::filtering_streambuf<boost::iostreams::output> out;
    //   out.push(boost::iostreams::gzip_compressor());
    //   out.push(ofs);
    //   boost::archive::binary_oarchive oa(out);
    //   oa << (*var_inf);
    //   INFO << "see " << sfname << " for serialized inferencer";
    // }

    dm.print_topics(strategy.print_topics_k, word_vocab);
  }

  if (var_inf != NULL)
    delete var_inf;

  return 0;
}
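/*
 * A hedged sketch of what a synthetic-corpus generator like the
 * corpus.generate() call above could do, based only on the option help text:
 * each document draws words-per-doc tokens, and for each token a
 * Bernoulli(bias) draw decides whether the word comes from the first or the
 * second half of the vocabulary. The name generateToyCorpus and the plain
 * string/vector types are stand-ins, not the isage Document/Corpus classes;
 * the vocabulary is assumed to hold at least two words.
 */
#include <random>
#include <string>
#include <vector>

std::vector<std::vector<std::string> >
generateToyCorpus(int numDocs, int wordsPerDoc, double bias,
                  const std::vector<std::string>& vocab, unsigned seed = 42)
{
  std::mt19937 rng(seed);
  std::bernoulli_distribution pickFirstHalf(bias);
  const std::size_t half = vocab.size() / 2;
  std::uniform_int_distribution<std::size_t> lo(0, half - 1);
  std::uniform_int_distribution<std::size_t> hi(half, vocab.size() - 1);

  std::vector<std::vector<std::string> > docs(numDocs);
  for (std::size_t d = 0; d < docs.size(); ++d)
    for (int w = 0; w < wordsPerDoc; ++w)
      docs[d].push_back(vocab[pickFirstHalf(rng) ? lo(rng) : hi(rng)]);
  return docs;
}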