// -----------------------------------------------------------------------------
// CMceSrvStream::UseDefaultStartupSequence
// -----------------------------------------------------------------------------
//
TBool CMceSrvStream::UseDefaultStartupSequence()
    {
    TBool defaultSequence = ETrue;

    // Special case: a local stream that has a camera source and
    // a display sink
    if ( StreamType() == CMceComMediaStream::ELocalStream &&
         iSource->Data().iType == KMceCameraSource &&
         iSink->Data().iType == KMceDisplaySink )
        {
        // Try to find a stream that is a send stream and has the
        // same camera source
        TMceSrvStreamIterator streams( Data().Session()->MccStreams(),
                                       iSource->Data() );
        CMceSrvStream* pairedStream = NULL;
        CMceSrvStream* stream = NULL;
        while ( !pairedStream && streams.Next( stream ) )
            {
            pairedStream =
                ( stream->StreamType() == CMceComMediaStream::ESendStream ||
                  stream->StreamType() == CMceComMediaStream::ESendOnlyStream ) ?
                stream : NULL;
            }
        defaultSequence = MCE_IS_NULL_PTR( pairedStream );
        if ( !defaultSequence )
            {
            static_cast<CMceComDisplaySink&>( iSink->Data() ).SetViewFinder( ETrue );
            }
        }
    return defaultSequence;
    }
template<typename T>
gr::gs::Implementations::Autocovariance_impl<T>::Autocovariance_impl(
        unsigned length,
        T mean,
        const unsigned decimation,
        const unsigned offset) :
    gr::sync_decimator(
            "Autocovariance",
            io_signature::make(1, 1, sizeof(T)),
            io_signature::make(streams(), streams(), sizeof(float) * length),
            decimation),
    m_mean(mean),
    m_length(length),
    m_offset(offset % decimation)
{
    this->enable_update_rate(false);
    this->set_history(length);
}
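For orientation, here is a hypothetical standalone helper (not part of gr-gs) illustrating the per-output quantity such a block produces: each output item is a length-sized float vector pairing x[n] with x[n-k] for lags k = 0..length-1 around a fixed, user-supplied mean, which is why the constructor calls set_history(length) to keep the previous length-1 samples available.

#include <cstddef>
#include <vector>

// Hypothetical helper (names and scope are illustrative, not gr-gs API):
// one autocovariance row at sample n, for lags 0..length-1, with a fixed mean.
std::vector<float> autocovarianceRow(const std::vector<float>& x,
                                     std::size_t n,      // requires n >= length - 1
                                     std::size_t length,
                                     float mean)
{
    std::vector<float> row(length, 0.0f);
    for (std::size_t k = 0; k < length; ++k)
        row[k] = (x[n] - mean) * (x[n - k] - mean);
    return row;
}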
PluginStream::PluginStream(PluginStreamClient* client, Frame* frame,
                           const ResourceRequest& resourceRequest,
                           bool sendNotification, void* notifyData,
                           const NPPluginFuncs* pluginFuncs, NPP instance,
                           const PluginQuirkSet& quirks)
    : m_resourceRequest(resourceRequest)
    , m_client(client)
    , m_frame(frame)
    , m_notifyData(notifyData)
    , m_sendNotification(sendNotification)
    , m_streamState(StreamBeforeStarted)
    , m_loadManually(false)
    , m_delayDeliveryTimer(this, &PluginStream::delayDeliveryTimerFired)
    , m_deliveryData(0)
    , m_tempFileHandle(invalidPlatformFileHandle)
    , m_pluginFuncs(pluginFuncs)
    , m_instance(instance)
    , m_quirks(quirks)
{
    ASSERT(m_instance);

    m_stream.url = 0;
    m_stream.ndata = 0;
    m_stream.pdata = 0;
    m_stream.end = 0;
    m_stream.notifyData = 0;
    m_stream.lastmodified = 0;

    streams().add(&m_stream, m_instance);
}
PluginStream::~PluginStream()
{
    ASSERT(m_streamState != StreamStarted);
    ASSERT(!m_loader);

    fastFree((char*)m_stream.url);

    streams().remove(&m_stream);
}
ValuedAction ScenarioLowerBound::Search() {
    RandomStreams streams(Globals::config.num_scenarios,
                          Globals::config.search_depth);
    vector<State*> particles = belief_->Sample(Globals::config.num_scenarios);

    ValuedAction va = Value(particles, streams, history_);

    for (int i = 0; i < particles.size(); i++)
        model_->Free(particles[i]);

    return va;
}
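The Sample()/Free() pairing above is an ownership convention: particles returned by Belief::Sample() must be handed back to the model. A hypothetical RAII guard (not a DESPOT class) that enforces the same convention:

// Hypothetical RAII guard: frees sampled particles through the model when
// the guard leaves scope, mirroring the manual loop in Search() above.
struct ParticleGuard {
    const DSPOMDP* model;
    std::vector<State*>& particles;

    ~ParticleGuard() {
        for (State* s : particles)
            model->Free(s);
    }
};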
int rapMapSAIndex(int argc, char* argv[]) {
    std::cerr << "RapMap Indexer\n";

    TCLAP::CmdLine cmd("RapMap Indexer");
    TCLAP::ValueArg<std::string> transcripts("t", "transcripts", "The transcript file to be indexed", true, "", "path");
    TCLAP::ValueArg<std::string> index("i", "index", "The location where the index should be written", true, "", "path");
    TCLAP::ValueArg<uint32_t> kval("k", "klen", "The length of k-mer to index", false, 31, "positive integer less than 32");
    cmd.add(transcripts);
    cmd.add(index);
    cmd.add(kval);

    cmd.parse(argc, argv);

    // stupid parsing for now
    std::string transcriptFile(transcripts.getValue());
    std::vector<std::string> transcriptFiles({ transcriptFile });

    uint32_t k = kval.getValue();
    // An odd k guarantees a k-mer can never be its own reverse complement.
    if (k % 2 == 0) {
        std::cerr << "Error: k must be an odd value, you chose " << k << '\n';
        std::exit(1);
    } else if (k > 31) {
        std::cerr << "Error: k must not be larger than 31, you chose " << k << '\n';
        std::exit(1);
    }
    rapmap::utils::my_mer::k(k);

    std::string indexDir = index.getValue();
    if (indexDir.back() != '/') {
        indexDir += '/';
    }
    bool dirExists = rapmap::fs::DirExists(indexDir.c_str());
    bool dirIsFile = rapmap::fs::FileExists(indexDir.c_str());
    if (dirIsFile) {
        std::cerr << "The requested index directory already exists as a file.\n";
        std::exit(1);
    }
    if (!dirExists) {
        rapmap::fs::MakeDir(indexDir.c_str());
    }

    size_t maxReadGroup{1000}; // Number of reads in each "job"
    size_t concurrentFile{2};  // Number of files to read simultaneously
    size_t numThreads{2};
    stream_manager streams(transcriptFiles.begin(), transcriptFiles.end(), concurrentFile);
    std::unique_ptr<single_parser> transcriptParserPtr{nullptr};
    transcriptParserPtr.reset(new single_parser(4 * numThreads, maxReadGroup,
                                                concurrentFile, streams));
    std::mutex iomutex;
    indexTranscriptsSA(transcriptParserPtr.get(), indexDir, iomutex);
    return 0;
}
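The odd-k requirement is the usual one for DNA indexers: only an even-length k-mer can equal its own reverse complement, so odd k rules out self-complementary k-mers. A quick self-contained check:

#include <algorithm>
#include <cassert>
#include <string>

// Reverse complement of a DNA string.
static std::string revComp(std::string s) {
    std::reverse(s.begin(), s.end());
    for (char& c : s) {
        switch (c) {
            case 'A': c = 'T'; break;
            case 'T': c = 'A'; break;
            case 'C': c = 'G'; break;
            case 'G': c = 'C'; break;
        }
    }
    return s;
}

int main() {
    assert(revComp("ACGT") == "ACGT"); // even k: a k-mer can be its own reverse complement
    assert(revComp("ACG") != "ACG");   // odd k: the middle base would have to complement itself
    return 0;
}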
TokenStreamPtr ChineseAnalyzer::reusableTokenStream(const String& fieldName, ReaderPtr reader) {
    ChineseAnalyzerSavedStreamsPtr streams(boost::dynamic_pointer_cast<ChineseAnalyzerSavedStreams>(getPreviousTokenStream()));
    if (!streams) {
        streams = newLucene<ChineseAnalyzerSavedStreams>();
        streams->source = newLucene<ChineseTokenizer>(reader);
        streams->result = newLucene<ChineseFilter>(streams->source);
        setPreviousTokenStream(streams);
    } else {
        streams->source->reset(reader);
    }
    return streams->result;
}
TokenStreamPtr RussianAnalyzer::reusableTokenStream(const String& fieldName, const ReaderPtr& reader) {
    RussianAnalyzerSavedStreamsPtr streams(boost::dynamic_pointer_cast<RussianAnalyzerSavedStreams>(getPreviousTokenStream()));
    if (!streams) {
        streams = newLucene<RussianAnalyzerSavedStreams>();
        streams->source = newLucene<RussianLetterTokenizer>(reader);
        streams->result = newLucene<LowerCaseFilter>(streams->source);
        streams->result = newLucene<StopFilter>(StopFilter::getEnablePositionIncrementsVersionDefault(matchVersion),
                                                streams->result, stopSet);
        streams->result = newLucene<RussianStemFilter>(streams->result);
        setPreviousTokenStream(streams);
    } else {
        streams->source->reset(reader);
    }
    return streams->result;
}
TokenStreamPtr DutchAnalyzer::reusableTokenStream(const String& fieldName, ReaderPtr reader) {
    DutchAnalyzerSavedStreamsPtr streams(boost::dynamic_pointer_cast<DutchAnalyzerSavedStreams>(getPreviousTokenStream()));
    if (!streams) {
        streams = newLucene<DutchAnalyzerSavedStreams>();
        streams->source = newLucene<StandardTokenizer>(matchVersion, reader);
        streams->result = newLucene<StandardFilter>(streams->source);
        streams->result = newLucene<StopFilter>(StopFilter::getEnablePositionIncrementsVersionDefault(matchVersion),
                                                streams->result, stoptable);
        streams->result = newLucene<DutchStemFilter>(streams->result, excltable);
        setPreviousTokenStream(streams);
    } else {
        streams->source->reset(reader);
    }
    return streams->result;
}
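All three reusableTokenStream implementations above follow the same saved-streams pattern: build the tokenizer/filter chain once, cache it via setPreviousTokenStream(), and on later calls rewind only the tokenizer onto the new reader. A hypothetical usage sketch (document strings and the field name are illustrative):

// Hypothetical usage sketch: successive documents analyzed on the same
// thread reuse one cached chain instead of rebuilding it per document.
std::vector<String> docs = { L"eerste tekst", L"tweede tekst" };
AnalyzerPtr analyzer = newLucene<DutchAnalyzer>(LuceneVersion::LUCENE_CURRENT);
for (const String& doc : docs) {
    TokenStreamPtr ts = analyzer->reusableTokenStream(L"contents", newLucene<StringReader>(doc));
    while (ts->incrementToken()) {
        // ... inspect the term attribute here ...
    }
}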
bool KeywordGenerator::updateSource(const KeywordLoader& loader,
                                    const std::string& sourceFile,
                                    int blocks) const {
    std::stringstream newSource;

    // Distribute the keyword registrations evenly across `blocks`
    // generated translation units.
    const int keywords = loader.size();
    const int blocksize = (keywords / blocks) + 1;

    std::vector<std::stringstream> streams(blocks);
    for (unsigned int i = 0; i < streams.size(); ++i)
        streams[i] << sourceHeader << std::endl
                   << "void addDefaultKeywords" << i << "(Parser& p);" << std::endl
                   << "void addDefaultKeywords" << i << "(Parser& p) {" << std::endl;

    int bi = 0;
    for (auto iter = loader.keyword_begin(); iter != loader.keyword_end(); ++iter) {
        auto block = bi++ / blocksize;
        streams[block] << "p.addKeyword< ParserKeywords::"
                       << iter->second->className() << " >();" << std::endl;
    }

    // Close the function body and the namespaces opened by sourceHeader.
    for (auto& stream : streams)
        stream << "}}}" << std::endl;

    // Write each block to sourceFile with the block index inserted
    // before the four-character extension (e.g. ".cpp").
    for (unsigned int i = 0; i < streams.size(); ++i) {
        auto srcfile = sourceFile;
        updateFile(streams[i], srcfile.insert(srcfile.size() - 4, std::to_string(i)));
    }

    newSource << sourceHeader;
    for (auto iter = loader.keyword_begin(); iter != loader.keyword_end(); ++iter) {
        std::shared_ptr<ParserKeyword> keyword = (*iter).second;
        newSource << keyword->createCode() << std::endl;
    }

    for (auto i = 0; i < blocks; ++i)
        newSource << "void addDefaultKeywords" << i << "(Parser& p);" << std::endl;
    newSource << "}" << std::endl;

    newSource << "void Parser::addDefaultKeywords() {" << std::endl;
    for (auto i = 0; i < blocks; ++i)
        newSource << "Opm::ParserKeywords::addDefaultKeywords" << i << "(*this);" << std::endl;
    newSource << "}}" << std::endl;

    return write_file(newSource, sourceFile, m_verbose, "source");
}
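For reference, the shape of one generated block file (keyword names are illustrative; sourceHeader is assumed to open the Opm and ParserKeywords namespaces, which the emitted "}}}" closes together with the function body, matching the Opm::ParserKeywords:: qualification in the aggregate file):

// Sketch of generated addDefaultKeywords0 output (illustrative):
//   <sourceHeader: includes + "namespace Opm { namespace ParserKeywords {">
//   void addDefaultKeywords0(Parser& p);
//   void addDefaultKeywords0(Parser& p) {
//   p.addKeyword< ParserKeywords::ACTDIMS >();
//   ...
//   }}}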
OniStatus XnOniDevice::EnableFrameSync(XnOniStream** pStreams, int streamCount) {
    // Translate the XnOniStream to XnDeviceStream.
    xnl::Array<XnDeviceStream*> streams(streamCount);
    streams.SetSize(streamCount);
    for (int i = 0; i < streamCount; ++i) {
        streams[i] = pStreams[i]->GetDeviceStream();
    }

    // Set the frame sync group.
    XnStatus rc = m_sensor.SetFrameSyncStreamGroup(streams.GetData(), streamCount);
    if (rc != XN_STATUS_OK) {
        m_driverServices.errorLoggerAppend("Error setting frame-sync group (rc=%d)\n", rc);
        return ONI_STATUS_ERROR;
    }

    return ONI_STATUS_OK;
}
NPP PluginStream::ownerForStream(NPStream* stream)
{
    return streams().get(stream);
}
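The streams() calls in the constructor, destructor, and ownerForStream() above imply a process-wide registry mapping each NPStream back to its owning plugin instance. A stand-in sketch with std::map (WebKit's actual code uses its own hash map type; the class here is illustrative, matching only the add/remove/get surface used above):

#include <map>

// Illustrative stand-in for the registry behind streams().
class StreamMap {
public:
    void add(NPStream* stream, NPP instance) { m_map[stream] = instance; }
    void remove(NPStream* stream) { m_map.erase(stream); }
    NPP get(NPStream* stream) const {
        std::map<NPStream*, NPP>::const_iterator it = m_map.find(stream);
        return it != m_map.end() ? it->second : 0;
    }
private:
    std::map<NPStream*, NPP> m_map;
};

static StreamMap& streams()
{
    static StreamMap staticStreams; // one registry per process
    return staticStreams;
}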
int rapMapMap(int argc, char* argv[]) {
    std::cerr << "RapMap Mapper\n";

    std::string versionString = rapmap::version;
    TCLAP::CmdLine cmd("RapMap Mapper", ' ', versionString);
    cmd.getProgramName() = "rapmap";

    TCLAP::ValueArg<std::string> index("i", "index", "The location of the pseudoindex", true, "", "path");
    TCLAP::ValueArg<std::string> read1("1", "leftMates", "The location of the left paired-end reads", false, "", "path");
    TCLAP::ValueArg<std::string> read2("2", "rightMates", "The location of the right paired-end reads", false, "", "path");
    TCLAP::ValueArg<std::string> unmatedReads("r", "unmatedReads", "The location of single-end reads", false, "", "path");
    TCLAP::ValueArg<uint32_t> numThreads("t", "numThreads", "Number of threads to use", false, 1, "positive integer");
    TCLAP::ValueArg<uint32_t> maxNumHits("m", "maxNumHits", "Reads mapping to more than this many loci are discarded", false, 200, "positive integer");
    TCLAP::ValueArg<std::string> outname("o", "output", "The output file (default: stdout)", false, "", "path");
    TCLAP::SwitchArg endCollectorSwitch("e", "endCollector", "Use the simpler (and faster) \"end\" collector as opposed to the more sophisticated \"skipping\" collector", false);
    TCLAP::SwitchArg noout("n", "noOutput", "Don't write out any alignments (for speed testing purposes)", false);
    cmd.add(index);
    cmd.add(noout);
    cmd.add(read1);
    cmd.add(read2);
    cmd.add(unmatedReads);
    cmd.add(outname);
    cmd.add(numThreads);
    cmd.add(maxNumHits);
    cmd.add(endCollectorSwitch);

    auto consoleSink = std::make_shared<spdlog::sinks::stderr_sink_mt>();
    auto consoleLog = spdlog::create("stderrLog", {consoleSink});

    try {
        cmd.parse(argc, argv);
        bool pairedEnd = (read1.isSet() or read2.isSet());
        if (pairedEnd and (read1.isSet() != read2.isSet())) {
            consoleLog->error("You must set both the -1 and -2 arguments to align "
                              "paired end reads!");
            std::exit(1);
        }

        if (pairedEnd and unmatedReads.isSet()) {
            consoleLog->error("You cannot specify both paired-end and unmated "
                              "reads in the input!");
            std::exit(1);
        }

        if (!pairedEnd and !unmatedReads.isSet()) {
            consoleLog->error("You must specify input; either both paired-end "
                              "or unmated reads!");
            std::exit(1);
        }

        std::string indexPrefix(index.getValue());
        if (indexPrefix.back() != '/') {
            indexPrefix += "/";
        }

        if (!rapmap::fs::DirExists(indexPrefix.c_str())) {
            consoleLog->error("It looks like the index you provided [{}] "
                              "doesn't exist", indexPrefix);
            std::exit(1);
        }

        IndexHeader h;
        std::ifstream indexStream(indexPrefix + "header.json");
        {
            cereal::JSONInputArchive ar(indexStream);
            ar(h);
        }
        indexStream.close();

        if (h.indexType() != IndexType::PSEUDO) {
            consoleLog->error("The index {} does not appear to be of the "
                              "appropriate type (pseudo)", indexPrefix);
            std::exit(1);
        }

        RapMapIndex rmi;
        rmi.load(indexPrefix);

        std::cerr << "\n\n\n\n";

        // from: http://stackoverflow.com/questions/366955/obtain-a-stdostream-either-from-stdcout-or-stdofstreamfile
        // set either a file or cout as the output stream
        std::streambuf* outBuf;
        std::ofstream outFile;
        bool haveOutputFile{false};
        if (outname.getValue() == "") {
            outBuf = std::cout.rdbuf();
        } else {
            outFile.open(outname.getValue());
            outBuf = outFile.rdbuf();
            haveOutputFile = true;
        }

        // Now set the output stream to the buffer, which is
        // either std::cout, or a file.
        std::ostream outStream(outBuf);

        // Must be a power of 2
        size_t queueSize{268435456};
        spdlog::set_async_mode(queueSize);
        auto outputSink = std::make_shared<spdlog::sinks::ostream_sink_mt>(outStream);
        auto outLog = std::make_shared<spdlog::logger>("outLog", outputSink);
        outLog->set_pattern("%v");

        uint32_t nthread = numThreads.getValue();
        std::unique_ptr<paired_parser> pairParserPtr{nullptr};
        std::unique_ptr<single_parser> singleParserPtr{nullptr};

        if (!noout.getValue()) {
            rapmap::utils::writeSAMHeader(rmi, outLog);
        }

        SpinLockT iomutex;
        {
            ScopedTimer timer;
            HitCounters hctrs;
            consoleLog->info("mapping reads . . . \n\n\n");
            if (pairedEnd) {
                std::vector<std::thread> threads;
                std::vector<std::string> read1Vec = rapmap::utils::tokenize(read1.getValue(), ',');
                std::vector<std::string> read2Vec = rapmap::utils::tokenize(read2.getValue(), ',');

                if (read1Vec.size() != read2Vec.size()) {
                    consoleLog->error("The number of provided files for "
                                      "-1 and -2 must be the same!");
                    std::exit(1);
                }

                size_t numFiles = read1Vec.size() + read2Vec.size();
                char** pairFileList = new char*[numFiles];
                for (size_t i = 0; i < read1Vec.size(); ++i) {
                    pairFileList[2 * i] = const_cast<char*>(read1Vec[i].c_str());
                    pairFileList[2 * i + 1] = const_cast<char*>(read2Vec[i].c_str());
                }
                size_t maxReadGroup{1000}; // Number of reads in each "job"
                size_t concurrentFile{2};  // Number of files to read simultaneously
                pairParserPtr.reset(new paired_parser(4 * nthread, maxReadGroup,
                                                      concurrentFile,
                                                      pairFileList, pairFileList + numFiles));

                /** Create the threads depending on the collector type **/
                if (endCollectorSwitch.getValue()) {
                    EndCollector endCollector(&rmi);
                    for (size_t i = 0; i < nthread; ++i) {
                        threads.emplace_back(processReadsPair<EndCollector, SpinLockT>,
                                             pairParserPtr.get(),
                                             std::ref(rmi),
                                             std::ref(endCollector),
                                             &iomutex,
                                             outLog,
                                             std::ref(hctrs),
                                             maxNumHits.getValue(),
                                             noout.getValue());
                    }
                } else {
                    SkippingCollector skippingCollector(&rmi);
                    for (size_t i = 0; i < nthread; ++i) {
                        threads.emplace_back(processReadsPair<SkippingCollector, SpinLockT>,
                                             pairParserPtr.get(),
                                             std::ref(rmi),
                                             std::ref(skippingCollector),
                                             &iomutex,
                                             outLog,
                                             std::ref(hctrs),
                                             maxNumHits.getValue(),
                                             noout.getValue());
                    }
                }

                for (auto& t : threads) { t.join(); }
                delete [] pairFileList;
            } else {
                std::vector<std::thread> threads;
                std::vector<std::string> unmatedReadVec = rapmap::utils::tokenize(unmatedReads.getValue(), ',');
                size_t maxReadGroup{1000}; // Number of reads in each "job"
                size_t concurrentFile{1};
                stream_manager streams(unmatedReadVec.begin(), unmatedReadVec.end(), concurrentFile);
                singleParserPtr.reset(new single_parser(4 * nthread, maxReadGroup,
                                                        concurrentFile, streams));

                /** Create the threads depending on the collector type **/
                if (endCollectorSwitch.getValue()) {
                    EndCollector endCollector(&rmi);
                    for (size_t i = 0; i < nthread; ++i) {
                        threads.emplace_back(processReadsSingle<EndCollector, SpinLockT>,
                                             singleParserPtr.get(),
                                             std::ref(rmi),
                                             std::ref(endCollector),
                                             &iomutex,
                                             outLog,
                                             std::ref(hctrs),
                                             maxNumHits.getValue(),
                                             noout.getValue());
                    }
                } else {
                    SkippingCollector skippingCollector(&rmi);
                    for (size_t i = 0; i < nthread; ++i) {
                        threads.emplace_back(processReadsSingle<SkippingCollector, SpinLockT>,
                                             singleParserPtr.get(),
                                             std::ref(rmi),
                                             std::ref(skippingCollector),
                                             &iomutex,
                                             outLog,
                                             std::ref(hctrs),
                                             maxNumHits.getValue(),
                                             noout.getValue());
                    }
                }
                for (auto& t : threads) { t.join(); }
            }
            consoleLog->info("Done mapping reads.");
            consoleLog->info("In total saw {} reads.", hctrs.numReads);
            consoleLog->info("Final # hits per read = {}",
                             hctrs.totHits / static_cast<float>(hctrs.numReads));
            consoleLog->info("Discarded {} reads because they had > {} alignments",
                             hctrs.tooManyHits, maxNumHits.getValue());
            consoleLog->info("flushing output");
            outLog->flush();
        }

        if (haveOutputFile) {
            outFile.close();
        }
        return 0;
    } catch (TCLAP::ArgException& e) {
        consoleLog->error("Exception [{}] when parsing argument {}",
                          e.error(), e.argId());
        return 1;
    }
}