bool initialize() { GPU::HiddenWindow::initialize(); Format testFmnt; testFmnt.versionMajor = 2; testFmnt.versionMinor = 1; testFmnt.versionProfile = Profile::COMPATIBILITY; Context* testCtxt = new Context(testFmnt); testCtxt->makeCurrent(); vendorString = std::string((char*)glGetString(GL_VENDOR)); rendererString = std::string((char*)glGetString(GL_RENDERER)); std::string versionString((char*)glGetString(GL_VERSION)); std::string glslVersionString((char*)glGetString(GL_SHADING_LANGUAGE_VERSION)); testCtxt->doneCurrent(); delete testCtxt; std::stringstream strStream(versionString); std::string token; std::getline(strStream, token, '.'); maxGLVersionMajor = atoi(token.c_str()); std::getline(strStream, token, '.'); maxGLVersionMinor = atoi(token.c_str()); return true; }
void FPGrowth::buildFPTree(string fileName) { ifstream ifile; ifile.open(fileName.c_str()); if (!ifile) { cout << "Can not open file :" << fileName << endl; return; } string tran; //get a transaction while (getline(ifile,tran)) { stringstream strStream(tran); string item; vector<string> items; //transaction items while (strStream >> item) { if (nameIndex.find(item) != nameIndex.end()) { items.push_back(item); //only add item that match minSupport } } //sort items gNameIndex = &nameIndex; sort(items.begin(), items.end(), CompareItem); gNameIndex = NULL; insertTran(items);//insert a modified transaction to FPTree } ifile.close(); }
// Loads a camera trajectory (eye + look-at splines) from a file under
// <cwd>\Data\Splines\<name>.
// NOTE(review): this block is truncated here — the function body continues
// past the visible source (its closing brace is not in view), so only the
// path setup and optional SPEED header parsing are documented.
bool ISceneBase::LoadCameraTrajFromFile(const std::string& name) {
	assert(m_pCamEyeTraj);
	assert(m_pCamLookAtTraj);
	m_pCamEyeTraj->Clear();
	m_pCamLookAtTraj->Clear();
	// Set up path for splines
	char szPath[MAX_PATH+1];
	GetCurrentDirectory(MAX_PATH, szPath); // Get Our Working Directory
	strcat_s(szPath, "\\Data\\Splines\\"); // Append "\\Data\\Splines\\" After The Working Directory
	strcat_s(szPath, name.c_str()); // Append The PathName
	// convert back to string
	std::string filename;
	filename = szPath;
	std::ifstream fp(filename.c_str());
	if(!fp)
		return false;
	float speed = 0.01f; // default speed; overridden by a "SPEED <f>" header line below
	std::string str;
	std::string strBuffer;
	std::getline(fp, strBuffer);
	// Optional first line of the form "SPEED <value>" overrides the default.
	if(strBuffer.size() > 3) {
		std::stringstream strStream(strBuffer);
		float f;
		strStream >> str >> f;
		if(str == "SPEED")
			speed = f;
	}
// Parses str as a base-10 integer.
// Returns the parsed value, or -1 if str does not begin with a valid integer.
// BUGFIX: the original initialized outInt to -1 and relied on a failed
// extraction leaving it untouched. Since C++11 a failed stream operator>>
// writes 0 to the target, which silently broke the -1 failure sentinel;
// the stream state is now checked explicitly instead.
static int stringToInt(const STString &str) {
	std::stringstream strStream(str);
	int outInt = 0;
	if (!(strStream >> outInt)) {
		return -1; // extraction failed: preserve the documented sentinel
	}
	return outInt;
}
Common::String AdlEngine_v2::loadMessage(uint idx) const {
	// A null slot means no message is stored at this index; return empty.
	if (!_messages[idx])
		return Common::String();

	// Read the message text from the slot's stream; 0xff is the delimiter
	// passed through to readString.
	StreamPtr msgStream(_messages[idx]->createReadStream());
	return readString(*msgStream, 0xff);
}
//입력받은 쿼리의 올바름을 검증하는 함수 bool Query::validate(string input) { int open = 0, close = 0, word = 0; bool oper = false, keyword = false; string str; stringstream strStream(input); //인풋이 아예 없는 경우 if (input[0] == 0) return false; while (strStream >> str) { if (str == "(") open++; else if (str == ")")close++; //연산자가 나왔는데 else if (str == "AND" || str == "OR") { //연산자가 연달아 나오거나 키워드가 나오지 않았는데 연산자가 나온경우 if (oper == true || keyword == false) { cout << "연산자를 확인해 주세요." << endl; return false; } else { oper = true; keyword = false; } } //단어가 나왔는데 else { word++; //단어가 연달아 나온경우 if (keyword == true) { cout << "연산자를 확인해 주세요." << endl; return false; } else { oper = false; keyword = true; } } } if (open != close) { cout << "괄호를 확인해 주세요." << endl; return false; } if (word == 0) { cout << "쿼리를 입력해 주세요" << endl; return false; } query = "( " + input + " )"; return true; }
// Splits `str` on `delim` and returns the pieces in order.
// Mirrors std::getline semantics: consecutive delimiters yield empty
// elements, while a trailing delimiter does not produce a trailing empty one.
static std::vector<std::string> split(const std::string& str, char delim) {
	std::vector<std::string> pieces;
	std::stringstream source(str);
	for (std::string piece; std::getline(source, piece, delim); ) {
		pieces.push_back(piece);
	}
	return pieces;
}
// Tokenizes `str` on `delimiter`, appending each field to `cells`.
// Note the deliberate quirk of the good()-loop: a trailing delimiter (or an
// empty input string) yields a trailing empty field, because the final
// failed getline still pushes the (cleared) cell.
void SplitString(string& str, char delimiter, vector<string>& cells) {
	stringstream tokens(str);
	while (tokens.good()) {
		string field;
		getline(tokens, field, delimiter);
		cells.push_back(field);
	}
}
void FPGrowth::buildHeaderTable(string fileName) { ifstream ifile; ifile.open(fileName.c_str()); if (!ifile) { cout << "Can not open file :" << fileName << endl; return; } string tran; //get a transaction int index = 0; //index in HeaderTable int transCount = 0; while (getline(ifile,tran)) { transCount ++; stringstream strStream(tran); string item; while (strStream >> item) { if (nameIndex.find(item) != nameIndex.end()) { FPHeaderTable[nameIndex[item]]->freq ++; } else{ //item doesn't exit in Header Table nameIndex[item] = index; FPHeaderTableNode *headerTableNode = new FPHeaderTableNode(item); FPHeaderTable.push_back(headerTableNode); index ++; } } } ifile.close(); //delete items that not match min Support vector<FPHeaderTableNode *>::iterator it; minCountSupport = (float)transCount * minSupport; for (it = FPHeaderTable.begin(); it != FPHeaderTable.end(); it++) { FPHeaderTableNode *node = *it; if ((float)(node->freq) < minCountSupport) { it = FPHeaderTable.erase(it); it --; } } sortHeaderTable(); }
// Reads the bind ("start") matrix for `bone` from the <startMatrix> child of
// `node`: up to sixteen whitespace-separated numbers are parsed into
// bone->StartMatrix in order.
// NOTE(review): startMatrix is dereferenced without a null check — if the
// <startMatrix> element is missing this crashes; confirm the document schema
// guarantees its presence. This block is also truncated here (the function's
// closing brace is past the visible source).
void ReadingMatrix( TiXmlElement* node , le::Bone* bone ) {
	// Work with the startMatrix container
	TiXmlElement *startMatrix;
	startMatrix = node->FirstChildElement( "startMatrix" );

	string sTmpMatrix;

	if ( startMatrix->GetText() != NULL ) {
		sTmpMatrix = startMatrix->GetText();
		istringstream strStream( sTmpMatrix );

		// Parse up to 16 numeric tokens; stop early if the text runs out.
		for ( int i = 0; i < 16 && !strStream.eof(); i++ ) {
			string sTmp;
			strStream >> sTmp;
			bone->StartMatrix[ i ] = atof( sTmp.c_str() );
		}
	}
// Loads a camera trajectory (eye + look-at splines) from ./data/splines/<name>.
// NOTE(review): this block is truncated here — the function body continues
// past the visible source (its closing brace is not in view), so only the
// file open and optional SPEED header parsing are documented.
bool SceneBase::LoadCameraTrajFromFile(const std::string& name) {
	m_pCamEyeTraj->Clear();
	m_pCamLookAtTraj->Clear();
	std::string filename = "./data/splines/" + name;
	std::ifstream fp(filename.c_str());
	if(!fp)
		return false;
	float speed = 0.01f; // default playback speed; overridden by a SPEED header below
	std::string str;
	std::string strBuffer;
	std::getline(fp, strBuffer);
	// Optional first line of the form "SPEED <value>".
	if(strBuffer.size() > 3) {
		std::stringstream strStream(strBuffer);
		float f;
		strStream >> str >> f;
		if(str == "SPEED")
			speed = f;
	}
// Reads grammar rules from stdin (one per line: a rule number followed by
// alternating <type, value> token pairs, where type 't' marks a terminal and
// 'n' a non-terminal) and echoes each rule to stdout, randomly mutating
// terminal values and occasionally appending a fresh trailing terminal.
int main(int argc, char *argv[]) {
	std::string rule;
	// Seed the mutation RNG from get_rdtsc() (presumably the CPU timestamp counter).
	srand(get_rdtsc());
	while (getline(std::cin, rule)) {
		unsigned int ruleNum;
		std::stringstream strStream(rule);
		strStream >> ruleNum;
		printf("%u ", ruleNum);
		// Consume <type, num> pairs until the line is exhausted.
		while (!strStream.eof()) {
			char type;
			unsigned int num;
			strStream >> type;
			// Trailing whitespace makes this read hit eof; stop cleanly.
			if (strStream.eof()) {
				break;
			}
			strStream >> num;
			if (type == 't') {
				if (okToMutateTerminal()) {
					// Mutate: replace the terminal with a random byte value.
					printf("t %u ", rand() % 256);
				} else {
					printf("t %u ", num);
				}
			} else {
				assert(type == 'n'); // only 't'/'n' token types are expected
				printf("n %u ", num);
			}
		}
		// Occasionally grow the rule by appending a random terminal.
		if (okToMutateInsertTrailingTerminal()) {
			printf("t %u ", rand() % 256);
		}
		printf("\n");
	}
	return 0;
}
// Load in all available nodes from mib.nodes into comboBox_ID void Setter::initNodesList(){ std::string line; std::ifstream fin("src/dependencies/CFDP/CODE/mib.nodes"); // Ensure file opened properly if(!fin.is_open()){ ui->comboBox_ID->addItem(global_mib.ID); ui->comboBox_ID->addItem("Error"); return; } else{ while(std::getline(fin, line)){ std::istringstream strStream(line); float node; while(strStream >> node){ ui->comboBox_ID->addItem(QString::number(node)); } } // Close file fin.close(); } }
// Loads a bam-readcount output file (whitespace/tab-delimited, one genomic
// location per line) and merges each line into locationTable keyed by
// "chr:pos". Each per-allele column is split on ':' into the allele metrics.
// NOTE(review): declared to return int but has no return statement — reading
// the result at a call site is undefined behavior; confirm callers ignore it.
// NOTE(review): token fields are indexed without bounds checks
// (stringTokens[0..3], nextSubTokens[0..13]) — a malformed line will throw
// or misbehave; assumes well-formed bam-readcount output.
int Caller::loadEntries( const std::string path) {
	std::string nextLine;
	std::string key;     // hash key, "chr:pos"
	std::string chr;
	std::string refBase;
	int readDepth;
	int pos;

	// Open the sample file
	std::ifstream inputFile( path.c_str());
	if( !inputFile.is_open()) {
		perror( "Error opening input readcount file");
		exit( 1);
	}

	// For each line in the sample file (which will correspond to a genomic location)
	while( std::getline( inputFile, nextLine)) {
		// Split the line into tokens separated by whitespace (for columns, since this is a tab delimited file)
		std::istringstream strStream( nextLine);
		std::istream_iterator<std::string> begin( strStream), end;
		std::vector<std::string> stringTokens( begin, end);

		// Get all main fields
		chr = stringTokens[0];
		pos = atoi( stringTokens[1].c_str());
		refBase = stringTokens[2];
		refBase[0] = toupper( refBase[0]); // normalize the reference base to uppercase
		readDepth = atoi( stringTokens[3].c_str());

		// Generate the key, (chr:pos)
		key = stringTokens[0] + ":" + stringTokens[1];
		if( key == "") {
			std::cout << "Empty key" << std::endl;
		}

		// Create the base ReadcountEntry object
		ReadcountEntry nextReadcountEntry( refBase, readDepth);

		// Get all subfields for each allele, the 5th column (stringTokens[4]) is garbage due to a bug with the bam-readcount program, ignore it
		for( int i = 5; i < stringTokens.size(); i++) {
			std::vector<std::string> nextSubTokens = Common::split( stringTokens[i], ":", true);

			// Create the Allele objects and add them to the current ReadcountEntry object
			std::string base = nextSubTokens[0];
			int count = atoi( nextSubTokens[1].c_str());
			double avgMappingQuality = atof( nextSubTokens[2].c_str());
			double avgBaseQuality = atof( nextSubTokens[3].c_str());
			double avgSEMappingQuality = atof( nextSubTokens[4].c_str());
			int numPlusStrand = atoi( nextSubTokens[5].c_str());
			int numMinusStrand = atoi( nextSubTokens[6].c_str());
			double avgPosAsFraction = atof( nextSubTokens[7].c_str());
			double avgNumMismatchesAsFraction = atof( nextSubTokens[8].c_str());
			double avgSumMismatchQualities = atof( nextSubTokens[9].c_str());
			int numQ2ContainingReads = atoi( nextSubTokens[10].c_str());
			double avgDistanceToQ2StartInQ2Reads = atof( nextSubTokens[11].c_str());
			double avgClippedLength = atof( nextSubTokens[12].c_str());
			double avgDistanceToEffective3pEnd = atof( nextSubTokens[13].c_str());

			// An allele differing from the reference base is a variant.
			bool variant = false;
			if( base != refBase) {
				variant = true;
			}

			// Allele frequency as a percentage of the total read depth.
			double percentage;
			if( readDepth != 0) {
				percentage = ( double) count / ( double) readDepth * 100;
			} else {
				percentage = 0; // avoid division by zero on zero-depth locations
			}

			Allele nextAllele( base, count, avgMappingQuality, avgBaseQuality, avgSEMappingQuality,
				numPlusStrand, numMinusStrand, avgPosAsFraction, avgNumMismatchesAsFraction,
				avgSumMismatchQualities, numQ2ContainingReads, avgDistanceToQ2StartInQ2Reads,
				avgClippedLength, avgDistanceToEffective3pEnd, percentage, variant);
			nextReadcountEntry.addAllele( nextAllele);
		}

		// Now, the ReadcountEntry object is filled, so we can create the Sample object
		nextReadcountEntry.setMostFreqVariantAllele();
		Sample nextSample( path, nextReadcountEntry);

		// Finally, add the Sample object to the Location object,
		// Check if the Location object with the current key exists in the hash table
		std::unordered_map<std::string, Location>::iterator iter = locationTable.find( key);
		if( iter == locationTable.end()) {
			// If it does not exist, create the Location object
			Location newLocation( chr, pos);
			// Add the new Sample to the Location object
			newLocation.addSample( nextSample);
			// Insert the new key-Location pair to the hash table
			std::pair<std::string, Location> newKeyPair( key, newLocation);
			locationTable.insert( newKeyPair);
		} else {
			// Location already present: add the sample only if this sample
			// name has not been recorded for it yet (avoids duplicates).
			bool sampleExists = false;
			std::vector<Sample> samples = ( iter->second).getSamples();
			for( int j = 0; j < samples.size(); j++) {
				if( samples[j].getSampleName() == nextSample.getSampleName()) {
					sampleExists = true;
				}
			}
			if( !sampleExists) {
				( iter->second).addSample( nextSample);
			}
		}
	}

	// Check if the file was read correctly
	if( inputFile.bad()) {
		perror( "Error reading input readcount file");
	}

	// Close the input sample file
	inputFile.close();
}
// lngtool-style helper: merges a translated .lng file into a language
// template feed. Reads the input template and the .lng file, then either
// updates an existing language section in place or inserts a brand-new one,
// and writes the merged result to the output template file.
// NOTE(review): the `while(!eof())` read loops also process the final failed
// getline's (possibly empty) buffer; the compare-based filtering below
// appears to tolerate this, but confirm behavior on malformed input.
int wmain(int argc, wchar_t* argv[]) {
	if(argc != 4) {
		// Print usage with just the executable's base name.
		const auto NamePtr = wcsrchr(argv[0], L'\\');
		std::wcout << L"Usage:\n" << (NamePtr? NamePtr+1 : argv[0]) << L" input_template_file output_template_file new_lng_file" << std::endl;
		return -1;
	}
	const std::wstring InFeedName = argv[1], OutFeedName = argv[2], LngName = argv[3];
	std::wifstream Feed(InFeedName), Lng(LngName);

	std::wcout << L"Reading " << LngName << std::endl;
	std::wstring LngHeader;
	std::getline(Lng, LngHeader); // first line: language header (parsed as "...=Short,Full" below)
	std::list<std::wstring> LngLines;
	std::wstring Buffer;
	// Collect only quoted lines — these are the translated string constants.
	while(!Lng.eof()) {
		std::getline(Lng, Buffer);
		if(!Buffer.compare(0, 1, L"\"")) {
			LngLines.push_back(Buffer);
		}
	}

	std::wcout << L"Reading " << InFeedName << std::endl;
	std::list<std::wstring> FeedLines;
	while(!Feed.eof()) {
		getline(Feed, Buffer);
		FeedLines.push_back(Buffer);
	}

	// Sanity check: the template must define exactly as many constants
	// (lines starting with 'M') as the .lng file has translations.
	size_t ConstsCount = 0;
	for(auto i = FeedLines.begin(); i != FeedLines.end(); ++i) {
		// assume that all constants starts with 'M'.
		if(!i->compare(0, 1, L"M")) {
			++ConstsCount;
		}
	}
	if(ConstsCount != LngLines.size()) {
		std::wcerr << L"Error: lines count mismatch: " << InFeedName << " - " << ConstsCount << L", " << LngName << L" - " << LngLines.size() << std::endl;
		return -1;
	}
	if(FeedLines.back().empty()) {
		FeedLines.pop_back(); // drop the trailing blank produced by the eof-loop read
	}

	auto Ptr = FeedLines.begin();
	// Skip a possible UTF-8 BOM (widened bytes) + m4 include directive on the first line.
	if(!Ptr->compare(0, 14, L"\xef\xbb\xbfm4_include(")) {
		++Ptr;
	}
#define SKIP_EMPTY_LINES_AND_COMMENTS while(Ptr->empty() || !Ptr->compare(0, 1, L"#")) {++Ptr;}
	SKIP_EMPTY_LINES_AND_COMMENTS
	// skip header name
	++Ptr;
	SKIP_EMPTY_LINES_AND_COMMENTS
	// This line holds the number of languages currently in the feed.
	std::wstringstream strStream(*Ptr);
	size_t Num;
	strStream >> Num;
	std::wcout << Num << L" languages found." << std::endl;
	auto NumPtr = Ptr; // remember where the count lives so it can be bumped later
	++Ptr;

	// Look for an existing section for this language.
	bool Update = false;
	size_t UpdateIndex = Num;
	for(size_t i = 0; i < Num; ++i, ++Ptr) {
		SKIP_EMPTY_LINES_AND_COMMENTS
		if(!Ptr->compare(0, LngName.length(), LngName)) {
			Update = true;
			UpdateIndex = i;
			break;
		}
	}
	if(Update) {
		std::wcout << LngName << " already exist (id == " << UpdateIndex << L"). Updating." << std::endl;
	} else {
		std::wcout << L"Inserting new language (id == " << UpdateIndex << L") from " << LngName << std::endl;
		// Bump the language count and register the new language's short/full
		// names parsed from the .lng header line ("...=Short,Full").
		strStream.clear();
		strStream << Num+1;
		*NumPtr = strStream.str();
		std::wstring ShortLngName = LngHeader.substr(LngHeader.find(L'=', 0)+1);
		ShortLngName.resize(ShortLngName.find(L','), 0);
		std::wstring FullLngName = LngHeader.substr(LngHeader.find(L',', 0)+1);
		FeedLines.insert(Ptr, LngName+L" " + ShortLngName + L" \"" + FullLngName + L"\"");
	}

	// Walk the constants, splicing each translation in at slot UpdateIndex.
	for(auto i = LngLines.begin(); i != LngLines.end(); ++i) {
		// assume that all constants start with 'M'.
		while(Ptr->compare(0, 1, L"M")) {
			++Ptr;
		}
		++Ptr;
		// Advance past UpdateIndex existing translation lines ("..." or upd:"...").
		for(size_t j = 0; j < UpdateIndex || !UpdateIndex; ++j) {
			while(Ptr != FeedLines.end() && Ptr->compare(0, 1, L"\"") && Ptr->compare(0, 5, L"upd:\"")) {
				++Ptr;
			}
			if(!UpdateIndex) {
				break;
			}
			++Ptr;
		}
		if(Update) {
			// Only overwrite if the stored text (sans any "upd:" prefix) differs.
			const wchar_t* Str = Ptr->c_str();
			size_t l = Ptr->length();
			if(!Ptr->compare(0, 4, L"upd:")) {
				Str += 4;
				l -= 4;
			}
			if(i->compare(0, l, Str)) {
				*Ptr = *i;
			}
		} else {
			FeedLines.insert(Ptr, *i);
		}
	}

	std::wcout << L"Writing to " << OutFeedName << std::endl;
	std::wofstream oFeed(OutFeedName);
	for(auto i = FeedLines.begin(); i != FeedLines.end(); ++i) {
		oFeed << *i << L'\n';
	}
	std::wcout << L"Done." << std::endl;
	return 0;
}
// Builds the Atlas message describing the entity this recipe creates.
// Runs the recipe's Lua script, evaluates every adapter binding (either
// directly from a single GUI adapter, or through a bound Lua function), then
// composes the entity spec XML into an Atlas map, filling in any default
// attributes from the Eris type that the spec did not set. Falls back to a
// minimal {parents, name} message when there is no XML spec.
Atlas::Message::MapType EntityRecipe::createEntity(Eris::TypeService& typeService) {
	S_LOG_VERBOSE("Creating entity.");

	ScriptingService& scriptingService = EmberServices::getSingleton().getScriptingService();

	// Loading script code
	scriptingService.executeCode(mScript, "LuaScriptingProvider");

	// Walking through adapter bindings
	for (BindingsStore::iterator I = mBindings.begin(); I != mBindings.end(); ++I) {
		const std::string& func = I->second->getFunc();
		S_LOG_VERBOSE(" binding: " << I->first << " to func " << func);
		if (func.empty()) {
			// No transform function: the binding must map 1:1 to one adapter.
			std::vector<std::string>& adapters = I->second->getAdapters();
			if (adapters.size() == 1) {
				std::string adapterName = adapters[0];
				Atlas::Message::Element val = mGUIAdapters[adapterName]->getValue();
				I->second->setValue(val);
			} else {
				S_LOG_WARNING("Should be only one adapter without calling function.");
			}
		} else {
			// Push every adapter value as an argument and call the bound Lua
			// function; its return value becomes the binding's value.
			Lua::LuaScriptingCallContext callContext;
			lua_State* L = static_cast<Lua::LuaScriptingProvider*> (scriptingService.getProviderFor("LuaScriptingProvider"))->getLuaState();
			// Pushing function params
			std::vector<std::string>& adapters = I->second->getAdapters();
			for (std::vector<std::string>::iterator J = adapters.begin(); J != adapters.end(); J++) {
				std::string adapterName = *J;
				// Ownership of val is transferred to Lua via takeownership.
				Atlas::Message::Element* val = new Atlas::Message::Element(mGUIAdapters[adapterName]->getValue());
				tolua_pushusertype_and_takeownership(L, val, "Atlas::Message::Element");
			}
			// Calling test function
			scriptingService.callFunction(func, adapters.size(), "LuaScriptingProvider", &callContext);
			LuaRef returnValue(callContext.getReturnValue());
			Atlas::Message::Element returnObj;
			returnObj = returnValue.asObject<Atlas::Message::Element> ("Atlas::Message::Element");
			I->second->setValue(returnObj);
		}
	}

	//Inject all default attributes that aren't yet added.
	// TiXmlElement *elem = mEntitySpec->FirstChildElement("atlas");
	// if (elem)
	// {
	// Eris::TypeInfo* erisType = mConn->getTypeService()->getTypeByName(getEntityType());
	// if (erisType) {
	// const Atlas::Message::MapType& defaultAttributes = erisType->getAttributes();
	// for (Atlas::Message::MapType::const_iterator I = defaultAttributes.begin(); I != defaultAttributes.end(); ++I) {
	// bool hasAttribute = false;
	// TiXmlNode* child(0);
	// while(child = elem->IterateChildren(child)) {
	// if (child->ToElement()) {
	// if (std::string(child->ToElement()->Attribute("name")) == I->first) {
	// hasAttribute = true;
	// break;
	// }
	// }
	// }
	//
	// if (!hasAttribute) {
	// //The attribute isn't present, we'll inject it
	// //This a bit contrived, since we'll now first convert the atlas into xml and inject it into the TiXmlElement (which will convert the xml strings into TiXml structures). And then later on we'll parse the xml again and create the final atlas data from it. However, the main reason for doing it this way is that in the future we would want to have nested child elements, which could be repeated. And in those cases we'll want to work directly with xml.
	// }
	// }
	// }
	// }
	/* std::stringstream str; Atlas::Message::Element element(message); Atlas::Message::QueuedDecoder decoder; Atlas::Codecs::XML codec(str, decoder); Atlas::Formatter formatter(str, codec); Atlas::Message::Encoder encoder(formatter); formatter.streamBegin(); encoder.streamMessageElement(message); formatter.streamEnd(); */
	if (mEntitySpec) {
		// Print entity into string
		TiXmlPrinter printer;
		printer.SetStreamPrinting();
		mEntitySpec->Accept(&printer);
		S_LOG_VERBOSE("Composed entity: " << printer.Str());

		// Feed the printed XML through an Atlas XML codec to obtain messages.
		std::stringstream strStream(printer.CStr(), std::ios::in);

		// Create objects
		Atlas::Message::QueuedDecoder decoder;
		Atlas::Codecs::XML codec(strStream, decoder);

		// Read whole stream into decoder queue
		while (!strStream.eof()) {
			codec.poll();
		}

		// Read decoder queue
		while (decoder.queueSize() > 0) {
			Atlas::Message::MapType m = decoder.popMessage();
			Eris::TypeInfo* erisType = typeService.getTypeByName(getEntityType());
			if (erisType) {
				// Backfill attributes the spec omitted with the type's defaults.
				const Atlas::Message::MapType& defaultAttributes = erisType->getAttributes();
				for (Atlas::Message::MapType::const_iterator I = defaultAttributes.begin(); I != defaultAttributes.end(); ++I) {
					if (m.find(I->first) == m.end()) {
						m.insert(Atlas::Message::MapType::value_type(I->first, I->second));
					}
				}
			}
			// NOTE(review): returns after the first decoded message; any
			// further queued messages are discarded — confirm this is intended.
			return m;
		}
	} else {
		// No XML spec: minimal message carrying just the type and name.
		Atlas::Message::MapType msg;
		msg["parents"] = Atlas::Message::ListType(1, mEntityType);
		msg["name"] = getName();
		return msg;
	}
	S_LOG_WARNING("No entity composed");
	return Atlas::Message::MapType();
}
// In this method, we will execute the nslookup command and return back // four pieces of information in a map ash shown below. // "Name Server Name" => "Name of the name server" // "Name Server Address" => "IP address of the name server" // "Client Machine Name" => "Fully qualified name of the client machine" // "Client Machine Address" => "IP address of the client machine" unordered_map<string, string> NameServerLookup::getNSLookupResults(string const & nodeName) { FILE *fpipe; string nsLookupCommand = (string)"nslookup " + nodeName; char line[256]; unordered_map <string, string> result; string replyLineKey1 = "Name Server Name"; string replyLineKey2 = "Name Server Address"; string replyLineKey3 = "Client Machine Name"; string replyLineKey4 = "Client Machine Address"; if (!(fpipe = (FILE*)popen(nsLookupCommand.c_str(),"r"))) { // If fpipe is NULL // Mark the errors in the map. result[replyLineKey1] = "Pipe failure"; result[replyLineKey2] = "Pipe failure"; result[replyLineKey3] = "Pipe failure"; result[replyLineKey4] = "Pipe failure"; return (result); } // End of if (!(fpipe = (FILE*)popen(command,"r"))) int32_t cnt = 1; while (fgets( line, sizeof line, fpipe)) { // nslookup result looks like this. // Server: 10.4.24.230 // Address: 10.4.24.230#53 // // Name: a0217b10e1.hny.distillery.ibm.com // Address: 10.4.40.210 // // Let us parse only the last token in every line as that is what we want. // std::string resultStr = string(line); // construct a stream from the string std::stringstream strStream(resultStr); // use stream iterators to copy the stream to the vector as whitespace separated strings std::istream_iterator<std::string> it(strStream); std::istream_iterator<std::string> end; std::vector<std::string> results(it, end); // cout << "Vector size = " << results.size() << endl; // Let us now get the last token in the vector. 
std::string lastToken = ""; if (results.size() > 0) { lastToken = results.at(results.size() - 1); } // End of if (results.size() > 0) switch (cnt++) { case 1: result[replyLineKey1] = lastToken; break; case 2: result[replyLineKey2] = lastToken; break; case 3: // This must be an empty line in the nslookup result. break; case 4: result[replyLineKey3] = lastToken; break; case 5: result[replyLineKey4] = lastToken; break; } // End of switch (cnt++) // If we already processed 5 lines, we have // collected everything we need from the result. if (cnt > 5) { // Break from the while loop. break; } // End of if (cnt > 5) // printf("%s", line); } // End of while (fgets( line, sizeof line, fpipe)) pclose (fpipe); return (result); } // End of method getNSLookupResults(string & nodeName).