bool NEWSArticle::StoreHeader(const string &line) { // First we simply store the raw header string fHeader = line; // Now we try to disassemble the header and store it as such // Separate the lines vector<string> headerLines = GetTokens(fHeader,'\n'); string fieldName = ""; string fieldValue = ""; for(vector<string>::const_iterator itemIter = headerLines.begin(); itemIter != headerLines.end(); itemIter ++) { string thisLine = (*itemIter); RemoveTrailingSpaces(thisLine); if (thisLine[0] != ' ' && thisLine[0] != '\t') { // This means that this line is a new field int position = thisLine.find(":"); fieldName.assign(thisLine,0,position); fieldValue.assign(thisLine.begin()+position+1,thisLine.end()); RemoveLeadingSpaces(fieldName); RemoveTrailingSpaces(fieldName); RemoveLeadingSpaces(fieldValue); RemoveTrailingSpaces(fieldValue); // Store the field fParsedHeaderFields.push_back(fieldName); fParsedHeaders[fieldName] = fieldValue; } else { // This means that this line is a continuation of the previous line // Just append the extra data with a newline fParsedHeaders[fieldName] += "\n" + thisLine; } } string groupsField; if (GetHeaderField("Newsgroups",groupsField)) { fNewsGroups = GetTokens(groupsField,','); fNrOfNewsGroups = fNewsGroups.size(); } fState = NA_ONLY_HEADER; return true; }
bool CParser:: EatWhiteSpace (std::string& input) // ---------------------------------------------------------------------------- // Function: Parses the input line and gets the tokens // Input: string, delimiters // Output: vector containing the tokens // ---------------------------------------------------------------------------- { // remove comment from the line obtained int i; std::vector<std::string> tokens; std::string tempInput; GetTokens(input, " ", tokens); for(i=0; i<abs(tokens.size()); i++){ std::string temptoken = tokens[i]; if(temptoken != " " && temptoken !="!"){ return false; break; } else if(temptoken =="!"){ return true; break; } } return false; }
void CParser:: RemoveToken (std::string& input) // ---------------------------------------------------------------------------- // Function: Parses the input line and gets the tokens // Input: string, delimiters // Output: vector containing the tokens // ---------------------------------------------------------------------------- { // remove comment from the line obtained int i; std::vector<std::string> tokens; std::string tempInput; GetTokens(input, " ", tokens); for(i=0; i<abs(tokens.size()); i++){ std::string temptoken = tokens[i]; if(temptoken == "&"){ break; } else{ tempInput+=temptoken; if(i!=(abs(tokens.size())-1)){ //indent{ if (tokens[i+1]!="&") tempInput+=" "; } else{ tempInput+=""; // no indent } } } input = tempInput.c_str(); }
// Reads one global kill rule from the settings ("value" or
// "value ; impact"), fills the defaults when parts are missing, and
// writes the normalised representation back into the settings file.
void NewsKiller::ReadGlobalKillRule(string valueName,long &value, long &valueImpact)
{
    string iniLine;
    vector<string> iniLineList;

    // ==========
    // Defaults used when the ini file does not provide a (complete) value.
    value       = -1;
    valueImpact = 100;

    fSettings->GetValue(SUCK_GLOBAL_KILL_RULES, valueName, iniLine);

    // Split the value into the required parts
    iniLineList = GetTokens(iniLine,';');
    switch (iniLineList.size())
    {
    case 2: StringToLong(iniLineList[1],valueImpact);
            // fall through: a two part rule also carries the first part
    case 1: StringToLong(iniLineList[0],value);
    }

    // Write the normalised value back to the settings.
    char valuestr[50];
    // FIX: snprintf cannot overflow the 50 byte buffer, unlike sprintf.
    std::snprintf(valuestr, sizeof(valuestr), "%5ld ; %5ld", value, valueImpact);
    fSettings->SetValue(SUCK_GLOBAL_KILL_RULES, valueName, valuestr);
}
void TTokenizer::GetTokens(const TStrV& TextV, TVec<TStrV>& TokenVV) const { IAssert(TextV.Len() == TokenVV.Len()); // shall we rather say Tokens.Gen(Texts.Len(), 0); ? for (int TextN = 0; TextN < TextV.Len(); TextN++) { TStrV& TokenV = TokenVV[TextN]; TokenVV.Gen(32,0); // assume there will be at least 32 tokens, to avoid small resizes GetTokens(TextV[TextN], TokenV); } }
void NEWSArticle::SetXOVERLine(const char * line) { vector<string> tokens = GetTokens(line,'\t'); sscanf(tokens[0].c_str(),"%ld",&fArticleNr); fSubject = tokens[1].c_str(); fSender = tokens[2].c_str(); fDate = tokens[3].c_str(); fMessageID = tokens[4].c_str(); fReferences = tokens[5].c_str(); sscanf(tokens[6].c_str(),"%ld",&fBytes); sscanf(tokens[7].c_str(),"%ld",&fLines); fXrefHeader = tokens[8].c_str(); fParsedReferences = GetTokens(fReferences,' '); fState = NA_ONLY_XOVER; }
bool TabularFile::CheckFile(const std::string & filename, size_t & first_data_line, size_t & n_columns, bool & read_last_line, std::string & last_line) { // Check if the last line of the file consists of data // and make initial guess if the last line shoud be read (not reading when equal to 0 or -999) std::ifstream in_file0; OpenRead(in_file0, filename); last_line = FindLastNonEmptyLine(in_file0); std::vector<std::string> tokens = GetTokens(last_line); read_last_line = true; if (!IsType<double>(tokens[0])) { read_last_line = false; } else { for (size_t i = 0; i < tokens.size(); ++i) { if (!IsType<double>(tokens[i])) read_last_line = false; else { if(atof(tokens[i].c_str()) == 0.0 || atof(tokens[i].c_str()) == -999.0) read_last_line = false; } } } std::ifstream in_file; OpenRead(in_file, filename); int line_number = 0; std::string line; while (GetNextNonEmptyLine(in_file, line_number, line)) { std::vector<std::string> tokens = GetTokens(line); if (IsType<double>(tokens[0])) { first_data_line = line_number; n_columns = tokens.size(); for (size_t i = 0; i < n_columns; ++i) { if (!IsType<double>(tokens[i])) return false; } return true; } } return false; }
// Runs a script: walks the buffer from 'rover', tokenizing and executing
// one statement at a time until the terminator or 'end' is reached.
// Script errors abort with a message; a CFsTerminator exception performs
// an orderly early stop.
// NOTE(review): the 'data' parameter is not referenced in this body —
// presumably the start of the script buffer; confirm against callers.
void FParser::Run(char *rover, char *data, char *end)
{
   // Rover is the parser's current read position (member state shared
   // with GetTokens/RunStatement).
   Rover = rover;
   try
   {
      PrevSection = NULL;       // clear it
      while(*Rover)             // go through the script executing each statement
      {
         // past end of script?
         if(Rover > end)
            break;

         PrevSection = Section; // store from prev. statement

         // get the line and tokens
         GetTokens(Rover);

         if(!NumTokens)
         {
            if(Section)         // no tokens but a brace
            {
               // possible } at end of loop:
               // refer to spec.c
               spec_brace();
            }

            continue;           // continue to next statement
         }

         if(script_debug) PrintTokens();   // debug
         RunStatement();        // run the statement
      }
   }
   catch (const CFsError &err)
   {
      // Fatal script error: report and stop running.
      ErrorMessage(err.msg);
   }
   catch (const CFsTerminator &)
   {
      // The script has signalled that it wants to be terminated in an orderly fashion.
   }
}
void CGrammarManagerFromProgram::EditText(const QString& text) { QStringList strList = GetTokens(text); if (!strList.size()) { return; } QString strMacro = strList.at(0); QStringList strListParameterName; QString strMacroUpper = strMacro.toUpper(); if (strMacroUpper == STR_MACRO_MOVL) { GetMovlList(strList, strListParameterName); EditMacroParameter(strMacro, strListParameterName); } else if (strMacroUpper == STR_MACRO_MOVC) { GetMovcList(strList, strListParameterName); EditMacroParameter(strMacro, strListParameterName); } else if (strMacroUpper == STR_MACRO_FOR || strMacroUpper == STR_MACRO_SWITCH || strMacro == STR_MACRO_IF || strMacro == STR_MACRO_CASE || strMacro == STR_MACRO_DEFAULT || strMacro == STR_MACRO_BREAK || strMacro == STR_MACRO_ELSEIF || strMacro == STR_MACRO_GOSUB) { EditNormalParameter(text); CScreenMain::GetInstance()->ChangeToScreenProgram(); } /*若不是任何命令,返回*/ else { return; } }
// Initializes the kill-statistics log file from the settings: reads the
// enable flag, the log file name and the list of headers to record
// (falling back to built-in defaults), opens the file in append mode and
// writes a run banner.  On open failure, logging is disabled.
void NewsKiller::InitializeLogFile()
{
    if (fSettings->GetValue(SUCK_KILL_LOGFILE,SUCK_KILL_ENABLE_LOGFILE,fLogKilledMessages))
    {
        if (fLogKilledMessages)
        {
            // Log file name, with a default when not configured.
            if (!fSettings->GetValue(SUCK_KILL_LOGFILE,SUCK_KILL_LOGFILENAME, fKillLogFileName))
            {
                fKillLogFileName = "/tmp/SuckMTKillLog.txt";
            }

            // Which article headers to mention for each killed message.
            string headersToLog;
            if (!fSettings->GetValue(SUCK_KILL_LOGFILE,SUCK_KILL_LOGFILE_HEADERS, headersToLog))
            {
                headersToLog = "From Subject Newsgroups Lines X-Trace X-Complaints-To Message-ID";
            }
            fHeadersToMentionInKillLog = GetTokens(headersToLog,' ');

            fKillLogFile.open(fKillLogFileName.c_str(),ofstream::app); // Open the logfile in append mode

            if (!fKillLogFile.is_open())
            {
                // Could not open: report and fall back to no logging.
                Lerror << "Unable to open the file \""
                       << fKillLogFileName
                       << "\" for appending the kill statistics log file."
                       << endl << flush;
                fLogKilledMessages = false;
            }
            else
            {
                // Place header in the logfile
                fKillLogFile
                    << "====================================================" << endl
                    << "Suck MT "<< SUCKMT_VERSION <<" - A Multi Threaded suck replacement." << endl
                    << "Run started at : " << fNow << endl
                    << flush;
            }
        }
    }
}
/////////////////////////////// // Tokenizer void TTokenizer::GetTokens(const TStr& Text, TStrV& TokenV) const { PSIn SIn = TStrIn::New(Text); GetTokens(SIn, TokenV); }
//---------------------------------------------------------------------------- ObjLoader::ObjLoader (const string& path, const string& filename) : mCode(EC_SUCCESSFUL), mCurrentGroup(-1), mCurrentPos(-1), mCurrentTcd(-1), mCurrentNor(-1), mCurrentMtl(-1), mCurrentMesh(-1) { mLogFile = fopen("ObjLogFile.txt", "wt"); if (!mLogFile) { assert(false); mCode = EC_LOGFILE_OPEN_FAILED; return; } string filePath = path + filename; ifstream inFile(filePath.c_str()); if (!inFile) { assert(false); mCode = EC_FILE_OPEN_FAILED; fprintf(mLogFile, "%s: %s\n", msCodeString[mCode], filePath.c_str()); fclose(mLogFile); return; } string line; vector<string> tokens; while (!inFile.eof()) { getline(inFile, line); // Skip blank lines. if (line == "") { continue; } // Skip comments. if (line[0] == '#') { continue; } GetTokens(line, tokens); if (tokens.size() == 0) { assert(false); mCode = EC_NO_TOKENS; break; } // mtllib if (GetMaterialLibrary(path, tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // g default if (GetDefaultGroup(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // v x y z if (GetPosition(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // vt x y if (GetTCoord(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // vn x y z if (GetNormal(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Ignore smoothing groups for now (syntax: 's number'). if (tokens[0] == "s") { continue; } // g groupname if (GetGroup(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // usemtl mtlname if (GetMaterialAndMesh(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // f vertexList if (GetFace(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } assert(false); mCode = EC_UNEXPECTED_TOKEN; break; } if (mCode != EC_SUCCESSFUL) { fprintf(mLogFile, "%s: %s\n", msCodeString[mCode], line.c_str()); } else { fprintf(mLogFile, "%s\n", msCodeString[EC_SUCCESSFUL]); } fclose(mLogFile); inFile.close(); }
//----------------------------------------------------------------------------
// Parses the Cg compiler output file 'fileName' for the given profile:
// uncommented lines are accumulated into program.Text, while commented
// "var"/"profile"/"program" metadata lines fill program.Variables and
// program.Name.  Returns false (after pushing a message or calling
// ReportError) when the file is missing or a metadata line is malformed.
bool FxCompiler::Parse (const std::string& fileName,
    const std::string& profileName, Program& program)
{
    std::ifstream inFile(fileName.c_str());
    if (!inFile)
    {
        // If the file does not exist, the assumption is that the profile does
        // not support the shader (in which case, the Cg compiler failed).
        Messages.push_back("Profile " + profileName + " not supported.\n");
        return false;
    }

    program.Text = "";

    while (!inFile.eof())
    {
        std::string line;
        getline(inFile, line);
        if (line.empty())
        {
            continue;
        }

        // Any uncommented lines are part of the program text.
        if (line[0] != '/' && line[0] != '#')
        {
            program.Text += line + "\n";
            continue;
        }

        std::vector<std::string> tokens;
        std::string::size_type begin;

        // Get a variable line from the Cg output file.
        begin = line.find("var", 0);
        if (begin != std::string::npos)
        {
            GetTokens(line, begin, tokens);
            if (tokens.size() >= 2 && tokens[0] == "var")
            {
                // The last token flags whether the variable is used
                // ("1") or unused ("0"); only used variables are kept.
                std::string used = tokens.back();
                if (used == "0" || used == "1")
                {
                    if (used == "1")
                    {
                        program.Variables.push_back(tokens);
                    }
                    continue;
                }
            }
            inFile.close();
            ReportError("Invalid variable line", &tokens);
            return false;
        }

        // Get the profile name.
        begin = line.find("profile", 0);
        if (begin != std::string::npos)
        {
            GetTokens(line, begin, tokens);
            if (tokens.size() >= 2 && tokens[0] == "profile")
            {
                // When the user has already compiled the programs, it is
                // because a profile is a special one.  The "!!ARBfp1.0"
                // string and the last token of "#profile specialProfile"
                // most likely do not match, so do not compare them.
                if (mAlreadyCompiled || tokens[1] == profileName)
                {
                    continue;
                }
            }
            inFile.close();
            ReportError("Invalid profile line", &tokens);
            return false;
        }

        // Get the program name.
        begin = line.find("program", 0);
        if (begin != std::string::npos)
        {
            GetTokens(line, begin, tokens);
            if (tokens.size() >= 2 && tokens[0] == "program")
            {
                program.Name = tokens[1];
                continue;
            }
            inFile.close();
            ReportError("Invalid program line", &tokens);
            return false;
        }
    }

    inFile.close();
    return true;
}
/*
 * Command handler for knfs.  Resolves the host named in argument 0 to an
 * address, determines the remote uid (from the "-id" argument, falling
 * back to getuid()), then performs exactly one action: fetch tokens
 * (parms[4]), unlog (parms[3]), or copy the local tokens to the NFS
 * translator and optionally set the client's @sys name (parms[2]).
 * Returns 0 on success or a nonzero error code.
 */
static int cmdproc(struct cmd_syndesc *as, void *arock)
{
    struct hostent *the;
    char *tp, *sysname = 0;
    afs_int32 uid, addr;
    afs_int32 code;

    /* Resolve the host name given as the first argument. */
    the = (struct hostent *) hostutil_GetHostByName(tp = as->parms[0].items->data);
    if (!the) {
        printf("knfs: unknown host '%s'.\n", tp);
        return -1;
    }
    memcpy(&addr, the->h_addr, sizeof(afs_int32));
    uid = -1;
    if (as->parms[1].items) {
        /* "-id" argument: parse the remote uid. */
        code = util_GetInt32(tp = as->parms[1].items->data, &uid);
        if (code) {
            printf("knfs: can't parse '%s' as a number (UID)\n", tp);
            return code;
        }
    } else
        uid = -1;               /* means wildcard: match any user on this host */

    /*
     * If not "-id" is passed then we use the getuid() id, unless it's root
     * that is doing it in which case we only authenticate as "system:anyuser"
     * as it's appropriate for root. (The cm handles conversions from 0 to
     * "afs_nobody"!)
     */
    if (uid == -1) {
        uid = getuid();
    }

    if (as->parms[2].items) {
        sysname = as->parms[2].items->data;
    }

    if (as->parms[4].items) {   /* tokens specified */
        code = GetTokens(addr, uid);
        if (code) {
            if (code == ENOEXEC)
                printf
                    ("knfs: Translator in 'passwd sync' mode; remote uid must be the same as local uid\n");
            else
                printf("knfs: failed to get tokens for uid %d (code %d)\n",
                       uid, code);
        }
        return code;
    }

    /* finally, parsing is done, make the call */
    if (as->parms[3].items) {   /* unlog specified */
        code = NFSUnlog(addr, uid);
        if (code) {
            if (code == ENOEXEC)
                printf
                    ("knfs: Translator in 'passwd sync' mode; remote uid must be the same as local uid\n");
            else
                printf("knfs: failed to unlog (code %d)\n", code);
        }
    } else {
        /* Default action: copy the local tokens to the translator. */
        code = NFSCopyToken(addr, uid);
        if (code) {
            if (code == ENOEXEC)
                printf
                    ("knfs: Translator in 'passwd sync' mode; remote uid must be the same as local uid\n");
            else
                printf("knfs: failed to copy tokens (code %d)\n", code);
        }
        if (sysname) {
            /* "-sysname" argument: also set the client's @sys value. */
            code = SetSysname(addr, uid, sysname);
            if (code) {
                printf("knfs: failed to set client's @sys to %s (code %d)\n",
                       sysname, code);
            }
        }
    }
    return code;
}
bool CParser::ReadNextLine (std::ifstream& FileInput, int& nLineNum,
                            std::string& szInputString, const int MAXCHARS,
                            const std::string& szComment, bool bLowerCase)
// ---------------------------------------------------------------------------
// Function: reads the next line skipping over the comment lines
//           and converts all alphabets to lower case if requested
// Input:    file istream, line #, string to hold the input line,
//           max. # of characters expected in each input line,
//           comment character(s) at the beginning of a comment line,
//           lowercase conversion option
// Output:   updated values of line # and the string
//           return value is true if successful
//                           false if an error state is encountered
// Restriction: Cannot read a line over 256 characters
// ---------------------------------------------------------------------------
{
    // flag/flag1/tokenfound drive the "&" line-continuation handling below:
    // flag1 = 1 when the current line contains a "&", tokenfound controls
    // whether another continuation line must still be read.
    int flag = 0;
    int flag1 =0;
    bool bWhSpc = false;
    int tokenfound = 1;
    const int MAXCH = 1000;
    char szInp[MAXCH];
    char szTemp [MAXCH];
    std::vector<std::string> tokens;

    // enough capacity to read and store?
    if (MAXCHARS > MAXCH)
        return false;

    // comment character(s)
    int nCLen = static_cast<int>(szComment.length());

    // read the line (skip over comment lines)
    for(;;)
    {
        ++nLineNum;
        FileInput.getline (szInp, MAXCHARS);
        // // end-of-file?
        // if (FileInput.eof())
        //     return false;
        if (FileInput.fail())
            FileInput.clear (FileInput.rdstate() & ~std::ios::failbit);
        // unrecoverable error?
        if (FileInput.bad())
            return false;

        // successful read
        szInputString = szInp;
        // EatWhiteSpace reports whether the line is a "!" comment line.
        GetTokens(szInputString, " ", tokens);
        bWhSpc = EatWhiteSpace(szInputString);
        // Only process lines that are not comment lines.
        if ((szInputString.substr(0,nCLen) != szComment)&& (bWhSpc ==false)){
            szInputString = szInp;
            GetTokens(szInputString, " ", tokens);
            // Scan for the "&" continuation marker anywhere on the line.
            for(int i=0; i< abs(tokens.size()); i++){
                std::string temptoken = tokens[i];
                if (temptoken == "&")
                    flag1 = 1;
            }
            //Filter the comment tokens
            // FilterComment(szInputString, szComment);
            //if "&" is found continue to read the next line
            std::string szTempString = szInputString;
            // check if line is continued &
            while(flag1 ==1 && tokenfound == 1){
                GetTokens(szTempString, " ", tokens);
                for(int i=1; i<=abs(tokens.size()); i++){
                    std::string temptoken = tokens[i-1];
                    if (temptoken == "&"){
                        tokenfound = 1;
                        flag = 1;
                    }
                    else{
                        if(flag==1)
                            flag = 1;//do nothing token already found
                        else
                            tokenfound = 0;
                    }
                }
                if(tokenfound ==1){
                    // A continuation is pending: strip the "&" from the
                    // accumulated line and append the next physical line.
                    ++nLineNum;
                    RemoveToken(szInputString);
                    //- getting more tokens and add to the existing
                    FileInput.getline (szTemp, MAXCHARS);
                    // end-of-file?
                    if (FileInput.eof())
                        return false;
                    if (FileInput.fail())
                        FileInput.clear (FileInput.rdstate() & ~std::ios::failbit);
                    // unrecoverable error?
                    if (FileInput.bad())
                        return false;
                    // successful read
                    szTempString = szTemp;
                    FilterComment(szTempString, szComment);
                    szInputString+=" ";
                    szInputString+=szTemp;
                }
                else{
                    break;//while loop ents
                }
                flag = 0;
            } // while loop ends

            // convert to lower case?
            if (bLowerCase){
                for (int i=0; i < static_cast<int>(szInputString.length()); i++)
                    szInputString[i] = tolower(szInputString[i]);
            }
            break;
        }
    }
    return true;
}
// Constructs a header matcher from an ini entry of the form
//   [~]fieldname:regex   (a leading '~' makes matching case insensitive)
// On any parse/compile failure fObjectIsValid is set to false.  The
// stored statistics (last match date ; match count ; impact) are read
// from, and normalised back into, the settings file.
HeaderMatcher::HeaderMatcher(
    IniFile *settings,
    const string sectionName,
    const string valueName)
    : Printable("HeaderMatcher")
    , ArticleImpactChecker(settings)
{
    fObjectIsValid = true;
    fRegExpression = NULL;

    // Copy the parameters
    fIniSectionName = sectionName;
    fIniValueName   = valueName;

    // Initialize the Cooked values
    fHeaderName            = "";
    fHeaderValue           = "";
    fLastMatchDate         = fNow;
    fMatchCount            = 0;
    fImpactValue           = 100;   // Default Impact value
    fSearchCaseINSensitive = false;

    // Quick check of the parameters
    if (fSettings == NULL || fIniSectionName == "" || fIniValueName == "")
    {
        fObjectIsValid = false;
        return;
    }

    // ----------
    // Split the fIniValueName into the required parts.
    // There should be two possibilities:
    //   Casesensitive matching    :  fieldname:valuetomatch
    //   Case INsensitive matching :  ~fieldname:valuetomatch

    // First check if searching case insensitive.
    string parseValueName = fIniValueName;
    if (parseValueName[0] == '~')
    {
        fSearchCaseINSensitive = true;
        // Remove "~"
        parseValueName.erase(parseValueName.begin());
    }

    // Now split at the first ':'.
    // FIX: use string::size_type for the result of find(); comparing an
    // unsigned long against npos is not portable.
    string::size_type colonPosition = parseValueName.find(":");
    if (colonPosition == string::npos)
    {
        // The header doesn't include a ':' --> Not a header !!
        fObjectIsValid = false;
        Lerror << "Invalid \"" << sectionName
               << "\" header matching line : \"" << valueName << "\"."
               << endl << flush;
        return;
    }

    fHeaderName.assign (parseValueName,0,colonPosition);
    fHeaderValue.assign(parseValueName.begin() + colonPosition + 1,
                        parseValueName.end());

    // ----------
    // Try to compile the header value that should be matched.
    regbase::flag_type cflags = regbase::normal;
    if (fSearchCaseINSensitive)
        cflags |= regbase::icase;

    try
    {
        fRegExpression = new regex(fHeaderValue, cflags);
    }
    // FIX: catch by const reference instead of by value to avoid slicing
    // and a needless copy of the exception object.
    catch (const bad_expression &)
    {
        fObjectIsValid = false;
        Lerror << "Invalid regular expression \"" << fHeaderValue
               << "\". Check the suckmt config file" << endl << flush;

        char inistr[100];
        // FIX: snprintf cannot overflow the 100 byte buffer (the date
        // string length is not under our control).
        std::snprintf(inistr, sizeof(inistr),
                      "%s ; %5ld ; %5ld ; INVALID REGULAR EXPRESSION !!",
                      fLastMatchDate.c_str(),fMatchCount,fImpactValue);
        fSettings->SetValue(fIniSectionName, fIniValueName, inistr);
        return;
    }

    // ----------
    // Now get the statistics parameters for the matching process.
    if (fSettings->GetValue(fIniSectionName,fIniValueName,fIniValue))
    {
        // ----------
        // Split the value into the required parts
        vector<string> iniValueList = GetTokens(fIniValue,';');
        switch (iniValueList.size())
        {
        case 3: StringToLong(iniValueList[2],fImpactValue);
                // fall through
        case 2: StringToLong(iniValueList[1],fMatchCount);
                // fall through
        case 1: RemoveLeadingSpaces (iniValueList[0]);
                RemoveTrailingSpaces(iniValueList[0]);
                fLastMatchDate = iniValueList[0];
        }
    }

    // Write the normalised statistics back to the settings.
    char inistr[100];
    std::snprintf(inistr, sizeof(inistr), "%s ; %5ld ; %5ld",
                  fLastMatchDate.c_str(),fMatchCount,fImpactValue);
    fSettings->SetValue(fIniSectionName, fIniValueName, inistr);
}
//---------------------------------------------------------------------------- MtlLoader::MtlLoader (const string& path, const string& filename) : mCode(EC_SUCCESSFUL), mCurrent(-1) { mLogFile = fopen("MtlLogFile.txt", "wt"); if (!mLogFile) { assert(false); mCode = EC_LOGFILE_OPEN_FAILED; return; } string filePath = path + filename; ifstream inFile(filePath.c_str()); if (!inFile) { assert(false); mCode = EC_FILE_OPEN_FAILED; fprintf(mLogFile, "%s: %s\n", msCodeString[mCode], filePath.c_str()); fclose(mLogFile); return; } string line; vector<string> tokens; while (!inFile.eof()) { getline(inFile, line); // Skip blank lines. if (line == "") { continue; } // Skip comments. if (line[0] == '#') { continue; } GetTokens(line, tokens); if (tokens.size() == 0) { assert(false); mCode = EC_NO_TOKENS; break; } // newmtl if (GetNewMaterial(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // illum if (GetIlluminationModel(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Ka if (GetAmbientColor(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Kd if (GetDiffuseColor(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Ks if (GetSpecularColor(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Tf if (GetTransmissionFilter(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Ni if (GetOpticalDensity(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // Ni if (GetSpecularExponent(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } // map_Kd if (GetDiffuseTexture(tokens)) { continue; } if (mCode != EC_SUCCESSFUL) { break; } assert(false); mCode = EC_UNEXPECTED_TOKEN; break; } if (mCode != EC_SUCCESSFUL) { fprintf(mLogFile, "%s: %s\n", msCodeString[mCode], line.c_str()); } else { fprintf(mLogFile, "%s\n", msCodeString[EC_SUCCESSFUL]); } fclose(mLogFile); inFile.close(); }