// parses a hex number
bool opScanner::Hexadecimals(const inputtype& Input, int& current) {
    if (current + 2 < Input.Size()) {
        int one = current;
        int two = current + 1;
        int three = current + 2;

        if (Input[one] == '0' && (Input[two] == 'x' || Input[two] == 'X') &&
            IsHexDigit(Input[three])) {
            int end = Input.Size();
            opToken newToken(T_HEXADECIMAL, opString("0x") + Input[three],
                             CurrentLine);

            current += 3;
            one = current;

            while (one != end) {
                if (!IsHexDigit(Input[one]))
                    break;

                newToken.Value += Input[one];

                ++current;
                one = current;
            }

            Tokens.PushBack(newToken);

            return true;
        }
    }

    return false;
}
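// For reference, a minimal standalone sketch of the same hex-scanning
// approach over a plain std::string (the helper below is illustrative and
// not part of the opScanner API; only the standard library is assumed).
#include <cctype>
#include <string>

// Returns the hex literal ("0x" plus digits) starting at 'pos', advancing
// 'pos' past it, or an empty string if no hex literal starts there.
static std::string ScanHexLiteral(const std::string& input, std::size_t& pos) {
    // Need at least "0x" followed by one hex digit.
    if (pos + 2 >= input.size())
        return "";

    if (input[pos] != '0' || (input[pos + 1] != 'x' && input[pos + 1] != 'X') ||
        !std::isxdigit(static_cast<unsigned char>(input[pos + 2])))
        return "";

    std::string value = "0x";
    pos += 2;  // skip the "0x"/"0X" prefix

    while (pos < input.size() &&
           std::isxdigit(static_cast<unsigned char>(input[pos]))) {
        value += input[pos];
        ++pos;
    }

    return value;
}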
// expand an opmacro call
void OPMacroNode::Expand(opNode* cloned, ExpandCallArgumentListNode* args) {
    // make argument size checks
    int NumMacroArgs =
        (Arguments == NULL) ? 0 : (int)Arguments->GetArguments().size();
    int NumExpandArgs = (args == NULL) ? 0 : (int)args->GetArguments().size();

    if (NumMacroArgs != NumExpandArgs) {
        opError::MessageError(
            args, opString("Number of expand arguments does not match number "
                           "of opmacro arguments. (got ") +
                      NumExpandArgs + ", needed " + NumMacroArgs + ')');
    } else if (NumMacroArgs == 0)
        return;

    // do the expansion
    const vector<OPMacroArgumentNode*>& MacroArgs = Arguments->GetArguments();
    const vector<ExpandCallArgumentNode*>& ExpandArgs = args->GetArguments();

    for (int i = 0; i < NumExpandArgs; i++) {
        ExpandChildren(cloned, ExpandArgs[i],
                       MacroArgs[i]->GetVariableName()->GetStringValue());
    }

    // expand concatenation
    // ExpandConcatenation(cloned);
    // MacroOperationWalker runoperations(cloned);

    // expand stringize
}
// perform globbing
bool opDriver::GlobMode(const opParameters& p) {
    Globber Globberobj;

    // test settings
    double starttime = opTimer::GetTimeSeconds();

    // TODO: it should print info like...
    //       globbing... (if not silent)
    //       also..
    //       and if no files were found - no files found
    //       if no indexes updated - already up to date
    //       if things were updated - updated indexes
    if (!p.Silent) {
        Log(opString("Globbing to ") +
            to_relative_path(path(p.GeneratedDirectory.GetString())).string() +
            " directory ...");
        Log("");
    }

    bool bResult = false;

    try {
        bResult = Globberobj.Glob(p);
    } catch (boost::filesystem::filesystem_error& fe) {
        opString errorstr = fe.what();
        opString who = "";

        errorstr.Replace(who, "Error: Improper path detected when globbing.");

        Log(errorstr);
    } catch (...) {
        opError::ExceptionError("GlobMode");
    }

    double endtime = opTimer::GetTimeSeconds();

    if (p.Verbose) {
        double totalms = (endtime - starttime) * 1000.0;

        Log(opString("Glob Mode took ") + totalms + " ms");
        Log("");
    }

    if (!bResult && p.Verbose)
        Log(opString("Glob Mode Failed."));

    return bResult;
}
// compiles all files
bool opDriver::NormalMode(const opParameters& p) {
    // verify the output directory...
    path dirpath = p.GeneratedDirectory.GetString();

    if (!exists(dirpath))
        create_directories(dirpath);

    opSet<path> files = GetFiles();

    // if there are no files to compile, return false
    if (files.size() == 0) {
        if (!p.Silent)
            Log("Error: No files to compile!");

        return false;
    }

    // compile all files
    bool bResult = true;

    typedef opSet<path>::const_iterator fileit;

    if (p.Verbose)  // spacing in verbose mode
        Log(' ');

    for (fileit it = files.begin(); it != files.end(); ++it) {
        bResult = NormalModeFile(p, *it) ? bResult : false;
    }

    // If we had errors, print out the number of errors.
    if (NumErrors > 0) {
        Log("");

        string errorstring = (NumErrors == 1) ? " error" : " errors";

        Log("opC++ - " + opString(NumErrors) + errorstring);
        Log("");
    }

    if (!p.Silent && files.size() > 1) {
        if (p.Verbose) {
            Log(' ');

            if (bResult) {
                Log("opC++ - 0 errors");
                Log("");
                Log("--------------------------------");
                Log("All Files Compiled Successfully!");
                Log("--------------------------------");
            } else {
                Log("-------------------------------------");
                Log("Some File(s) Compiled Unsuccessfully!");
                Log("-------------------------------------");
            }

            Log(' ');
        }
    }

    return bResult;
}
// Random letters.
opString opDemoSupport::RandLetters() {
    opString random = opString(abs(rand() * 1231));

    // Map each decimal digit ('0'..'9') of the random number to a letter
    // ('a'..'j').
    for (int i = 0; i < random.Length(); i++) {
        char value = random[i] - '0';
        char letter = 'a' + value;

        random[i] = letter;
    }

    return random;
}
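// An illustrative alternative using <random> (names are hypothetical, not
// part of opDemoSupport): it draws letters uniformly from 'a'-'z' instead of
// remapping the decimal digits of rand(), which only ever yields 'a'-'j'.
#include <random>
#include <string>

static std::string RandLettersAlt(std::size_t count) {
    static std::mt19937 rng{std::random_device{}()};
    std::uniform_int_distribution<int> dist(0, 25);

    std::string out;
    out.reserve(count);

    for (std::size_t i = 0; i < count; ++i)
        out += static_cast<char>('a' + dist(rng));

    return out;
}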
// Convert a decimal to octal.
opString opDemoSupport::DecToOctal(int decimal) {
    opString octal;

    while (decimal) {
        int remainder = decimal % 8;

        decimal = decimal / 8;
        octal = opString(remainder) + octal;
    }

    return octal;
}
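// Note that the loop above produces an empty string for an input of 0. A
// hedged standalone variant over std::string (illustrative only, assuming a
// non-negative input) that returns "0" in that case:
#include <string>

static std::string DecToOctalStd(int decimal) {
    if (decimal == 0)
        return "0";

    std::string octal;

    while (decimal) {
        // prepend the next octal digit
        octal = static_cast<char>('0' + decimal % 8) + octal;
        decimal /= 8;
    }

    return octal;
}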
// prints out the ast
void opNode::PrintTree(const opString& filename, int depth) {
    FileNode* file = GetFile();
    opString s = file->GetInputName();
    int line = GetLine();

    s += '(';

    if (line == -1)
        s += "UNKNOWN) : \t";
    else
        s += opString(line) + ") : \t";

    for (int i = 0; i < depth; i++)
        s += ". ";

    if (IsTerminal()) {
        Token t = GetId();

        // NOTE: this prints the value now (make optional?)
        if (t == T_NEWLINE || t == T_EOF || t == T_CONTINUELINE ||
            t == T_CCOMMENT) {
            Log(s + TokenFunctions::ToString(GetId()));
        } else
            Log(s + TokenFunctions::ToString(GetId()) + " '" + GetTreeValue() +
                "'");
    } else {
        // If we're not in -fulltree mode, we want to limit
        // the kinds of nodes we print (mostly auto modifiers).
        if (!opParameters::Get().PrintFullTree && (id == G_AUTO_MODIFIERS)) {
            return;
        }

        opString value = GetTreeValue();

        if (!value.Size())
            Log(s + TokenFunctions::ToString(GetId()));
        else
            Log(s + TokenFunctions::ToString(GetId()) + " '" + value + "'");

        iterator i = GetBegin();
        iterator end = GetEnd();

        while (i != end) {
            i->PrintTree(filename, depth + 1);
            ++i;
        }
    }
}
void DialectNote::PrintXml(opXmlStream& stream) {
    {
        opXmlTextTag nametag(stream,"Name");
        stream << GetName();
    }

    if(GetNoteDefinition() && GetNoteDefinition()->GetFile()) {
        {
            opXmlTextTag pathtag(stream,"Path");
            stream << GetNoteDefinition()->GetFile()->GetInputName();
        }
        {
            opXmlTextTag linetag(stream,"Line");
            stream << opString(GetNoteDefinition()->GetLine());
        }
    }

    //print all arguments
    vector<opString> arguments;
    BuildValidArguments(arguments);

    //opXmlTag argumentstag(stream,"Arguments");

    int num = (int)arguments.size();
    for(int i = 0; i < num; i++) {
        opXmlTag argument(stream,"Argument");

        {
            opXmlTextTag name(stream,"Name");
            stream << arguments[i];
        }

        opString description;

        //NOTE: needs to only look at valued modifiers!
        //      and arguments...
        if(GetArgumentDescription(arguments[i],description)) {
            opXmlTextTag d(stream,"Description");
            stream << description;
        }
    }
}
std::string Order::stringRepresentation() const {
    std::stringstream ss;

    ss << "LocalID: " << m_id << "; ClientAssignedID: " << clientAssignedId()
       << "; " << opString(m_operation) << " " << amount() << " of "
       << security();

    if(m_type == Order::OrderType::Limit)
        ss << " at " << m_price;

    ss << " (";

    switch(m_state) {
        case State::Submitted:
            ss << "submitted";
            break;
        case State::Cancelled:
            ss << "cancelled";
            break;
        case State::Rejected:
            ss << "rejected";
            break;
        case State::Executed:
            ss << "executed";
            break;
        case State::PartiallyExecuted:
            ss << "partially executed";
            break;
        case State::Unsubmitted:
            ss << "unsubmitted";
            break;
    }

    ss << ")";

    return ss.str();
}
// prints tokens - for testing
void opScanner::Print(ostream& o) {
    opList<opToken>::iterator start = Tokens.Begin();
    opList<opToken>::iterator end = Tokens.End();

    while (start != end) {
        opString output = TokenFunctions::ToString(start->Id) + " (" +
                          opString(start->Line) + "): ";

        o << setw(30) << output;

        if (start->Id == T_NEWLINE)
            o << "\\n";
        else if (start->Id != T_WHITESPACE)
            o << start->Value;

        o << endl;

        ++start;
    }
}
// basic type - labels all ISO standard/microsoft basic types
// with an id of T_BASIC_TYPE
void opScanner::BasicType() {
    opList<opToken>::iterator one = Tokens.Begin();
    opList<opToken>::iterator end = Tokens.End();
    opList<opToken>::iterator two;
    opList<opToken>::iterator three;

    // first pass: label single-token basic types
    while (one != end) {
        if (IsBasicType(one->Value))
            one->Id = T_BASIC_TYPE;

        ++one;
    }

    // second pass: merge "basic type, whitespace, basic type" triples whose
    // combined spelling is itself a basic type (e.g. "unsigned int")
    if (Tokens.HasSize(3)) {
        one = Tokens.Begin();
        two = ++Tokens.Begin();
        three = ++(++Tokens.Begin());

        while (three != end) {
            if (one->Id == T_BASIC_TYPE && two->Id == T_WHITESPACE &&
                three->Id == T_BASIC_TYPE &&
                IsBasicType(one->Value + " " + three->Value)) {
                one->Value += opString(" ") + three->Value;

                Tokens.Erase(two);
                Tokens.Erase(three);

                two = one;
                ++two;
                three = two;
                ++three;

                continue;
            }

            ++one;
            ++two;
            ++three;
        }
    }
}
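// Illustrative sketch of the same merge idea over a std::list of token
// strings, with whitespace tokens assumed to be already stripped (standard
// library only; isBasicTypeName and its table are placeholders, not the
// opScanner type table).
#include <iterator>
#include <list>
#include <set>
#include <string>

static bool isBasicTypeName(const std::string& s) {
    static const std::set<std::string> basic = {
        "char", "short", "int", "long", "float", "double", "unsigned",
        "signed", "unsigned int", "unsigned char", "long long", "long double"};
    return basic.count(s) != 0;
}

static void mergeBasicTypes(std::list<std::string>& tokens) {
    auto it = tokens.begin();

    while (it != tokens.end()) {
        auto next = std::next(it);
        if (next == tokens.end())
            break;

        // Merge adjacent tokens whose combined spelling is itself a basic
        // type (e.g. "unsigned" + "int" -> "unsigned int").
        if (isBasicTypeName(*it) && isBasicTypeName(*next) &&
            isBasicTypeName(*it + " " + *next)) {
            *it += " " + *next;
            tokens.erase(next);
            // stay on the merged token so chains like "long long" collapse too
        } else {
            ++it;
        }
    }
}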
static int xFilter(sqlite3_vtab_cursor *pVtabCursor,
                   int idxNum,
                   const char *idxStr,
                   int argc,
                   sqlite3_value **argv) {
  BaseCursor *pCur = (BaseCursor *)pVtabCursor;
  auto *pVtab = (VirtualTable *)pVtabCursor->pVtab;
  auto *content = pVtab->content;
  pVtab->instance->addAffectedTable(content);

  pCur->row = 0;
  pCur->n = 0;
  QueryContext context;

  for (size_t i = 0; i < content->columns.size(); ++i) {
    // Set the column affinity for each optional constraint list.
    // There is a separate list for each column name.
    context.constraints[content->columns[i].first].affinity =
        content->columns[i].second;
  }

  // Filtering between cursors happens iteratively, not consecutively.
  // If there are multiple sets of constraints, they apply to each cursor.
#if defined(DEBUG)
  plan("Filtering called for table: " + content->name +
       " [constraint_count=" + std::to_string(content->constraints.size()) +
       " argc=" + std::to_string(argc) + " idx=" + std::to_string(idxNum) +
       "]");
#endif

  // Iterate over every argument to xFilter, filling in constraint values.
  if (content->constraints.size() > 0) {
    auto &constraints = content->constraints[idxNum];
    if (argc > 0) {
      for (size_t i = 0; i < static_cast<size_t>(argc); ++i) {
        auto expr = (const char *)sqlite3_value_text(argv[i]);
        if (expr == nullptr || expr[0] == 0) {
          // SQLite did not expose the expression value.
          continue;
        }
        // Set the expression from SQLite's now-populated argv.
        auto &constraint = constraints[i];
        constraint.second.expr = std::string(expr);
        plan("Adding constraint to cursor (" + std::to_string(pCur->id) +
             "): " + constraint.first + " " + opString(constraint.second.op) +
             " " + constraint.second.expr);
        // Add the constraint to the column-sorted query request map.
        context.constraints[constraint.first].add(constraint.second);
      }
    } else if (constraints.size() > 0) {
      // Constraints failed.
    }
  }

  // Reset the virtual table contents.
  pCur->data.clear();

  // Generate the row data set.
  PluginRequest request = {{"action", "generate"}};
  plan("Scanning rows for cursor (" + std::to_string(pCur->id) + ")");
  TablePlugin::setRequestFromContext(context, request);
  Registry::call("table", pVtab->content->name, request, pCur->data);

  // Set the number of rows.
  pCur->n = pCur->data.size();
  return SQLITE_OK;
}
TokenList KoEnhancedPathFormula::scan( const QString &formula ) const
{
    // parsing state
    enum { Start, Finish, Bad, InNumber, InDecimal, InExpIndicator, InExponent,
           InString, InIdentifier } state;

    TokenList tokens;
    int i = 0;
    state = Start;
    int tokenStart = 0;
    QString tokenText;
    QString expr = formula + QChar();

    // main loop
    while( (state != Bad) && (state != Finish) && (i < expr.length()) )
    {
        QChar ch = expr[i];

        switch( state )
        {
            case Start:
                tokenStart = i;
                // skip any whitespaces
                if( ch.isSpace() )
                {
                    i++;
                }
                // check for number
                else if( ch.isDigit() )
                {
                    state = InNumber;
                }
                // beginning with alphanumeric ?
                // could be identifier, function, function reference, modifier reference
                else if( isIdentifier( ch ) )
                {
                    state = InIdentifier;
                }
                // decimal dot ?
                else if( ch == '.' )
                {
                    tokenText.append( expr[i++] );
                    state = InDecimal;
                }
                // terminator character
                else if( ch == QChar::Null )
                {
                    state = Finish;
                }
                // look for operator match
                else
                {
                    QString opString( ch );
                    int op = matchOperator( opString );

                    // any matched operator ?
                    if( op != FormulaToken::OperatorInvalid )
                    {
                        i++;
                        tokens.append( FormulaToken( FormulaToken::TypeOperator, opString, tokenStart ) );
                    }
                    else
                        state = Bad;
                }
                break;

            case InIdentifier:
                // consume as long as alpha, dollar sign, question mark, or digit
                if( isIdentifier( ch ) || ch.isDigit() )
                {
                    tokenText.append( expr[i++] );
                }
                // a '(' ? then this must be a function identifier
                else if( ch == '(' )
                {
                    tokens.append( FormulaToken( FormulaToken::TypeFunction, tokenText, tokenStart ) );
                    tokenStart = i;
                    tokenText = "";
                    state = Start;
                }
                // we're done with identifier
                else
                {
                    tokens.append( FormulaToken( FormulaToken::TypeReference, tokenText, tokenStart ) );
                    tokenStart = i;
                    tokenText = "";
                    state = Start;
                }
                break;

            case InNumber:
                // consume as long as it's digit
                if( ch.isDigit() )
                {
                    tokenText.append( expr[i++] );
                }
                // decimal dot ?
                else if( ch == '.' )
                {
                    tokenText.append( '.' );
                    i++;
                    state = InDecimal;
                }
                // exponent ?
                else if( ch.toUpper() == 'E' )
                {
                    tokenText.append( 'E' );
                    i++;
                    state = InExpIndicator;
                }
                // we're done with integer number
                else
                {
                    tokens.append( FormulaToken( FormulaToken::TypeNumber, tokenText, tokenStart ) );
                    tokenText = "";
                    state = Start;
                }
                break;

            case InDecimal:
                // consume as long as it's digit
                if( ch.isDigit() )
                {
                    tokenText.append( expr[i++] );
                }
                // exponent ?
                else if( ch.toUpper() == 'E' )
                {
                    tokenText.append( 'E' );
                    i++;
                    state = InExpIndicator;
                }
                // we're done with floating-point number
                else
                {
                    tokens.append( FormulaToken( FormulaToken::TypeNumber, tokenText, tokenStart ) );
                    tokenText = "";
                    state = Start;
                }
                break;

            case InExpIndicator:
                // possible + or - right after E, e.g. 1.23E+12 or 4.67E-8
                if( ( ch == '+' ) || ( ch == '-' ) )
                {
                    tokenText.append( expr[i++] );
                }
                // consume as long as it's digit
                else if( ch.isDigit() )
                {
                    state = InExponent;
                }
                // invalid thing here
                else
                    state = Bad;
                break;

            case InExponent:
                // consume as long as it's digit
                if( ch.isDigit() )
                {
                    tokenText.append( expr[i++] );
                }
                // we're done with floating-point number
                else
                {
                    tokens.append( FormulaToken( FormulaToken::TypeNumber, tokenText, tokenStart ) );
                    tokenText = "";
                    state = Start;
                }
                break;

            case Bad:
            default:
                break;
        }
    }

    return tokens;
}
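// A compact standalone sketch of just the number states above (InNumber,
// InDecimal, InExpIndicator, InExponent) over a std::string; illustrative
// only, not part of KoEnhancedPathFormula, and it assumes the number starts
// with a digit (the leading-'.' entry path is left out).
#include <cctype>
#include <string>

// Scans a number such as "12", "3.5", or "1.23E+12" starting at 'i'.
// Returns true and fills 'text' on success, advancing 'i' past the number.
static bool scanNumber( const std::string &expr, std::size_t &i, std::string &text )
{
    enum { InNumber, InDecimal, InExpIndicator, InExponent } state = InNumber;
    text.clear();

    if( i >= expr.size() || !std::isdigit( (unsigned char)expr[i] ) )
        return false;

    while( i < expr.size() )
    {
        char ch = expr[i];

        switch( state )
        {
            case InNumber:
                if( std::isdigit( (unsigned char)ch ) ) { text += ch; ++i; }
                else if( ch == '.' ) { text += ch; ++i; state = InDecimal; }
                else if( std::toupper( (unsigned char)ch ) == 'E' ) { text += 'E'; ++i; state = InExpIndicator; }
                else return true;
                break;

            case InDecimal:
                if( std::isdigit( (unsigned char)ch ) ) { text += ch; ++i; }
                else if( std::toupper( (unsigned char)ch ) == 'E' ) { text += 'E'; ++i; state = InExpIndicator; }
                else return true;
                break;

            case InExpIndicator:
                if( ch == '+' || ch == '-' ) { text += ch; ++i; }
                else if( std::isdigit( (unsigned char)ch ) ) { state = InExponent; }
                else return false; // malformed exponent, e.g. "1.2E+x"
                break;

            case InExponent:
                if( std::isdigit( (unsigned char)ch ) ) { text += ch; ++i; }
                else return true;
                break;
        }
    }

    // a trailing exponent indicator without digits is malformed
    return state != InExpIndicator;
}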
// compiles a file in normal mode
bool opDriver::NormalModeFile(const opParameters& p, const path& filename) {
    double totaltimestart = opTimer::GetTimeSeconds();

    // build the output filename strings...
    // fix this for ../ case (convert to string and find & replace...)
    opString sfile = GetOutputPath(p, filename);

    path oohpath = (sfile + ".ooh").GetString();
    path ocpppath = (sfile + ".ocpp").GetString();
    path outputpath = oohpath.branch_path();

    if (!exists(outputpath))
        create_directories(outputpath);

    // lets check the timestamp...
    if (!p.Force) {
        time_t ohtime = last_write_time(filename);

        // we want to rebuild upon upgrades / new builds
        time_t opcpptime = opPlatform::GetOpCppTimeStamp();

        if (exists(oohpath) && exists(ocpppath)) {
            time_t oohtime = last_write_time(oohpath);
            time_t ocpptime = last_write_time(ocpppath);
            time_t dohtime = GetGeneratedDialectTimestamp(p);

            FileNode tempfile;
            tempfile.LoadDependencies(sfile + ".depend");

            bool bNewDepend = tempfile.IsDependencyNewer(oohtime);

            if (bNewDepend) {
                if (p.Verbose) {
                    Log("Included file newer than generated file, forcing "
                        "recompile ...");
                    Log("");
                }
            }
            // up to date if ooh newer than oh, and ooh newer than opcpp build
            else if (oohtime < opcpptime || ocpptime < opcpptime) {
                if (p.Verbose) {
                    Log(opPlatform::GetOpCppExecutableName() +
                        " newer than generated file, forcing recompile ...");
                    Log("");
                }
            } else if (oohtime <= dohtime || ocpptime <= dohtime) {
                if (p.Verbose) {
                    Log("Dialect newer than generated file, forcing recompile "
                        "...");
                    Log("");
                }
            } else if (oohtime > ohtime && ocpptime > ohtime) {
                if (p.Verbose)
                    Log(filename.string() + " is up to date");

                return true;
            }
        }
    }

    opError::Clear();

    // output compiling -file- to std out
    if (!p.Silent) {
        Log(opString("Compiling ") + filename.string() + " ...");
    }

    // load the oh file, it will be tracked elsewhere
    OPFileNode* filenode =
        FileNode::Load<OPFileNode>(filename.string(), opScanner::SM_NormalMode);

    // filenode should be non-null even if there were errors
    assert(filenode);

    if (opError::HasErrors()) {
        if (p.PrintTree)
            filenode->PrintTree(filename.string());

        opError::Print();

        return false;
    }

    // no errors, let's print the output files
    try {
        // Save dependencies file.
        opString dependpath = sfile + ".depend";
        filenode->SaveDependencies(dependpath);

        // open the output files for the generated code...
        FileWriteStream hfile(oohpath.string());
        FileWriteStream sfile(ocpppath.string());

        if (hfile.is_open() && sfile.is_open()) {
            filenode->SetFiles(oohpath.string(), ocpppath.string());

            opFileStream filestream(hfile, sfile);

            // add the pre-pend path (for relative #lines)
            filestream.SetDepths(oohpath.string());

            // files are open, now print to them
            filenode->PrintNode(filestream);
            filestream.Output();
        } else {
            Log("Could not open output file(s)!");
            return false;
        }
    } catch (opException::opCPP_Exception&) {
        //??? ever
    }

    // print xml!
    if (p.PrintXml) {
        try {
            path xmlpath = (sfile + ".xml").GetString();

            // open the output files for the generated code...
            boost::filesystem::ofstream xfile(xmlpath);

            if (xfile.is_open()) {
                opXmlStream filestream(xfile);

                // files are open, now print to them
                filenode->PrintXml(filestream);
            } else {
                Log("Could not open output xml file!");
                return false;
            }
        } catch (opException::opCPP_Exception&) {
            //??? ever
        }
    }

    // any errors left? shouldn't be really
    opError::Print();

    double totaltimeend = opTimer::GetTimeSeconds();
    double totaltimeMs = (totaltimeend - totaltimestart) * 1000.0;

    // TODO: allow PrintTree to any stream
    //       and add support for PrintTree to file

    // print the AST to stdout
    if (p.PrintTree)
        filenode->PrintTree(filename.string());

    // write the verbose compilation notice
    if (p.Verbose) {
        Log("");
        Log(opString("Compilation successful ... took ") + totaltimeMs +
            " ms (" + filenode->GetScanMs() + " scan ms, " +
            filenode->GetParseMs() + " parse ms)");
    }

    return true;
}
bool opDriver::DialectModeFile(const opParameters& p, const path& filename) {
    double totaltimestart = opTimer::GetTimeSeconds();

    opError::Clear();

    // output compiling -file- to std out
    if (!p.Silent) {
        Log(opString("Reading dialect ") + filename.string() + " ...");
    }

    // load the oh file, it will be tracked elsewhere
    DialectFileNode* filenode = FileNode::Load<DialectFileNode>(
        filename.string(), opScanner::SM_DialectMode);

    // filenode should be non-null even if there were errors
    assert(filenode);

    if (opError::HasErrors()) {
        if (p.PrintTree)
            filenode->PrintTree(filename.string());

        opError::Print();

        return false;
    }

    // check for file not found error
    // if (filenode->FileNotFoundError())
    // {
    //     opError::Print();
    //
    //     //this is ambiguous doh!
    //     //TODO: fix this to be specific
    //     Log(opString("Cannot open input file \"") + filename.string() + "\"!");
    //     return false;
    // }
    //
    // //check for scanner error
    // if(filenode->ScanError())
    // {
    //     opError::Print();
    //
    //     if (p.Verbose)
    //     {
    //         Log("Compilation failed!");
    //         Log("");
    //     }
    //
    //     return false;
    // }
    //
    // //check for parser errors
    // if(filenode->AnyErrors())
    // {
    //     //print the tree (failure)
    //     if (p.PrintTree)
    //         filenode->PrintTree(filename.string());
    //
    //     opError::Print();
    //
    //     if (p.Verbose)
    //     {
    //         Log("Compilation failed!");
    //         Log("");
    //     }
    //
    //     return false;
    // }

    opString spath = GetOutputPath(p, filename);

    path oohpath = (spath + ".ooh").GetString();
    path ocpppath = (spath + ".ocpp").GetString();
    path outputpath = oohpath.branch_path();

    if (!exists(outputpath))
        create_directories(outputpath);

    // handle dialect writing
    // we always want to read dialects though.
    bool bwrite = true;

    if (!p.Force) {
        // we want to rebuild upon upgrades / new builds
        if (exists(oohpath) && exists(filename)) {
            time_t oohtime = last_write_time(oohpath);
            time_t opcpptime = opPlatform::GetOpCppTimeStamp();
            time_t dohtime = GetDialectTimestamp(p);

            filenode->LoadDependencies(spath + ".depend");

            bool bNewDepend = filenode->IsDependencyNewer(oohtime);

            if (bNewDepend) {
                if (p.Verbose) {
                    Log("Included dialect newer than generated dialect file, "
                        "forcing recompile ...");
                    Log("");
                }
            } else if (oohtime < opcpptime) {
                if (p.Verbose) {
                    Log(opPlatform::GetOpCppExecutableName() +
                        " newer than generated dialect file, forcing recompile "
                        "...");
                    Log("");
                }
            } else if (oohtime <= dohtime) {
                if (p.Verbose) {
                    Log("Dialect newer than generated dialect file, forcing "
                        "recompile ...");
                    Log("");
                }
            } else if (oohtime > dohtime) {
                if (p.Verbose)
                    Log(filename.string() + " is up to date");

                bwrite = false;
            }
        }
    }

    if (bwrite) {
        try {
            // Save dependencies file.
            opString dependpath = spath + ".depend";
            filenode->SaveDependencies(dependpath);

            // open the output files for the generated code...
            FileWriteStream hfile(oohpath.string());
            FileWriteStream sfile(ocpppath.string());

            if (hfile.is_open() && sfile.is_open()) {
                filenode->SetFiles(oohpath.string(), ocpppath.string());

                opDialectStream filestream(hfile, sfile);

                // add the pre-pend path (for relative #lines)
                filestream.SetDepths(oohpath.string());

                // files are open, now print to them
                filenode->PrintDialectNode(filestream);
                filestream.Output();
            } else {
                Log("Could not open output file(s)!");
                return false;
            }
        } catch (opException::opCPP_Exception&) {
        }

        // print xml!
        if (p.PrintXml) {
            try {
                path xmlpath = (spath + ".xml").GetString();

                // open the output files for the generated code...
                boost::filesystem::ofstream xfile(xmlpath);

                if (xfile.is_open()) {
                    opXmlStream filestream(xfile);

                    // files are open, now print to them
                    filenode->PrintXml(filestream);
                } else {
                    Log("Could not open output xml file!");
                    return false;
                }
            } catch (opException::opCPP_Exception&) {
                //??? ever
            }
        }
    }

    double totaltimeend = opTimer::GetTimeSeconds();
    double totaltimeMs = (totaltimeend - totaltimestart) * 1000.0;

    // print the tree (success)
    if (p.PrintTree)
        filenode->PrintTree(filename.string());

    // write the verbose compilation notice
    if (p.Verbose) {
        Log("");
        Log(opString("Dialect reading successful ... took ") + totaltimeMs +
            " ms (" + filenode->GetScanMs() + " scan ms, " +
            filenode->GetParseMs() + " parse ms)");
        Log("");
    }

    return true;
}
bool ExpandCallNode::Expand(opSymbolTracker& tracker, opNode::iterator expandit,
                            opNode* parent) {
    //TODO: reimplement expansion depth checking
    OPERATIONS_START;

    //TODO: add good errors

    //need to have the arguments parsed
    if(Arguments)
        Arguments->PreProcess();

    opString signature = GetSignature();

    if (OPMacroNode* macro = tracker.OPMacroRegistered(signature)) {
        stacked<OPMacroBodyNode> cloned = macro->GetBody()->Clone();
        opNode* parentNode = GetParent();

        ++ExpansionDepth;

        if (ExpansionDepth >
            opParameters::Get().OPMacroExpansionDepth.GetValue()) {
            opString error =
                "Maximum opmacro expansion depth (" +
                opString(opParameters::Get().OPMacroExpansionDepth.GetValue()) +
                ") exceeded!";

            opError::MessageError(this,error);
        }

        // parse arguments
        //NOTE: this in effect finds expands and expands the arguments
        //      before we substitute them into the opmacro
        if (Arguments) {
            opMacroExpander expander(tracker,Arguments);

            if(expander.Errored())
                opException::ThrowException();
        }

        // expand the cloned nodes
        // this performs the expansion and replacement operations...
        macro->Expand(*cloned, Arguments);

        //in order to expand arguments which are ... expand
        //we must determine what order to use for this.
        //A. expand within the expand argument (this makes some sort of sense) - I think this is best.
        //how?
        //1. find expands within argument
        //2. run pre-process on this early

        MacroConcatenationWalker operations;

        //perform initial opmacro/concat fixes
        operations.MacroConcatenations(*cloned);

        // identify opmacro and expansion calls
        cloned->FindOPMacros();

        //perform expand/concat fixes
        operations.ExpandConcatenations(*cloned);

        //find expand calls recursively using a walker class
        ExpandFinder expandfind(*cloned);

        //run expand calls recursively
        opMacroExpander expander(tracker,*cloned);

        if(expander.Errored())
            opException::ThrowException();

        parent->CollapseNode(cloned, expandit);

        ExpansionDepth--;
    } else {
        //TODO: this could be much improved
        opError::ExpandError(this,signature,tracker);
    }

    OPERATIONS_END;
}
bool Globber::Glob(const opParameters& p) {
    //NOTE: Here's what we're trying to do here
    //      first we find all the oh files
    //      then we find where all the ocpp files SHOULD be
    //      then we find where all the ooh files SHOULD be
    //      now, we only want to include an oh file in the index
    //      if the matching ooh and ocpp files exist (because maybe its not in
    //      the project anymore ...orphaned or whatever)
    //      and we only want to update the indexes if they're out of date
    //      (if no ooh files changed why update it?)
    //      so, only if the above conditions hold do we include an oh file in
    //      the index and print the index.

    //NOTE: we must replace all '\' characters with '/' or else
    //      boost filesystem will complain.
    //      that is done in opParameters now.

    //TEST: these are test inputs, the real ones should be parameter derived
    opString outputdir = p.GeneratedDirectory.GetValue();

    //TODO: it only supports oh files, should definitely do this differently.
    //      the issue is .doh file output, it looks like an oh file (maybe use .h?)
    opString extension = "oh";

    //NOTE: heres the new process...
    //      I need to recursively find all ooh files
    //      within the output directory
    //      we only want ooh files with matching ocpp files
    //      once we have narrowed these down,
    //      we only want to include ooh/ocpp files with matching oh files
    //      so back-convert all these paths
    //      and do checks, this builds our validohfiles vector,
    //      which is now usable.

    path outputpath = p.GeneratedDirectory.GetString();

    set<path> oohfiles;
    set<path> ocppfiles;

    FindFilesInDirectoryRecursive(outputpath,".ooh",oohfiles);
    FindFilesInDirectoryRecursive(outputpath,".ocpp",ocppfiles);

    typedef set<path>::const_iterator pathit;

    // find the valid oh files
    vector<ohfileinfo> validohfiles;

    pathit oohend = oohfiles.end();
    for(pathit oohit = oohfiles.begin(); oohit != oohend; ++oohit) {
        pathit ocppend = ocppfiles.end();
        for(pathit ocppit = ocppfiles.begin(); ocppit != ocppend; ++ocppit) {
            //now we want to remove their extensions
            path oohpath = (*oohit);
            path ocpppath = (*ocppit);

            opString ocppstring = ocpppath.string();
            opString oohstring = oohpath.string();

            ocppstring = ocppstring.RLeft(5);
            oohstring = oohstring.RLeft(4);

            //potential oh file!
            if(ocppstring == oohstring) {
                //now convert the path...
                opString ohstring = oohstring;

                ohstring = opDriver::FromGeneratedPath(ohstring);
                //ohstring = ohstring.Replace("_up/","../");
                //ohstring = ohstring.Right(outputpath.string() + "/");

                path ohpath = ohstring.GetString();

                //found a valid oh file!
                if(exists(ohpath)) {
                    if( opString(ohpath.leaf().c_str()).Right('.') == extension)
                        validohfiles.push_back(ohfileinfo(ohpath,oohpath,ocpppath));
                }
            }
        }
    }

    //find the dialect doh files
    // pathit dohend = dohfiles.end();
    // for(pathit dohit = dohfiles.begin(); dohit != dohend; ++dohit)
    // {
    //     //check if the .ooh file exists...
    //     //if it does we'll use it??
    // }

    //TODO: hook it up to spheroid...
    //      now I need to add a test .oh file
    //      from spheroid, and gradually pound
    //      the data statement grammar into a usable condition
    //      - past this, we want sectioned streams (which aren't required, but
    //        enable other later things)

    //now we have all the valid filenames!
    if(validohfiles.size() && p.Verbose) {
        for(size_t i = 0; i < validohfiles.size(); i++)
            Log(opString("Globber: found oh file : ") + validohfiles[i].ohfilepath.string());
    }

    //now we need to check whether to build any of the indexes..

    //1. determine whether or not to update the ooh index (only if there is an
    //   ooh newer than an oh)
    //   so, we need to iterate over all the ooh paths
    //   and see if any of the files are newer than this..
    path headerindex = outputpath / "Generated.oohindex";

    UpdateIndex<true>(validohfiles,headerindex,p);

    //2. we always build the ocpp index
    //   determine whether or not to update the ocpp index (yes always)
    path sourceindex = outputpath / "Generated.ocppindex";

    UpdateIndex<false>(validohfiles,sourceindex,p);

    return true;
}
// bheader selects header (.oohindex) vs. source (.ocppindex) output.
template<bool bheader>
void Globber::WriteIndex(const vector<ohfileinfo>& files, const path& indexpath,
                         const opParameters& p) {
    //first, create the stream and verify it works
    FileWriteStream o(indexpath.string());

    if(o.is_open()) {
        //first lets write the index file header
        //lets make it match the other file header somewhat..

        //write normal information
        o << "/*" << endl;
        o << "\tGlob File: " << indexpath.string() << endl;
        o << "\topCPP Version: " << opString( opVersion::GetVersion() ) << endl;
        o << "\tBuild Date: " << __DATE__ << " at " << __TIME__ << endl;
        o << "*/" << endl << endl;

        //first lets extract a guarding macro from the headerindex path
        opString filestring = indexpath.string();

        filestring.Replace('/','_');
        filestring.Replace(' ','_');
        filestring.Replace('.','_');
        filestring.Replace(':','_');
        filestring.Replace('+',"PLUS");
        filestring.Replace('-',"DASH");

        //TODO: should actually catch any and all characters which are valid
        //      in linux and windows file systems, but which are not valid in
        //      the preprocessor
        //TODO: this is also in print.cpp, should point to a common function

        filestring = opString("__") + filestring + "__";

        //next lets write any header specific defines
        o << "#ifndef " << filestring << endl;
        o << "#define " << filestring << endl << endl;

        if(!bheader) {
            if (!opParameters::Get().Compact)
                o << "//compile source now" << endl;

            o << "#ifndef OPCOMPILE_SOURCE" << endl;
            o << "\t#define OPCOMPILE_SOURCE" << endl;
            o << "#endif" << endl << endl;
        }

        //now lets write all the file lists
        //how this works is... sections with NOSECTION defined will not be compiled
        //but theres no similar requirement (SECTION define) for compiling a section...
        opString macrostart = bheader ? "OPCOMPILE_OOH_" : "OPCOMPILE_OCPP_";
        opString buildmacro = bheader ? "OPCOMPILING_OOH" : "OPCOMPILING_OCPP";

        //include the dialect output files for the header index
        vector<opString> dialects;

        //dialect output paths
        opSet<path> pdialects = opDriver::GetDialectFiles();

        typedef opSet<path>::iterator diterator;

        diterator dend = pdialects.end();

        path outputpath = p.GeneratedDirectory.GetString();

        for(diterator dit = pdialects.begin(); dit != dend; ++dit) {
            path dialectpath = *dit;
            //path dialect = outputpath / dialectpath;
            path dialect = dialectpath;

            opString dialectstring = dialect.string();

            if(bheader)
                dialectstring += ".ooh";
            else
                dialectstring += ".ocpp";

            dialectstring.Replace("../","_/");

            // if(exists(dialectstring.GetString()))
            // {
            dialects.push_back(dialectstring);
            // }
        }

        //now lets write the header section
        if (!opParameters::Get().Compact)
            o << "//compile all the code in file header sections now" << endl;

        o << "#define " << macrostart << "HEADING" << endl;

        WriteDialectList<bheader>(o,dialects,buildmacro + "_HEADING");
        WriteIndexList<bheader>(o,files,buildmacro + "_HEADING");

        o << "#undef " << macrostart << "HEADING" << endl << endl;

        //now lets write the body section
        if (!opParameters::Get().Compact)
            o << "//compile all the code in file body sections now" << endl;

        o << "#define " << macrostart << "BODY" << endl;

        WriteDialectList<bheader>(o,dialects,buildmacro + "_BODY");
        WriteIndexList<bheader>(o,files,buildmacro + "_BODY");

        o << "#undef " << macrostart << "BODY" << endl << endl;

        //now lets write the footer section
        if (!opParameters::Get().Compact)
            o << "//compile all the code in file footer sections now" << endl;

        o << "#define " << macrostart << "FOOTER" << endl;

        WriteDialectList<bheader>(o,dialects,buildmacro + "_FOOTER");
        WriteIndexList<bheader>(o,files,buildmacro + "_FOOTER");

        o << "#undef " << macrostart << "FOOTER" << endl << endl;

        //write the guard footer
        o << "#endif//header" << endl << endl;
    }
}