DECLARE_EXPORT void Flow::validate(Action action)
{
  // Validate this flow (the association between an operation and a buffer)
  // and apply the requested persistence action (ADD / CHANGE / ADD_CHANGE /
  // REMOVE). Throws DataException on an incomplete flow, a duplicate, an
  // attempted update, or removal of a nonexistent flow.

  // Catch null operation and buffer pointers.
  // They are copied to locals first because the REMOVE branch deletes *this.
  Operation* oper = getOperation();
  Buffer* buf = getBuffer();
  if (!oper || !buf)
  {
    // This flow is not a valid one since it misses essential information
    if (!oper && !buf)
      throw DataException("Missing operation and buffer on a flow");
    else if (!oper)
      throw DataException("Missing operation on a flow with buffer '"
          + buf->getName() + "'");
    else
      throw DataException("Missing buffer on a flow with operation '"
          + oper->getName() + "'");
  }

  // Check if a flow with 1) identical buffer, 2) identical operation,
  // 3) overlapping effectivity dates and 4) the same flow type already
  // exists. The loop leaves i at the duplicate, or at end() if none.
  Operation::flowlist::const_iterator i = oper->getFlows().begin();
  for (; i != oper->getFlows().end(); ++i)
    if (i->getBuffer() == buf
        && i->getEffective().overlap(getEffective())
        && i->getType() == getType()
        && &*i != this) break;

  // Apply the appropriate action
  switch (action)
  {
    case ADD:
      if (i != oper->getFlows().end())
        throw DataException("Flow of '" + oper->getName() + "' and '"
            + buf->getName() + "' already exists");
      break;
    case CHANGE:
      throw DataException("Can't update a flow");
    case ADD_CHANGE:
      // ADD is handled in the code after the switch statement
      if (i == oper->getFlows().end()) break;
      throw DataException("Can't update a flow between '" + oper->getName()
          + "' and '" + buf->getName() + "'");
    case REMOVE:
      // Delete the temporary flow object. Only the local pointers and the
      // static trigger below may be used from here on.
      delete this;
      // Nothing to delete
      if (i == oper->getFlows().end())
        throw DataException("Can't remove nonexistent flow of '"
            + oper->getName() + "' and '" + buf->getName() + "'");
      // Delete the matching, registered flow
      delete &*i;
  }

  // Set a flag to make sure the level computation is triggered again
  HasLevel::triggerLazyRecomputation();
}
int main(int argc, char *argv[]) { ListContainer list_container; StackContainer stack_container; stack_container.all_lists = &list_container; TreeContainer tree_container; tree_container.all_lists = &list_container; HashContainer hash_container; hash_container.all_lists = &list_container; hash_container.stopword_tree = &tree_container; GraphContainer *graph_container[64]; for(int i = 0; i<64; i++) graph_container[i] = NULL; if ( argc != 2 ) { list_container.error("No file input or too many arguments, please try again"); return -1; } ArgumentManager argMgr(argv[1]); string filename = argMgr.get("script"); //string dict_filename = argMgr.get("dictionary"); char* fn = new char[filename.length()+1]; strcpy(fn, filename.c_str()); //list_container.initialize("dictionary", dict_filename); //insert the dictionary into a list //ListNode* dictionary_list = list_container.find_list("dictionary"); //tree_container.initiate_tree(dictionary_list->list); //tree_container.printLevelOrder(tree_container.root, 6); ifstream fin(fn); if(fin.fail()) { list_container.error("Failure to open script file "+filename+" exiting program..."); return -1; } list_container.error("||||||||||||| Start log for script file "+filename+" |||||||||||||"); OperationQueue opQueue(filename); while(!opQueue.isEmpty()) { Operation op = opQueue.pop(); //cout << op; if(op.isExpression()) //if true, operation is an expression that needs to be evaluated { stack_container.convertToPostfix(op.getExpression()); //convert infix to postfix stack_container.postfixCalc(op.getName()); //calculte the postfix expression and perform the operations } else if(op.getName() == "load" || op.getName() == "rank" || op.getName() == "dijkstra" || op.getName() == "topsort" ) { if(op.getName() == "load") //loads a file-list text file into an object of graph_container, that is unique to the graph_container's web_id. { //and then uses the objects initialize_graph function to build a graph based on the given file-list text file. 
string cleaned_filename = list_container.clean_file_name(op.getParameter(1)); for(int i = 0; i<64; i++)//find the first empty graph_container. (support for up to 64 different load functions) { if(graph_container[i] == NULL) //once found, create an instance of the object, and initialize the graph with the given file list { graph_container[i] = new GraphContainer(); graph_container[i]->web_id = op.getParameter(0); graph_container[i]->initialize_graph(cleaned_filename); break; } } } else if(op.getName() == "rank") { string cleaned_filename = list_container.clean_file_name(op.getParameter(1)); for(int i = 0; i<64; i++) { if(op.getParameter(0) == graph_container[i]->web_id) { graph_container[i]->rank_output(cleaned_filename); break; } } } else if(op.getName() == "dijkstra")//call in script with dijkstra(WEBNAME,'vertexname.html') { string cleaned_filename = list_container.clean_file_name(op.getParameter(1)); for(int i = 0; i<64; i++) { if(op.getParameter(0) == graph_container[i]->web_id) { graph_container[i]->dijkstra_calculate(cleaned_filename); break; } } } } else if(op.getName() == "write" || op.getName() == "read") //if false, the operation could be a simple read or write { if(op.getName() == "write") { if(op.parameterCount() == 2) //this means there is no 'forward' or 'reverse' specification in which we will just assume forward { list_container.writeForward(op.getParameter(0), op.getParameter(1), "iterative"); } else if(op.getParameter(2) == "forward") { list_container.writeForward(op.getParameter(0), op.getParameter(1), "iterative"); } else if(op.getParameter(2) == "reverse") { list_container.writeReverse(op.getParameter(0), op.getParameter(1), "iterative"); } else { list_container.writeForward(op.getParameter(0), op.getParameter(1), "recursive"); } continue; } if(op.getName() == "read") { list_container.initialize(op.getParameter(0), op.getParameter(1)); continue; } } else if(op.getName() == "union" || op.getName() == "intersection" || op.getName() == "check") 
//if not expression or read/write, it must be a union or intersection command { if(op.getName() == "union") { //call union func from list container with op.getParameter(0) 1 and 2 list_container.union_initialize(op.getParameter(0), op.getParameter(1), op.getParameter(2), "recursive"); //continue; } else if(op.getName() == "intersection") { //call intersection func from list container with op.getParameter(0) 1 and 2 list_container.intersect_initialize(op.getParameter(0), op.getParameter(1), op.getParameter(2), "recursive"); //continue; } else if(op.getName() == "check") { //call function that checks the op.getParameter(1) list with the dictionary passed earlier and then stores //the misspelled words or words that aren't in the dictionary to op.getParameter(0). tree_container.check(op.getParameter(0), op.getParameter(1)); } } else if(/*op.getName() == "load" ||*/ op.getName() == "filter" || op.getName() == "index" || op.getName() == "search") { /*if(op.getName() == "load") { //cout << "op name: " << op.getName() << endl; list_container.initialize(op.getParameter(0), op.getParameter(1)); ListNode* temp; temp = list_container.find_list(op.getParameter(0)); //find corpus list Node* current; current = temp->list.head; while(current != NULL) { list_container.initialize(current->word, current->word); current = current->next; } } else*/ if(op.getName() == "filter") { //cout << "op name: " << op.getName() << endl; list_container.initialize("stopwordUNIQ", op.getParameter(1)); //insert the stop word dictionary into a list ListNode* stopword_list = list_container.find_list("stopwordUNIQ"); tree_container.initiate_tree(stopword_list->list); //since hash_container has an object of tree_container given to it earlier, this tree is now created and stored //in hash_container for use as a filter. 
hash_container.filter(op.getParameter(0)); //tree_container.printLevelOrder(tree_container.root, 6); //hash_container.stopword_tree->printLevelOrder(hash_container.stopword_tree->root, 6); } else if(op.getName() == "index") { hash_container.index(op.getParameter(0)); } else if(op.getName() == "search") { int i = 1; for(i; i < op.parameterCount()-1;i++) { string keyword = list_container.clean_file_name(op.getParameter(i)); hash_container.search(op.getParameter(0), keyword); } } } else list_container.error("operation is neither an expression, read/write, or union/intersection and therefore invalid"); } //list_container.armageddon(); //deallocates all memory taken by the various lists and crashes the program currently, after merge was implemented in union_func_iterative list_container.error("||||||||||||| End log for script file "+filename+" |||||||||||||"); return 99; }
DECLARE_EXPORT void Load::validate(Action action)
{
  // Validates this load (the association between an operation and a
  // resource) and applies the requested persistence action. Throws
  // DataException when the load is incomplete, duplicated, updated,
  // or removed while nonexistent.

  // Catch null operation and resource pointers.
  // Copied to locals first: the REMOVE branch deletes *this below.
  Operation *oper = getOperation();
  Resource *res = getResource();
  if (!oper || !res)
  {
    // Invalid load model
    if (!oper && !res)
      throw DataException("Missing operation and resource on a load");
    else if (!oper)
      throw DataException("Missing operation on a load on resource '"
          + res->getName() + "'");
    else if (!res)
      throw DataException("Missing resource on a load on operation '"
          + oper->getName() + "'");
  }

  // Check if a load with 1) identical resource, 2) identical operation and
  // 3) overlapping effectivity dates already exists.
  // The loop leaves i at the duplicate, or at end() if none was found.
  Operation::loadlist::const_iterator i = oper->getLoads().begin();
  for (; i != oper->getLoads().end(); ++i)
    if (i->getResource() == res
        && i->getEffective().overlap(getEffective())
        && &*i != this) break;

  // Apply the appropriate action
  switch (action)
  {
    case ADD:
      if (i != oper->getLoads().end())
      {
        throw DataException("Load of '" + oper->getName() + "' and '"
            + res->getName() + "' already exists");
      }
      break;
    case CHANGE:
      throw DataException("Can't update a load");
    case ADD_CHANGE:
      // ADD is handled in the code after the switch statement
      if (i == oper->getLoads().end()) break;
      throw DataException("Can't update a load");
    case REMOVE:
      // This load was only used temporarily during the reading process.
      // After the delete, only the local pointers and static calls are used.
      delete this;
      if (i == oper->getLoads().end())
        // Nothing to delete
        throw DataException("Can't remove nonexistent load of '"
            + oper->getName() + "' and '" + res->getName() + "'");
      delete &*i;
      // Set a flag to make sure the level computation is triggered again
      HasLevel::triggerLazyRecomputation();
      return;
  }

  // The statements below should be executed only when a new load is created.
  // Set a flag to make sure the level computation is triggered again
  HasLevel::triggerLazyRecomputation();
}
void _FSSEnvironment::runStage(const Operation& subject, DynamicArray<StyleTreeNode*>& styleTreePositions, HashMap<StyleTreeNode*, bool, HashCollection::pointerHash<StyleTreeNode>, HashCollection::pointerMatch<StyleTreeNode>>& styleTreePositionSet, Allocator& stackAllocator, FDUint depth) { #ifdef FD_FSS_DIAGNOSTIC_PRINT const char* depthString = " "; FDUint depthStringLen = strlen(depthString); printf("%s%c/%s\n", depthString + depthStringLen - depth, subject.getName().ownerType, subject.getName().name); #endif int positionsListingStart = styleTreePositions.getSize(); StyleTree& tree = styleSheet.getTree(); DynamicArray<StyleTreeNode*> styleTreePositionsToAdd(stackAllocator); HashMap<PropertyDef*, void*, HashCollection::pointerHash<PropertyDef>, HashCollection::pointerMatch<PropertyDef>> defToProperty( stackAllocator); // Check the style sheet tree for the properties of the subject. { Profiler p("FSS check for properties", true); Profiler pApplicable("Def Applicable Rules", true); DynamicArray<StyleRule*> applicableRules(stackAllocator); pApplicable.close(); // Follow up on rules suggested by the subject. // Note that we do not keep such rules in the listing of styleTreePositions // - instead we only store multi-atom rules that have a first-atom match. { Profiler p("FSS check rules", true); Array<StyleRule* const> relatedRules = subject.getRelatedRules(); for (int i = 0; i < relatedRules.length; i++) { applicableRules.append(relatedRules[i]); Selector& selector = relatedRules[i]->getSelector(); tree.advanceTopRule(*relatedRules[i], subject, styleTreePositionsToAdd, styleTreePositionSet); } } // Extract properties from the list of rules. 
{ Profiler p("FSS extract properties", true); FDUint ruleCount = applicableRules.getSize(); for (FDUint i = 0; i < ruleCount; i++) { Array<Style> styles = applicableRules[i]->getStyles(); for (int j = 0; j < styles.length; j++) { Style& style = styles[j]; StyleDef& styleDef = style.getDef(); PropertyDef& propertyDef = styleDef.getPropertyDef(); // Create the property if it doesn't already exist. void* prop; if (!defToProperty.get(&propertyDef, prop)) { prop = propertyDef.createProperty(stackAllocator); defToProperty[&propertyDef] = prop; } style.invoke(prop); } } } { Profiler p("FSS do inline styles.", true); Array<const Style> inlineStyles = subject.getInlineStyles(); FDUint inlineStyleCount = inlineStyles.length; for (FDUint i = 0; i < inlineStyleCount; i++) { const Style& style = inlineStyles[i]; StyleDef& styleDef = style.getDef(); PropertyDef& propertyDef = styleDef.getPropertyDef(); // Create the property if it doesn't already exist. void* prop; if (!defToProperty.get(&propertyDef, prop)) { Profiler pNF("Not found shit", true); prop = propertyDef.createProperty(stackAllocator); defToProperty[&propertyDef] = prop; } Profiler p("Invoke Style", true); style.invoke(prop); } } Profiler pC("FSS prop check cleanup.", true); } void* hideVS; FDbool hasHideProp = defToProperty.get(environment.hidePDef, hideVS); if (!hasHideProp || !((SinglePropertyDef<FDbool>::ValueSpecificity*)hideVS)->value) { DoOperation doOp(subject, defToProperty); user->onDoOperation(doOp); // Recurse on children. 
void* childrenVoid; bool hasChildren = defToProperty.get( styleSheet.getDefaultDefs().childrenDef, childrenVoid); if (hasChildren) { Profiler p("FSS Children operations", true); DefaultDefCollection::OpSet* children = (DefaultDefCollection::OpSet*)childrenVoid; FDUint count = children->getSize(); OpConstraint* ops = FD_NEW_ARRAY(OpConstraint, count, *allocator, allocator); DefaultDefCollection::OpSetIterator childrenIterator(*children); Operation* key; bool value; FDUint i = 0; while (childrenIterator.getNext(key, value)) { ops[i++].op = key; } // Set up constraints here void* orderVoid; bool hasConstraints = defToProperty.get( styleSheet.getDefaultDefs().orderDef, orderVoid); if (hasConstraints) { DefaultDefCollection::ConstraintSet* constraints = (DefaultDefCollection::ConstraintSet*)orderVoid; // Iterate through constraints. DefaultDefCollection::ConstraintSetIterator constraintIterator(*constraints); Pair<Selector, Selector>* key; bool value; while (constraintIterator.getNext(key, value)) { // TODO: Make this better. I know this can be better, // this is a w f u l. // We first obtain a list of the things that are supposed to // happen before. OpConstraint* priors[100]; FDUint priorCount = 0; for (i = 0; i < count; i++) { if (key->first.isAtomMatch(*ops[i].op)) { priors[priorCount++] = &ops[i]; } } // We then match that list to the things that are supposed to // happen after. for (i = 0; i < count; i++) { if (!key->second.isAtomMatch(*ops[i].op)) { continue; } for (FDUint j = 0; j < priorCount; j++) { // We check that i isn't already dependent on j. if (ops[i].constraints.contains(priors[j])) { continue; } ops[i].constraints.append(priors[j]); } } } } p.close(); // Go through the children, respecting the constraints. 
FDUint renderCount = 0; for (i = 0; i < count; i++) { FDUint current = i; if (!ops[i].alreadyRendered) { constraintRecurse(ops[i], styleTreePositions, styleTreePositionSet, stackAllocator, depth); } } FD_FREE(ops, *allocator); } Profiler p("FSS Undo", true); user->onUndoOperation(doOp); } // Clean up positions listing int positionsToAddCount = styleTreePositionsToAdd.getSize(); for (int i = 0; i < positionsToAddCount; i++) { styleTreePositionSet.remove(styleTreePositionsToAdd[i]); } // Clean up defToProperty listing { HashMap<PropertyDef*, void*, HashCollection::pointerHash<PropertyDef>, HashCollection::pointerMatch<PropertyDef>>::Iterator propertyIterator(defToProperty); PropertyDef* key; void* value; while (propertyIterator.getNext(key, value)) { key->deleteProperty(value, stackAllocator); } } }
int main(int argc, char *argv[]) { ListContainer list_container; StackContainer stack_container; TimeComplexity time_complexity; //TODO: ensure that the below sharing of the same list_container does not interfere with the stack or arithmetic infix to postfix conversions. time_complexity.all_lists = &list_container; //gives the TimeComplexity object an address to the listContainer stack_container.all_lists = &list_container; //gives the stackContainer an address to the listContainer /* time_actual = 0; list_container.initialize("A", "A20.txt"); ListNode* list = list_container.find_list("A"); int n_hi = list_container.getN("A"); int estimate = (n_hi*(6*n_hi))+ 9; cout << estimate << " ESTIMATE " << endl; list->list.alphabetize(); cout << time_actual << endl; list->list.output(); return 99; */ if ( argc != 2 ) { list_container.error("No file input or too many arguments, please try again"); return -1; } ArgumentManager argMgr(argv[1]); string filename = argMgr.get("script"); time_complexity.result_filename = argMgr.get("result"); time_complexity.output("L1,L2,,,,-----,---,O(g(n)),--------,,", ""); time_complexity.output("size,size,operation,T(n) estimate,T(n) actual,c,n0,g(n)=n^2", "append"); char* fn = new char[filename.length()+1]; strcpy(fn, filename.c_str()); ifstream fin(fn); if(fin.fail()) { list_container.error("Failure to open script file "+filename+" exiting program..."); return -1; } list_container.error("||||||||||||| Start log for script file "+filename+" |||||||||||||"); OperationQueue opQueue(filename); while(!opQueue.isEmpty()) { Operation op = opQueue.pop(); //cout << op << endl; if(op.isExpression()) //if true, operation is an expression that needs to be evaluated { stack_container.convertToPostfix(op.getExpression()); //convert infix to postfix stack_container.postfixCalc(op.getName()); //calculte the postfix expression and perform the operations } else if(op.getName() == "write" || op.getName() == "read") //if false, the operation could be a simple 
read or write { if(op.getName() == "write") { if(op.parameterCount() == 2) //this means there is no 'forward' or 'reverse' specification in which we will just assume forward { list_container.writeForward(op.getParameter(0), op.getParameter(1)); } else if(op.getParameter(2) == "forward") { list_container.writeForward(op.getParameter(0), op.getParameter(1)); } else if(op.getParameter(2) == "reverse") { list_container.writeReverse(op.getParameter(0), op.getParameter(1)); } else { list_container.writeForward(op.getParameter(0), op.getParameter(1)); } continue; } if(op.getName() == "read") { list_container.initialize(op.getParameter(0), op.getParameter(1)); continue; } } else if(op.getName() == "union" || op.getName() == "intersection") //if not expression or read/write, it must be a union or intersection command { if(op.getName() == "union") { // call time complex func, and union here using op.getParameter(0) 1 and 2 time_complexity.union_tracktime(op.getParameter(0), op.getParameter(1), op.getParameter(2)); continue; } else if(op.getName() == "intersection") { // call time complex func, and intersection here using op.getParameter(0) 1 and 2 time_complexity.intersection_tracktime(op.getParameter(0), op.getParameter(1), op.getParameter(2)); continue; } } else list_container.error("operation is neither an expression, read/write, or union/intersection and therefore invalid"); } //list_container.armageddon(); //deallocates all memory taken by the various lists and crashes the program currently, after merge was implemented in union_func_iterative list_container.error("||||||||||||| End log for script file "+filename+" |||||||||||||"); return 99; }