// Demo driver: builds a fixed 9-vertex graph, prints it, and runs topoSort.
int main() {
    const int vertexCount = 9;
    graph *g = initGraph(vertexCount);

    // Edge list of the demo DAG, inserted in the same order as before.
    const int edges[][2] = {
        {0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 3}, {2, 5},
        {2, 7}, {3, 4}, {4, 6}, {4, 8}, {5, 6}, {7, 8},
    };
    for (const auto &edge : edges) {
        addEdge(g, edge[0], edge[1]);
    }

    printGraph(g);
    topoSort(g);
}
// Destructor: frees the edge matrix, then destroys the nodes in topological
// order (see the comment below for why the order matters).
Graph::~Graph() {
    DPRINTF("~Graph()");
    // Compute the destruction order up front, before any teardown begins.
    std::vector<CfgNode*> ts = topoSort();
    // Free every edge object and every matrix row, then the row-pointer array.
    // NOTE(review): rows iterate up to 'reserved' but columns only up to
    // 'nodes.size()' — presumably the matrix is allocated reserved wide and
    // only the first nodes.size() columns can hold edges; confirm against
    // the allocation site.
    for (size_t i = 0; i < reserved; i++) {
        for (size_t j = 0; j < nodes.size(); j++) {
            if (matrix[i][j] != NULL) // free all edges
                delete matrix[i][j];
            matrix[i][j] = NULL;
        }
        delete [] matrix[i];
        matrix[i] = NULL;
    }
    delete [] matrix;
    matrix = NULL;
    // we delete in topological order so that we never get into a race
    // where a module (which is deleted by the Cfg) is still running and wants to
    // send data to an module which got already freed. We rely on the d'tors to do
    // a join on the underlying thread.
    for (size_t i = 0; i < ts.size(); i++) {
        delete ts[i];
    }
    DPRINTF("~Graph() done");
}
// Derives a character ordering of an alien alphabet from a lexicographically
// sorted word list: build the precedence graph, then topologically sort it.
string alienOrder(vector<string>& words) {
    unordered_map<char, unordered_set<char>> adjacency;
    unordered_map<char, int> indegree;
    buildGraph(words, adjacency, indegree);
    return topoSort(adjacency, indegree);
}
void ConvolutionalNetworkLayer::update(){ if (!sorted){ topoSort(); } for (shared_ptr<ComponentNode> node : nodes){ shared_ptr<AbstractComponent> component = node->getComponent(); component->update(); component->getNum(); } }
// Forward pass: feeds visualValue through the component DAG in topological
// order, then concatenates the outputs of all sink components into
// hiddenValue (a single column vector) and updates hiddenUnit.
void ConvolutionalNetworkLayer::calculate(){
    // Lazily establish the topological order before propagating values.
    if (!sorted){ topoSort(); }
    for (shared_ptr<ComponentNode> node : nodes) {
        int id = node->getId(); // NOTE(review): unused local, kept as-is
        shared_ptr<AbstractComponent> component = node->getComponent();
        // Collect this node's inputs: the hidden values of every predecessor...
        vector<shared_ptr<AbstractMatrix>> vValue;
        for (shared_ptr<ComponentNode> pred : node->getBeforeNode()) {
            vector<shared_ptr<AbstractMatrix>> temp = pred->getComponent()->getHiddenValue();
            for (shared_ptr<AbstractMatrix> tempMatix : temp){
                vValue.push_back(tempMatix);
            }
        }
        // ...or the layer's own input for source nodes with no predecessors.
        if (node->getBeforeNode().size() == 0) {
            vValue.push_back(visualValue);
        }
        component->setVisualValue(vValue);
        component->calculate();
    }
    // Concatenate the hidden values of all sink nodes (no successors),
    // flattening each matrix column-wise into one growing column vector.
    vector<shared_ptr<AbstractMatrix>> hValue; // NOTE(review): unused, kept as-is
    shared_ptr<AbstractMatrix> v = shared_ptr<AbstractMatrix>(new Matrix(0, 1));
    v->setAllValue(0);
    for (shared_ptr<ComponentNode> node : nodes){
        if (node->getNextNode().size() == 0){
            vector<shared_ptr<AbstractMatrix>> temp = node->getComponent()->getHiddenValue();
            for (shared_ptr<AbstractMatrix> tempMatrix : temp){
                v = v->mergeRow(tempMatrix->m2vByColumn());
            }
        }
    }
    this->hiddenValue = v;
    // Degenerate layer with no components: pass the input straight through.
    if (nodes.size() == 0){
        this->hiddenValue = this->visualValue->m2vByColumn();
    }
    this->hiddenUnit = hiddenValue->getRowSize();
}
// Variant of calculate(): runs the same forward pass over the component DAG
// and stores the merged sink outputs in hiddenValue, but does not update
// hiddenUnit or handle the empty-layer case.
void ConvolutionalNetworkLayer::compute(){
    if (!sorted){ topoSort(); }
    shared_ptr<AbstractMatrix> v = this->visualValue;
    for (shared_ptr<ComponentNode> node : nodes) {
        int id = node->getId(); // NOTE(review): unused local, kept as-is
        shared_ptr<AbstractComponent> component = node->getComponent();
        // Gather predecessor hidden values as this node's input...
        vector<shared_ptr<AbstractMatrix>> vValue;
        for (shared_ptr<ComponentNode> pred : node->getBeforeNode()) {
            vector<shared_ptr<AbstractMatrix>> temp = pred->getComponent()->getHiddenValue();
            for (shared_ptr<AbstractMatrix> tempMatix : temp){
                vValue.push_back(tempMatix);
            }
        }
        // ...or the layer input for source nodes.
        if (node->getBeforeNode().size() == 0) {
            vValue.push_back(v);
        }
        component->setVisualValue(vValue);
        component->calculate();
    }
    vector<shared_ptr<AbstractMatrix>> hValue; // NOTE(review): unused, kept as-is
    v = shared_ptr<AbstractMatrix>(new Matrix(0, 1));
    for (shared_ptr<ComponentNode> node : nodes){
        if (node->getNextNode().size() == 0){
            vector<shared_ptr<AbstractMatrix>> temp = node->getComponent()->getHiddenValue();
            for (shared_ptr<AbstractMatrix> tempMatrix : temp){
                // NOTE(review): differs from calculate() in two ways —
                // the merge runs once per COLUMN of tempMatrix, and the
                // return value of mergeRow() is discarded instead of being
                // reassigned to v (calculate() does v = v->mergeRow(...)).
                // If mergeRow() is non-mutating, v stays empty here — verify
                // against the AbstractMatrix implementation.
                for (size_t i = 0; i < tempMatrix->getColumnSize(); i++){
                    v->mergeRow(tempMatrix->m2vByColumn());
                }
            }
        }
    }
    this->hiddenValue = v;
}
// Reads a graph from stdin — first the vertex count n, then for each vertex u
// a list of 1-based successors terminated by 0 — topologically sorts it, and
// prints the resulting 1-based vertex sequence on one line.
//
// Fix: every scanf result is now checked. Previously, EOF or malformed input
// left v indeterminate and the inner while(true) could spin forever.
int main() {
    int n;
    if (scanf("%d", &n) != 1 || n < 0) return 1; // malformed or missing count
    DirectedGraphAsAdjList graph(n);
    for (int u = 0; u < n; ++u) {
        while (true) {
            int v;
            if (scanf("%d", &v) != 1) return 1; // truncated input
            if (v == 0) break;                  // 0 terminates u's list
            --v;                                // convert to 0-based
            graph.AddEdge(Edge(u, v));
        }
    }
    TopoSort<DirectedGraphAsAdjList> topoSort(&graph);
    topoSort.Compute();
    for (int i = 0; i < n; ++i) {
        if (i > 0) printf(" ");
        printf("%d", topoSort.SequenceAt(i) + 1); // back to 1-based for output
    }
    printf("\n");
    return 0;
}
// Returns the layer's component nodes, guaranteed to be in topological
// order (sorting lazily on first access).
vector<shared_ptr<ComponentNode>> ConvolutionalNetworkLayer::getAllComponents(){
    if (!sorted) {
        topoSort();
    }
    return nodes;
}
// Backward pass: walks the component DAG in reverse topological order,
// slicing hiddenGradient into per-sink pieces, calling gradient() on each
// component, and forwarding visual gradients to predecessors. Finishes by
// storing the source node's visual gradient in this->visualGradient and
// zeroing hiddenGradient.
void ConvolutionalNetworkLayer::gradient(){
    if (!sorted){ topoSort(); }
    size_t hiddenUnitNum = this->hiddenUnit;
    // 'index' tracks the exclusive upper bound of the slice of hiddenGradient
    // not yet consumed; sinks were concatenated in forward order, so walking
    // nodes backwards peels slices off the end.
    size_t index = hiddenUnitNum;
    for (int j = nodes.size() - 1; j >= 0; j--){
        shared_ptr<ComponentNode> node = nodes[j];
        shared_ptr<AbstractComponent> component = node->getComponent();
        if (node->getNextNode().size() == 0){
            // Sink node: its gradient comes straight from hiddenGradient.
            // NOTE(review): assumes every matrix in getHiddenValue() has the
            // same dimensions as element [0] — confirm.
            size_t vectorSize = component->getHiddenValue().size();
            size_t mRow = component->getHiddenValue()[0]->getRowSize();
            size_t mColumn = component->getHiddenValue()[0]->getColumnSize();
            size_t square = mRow*mColumn;
            // This sink contributed vectorSize*square rows; slice them off.
            shared_ptr<AbstractMatrix> gradientMatrix = hiddenGradient->submatrix(index - square*vectorSize, index, 0, 1);
            vector<shared_ptr<AbstractMatrix>> gradient;
            for (size_t i = 0; i < vectorSize; i++){
                // Un-flatten each square-sized column slice back to mRow rows.
                shared_ptr<AbstractMatrix> matrix = gradientMatrix->submatrix(i*square, (i + 1)*square, 0, 1);
                gradient.push_back(matrix->v2mByColomn(mRow));
            }
            component->setHiddenGradient(gradient);
            component->gradient();
            index -= vectorSize*square;
        }
        // Distribute this node's visual gradient among its predecessors,
        // in the same order their hidden values were concatenated in the
        // forward pass.
        vector<shared_ptr<AbstractMatrix>> vGradient = component->getVisualGradient();
        size_t predIndex = 0;
        for (shared_ptr<ComponentNode> pred : node->getBeforeNode()) {
            shared_ptr<AbstractComponent> predComponent = pred->getComponent();
            size_t predSize = predComponent->getHiddenValue().size();
            vector<shared_ptr<AbstractMatrix>> predGradient;
            for (int i = 0; i < predSize; i++){
                predGradient.push_back(vGradient[predIndex + i]);
            }
            predComponent->setHiddenGradient(predGradient);
            predComponent->gradient();
            predIndex += predSize;
        }
        // Source node: its visual gradient is the gradient w.r.t. the layer
        // input. NOTE(review): only element [0] is kept — presumably a
        // source has exactly one visual-gradient matrix; verify.
        if (node->getBeforeNode().size() == 0){
            this->visualGradient = component->getVisualGradient()[0];
        }
    }
    // Reset the accumulated hidden gradient for the next backward pass.
    this->hiddenGradient->setAllValue(0);
}
int main() { AGraph* g = new AGraph; createAGraph(g); topoSort(g); // O(V+E) return 0; }
void toposort_sortLinks(int sortedLinks[])
//
//  Input:   none
//  Output:  sortedLinks = array of link indexes in sorted order
//  Purpose: sorts links from upstream to downstream.
//
//  Side effects: for Dynamic Wave routing, only recomputes Node[].degree.
//  Otherwise allocates/frees the file-scope work arrays InDegree, StartPos,
//  AdjList and Stack, and reports ERR_MEMORY or ERR_LOOP on failure.
{
    int i, n = 0;

    // --- no need to sort links for Dyn. Wave routing
    for ( i=0; i<Nobjects[LINK]; i++) sortedLinks[i] = i;
    if ( RouteModel == DW )
    {
        // --- find number of outflow links for each node
        for ( i=0; i<Nobjects[NODE]; i++ ) Node[i].degree = 0;
        for ( i=0; i<Nobjects[LINK]; i++ )
        {
            // --- if upstream node is an outfall, then increment outflow
            //     count for downstream node, otherwise increment count
            //     for upstream node
            n = Link[i].node1;
            if ( Node[n].type == OUTFALL ) Node[ Link[i].node2 ].degree++;
            else Node[n].degree++;
        }
        return;
    }

    // --- allocate arrays used for topo sorting
    if ( ErrorCode ) return;
    InDegree = (int *) calloc(Nobjects[NODE], sizeof(int));
    StartPos = (int *) calloc(Nobjects[NODE], sizeof(int));
    AdjList  = (int *) calloc(Nobjects[LINK], sizeof(int));
    Stack    = (int *) calloc(Nobjects[NODE], sizeof(int));
    if ( InDegree == NULL || StartPos == NULL ||
         AdjList == NULL || Stack == NULL )
    {
        report_writeErrorMsg(ERR_MEMORY, "");
    }
    else
    {
        // --- create a directed adjacency list of links leaving each node
        createAdjList(DIRECTED);

        // --- adjust adjacency list for DIVIDER nodes
        adjustAdjList();

        // --- find number of links entering each node
        for (i = 0; i < Nobjects[NODE]; i++) InDegree[i] = 0;
        for (i = 0; i < Nobjects[LINK]; i++) InDegree[ Link[i].node2 ]++;

        // --- topo sort the links (n = number of links actually sorted)
        n = topoSort(sortedLinks);
    }

    // --- free allocated memory
    FREE(InDegree);
    FREE(StartPos);
    FREE(AdjList);
    FREE(Stack);

    // --- check that all links are included in SortedLinks
    //     (a shortfall means the network contains a loop)
    if ( !ErrorCode && n != Nobjects[LINK] )
    {
        report_writeErrorMsg(ERR_LOOP, "");
        findCycles();
    }
}