// Runs the requested search algorithm over graph G starting from `source`.
// Currently only BFS is implemented; any other algorithm is a no-op.
void search(Graph<NodeT>* G, NodeT* source, SearchAlgo algo) {
    if (algo != BFS) {
        // write DFS code
        return;
    }
    BreadthFirstSearch(G, source);
}
TEST_F(SimpleGraphTests, test) {
    // Load the unweighted adjacency list fixture and verify that the
    // BFS distance from vertex 2 to vertex 6 is exactly three hops.
    auto graph = Graph();
    graph.ReadAdjacencyList("kargerMinCutSimple.txt", ListTypes::kNoWeights);
    const auto distance = graph.BreadthFirstSearch(2, 6);
    EXPECT_EQ(distance, 3);
}
int main() { id_no = 0; int i,j; AdjList al; for(i=0; i<GRAPH_SIZE; i++) { al.list[i] = newVertex(); //printf("(%d).\n",al.list[i]->v->id_no); } AdjMatrix am; for(i=0; i<GRAPH_SIZE; i++) for(j=0; j<GRAPH_SIZE; j++) am.matrix[i][j] = INFINITE; edgepopulator(al,am,GRAPH_DENSITY); printf("No. of vertices = %d. No. of edges = %d.\n",GRAPH_SIZE,no_of_edges); fillAdjMatrix(al,am.matrix); //printAdjMatrix(am.matrix); clock_t start = clock(); visited_nodes; int question = 3; int answer = DepthFirstSearch(al,am,3,"ijlc",&visited_nodes); int dfs_count = visited_nodes; visited_nodes = 0; printf("%d.\n",BreadthFirstSearch(al,am,3,"ijlc",&visited_nodes)); int bfs_count = visited_nodes; int g_size = GRAPH_SIZE; float g_density = GRAPH_DENSITY; printf("Under the following conditions..\n"); printf("GRAPH_SIZE = %d.\n",g_size); printf("GRAPH_DENSITY = %f.\n", g_density); printf("Initial Distance = %d.\n", abs(answer-question)); printf("Results DFS(%d) vs BFS(%d).\n",dfs_count, bfs_count); return 0; }
// Searches through the height matrix to locate height maps. In order // to speed up the search process, the step sizes in row and column are // both larger than one. If a height is more than 1 meter, say, 101, first // it is checked if it belongs to any of existing height map, if so, it is // eliminated, if not, the loop stops and calls breadth first traverse process // to find all points belonging to the new height map. int HeightMatrix::SearchHeightMap() { int row_step_size = 3; int column_step_size = 5; for (int i = 0; i <= non_zero_row_number_ - row_step_size; i += row_step_size) { for (int j = start_index_; j <= end_index_ - column_step_size; j += column_step_size) { if (matrix_[i][j] >= 100) { Coordinate c(i, j); bool is_point_already_included = false; // check if this point belongs to any other existing height map for (int k = 0; k < height_maps_.size(); ++k) { if (height_maps_[k].HasPoint(c)) { is_point_already_included = true; break; } } // only searches from homeless points if (!is_point_already_included) { HeightMap hm = BreadthFirstSearch(c); height_maps_.push_back(hm); } } } } return height_maps_.size(); }
bool EdmondsKarp<TFLOW,TCAP>::FindAugmentingPath ( typename FordFulkerson<TFLOW,TCAP>::Node *source, typename FordFulkerson<TFLOW,TCAP>::Node *sink, Size timestamp, TCAP scale, TCAP &maxrjcap)
{
    // Anything other than the shortest-path strategy falls through to
    // the fattest-path search, which is incompatible with capacity scaling.
    if (m_strategy != ShortestPathMethod) {
        if (this->m_cap_scale != 0) {
            throw System::InvalidOperationException(__FUNCTION__, __LINE__,
                "Fattest path strategy and capacity scaling are incompatible.");
        }
        maxrjcap = 0;
        return FattestPathSearch (source, sink, timestamp);
    }

    // Shortest path: delegate directly to BFS.
    return BreadthFirstSearch (source, sink, timestamp, scale, maxrjcap);
}
/// 广度优先遍历 void testBreadthFirstSearch() { vector<char> v; for ( int i = 0; i < 8; ++i ) { v.push_back( 'r' + i ); } GraphicsViaAdjacencyList<char> g( v, Undigraph ); g.Link2Vertex( 0, 1 ); g.Link2Vertex( 0, 4 ); g.Link2Vertex( 1, 5 ); g.Link2Vertex( 2, 5 ); g.Link2Vertex( 2, 3 ); g.Link2Vertex( 2, 6 ); g.Link2Vertex( 3, 6 ); g.Link2Vertex( 3, 7 ); g.Link2Vertex( 5, 6 ); g.Link2Vertex( 6, 7 ); BreadthFirstSearch( g, 1 ); cout << endl; }
// Builds a binary tree from the words in the file named by argv[1], then
// interactively checks words typed on stdin with a depth-first search of
// the file-backed tree and a breadth-first search of a small demo tree.
int main(int argc, char *argv[]) {
    MyBtree<std::string> mbt;
    MyBtree<std::string> mbt2 { "hello","this","is","james" };

    // BUG FIX: the original dereferenced argv[1] without checking argc,
    // which is undefined behavior when the program is run with no argument.
    if (argc < 2) {
        std::cerr << "Usage: supply an input file as the first argument!" << std::endl;
        std::exit(1);
    }

    std::ifstream infile(argv[1]);
    if (!infile.good()) {
        std::cerr << "Could not open " << argv[1] << " for input!" << std::endl;
        std::exit(1);
    }

    std::string temp;
    while (infile >> temp)
        mbt.insert(temp);

    std::cout << "Created a binary tree of size: " << mbt.get_size()
              << " (" << mbt.get_left_depth() << "," << mbt.get_right_depth()
              << ")" << std::endl;
    std::cout << "Enter words that you want to check (ctrl-c to exit)" << std::endl;

    while (std::cin >> temp) {
        std::cout << " Result of depth first search for: " << temp
                  << " in: " << argv[1] << " is: " << std::boolalpha
                  << DepthFirstSearch(mbt, temp) << std::endl;
        // NOTE(review): BFS searches the hard-coded mbt2, not the
        // file-backed mbt — confirm this asymmetry is intentional.
        std::cout << " Result of breadth first search for: " << temp
                  << " in: " << argv[1] << " is: " << std::boolalpha
                  << BreadthFirstSearch(mbt2, temp) << std::endl;
    }
}
int main() { int vertices, edges, i, v1, v2; int noOfRows,noOfCols; FILE * graphFile =fopen("graph.txt","r"); fscanf(graphFile, "%d %d %d",&noOfRows, &noOfCols, &NNZ); printf("No fo rows %d, No of Cols %d, nnz %d \n",noOfRows,noOfCols,NNZ); //- done vertices = noOfRows; edges =NNZ; struct Edge * adjacencyList[vertices]; // Size is made (vertices + 1) to use the // array as 1-indexed, for simplicity int parent[vertices]; // Each element holds the Node value of its parent int level[vertices]; // Each element holds the Level value of that node // Must initialize your array for (i = 0; i < vertices; ++i) { adjacencyList[i] = NULL; parent[i] = 0; level[i] = -1; } for (i = 0; i < edges; ++i) { int val; fscanf(graphFile, "%d %d %d",&v1, &v2, &val); // Adding edge v1 --> v2 adjacencyList[v1] = AddEdge(adjacencyList[v1], v2); // Adding edge v2 --> v1 // Remove this if you want a Directed Graph // adjacencyList[v2] = AddEdge(adjacencyList[v2], v1); } // Printing Adjacency List printf("\nAdjacency List - of graph \n\n"); for (i = 0; i < vertices; ++i) { printf("adjacencyList[%d] -> ", i); struct Edge * traverse = adjacencyList[i]; while (traverse != NULL) { printf("%d -> ", traverse->vertex); traverse = traverse->next; } printf("NULL\n"); } printf("geting starting list of inputs:\n"); int startArrayCount; FILE * vectorFile= fopen("input.txt","r"); fscanf(vectorFile,"%d",&startArrayCount); int inputsCircuits[startArrayCount]; for(i=0;i<startArrayCount;i++){ int tempVal; fscanf(vectorFile,"%d",&tempVal); inputsCircuits[i]= tempVal; printf("%d ,",inputsCircuits[i]); } printf("\n"); BreadthFirstSearch(adjacencyList, vertices, parent, level, inputsCircuits ,startArrayCount); // Printing Level and Parent Arrays printf("\nLevel and Parent Arrays -\n"); for (i = 0; i < vertices; ++i) { printf("Level of Vertex %d is %d, Parent is %d\n", i, level[i], parent[i]); } return 0; }
// use a best-first search theorem prover
//
// Entry point for the resolution prover. Copies the input clause list
// into an AVL tree, removes tautologies and subsumed clauses, splits the
// remainder into set-of-support (SOS) and axiom trees, generates the
// initial list of candidate resolution pairs, then dispatches to the
// strategy selected by the global `searchtype`. Returns VALID,
// NOTPROVEN, or an error status.
int runbfsprover(List<Clause> &clist) {
    int status;

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // copy clauses to a tree for better performance
    BinaryTree_AVL<Clause> ctree;
    ListIterator<Clause> clIter(clist);
    for (nextClause=1; !clIter.done(); clIter++) {
        // insert clauses at depth 1, numbered in input order
        Clause clause(clIter());
        clause.setDepth(1);
        clause.setNumber(nextClause++);
        if (ctree.insert(clause) != OK) {
            ERROR("insert failed.", errno);
            return(NOTOK);
        }
    }

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // list clauses
    cout << endl;
    cout << "===============================================" << endl;
    cout << "initial list of clauses: " << endl;
    BinaryTree_AVL_Iterator_InOrder<Clause> ctreeIter(ctree);
    for ( ; !ctreeIter.done(); ctreeIter++) {
        cout << ctreeIter() << endl;
    }
    cout << "===============================================" << endl;

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // filter clauses: drop tautologies and clauses subsumed by others
    if (removeTautologies(ctree) != OK) {
        ERROR("removeTautologies failed.", errno);
        return(NOTOK);
    }
    if (initialRemoveSubsumed(ctree) != OK) {
        ERROR("removeSubsumed failed.", errno);
        return(NOTOK);
    }

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // separate clauses into SOS and axioms
    BinaryTree_AVL<Clause> soslist;
    BinaryTree_AVL<Clause> axiomlist;
    for (ctreeIter.reset(); !ctreeIter.done(); ctreeIter++) {
        if (ctreeIter().getSOS()) {
            if ((status = soslist.insert(ctreeIter())) != OK) {
                ERROR("insert failed.", errno);
                return(status);
            }
        } else {
            if ((status = axiomlist.insert(ctreeIter())) != OK) {
                ERROR("insert failed.", errno);
                return(status);
            }
        }
    }

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // generate a list of possible resolution states: pair each SOS
    // clause against every later SOS clause and against every axiom,
    // keeping only pairs whose maximal literals unify as complements
    List<BFSNode> bfsnodelist;
    BinaryTree_AVL_Iterator_InOrder<Clause> sosIter1(soslist);
    for ( ; !sosIter1.done(); sosIter1++) {
        // skip clauses that exceed the literal-count limit
        if (sosIter1().getTotalMembers() > maxliterals) continue;
        Literal maxlit1;
        if (sosIter1().getMaximalLiteral(maxlit1) != OK) {
            ERROR("get maximal literal failed.", errno);
            return(NOTOK);
        }
        // SOS x SOS pairs — start one past sosIter1 to avoid duplicates
        BinaryTree_AVL_Iterator_InOrder<Clause> sosIter2(sosIter1);
        for (sosIter2++ ; !sosIter2.done(); sosIter2++) {
            // generate possible resolution states
            Literal maxlit2;
            if (sosIter2().getMaximalLiteral(maxlit2) != OK) {
                ERROR("get maximal literal failed.", errno);
                return(NOTOK);
            }
            if (sosIter2().getTotalMembers() > maxliterals) {
                statistics[MaximumLiteralsClausesRejected] += 1;
                totalstatistics[TotalMaximumLiteralsClausesRejected] += 1;
                continue;
            }
            // reject pairs whose maximal literals cannot be resolved
            if (maxlit1.unify_ne(~maxlit2)) continue;
            BFSNode bfsnode(maxlit1, maxlit2, sosIter1(), sosIter2());
            if (bfsnodelist.insertAtEnd(bfsnode) != OK) {
                ERROR("insert failed.", errno);
                // NOTE(review): `status` is not set on this path — it
                // still holds the last OK from the SOS/axiom split (or is
                // uninitialized if that loop never ran), so this likely
                // returns OK on failure; consider returning NOTOK.
                return(status);
            }
        }
        // SOS x axiom pairs
        BinaryTree_AVL_Iterator_InOrder<Clause> axiomIter(axiomlist);
        for ( ; !axiomIter.done(); axiomIter++) {
            // generate possible resolution states
            Literal maxlit2;
            if (axiomIter().getMaximalLiteral(maxlit2) != OK) {
                ERROR("get maximal literal failed.", errno);
                return(NOTOK);
            }
            if (axiomIter().getTotalMembers() > maxliterals) {
                statistics[MaximumLiteralsClausesRejected] += 1;
                totalstatistics[TotalMaximumLiteralsClausesRejected] += 1;
                continue;
            }
            if (maxlit1.unify_ne(~maxlit2)) continue;
            BFSNode bfsnode(maxlit1, maxlit2, sosIter1(), axiomIter());
            if (bfsnodelist.insertAtEnd(bfsnode) != OK) {
                ERROR("insert failed.", errno);
                // NOTE(review): same possibly-stale `status` as above.
                return(status);
            }
        }
    }

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // call search routines selected by the global `searchtype`
    switch (searchtype) {
    case BestFirst:
        status = BestFirstSearch(bfsnodelist, soslist, axiomlist);
        break;
    case DepthFirstHillClimb:
        status = DepthFirstHillClimbSearch(bfsnodelist, soslist, axiomlist);
        break;
    case DepthFirst:
        status = DepthFirstSearch(bfsnodelist, soslist, axiomlist);
        break;
    case BreadthFirst:
        status = BreadthFirstSearch(bfsnodelist, soslist, axiomlist);
        break;
    case IterativeDeepening:
        status = IterativeDeepeningSearch(bfsnodelist, soslist, axiomlist);
        break;
    default:
        ERRORD("unknown search type", searchtype, EINVAL);
        return(NOTOK);
    }

    // dump data info
    if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }

    // report results of provers
    switch (status) {
    case OK:
    case VALID:
        // valid program
        programstatistics[TotalValidPrograms] += 1;
        cout << endl;
        cout << "Run Time Statistics ..." << endl;
        cout << statistics << endl;
        cout << endl;
        cout << "VALID program." << endl;
        // dump data info
        if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }
        return(VALID);
    case NOTPROVEN:
        // distinguish resource exhaustion from a genuine non-proof
        if (nextClause > maxclause) {
            programstatistics[TotalMaximumClauseExceededPrograms] += 1;
            ERROR("maxclause exceeded !!!", EINVAL);
        } else if (currentBFSDepth > maxdepth) {
            programstatistics[TotalMaximumDepthExceededPrograms] += 1;
            ERROR("maxdepth exceeded !!!", EINVAL);
        }
        programstatistics[TotalNotProvenPrograms] += 1;
        cout << endl;
        cout << "Run Time Statistics ..." << endl;
        cout << statistics << endl;
        cout << endl;
        cout << "NOTPROVEN program." << endl;
        // dump data info
        if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }
        return(NOTPROVEN);
    default:
        // some type of error
        ERRORD("unexpected return from best first search !!!", status, EINVAL);
        // dump data info
        if (reportmemoryusage) { SETFINALEDATA(); DUMPDATA(cout); }
        return(status);
    }
}