bool Main::runSearchWorker(VisitorBase& v) {
  double best = -std::numeric_limits<double>::infinity();
  size_t count = 0;
  BoundPropagator prop(m_problem.get(), m_space.get(), !m_options->nocaching);

  SearchNode* n = m_search->nextLeaf();
  while (n) {
    prop.propagate(n, true);  // true = report solutions
    n = m_search->nextLeaf();
    if (n) {
      double value = n->getValue();
      // notify the visitor every 10000 leaves, or whenever a non-NaN value improves the best
      if (count == 10000 || (!(value != value) && (value > best))) {
        best = value;
        if (!v.visit()) {
          return true;  // visitor forced termination
        }
        count = 0;
      } else {
        ++count;
      }
    }
  }
  m_solved = true;
  return true;
}
bool ParallelManager::findFrontier() {

  assert(!m_external.empty());

  SearchNode* node = m_external.back();
  m_external.pop_back();
  double eval = 42.0;  // dummy initial evaluation for the root subproblem

  // queue of subproblems, i.e. OR nodes, ordered according to
  // evaluation function
  priority_queue<PQEntry, vector<PQEntry>, PQEntryComp> m_open;
  m_open.push(make_pair(eval, node));

#ifndef NO_HEURISTIC
  // precompute heuristic of initial dummy OR node (couldn't be done earlier)
  assignCostsOR(node);
#endif

  // intermediate container for expanding nodes into
  vector<SearchNode*> newNodes;

  // split subproblems
  while (m_open.size()
         && (m_options->threads == NONE || (int) m_open.size() < m_options->threads)) {
    eval = m_open.top().first;
    node = m_open.top().second;
    m_open.pop();
    DIAG(oss ss; ss << "Top " << node << "(" << *node << ") with eval " << eval << endl; myprint(ss.str());)

    // check for fixed-depth cutoff
    if (m_options->cutoff_depth != NONE) {
      int d = node->getDepth();
      if (d == m_options->cutoff_depth) {
        node->setExtern();
        m_external.push_back(node);
        continue;
      }
    }

    // check for complexity lower bound parameter
    if (m_options->cutoff_size != NONE) {
      if (eval < log10(m_options->cutoff_size) + 5) {
        // command line argument times 10^5
        node->setExtern();
        m_external.push_back(node);
        continue;
      }
    }

    syncAssignment(node);
    deepenFrontier(node, newNodes);

    for (vector<SearchNode*>::iterator it = newNodes.begin(); it != newNodes.end(); ++it) {
      (*it)->setInitialBound(lowerBound(*it));  // store bound at time of evaluation
      m_open.push(make_pair(evaluate(*it), *it));
    }
    newNodes.clear();
  }

  // whatever remains in the open queue constitutes the parallelization frontier
  while (m_open.size()) {
    node = m_open.top().second;
    m_open.pop();
    node->setExtern();
    m_external.push_back(node);
  }

  return true;
}
std::set<const char*> SearchTree::findOffsets(const char* const p_inputString,
                                              const std::size_t p_inputLength) {
  std::set<const char*> returnValue;
  SearchNode* currentNode = &m_rootNode;
  const char* end = p_inputString + p_inputLength;

  for (const char* start = p_inputString; start < end; ++start) {
    currentNode = currentNode->getNext(*start);
    if (!currentNode->getStoredData().empty()) {
      returnValue.insert(start - 3);
    }
  }
  return returnValue;
}
void SearchNode::addDocument(DocID doc_id, const char* string, int string_idx) {
  if (string[string_idx+_depth]=='\0' || string[string_idx+_depth]==' ') {
    //we have reached a word ending - are we on a terminator node?
    if (_terminator == true) {
      //do we have the RIGHT match_type?
      //we have a match - record it...
      // if (LOG) printf("\nfound match doc %d idx %d ",doc_id, string_idx);
      // for (int i = 1; i<=_depth; i++) {
      //   if (LOG) printf("%c",getLetterFromParentForDepth(i));
      // }
      // cout << endl;
      this->reportResult(doc_id);
    }
    if (string[string_idx+_depth]==' ') {
      //we have more words to add
      string_idx = string_idx+_depth+1;
      if (string[string_idx] == '\0') {
        if (LOG) printf("warning: appear to have a nil search query word\n");
      } else {
        SearchTree* tree = SearchTree::Instance();
        tree->addDocument(doc_id, string, string_idx);
      }
    }
  } else {
    //we have not reached the end of the word...
    if (_child_letters[string[string_idx+_depth]]==0) {
      //no more nodes, stop the search
    } else {
      //found a matching child node, keep searching
      SearchNode* node = _child_letters[string[string_idx+_depth]];
      node->addDocument(doc_id, string, string_idx);
    }
  }
}
std::set<void*> SearchTree::search(const unsigned char* const p_inputString,
                                   const std::size_t p_inputLength) {
  assert(m_ready && p_inputString != NULL);
  std::set<void*> returnValue;
  SearchNode* currentNode = &m_rootNode;
  const unsigned char* end = p_inputString + p_inputLength;

  for (const unsigned char* start = p_inputString; start < end; ++start) {
    currentNode = currentNode->getNext(*start);
    if (!currentNode->getStoredData().empty()) {
      const std::set<void*>& mergeThis(currentNode->getStoredData());
      returnValue.insert(mergeThis.begin(), mergeThis.end());
    }
  }
  return returnValue;
}
std::vector<SearchNode*>* AStar::searchPath(SearchNode* start, SearchNode* goal) {
  queue.clear();
  currentIteration++;
  currentGoal = goal;

  //put start in the queue
  start->computeHeuristic(currentGoal);
  start->g = 0;
  start->f = start->h;
  start->expanded = false;
  start->iteration = currentIteration;
  queue.push(start->f, start);

  //pop nodes until the goal is reached or the queue runs dry
  while (queue.empty() == false) {
    SearchNode* n = queue.pop();
    if (n->iteration == currentIteration && n->expanded) {
      continue;  //stale queue entry, already expanded in this search
    } else {
      if (n == currentGoal) {
        // std::cerr<<"PATH FOUND"<<std::endl;
        std::vector<SearchNode*>* result = new std::vector<SearchNode*>();
        SearchNode* s = n;
        while (s != NULL) {
          result->push_back(s);
          s = s->getPredecessor();
        }
        return result;  //note: nodes are in goal-to-start order
      } else {
        expandNode(n);
      }
    }
  }
  std::cerr << "search finished w/o result" << std::endl;
  return NULL;
}
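// Usage sketch for searchPath() above (assumed caller-side code, not part of the
// AStar class): the returned vector is heap-allocated and ordered goal-to-start,
// so a caller would typically reverse it and take ownership. followPath() is a
// hypothetical name used only for illustration.
#include <algorithm>
#include <vector>

void followPath(AStar& planner, SearchNode* start, SearchNode* goal) {
  std::vector<SearchNode*>* path = planner.searchPath(start, goal);
  if (path == NULL) {
    return;  // no path exists between start and goal
  }
  std::reverse(path->begin(), path->end());  // now ordered start-to-goal
  // ... step through *path here ...
  delete path;  // searchPath() allocates the vector with new
}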
bool PathFindingApp::doPathfinding(int startX, int startY, int endX, int endY) {
  // Set color for start and end pos to path color
  setPathColor(m_texturePathFound, startX, startY);
  setPathColor(m_texturePathFound, endX, endY);

  bool done = true;

  // Some variables for path finding
  OpenList openList;
  ClosedList closedList;
  SearchLevel searchLevel(m_textureStartCase);
  SearchNode *result = 0;

  // Start A* search:
  // Add start node to open list.
  Position startPosition = Position(startX, startY);
  Position endPosition = Position(endX, endY);
  SearchNode *newNode = new SearchNode(startPosition,
                                       searchLevel.getH(startPosition, endPosition), 0, 0);
  openList.insertToOpenList(newNode);

  // 1. Get the square on the open list which has the lowest score. Let's call this square S.
  while (!openList.isEmpty()) {
    SearchNode *prev = openList.removeSmallestFFromOpenList();
    if (prev->pos == endPosition) {
      // Goal found!
      result = prev;
      break;
    } else {
      // 2. Remove S from the open list and add S to the closed list.
      closedList.addToClosedList(prev);
      // 3. For each square T in S's walkable adjacent tiles:
      std::vector<Position> adjacentNodes = searchLevel.getAdjacentNodes(prev->pos.first, prev->pos.second);
      for (size_t i = 0; i < adjacentNodes.size(); ++i) {
        // If T is in the closed list: ignore it.
        if (closedList.isInClosedList(adjacentNodes[i])) {
          continue;
        }

        SearchNode *n = openList.findFromOpenList(adjacentNodes[i]);
        if (n == 0) {
          // If T is not in the open list: add it and compute its score.
          SearchNode *newNode = new SearchNode(adjacentNodes[i],
                                               searchLevel.getH(adjacentNodes[i], endPosition),
                                               searchLevel.getG(prev, adjacentNodes[i]), prev);
          openList.insertToOpenList(newNode);
        } else {
          // If T is already in the open list: check if the F score is lower
          // when we use the currently generated path to get there. If it is, update
          // its score and update its parent as well.
          SearchNode *newNode = new SearchNode(adjacentNodes[i],
                                               searchLevel.getH(adjacentNodes[i], endPosition),
                                               searchLevel.getG(prev, adjacentNodes[i]), prev);
          if (newNode->distance() < n->distance()) {
            n->resetPrev(newNode->prevNode, searchLevel.getG(newNode->prevNode, n->pos));
          }
        }
      }
    }
  }

  if (result == 0) {
    printf("Path not found!!!\n");
    return true;
  }

  while (result != 0) {
    setPathColor(m_texturePathFound, result->pos.first, result->pos.second);
    result = result->prevNode;
  }
  return true;

  // TODO: Remove that search end and delay hack as seen below..
  //static int i = 0;
  //i = ((i+1)%10); // 10*100ms = ~500 ms of total
  //Sleep(100);
  //return i==0;
}
std::vector<slm::vec2> PathFindingApp::doPathfinding(int startX, int startY, int endX, int endY) {
  bool done = true;

  // Some variables for path finding
  OpenList openList;
  ClosedList closedList;
  SearchLevel searchLevel(mapLayer);
  SearchNode *result = 0;
  std::vector<slm::vec2> mapPoints;

  // Start A* search:
  // Add start node to open list.
  Position startPosition = Position(startX, startY);
  Position endPosition = Position(endX, endY);
  SearchNode *newNode = new SearchNode(startPosition,
                                       searchLevel.getH(startPosition, endPosition), 0, 0);
  openList.insertToOpenList(newNode);

  // 1. Get the square on the open list which has the lowest score. Let's call this square S.
  while (!openList.isEmpty()) {
    SearchNode *prev = openList.removeSmallestFFromOpenList();
    if (prev->pos == endPosition) {
      // Goal found!
      result = prev;
      break;
    } else {
      // 2. Remove S from the open list and add S to the closed list.
      closedList.addToClosedList(prev);
      // 3. For each square T in S's walkable adjacent tiles:
      std::vector<Position> adjacentNodes = searchLevel.getAdjacentNodes(prev->pos.first, prev->pos.second);
      for (size_t i = 0; i < adjacentNodes.size(); ++i) {
        // If T is in the closed list: ignore it.
        if (closedList.isInClosedList(adjacentNodes[i])) {
          continue;
        }

        SearchNode *n = openList.findFromOpenList(adjacentNodes[i]);
        if (n == 0) {
          // If T is not in the open list: add it and compute its score.
          SearchNode *newNode = new SearchNode(adjacentNodes[i],
                                               searchLevel.getH(adjacentNodes[i], endPosition),
                                               searchLevel.getG(prev, adjacentNodes[i]), prev);
          openList.insertToOpenList(newNode);
        } else {
          // If T is already in the open list: check if the F score is lower
          // when we use the currently generated path to get there. If it is, update
          // its score and update its parent as well.
          SearchNode *newNode = new SearchNode(adjacentNodes[i],
                                               searchLevel.getH(adjacentNodes[i], endPosition),
                                               searchLevel.getG(prev, adjacentNodes[i]), prev);
          if (newNode->distance() < n->distance()) {
            n->resetPrev(newNode->prevNode, searchLevel.getG(newNode->prevNode, n->pos));
          }
        }
      }
    }
  }

  if (result == 0) {
    std::cout << "Path not found!!!\n";
    return mapPoints;
  }

  std::cout << "Path found!\n";
  // collect the path from the node before the goal back to the start (goal-to-start order)
  while (result != 0) {
    result = result->prevNode;
    if (result != nullptr) {
      slm::vec2 mapPoint;
      mapPoint.x = result->pos.first;
      mapPoint.y = result->pos.second;
      mapPoints.push_back(mapPoint);
    }
  }
  return mapPoints;
}
bool ParallelManager::restoreFrontier() {

  assert(!m_external.empty());

  // for output
  ostringstream ss;

  // records subproblems by (rootVar,context) and their id
  typedef hash_map<pair<int,context_t>, size_t> frontierCache;
  frontierCache subprobs;

  // filename for subproblem (=frontier)
  string subprobFile = filename(PREFIX_SUB, ".gz");

  {
    ifstream inTemp(subprobFile.c_str());
    inTemp.close();
    if (inTemp.fail()) {
      ss.str("");
      ss << "Problem reading subproblem list from " << subprobFile << '.' << endl;
      myerror(ss.str());
      return false;
    }
  }

  igzstream in(subprobFile.c_str(), ios::binary | ios::in);

  int rootVar = UNKNOWN;
  int x = UNKNOWN;
  int y = UNKNOWN;
  count_t count = NONE;
  count_t z = NONE;

  BINREAD(in, count);  // total no. of subproblems

  for (size_t id = 0; id < count; ++id) {
    BINREAD(in, z);        // id
    BINREAD(in, rootVar);  // root var
    BINREAD(in, x);        // context size
    context_t context;
    for (int i = 0; i < x; ++i) {
      BINREAD(in, y);                // read context value as int
      context.push_back((val_t) y);  // cast to val_t
    }

    BINREAD(in, x);  // PST size
    x = (x < 0) ? -2*x : 2*x;
    BINSKIP(in, double, x);  // skip PST

    // store in local subproblem table
    subprobs.insert(make_pair(make_pair(rootVar, context), id));
  }

  m_subprobCount = subprobs.size();
  ss.str("");
  ss << "Recovered " << m_subprobCount << " subproblems from file " << subprobFile << endl;
  myprint(ss.str());

  //////////////////////////////////////////////////////////////////////
  // part 2: now expand search space until stored frontier nodes found

  SearchNode* node = m_external.back();
  m_external.pop_back();
  PseudotreeNode* ptnode = NULL;
  ptnode = m_pseudotree->getNode(node->getVar());

#ifndef NO_HEURISTIC
  // precompute heuristic of initial dummy OR node (couldn't be done earlier)
  assignCostsOR(node);
#endif

  stack<SearchNode*> dfs;
  dfs.push(node);

  // intermediate vector for expanding nodes into
  vector<SearchNode*> newNodes;

  count = 0;
  // prepare m_external vector for frontier nodes
  m_external.clear();
  m_external.resize(subprobs.size(), NULL);

  while (!dfs.empty()) {  // && count != subprobs.size() ) { // TODO?
    node = dfs.top();
    dfs.pop();

    syncAssignment(node);

    x = node->getVar();
    ptnode = m_pseudotree->getNode(x);
    addSubprobContext(node, ptnode->getFullContextVec());

    // check against subproblems from saved list
    frontierCache::iterator lkup = subprobs.find(make_pair(x, node->getSubprobContext()));
    if (lkup != subprobs.end()) {
      m_external[lkup->second] = node;
      // cout << "External " << lkup->second << " = " << node << endl;
      count += 1;
      continue;
    }

    deepenFrontier(node, newNodes);
    for (vector<SearchNode*>::iterator it = newNodes.begin(); it != newNodes.end(); ++it)
      dfs.push(*it);
    newNodes.clear();

  }  // end while

  if (count != subprobs.size()) {
    ss.str("");
    ss << "Warning: only " << count << " frontier nodes." << endl;
    myprint(ss.str());
  }
  if (!dfs.empty()) {
    ss.str("");
    ss << "Warning: Stack still has " << dfs.size() << " nodes." << endl;
    myprint(ss.str());
  }

  return true;
}
void SearchTree::compile() {
  assert(!m_ready);

  // at the top level assign root as the failure node
  std::queue<SearchNode*> nodesByLevel;
  for (std::size_t i = 0; i < m_rootNode.getNextSize(); ++i) {
    SearchNode* next = m_rootNode.getNext(static_cast<const unsigned char>(i));
    if (!next) {
      m_rootNode.setNext(static_cast<const unsigned char>(i), &m_rootNode);
    } else {
      next->setFailure(&m_rootNode);
      nodesByLevel.push(next);
    }
  }

  // now loop through all levels computing failure nodes. Push more on as needed
  while (!nodesByLevel.empty()) {
    SearchNode* currentNode = nodesByLevel.front();
    for (std::size_t i = 0; i < currentNode->getNextSize(); ++i) {
      SearchNode* next = currentNode->getNext(static_cast<const unsigned char>(i));
      if (next) {
        nodesByLevel.push(next);
        next->setFailure(currentNode->getFailure()->getNext(static_cast<const unsigned char>(i)));
        if (!next->getFailure()->getStoredData().empty()) {
          next->addReturnValues(next->getFailure()->getStoredData());
        }
      } else {
        currentNode->setNext(static_cast<const unsigned char>(i),
                             currentNode->getFailure()->getNext(static_cast<const unsigned char>(i)));
      }
    }
    nodesByLevel.pop();
  }
  m_ready = true;
}
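// Independent illustration of the failure-link construction that compile() above
// performs: a minimal, self-contained array-based trie (hypothetical types, not the
// project's SearchTree/SearchNode API) where missing transitions are redirected
// through the failure node so that a subsequent scan never has to backtrack.
#include <queue>
#include <vector>

struct TrieNode {
  int next[256];   // child index per byte value, -1 if absent
  int fail;        // failure link (longest proper suffix present in the trie)
  bool terminal;   // true if some pattern ends here (directly or via failure)
  TrieNode() : fail(0), terminal(false) {
    for (int c = 0; c < 256; ++c) next[c] = -1;
  }
};

void buildFailureLinks(std::vector<TrieNode>& trie) {  // trie[0] is the root
  std::queue<int> bfs;
  for (int c = 0; c < 256; ++c) {
    int v = trie[0].next[c];
    if (v == -1) trie[0].next[c] = 0;        // absent root edges loop back to the root
    else { trie[v].fail = 0; bfs.push(v); }  // depth-1 nodes fail to the root
  }
  while (!bfs.empty()) {
    int u = bfs.front(); bfs.pop();
    for (int c = 0; c < 256; ++c) {
      int v = trie[u].next[c];
      if (v == -1) {
        // complete the transition via the failure node (same trick as compile())
        trie[u].next[c] = trie[trie[u].fail].next[c];
      } else {
        trie[v].fail = trie[trie[u].fail].next[c];
        trie[v].terminal = trie[v].terminal || trie[trie[v].fail].terminal;
        bfs.push(v);
      }
    }
  }
}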
void SearchNode::addQuery(QueryID query_id, const char* query_str, MatchType match_type,
                          unsigned int match_dist, unsigned int query_str_idx,
                          unsigned int query_word_counter) {
  // if (LOG) printf("SearchNode::%s\n",__func__);
  // if (LOG) printf("depth::%d\n",_depth);
  if (_depth == 0) {
    if (LOG) SearchTree::Instance()->print();
    for (int i = kFirstASCIIChar; i <= kLastASCIIChar; i++) {
      //printf("_child_letter %p",_child_letters[i]);
    }
  }

  if (query_str[_depth+query_str_idx]=='\0' || query_str[_depth+query_str_idx]==' ') {
    query_word_counter++;
    //we have reached the last letter - mark this node as a "terminator"
    _terminator = true;
    //now we need to record the search query index number against its search type.
    switch (match_type) {
      case 0: //exact match
        _match.exact.push_back(query_id);
        break;
      case 1: //hamming distance match
        _match.hamming[match_dist-1]->push_back(query_id);
        break;
      case 2: //edit distance match
        _match.edit[match_dist-1]->push_back(query_id);
        break;
      default:
        if (LOG) cout << "invalid match_type error\n";
        break;
    }
    _match.all.push_back(query_id);

    if (query_str[_depth+query_str_idx]==' ') {
      //we have more words to add
      query_str_idx = _depth+query_str_idx+1;
      if (query_str[query_str_idx] == '\0') {
        if (LOG) printf("warning: appear to have a nil search query word\n");
      } else {
        SearchTree* tree = SearchTree::Instance();
        tree->addQuery(query_id, query_str, match_type, match_dist, query_str_idx, query_word_counter);
      }
    } else {
      //we have added the last word, need to register the query and its word count
      //with the tree's _query_ids_map
      //ADDBACK AFTER WE FIX RECURSION BUG
      SearchTree::Instance()->addQueryToMap(query_id, query_word_counter);
    }
  } else {
    //we have not reached the end of the word, keep building...
    if (_child_letters[query_str[_depth+query_str_idx]]==0) {
      //need to create a new child
      if (LOG) printf("%d creating search node for next letter:%c\n",
                      query_id, query_str[_depth+query_str_idx]);
      SearchNode* next_letter = new SearchNode(query_id, query_str, match_type, match_dist,
                                               query_str_idx, query_word_counter, this);
      _child_letters[query_str[_depth+query_str_idx]] = next_letter;
      _child_count++;
    }
    SearchNode* node = _child_letters[query_str[_depth+query_str_idx]];
    node->addQuery(query_id, query_str, match_type, match_dist, query_str_idx, query_word_counter);
  }
}
void SearchNode::addDocumentL(DocID doc_id, const char* string, unsigned int string_idx) {
  if (string[string_idx+_depth]=='\0' || string[string_idx+_depth]==' ') {
    //we have reached a word ending - are we on a terminator node?
    if (_terminator == true) {
      //we have a match - record it...
      // if (LOG) printf("\nfound match doc %d idx %d ",doc_id, string_idx);
      // for (int i = 1; i<=_depth; i++) {
      //   if (LOG) printf("%c",getLetterFromParentForDepth(i));
      // }
      // cout << endl;
      this->reportResult(doc_id);
    }
    if (string[string_idx+_depth]==' ') {
      //we have more words to add
      string_idx = string_idx+_depth+1;
      if (string[string_idx] == '\0') {
        if (LOG) printf("warning: appear to have a nil search query word\n");
      } else {
        SearchTree* tree = SearchTree::Instance();
        tree->addDocument(doc_id, string, string_idx);
      }
    }
  } else {
    //we have not reached the end of the word...
    for (int i = kFirstASCIIChar; i <= kLastASCIIChar; i++) {
      if (_child_letters[i]) {
        SearchNode* node = _child_letters[i];
        //compare the current document word's prefix with this child's prefix
        char doc_prefix[31];
        char query_prefix[31];
        for (int j = 0; j < _depth+1; j++) {
          doc_prefix[j] = string[string_idx+j];
        }
        for (int j = 0; j < _depth+1; j++) {
          query_prefix[j] = node->getLetterFromParentForDepth(j+1);
        }
        doc_prefix[_depth+1] = '\0';
        query_prefix[_depth+1] = '\0';
        //only descend into children whose prefix is within the edit-distance bound
        if (LevenshteinDistance(doc_prefix, query_prefix, 3)) {
          node->addDocumentL(doc_id, string, string_idx);
        }
      }
    }
  }
}
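// LevenshteinDistance() is referenced above but not shown in this section. The sketch
// below is one plausible implementation, inferred from the call sites: a bounded edit
// distance that returns true when the two strings are within maxDist edits. The name
// LevenshteinWithinBound and the bool return convention are illustrative assumptions.
#include <algorithm>
#include <cstdlib>
#include <cstring>
#include <vector>

bool LevenshteinWithinBound(const char* a, const char* b, int maxDist) {
  const int lenA = static_cast<int>(std::strlen(a));
  const int lenB = static_cast<int>(std::strlen(b));
  if (std::abs(lenA - lenB) > maxDist) return false;  // length gap alone exceeds the bound

  // classic dynamic-programming recurrence, kept to two rows
  std::vector<int> prev(lenB + 1), curr(lenB + 1);
  for (int j = 0; j <= lenB; ++j) prev[j] = j;

  for (int i = 1; i <= lenA; ++i) {
    curr[0] = i;
    int rowMin = curr[0];
    for (int j = 1; j <= lenB; ++j) {
      const int cost = (a[i-1] == b[j-1]) ? 0 : 1;
      curr[j] = std::min(std::min(prev[j] + 1,     // deletion
                                  curr[j-1] + 1),  // insertion
                         prev[j-1] + cost);        // substitution
      rowMin = std::min(rowMin, curr[j]);
    }
    if (rowMin > maxDist) return false;  // every cell already exceeds the bound
    prev.swap(curr);
  }
  return prev[lenB] <= maxDist;
}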