void OCME::GetCellsAttributes(std::vector<Cell*> cells, AttributeMapper & am){
    std::map<std::string, ChainBase * >::iterator ai;
    std::vector<Cell*>::iterator ci;
    for(ci = cells.begin(); ci != cells.end(); ++ci){
        for( ai = (*ci)->perVertex_attributes.begin(); ai != (*ci)->perVertex_attributes.end(); ++ai)
            am.vert_attrs.push_back( (*ai).first);
        for( ai = (*ci)->perFace_attributes.begin(); ai != (*ci)->perFace_attributes.end(); ++ai)
            am.face_attrs.push_back( (*ai).first);
    }
    RemoveDuplicates(am.vert_attrs);
    RemoveDuplicates(am.face_attrs);
}
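// The RemoveDuplicates() helper used above is not part of this snippet; a minimal
// sketch of a generic version over std::vector, assuming the surviving attribute
// names do not need to keep their original order (sort + unique):
#include <algorithm>
#include <vector>

template <class T>
void RemoveDuplicates(std::vector<T> & v){
    std::sort(v.begin(), v.end());                      // group equal elements together
    v.erase(std::unique(v.begin(), v.end()), v.end());  // drop adjacent duplicates
}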
#include <cassert>

// RemoveDuplicates(int*, int) is assumed to be declared elsewhere.
int main() {
    int a[] = {1, 1, 2};
    assert(RemoveDuplicates(a, 0) == 0);
    assert(RemoveDuplicates(a, 3) == 2);
    for (int i = 0; i < 2; i++)
        assert(a[i] == i + 1);

    int b[] = {1, 1, 2, 3, 3, 3};
    assert(RemoveDuplicates(b, 6) == 3);
    for (int i = 0; i < 3; i++)
        assert(b[i] == i + 1);
    return 0;
}
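// The tests above assume an in-place RemoveDuplicates over a sorted array that
// returns the new length; a minimal sketch under that assumption:
int RemoveDuplicates(int a[], int n) {
    if (n == 0)
        return 0;
    int k = 1;                   // a[0..k-1] holds the deduplicated prefix
    for (int i = 1; i < n; i++) {
        if (a[i] != a[k - 1])
            a[k++] = a[i];       // keep only the first element of each run
    }
    return k;
}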
int main() {
    int i;
    Node *head = NULL;
    Node *list1 = NULL;
    for (i = 0; i < 5; i++) {
        head = InsertHead(head, i);
    }
    Print(head);

    Node *list2 = NULL;
    list1 = InsertHead(list1, 6);
    list1 = InsertHead(list1, 5);
    list1 = InsertHead(list1, 5);
    list1 = InsertHead(list1, 1);
    list2 = InsertHead(list2, 7);
    list2 = InsertHead(list2, 4);
    list2 = InsertHead(list2, 2);
    Print(list1);
    Print(list2);

    /* Node *newList = MergeLists(list1, list2); */
    /* Print(newList); */
    Node *newList = RemoveDuplicates(list1);
    Print(newList);
    return 0;
}
int BXPictureDatabase::GetPictures(std::vector<BXMetadata*> &vecMediaFiles, const std::vector<std::string>& vecPathFilter, int iOrder, int iItemLimit)
{
    std::vector<BXMetadata*> vecPictureFiles;
    std::vector<BXMetadata*> vecPictureFolders;

    int iResult = MEDIA_DATABASE_OK;

    // Fetch twice as many picture files and twice as many picture folders
    iResult = GetPictureFiles(vecPictureFiles, vecPathFilter, iOrder, iItemLimit * 2);
    if (iResult == MEDIA_DATABASE_ERROR)
    {
        LOG(LOG_LEVEL_ERROR, "Could not retrieve picture files");
        BXUtils::FreeMetaDataVec(vecPictureFiles);
        BXUtils::FreeMetaDataVec(vecPictureFolders);
        return iResult;
    }

    iResult = GetPictureFolders(vecPictureFolders, vecPathFilter, iOrder, iItemLimit * 2);
    if (iResult == MEDIA_DATABASE_ERROR)
    {
        LOG(LOG_LEVEL_ERROR, "Could not retrieve picture folders");
        BXUtils::FreeMetaDataVec(vecPictureFiles);
        BXUtils::FreeMetaDataVec(vecPictureFolders);
        return iResult;
    }

    RemoveDuplicates(vecPictureFiles, vecPictureFolders);

    // Combine the two lists into one, ordered by iDateAdded
    BXUtils::MergeByDateModified(vecPictureFiles, vecPictureFolders, vecMediaFiles, iItemLimit);

    BXUtils::FreeMetaDataVec(vecPictureFiles);
    BXUtils::FreeMetaDataVec(vecPictureFolders);

    return iResult;
}
// ----------------------------------------------------------------------------
// CSIPClientResolver::ListImplementationsL
// ----------------------------------------------------------------------------
//
void CSIPClientResolver::ListImplementationsL()
{
    // Create a temporary array to avoid destroying
    // an existing registry if we run out of memory
    RPointerArray< CSIPClientData >* tmpRegistry =
        new( ELeave ) RPointerArray< CSIPClientData >;
    CleanupStack::PushL( TCleanupItem( ResetAndDestroy, tmpRegistry ) );

    // ROM clients
    TEComResolverParams romResolverParams;
    RImplInfoPtrArray romClients;
    REComSession::ListImplementationsL( KSIPResolvedClientIFUid,
                                        romResolverParams,
                                        KRomOnlyResolverUid,
                                        romClients );
    CleanupResetAndDestroyPushL( romClients );
    ConvertClientDataL( romClients, *tmpRegistry, ETrue );
    SIP_CR_INT_LOG("ROM-based plug-ins count", romClients.Count())

    // RAM clients
    RImplInfoPtrArray allClients;
    REComSession::ListImplementationsL( KSIPResolvedClientIFUid, allClients );
    CleanupResetAndDestroyPushL( allClients );
    SIP_CR_INT_LOG("All plug-ins count", allClients.Count())
    RemoveDuplicates( romClients, allClients );
    ConvertClientDataL( allClients, *tmpRegistry, EFalse );

    CleanupStack::PopAndDestroy( 1 ); // allClients
    CleanupStack::PopAndDestroy( 1 ); // romClients

    RemoveRegistry();
    delete iRegistry;
    iRegistry = tmpRegistry;
    CleanupStack::Pop( 1 ); // tmpRegistry
}
// Replaces existing chromosomes with new ones based on passed parameters and selection results.
void GaReplaceParents::operator ()(GaPopulation& population,
                                   const GaReplacementParams& parameters,
                                   const GaCouplingResultSet& newChromosomes) const
{
    int size = min( parameters.GetReplacementSize(), newChromosomes.GetNumberOfOffsprings() );

    RemoveDuplicates( newChromosomes );

    population.ReplaceGroup( newChromosomes.GetParentsBuffer(), newChromosomes.GetOffspringsBuffer(), size );
}
/*
 * WarnningDialog prints the warning dialog to the command line and to the output file.
 * It also removes duplicated vessels from the friend_in_danger list, so duplicate warning dialogs are avoided.
 *
 * Accepts:
 * -------
 * friend_in_danger - list of friends in danger, to warn them.
 * output_file - name of the output file to write to.
 */
void WarnningDialog(Vessel* friend_in_danger, const char* output_file)
{
    while (friend_in_danger != NULL)
    {
        RemoveDuplicates(&friend_in_danger, friend_in_danger->next);
        printf("Ship %s is in danger \n", friend_in_danger->name);
        WriteWarningToFile(friend_in_danger, output_file);
        friend_in_danger = friend_in_danger->next;
    }
}
void OCME::ComputeDependentCells( const std::vector<Cell*> & kernel_set, std::vector<CellKey> & dep_cells){
    std::vector<Cell*>::const_iterator ci;
    std::set<CellKey>::iterator cki;
    for(ci = kernel_set.begin(); ci != kernel_set.end(); ++ci)
        for(cki = (*ci)->dependence_set.begin(); cki != (*ci)->dependence_set.end(); ++cki)
            dep_cells.push_back(*cki);

    /* eliminate duplicates */
    RemoveDuplicates(dep_cells);
}
// Removes adjacent duplicate nodes (duplicates are assumed to be adjacent,
// e.g. in a sorted list) and returns the head of the cleaned list.
Node* RemoveDuplicates(Node *head)
{
    if (!head) {
        return NULL;
    }
    if (!head->next) {
        return head;
    }
    if (head->data == head->next->data) {
        // Unlink and free the duplicate, then re-check the same node
        Node *temp = head->next;
        head->next = head->next->next;
        delete temp;
        RemoveDuplicates(head);
    } else {
        RemoveDuplicates(head->next);
    }
    return head;
}
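// For long lists the recursion above can get deep; an equivalent iterative sketch
// (same Node layout assumed, duplicates adjacent as in a sorted list):
Node* RemoveDuplicatesIterative(Node *head)
{
    Node *cur = head;
    while (cur && cur->next) {
        if (cur->data == cur->next->data) {
            Node *temp = cur->next;
            cur->next = temp->next;   // unlink the duplicate
            delete temp;
        } else {
            cur = cur->next;          // advance only when nothing was removed
        }
    }
    return head;
}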
void OCME::ComputeDependentCells( const std::vector<Cell*> & cells, std::vector<Cell*> & dep_cells){
    std::vector<Cell*>::const_iterator ci;
    std::set<CellKey>::iterator cki;
    for(ci = cells.begin(); ci != cells.end(); ++ci)
        for(cki = (*ci)->dependence_set.begin(); cki != (*ci)->dependence_set.end(); ++cki){
            Cell * c = GetCell(*cki,false);
            if(c) // TODO: better definition of dependence_set and when it is updated
                dep_cells.push_back(c);
        }

    /* eliminate duplicates */
    RemoveDuplicates(dep_cells);
}
#include <iostream>

int main() {
    int a[] = {1, 1, 1, 2, 2, 3};
    int n = sizeof(a) / sizeof(int);

    std::cout << "input: ";
    for (int i = 0; i < n; i++)
        std::cout << a[i] << " ";
    std::cout << std::endl;

    n = RemoveDuplicates(a, n);

    std::cout << "output: ";
    for (int i = 0; i < n; i++)
        std::cout << a[i] << " ";
    std::cout << std::endl;
    return 0;
}
// Replaces existing chromosomes with new ones based on passed parameters and selection results.
void GaReplaceWorst::operator ()(GaPopulation& population,
                                 const GaReplacementParams& parameters,
                                 const GaCouplingResultSet& newChromosomes) const
{
    int maxSize = min( parameters.GetReplacementSize(), newChromosomes.GetNumberOfOffsprings() );

    // get worst chromosomes
    int* old = new int[ maxSize ];
    int size = population.GetWorsChromosomes( old, 0, maxSize );

    // replace them
    RemoveDuplicates( newChromosomes );
    population.ReplaceGroup( old, newChromosomes.GetOffspringsBuffer(), size );

    delete[] old;
}
int CTGitPathList::FillBasedOnIndexFlags(unsigned short flag, CTGitPathList* list /*nullptr*/)
{
    Clear();
    CTGitPath path;

    CAutoRepository repository(g_Git.GetGitRepository());
    if (!repository)
        return -1;

    CAutoIndex index;
    if (git_repository_index(index.GetPointer(), repository))
        return -1;

    int count;
    if (list == nullptr)
        count = 1;
    else
        count = list->GetCount();
    for (int j = 0; j < count; ++j)
    {
        for (size_t i = 0, ecount = git_index_entrycount(index); i < ecount; ++i)
        {
            const git_index_entry *e = git_index_get_byindex(index, i);

            if (!e || !((e->flags | e->flags_extended) & flag) || !e->path)
                continue;

            CString one = CUnicodeUtils::GetUnicode(e->path);

            if (!(!list || (*list)[j].GetWinPathString().IsEmpty() || one == (*list)[j].GetGitPathString() || (PathIsDirectory(g_Git.CombinePath((*list)[j].GetWinPathString())) && one.Find((*list)[j].GetGitPathString() + _T("/")) == 0)))
                continue;

            // SetFromGit will clear all status
            path.SetFromGit(one);
            if ((e->flags | e->flags_extended) & GIT_IDXENTRY_SKIP_WORKTREE)
                path.m_Action = CTGitPath::LOGACTIONS_SKIPWORKTREE;
            else if ((e->flags | e->flags_extended) & GIT_IDXENTRY_VALID)
                path.m_Action = CTGitPath::LOGACTIONS_ASSUMEVALID;

            AddPath(path);
        }
    }
    RemoveDuplicates();
    return 0;
}
// Slightly better approach - using binary search
// Time complexity: O(n * log n) - one loop and nested binary search
std::vector<std::pair<int, int>> FindTwoElementsBS(
    const std::vector<int>& data, const int& value) {
  std::vector<int> srtData = data;
  std::sort(srtData.begin(), srtData.end());

  std::vector<std::pair<int, int>> result;
  for (size_t i = 0; i < srtData.size(); ++i) {
    size_t j = 0;
    BinarySearch::Solution solution;
    if (solution.Search(srtData, srtData.size(), value - srtData[i], j) &&
        i != j) {
      result.push_back(std::pair<int, int>(srtData[i], srtData[j]));
    }
  }

  return RemoveDuplicates(result);
}
// Optimal solution - using hash table
// Time complexity: O(n) - one loop, access to hash element O(1)
std::vector<std::pair<int, int>> FindTwoElementsHash(
    const std::vector<int>& data, const int& value) {
  std::unordered_map<int, size_t> umap;
  std::vector<std::pair<int, int>> result;

  for (size_t i = 0; i < data.size(); ++i) {
    if (0 < umap.count(value - data[i])) {
      size_t j = umap[value - data[i]];
      result.push_back(std::pair<int, int>(data[i], data[j]));
    } else {
      umap[data[i]] = i;
    }
  }

  return RemoveDuplicates(result);
}
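// RemoveDuplicates(result) is not shown in these snippets; a minimal sketch,
// assuming a pair and its reversed form count as the same match, so (a, b) and
// (b, a) collapse into one entry:
#include <algorithm>
#include <utility>
#include <vector>

std::vector<std::pair<int, int>> RemoveDuplicates(
    std::vector<std::pair<int, int>> pairs) {
  for (auto& p : pairs) {
    if (p.first > p.second)
      std::swap(p.first, p.second);  // normalize pair order
  }
  std::sort(pairs.begin(), pairs.end());
  pairs.erase(std::unique(pairs.begin(), pairs.end()), pairs.end());
  return pairs;
}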
int main()
{
    printf("Input values in the form of positive integers separated by spaces (\"1 2 3\" is proper input, \"1,2,3\" is not): ");
    int holding;
    cnode *head = malloc(sizeof(cnode));
    scanf("%d", &holding);
    head->info = holding;
    head->next = NULL;
    head->previous = NULL;

    int input = 1;
    // repurposed code from recitation 1
    char junk = ' ';
    junk = getchar();
    if (junk != ' ')
        input = 0;
    while (input == 1)
    {
        if (scanf("%d", &holding) > 0)
            head = inserthead(head, holding);
        junk = getchar();
        if (junk != ' ')
            input = 0;
    }
    printf("All numbers inserted at the head.\n\n");

    printf("Doubly linked list: ");
    cnode *ptr = head;
    while (ptr->next != NULL)
    {
        printf("%d, ", ptr->info);
        ptr = ptr->next;
    }
    printf("%d\n\n", ptr->info);

    printf("To singly linked list: ");
    node *singleHead = CopytoSinglyLinked(head);
    node *point = singleHead;
    while (point->next != NULL)
    {
        printf("%d, ", point->info);
        point = point->next;
    }
    printf("%d\n\n", point->info);

    printf("Input a node value to return the previous of (0 is head, this may require some counting!): ");
    int go;
    scanf("%d", &go);
    if (singleHead->next != NULL && go != 0) // prevents a null pointer from being passed to Previous()
    {
        point = singleHead;
        int x;
        for (x = 0; x < go; x++)
            if (point->next != NULL)
                point = point->next;
            else
            {
                printf("Input out of bounds\n\n");
                point = singleHead; // prevents the if statement below from being called
                break;
            }
        if (Previous(singleHead, point) != NULL)
            printf("\nNode given: %d, Previous node: %d\n\n", point->info, Previous(singleHead, point)->info);
    }
    else
        printf("NULL, no previous for value given\n\n");

    printf("Printed in reverse: ");
    PrintReverse(singleHead);

    printf("\nList without duplicates: ");
    singleHead = RemoveDuplicates(singleHead);
    point = singleHead;
    while (point->next != NULL)
    {
        printf("%d, ", point->info);
        point = point->next;
    }
    printf("%d\n", point->info);
    return 0;
}
int CSuggestor::Suggest(const CFSWString &szWord, bool bStartSentence){
    m_TimeStart=CFSTime::Now();
    m_Items.Cleanup();
    m_Cap.SetCap(szWord);
    if (bStartSentence && m_Cap.GetCapMode()==CFSStrCap<CFSWString>::CAP_LOWER) {
        m_Cap.SetCapMode(CFSStrCap<CFSWString>::CAP_INITIAL);
    }
    CFSWString szWordHigh=szWord.ToUpper();
    INTPTR ipWordLength=szWordHigh.GetLength();
    CFSWString szTemp;
    INTPTR i, j;

    long lLevel=100;
    SetLevel(lLevel);

    // Case problems & change list
    i=SpellWord(szWordHigh, szTemp, &lLevel);
    if ((i==SPL_NOERROR || i==SPL_CHANGEONCE) && !szTemp.IsEmpty()){
        SetLevel(GetLevelGroup(lLevel));
        m_Items.AddItem(CSuggestorItem(szTemp, lLevel));
    }
    else SetLevel(5);

    // Abbreviations
    // !!! Unimplemented

    // Quotes
/*  if (ipWordLength>=2 && (szAllQuot.Find(szWordHigh[0])>=0 || szAllQuot.Find(szWordHigh[ipWordLength-1])>=0)) {
        szTemp=szWordHigh;
        int iPos;
        if (szAllQuot.Find(szTemp[0])>=0){
            if (szQuotLeft.Find(szTemp[0])>=0) { }
            else if ((iPos=szQuotRight.Find(szTemp[0]))>=0) { szTemp[0]=szQuotLeft[iPos]; }
            else if (szDQuotLeft.Find(szTemp[0])>=0) { }
            else if ((iPos=szDQuotRight.Find(szTemp[0]))>=0) { szTemp[0]=szDQuotLeft[iPos]; }
            if (szAllQuot.Find(szTemp[ipWordLength-1])>=0) {
                szTemp[ipWordLength-1]=(szQuotRight+szDQuotRight)[(szQuotLeft+szDQuotLeft).Find(szTemp[0])];
            }
            else{
                if (szQuotRight.Find(szTemp[ipWordLength-1])>=0) { }
                else if ((iPos=szQuotLeft.Find(szTemp[ipWordLength-1]))>=0) { szTemp[ipWordLength-1]=szQuotRight[iPos]; }
                else if (szDQuotRight.Find(szTemp[ipWordLength-1])>=0) { }
                else if ((iPos=szDQuotLeft.Find(szTemp[ipWordLength-1]))>=0) { szTemp[ipWordLength-1]=szDQuotRight[iPos]; }
            }
            CheckAndAdd(szTemp);
        }*/

    // Add space
    for (i=1; i<ipWordLength-1; i++){
        static CFSWString szPunktuation=FSWSTR(".:,;!?");
        if (szPunktuation.Find(szWord[i])>=0){
            long lLevel1, lLevel2;
            CFSWString szTemp1, szTemp2;
            if (SpellWord(szWord.Left(i+1), szTemp1, &lLevel1)==SPL_NOERROR &&
                SpellWord(szWord.Mid(i+1), szTemp2, &lLevel2)==SPL_NOERROR)
            {
                m_Items.AddItem(CSuggestorItem(szWord.Left(i+1)+L' '+szWord.Mid(i+1), FSMAX(lLevel1, lLevel2)));
            }
        }
    }

    // Delete following blocks: le[nnu][nnu]jaam
    for (i=2; i<=3; i++){
        for (j=0; j<ipWordLength-i-i; j++){
            if (memcmp((const FSWCHAR *)szWordHigh+j, (const FSWCHAR *)szWordHigh+j+i, i*sizeof(FSWCHAR))==0){
                szTemp=szWordHigh.Left(j)+szWordHigh.Mid(j+i);
                CheckAndAdd(szTemp);
            }
        }
    }

    // Change following letters: abb -> aab & aab -> abb
    for (i=1; i<ipWordLength-1; i++){
        if (szWordHigh[i]==szWordHigh[i+1]){
            szTemp=szWordHigh;
            szTemp[i]=szTemp[i-1];
            if (FSIsLetterEst(szTemp[i])) CheckAndAdd(szTemp);
        }
        else if (szWordHigh[i]==szWordHigh[i-1]){
            szTemp=szWordHigh;
            szTemp[i]=szTemp[i+1];
            if (FSIsLetterEst(szTemp[i])) CheckAndAdd(szTemp);
        }
    }

    // Exchange letters: van[na]ema -> van[an]ema
    szTemp=szWordHigh;
    for (i=1; i<ipWordLength; i++){
        if (szTemp[i]!=szTemp[i-1]){
            FSWCHAR ch=szTemp[i];
            szTemp[i]=szTemp[i-1];
            szTemp[i-1]=ch;
            CheckAndAdd(szTemp);
            szTemp[i-1]=szTemp[i];
            szTemp[i]=ch;
        }
    }

    // Change blocks
    for (i=0; i<ipWordLength; i++){
        for (j=0; j<(INTPTR)(sizeof(ChangeStrings)/sizeof(__CChangeStrings)); j++){
            if (szWordHigh.ContainsAt(i, ChangeStrings[j].m_lpszFrom)){
                szTemp=szWordHigh.Left(i)+ChangeStrings[j].m_lpszTo+szWordHigh.Mid(i+FSStrLen(ChangeStrings[j].m_lpszFrom));
                CheckAndAdd(szTemp);
            }
        }
    }

    // Change end blocks
    for (i=0; i<(INTPTR)(sizeof(ChangeStringsEnd)/sizeof(__CChangeStrings)); i++){
        if (szWordHigh.EndsWith(ChangeStringsEnd[i].m_lpszFrom)){
            szTemp=szWordHigh.Left(ipWordLength-FSStrLen(ChangeStringsEnd[i].m_lpszFrom))+ChangeStringsEnd[i].m_lpszTo;
            CheckAndAdd(szTemp);
        }
    }

    // Po~o~sas
    MultiReplace(szWordHigh, 0);

    // gi/ki: Kylli[gi]le -> Kyllile[gi]
    for (i=3; i<=6; i++){
        if (i>ipWordLength) break;
        if (memcmp((const FSWCHAR *)szWordHigh+ipWordLength-i, FSWSTR("GI"), 2*sizeof(FSWCHAR))==0){
            szTemp=szWordHigh.Left(ipWordLength-i)+szWordHigh.Mid(ipWordLength-i+2)+FSWSTR("GI");
            CheckAndAdd(szTemp);
            szTemp=szWordHigh.Left(ipWordLength-i)+szWordHigh.Mid(ipWordLength-i+2)+FSWSTR("KI");
            CheckAndAdd(szTemp);
        }
    }

    // Delete letters: van[n]aema -> vanaema
    szTemp=szWordHigh.Mid(1);
    CheckAndAdd(szTemp);
    for (i=0; i<ipWordLength-1; i++){
        if (szTemp[i]!=szWordHigh[i]){
            szTemp[i]=szWordHigh[i];
            CheckAndAdd(szTemp);
        }
    }

    // Change letters from list
    for (i=0; i<ipWordLength; i++){
        const FSWCHAR *lpszTo=__SuggestChangeLetters(szWordHigh[i]);
        if (!lpszTo) continue;
        szTemp=szWordHigh;
        for (; lpszTo[0]; lpszTo++){
            szTemp[i]=lpszTo[0];
            CheckAndAdd(szTemp);
        }
    }

    // Insert letters to word body
    for (i=1; i<ipWordLength; i++){
        szTemp=szWordHigh.Left(i)+FSWSTR(' ')+szWordHigh.Mid(i);
        for (j=0; szInsertLetters[j]; j++){
            szTemp[i]=szInsertLetters[j];
            CheckAndAdd(szTemp);
        }
    }

    // Insert letters to the beginning
    szTemp=CFSWString(FSWSTR(" "))+szWordHigh;
    for (i=0; szInsertLettersBeg[i]; i++){
        if (szTemp[1]==szInsertLettersBeg[i]) continue;
        szTemp[0]=szInsertLettersBeg[i];
        CheckAndAdd(szTemp);
    }

    // Try apostrophe for names
    if (szWord[0]!=szWordHigh[0] && szWordHigh.Find('\'')<0){
        for (i=0; i<5; i++){
            if (i>=ipWordLength) break;
            szTemp=szWordHigh.Left(ipWordLength-i)+L'\''+szWordHigh.Mid(ipWordLength-i);
            CheckAndAdd(szTemp);
        }
    }

    Order();
    RemoveImmoderate();
    RemoveDuplicates();
    return 0;
}
// Replaces existing chromosomes with new ones based on passed parameters and selection results.
void GaReplaceRandom::operator ()(GaPopulation& population,
                                  const GaReplacementParams& parameters,
                                  const GaCouplingResultSet& newChromosomes) const
{
    int size = min( parameters.GetReplacementSize(), newChromosomes.GetNumberOfOffsprings() );
    int elitism = ( (const GaReplaceElitismParams&) parameters ).GetElitism();
    int populationSize = population.GetCurrentSize();
    bool sorted = population.GetConfiguration().GetParameters().GetSorting();

    // trying to save all chromosomes?
    if( elitism >= populationSize )
        return;

    // adjust replacement size to fit the elitism constraint
    if( size > populationSize - elitism )
        size = populationSize - elitism;

    int* old = new int[ size ];
    for( int i = 0; i < size; i++ )
    {
        int index;
        volatile bool duplicate = false;
        do
        {
            if( !sorted )
            {
                int ranking;

                // select a chromosome to be replaced that fits the elitism constraint
                do
                {
                    index = GaGlobalRandomIntegerGenerator->Generate( populationSize - 1 );
                    ranking = population.GetChromosomeRanking( index );
                } while( ranking >= 0 && ranking < elitism );
            }
            else
                // select a chromosome to be replaced that fits the elitism constraint
                index = GaGlobalRandomIntegerGenerator->Generate( elitism, populationSize - 1 );

            // is it already in the replacement group?
            for( int j = 0; j < i; j++ )
            {
                duplicate = old[ j ] == index;
                if( duplicate )
                    break;
            }
        } while( duplicate );

        // insert into the replacement group
        old[ i ] = index;
    }

    // replace
    RemoveDuplicates( newChromosomes );
    population.ReplaceGroup( old, newChromosomes.GetOffspringsBuffer(), size );

    delete[] old;
}
//=========================================
int FindPath(vec3_t start, vec3_t destination)
{
    node_t *StartNode;
    node_t *BestNode;
    node_t *tNode;
    int NodeNumD;
    int NodeNumS;
    int g,c,i;
    float h;
    vec3_t tstart,tdest;

    VectorCopy(start,tstart);
    VectorCopy(destination,tdest);

    // Get NodeNum of start vector
    NodeNumS=GetNodeNum(tstart);
    if (NodeNumS==-1)
    {
        //gi.dprintf("bad nodenum at start\n");
        return 0; // ERROR
    }

    // Get NodeNum of destination vector
    NodeNumD=GetNodeNum(tdest);
    if (NodeNumD==-1)
    {
        // gi.dprintf("bad nondenum at end\n");
        return 0; // ERROR
    }

    // Allocate OPEN/CLOSED list pointers..
    OPEN=(node_t *)V_Malloc(sizeof(node_t), TAG_LEVEL);
    // OPEN=(node_t *)malloc(sizeof(node_t));
    OPEN->NextNode=NULL;
    CLOSED=(node_t *)V_Malloc(sizeof(node_t), TAG_LEVEL);
    //CLOSED=(node_t *)malloc(sizeof(node_t));
    CLOSED->NextNode=NULL;

    //================================================
    // This is our very first NODE! Our start vector
    //================================================
    StartNode=(node_t *)V_Malloc(sizeof(node_t), TAG_LEVEL);
    //StartNode=(node_t *)malloc(sizeof(node_t));
    StartNode->nodenum=NodeNumS; // starting position nodenum
    StartNode->g=g=0; // we haven't gone anywhere yet
    StartNode->h=h=distance(start, destination); //fabs(vDiff(start,destination)); // calculate remaining distance (heuristic estimate) GHz - changed to fabs()
    StartNode->f=g+h; // total cost from start to finish
    for (c=0;c < NUMCHILDS;c++)
        StartNode->Child[c]=NULL; // no children for search pattern yet
    StartNode->NextNode=NULL;
    StartNode->PrevNode=NULL;
    //================================================

    // next node in open list points to our starting node
    OPEN->NextNode=BestNode=StartNode; // First node on OPEN list..

    //GHz - need to free these nodes too!
    //NodeList[NodeCount++] = OPEN;
    // NodeList[NodeCount++] = CLOSED;
    NodeCount+=2;

    for (;;)
    {
        tNode=BestNode; // Save last valid node
        BestNode=(node_t *)NextBestNode(NodeNumS, NodeNumD); // Get next node from OPEN list
        if (!BestNode)
        {
            //gi.dprintf("ran out of nodes to search\n");
            return 0;//GHz
            // BestNode=tNode; // Last valid node..
            // break;
        }
        if (BestNode->nodenum==NodeNumD)
            break; // we there yet?
        ComputeSuccessors(BestNode,NodeNumD); // Search from here..
    }
    //================================================

    RemoveDuplicates(BestNode, CLOSED); //FIXME: move this up before the start==end crash check
    // gi.dprintf("%d: processed %d nodes\n", level.framenum,NodeCount);

    if (BestNode==StartNode)
    {   // Start==End??
        FreeStack(StartNode); //FIXME: may cause crash
        //gi.dprintf("start==end\n");
        return 0;
    }

    //gi.dprintf("Start = %d End = %d\n", NodeNumS, NodeNumD);
    // gi.dprintf("Printing tNode (in reverse):\n");
    // PrintNodes(BestNode, true);
    // gi.dprintf("Printing OPEN list:\n");
    //PrintNodes(OPEN, false);
    //gi.dprintf("Printing CLOSED list:\n");
    // PrintNodes(CLOSED, false);

    BestNode->NextNode=NULL; // Must tie this off!

    // How many nodes we got?
    tNode=BestNode;
    i=0;
    while (tNode)
    {
        i++; // How many nodes?
        tNode=tNode->PrevNode;
    }

    if (i <= 2)
    {   // Only nodes are Start and End??
        FreeStack(BestNode); //FIXME: may cause crash
        //gi.dprintf("only start and end nodes\n");
        return 0;
    }

    // Let's allocate our own stuff...
    //CLOSED->NextNode = NULL;//GHz - only needs to be null if we are using freestack()
    numpts=i;

    //GHz - free old memory
    //V_Free(Waypoint);
    Waypoint=(int *)V_Malloc(numpts*sizeof(int), TAG_LEVEL);
    //Waypoint=(int *)malloc(numpts*sizeof(int));

    // Now, we have to assign the nodenum's along
    // this path in reverse order because that is
    // the way the A* algorithm finishes its search.
    // The last best node it visited was the END!
    // So, we copy them over in reverse.. No biggy..
    tNode=BestNode;
    while (BestNode)
    {
        Waypoint[--i]=BestNode->nodenum; //GHz: how/when is this freed?
        BestNode=BestNode->PrevNode;
    }

    // NOTE: At this point, if our numpts returned is not
    // zero, then a path has been found! To follow this
    // path we simply follow node[Waypoint[i]].origin
    // because Waypoint array is filled with indexes into
    // our node[i] array of valid vectors in the map..

    // We did it!! Now free the stack and exit..
    //================================================
    //++++++++++ GHz NOTES +++++++++++++
    // FreeStack() is flawed because the lists have nodes that point to nodes on other lists
    // so if you free one list, then the next list will crash when it encounters a node with
    // an invalid pointer (node was freed in last list)
    //++++++++++++++++++++++++++++++++++
    FreeStack(tNode); // Release ALL resources!!

    //GHz: cleanup test/debugging
    //for (i=0;i<NodeCount;i++)
    //{
    //    V_Free(NodeList[i]);
    //}
    //OPEN = NULL;
    //CLOSED = NULL;
    NodeCount = 0;

    //TODO: performance... cpu usage is still very high
    //TODO: grid editor, save grid to disk
    //TODO: need some way of handling manually edited grid
    //      because NextNode() only searches within a specific 32x32 pattern

    // gi.dprintf("%d: found %d\n",level.framenum,numpts);
    return (numpts);
}