/*
 * Returns true when the 4-byte timestamp carried at the head of `data` has
 * already been seen for the frame's original source short address (i.e. the
 * frame is a duplicate), false otherwise or when the payload is too short
 * to carry a timestamp at all.
 *
 * BUG FIX: the first memcpy argument had been corrupted to "×tamp" --
 * HTML-entity mojibake of "&timestamp" ("&times;" rendered as "×") -- which
 * does not compile. Restored the address-of expression.
 */
static bool isDuplicate(MacInfo_t *macInfo, uint8_t *data, uint16_t len) {
    uint32_t timestamp;

    /* Payload shorter than a timestamp: nothing to compare against. */
    if (len < sizeof(timestamp)) {
        return false;
    }
    /* memcpy (rather than a cast + dereference) avoids unaligned access
     * on the raw byte buffer. */
    memcpy(&timestamp, data, sizeof(timestamp));
    return findDuplicate(macInfo->originalSrc.shortAddr, timestamp);
}
/* Driver: hands four "directory file(content)" listings to findDuplicate()
 * and prints the size of the result set it reports. */
int main() {
    char *listings[4] = {
        "root/a 1.txt(abcd) 2.txt(efgh)",
        "root/c 3.txt(abcd)",
        "root/c/d 4.txt(efgh)",
        "root 4.txt(efgh)"
    };
    int *columnSizes;  /* filled in by findDuplicate */
    int returnSize;    /* number of duplicate groups reported */

    findDuplicate(listings, 4, &columnSizes, &returnSize);
    printf("rsz=%d\n", returnSize);
}
/* Adds the event to the year-level set.
 * Returns true on success, false when an equivalent event is already stored
 * (in which case the set is left untouched). */
bool Event_set::insertToYear(Event *e) {
    const bool alreadyStored = findDuplicate(e);
    if (!alreadyStored) {
        year_set.insert(e);
    }
    return !alreadyStored;
}
// NOTE(review): this definition is truncated in this chunk -- the trailing
// "else {" branch and the close of the function continue beyond the visible
// text, so only the AutoResolve path can be documented here.
// Handles discovery of a new remote service: if the service is not already
// tracked (findDuplicate returns d->m_services.end()), and the AutoResolve
// flag is set, wire the service's resolved(bool) signal to the
// serviceResolved(bool) slot, remember it in d->m_duringResolve, and start
// an asynchronous resolve.
void ServiceBrowser::gotNewService(RemoteService::Ptr svr) { if (findDuplicate(svr)==(d->m_services.end())) { if (d->m_flags & AutoResolve) { connect(svr,SIGNAL(resolved(bool )),this,SLOT(serviceResolved(bool ))); d->m_duringResolve+=svr; svr->resolveAsync(); } else {
void FindDuplicate::TestClass() { int Array[7]={1,3,5,2,6,3,7}; vector<int> nums(Array,Array+7); //FastSort(Array,7,0,6); cout<<findDuplicate(nums)<<endl; }
/* Replaces current_e with new_e: removes current_e from the year, month and
 * day sets, then inserts new_e.
 * Returns TRUE when an event matching new_e (same start time / title)
 * already exists -- in that case nothing is modified; returns FALSE after a
 * successful edit.
 * NOTE(review): the previous header comment claimed the opposite polarity
 * ("returns false if object already exists"); the code clearly returns true
 * on duplicate, so the comment has been corrected to match the code. */
bool Event_set::editEvent(Event * current_e, Event * new_e) { /* if same start time / title already exists */ if (findDuplicate(new_e)) return true; deleteEvent(current_e); insertEvent(new_e); return false; }
// Driver: builds a 10-element test vector, echoes it tab-separated, then
// reports the duplicate value located by findDuplicate().
int main() {
    std::vector<int> testVector = generateTestVector(10);

    // Range-for fixes the signed/unsigned mismatch of the original indexed
    // loop (int x compared against size_t testVector.size()).
    for (const int value : testVector) {
        std::cout << value << "\t";
    }
    std::cout << "\n";
    std::cout << "Duplicate: " << findDuplicate(testVector);
    return 0;
}
int main() { //n variables ranging from 0~n-1 //e.g. f(arr[4])=f(3)=arr[3]=1 //Once the first iteration ends, it never goes back to arr[4] since there is no 4 in this array int arr[5]={1,2,1,2,3}; int value=findDuplicate(arr,sizeof(arr)/sizeof(arr[0])); std::cout << value << std::endl; std::cin.get(); return 0; }
/* Scans one maf block for duplicated species. Blocks with no duplicates are
 * reported verbatim; otherwise a consensus sequence is built and only the
 * best-scoring line per duplicated species is reported.
 * Allocates and frees the species/sequences snapshot arrays and the
 * duplicate list; `consensus` is only allocated on the duplicate path. */
void checkBlock(mafBlock_t *block) {
    // read through each line of a mafBlock and filter duplicates.
    // Report the top scoring duplication only.
    mafLine_t *ml = maf_mafBlock_getHeadLine(block);
    unsigned n = maf_mafLine_getNumberOfSequences(ml);
    /* parallel snapshots of every 's' line, indexed by `index` */
    char **species = (char **) de_malloc(sizeof(char *) * n);
    char **sequences = (char **) de_malloc(sizeof(char *) * n);
    int index = 0;
    bool containsDuplicates = false;
    /* d tracks the tail of the duplicate list so appends are O(1) */
    duplicate_t *d = NULL, *dupSpeciesHead = NULL;
    while (ml != NULL) {
        if (maf_mafLine_getType(ml) != 's') {
            // skip non-sequence lines
            ml = maf_mafLine_getNext(ml);
            continue;
        }
        species[index] = de_strdup(maf_mafLine_getSpecies(ml));
        sequences[index] = de_strdup(maf_mafLine_getSequence(ml));
        duplicate_t *thisDup = findDuplicate(dupSpeciesHead, maf_mafLine_getSpecies(ml));
        if (thisDup == NULL) {
            // first instance of species, add to list
            if (dupSpeciesHead == NULL) {
                dupSpeciesHead = newDuplicate();
                d = dupSpeciesHead;
            } else {
                d->next = newDuplicate();
                d = d->next;
            }
            d->species = de_strdup(maf_mafLine_getSpecies(ml));
            // create the mafline linked list
            d->headScoredMaf = newScoredMafLine();
            d->headScoredMaf->mafLine = ml;
            /* tail pointer makes later extensions O(1) */
            d->tailScoredMaf = d->headScoredMaf;
        } else {
            // this sequence is a duplicate, extend the duplicate list.
            containsDuplicates = true;
            ++(thisDup->numSequences);
            scoredMafLine_t *sml = thisDup->tailScoredMaf;
            sml->next = newScoredMafLine();
            sml = sml->next;
            sml->mafLine = ml;
            thisDup->tailScoredMaf = sml;
        }
        ++index;
        ml = maf_mafLine_getNext(ml);
    }
    if (!containsDuplicates) {
        /* nothing to filter: emit the block unchanged and clean up */
        reportBlock(block);
        destroyStringArray(species, n);
        destroyStringArray(sequences, n);
        destroyDuplicates(dupSpeciesHead);
        return;
    }
    // this block contains duplicates
    char *consensus = (char *) de_malloc(longestLine(block) + 1);
    consensus[0] = '\0';
    buildConsensus(consensus, sequences, n,
                   maf_mafLine_getLineNumber(maf_mafBlock_getHeadLine(block)));
    // lineno used for error reporting
    findBestDupes(dupSpeciesHead, consensus);
    reportBlockWithDuplicates(block, dupSpeciesHead);
    // clean up
    destroyStringArray(species, n);
    destroyStringArray(sequences, n);
    destroyDuplicates(dupSpeciesHead);
    free(consensus);
}
/* Scans one maf block for duplicated species. Blocks with no duplicates are
 * reported verbatim; otherwise a consensus sequence is built and only the
 * best-scoring line per duplicated species is reported.
 * NOTE(review): unlike the accessor-based variant of this function, this one
 * extends a species' scored-line list by walking to the tail each time
 * (O(dups^2) per species) and copies names with unchecked strcpy -- assumes
 * strlen(m->species) < kMaxSeqName; confirm against the input format. */
void checkBlock(mafBlock_t *block) {
    // read through each line of a mafBlock and filter duplicates.
    // Report the top scoring duplication only.
    unsigned n = maf_numberOfSequencesMafLineList(block->headLine);
    /* parallel snapshots of every 's' line, indexed by `index` */
    char **species = (char **) de_malloc(sizeof(char *) * n);
    char **sequences = (char **) de_malloc(sizeof(char *) * n);
    int index = 0;
    bool containsDuplicates = false;
    mafLine_t *m = block->headLine;
    /* d tracks the tail of the duplicate-species list */
    duplicate_t *d = NULL, *dupSpeciesHead = NULL;
    while (m != NULL) {
        if (m->type != 's') {
            // skip non-sequence lines
            m = m->next;
            continue;
        }
        species[index] = (char *) de_malloc(kMaxSeqName);
        sequences[index] = (char *) de_malloc(strlen(m->sequence) + 1);
        strcpy(species[index], m->species);
        strcpy(sequences[index], m->sequence);
        duplicate_t *thisDup = findDuplicate(dupSpeciesHead, m->species);
        if (thisDup == NULL) {
            // add new duplicate species
            debug("adding new species %s\n", m->species);
            if (dupSpeciesHead == NULL) {
                dupSpeciesHead = newDuplicate();
                d = dupSpeciesHead;
            } else {
                d->next = newDuplicate();
                d = d->next;
            }
            d->species = (char *) de_malloc(kMaxSeqName);
            strcpy(d->species, m->species);
            // create the mafline linked list
            d->headScoredMaf = newScoredMafLine();
            d->headScoredMaf->mafLine = m;
        } else {
            // this sequence is a duplicate, extend the duplicate list.
            debug("extending duplicate on species %s\n", m->species);
            containsDuplicates = true;
            /* walk to the tail of this species' scored-line list */
            scoredMafLine_t *ml = thisDup->headScoredMaf;
            while (ml->next != NULL)
                ml = ml->next;
            ml->next = newScoredMafLine();
            ml = ml->next;
            ml->mafLine = m;
        }
        ++index;
        m = m->next;
    }
    if (!containsDuplicates) {
        /* nothing to filter: emit the block unchanged and clean up */
        reportBlock(block);
        destroyStringArray(species, n);
        destroyStringArray(sequences, n);
        destroyDuplicates(dupSpeciesHead);
        return;
    }
    // this block contains duplicates
    char *consensus = (char *) de_malloc(longestLine(block) + 1);
    consensus[0] = '\0';
    buildConsensus(consensus, sequences, n, block->headLine->lineNumber);
    findBestDupes(dupSpeciesHead, consensus);
    reportBlockWithDuplicates(block, dupSpeciesHead);
    destroyStringArray(species, n);
    destroyStringArray(sequences, n);
    destroyDuplicates(dupSpeciesHead);
    free(consensus);
}