void SplicingEvent::mergeSets(list< shared_ptr< TranscriptFeature > > &x, const list< shared_ptr< TranscriptFeature > > &y) { // to keep list< shared_ptr< TranscriptFeature > > keep4X; list< shared_ptr< TranscriptFeature > >::const_iterator featureIteratorX; list< shared_ptr< TranscriptFeature > >::const_iterator featureIteratorY; for(featureIteratorY=y.begin(); featureIteratorY!=y.end(); featureIteratorY++) { bool sameID = false; for(featureIteratorX=x.begin(); featureIteratorX!=x.end(); featureIteratorX++) { if ((**featureIteratorY).getIdentifier().compare((**featureIteratorX).getIdentifier()) == 0) { sameID = true; break; } } if (!sameID) { keep4X.push_back((*featureIteratorY)); } } x.merge(keep4X); }
// Collect into `entities` every entity that could collide with `e`:
// descend into the child node containing e (when children exist), then add
// this node's own entities. Returns the accumulated, sorted list.
Quadtree::list Quadtree::retrieve(list& entities, Entity* e) {
    int index = this->getIndex(e);
    if (index != -1 && nodes[0] != nullptr) {
        nodes[index]->retrieve(entities, e);
    }
    // BUGFIX: was entities.merge(this->entities). list::merge MOVES the
    // elements out of its argument, so the first retrieve() emptied this
    // node's entity list and corrupted the tree. Append a copy instead,
    // then sort the accumulated result once (merge also required both
    // lists pre-sorted, which was done per-call; one final sort is enough).
    entities.insert(entities.end(), this->entities.begin(), this->entities.end());
    entities.sort();
    return entities;
}
// Returns the highest factor common to both (ascending-sorted) factor lists,
// or 0 when the lists share no factor. The inputs are taken by value because
// list::merge consumes its argument.
// NOTE: list::merge requires both inputs sorted ascending — callers must
// pass sorted factor lists (as factorisation routines typically produce).
int shareFactors(list<int> numeratorFactors, list<int> denominatorFactors) {
    int hcf = 0;
    // Merged ascending list: a factor common to both inputs appears as two
    // adjacent equal elements.
    numeratorFactors.merge(denominatorFactors);
    for (list<int>::iterator iter = numeratorFactors.begin(); iter != numeratorFactors.end(); ++iter) {
        list<int>::iterator jter = iter;
        ++jter;
        // BUGFIX: the original dereferenced jter even when it was end()
        // (last iteration), which is undefined behavior.
        if (jter == numeratorFactors.end()) {
            break;
        }
        if (*iter == *jter) {
            hcf = *iter;  // keep overwriting: the last duplicate is the largest
        }
    }
    return hcf;
}
// Intersects two ascending-sorted ID lists; when the intersection is empty,
// the sorted union (merge) of the two inputs is returned instead.
list<int> processTwoLists(list<int> list1, list<int> list2) {
    list<int> result;
    list<int>::iterator a = list1.begin();
    list<int>::iterator b = list2.begin();
    // Lockstep walk over both sorted lists, collecting common IDs.
    while (a != list1.end() && b != list2.end()) {
        if (*a < *b) {
            ++a;                     // a is behind: advance it
        } else if (*b < *a) {
            ++b;                     // b is behind: advance it
        } else {
            result.push_back(*a);    // common ID
            ++a;
            ++b;
        }
    }
    // Nothing in common: fall back to the merged union of the originals.
    if (result.empty()) {
        list1.merge(list2);
        result = list1;
    }
    result.sort();  // ensure sorted output
    return result;
}
void Poly::appendTerms(const list<Term>& x) { list<Term> copy(x); terms.merge(copy); }
void compute_streamlines_load_file() { float from[3], to[3]; from[0] = minLen[0]; from[1] = minLen[1]; from[2] = minLen[2]; to[0] = maxLen[0]; to[1] = maxLen[1]; to[2] = maxLen[2]; printf("loading seeds...\n"); int nSeeds; //VECTOR3* seeds=loadSeedandRender(strcat(g_filename,".data"),nSeeds); VECTOR3* seeds=loadSeedfromFile("myseeds.seed",nSeeds); //nSeeds=25; osuflow->SetEntropySeedPoints( seeds,nSeeds); seeds = osuflow->GetSeeds(nSeeds); for (int i=0; i<nSeeds; i++) { printf(" seed no. %d : [%f %f %f]\n", i, seeds[i][0], seeds[i][1], seeds[i][2]); seed_list.push_back(VECTOR3(seeds[i][0], seeds[i][1], seeds[i][2])); } sl_list.clear(); std::vector<int> line_color; list<vtListSeedTrace*> half_lines, long_lines; printf("compute streamlines..\n"); osuflow->SetIntegrationParams(minstepsize, maxstepsize); //small and large step size osuflow->GenStreamLines(half_lines , BACKWARD_AND_FORWARD, maxi_stepnum, 0); //maxi steps printf(" done integrations\n"); printf("list size = %d\n", half_lines.size()); combinehalflines(half_lines,long_lines); sl_list.merge(long_lines); int grid_res[3]; osuflow->GetFlowField()->getDimension(grid_res[0],grid_res[1],grid_res[2]); // grid_res[0]=64;grid_res[1]=64;grid_res[2]=64; //calculate the entropy float* vectors; FILE* fp=fopen(g_filename,"rb"); // int grid_res[3]; fread(grid_res,sizeof(int),3,fp); vectors=new float[grid_res[0]*grid_res[1]*grid_res[2]*3]; fread(vectors,sizeof(float),grid_res[0]*grid_res[1]*grid_res[2]*3,fp); fclose(fp); int* donot_change=new int[grid_res[0]*grid_res[1]*grid_res[2]]; memset(donot_change,0,grid_res[0]*grid_res[1]*grid_res[2]); float* new_vectors=new float[grid_res[0]*grid_res[1]*grid_res[2]*3]; // reconstruct_field_GVF_3D(new_vectors,vectors,grid_res,long_lines,donot_change); delete [] new_vectors; delete [] donot_change; for(int i=0; i<nSeeds;i++) line_color.push_back(i); // add_context_streamlines(line_color); //save_distance_volume(); }