/* Register a pass of the given type (and optional name) in |passes|.
 * No-op if an identical (type, name) pass is already present.  After
 * appending, the list is re-sorted by component count and any divisor
 * pass this pass depends on is added recursively. */
void Pass::add(PassType type, vector<Pass> &passes, const char *name)
{
  /* De-duplicate: a named request must match an existing pass with the
   * same name; an unnamed request matches only an unnamed pass. */
  for (size_t i = 0; i < passes.size(); i++) {
    if (passes[i].type == type && (name ? (passes[i].name == name) : passes[i].name.empty())) {
      return;
    }
  }

  Pass pass;

  pass.type = type;
  pass.filter = true;          /* most passes get pixel filtering */
  pass.exposure = false;       /* exposure scaling only for light-carrying passes */
  pass.divide_type = PASS_NONE;
  if (name) {
    pass.name = name;
  }

  /* Per-type setup: component count, filtering, exposure, divisor pass. */
  switch (type) {
    case PASS_NONE:
      pass.components = 0;
      break;
    case PASS_COMBINED:
      pass.components = 4;
      pass.exposure = true;
      break;
    case PASS_DEPTH:
      pass.components = 1;
      pass.filter = false;  /* depth values must not be blended across pixels */
      break;
    case PASS_MIST:
      pass.components = 1;
      break;
    case PASS_NORMAL:
      pass.components = 4;
      break;
    case PASS_UV:
      pass.components = 4;
      break;
    case PASS_MOTION:
      pass.components = 4;
      pass.divide_type = PASS_MOTION_WEIGHT;
      break;
    case PASS_MOTION_WEIGHT:
      pass.components = 1;
      break;
    case PASS_OBJECT_ID:
    case PASS_MATERIAL_ID:
      pass.components = 1;
      pass.filter = false;  /* IDs must stay exact, no filtering */
      break;
    case PASS_EMISSION:
    case PASS_BACKGROUND:
      pass.components = 4;
      pass.exposure = true;
      break;
    case PASS_AO:
      pass.components = 4;
      break;
    case PASS_SHADOW:
      pass.components = 4;
      pass.exposure = false;
      break;
    case PASS_LIGHT:
      /* This isn't a real pass, used by baking to see whether
       * light data is needed or not.
       *
       * Set components to 0 so pass sort below happens in a
       * determined way. */
      pass.components = 0;
      break;
#ifdef WITH_CYCLES_DEBUG
    case PASS_BVH_TRAVERSED_NODES:
    case PASS_BVH_TRAVERSED_INSTANCES:
    case PASS_BVH_INTERSECTIONS:
    case PASS_RAY_BOUNCES:
      pass.components = 1;
      pass.exposure = false;
      break;
#endif
    case PASS_RENDER_TIME:
      /* This pass is handled entirely on the host side. */
      pass.components = 0;
      break;
    case PASS_DIFFUSE_COLOR:
    case PASS_GLOSSY_COLOR:
    case PASS_TRANSMISSION_COLOR:
    case PASS_SUBSURFACE_COLOR:
      pass.components = 4;
      break;
    /* Direct/indirect light passes are later divided by the matching
     * color pass, hence the divide_type below. */
    case PASS_DIFFUSE_DIRECT:
    case PASS_DIFFUSE_INDIRECT:
      pass.components = 4;
      pass.exposure = true;
      pass.divide_type = PASS_DIFFUSE_COLOR;
      break;
    case PASS_GLOSSY_DIRECT:
    case PASS_GLOSSY_INDIRECT:
      pass.components = 4;
      pass.exposure = true;
      pass.divide_type = PASS_GLOSSY_COLOR;
      break;
    case PASS_TRANSMISSION_DIRECT:
    case PASS_TRANSMISSION_INDIRECT:
      pass.components = 4;
      pass.exposure = true;
      pass.divide_type = PASS_TRANSMISSION_COLOR;
      break;
    case PASS_SUBSURFACE_DIRECT:
    case PASS_SUBSURFACE_INDIRECT:
      pass.components = 4;
      pass.exposure = true;
      pass.divide_type = PASS_SUBSURFACE_COLOR;
      break;
    case PASS_VOLUME_DIRECT:
    case PASS_VOLUME_INDIRECT:
      pass.components = 4;
      pass.exposure = true;
      break;
    case PASS_CRYPTOMATTE:
      pass.components = 4;
      break;
    default:
      assert(false);  /* unhandled PassType */
      break;
  }

  passes.push_back(pass);

  /* Order by component count, to ensure alignment so passes with size 4
   * come first and then passes with size 1. */
  sort(&passes[0], &passes[0] + passes.size(), compare_pass_order);

  /* A pass divided by another pass needs its divisor registered too. */
  if (pass.divide_type != PASS_NONE)
    Pass::add(pass.divide_type, passes);
}
// O(1) unordered erase: overwrite the victim slot with the final element,
// then drop the (now redundant) tail.  Element order is NOT preserved.
void deleteStdVecElem(vector<Tstve>& v, int idx) {
    auto tail = v.end() - 1;
    v[idx] = *tail;
    v.erase(tail);
}
// Minimum number of moves to equalize all elements, where one move
// increments n-1 elements by 1 (equivalently: decrements one element by 1).
// Answer = sum(nums) - n * min(nums).
//
// Fixes over the original: an empty input returned UB (dereferencing
// min_element on an empty range) — now returns 0; the arithmetic is done
// in long long throughout to avoid intermediate overflow.
int minMoves(std::vector<int>& nums) {
    if (nums.empty())
        return 0;  // nothing to equalize
    const long long total = std::accumulate(nums.begin(), nums.end(), 0LL);
    const long long lo = *std::min_element(nums.begin(), nums.end());
    return static_cast<int>(total - lo * static_cast<long long>(nums.size()));
}
// Linear membership test: true iff |first| occurs in |second|.
// Fixes over the original: both arguments were copied by value on every
// call (a full deep copy of the vector of strings), and the loop index was
// a signed int compared against size().  Taking const references is
// backward compatible for all callers.
bool in(const std::string& first, const std::vector<std::string>& second) {
    for (const std::string& candidate : second) {
        if (first == candidate)
            return true;
    }
    return false;
}
// Print the elements of |v| to stdout, space-separated, terminated by a
// newline (endl, which also flushes).  The line is assembled in a local
// ostringstream first so it reaches cout as a single insertion.
// Fix over the original: loop index was a signed int compared against
// v.size() (signed/unsigned mismatch).
template<class T>
void print(const std::vector<T>& v) {
    std::ostringstream os;
    for (std::size_t i = 0; i < v.size(); ++i) {
        if (i)
            os << ' ';  // separator between elements, none before the first
        os << v[i];
    }
    std::cout << os.str() << std::endl;
}
/* Batch-draw |count| sub-rectangles of |image| with a single glDrawArrays
 * call.  |coords| holds 8 floats per quad: source x/y/w/h (texels) followed
 * by destination x/y/w/h (canvas units).  Each quad becomes two triangles
 * (6 vertices).  Vertex/texcoord scratch buffers are cached in function
 * statics and grown geometrically, so this function is NOT thread-safe.
 *
 * BUGFIX: the array states were previously "disabled" with glDisable(),
 * which is invalid for client-state enums (raises GL_INVALID_ENUM and
 * leaves the arrays enabled); they are now disabled with
 * glDisableClientState(), matching the glEnableClientState() calls. */
void CCanvasContext::drawImageBatch(CImage *image, const vector<float> &coords)
{
    //LOG("drawImageBatch: %d", coords.size());
    int count = (int)(coords.size() / 8);
    if( !image || count <= 0 )
        return;

    //glPushMatrix();
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, image->getTexture());

    // Premultiplied-alpha blending only when the texture has an alpha channel.
    if( image->hasAlpha() ) {
        glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
        glEnable(GL_BLEND);
    } else {
        glDisable(GL_BLEND);
    }

    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    int vertexCount = count * 6;

    // Lazily allocated, geometrically grown scratch buffers (2 floats/vertex),
    // deliberately kept alive for the life of the process.
    static int maxVertex = 100;
    static GLfloat *vertices = NULL;
    static GLfloat *textureCoords = NULL;
    if( !vertices ) {
        vertices = (GLfloat *) malloc(maxVertex * 2 * sizeof(GLfloat));
    }
    if( !textureCoords ) {
        textureCoords = (GLfloat *) malloc(maxVertex * 2 * sizeof(GLfloat));
    }
    if( vertexCount > maxVertex ) {
        int newMaxVertex = maxVertex * 2;
        if( vertexCount > newMaxVertex ) {
            newMaxVertex = vertexCount;
        }
        // Old contents are fully rewritten below, so malloc (not realloc) is fine.
        GLfloat *newVertexBuf = (GLfloat *) malloc(newMaxVertex * 2 * sizeof(GLfloat));
        GLfloat *newTextureCoordBuf = (GLfloat *) malloc(newMaxVertex * 2 * sizeof(GLfloat));
        free(vertices);
        free(textureCoords);
        vertices = newVertexBuf;
        textureCoords = newTextureCoordBuf;
        maxVertex = newMaxVertex;
    }

    // Texture dimensions are loop-invariant; hoisted out of the per-quad loop.
    unsigned long POTWidth = image->POTWidth();
    unsigned long POTHeight = image->POTHeight();

    for( int i=0; i<count; i++ ) {
        float sx = coords[i*8];
        float sy = coords[i*8+1];
        float sw = coords[i*8+2];
        float sh = coords[i*8+3];
        float dx = coords[i*8+4];
        float dy = coords[i*8+5];
        float dw = coords[i*8+6];
        float dh = coords[i*8+7];

        // Six vertex positions per quad (two triangles; vertices 2/3 and
        // 4/5 are the shared diagonal corners).
        vertices[i*12]    = dx;
        vertices[i*12+1]  = dy;
        vertices[i*12+2]  = dx + dw;
        vertices[i*12+3]  = dy;
        vertices[i*12+4]  = dx;
        vertices[i*12+5]  = dy + dh;
        vertices[i*12+6]  = dx + dw;
        vertices[i*12+7]  = dy;
        vertices[i*12+8]  = dx;
        vertices[i*12+9]  = dy + dh;
        vertices[i*12+10] = dx + dw;
        vertices[i*12+11] = dy + dh;

        // Matching texture coordinates, normalized by the power-of-two
        // texture dimensions.
        textureCoords[i*12]    = sx / POTWidth;
        textureCoords[i*12+1]  = sy / POTHeight;
        textureCoords[i*12+2]  = (sx + sw) / POTWidth;
        textureCoords[i*12+3]  = sy / POTHeight;
        textureCoords[i*12+4]  = sx / POTWidth;
        textureCoords[i*12+5]  = (sy + sh) / POTHeight;
        textureCoords[i*12+6]  = (sx + sw) / POTWidth;
        textureCoords[i*12+7]  = sy / POTHeight;
        textureCoords[i*12+8]  = sx / POTWidth;
        textureCoords[i*12+9]  = (sy + sh) / POTHeight;
        textureCoords[i*12+10] = (sx + sw) / POTWidth;
        textureCoords[i*12+11] = (sy + sh) / POTHeight;
    }

    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glTexCoordPointer(2, GL_FLOAT, 0, textureCoords);
    glDrawArrays(GL_TRIANGLES, 0, vertexCount);

    //free(vertices);
    //free(textureCoords);

    // Client-state arrays must be turned off with glDisableClientState.
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisable(GL_TEXTURE_2D);
    glDisable(GL_BLEND);
    //glPopMatrix();
}
// First DFS pass: mark |now| visited, descend into unvisited neighbors,
// and append |now| to |tp| on the way out (post-order finish sequence).
void dfs1(int now) {
    u[now] = 1;
    for (int nxt : G[now]) {
        if (u[nxt])
            continue;  // already visited
        dfs1(nxt);
    }
    tp.pb(now);
}
void init_seg(int N) { node tmp; tmp.pr=INT_MIN;tmp.su=INT_MIN;tmp.bs=INT_MIN,tmp.sm=INT_MIN; segtree.resize(4*N,tmp); }
//GLUT keyboard callback.  Handles: quit ('q'), mode switches ('1' dot,
//'2' line, '3' polygon), polygon finalization (space), clear ('r'),
//random primitives ('a'), speed control ('-'/'+') and pause toggle ('p').
//Mutates globals: current, points, polygonMarkers, lastMarker,
//px1/py1/px2/py2, c, pauseflag.  |x|/|y| (mouse position) are unused.
void kbd(unsigned char key, int x, int y)
{
    //If the "q" key is pressed, quit the program
    if(key == 'q' || key == 'Q')
        exit(0);
    //If "1" is pressed, enter Dot Mode
    else if (key == '1' && current != Dot)
        current = Dot;
    //If "2" is pressed, enter Line Mode
    else if (key == '2' && current != Line) {
        current = Line;
        //Lines need point pairs; drop a dangling odd point.
        if (points.size() % 2 != 0)
            points.erase(points.begin() + points.size() - 1);
    }
    //If "3" is pressed, enter Polygon Mode
    else if (key == '3' && current != Polygon) {
        if (points.size() != 0) {
            //Ensuring a minimum of three Vertex objects for drawPolygon
            if (points.size() < 3) {
                //Pad with randomly positioned vertices until there are three.
                for (int i = points.size(); i < 3; i++) {
                    px1 = (rand() % glutGet(GLUT_SCREEN_WIDTH)) + 1;
                    py1 = (rand() % glutGet(GLUT_SCREEN_HEIGHT)) + 1;
                    px2 = (rand() % glutGet(GLUT_SCREEN_WIDTH)) + 1;
                    py2 = (rand() % glutGet(GLUT_SCREEN_HEIGHT)) + 1;
                    if (randomflag == true) {
                        //Randomize the RGB color for each padded vertex.
                        for (int j = 0; j < 3; j++)
                            c[j] = rand() / double(RAND_MAX);
                    }
                    Vertex p(px1, py1, px2, py2, size, c);
                    points.push_back(p);
                }
            }
            //Record the end of the current polygon unless that marker is
            //already the most recent one.
            if (polygonMarkers.size() == 1 || polygonMarkers.at(polygonMarkers.size() - 1) != points.size() - 1) {
                polygonMarkers.push_back(points.size() - 1);
                lastMarker = points.size() - 1;
            }
        }
        current = Polygon;
    }
    //If "Space" is pressed in Polygon Mode, start animation (set flag)
    else if (key == ' ' && current == Polygon) {
        polygonMarkers.push_back(points.size() - 1);
        lastMarker = points.size() - 1;
    }
    //If "r" or "R" is pressed, clear the vertex list
    else if (key == 'r' || key == 'R')
        points.erase(points.begin(), points.begin() + points.size());
    //If "a" or "A" is pressed, add a set of randomized primitives
    else if (key == 'a' || key == 'A')
        RandomPoints();
    //If "-" is pressed, decrease the speed of all stored points
    else if (key == '-') {
        for (size_t i = 0; i < points.size(); i++)
            points.at(i).ChangeSpeed(false);
    }
    //If "+" is pressed, increase the speed of all stored points
    else if (key == '+') {
        for (size_t i = 0; i < points.size(); i++)
            points.at(i).ChangeSpeed(true);
    }
    //If "p" or "P" is pressed, pause/play the animation
    else if (key == 'p' || key == 'P')
        pauseflag = !pauseflag;
    return;
}
/* Validate each event spec in |passed_evts| by running
 * 'ophelp --check-events --ignore-count <spec>', then parse the spec's
 * optional colon-separated fields (unit mask, kernel bit, user bit),
 * resolve the event code, and append the resulting operf_event_t to the
 * global |events| list.  Exits the process on any validation failure. */
void op_pe_utils::op_process_events_list(vector<string> & passed_evts)
{
	string cmd = OP_BINDIR;

	if (passed_evts.size() > OP_MAX_EVENTS) {
		cerr << "Number of events specified is greater than allowed maximum of "
		     << OP_MAX_EVENTS << "." << endl;
		exit(EXIT_FAILURE);
	}
	cmd += "/ophelp --check-events --ignore-count ";
	for (unsigned int i = 0; i < passed_evts.size(); i++) {
		FILE * fp;
		string full_cmd = cmd;
		string event_spec = passed_evts[i];
#if PPC64_ARCH
		// Starting with CPU_PPC64_ARCH_V1, ppc64 events files are formatted like
		// other architectures, so no special handling is needed.
		if (cpu_type < CPU_PPC64_ARCH_V1)
			event_spec = _handle_powerpc_event_spec(event_spec);
#endif
		full_cmd += event_spec;
		fp = popen(full_cmd.c_str(), "r");
		if (fp == NULL) {
			cerr << "Unable to execute ophelp to get info for event "
			     << event_spec << endl;
			exit(EXIT_FAILURE);
		}
		// ophelp emits no output for an invalid event spec, so EOF on the
		// first read means validation failed.
		if (fgetc(fp) == EOF) {
			pclose(fp);
			cerr << "Error retrieving info for event "
			     << event_spec << endl;
			exit(EXIT_FAILURE);
		}
		pclose(fp);
		// Mutable copy of the spec for in-place strtok tokenization.
		char * event_str = op_xstrndup(event_spec.c_str(), event_spec.length());
		operf_event_t event;
		strncpy(event.name, strtok(event_str, ":"), OP_MAX_EVT_NAME_LEN - 1);
		/* Event name is required in the event spec in order for
		 * 'ophelp --check-events --ignore-count' to pass. But since unit mask
		 * and domain control bits are optional, we need to ensure the result of
		 * strtok is valid.
		 */
		char * info;
#define _OP_UM 1
#define _OP_KERNEL 2
#define _OP_USER 3
		int place = _OP_UM;
		char * endptr = NULL;
		event.evt_um = 0UL;
		event.count = 0UL;
		event.no_kernel = 0;
		event.no_user = 0;
		event.throttled = false;
		memset(event.um_name, '\0', OP_MAX_UM_NAME_LEN);
		// Remaining colon-separated fields arrive in fixed positional order:
		// unit mask, then kernel-domain bit, then user-domain bit.
		while ((info = strtok(NULL, ":"))) {
			switch (place) {
			case _OP_UM:
				event.evt_um = strtoul(info, &endptr, 0);
				// If any of the UM part is not a number, then we
				// consider the entire part a string.
				if (*endptr) {
					event.evt_um = 0;
					strncpy(event.um_name, info, OP_MAX_UM_NAME_LEN - 1);
				}
				break;
			case _OP_KERNEL:
				if (atoi(info) == 0)
					event.no_kernel = 1;
				break;
			case _OP_USER:
				if (atoi(info) == 0)
					event.no_user = 1;
				break;
			}
			place++;
		}
		free(event_str);
		_get_event_code(&event, cpu_type);
		events.push_back(event);
	}
#if PPC64_ARCH
	{
		/* For ppc64 architecture processors prior to the introduction of
		 * architected_events_v1, the oprofile event code needs to be converted
		 * to the appropriate event code to pass to the perf_event_open syscall.
		 * But as of the introduction of architected_events_v1, the events
		 * file contains the necessary event code information, so this conversion
		 * step is no longer needed.
		 */
		using namespace op_pe_utils;
		if ((cpu_type < CPU_PPC64_ARCH_V1) && !convert_event_vals(&events)) {
			cerr << "Unable to convert all oprofile event values to perf_event values" << endl;
			exit(EXIT_FAILURE);
		}
	}
#endif
}
// static void StreetsMatcher::FindStreets(BaseContext const & ctx, FeaturesFilter const & filter, QueryParams const & params, vector<Prediction> & predictions) { for (size_t startToken = 0; startToken < ctx.m_numTokens; ++startToken) { if (ctx.IsTokenUsed(startToken)) continue; // Here we try to match as many tokens as possible while // intersection is a non-empty bit vector of streets. Single // tokens that are synonyms to streets are ignored. Moreover, // each time a token that looks like a beginning of a house number // is met, we try to use current intersection of tokens as a // street layer and try to match BUILDINGs or POIs. CBV streets(ctx.m_streets); CBV all; all.SetFull(); size_t curToken = startToken; // This variable is used for prevention of duplicate calls to // CreateStreetsLayerAndMatchLowerLayers() with the same // arguments. size_t lastToken = startToken; // When true, no bit vectors were intersected with |streets| at all. bool emptyIntersection = true; // When true, |streets| is in the incomplete state and can't be // used for creation of street layers. 
bool incomplete = false; auto emit = [&]() { if (!streets.IsEmpty() && !emptyIntersection && !incomplete && lastToken != curToken) { CBV fs(streets); CBV fa(all); ASSERT(!fs.IsFull(), ()); ASSERT(!fa.IsFull(), ()); if (filter.NeedToFilter(fs)) fs = filter.Filter(fs); if (fs.IsEmpty()) return; if (filter.NeedToFilter(fa)) fa = filter.Filter(fa).Union(fs); predictions.emplace_back(); auto & prediction = predictions.back(); prediction.m_tokenRange = TokenRange(startToken, curToken); ASSERT_NOT_EQUAL(fs.PopCount(), 0, ()); ASSERT_LESS_OR_EQUAL(fs.PopCount(), fa.PopCount(), ()); prediction.m_prob = static_cast<double>(fs.PopCount()) / static_cast<double>(fa.PopCount()); prediction.m_features = move(fs); prediction.m_hash = prediction.m_features.Hash(); } }; StreetTokensFilter filter([&](strings::UniString const & /* token */, size_t tag) { auto buffer = streets.Intersect(ctx.m_features[tag]); if (tag < curToken) { // This is the case for delayed // street synonym. Therefore, // |streets| is temporarily in the // incomplete state. streets = buffer; all = all.Intersect(ctx.m_features[tag]); emptyIntersection = false; incomplete = true; return; } ASSERT_EQUAL(tag, curToken, ()); // |streets| will become empty after // the intersection. Therefore we need // to create streets layer right now. if (buffer.IsEmpty()) emit(); streets = buffer; all = all.Intersect(ctx.m_features[tag]); emptyIntersection = false; incomplete = false; });
// Return the index of the first occurrence of |x| in |v|, or -1 if absent.
// Fix over the original: the loop index was a signed int compared against
// v.size() (signed/unsigned mismatch).
int findv(std::vector<int> &v, int x) {
    for (std::size_t i = 0; i < v.size(); i++) {
        if (v[i] == x)
            return (int)i;
    }
    return -1;
}
// Map station position |s| to its zone id: binary-search the sorted
// (position, zone) table |station| for the first entry past (s, MXN),
// then step back one to the entry covering s.
int zone(int s) {
    auto it = upper_bound(station.begin(), station.end(), MP(s, MXN));
    --it;
    return it->second;
}
// DSU initialization: size the parent table to _n and make every
// element its own representative.
void init(int _n) {
    fa.assign(_n, 0);
    for (int v = 0; v < _n; ++v)
        fa[v] = v;
}
// Roll the change log back until only |snapshot| entries remain.
// NOTE(review): |lr| is a by-value copy of the popped (target, old-value)
// pair, so "lr.first = lr.second" mutates only the local copy — unless the
// element type makes .first a reference/proxy, this undo writes nothing
// back.  Confirm the element type of |changes|; a pointer-style
// "*lr.first = lr.second" may have been intended.
void restore(int snapshot) {
    while (si(changes) > snapshot) {
        auto lr = changes.back();
        changes.pop_back();
        lr.first = lr.second;
    }
}
//Update the size submenu so the newly selected entry carries the "> "
//marker, store the new size, propagate it to every stored point, and
//request a redraw.  Replaces two 10-case switches (one per label set)
//with label lookup tables; entries outside 1..10 are left untouched,
//exactly as the old default: cases did.
void SizeSubMenu (GLint newSize)
{
    //Menu labels for entries 1..10, unselected and selected forms.
    static const char* plainLabels[10] =
        { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" };
    static const char* markedLabels[10] =
        { "> 1", "> 2", "> 3", "> 4", "> 5", "> 6", "> 7", "> 8", "> 9", "> 10" };

    //Clear the marker from the previously selected entry.
    int oldSize = int(size);
    if (oldSize >= 1 && oldSize <= 10)
        glutChangeToMenuEntry(oldSize, plainLabels[oldSize - 1], oldSize);

    size = newSize;

    //Mark the newly selected entry.
    if (newSize >= 1 && newSize <= 10)
        glutChangeToMenuEntry(newSize, markedLabels[newSize - 1], newSize);

    glutPostRedisplay();

    //Apply the new size to all existing points.
    for (size_t i = 0; i < points.size(); i++)
        points.at(i).ChangeSize(size);
    return;
}
//Merge several per-sample "name,count" CSV files into clustered outputs:
//merged_clusters.csv (all keys), merged_clusters_filtered.csv (keys with
//any count > min_count_for_filters) and merged_clusters_histogram.csv
//(log-binned size histogram).  Keys within cluster_edit_distance_threshold
//(per cluster_distance) of a higher-count key are attached to that key.
void merge_clusters(vector<string> filenames, int min_count_for_filters, int cluster_edit_distance_threshold) {
    vector<map<string, int> > file_data(filenames.size());
    set<string> keys;
    int ct = 0;
    double histogram_bin_growth_factor = 1.4;
    //load data from files
    for(vector<string>::const_iterator filename = filenames.begin(); filename != filenames.end(); filename++) {
        ifstream file(filename->c_str());
        string line;
        while(true) {
            string name;
            int count;
            getline(file, line);
            istringstream ss( line );
            getline( ss, name, ',' );
            ss >> count;
            //stop at EOF or on a malformed line
            if(!file.good() || ss.fail()) break;
            file_data[ct][name] = count;
            //NOTE(review): this insert is redundant — the key was just set
            //via operator[] above, so map::insert is a guaranteed no-op.
            file_data[ct].insert(pair<string, int>(name, count));
            keys.insert(name);
        }
        ct++;
    }
    //identify keys to be remapped
    map<string,int> counts;
    vector<pair<string,int> > sorted_keys;
    //total count of each key across all input files
    for(set<string>::const_iterator key = keys.begin(); key != keys.end(); key++) {
        counts[*key] = 0;
        for(vector<map<string,int> >::iterator data = file_data.begin(); data != file_data.end(); data++) {
            if(data->count(*key)) counts[*key] += (*data)[*key];
        }
        sorted_keys.push_back(std::pair<string, int>(*key, counts[*key]));
    }
    map<string, string> remapped_keys;
    int attached = 0;
    value_sorter vs;
    sort(sorted_keys.begin(), sorted_keys.end(), vs);
    //walk keys from highest count down (reverse of the sorted order); each
    //lower-count key within edit distance is attached to the bigger cluster
    for(vector<pair<string,int> >::const_reverse_iterator i1 = sorted_keys.rbegin(); i1 != sorted_keys.rend(); i1++) {
        for(vector<pair<string,int> > ::const_iterator i2 = sorted_keys.begin(); i2 != sorted_keys.end(); i2++) {
            //stop once the forward scan reaches i1 itself
            if(i1->first == i2->first) break;
            if(cluster_distance(i1->first, i2->first, cluster_edit_distance_threshold+2) <= cluster_edit_distance_threshold) {
                remapped_keys[i2->first] = i1->first;
                //the cluster center also maps to itself
                if(!remapped_keys.count(i1->first)) remapped_keys[i1->first] = i1->first;
                //cerr << "Attaching " << i2->first << " to cluster " << i1->first << endl;
                attached += 1;
            }
        }
    }
    //remove keys mapped to a cluster node that doesn't exist any more
    //keys_difference ends up holding cluster targets referenced exactly once.
    //NOTE(review): mappings are removed when the target is NOT in that
    //singleton set, i.e. targets referenced two or more times — which looks
    //inverted relative to the "orphan" comment above; confirm the intent.
    set<string> keys_seen_once, keys_seen_twice, keys_difference;
    for(map<string, string>::const_iterator i = remapped_keys.begin(); i != remapped_keys.end(); i++) {
        if(!keys_seen_once.count(i->second)) {
            keys_seen_once.insert(i->second);
            continue;
        }
        if(!keys_seen_twice.count(i->second)) keys_seen_twice.insert(i->second);
    }
    set_difference(keys_seen_once.begin(), keys_seen_once.end(), keys_seen_twice.begin(), keys_seen_twice.end(), inserter(keys_difference, keys_difference.end()));
    int removed = 0;
    vector<string> to_remove;
    for(map<string, string>::const_iterator i = remapped_keys.begin(); i != remapped_keys.end(); i++) {
        if(!keys_difference.count(i->second)) {
            //cerr << "Removing orphan cluster mapping " << i->first << " to " << i->second << endl;
            //remapped_keys.erase(i->first);
            //deferred so the map isn't mutated while being iterated
            to_remove.push_back(i->first);
            removed++;
        }
    }
    for(vector<string>::const_iterator i = to_remove.begin(); i != to_remove.end(); i++) remapped_keys.erase(*i);
    cerr << "Clustering stage 2: Attached " << attached << " sequences to clusters" << endl;
    // generate new names for cluster centers
    map<string, string> remapped_names;
    for(map<string, string>::const_iterator i = remapped_keys.begin(); i != remapped_keys.end(); i++) {
        if(remapped_names.count(i->second)) remapped_names[i->second] = cluster_name(i->first, remapped_names[i->second]);
        else remapped_names[i->second] = i->second;
    }
    //NOTE(review): this prints the raw mapping target (i->second), not the
    //generated name in remapped_names — confirm which was meant.
    for(map<string, string>::const_iterator i = remapped_keys.begin(); i != remapped_keys.end(); i++) cerr << "Renaming cluster " << i->first << " as " << i->second << endl;
    //now perform merging of files
    vector<map<string, int> > new_file_data;
    for(vector<map<string,int> >::iterator data = file_data.begin(); data != file_data.end(); data++) {
        new_file_data.push_back(map<string, int>());
        map<string,int> & new_data = new_file_data.back();
        for(set<string>::const_iterator key = keys.begin(); key != keys.end(); key++) {
            if(data->count(*key)) {
                if(remapped_keys.count(*key)) {
                    //accumulate this key's count under its cluster's new name
                    string & cluster_key = remapped_keys[*key];
                    string & cluster_name = remapped_names[cluster_key];
                    if(new_data.count(cluster_name)) new_data[cluster_name] += (*data)[*key];
                    else new_data[cluster_name] = (*data)[*key];
                } else new_data[*key] = (*data)[*key];
            }
        }
    }
    file_data = new_file_data;
    //generate the new keys list after clustering
    keys.clear();
    for(vector<map<string,int> >::iterator data = file_data.begin(); data != file_data.end(); data++) {
        for(map<string, int>::const_iterator i = data->begin(); i != data->end(); i++) keys.insert(i->first);
    }
    //generate merged counts output file
    {
        stringstream header;
        header << "sequence";
        for(vector<string>::const_iterator i = filenames.begin(); i != filenames.end(); i++) header << "," << *i;
        ofstream outfile("merged_clusters.csv");
        ofstream outfile_filtered("merged_clusters_filtered.csv");
        //NOTE(review): no newline is written after the header, so the first
        //data row lands on the same line as the header — confirm.
        outfile << header.str();
        outfile_filtered << header.str();
        for(set<string>::const_iterator key = keys.begin(); key != keys.end(); key++) {
            bool keep = false;
            stringstream line;
            line << *key;
            for(vector<map<string,int> >::iterator data = file_data.begin(); data != file_data.end(); data++) {
                if(data->count(*key)) {
                    line << "," << (*data)[*key];
                    //keep the row in the filtered file if any sample exceeds the cutoff
                    if((*data)[*key] > min_count_for_filters) keep = true;
                } else {
                    line << ",0";
                }
            }
            outfile << line.str() << "\n";
            if(keep) outfile_filtered << line.str() << "\n";
        }
        outfile_filtered.close();
    }
    //generate histogram
    {
        //generate list of sizes (counts for each key)
        vector<int> sizes;
        for(set<string>::const_iterator key = keys.begin(); key != keys.end(); key++) {
            int ct = 0;
            for(vector<map<string,int> >::iterator data = file_data.begin(); data != file_data.end(); data++) {
                if(data->count(*key)) ct += (*data)[*key];
            }
            sizes.push_back(ct);
        }
        sort(sizes.begin(), sizes.end());
        ofstream outfile;
        outfile.open("merged_clusters_histogram.csv");
        double bin_size(1.0);
        //generate bins for histogram
        //bin edges grow geometrically by histogram_bin_growth_factor,
        //keeping only edges that differ after truncation to int
        vector<int> bins;
        bins.push_back(1);
        while(bin_size < sizes[sizes.size() - 1]) {
            double new_bin_size = bin_size * histogram_bin_growth_factor;
            if(int(bin_size) != int(new_bin_size)) bins.push_back(int(bin_size));
            bin_size = new_bin_size;
        }
        bins.push_back(int(bin_size));
        int bin_ct = 0, s = 0;
        //generate actual histogram
        for(size_t i = 0; i < sizes.size(); i++) {
            //advance to the bin containing sizes[i], flushing each full bin
            while(sizes[i] > bins[bin_ct + 1]) {
                if(bins[bin_ct] != bins[bin_ct + 1]) {
                    outfile << bins[bin_ct] << "-" << bins[bin_ct + 1] << "," << s << "\n";
                    s = 0;
                }
                bin_ct += 1;
            }
            s += sizes[i];
        }
        //flush the final (partial) bin
        outfile << bins[bin_ct] << "-" << bins[bin_ct + 1] << "," << s << endl;
    }
}
//GLUT display callback: renders the in-progress primitive (rubber-banding
//from the first click when flag1 is set), then draws and animates every
//committed dot/line/polygon, honoring pauseflag.  Relies on globals:
//points, polygonMarkers, lastMarker, current, flag1, px1/py1, size, c,
//limit, polySet, pauseflag.
void display(void)
{
    glClear(GL_COLOR_BUFFER_BIT);
    //Drawing and updating each vertex object in <points>
    if (flag1 == true) //First point selected, but no object created yet
    {
        if (current == Dot)
            drawStart(px1, py1, size, c);
        else if (current == Line) {
            //An even count means no dangling endpoint: just show the cursor point.
            if (points.size() == 0 || points.size() % 2 == 0)
                drawStart(px1, py1, size, c);
            else {
                //Dangling first endpoint exists: draw it plus the cursor point.
                drawPoint(points[points.size() - 1]);
                drawStart(px1, py1, size, c);
            }
        }
        else {
            //Polygon mode: redraw the open polygon's vertices so far.
            if (lastMarker == 0 && polygonMarkers.size() > 0) {
                if (points.size() == 0)
                    drawStart(px1, py1, size, c);
                else
                    drawPoint(points[lastMarker]);
            }
            for (size_t i = lastMarker + 1; i < points.size(); i++)
                drawPoint(points.at(i));
            drawStart(px1, py1, size, c);
        }
    }
    //Checking for the first point on a line before the second point's initial position
    //is defined (Line Mode Only)
    else if (flag1 == false && current != Dot) {
        if (current == Line) {
            if (points.size() > 0 && points.size() % 2 != 0)
                drawPoint(points.at(points.size() - 1));
        }
        else if (current == Polygon) {
            if (polygonMarkers.size() > 0) {
                if (lastMarker == 0 && points.size() < 1)
                    drawPoint(points.at(0));
                else {
                    //Collect the open polygon's vertices, fill it, then reset
                    //the scratch list (erase-to-size == clear).
                    for (size_t i = lastMarker + 1; i < points.size(); i++)
                        polySet.push_back(points.at(i));
                    drawFilledPolygon(polySet);
                    polySet.erase(polySet.begin(), polySet.begin() + polySet.size());
                }
            }
        }
    }
    //Drawing the animated objects
    if (current == Dot) {
        for (size_t i = 0; i < points.size(); i++) {
            drawPoint(points.at(i));
            if (pauseflag == false)
                points.at(i).UpdatePosition();
        }
    }
    else if (current == Line) {
        //Making sure that the vector limit passed has an even number of Vertex objects
        if (points.size() % 2 == 0)
            limit = points.size();
        else
            limit = points.size() - 1;
        //Consume points pairwise: each pair is one line segment.
        for (int i = 0; i < limit; i = i + 2) {
            drawVertexLine(points.at(i), points.at(i + 1));
            if (pauseflag == false) {
                points.at(i).UpdatePosition();
                points.at(i + 1).UpdatePosition();
            }
        }
    }
    else if (current == Polygon) {
        //polygonMarkers holds the index of each polygon's last vertex.
        if (polygonMarkers.size() > 1) {
            //Drawing the first polygon
            for (int k = polygonMarkers.at(0); k < polygonMarkers.at(1); k++)
            {
                polySet.push_back(points.at(k));
                if (pauseflag == false)
                    points.at(k).UpdatePosition();
            }
            polySet.push_back(points.at(polygonMarkers.at(1)));
            drawFilledPolygon(polySet);
            polySet.erase(polySet.begin(), polySet.begin() + polySet.size());
            //Drawing the remaining polygons
            for (size_t i = 1; i < polygonMarkers.size(); i++) {
                //Vertices between the previous marker (exclusive) and this one.
                for (int j = polygonMarkers.at(i - 1) + 1; j < polygonMarkers.at(i); j++) {
                    polySet.push_back(points.at(j));
                    if (pauseflag == false)
                        points[j].UpdatePosition();
                }
                polySet.push_back(points.at(polygonMarkers.at(i)));
                drawFilledPolygon(polySet);
                polySet.erase(polySet.begin(), polySet.begin() + polySet.size());
            }
        }
    }
    glFlush();
    //Double Buffering
    glutSwapBuffers();
    glutPostRedisplay();
}
void Log::showAllLogs() { // look at all logs stored in the string vector and output them for (int i = 0; i < logList.size(); i++) cout << logList[i] << " " << endl; }
/** convex_hull : including collinear points counterclockwise */
// Graham-scan style hull of |poly|, appended to |ret|.  Steps: sort and
// de-duplicate the points, translate so the lexicographically smallest
// point is the origin, sort the rest by angle (compAng), then sweep with
// an explicit stack keeping left turns; collinear points are kept, with
// the nearer point handled first.  For n <= 2 the input is returned as-is.
// Mutates |poly| (sorted, de-duplicated, and translated back and forth).
void ConvexHull(vector<PT>& poly,vector<PT>& ret)
{
	int n=SZ(poly);
	if(n==0) return ;
	sort(all(poly));
	// drop duplicate points
	poly.resize(distance(poly.begin(),unique(all(poly))));
	n=SZ(poly);
	// translate everything so the pivot (smallest point) is the origin
	PT fpoint = poly[0];
	for(int i=0;i<n;i++)
	{
		poly[i]=poly[i]-fpoint;
	}
	stack<PT> S;
	PT f;
	PT p1,p2,p3;
	if(n>2)
	{
		// sort remaining points by angle around the pivot
		sort(poly.begin()+1,poly.end(),compAng);
		bool ok;
		ll c;
		S.push(poly[0]);
		S.push(poly[1]);
		// i == n wraps around to poly[0] to close the hull
		for(int i=2;i<=n;i++)
		{
			p3=poly[i%n];
			ok=(i!=n);
			do{
				// peek at the top two stack points p1 (below), p2 (top)
				p2=S.top();
				S.pop();
				p1=S.top();
				S.push(p2);
				c=cross(p2-p1,p3-p1);
				if(c<0)
				{
					// right turn: pop p2 (but always keep >= 2 points)
					if(SZ(S)>2)
						S.pop();
					else
						break;
				}
				else if(c==0)
				{
					// collinear: keep both, but only push p3 when it is
					// farther from p1 than p2 (replacing p2 on the stack)
					ll d12=dot(p2-p1,p2-p1),d13=dot(p3-p1,p3-p1);
					if(d13<=d12)
						ok=false;
					else
					{
						if(SZ(S)>=2)
							S.pop();
					}
					break;
				}
				else break;  // left turn: stack remains convex
			}while(SZ(S)>=2);
			if(ok)
				S.push(p3);
		}
		// unload the stack; reversing yields counterclockwise order
		while(!S.empty()){
			ret.psb(S.top());
			S.pop();
		}
		reverse(all(ret));
	}
	else
	{
		ret=poly;
	}
	// translate the hull back to the original coordinate frame
	n=SZ(ret);
	for(int i=0;i<n;i++)
	{
		ret[i]=ret[i]+fpoint;
	}
	return ;
}
bool LED_Detector::findLEDs(const cv::Mat& RGBImage, cv::Mat &grayImage, cv::Mat &binaryImage, vector<Point2f> &leds, const LED_Detector::Params ¶ms, bool havePreviousState, const vector<Point2f> &reprojImgPts) { // Extract the red channel and save as gray image const int mixCh[]= {2,0}; mixChannels(&RGBImage,1,&grayImage,1,mixCh,1); // threshold the gray image threshold(grayImage, binaryImage, params.threshold, 255, THRESH_BINARY); #ifdef SHOW_BINARY_IMG imshow("binary",binaryImage); #endif // detect contours -- use CV_RETR_EXTERNAL to avoid returning holes vector<vector<Point> > contours; findContours(binaryImage,contours,CV_RETR_EXTERNAL,CV_CHAIN_APPROX_SIMPLE); /// Jump back to here if we can't use previous state not_detected_stop: /// We now have a vectorized contours object: vector<vector<Point> > contours /// Loop through each contour and filter/classify it by its properties static LED_Detector::contourStructObj contourStruct; vector <LED_Detector::contourStructObj> contourStructVec; int n_detected = 0; vector<Point2f> leaderROI_contour; // vectorize the rectangle as a contour if (havePreviousState) // Need to initialize proximities { proximityVec.resize(NO_LEDS); // This will resize 1st dimension to proximityVec[5][] for (int i=0; i<NO_LEDS; i++) proximityVec[i].resize(0); // this will clear the contents of each subvector // Compute the wingspan based on reprojected image points from last frame float span_sq = pow((reprojImgPts[NO_LEDS-1].x - reprojImgPts[0].x),2) + pow((reprojImgPts[NO_LEDS-1].y - reprojImgPts[0].y),2); span = sqrt(span_sq); // Determine the bounding rectangle of the lead UAV and scale up a little bit RotatedRect leaderROI = minAreaRect(reprojImgPts); leaderROI.size.height += 90; leaderROI.size.width += 90; Point2f verticies[4]; // vectorize the bounding ROI leaderROI.points(verticies); leaderROI_contour.assign(verticies,verticies + 4); /// Uncomment to draw ROI on image frame #if defined (DEBUG_VIDEO) || defined (SAVEOFF_FRAMES) for (int 
i_test=0; i_test<4; i_test++) line(frame, leaderROI_contour[i_test], leaderROI_contour[(i_test+1)%4], Scalar(255,255,255)); #endif } for (size_t contourIdx = 0; contourIdx < contours.size(); contourIdx++) { /// Reduce resolution //approxPolyDP(contours[contourIdx], contours[contourIdx], 2, true); /// Use minEnclosingCircle for blob area minEnclosingCircle(contours[contourIdx], contourStruct.center, contourStruct.radius); contourStruct.area = CV_PI * (contourStruct.radius * contourStruct.radius); /// Uncomment to view all detected blobs #if (!ARM) cv::circle(frame,contourStruct.center, 6, cv::Scalar(255,255,255), 3); #endif /// Consider only the points that fall within a ROI around the last known position if (havePreviousState) { // if point does not lie within or lie on leaderROI RotatedRect, continue if (pointPolygonTest(leaderROI_contour,contourStruct.center,false) < 0 ) continue; /// Uncomment to show/highlight blobs that appear inside ROI #if (!ARM) cv::circle(frame,contourStruct.center, 6, cv::Scalar(255,255,255), 2); #endif } if (params.filterByArea) { if (contourStruct.area < params.minArea || contourStruct.area >= params.maxArea) continue; } if (params.filterByCircularity) { Moments moms = moments(Mat(contours[contourIdx])); static float area = moms.m00; static float ratio = area / contourStruct.area; // (blob area) / (min enclosing circle area) if (ratio < params.minCircularity || ratio >= params.maxCircularity) continue; contourStruct.circularity = ratio; } // minimum bounding ellipse if (params.filterByAspectRatio) { if (contours[contourIdx].size() >=5 ) // need at least 5 elements { static RotatedRect rRect = fitEllipse(contours[contourIdx]); static float width = rRect.size.width; static float height = rRect.size.height; static float AR = (width > height) ? 
(height/width) : (width/height); if (AR < params.minAspectRatio || AR >= params.maxAspectRatio) continue; contourStruct.aspectRatio = AR; contourStruct.center = rRect.center; // draw the detected ellipse ellipse(grayImage, rRect, Scalar(0,255,0), 2, 8); } } // compute the average RGB color of the blob if (params.sortByColor || params.filterByColor) { unsigned int R,G,B; Mat RGB(1,1,CV_32FC3); Mat HSV(1,1,CV_32FC3); unsigned int n=0; R = G = B = 0; // get the bounding rectangle Rect roi = boundingRect(contours[contourIdx]); // iterate over bounding rectangle for (int y=roi.y; y<roi.y + roi.height; y++){ for(int x=roi.x; x< roi.x + roi.width; x++){ // Check if point lies inside contour if (pointPolygonTest(contours[contourIdx], Point2f(x,y),false) >= 0){ B += RGBImage.at<Vec3b>(y,x)[0]; G += RGBImage.at<Vec3b>(y,x)[1]; R += RGBImage.at<Vec3b>(y,x)[2]; n++; } } } if (n < 1) // We should have at least one pixel -- if not, give up continue; // Average colors inside conotur RGB.at<Vec3f>(0,0)[0] = B/n; RGB.at<Vec3f>(0,0)[1] = G/n; RGB.at<Vec3f>(0,0)[2] = R/n; cvtColor(RGB,HSV,CV_BGR2HSV); contourStruct.color = HSV.at<Vec3f>(0,0)[0]; // get the average Hue // compute difference from target contourStruct.color_delta = contourStruct.color - params.targetColor; if (contourStruct.color_delta > 180) contourStruct.color_delta -= 360; else if (contourStruct.color_delta < -180) contourStruct.color_delta += 360; contourStruct.color_delta = abs(contourStruct.color_delta); if (params.filterByColor) { if ( contourStruct.color_delta > params.maxColor ) continue; } if (havePreviousState) { // check each blob if in local raidus of previous image point for (int i=0; i<NO_LEDS; i++) { float radius_sq = pow((contourStruct.center.x - reprojImgPts[i].x),2) + pow((contourStruct.center.y - reprojImgPts[i].y),2); float radius = sqrt(radius_sq); // check if within local radius if (radius < (params.localRadius * span / 100)) { proximity tmpProx; tmpProx.center = contourStruct.center; 
tmpProx.radius = radius; proximityVec[i].push_back(tmpProx); } } } } contourStructVec.push_back(contourStruct); } if (!havePreviousState && params.sortByColor) { sort(contourStructVec.begin(), contourStructVec.end(), LED_Detector::sortByColor); // populate the vector of image points from the contourStructVec int stopHere = (contourStructVec.size() > params.maxBlobs) ? params.maxBlobs : contourStructVec.size(); for (int i=0; i<stopHere; i++) { leds.push_back(contourStructVec[i].center); } } /// This is the beginning of modified code to selectively select "best" image points /// and handle duplicates if (havePreviousState) { // Flatten vector to more easily access elements for duplicate detection // (don't have to traverse across two dimensions) vector<proximity*> flat; // "flattened" vector (points to elements of proximityVec) for (int i=0; i<NO_LEDS; i++) { for (int j=0; j<proximityVec[i].size(); j++) { flat.push_back(&proximityVec[i][j]); } } // Detect and "group" duplicates vector<vector<proximity*> > groups; for (int i=0; i<flat.size(); i++) { if (flat[i]->isDuplicate > 0) continue; vector<proximity*> subgroup; subgroup.push_back(flat[i]); for (int j=i+1; j<flat.size(); j++) { if (samePoint(*flat[i], *flat[j])) { flat[i]->isDuplicate = true; flat[j]->isDuplicate = true; subgroup.push_back(flat[j]); } } if (subgroup.size() > 1) groups.push_back(subgroup); } // Sort subgroups by (increasing) radius and keep only the one with the smallest radius for (int i=0; i<groups.size(); i++) sort(groups[i].begin(),groups[i].end(),compByRadius); // Eliminate any duplicate points (choosing to keep the one(s) with the lowest radius) n_detected = NO_LEDS; for (int i=0; i<groups.size(); i++) { for (int j=1; j<groups[i].size(); j++) // erase everything after the first element groups[i][j]-> delete_me = true; // set to be deleted } for (int i=0; i<proximityVec.size(); i++) { for (int j=0; j<proximityVec[i].size(); j++) { if (proximityVec[i][j].delete_me) 
proximityVec[i].erase(proximityVec[i].begin() + j); } // If any rows are empty, subtract from the detected LED count if (proximityVec[i].empty()) n_detected--; } // Assume we don't see the aircraft if we see fewer than 3 LEDs // and use normal detection logic if (n_detected < 3) { havePreviousState = false; goto not_detected_stop; // go back to beginning with assumption that we don't have previous state } // Sort by radius for (int i=0; i<NO_LEDS; i++) sort(proximityVec[i].begin(),proximityVec[i].end()); // sort by increasing radius // Replace any "missing" points with reprojImgPts from last time for (int i=0; i<NO_LEDS; i++) { if (proximityVec[i].empty()) { proximity tmp; tmp.center = reprojImgPts[i]; proximityVec[i].push_back(tmp); } } // Return the first 5 points in order, then add points in successive layers // (selected randomly) until we reach maximum (N) points const int LEDs_max = 8; // Pass the first LEDs for (int i=0; i<NO_LEDS; i++) leds.push_back(proximityVec[i][0].center); // Go along successive layers to add "extra" points choosing which to pass by random vector<Point2f*> layer; int layer_no = 0; while (leds.size() < LEDs_max) { // Populate the next layer if we have extracted all elements from previous one if (layer.empty()) { layer_no++; for (int i=0; i<NO_LEDS; i++) { if (proximityVec[i].size() > layer_no) layer.push_back(&proximityVec[i][layer_no].center); } } if (layer.empty()) break; int randomPick = rand() % layer.size(); leds.push_back(*layer[randomPick]); layer.erase(layer.begin() + randomPick); } } if (havePreviousState) return true; else return false; }
// generate the call graphs for each function void getCalls() { int currentFunction = -1; char * functionBuffer; // read through stream creating the call graph for each function while(!feof(optStream)) { // check if there was a transfer in the buffer from the getFunctions function if(!transfer) { fgets(buffer, max_buffer, optStream); } transfer = false; // if we reach an empty line we are at the next function if(strlen(buffer) < 5) { currentFunction = -1; } else if(currentFunction == -1) { // check if we have the null function. If we do, call get functions to create a function list if(strstr(buffer, "null function")) { getFunctions(); fgets(buffer, max_buffer, optStream); } // tokenize the function being called strtok(buffer, "'"); functionBuffer = strtok(NULL, "'"); // find the function's id and use to populate the right functiongraph if(functionBuffer != NULL) { // find the node in the binary tree BinaryNode *function = binaryTree.Find(string(functionBuffer)); // if the function does not exist in the binary tree add it in if(function == NULL) { function = new BinaryNode(string(functionBuffer), callGraph->numberOfFunctions); binaryTree.insert(function); callGraph->numberOfFunctions++; names.push_back(string(functionBuffer)); fg.resize(callGraph->numberOfFunctions); callCount.push_back(0); } currentFunction = function->ID; } } else { strtok(buffer, "'"); functionBuffer = strtok(NULL, "'"); // if we have a function add it to the function graph if( functionBuffer != NULL) { BinaryNode *function = binaryTree.Find(string(functionBuffer)); // if the function is not in the binary tree, add it if(function == NULL) { function = new BinaryNode(string(functionBuffer), callGraph->numberOfFunctions); binaryTree.insert(function); callGraph->numberOfFunctions++; names.push_back(string(functionBuffer)); fg.resize(callGraph->numberOfFunctions); callCount.push_back(0); } pair<set<int>::iterator, bool> result = fg[currentFunction].insert(function->ID); // increase the call count of 
the function it was added to the function list // if the function was not aded to the list, it is being called multiple times if( result.second) { callCount[function->ID]++; } } } } }
// Format a vector as a brace-wrapped list of double-quoted, streamed items.
// The output keeps a trailing comma, e.g. {1, 2} -> `{ "1","2", }`, and an
// empty vector yields `{  }`.
template <typename T>
string print_array(const vector<T> &values)
{
    ostringstream buf;
    buf << "{ ";
    for (size_t idx = 0; idx < values.size(); ++idx) {
        buf << '"' << values[idx] << "\",";
    }
    buf << " }";
    return buf.str();
}
// reads the null function set and adds the functions in there to our binary tree // this helps with creating the tree faster than reading through all the file and adding missing functions void getFunctions() { char * functionBuffer; // check if get calls saw a null function and passed on the call to getFunctions if(!transfer) { // in case start of file, read till the first call which is typically the null function while (!feof(optStream)) { fgets(buffer, max_buffer, optStream); if(strstr(buffer, "Call") != NULL) { break; } } // make sure we are at the null function, if not return if(strstr(buffer, "null function") == NULL) { transfer = true; return; } } int count = callGraph->numberOfFunctions; list<string> functions; // read through the null function and populate the function list while (!feof(optStream)) { fgets(buffer, max_buffer, optStream); // parse the buffer line into the function being called strtok(buffer, "'"); functionBuffer = strtok(NULL, "'"); // check if we have reached the end of the null function if (functionBuffer == NULL) { break; } // add the function to the binary tree and update the counts if it is a unique add string function(functionBuffer); if(binaryTree.insert(new BinaryNode(function, count))) { functions.push_back(function); count++; } } callGraph->numberOfFunctions = count; // update the call graph object fg.resize(count); // update size of the function graph // update the necessary lists list<string>::iterator it = functions.begin(); for(int i = 0; i < count; i++) { names.push_back(*it); callCount.push_back(0); it++; } }
// Combination-sum entry point: sorts the candidate numbers in place
// (ascending order lets the recursive search prune and handle duplicates)
// and delegates to dfs2, collecting every combination that sums to `target`.
// NOTE(review): dfs2 and `result` are defined elsewhere in this file;
// presumably dfs2 appends into `result` starting from candidate index 0, and
// `result` is expected to be empty before this call — confirm at call sites.
// Side effect: `nums` is reordered for the caller.
vector<vector<int> > combinationSum(vector<int> &nums, int target) {
    sort(nums.begin(), nums.end()); // in-place sort; mutates the caller's vector
    dfs2(nums, target, 0);
    return result;
}
// Reset this disjoint-set/union-find style structure for n elements:
// every element becomes its own representative, the change journal is
// emptied, and the component count starts at n.
// NOTE(review): forn(i,n) is a project macro, presumably `for (i = 0; i < n; ++i)`
// — confirm its definition before relying on the iteration bounds.
void init(int n) {
    this->n = n;
    forn(i,n) pre[i] = i; // each element initially points to itself
    changes.clear();      // drop any recorded history from a previous run
    comp = n;             // n singleton components to start with
}
// One pass of pattern detection: extract features from `image`, match them
// against the stored pattern descriptors, estimate a homography H, and filter
// the matches through several geometric sanity checks.
//
// Outputs (filled on success): matched_features (scene points), pattern_points
// (their 3D counterparts), H, scene_corners (pattern outline projected into
// the scene). `output`, when requested, receives a visualization of the
// matches with the projected outline drawn in green.
// Returns true iff good matches survive all filters.
// NOTE(review): relies on members set up during initialization (detector,
// descriptorExtractor, descriptorMatcher, descriptor, points3d, keypoints,
// obj_corners, img_roi) — this function only guards on `initialized`.
bool CustomPattern::findPatternPass(const Mat& image, vector<Point2f>& matched_features,
                                    vector<Point3f>& pattern_points, Mat& H,
                                    vector<Point2f>& scene_corners, const double pratio,
                                    const double proj_error, const bool refine_position,
                                    const Mat& mask, OutputArray output)
{
    if (!initialized) {return false; }
    matched_features.clear();
    pattern_points.clear();

    vector<vector<DMatch> > matches;
    vector<KeyPoint> f_keypoints;
    Mat f_descriptor;

    detector->detect(image, f_keypoints, mask);
    if (refine_position) refineKeypointsPos(image, f_keypoints);

    descriptorExtractor->compute(image, f_keypoints, f_descriptor);
    descriptorMatcher->knnMatch(f_descriptor, descriptor, matches, 2); // k = 2;

    vector<DMatch> good_matches;
    vector<Point2f> obj_points;

    // Lowe's ratio test: keep a match only when the best distance is clearly
    // smaller than the second best (ratio < pratio).
    for(int i = 0; i < f_descriptor.rows; ++i)
    {
        if(matches[i][0].distance < pratio * matches[i][1].distance)
        {
            const DMatch& dm = matches[i][0];
            good_matches.push_back(dm);
            // "keypoints1[matches[i].queryIdx] has a corresponding point in
            //  keypoints2[matches[i].trainIdx]"
            matched_features.push_back(f_keypoints[dm.queryIdx].pt);
            pattern_points.push_back(points3d[dm.trainIdx]);
            obj_points.push_back(keypoints[dm.trainIdx].pt);
        }
    }
    if (good_matches.size() < MIN_POINTS_FOR_H) return false;

    Mat h_mask;
    H = findHomography(obj_points, matched_features, RANSAC, proj_error, h_mask);
    if (H.empty())
    {
        // cout << "findHomography() returned empty Mat." << endl;
        return false;
    }

    // Drop RANSAC outliers (h_mask entry == 0).
    // NOTE(review): deleteStdVecElem is defined elsewhere; the index `i` is
    // not adjusted after a deletion, and h_mask is indexed by the shifting
    // vector position — confirm deleteStdVecElem's semantics make this sound.
    for(unsigned int i = 0; i < good_matches.size(); ++i)
    {
        if(!h_mask.data[i])
        {
            deleteStdVecElem(good_matches, i);
            deleteStdVecElem(matched_features, i);
            deleteStdVecElem(pattern_points, i);
        }
    }
    if (good_matches.empty()) return false;

    // check_matches may prune further; bail out if it removed everything or
    // (defensively) if the count somehow grew.
    size_t numb_elem = good_matches.size();
    check_matches(matched_features, obj_points, good_matches, pattern_points, H);
    if (good_matches.empty() || numb_elem < good_matches.size()) return false;

    // Get the corners from the image
    scene_corners = vector<Point2f>(4);
    perspectiveTransform(obj_corners, scene_corners, H);

    // Check correctness of H
    // Is it a convex hull?
    bool cConvex = isContourConvex(scene_corners);
    if (!cConvex) return false;

    // Is the hull too large or small?
    double scene_area = contourArea(scene_corners);
    if (scene_area < MIN_CONTOUR_AREA_PX) return false;
    double ratio = scene_area/img_roi.size().area();
    if ((ratio < MIN_CONTOUR_AREA_RATIO) || (ratio > MAX_CONTOUR_AREA_RATIO)) return false;

    // Is any of the projected points outside the hull?
    // NOTE(review): same erase-while-iterating pattern as above.
    for(unsigned int i = 0; i < good_matches.size(); ++i)
    {
        if(pointPolygonTest(scene_corners, f_keypoints[good_matches[i].queryIdx].pt, false) < 0)
        {
            deleteStdVecElem(good_matches, i);
            deleteStdVecElem(matched_features, i);
            deleteStdVecElem(pattern_points, i);
        }
    }

    if (output.needed())
    {
        Mat out;
        drawMatches(image, f_keypoints, img_roi, keypoints, good_matches, out);
        // Draw lines between the corners (the mapped object in the scene - image_2 )
        line(out, scene_corners[0], scene_corners[1], Scalar(0, 255, 0), 2);
        line(out, scene_corners[1], scene_corners[2], Scalar(0, 255, 0), 2);
        line(out, scene_corners[2], scene_corners[3], Scalar(0, 255, 0), 2);
        line(out, scene_corners[3], scene_corners[0], Scalar(0, 255, 0), 2);
        out.copyTo(output);
    }

    return (!good_matches.empty()); // return true if there are enough good matches
}
// Journaled assignment: records an entry in `changes` before overwriting l
// with r, then returns the new value.
// NOTE(review): `changes` and `eb` (presumably an emplace_back shorthand) are
// defined elsewhere; the record is built from (l, l) — likely a
// (reference/slot, old value) pair for later rollback — confirm against the
// code that replays `changes`.
int assign(int &l, int r) {
    changes.eb(l,l); // log the current value before mutation
    return l = r;
}
// Convert a string of decimal digit characters into their integer values,
// appending to v (v is not cleared first), e.g. "123" -> {1, 2, 3}.
// Assumes s contains only '0'..'9'; other characters would produce
// out-of-range values, exactly as before.
void stov(const string& s, vector<int>& v) {
    // reserve up front: one allocation instead of amortized growth
    v.reserve(v.size() + s.size());
    // range-for fixes the old signed/unsigned comparison (int i < s.size())
    for (char c : s)
        v.push_back(c - '0');
}
// Parse a bracketed TTE token stream into a Tree of TreeNodes.
// Token forms handled: "(LABEL" opens a node and descends; ")" closes the
// current node (inserting blankLabel if it closed with no children);
// "xN:LABEL" is a labeled state leaf; "xN" is an unlabeled state leaf;
// anything else is a plain word leaf. tree_index hands out negative ids.
// Returns an empty tree for an empty token list.
Tree Transformer::getTTETree(const vector<string> tokens)
{
    Tree tr;
    if (tokens.empty()) return tr; // guard (resolves the old TODO): nothing to parse

    unsigned int i = 0;
    int tree_index = -1;
    Tree::pre_order_iterator it = tr.begin();

    // root: first token is "(LABEL"; strip the opening bracket
    string strippedToken = tokens[i].substr(1);
    it = tr.insert(it, TreeNode(strippedToken, tree_index--));
    i++;

    bool justCreated = false; // true right after opening a node that has no children yet
    string token;
    string label;
    int sibling_index;
    // BUG FIX: was `unsigned int`, which truncates string::npos on 64-bit
    // platforms, so `div_index != string::npos` was always true and unlabeled
    // tokens ("x0", "John") were mis-parsed through the labeled branch.
    string::size_type div_index;

    for (; i < tokens.size(); i++)
    {
        token = tokens[i];
        if (token[0] == '(')
        {
            // opening bracket: append child and move `it` down into it
            strippedToken = token.substr(1);
            it = tr.append_child(it, TreeNode(strippedToken, tree_index--));
            justCreated = true;
        }
        else if (token == ")")
        {
            if (justCreated)
            {
                // node closed immediately after opening: give it a blank label
                tr.append_child(it, TreeNode(blankLabel, tree_index--));
                justCreated = false;
            }
            else
            {
                it = tr.parent(it); // ascend back to the parent
            }
        }
        else
        {
            // leaf token: append a child but don't move `it` down
            div_index = token.find(":");
            if (div_index != string::npos) // has label (e.g., "x0:NN")
            {
                if (token[0] == 'x')
                {
                    sibling_index = atoi(token.substr(1, div_index - 1).c_str());
                    label = token.substr(div_index + 1);
                    tr.append_child(it, TreeNode(label, tree_index--, sibling_index));
                }
                else
                {
                    printWarning("token found with label but with no state");
                    continue;
                }
            }
            else // does not have label (e.g., "x0")
            {
                if (token[0] == 'x') // has state (e.g., "x0")
                {
                    sibling_index = atoi(token.substr(1).c_str());
                    tr.append_child(it, TreeNode("", tree_index--, sibling_index));
                }
                else // does not have state (e.g., "John")
                {
                    tr.append_child(it, TreeNode(token, tree_index--));
                }
            }
            justCreated = false;
        }
    }
    return tr;
}