STDMETHODIMP CamShiftTracker::Process(IplImage *image)
{
    HRESULT hr = MatchFormat(image, &m_image_format);
    if (FAILED(hr))
        return hr;

    if (m_calibrate > 0)
    {
        // Calibration phase: sample a small centered rectangle and build the color histogram from it.
        CvRect rect = cvRect(image->width*0.47, image->height*0.47,
                             image->width*0.06, image->height*0.07);
        cvRectangle(image, cvPoint(rect.x, rect.y),
                    cvPoint(rect.x + rect.width, rect.y + rect.height), 0xffffff, 1);
        set_window(rect);
        update_histogram(static_cast<CvImage *>(image));
        m_calibrate--;
    }
    //else
    {
        // Tracking runs on every frame (the "else" is intentionally commented out).
        track_object(static_cast<CvImage *>(image));
        CvRect rect = get_window();
        CvPoint center = cvPoint(rect.x + rect.width/2, rect.y + rect.height/2);
        DrawCross(image, center);
        cvRectangle(image, cvPoint(rect.x, rect.y),
                    cvPoint(rect.x + rect.width, rect.y + rect.height), 0xffffff, 1);
    }

    return NOERROR;
}
int main()
{
    int i;
    char line[73];
    int max_occurence;
    HistElement vertical_hist[27];

    /* One histogram slot per uppercase letter ('A' == 65). */
    for (i = 0; i < 26; i++)
    {
        vertical_hist[i].alphabet = (char)(i + 65);
        vertical_hist[i].num = 0;
    }

    /* Read four input lines and accumulate letter counts. */
    for (i = 0; i < 4; i++)
    {
        fgets(line, sizeof(line), stdin);   /* was fgets(line, 75, ...), which overruns the 73-byte buffer */
        update_histogram(vertical_hist, line);
    }

    max_occurence = find_max_occurence(vertical_hist);
    print_vertical_hist(vertical_hist, max_occurence);
    return 0;
}
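/*
 * A minimal sketch (assumed, not the original source) of the HistElement type and the
 * helpers the program above relies on: update_histogram() counts uppercase letters in a
 * line, find_max_occurence() returns the largest count, and print_vertical_hist() draws
 * the histogram column by column.  Names and signatures are inferred from the call sites.
 */
#include <stdio.h>
#include <ctype.h>

typedef struct {
    char alphabet;  /* the letter this slot counts */
    int  num;       /* occurrences seen so far */
} HistElement;

void update_histogram(HistElement *hist, const char *line)
{
    for (; *line != '\0'; line++)
        if (isupper((unsigned char)*line))
            hist[*line - 'A'].num++;
}

int find_max_occurence(const HistElement *hist)
{
    int i, max = 0;
    for (i = 0; i < 26; i++)
        if (hist[i].num > max)
            max = hist[i].num;
    return max;
}

void print_vertical_hist(const HistElement *hist, int max_occurence)
{
    int row, i;
    /* Print rows from the tallest down; '*' marks letters whose count reaches the row. */
    for (row = max_occurence; row >= 1; row--) {
        for (i = 0; i < 26; i++)
            putchar(hist[i].num >= row ? '*' : ' ');
        putchar('\n');
    }
    for (i = 0; i < 26; i++)
        putchar(hist[i].alphabet);
    putchar('\n');
}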
static void* update_thread(void* ptr)
{
    int j = 0;
    /* j is never changed, so this polling loop runs for the lifetime of the thread. */
    while (j == 0)
    {
        /* Send the current mode, wait, then read the status back and fold it into the histogram. */
        send_status_packet(cp_ptr->mode, kd_ptr);
        sleep(1);
        get_status_packet(sp_ptr);
        update_histogram(sp_ptr);
        //sem_post(sp_sem);
        sleep(10);
    }
    return NULL;  /* not reached; keeps the void* signature well-formed */
}
void update_slice_hist(void)
{
    int i;

    for (i = 0; i < nsliceinfo; i++)
    {
        slice *slicei;
        int unit1;
        FILE_SIZE lenfile;
        int error1;
        float slicetime1, *sliceframe;
        int sliceframesize;
        int is1, is2, js1, js2, ks1, ks2;
        int testslice;

        slicei = sliceinfo + i;

        /* Skip slices that another thread is already processing. */
        LOCK_SLICE_BOUND;
        if (slicei->inuse_getbounds == 1)
        {
            UNLOCK_SLICE_BOUND;
            continue;
        }
        slicei->inuse_getbounds = 1;
        UNLOCK_SLICE_BOUND;

        PRINTF(" Examining %s\n", slicei->file);
        lenfile = strlen(slicei->file);

        LOCK_COMPRESS;
        FORTget_file_unit(&unit1, &slicei->unit_start);
        FORTopenslice(slicei->file, &unit1, &is1, &is2, &js1, &js2, &ks1, &ks2, &error1, lenfile);
        UNLOCK_COMPRESS;

        sliceframesize = (is2 + 1 - is1)*(js2 + 1 - js1)*(ks2 + 1 - ks1);
        NewMemory((void **)&sliceframe, sliceframesize*sizeof(float));

        /* Read the slice file frame by frame and accumulate each frame into the histogram. */
        init_histogram(slicei->histogram);
        testslice = 0;
        while (error1 == 0)
        {
            FORTgetsliceframe(&unit1, &is1, &is2, &js1, &js2, &ks1, &ks2,
                              &slicetime1, sliceframe, &testslice, &error1);
            update_histogram(sliceframe, sliceframesize, slicei->histogram);
        }
        FREEMEMORY(sliceframe);

        LOCK_COMPRESS;
        FORTclosefortranfile(&unit1);
        UNLOCK_COMPRESS;
    }
}
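/*
 * A minimal sketch (an assumption, not the actual library code) of the histogram type and
 * the init_histogram()/update_histogram() calls used above: one frame of slice data is
 * mapped onto equal-width buckets over a fixed value range.  The field names, bucket
 * count, and fixed range are hypothetical.
 */
#define NBUCKETS 256

typedef struct {
    float valmin, valmax;     /* value range covered by the buckets */
    int   buckets[NBUCKETS];  /* per-bucket counts */
} histogramdata;

void init_histogram(histogramdata *h)
{
    h->valmin = 0.0f;
    h->valmax = 1.0f;
    for (int i = 0; i < NBUCKETS; i++)
        h->buckets[i] = 0;
}

void update_histogram(const float *vals, int nvals, histogramdata *h)
{
    for (int i = 0; i < nvals; i++)
    {
        /* Map the value onto a bucket index and clamp it to the valid range. */
        float t = (vals[i] - h->valmin) / (h->valmax - h->valmin);
        int bucket = (int)(t * (NBUCKETS - 1));
        if (bucket < 0) bucket = 0;
        if (bucket > NBUCKETS - 1) bucket = NBUCKETS - 1;
        h->buckets[bucket]++;
    }
}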
void build_distribution_json_data(std::string& file, Json::Value& nfb_histogram, Json::Value& nfb_histogram_metadata)
{
    std::cout << "Starting Mining: " << file << "\n\n";
    std::map<double, int> raw_nfb_histogram_map = mine_file(file);

    std::cout << "Calculating Mean " << "\n";
    double data_mean = mean_from_histogram_map(raw_nfb_histogram_map);
    std::cout << "Calculating STDV " << "\n";
    double data_stdv = calculate_stdv_from_histogram_map(raw_nfb_histogram_map, data_mean);
    std::cout << "Calculating Skew " << "\n";
    double data_skew = calculate_skewness_from_histogram_map(raw_nfb_histogram_map, data_mean, data_stdv);
    std::cout << "Calculating Kurtosis " << "\n";
    double data_kurt = calculate_kurtosis_from_histogram_map(raw_nfb_histogram_map, data_mean, data_stdv);

    std::cout << "Normalizing Data & Updating Histogram " << "\n";
    std::map<double, int> normed_hist_map = update_histogram(raw_nfb_histogram_map, data_mean, data_stdv);

    std::cout.precision(10);

    /* loop over the map twice to save on memory allocation */
    std::cout << "Saving Data " << '\n';
    Json::Value nfb_histogram_file;
    for (auto iter : normed_hist_map)
        nfb_histogram_file[std::to_string(iter.first)] = iter.second;

    std::vector<std::string> file_strings;
    boost::split(file_strings, file, boost::is_any_of("/"));
    save_data("nfb_histogram_data/nfb_histogram_" + file_strings[file_strings.size()-1] + ".json", nfb_histogram_file);
    nfb_histogram_file.clear();

    // Merge this file's counts into the aggregate histogram.
    for (auto iter : normed_hist_map)
    {
        if (nfb_histogram.get(std::to_string(iter.first), false) == false)
            nfb_histogram[std::to_string(iter.first)] = iter.second;
        else
            nfb_histogram[std::to_string(iter.first)] = nfb_histogram[std::to_string(iter.first)].asInt() + iter.second;
    }
    normed_hist_map.clear();
    save_data("nfb_histogram_data/nfb_histogram.json", nfb_histogram);

    nfb_histogram_metadata[file]["mean"] = data_mean;
    nfb_histogram_metadata[file]["stdv"] = data_stdv;
    nfb_histogram_metadata[file]["skew"] = data_skew;
    nfb_histogram_metadata[file]["kurt"] = data_kurt;
    save_data("nfb_histogram_data/nfb_histogram_metadata.json", nfb_histogram_metadata);

    std::cout << "Done: " << file << "\n\n";
}
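// A minimal sketch (an assumption inferred from the call site above, not the original
// implementation) of update_histogram(raw_map, mean, stdv): it re-keys each bin by the
// z-score (value - mean) / stdv and merges counts that land on the same normalized key.
#include <map>
#include <cmath>

std::map<double, int> update_histogram(const std::map<double, int>& raw,
                                       double mean, double stdv)
{
    std::map<double, int> normed;
    for (const auto& bin : raw)
    {
        // Standardize the bin value; round to a fixed precision so nearby
        // normalized values collapse into one bin.
        double z = (bin.first - mean) / stdv;
        double key = std::round(z * 1000.0) / 1000.0;
        normed[key] += bin.second;
    }
    return normed;
}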
int main(int argc, char *argv[])
{
    if (argc < 9)
    {
        printf("Usage: %s <N> <rho> <T> <rc> <dt> <nequil> <nproduct> <binwidth>\n", argv[0]);
        printf("\t<N> Number of particles\n");
        printf("\t<rho> Particle density\n");
        printf("\t<T> Temperature\n");
        printf("\t<rc> Lennard-Jones cut-off radius\n");
        printf("\t<dt> Length of one timestep\n");
        printf("\t<nequil> Number of equilibration timesteps to be performed\n");
        printf("\t<nproduct> Number of production timesteps to be performed\n");
        printf("\t<binwidth> Width of a bin for the correlation length histogram\n");
        exit(EXIT_SUCCESS);
    }

    outGr = fopen("outGr.txt", "w+");
    outTrajectories = fopen("outTrajectories.txt", "w+");
    outAverages = fopen("outAverages.txt", "w+");

    // Parse command-line parameters
    N = atoi(argv[1]);
    rho = atof(argv[2]);
    T = atof(argv[3]);
    rc = atof(argv[4]);
    rc2 = rc*rc;
    dt = atof(argv[5]);
    nequil = atoi(argv[6]);     // step counts are integers (printed with %d below)
    nproduct = atoi(argv[7]);
    nt = nequil + nproduct;
    double tequil = nequil*dt;
    double tproduct = nproduct*dt;
    tmax = nt*dt;

    // Calculate corresponding system parameters
    lx = ly = sqrt((double)N/rho);

    printf("========== PARAMETERS ==========\n");
    printf("Particles:\t\t%d\n", N);
    printf("Density:\t\t%g\n", rho);
    printf("Simulation box lx:\t%g\n", lx);
    printf("Simulation box ly:\t%g\n", ly);
    printf("Temperature:\t\t%g\n", T);
    printf("Timestep length:\t%g\n", dt);
    printf("Equilibration steps:\t%d\n", nequil);
    printf("Production steps:\t%d\n", nproduct);
    printf("Total steps:\t%d\n", nt);
    printf("Equilibration time:\t%g\n", tequil);
    printf("Production time:\t%g\n", tproduct);
    printf("Total time:\t\t%g\n", tmax);
    printf("================================\n\n");

    printf("======== INIT PARTICLES ========\n");
    // Initialize arrays for particle positions & velocities
    r = new TVector[N];
    r_next = new TVector[N];
    v = new TVector[N];
    v_next = new TVector[N];

    // Put all particles equally spaced in the box and assign random velocities
    int nrows = sqrt(N);
    double dlx = lx/(double)nrows;
    double dly = ly/(double)nrows;
    vsum = .0;
    vsum2 = 0;
    for (int i = 0; i < N; i++)
    {
        // Positions
        r[i].x = i%nrows*dlx + 0.5;
        r[i].y = floor(i/nrows)*dly + 0.5;
        // Velocities
        v[i].x = rand_value(-1., 1.);
        v[i].y = rand_value(-1., 1.);
        vsum += v[i];
        vsum2 += v[i].x*v[i].x + v[i].y*v[i].y;
    }
    printf("Center of mass velocity after initialization:\t(%g,%g)\n", vsum.x, vsum.y);
    printf("Kinetic energy after initialization:\t\t%g\n", vsum2);
    printf("Instantaneous temperature after initialization:\t%g\n", vsum2/(2.0*(double)N));

    // Calculate average velocities
    vsum = vsum/(double)N;

    // Scale factor for velocities to match the desired temperature (we neglect the fact
    // that the whole system with constrained center of mass has only (2N - 2) degrees
    // of freedom and approximate (2N - 2) \approx 2N, since we won't run the simulation
    // with fewer than N = 100 particles.)
    double fs = sqrt(2.0*(double)N*T/vsum2);
    printf("Scaling factor for velocities:\t\t\t%g\n", fs);

    TVector vsumcheck;
    vsumcheck = .0;
    vsum2 = 0;
    for (int i = 0; i < N; i++)
    {
        v[i] = (v[i] - vsum)*fs;
        vsumcheck += v[i];
        vsum2 += v[i].x*v[i].x + v[i].y*v[i].y;
    }
    printf("Center of mass velocity after scaling:\t\t(%g,%g)\n", vsumcheck.x, vsumcheck.y);
    printf("Kinetic energy after scaling:\t\t\t%g\n", vsum2);
    printf("Instantaneous temperature after scaling:\t%g\n", vsum2/(2.0*(double)N));
    print_coords("outCoords_start.txt");
    printf("================================\n\n");

    printf("======== INIT POTENTIAL ========\n");
    // Init the potential
    init_lj_shift();
    F = new TVector[N];
    printf("Potential initialized.\n");
    printf("U(r_c)\t\t= %g\n", u_lj_shift);
    printf("U'(r_c)\t\t= %g\n", u_lj_deriv_shift);
    printf("U_s(r_c)\t= %g\n", u_lj_shifted(rc2));
    printf("U'_s(r_c)\t= %g\n", u_lj_deriv_shifted(rc2));
    printf("================================\n\n");

    printf("======== INIT AVERAGERS ========\n");
    avg_temp.init();
    avg_epot.init();
    avg_ekin.init();
    avg_etot.init();
    avg_vir.init();
    printf("Averagers initialized!\n");

    // Histogram for the pair correlation function
    binwidth = atof(argv[8]);      // Width of a histogram bin
    nbins = ceil(grmax/binwidth);  // Maximum correlation length to be measured should be L/2 due to periodic BC
    bincount = 0;                  // Number of counts done on the histogram
    hist = new int[nbins];
    for (int i = 0; i < nbins; i++)
    {
        hist[i] = 0;
    }
    printf("Using histogram with %d bins of width %f\n", nbins, binwidth);
    printf("================================\n\n");

    printf("======= START INTEGRATION ======\n");
    t = 0;
    fprintf(outTrajectories, "#t\tn\tr_x\t\tr_y\t\tv_x\t\tv_y\n");
    fprintf(outAverages, "#t\tT(t)\t\t<T(t)>\t\tE_tot(T)\t\t<E_tot(T)>\n");
    for (int n = 0; n <= nt; n++)
    {
        if (n == 0)
        {
            printf("Equilibration phase started.\n");
        }
        if (n == nequil)
        {
            printf("Production phase started.\n");
        }

        // Current time
        t = dt*n;
        if (debug) printf("t:\t%6.3f\t\n", t);

        // Calculate all forces
        forces();

        vsum = .0;
        vsum2 = .0;
        // Update all particles
        for (int i = 0; i < N; i++)
        {
            // Perform leap-frog integration
            v_next[i] = v[i] + F[i]*dt;
            r_next[i] = r[i] + v_next[i]*dt;
            // Calculate energies
            vsum += v_next[i];
            // vsum2 += v[i].x*v[i].x + v[i].y*v[i].y;  // naive version
            vsum2 += pow(v_next[i].x + v[i].x, 2)/4.0 + pow(v_next[i].y + v[i].y, 2)/4.0;  // as suggested by Frenkel/Smit
            // Update particle coordinates
            v[i] = v_next[i];
            r[i] = r_next[i];
            // Write trajectories to a file
            // fprintf(outTrajectories, "%6.3f\t%6d\t%e\t%e\t%e\t%e\n", t, i, r[i].x, r[i].y, v[i].x, v[i].y);
        }

        // Equilibration phase: scale velocities to keep the temperature
        if (n < nequil)
        {
            // Rescale velocities every 10 timesteps
            if (n%10 == 0)
            {
                scale_velocities();
            }
        }
        else if (n%nsamp == 0)
        {
            // Production phase: sample averages and the g(r) histogram
            double Tt = vsum2/(2.0*(double)N);
            avg_temp.add(Tt);
            avg_epot.add(epot);
            avg_vir.add(virial);
            double ekin = 0.5*vsum2;
            avg_ekin.add(ekin);
            double etot = (epot + ekin);
            avg_etot.add(etot);
            update_histogram();
            fprintf(outAverages, "%6.3f\t%e\t%e\t%e\t%e\n", t, Tt, avg_temp.average(), etot, avg_etot.average());
        }

        if ((n+1)%(nt/10) == 0 || n == 0)
        {
            printf("Finished %5d (t = %5.1f) out of %d (t = %g) timesteps: %3.f %% <T> = %g\n",
                   n+1, t, nt, tmax, (double)n/(double)nt*100, avg_temp.average());
        }

        if (debug) printf("\n");
    }
    printf("================================\n\n");

    print_coords("outCoords_end.txt");

    printf("Printing histogram for g(r) & calculating pressure\n");
    fprintf(outGr, "#r\tg(r)\n");
    double p = 0;  // Pressure
    for (int i = 0; i < nbins; i++)
    {
        double R = i*binwidth;
        double area = 2.0*PI*R*binwidth;
        // Multiply g(r) by two, since in the histogram we only counted each pair once, but each pair
        // gives two contributions to g(r)
        double gr = 2.0*(double)hist[i]/(rho*area*(double)bincount*N);
        fprintf(outGr, "%f\t%f\n", R, gr);
        // Calculate other quantities from g(r)
        if (R > 0 && R < rc)
        {
            double r6i = pow(1.0/R, 6);
            p += gr*2*PI*rho*rho*R*R*48*(r6i*r6i - 0.5*r6i)*binwidth/2;
        }
    }
    p = p + rho*avg_temp.average();
    printf("Final pressure P(%g) = %g\n", rho, p);

    FILE *outAvgFinal;
    outAvgFinal = fopen("outAvgFinal.txt", "w+");
    fprintf(outAvgFinal, "#t\t<T(t)>\t\t<E_tot(T)>\trho\t\t1/rho\t\tp\n");
    fprintf(outAvgFinal, "%6.3f\t%e\t%e\t%e\t%e\t%e\n", t, avg_temp.average(), avg_etot.average(), rho, 1.0/rho, p);
    fclose(outAvgFinal);
    outAvgFinal = NULL;

    delete [] r;
    delete [] r_next;
    delete [] v;
    delete [] v_next;
    delete [] F;
    r = r_next = v = v_next = F = NULL;

    // Close file pointers
    fclose(outGr);
    outGr = NULL;
    fclose(outTrajectories);
    outTrajectories = NULL;
    fclose(outAverages);
    outAverages = NULL;

    exit(EXIT_SUCCESS);
}
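// A minimal sketch (an assumption, not the original code) of the parameterless
// update_histogram() sampled in the production phase above: it bins all pair distances
// (with minimum-image periodic boundaries) into hist[] and increments bincount, which the
// g(r) normalization at the end of main() relies on.  It uses the globals N, r, lx, ly,
// grmax, binwidth, hist, and bincount declared elsewhere in this program.
void update_histogram()
{
    for (int i = 0; i < N - 1; i++)
    {
        for (int j = i + 1; j < N; j++)
        {
            // Minimum-image separation in the periodic box
            double dx = r[i].x - r[j].x;
            double dy = r[i].y - r[j].y;
            dx -= lx*round(dx/lx);
            dy -= ly*round(dy/ly);
            double dist = sqrt(dx*dx + dy*dy);

            // Count each pair once; the factor of two is applied when g(r) is printed
            if (dist < grmax)
                hist[(int)(dist/binwidth)]++;
        }
    }
    bincount++;  // number of samples taken, used to normalize g(r)
}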
static void
despeckle_median (guchar   *src,
                  guchar   *dst,
                  gint      width,
                  gint      height,
                  gint      bpp,
                  gint      radius,
                  gboolean  preview)
{
  guint  progress;
  guint  max_progress;
  gint   x, y;
  gint   input_radius = radius;
  gint   pos;
  gint   ymin;
  gint   ymax;
  gint   xmin;
  gint   xmax;

  memset (&histogram, 0, sizeof (histogram));
  progress     = 0;
  max_progress = width * height;

  if (! preview)
    gimp_progress_init (_("Despeckle"));

  for (y = 0; y < height; y++)
    {
      x = 0;
      ymin = MAX (0, y - radius);
      ymax = MIN (height - 1, y + radius);
      xmin = MAX (0, x - radius);
      xmax = MIN (width - 1, x + radius);

      hist0    = 0;
      histrest = 0;
      hist255  = 0;

      histogram_clean (&histogram);
      histogram.xmin = xmin;
      histogram.ymin = ymin;
      histogram.xmax = xmax;
      histogram.ymax = ymax;
      add_vals (&histogram, src, width, bpp,
                histogram.xmin, histogram.ymin,
                histogram.xmax, histogram.ymax);

      for (x = 0; x < width; x++)
        {
          const guchar *pixel;

          ymin = MAX (0, y - radius); /* update ymin, ymax when radius changed (FILTER_ADAPTIVE) */
          ymax = MIN (height - 1, y + radius);
          xmin = MAX (0, x - radius);
          xmax = MIN (width - 1, x + radius);

          update_histogram (&histogram, src, width, bpp,
                            xmin, ymin, xmax, ymax);

          pos = (x + (y * width)) * bpp;
          pixel = histogram_get_median (&histogram, src + pos);

          if (filter_type & FILTER_RECURSIVE)
            {
              del_val (&histogram, src, width, bpp, x, y);
              pixel_copy (src + pos, pixel, bpp);
              add_val (&histogram, src, width, bpp, x, y);
            }

          pixel_copy (dst + pos, pixel, bpp);

          /*
           * Check the histogram and adjust the diameter accordingly...
           */
          if (filter_type & FILTER_ADAPTIVE)
            {
              if (hist0 >= radius || hist255 >= radius)
                {
                  if (radius < input_radius)
                    radius++;
                }
              else if (radius > 1)
                {
                  radius--;
                }
            }
        }

      progress += width;

      if (! preview && y % 32 == 0)
        gimp_progress_update ((gdouble) progress / (gdouble) max_progress);
    }

  if (! preview)
    gimp_progress_update (1.0);
}
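/*
 * Not the GIMP implementation: a minimal sketch of the general technique behind
 * histogram_get_median(), i.e. finding a window's median from per-value counts instead
 * of sorting the pixels.  The 256-bin count array and the total element count passed in
 * here are assumptions for illustration.
 */
static unsigned char
histogram_median_value (const int counts[256], int total)
{
  /* Walk the bins until half of the window's pixels have been passed. */
  int half = (total + 1) / 2;
  int seen = 0;

  for (int value = 0; value < 256; value++)
    {
      seen += counts[value];
      if (seen >= half)
        return (unsigned char) value;
    }

  return 255;  /* only reached if the histogram is empty */
}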