Example #1
void make_report(int time, char *workload, int iterations, char *file)
{

	/* one to warm up everything */
	fprintf(stderr, _("Preparing to take measurements\n"));
	utf_ok = 0;
	one_measurement(1, NULL);

	if (!workload[0])
		fprintf(stderr, _("Taking %d measurement(s) for a duration of %d second(s) each.\n"), iterations, time);
	else
		fprintf(stderr, _("Measuring workload %s.\n"), workload);
	for (int i = 0; i != iterations; i++) {
		init_report_output(file, iterations);
		initialize_tuning();
		/* and then the real measurement */
		one_measurement(time, workload);
		report_show_tunables();
		finish_report_output();
		clear_tuning();
	}
	/* and wrap up */
	learn_parameters(50, 0);
	save_all_results("saved_results.powertop");
	save_parameters("saved_parameters.powertop");
	end_pci_access();
	exit(0);
}
Example #2
TradingCycle::TradingCycle(QVector<bid*> *_vector, long _date_start, long _date_end, int w1, int w2, int w3, int w4)
{
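    // keep the bid series and the date range, build the perceptron from the
    // four weights w1..w4, and persist those weights via save_parameters()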
    vector = _vector;
    date_start = _date_start;
    date_end = _date_end;
    p = new Perceptron(_vector, w1, w2, w3, w4);
    deal = NULL;
    _profit = 0;
    save_parameters(w1, w2, w3, w4);
}
Example #3
void OnxxxClicked_SAVE(WM_MESSAGE * pMsg, ESaveState save_state)
{
	int i, j, k;
	LISTVIEW_Handle hObj;
	char buf[10];

	hObj = WM_GetDialogItem(pMsg->hWin, GUI_ID_LISTVIEW_RESULT);
	tget_record.act_record_lenth = LISTVIEW_GetNumRows(hObj);

	/* copy every listview row (order, V1, V2, result, state, time, direction)
	   into tget_record.sig_record[] */
	for (i = 0; i < LISTVIEW_GetNumRows(hObj); i++)
	{
		LISTVIEW_GetItemText(hObj, 0, i, tget_record.sig_record[i].order, 4);
		LISTVIEW_GetItemText(hObj, 1, i, tget_record.sig_record[i].V1, 5);
		LISTVIEW_GetItemText(hObj, 2, i, tget_record.sig_record[i].V2, 5);
		LISTVIEW_GetItemText(hObj, 3, i, tget_record.sig_record[i].result_r_e, 5);
		LISTVIEW_GetItemText(hObj, 4, i, tget_record.sig_record[i].state, 8);
		LISTVIEW_GetItemText(hObj, 5, i, tget_record.sig_record[i].time, 6);
		LISTVIEW_GetItemText(hObj, 6, i, tget_record.sig_record[i].dir, 3);
	}

	if (save_state == RemberToSd)
	{
		/* stamp the record with the current date and persist everything to SD */
		tget_record.year = Tim.year;
		tget_record.month = Tim.month;
		tget_record.date = Tim.date;

		file_clear();
		save_custormer();
		save_parameters();
		save_get_record();
	}
}
Example #4
void file_init(void)
{
#ifndef WIN_SIM
	unsigned int i, bw;
	unsigned char res;
	char men;

	f_mount(0, &fs);                                  /* register the work area with filesystem volume 0 */
	res = f_open(&fsrc, "speed_old.lt", FA_OPEN_ALWAYS | FA_WRITE | FA_READ);

	if (res)
	{
		while (1);                                    /* could not open/create the file: stop here */
	}

	f_close(&fsrc);
	f_mount(0, NULL);

	save_custormer();
	save_parameters();
	save_get_record();
#endif
}
Example #5
void parameter_test(Tree &T, Model &Mod, long Nrep, long length, double eps, std::vector<double> &pvals, std::string data_prefix, bool save_mc_exact){

  long iter;
  long i, r;

  double df, C;
  double distance = 0, KL = 0;
  double likel;


  Parameters Parsim, Par, Par_noperm;
  Alignment align;
  Counts data;

  double eps_pseudo = 0.001;     // Amount added to compute the pseudo-counts.

  StateList sl;

  bool save_data = (data_prefix != "");


  std::string output_filename;
  std::stringstream output_index;
  std::ofstream logfile;
  std::ofstream logdistfile;

  std::ofstream out_chi2;
  std::ofstream out_br;
  std::ofstream out_brPerc;

  std::ofstream out_pvals;
  std::ofstream out_pvals_noperm;
  std::ofstream out_qvals;
  std::ofstream out_bound;
  std::ofstream out_variances;
  std::ofstream out_qvalsComb;
  std::ofstream out_qvalsCombzscore;
  std::ofstream out_covmatrix;

  std::ofstream out_parest;
  std::ofstream out_parsim;

  std::vector<double> KLe;
  std::vector<std::vector<double> > chi2_array; // an array of chi2 for every edge.
  std::vector<std::vector<double> > mult_array; // an array of mult for every edge.
  std::vector<std::vector<double> > br_array; // an array of br. length for every edge.
  std::vector<std::vector<double> > br_arrayPerc; // an array of br. length for every edge.

  std::vector<std::vector<double> > cota_array; // an array of upper bounds of the diff in lengths for every edge.
  std::vector<std::vector<double> > pval_array; // an array of pvals for every edge.
  std::vector<std::vector<double> > pval_noperm_array;
  std::vector<std::vector<double> > qval_array; // an array of qvalues for every edge.
  std::vector<std::vector<double> > variances_array; // an array of theoretical variances.
  std::vector<std::vector<double> > parest_array; // array of estimated parameters
  std::vector<std::vector<double> > parsim_array; // array of simulation parameters

  // ci_binom ci_bin; // confidence interval
  std::vector<std::vector<ci_binom> > CIbinomial; // vector of CIs

  std::list<long> produced_nan;

  long npars = T.nedges*Mod.df + Mod.rdf;
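  // total number of free parameters: Mod.df per edge plus the model's Mod.rdf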

  // Initializing pvals
  pvals.resize(T.nedges);

  // Initialize the parameters for simulation of K81 data for testing
  Par = create_parameters(T);
  Parsim = create_parameters(T);

  // Initializing data structures
  KLe.resize(T.nedges);

  pval_array.resize(T.nedges);
  pval_noperm_array.resize(T.nedges);
  qval_array.resize(T.nedges);
  chi2_array.resize(T.nedges);
  mult_array.resize(T.nedges);
  br_array.resize(T.nedges);
  br_arrayPerc.resize(T.nedges);
  cota_array.resize(T.nedges);
  variances_array.resize(npars);
  parest_array.resize(npars);
  parsim_array.resize(npars);

  // initialize to 0's
  for (i=0; i < T.nedges; i++) {
    pval_array[i].resize(Nrep, 0);
    pval_noperm_array[i].resize(Nrep, 0);
    qval_array[i].resize(Nrep, 0);
    chi2_array[i].resize(Nrep, 0);
    mult_array[i].resize(Nrep, 0);
    br_array[i].resize(Nrep, 0);
    br_arrayPerc[i].resize(Nrep, 0);
    cota_array[i].resize(Nrep, 0);
  }

  for(i=0; i < npars; i++) {
    variances_array[i].resize(Nrep, 0);
    parest_array[i].resize(Nrep, 0);
    parsim_array[i].resize(Nrep, 0);
  }

  // Information about the chi^2.
  df = Mod.df;
  C = get_scale_constant(Mod);


  if (save_data) {
    logfile.open((data_prefix + ".log").c_str(), std::ios::out);
    logfile << "model:  " << Mod.name << std::endl;
    logfile << "length: " << length << std::endl;
    logfile << "eps:    " << eps << std::endl;
    logfile << "nalpha: " << T.nalpha << std::endl;
    logfile << "leaves: " << T.nleaves << std::endl;
    logfile << "tree:   " << T.tree_name << std::endl;
    logfile << std::endl;
    logdistfile.open((data_prefix + ".dist.log").c_str(), std::ios::out);

    out_chi2.open(("out_chi2-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_br.open(("out_br-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_brPerc.open(("out_brPerc-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_pvals.open(("out_pvals-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_pvals_noperm.open(("out_pvals_noperm-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_qvals.open(("out_qvals-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_variances.open(("out_variances-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_parest.open(("out_params-est-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_parsim.open(("out_params-sim-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_bound.open(("out_bound-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_qvalsComb.open(("out_qvalsComb-" + data_prefix + ".txt").c_str(), std::ios::out);
    out_qvalsCombzscore.open(("out_qvalsCombzscore-" + data_prefix + ".txt").c_str(), std::ios::out);

    out_parsim.precision(15);
    out_parest.precision(15);
    out_variances.precision(15);
  }


  // uncomment the two following lines to keep the parameters fixed across all iterations
  // random_parameters_length(T, Mod, Parsim);
  // random_data(T, Mod, Parsim, length, align);

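  // Main loop over replicates: draw random parameters, simulate an alignment,
  // fit the model with EM, then record per-edge chi^2 statistics, p-values,
  // branch-length differences and error bounds.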
  for (iter=0; iter < Nrep; iter++) {
    std::cout << "iteration: " << iter << "             \n";

    // Produces an alignment from random parameters
    random_parameters_length(T, Mod, Parsim);


    random_data(T, Mod, Parsim, length, align);
    get_counts(align, data);
    add_pseudocounts(eps_pseudo, data);

    // Saving data
    if (save_data) {
      output_index.str("");
      output_index << iter;
      output_filename = data_prefix + "-" + output_index.str();
      save_alignment(align, output_filename + ".fa");
      save_parameters(Parsim, output_filename + ".sim.dat");
    }

    // Run the EM; keep its iteration count in a separate variable so the
    // replicate counter 'iter' is not clobbered.
    long em_iters;
    std::tie(likel, em_iters) = EMalgorithm(T, Mod, Par, data, eps);

    // If algorithm returns NaN skip this iteration.
    if (boost::math::isnan(likel)) {
      produced_nan.push_back(iter);
      continue;
    }
    copy_parameters(Par, Par_noperm);

    // Chooses the best permutation.
    guess_permutation(T, Mod, Par);

    distance = parameters_distance(Parsim, Par);

      // estimated counts: Par ; original: Parsim
      std::vector<double> counts_est;
      counts_est.resize(T.nalpha, 0);


      // calculate the cov matrix
      std::vector<std::vector<double> > Cov;
      Array2 Cov_br;

      full_MLE_covariance_matrix(T, Mod, Parsim, length, Cov);

      if(save_data) {
          save_matrix(Cov, output_filename + ".cov.dat");
      }

      // Save the covariances in an array
      std::vector<double> param;
      std::vector<double> param_sim;

      param.resize(npars);
      param_sim.resize(npars);

      get_free_param_vector(T, Mod, Par, param);
      get_free_param_vector(T, Mod, Parsim, param_sim);

      for(i=0; i < npars; i++) {
        variances_array[i][iter] = Cov[i][i];
        parsim_array[i][iter] = param_sim[i];
        parest_array[i][iter] = param[i];
      }

      std::vector<double> xbranca, xbranca_noperm, mubranca;
      double chi2_noperm;
      xbranca.resize(Mod.df);
      xbranca_noperm.resize(Mod.df);
      mubranca.resize(Mod.df);
      for (i=0; i < T.nedges; i++) {
        r = 0; // row to be fixed

        // Extracts the covariance matrix for a single edge
        branch_inverted_covariance_matrix(Mod, Cov, i, Cov_br);
        get_branch_free_param_vector(T, Mod, Parsim, i, mubranca);
        get_branch_free_param_vector(T, Mod, Par, i, xbranca);
        get_branch_free_param_vector(T, Mod, Par_noperm, i, xbranca_noperm);

        chi2_array[i][iter] = chi2_mult(mubranca, xbranca, Cov_br);
        chi2_noperm = chi2_mult(mubranca, xbranca_noperm, Cov_br);

        pval_array[i][iter] = pvalue_chi2(chi2_array[i][iter], Mod.df);
        pval_noperm_array[i][iter] = pvalue_chi2(chi2_noperm, Mod.df);

        br_array[i][iter] = T.edges[i].br - branch_length(Par.tm[i], T.nalpha);
        br_arrayPerc[i][iter] = branch_length(Par.tm[i], T.nalpha) / T.edges[i].br;

        // Upper bound on the parameter distance using the multinomial:
        //   cota_array[i][iter] = bound_mult(Parsim.tm[i], Xm, length);
        // and using the L2 bound:
        cota_array[i][iter] = branch_length_error_bound_mult(Parsim.tm[i], Par.tm[i]);

        out_br << br_array[i][iter] << " ";
        out_brPerc << br_arrayPerc[i][iter] << " ";
        out_bound << cota_array[i][iter] << " ";
        out_chi2 << chi2_array[i][iter] << " ";
      }
      out_chi2 << std::endl;
      out_bound << std::endl;
      out_br << std::endl;
      out_brPerc << std::endl;




    // Saves more data.
    if (save_data) {
      logfile << iter << ": " << distance << "   " << KL << std::endl;
      save_parameters(Par, output_filename + ".est.dat");

      logdistfile << iter << ": ";
      logdistfile << parameters_distance_root(Par, Parsim) << " ";
      for(int j=0; j < T.nedges; j++) {
        logdistfile << parameters_distance_edge(Par, Parsim, j) << " ";
      }
      logdistfile << std::endl;
    }

} // close iter loop here

  // Correct the p-values per edge (BH procedure)
  for(i=0; i < T.nedges; i++) {
    BH(pval_array[i], qval_array[i]);
  }

  if (save_mc_exact) {
    for(long iter=0; iter < Nrep; iter++) {
      for(long i=0; i < T.nedges; i++) {
        out_pvals << pval_array[i][iter] << "  ";
        out_pvals_noperm << pval_noperm_array[i][iter] << "  ";
        out_qvals << qval_array[i][iter] << "  ";
      }
      out_pvals  << std::endl;
      out_pvals_noperm << std::endl;
      out_qvals  << std::endl;

      for(long i=0; i < npars; i++) {
        out_variances << variances_array[i][iter] << "  ";
        out_parsim << parsim_array[i][iter] << "  ";
        out_parest << parest_array[i][iter] << "  ";
      }
      out_variances << std::endl;
      out_parsim << std::endl;
      out_parest << std::endl;
    }
  }

  // now combine the p-values across iterations for each edge
  for(i=0; i < T.nedges; i++) {
    pvals[i] = Fisher_combined_pvalue(pval_array[i]);
    // using the Z-score instead: pvals[i] = Zscore_combined_pvalue(pval_array[i]);
    if (save_mc_exact) {
      out_qvalsComb << pvals[i] << "  ";
      out_qvalsCombzscore << Zscore_combined_pvalue(pval_array[i]) << " ";
    }
  }

  // Close files
  if (save_data) {
    logdistfile.close();
    logfile.close();
  }

  if (save_mc_exact) {
    out_chi2.close();
    out_bound.close();
    out_variances.close();
    out_parest.close();
    out_parsim.close();
    out_br.close();
    out_brPerc.close();

    out_pvals.close();
    out_qvals.close();
    out_qvalsComb.close();
    out_qvalsCombzscore.close();
    out_covmatrix.close();
  }

  // Warn if some EM runs produced NaN.
  if (produced_nan.size() > 0) {
    std::cout << std::endl;
    std::cout << "WARNING: Some iterations produced NaN." << std::endl;
    std::list<long>::iterator it;
    for (it = produced_nan.begin(); it != produced_nan.end(); it++) {
      std::cout << *it << ", ";
    }
    std::cout << std::endl;
  }
}
Example #6
int main(int argc, char **argv)
{
	int option_index;
	int c;
	char filename[4096];
	char workload[4096] = {0,};
	int  iterations = 1, auto_tune = 0;

	set_new_handler(out_of_memory);

	setlocale (LC_ALL, "");
	bindtextdomain (PACKAGE, LOCALEDIR);
	textdomain (PACKAGE);

	while (1) { /* parse commandline options */
		c = getopt_long (argc, argv, "ch:C:i:t:uVw:q", long_options, &option_index);
		/* Detect the end of the options. */
		if (c == -1)
			break;

		switch (c) {
			case 'V':
				print_version();
				exit(0);
				break;

			case 'e': /* Extech power analyzer support */
				checkroot();
				extech_power_meter(optarg ? optarg : "/dev/ttyUSB0");
				break;
			case 'u':
				print_usage();
				exit(0);
				break;
			case 'a':
				auto_tune = 1;
				leave_powertop = 1;
				break;
			case 'c':
				powertop_init();
				calibrate();
				break;

			case 'h': /* html report */
				reporttype = REPORT_HTML;
				sprintf(filename, "%s", optarg ? optarg : "powertop.html" );
				break;

			case 't':
				time_out = (optarg ? atoi(optarg) : 20);
				break;

			case 'i':
				iterations = (optarg ? atoi(optarg) : 1);
				break;

			case 'w': /* measure workload */
				sprintf(workload, "%s", optarg ? optarg : "");
				break;
			case 'q':
				if (!freopen("/dev/null", "a", stderr))
					fprintf(stderr, _("Quiet mode failed!\n"));
				break;

			case 'C': /* csv report*/
				reporttype = REPORT_CSV;
				sprintf(filename, "%s", optarg ? optarg : "powertop.csv");
				break;
			case '?': /* Unknown option */
				/* getopt_long already printed an error message. */
				exit(0);
				break;
		}
	}

	powertop_init();

	if (reporttype != REPORT_OFF)
		make_report(time_out, workload, iterations, filename);

	if (debug_learning)
		printf("Learning debugging enabled\n");

	learn_parameters(250, 0);
	save_parameters("saved_parameters.powertop");


	if (debug_learning) {
	        learn_parameters(1000, 1);
		dump_parameter_bundle();
		end_pci_access();
		exit(0);
	}
	init_display();
	initialize_tuning();
	/* first one is short to not let the user wait too long */
	one_measurement(1, NULL);

	if (!auto_tune) {
		tuning_update_display();
		show_tab(0);
	} else {
		auto_toggle_tuning();
	}

	while (!leave_powertop) {
		show_cur_tab();
		one_measurement(time_out, NULL);
		learn_parameters(15, 0);
	}
	endwin();
	printf("%s\n", _("Leaving PowerTOP"));

	end_process_data();
	clear_process_data();
	end_cpu_data();
	clear_cpu_data();

	save_all_results("saved_results.powertop");
	save_parameters("saved_parameters.powertop");
	learn_parameters(500, 0);
	save_parameters("saved_parameters.powertop");
	end_pci_access();
	clear_tuning();
	reset_display();

	clean_shutdown();

	return 0;
}
Example #7
File: spf.cpp Project: njuhugn/spf
void SPF::learn() {
    double old_likelihood, delta_likelihood, likelihood = -1e10;
    int likelihood_decreasing_count = 0;
    time_t start_time, end_time;

    int iteration = 0;
    char iter_as_str[16]; // "%04d" needs at least 5 bytes including the terminator
    bool converged = false;
    bool on_final_pass = false;

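    // Main inference loop: iterate until the average log likelihood converges,
    // keeps decreasing, or max_iter is reached; optionally finish with a final
    // full pass over (test) users.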
    while (!converged) {
        time(&start_time);
        iteration++;
        printf("iteration %d\n", iteration);

        reset_helper_params();

        // update rate for user preferences
        b_theta.each_col() += sum(beta, 1);

        set<int> items;
        int user = -1, item, rating;
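        // pick users for this batch: walk the test users on the final pass,
        // sample uniformly at random under SVI, otherwise sweep users in order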
        for (int i = 0; i < settings->sample_size; i++) {
            if (on_final_pass && settings->final_pass_test) {
                user++;
                while (data->test_users.count(user)==0) {
                    user++;
                }
            } else if (settings->svi) {
                user = gsl_rng_uniform_int(rand_gen, data->user_count());
            } else {
                user = i;
            }

            bool user_converged = false;
            int user_iters = 0;
            while (!user_converged) {
                user_iters++;
                a_beta_user.zeros();
                a_delta_user.zeros();

                // look at all the user's items
                for (int j = 0; j < data->item_count(user); j++) {
                    item = data->get_item(user, j);
                    items.insert(item);
                    rating = 1;
                    //TODO: rating = data->get_train_rating(i);
                    update_shape(user, item, rating);
                }

                // update per-user parameters
                double user_change = 0;
                if (!settings->factor_only && !settings->fix_influence)
                    user_change += update_tau(user);
                if (!settings->social_only)
                    user_change += update_theta(user);
                if (!settings->social_only && !settings->factor_only && !settings->fix_influence) {
                    user_change /= 2;

                    // if the updates are less than 1% change, the local params have converged
                    if (user_change < 0.01)
                        user_converged = true;

                } else {
                    // if we're only looking at social or factor (not combined)
                    // then the user parameters will always have converged with
                    // a single pass (since there's nothing to balance against)
                    user_converged = true;
                }
            }
            if (settings->verbose)
                printf("%d\tuser %d took %d iters to converge\n", iteration, user, user_iters);
            a_beta += a_beta_user;
            a_delta += a_delta_user;
        }

        if (!settings->social_only) {
            // update rate for item attributes
            b_beta.each_col() += sum(theta, 1);

            // update per-item parameters
            set<int>::iterator it;
            for (it = items.begin(); it != items.end(); it++) {
                item = *it;
                if (iter_count[item] == 0)
                    iter_count[item] = 0;
                iter_count[item]++;
                update_beta(item);
                if (settings->item_bias)
                    update_delta(item);
            }
        } else if (settings->item_bias) {
            set<int>::iterator it;
            for (it = items.begin(); it != items.end(); it++) {
                item = *it;
                if (iter_count[item] == 0)
                    iter_count[item] = 0;
                iter_count[item]++;
                if (settings->item_bias)
                    update_delta(item);
            }
        }


        // check for convergence
        if (on_final_pass) {
            printf("Final pass complete\n");
            converged = true;

            old_likelihood = likelihood;
            likelihood = get_ave_log_likelihood();
            delta_likelihood = abs((old_likelihood - likelihood) /
                                   old_likelihood);
            log_convergence(iteration, likelihood, delta_likelihood);
        } else if (iteration >= settings->max_iter) {
            printf("Reached maximum number of iterations.\n");
            converged = true;

            old_likelihood = likelihood;
            likelihood = get_ave_log_likelihood();
            delta_likelihood = abs((old_likelihood - likelihood) /
                                   old_likelihood);
            log_convergence(iteration, likelihood, delta_likelihood);
        } else if (iteration % settings->conv_freq == 0) {
            old_likelihood = likelihood;
            likelihood = get_ave_log_likelihood();

            if (likelihood < old_likelihood)
                likelihood_decreasing_count += 1;
            else
                likelihood_decreasing_count = 0;
            delta_likelihood = abs((old_likelihood - likelihood) /
                                   old_likelihood);
            log_convergence(iteration, likelihood, delta_likelihood);
            if (settings->verbose) {
                printf("delta: %f\n", delta_likelihood);
                printf("old:   %f\n", old_likelihood);
                printf("new:   %f\n", likelihood);
            }
            if (iteration >= settings->min_iter &&
                    delta_likelihood < settings->likelihood_delta) {
                printf("Model converged.\n");
                converged = true;
            } else if (iteration >= settings->min_iter &&
                       likelihood_decreasing_count >= 2) {
                printf("Likelihood decreasing.\n");
                converged = true;
            }
        }

        // save intermediate results
        if (!converged && settings->save_freq > 0 &&
                iteration % settings->save_freq == 0) {
            printf(" saving\n");
            sprintf(iter_as_str, "%04d", iteration);
            save_parameters(iter_as_str);
        }

        // intermediate evaluation
        if (!converged && settings->eval_freq > 0 &&
                iteration % settings->eval_freq == 0) {
            sprintf(iter_as_str, "%04d", iteration);
            evaluate(iter_as_str);
        }

        time(&end_time);
        log_time(iteration, difftime(end_time, start_time));

        if (converged && !on_final_pass &&
                (settings->final_pass || settings->final_pass_test)) {
            printf("final pass on all users.\n");
            on_final_pass = true;
            converged = false;

            // we need to modify some settings for the final pass
            // things should look exactly like batch for all users
            if (settings->final_pass) {
                settings->set_stochastic_inference(false);
                settings->set_sample_size(data->user_count());
                scale = 1;
            } else {
                settings->set_sample_size(data->test_users.size());
                scale = data->user_count() / settings->sample_size;
            }
        }
    }

    save_parameters("final");
}