Example #1
int RemoteMaster(GeneralGamlConfig& conf, HKYData& data)	{

	debug_mpi("starting RemoteMaster()...");
	
	int rank;
	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
	
	Parameters params;
	params.SetParams(conf, data);
	//the seed has already been set in SetParams above, but we need to
	//modify it so that the remotes don't all generate identical random sequences
	rnd.set_seed((rank+1) * rnd.init_seed());
	LogParams(params);


	Tree::alpha = params.gammaShapeBrlen;
	Tree::meanBrlenMuts = params.meanBrlenMuts;
	
	int nprocs;
	MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
	
	//debugging hook: uncomment the while to spin here until a debugger
	//attaches and sets debuggerSpin to false
	volatile bool debuggerSpin=true;
//	while(debuggerSpin);
	
	Population pop;
	pop.Setup(params, &conf, nprocs, rank);
	g_sw=&pop.stopwatch;
	g_gen = &pop.gen;

	
	//DEBUG: for now all remote nodes act as subtree workers (SW)
	debug_mpi("doing remote subtree worker");
	RemoteSubtreeWorker(pop, conf);
	/*
	if (conf.method == "fde")
		assert(0);
//		RemoteFullDuplexExchange(pop, conf);
	//DJZ changed this 9/10/03 to avoid occasional disparity in the actual # of sm nodes and the number of shields allocated
	else if (conf.method == "sm" || (conf.method == "hybrid" && rank <= (int) (conf.hybridp.nt*(nprocs-1)))){
//	else if (conf.method == "sm" || (conf.method == "hybrid" && rank < conf.hybridp.nt*nprocs)){
		debug_mpi("doing remote shielded migrants");
		RemoteShieldedMigrants(pop, conf);
	}
	else if (conf.method == "amr" || conf.method == "hybrid")	{
		debug_mpi("doing remote alpha male replication");
		RemoteAlphaMaleReplication(pop, conf);
	}
	else	{
		debug_mpi("ERROR: unknown method (GamlConfig::General::method): %s", conf.method.c_str());
		MPI_Abort(MPI_COMM_WORLD, -1);
	}
	*/
	return 0;
}
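
The (rank+1) * rnd.init_seed() line above is the key trick for giving every MPI process its own random stream from a single shared base seed. Below is a minimal self-contained sketch of the same idea, assuming a plain C++/MPI program (baseSeed and the std::mt19937 engine are stand-ins for GAML's rnd class, not its actual API):

#include <mpi.h>
#include <cstdio>
#include <random>

int main(int argc, char** argv) {
	MPI_Init(&argc, &argv);

	int rank;
	MPI_Comm_rank(MPI_COMM_WORLD, &rank);

	//every process starts from the same base seed (in the code above it
	//is set by SetParams), then derives a distinct per-process seed from
	//its rank; the +1 keeps rank 0 from collapsing the product to zero
	const unsigned baseSeed = 42;	//hypothetical shared base seed
	std::mt19937 rng((rank + 1) * baseSeed);

	printf("rank %d drew %u\n", rank, (unsigned)rng());

	MPI_Finalize();
	return 0;
}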
Example #2
/* I would prefer that the thread initialization code happen in MPIMain(), but
 * it needs the Population pop, which is declared here */
int MasterMaster(MasterGamlConfig& conf, HKYData& data)	{
	Parameters params;
	params.SetParams(conf, data);
	LogParams(params);

	int nprocs;
	MPI_Comm_size(MPI_COMM_WORLD, &nprocs);

	//debugging hook: uncomment the while to spin here until a debugger
	//attaches and sets debuggerSpin to false
	volatile bool debuggerSpin=true;
//	while(debuggerSpin);
		
	Tree::alpha = params.gammaShapeBrlen;
	Tree::meanBrlenMuts = params.meanBrlenMuts;

	Population pop;
//	debug_mpi("about to setup");
	pop.Setup(params, &conf, nprocs, 0);
	g_sw=&pop.stopwatch;
//	debug_mpi("setup");
	g_gen = &pop.gen;

	pop.CalcAverageFitness();

	// start the thread
	pthread_t thread;
	thread_arg_t targ;
	pthread_mutex_init(&lock_pm, NULL);
	pthread_mutex_init(&lock_pop, NULL);
	pthread_cond_init(&cond_pm, NULL);
	g_quit_time = false;
	g_processing_message = false;
	targ.conf = &conf;	//conf is already a non-const reference, so no const_cast is needed
	targ.pop = &pop;
	targ.nprocs = nprocs;
	
	pthread_create(&thread, NULL, thread_func2, (void*)&targ);
	
	cout << "Master running..." << endl;

	pop.gen=0;
	while (!g_quit_time){
		pthread_mutex_lock(&lock_pop);
			pop.keepTrack();
			pop.OutputFate();
			if (pop.gen % conf.logevery == 0) pop.OutputLog();
			++pop.gen;
			pop.NextGeneration();
			if(pop.gen % pop.params->saveEvery == 0) pop.CreateTreeFile( pop.params->treefname );
			if(pop.gen % pop.adap->intervalLength == 0){
				if(pop.enforceTermConditions == true
					&& pop.gen-pop.lastTopoImprove > pop.lastTopoImproveThresh 
					&& pop.adap->improveOverStoredIntervals < pop.improveOverStoredIntervalsThresh
					&& pop.adap->branchOptPrecision == pop.adap->minOptPrecision){
			//		&& pop.paraMan->updateThresh == pop.paraMan->minUpdateThresh){
					cout << "Reached termination condition!\nlast topological improvement at gen " << pop.lastTopoImprove << endl;
					cout << "Improvement over last " << pop.adap->intervalsToStore*pop.adap->intervalLength << " gen = " << pop.adap->improveOverStoredIntervals << endl;
					g_quit_time=true;
					break;
					}
				pop.CheckSubtrees();
				//6-20-05 Changing this to deterministically reduce the replace thresh
				//every subtreeInterval generations.  Tying the reduction to how often
				//the master received new bests from the remotes had odd properties
				//and didn't scale well with differing numbers of processors,
				//and it's not clear what a better automated approach would be.
				//pop.CheckRemoteReplaceThresh();
				if(pop.gen % pop.paraMan->subtreeInterval == 0) pop.paraMan->ReduceUpdateThresh();
#ifdef INCLUDE_PERTURBATION
				pop.CheckPerturbParallel();
#endif
				}
		pthread_mutex_unlock(&lock_pop);
		pthread_mutex_lock(&lock_pm);
			while (g_processing_message)
				pthread_cond_wait(&cond_pm, &lock_pm);
		pthread_mutex_unlock(&lock_pm);
		}
	pop.FinalOptimization();
	pthread_join(thread, NULL);
	return 0;
}
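
MasterMaster hands its state to the message-processing thread through a plain struct (thread_arg_t) and then synchronizes each generation with the lock_pm/cond_pm pair, sleeping while g_processing_message is set. Below is a minimal sketch of that struct-argument plus condition-variable handshake, assuming plain pthreads (all names here are illustrative, not the actual GAML globals):

#include <pthread.h>
#include <cstdio>

//arguments handed to the worker thread, mirroring thread_arg_t above
struct thread_arg {
	int nprocs;
};

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
static bool processing = true;	//stands in for g_processing_message

void* worker(void* varg) {
	thread_arg* arg = (thread_arg*)varg;
	printf("worker started with nprocs=%d\n", arg->nprocs);

	//simulate handling one message, then wake the main thread
	pthread_mutex_lock(&lock);
	processing = false;
	pthread_cond_signal(&cond);
	pthread_mutex_unlock(&lock);
	return NULL;
}

int main() {
	thread_arg targ;
	targ.nprocs = 4;	//illustrative value

	pthread_t thread;
	pthread_create(&thread, NULL, worker, (void*)&targ);

	//same pattern as the master's generation loop: block on the condition
	//variable until the worker clears the processing flag
	pthread_mutex_lock(&lock);
	while (processing)
		pthread_cond_wait(&cond, &lock);
	pthread_mutex_unlock(&lock);

	printf("main thread resumed\n");
	pthread_join(thread, NULL);
	return 0;
}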