Example #1
void NetworkTests::NetworkTestInclTimingBCPNNRecurrent()
{
	int nrHypercolumns = 65536;
	int nrRateUnits = 100;
	int nrItems = 5;
	bool storeData = false;
	float probConnectivity = 0.0003f;
	bool doTesting = true; // set to false for some scaling tests

	// network construction
	Network* network = new Network();
	network->AddTiming(network); // collect timing for the network as a whole

	PopulationColumns* layer1 = new PopulationColumns(network,nrHypercolumns,nrRateUnits,PopulationColumns::Graded);
	network->AddPopulation(layer1);
	
	FullConnectivity* full = new FullConnectivity(); // not attached to any projection here, only timed below
	full->SetRandomWeights(0,0);
	RandomConnectivity* randConn = new RandomConnectivity(probConnectivity);
	randConn->SetRandomWeights(0,0);

	layer1->AddPre(layer1,randConn); // recurrent projection

	// Projection plasticity: online BCPNN
	float lambda0 = 10e-6;
	float alpha = 0.05;
	ProjectionModifierBcpnnOnline* bStandard = new ProjectionModifierBcpnnOnline(alpha,lambda0);

	randConn->AddProjectionsEvent(bStandard);

	WTA* wta = new WTA();
	layer1->AddPopulationModifier(wta);

	// Construct initial network
	network->Initialize();
	//vector<int> partsOfDataToUseAsInput = layer1->GetMPIDistribution(network->MPIGetNodeId());

	// Specify input data
	// - TODO: change to only create the local part
	DataSources source;
	// not entirely correct right now: SetValuesAll works on the local part only, while this data is global
	vector<vector<float> > data = source.GetRandomHCsOrthogonal(nrHypercolumns/100,nrRateUnits,nrItems);

	// Meters
	Meter* l1meter = new Meter("layer1.csv", Storage::CSV);
	if(storeData)
	{
		l1meter->AttachPopulation(layer1);
		network->AddMeter(l1meter);
	}

	Meter* c1meter = new Meter("Projections1.csv",Storage::CSV);
	if(storeData)
	{
		c1meter->AttachProjection(layer1->GetIncomingProjections()[0],0);
		network->AddMeter(c1meter);
	}

	// Timings
	network->AddTiming(bStandard);
	network->AddTiming(layer1);
	network->AddTiming(full);

	// the projection can only be accessed after the network has been built
	network->AddTiming(layer1->GetIncomingProjections()[0]);

	// Training
	// set fixed pattern
	layer1->SwitchOnOff(false);

	int trainIterations = 1;
	int testIterations = 5;

	for(int i=0;i<trainIterations;i++)
	{
		//cout<<i<<"\n";

		for(int j=0;j<data.size();j++)
		{
			layer1->SetValuesAll(data[j]);

			// next time step
			network->Simulate(10);
		}
	}

	// Testing
	if(doTesting)
	{
		layer1->SwitchOnOff(true);
		bStandard->SwitchOnOff(false);

		for(int i=0;i<testIterations;i++)
		{
			for(int j=0;j<data.size();j++)
			{
				// clear all pending events before switching patterns so there is no disturbance;
				// can be removed if e.g. a moving-average activity is used
				network->ClearEventsIncoming();

				layer1->SetValuesAll(data[j]);
				network->Simulate(10);
			}
		}
	}
	
	network->RecordAll();
	network->StoreAnalysis();
}
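
The internals of ProjectionModifierBcpnnOnline(alpha, lambda0) are not shown in this example. The stand-alone sketch below assumes it follows the standard incremental BCPNN formulation: exponentially smoothed unit and pairwise probability traces bounded below by lambda0, weights equal to the log of the normalized co-activation, and a bias equal to the log of the postsynaptic trace. The struct and member names (BcpnnOnlineSketch, Pi, Pj, Pij) are illustrative only and are not part of the framework's API.

#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical stand-alone sketch of an online BCPNN update for one projection.
// alpha is the trace learning rate, lambda0 the lower bound that keeps the
// logarithms finite (matching the two constructor arguments used above).
struct BcpnnOnlineSketch
{
	float alpha, lambda0;
	std::vector<float> Pi, Pj;              // presynaptic / postsynaptic probability traces
	std::vector<std::vector<float> > Pij;   // pairwise traces
	std::vector<std::vector<float> > w;     // resulting weights
	std::vector<float> bias;                // postsynaptic bias terms

	BcpnnOnlineSketch(int nPre, int nPost, float alpha_, float lambda0_)
		: alpha(alpha_), lambda0(lambda0_),
		  Pi(nPre, 1.0f/nPre), Pj(nPost, 1.0f/nPost),
		  Pij(nPre, std::vector<float>(nPost, 1.0f/(nPre*nPost))),
		  w(nPre, std::vector<float>(nPost, 0.0f)),
		  bias(nPost, 0.0f) {}

	// x: presynaptic activities, y: postsynaptic activities (one time step)
	void Update(const std::vector<float>& x, const std::vector<float>& y)
	{
		for(size_t i=0;i<Pi.size();i++)
			Pi[i] += alpha*((1.0f-lambda0)*x[i] + lambda0 - Pi[i]);
		for(size_t j=0;j<Pj.size();j++)
			Pj[j] += alpha*((1.0f-lambda0)*y[j] + lambda0 - Pj[j]);

		for(size_t i=0;i<Pi.size();i++)
			for(size_t j=0;j<Pj.size();j++)
			{
				Pij[i][j] += alpha*((1.0f-lambda0*lambda0)*x[i]*y[j] + lambda0*lambda0 - Pij[i][j]);
				w[i][j] = std::log(Pij[i][j]/(Pi[i]*Pj[j])); // weight = log of normalized co-activation
			}

		for(size_t j=0;j<Pj.size();j++)
			bias[j] = std::log(Pj[j]);
	}
};
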
Example #2
void NetworkTests::NetworkTestTrieschAndFoldiak(int mpiRank, int mpiSize)
{
	DataSources dataSources;

	int sizeX = 5;
	int sizeY = 5;
	int nrItems = 2500;

	bool isTriesch = true;

	Network* network = new Network();
	network->SetMPIParameters(mpiRank,mpiSize);

	int nrInputHypercolumns = 1;
	int nrInputRateUnits = sizeX*sizeY;
	int nrOutputHypercolumns = 2;
	int nrOutputRateUnits = 5;

	PopulationColumns* layer1 = new PopulationColumns(network,nrInputHypercolumns,nrInputRateUnits,PopulationColumns::GradedThresholded);
	PopulationColumns* layer2 = new PopulationColumns(network,nrOutputHypercolumns,nrOutputRateUnits,PopulationColumns::GradedThresholded);

	network->AddPopulation(layer1);
	network->AddPopulation(layer2);

	FullConnectivity* full = new FullConnectivity();
	FullConnectivity* full2;
	FullConnectivityNoLocalHypercolumns* full3NoLocal;

	layer2->AddPre(layer1,full);

	bool thresholded = true;
	ProjectionModifierTriesch* eTriesch = new ProjectionModifierTriesch(0.002f,0.2f,0.05f,1.0f/float(nrOutputRateUnits), thresholded);

	if(isTriesch)
		full->AddProjectionsEvent(eTriesch);

	float eta1 = 0.5, eta2 = 0.02, eta3 = 0.02, alpha = 0.0005, beta = 10;
	bool lateral = false;

	ProjectionModifierFoldiak* eFoldiak = new ProjectionModifierFoldiak(eta1, eta2, eta3, alpha, beta, lateral);
	lateral = true;
	alpha = 0.75;
	ProjectionModifierFoldiak* eFoldiakLateral = new ProjectionModifierFoldiak(eta1, eta2, eta3, alpha, beta, lateral);
	//ProjectionModifierBCM* eBCM = new ProjectionModifierBCM(0.1,0.05,20);

	if(!isTriesch)
	{
		full2 = new FullConnectivity();
		layer2->AddPre(layer2,full2);
		full->AddProjectionsEvent(eFoldiak);
		full2->AddProjectionsEvent(eFoldiakLateral);
	}
	else
	{
		full3NoLocal = new FullConnectivityNoLocalHypercolumns();
		//full3NoLocal->AddProjectionsEvent(eBCM);
		full3NoLocal->AddProjectionsEvent(eFoldiakLateral);
		layer2->AddPre(layer2,full3NoLocal);
	}

	// output nonlinearity / competition on layer 2
	SoftMax* softmax = new SoftMax(SoftMax::WTAThresholded,0.5);
	WTA* wta = new WTA();
	//layer2->AddPopulationModifier(wta);
	layer2->AddPopulationModifier(softmax);

	network->Initialize();

	//////////////////////////////
	// Meters
	char* name1 = new char[50];
	char* name2 = new char[50];
	sprintf(name1,"Projection_triesch_n%d.csv",mpiRank);
	Meter* connMeter = new Meter(name1, Storage::CSV);
	connMeter->AttachProjection(layer2->GetIncomingProjections()[0],0);
	network->AddMeter(connMeter);

	sprintf(name2,"Layer2Activity_triesch.csv");

	Meter* layerMeter = new Meter(name2, Storage::CSV);
	layerMeter->AttachPopulation(layer2);
	network->AddMeter(layerMeter);
	// end Meters
	//////////////////////////////

	vector<vector<float> > trainData = dataSources.GetBars(sizeX,sizeY, nrItems);

	int iterations = 1;
	int iterSameStimuli = 100;

	if(!isTriesch)
		iterSameStimuli = 10;

	layer1->SwitchOnOff(false);	// fixed during training phase

	for(int j=0;j<iterations;j++)
	{
		for(int i=0;i<trainData.size();i++)
		{
			/*if(!isTriesch)
			{
				// in order to settle recurrent activity
				eFoldiak->SwitchOnOff(false);
				eFoldiakLateral->SwitchOnOff(false);
			}*/

			for(int k=0;k<iterSameStimuli;k++)
			{
			/*	if(!isTriesch && k==iterSameStimuli-1)
				{
					eFoldiak->SwitchOnOff(true);
					eFoldiakLateral->SwitchOnOff(true);
				}
*/
				// single presentation per simulation step
				layer1->SetValuesAll(trainData[i]);
				network->Simulate();
			}

			// allow units to reset
			network->Reset();

			/*if(i%50 == 0)
			{
				network->RecordAll();
				if(mpiRank == 0)
					cout<<"Storing.";
			}*/
		}	
	}

	network->RecordAll();
}
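
The five constructor arguments of ProjectionModifierFoldiak (eta1, eta2, eta3, alpha, beta) are not documented in this example. The sketch below is an assumption based on Foldiak's (1990) published rule: Hebbian feedforward learning, anti-Hebbian lateral decorrelation, and threshold adaptation toward a target activity p. The names in it (FoldiakStep, etaFF, etaLat, etaThr) are illustrative and do not map one-to-one onto the framework's parameters.

#include <cstddef>
#include <vector>

// Hypothetical sketch of a Foldiak (1990)-style update for one layer.
// x: input activities, y: output activities, p: target sparseness.
void FoldiakStep(const std::vector<float>& x, const std::vector<float>& y,
                 std::vector<std::vector<float> >& q,    // feedforward weights
                 std::vector<std::vector<float> >& wLat, // lateral (anti-Hebbian) weights
                 std::vector<float>& t,                  // adaptive thresholds
                 float etaFF, float etaLat, float etaThr, float p)
{
	for(size_t i=0;i<y.size();i++)
	{
		// Hebbian feedforward update with decay toward the input pattern
		for(size_t j=0;j<x.size();j++)
			q[i][j] += etaFF * y[i] * (x[j] - q[i][j]);

		// anti-Hebbian lateral update drives output units toward decorrelation
		for(size_t k=0;k<y.size();k++)
			if(k != i)
			{
				wLat[i][k] += -etaLat * (y[i]*y[k] - p*p);
				if(wLat[i][k] > 0.0f) wLat[i][k] = 0.0f; // lateral weights stay non-positive
			}

		// threshold adaptation keeps each unit near the target activity p
		t[i] += etaThr * (y[i] - p);
	}
}
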
Example #3
// Switching
void NetworkTests::NetworkTestSwitching(int mpiRank, int mpiSize)
{
	int nrHypercolumns = 5;
	int nrRateUnits = 10;
	int nrItems = 2;

	DataSources sources;
	srand(2);
	vector<vector<float> > data = sources.GetRandomHCs(nrHypercolumns,nrRateUnits,nrItems);
	
	// setup recurrent network

	Network* network = new Network();
	network->SetMPIParameters(mpiRank,mpiSize);

	PopulationColumns* layer1 = new PopulationColumns(network,nrHypercolumns,nrRateUnits,PopulationColumns::Graded);
	FullConnectivity* full = new FullConnectivity();

	layer1->AddPre(layer1,full);
	network->AddPopulation(layer1);

	ProjectionModifierBcpnnOnline* eBcpnn = new ProjectionModifierBcpnnOnline();
	ProjectionModifierTriesch* eTriesch = new ProjectionModifierTriesch();
	ProjectionModifierHebbSimple* eHebb = new ProjectionModifierHebbSimple();
	ProjectionModifierBCM* eBCM = new ProjectionModifierBCM();

	full->AddProjectionsEvent(eBcpnn);		// also adds the transfer function
	//full->AddProjectionsEvent(eTriesch);		// incl adding transfer fcn
	//full->AddProjectionsEvent(eHebb);
	//full->AddProjectionsEvent(eBCM);

	PopulationModifierAdaptation2* eAdaptation = new PopulationModifierAdaptation2();
	eAdaptation->SetParameters(0);		// adaptation off initially
	layer1->AddPopulationModifier(eAdaptation);
	
	WTA* wta = new WTA();
	layer1->AddPopulationModifier(wta);

	network->Initialize();
	eAdaptation->Initm_Aj(1); // initialize m_Aj vector

	// set up meters
	char* name1 = new char[30];
	char* name2 = new char[30];
	char* name3 = new char[30];
	sprintf(name1,"Projections_n%d.csv",mpiRank);
	sprintf(name2,"Layer1ActivityWTA.csv");
	sprintf(name3,"Layer1Activity.csv");

	Meter* connMeter = new Meter(name1, Storage::CSV);
	connMeter->AttachProjection(layer1->GetIncomingProjections()[0],0);
	network->AddMeter(connMeter);

	Meter* layerMeter = new Meter(name3, Storage::CSV);
	layerMeter->AttachPopulation(layer1);
	network->AddMeter(layerMeter);

	Meter* eventLayerMeter=new Meter(name2, Storage::CSV);
	eventLayerMeter->AttachPopulationModifier(eAdaptation);
	network->AddMeter(eventLayerMeter);

	int nrIters = 10;
	int stimuliOn = 10;

	layer1->SwitchOnOff(false); // fixed input

	// store patterns
	for(int i=0;i<nrIters;i++)
	{
		for(unsigned int j=0;j<data.size();j++)
		{
			for(int k=0;k<stimuliOn; k++)
			{
				layer1->SetValuesAll(data[j]);
				network->Simulate();
			}
		}
	}


	// random stimulation (only kept as an alternative input below)
	vector<float> randVec(data[0].size());
	for(unsigned int i=0;i<randVec.size();i++)
		randVec[i] = 0.5f*(float(rand())/float(RAND_MAX)); // divide as floats, not integers

	// mixture of the two stored patterns (ambiguous input)
	vector<float> mixVec(data[0].size());
	for(unsigned int i=0;i<mixVec.size();i++)
		mixVec[i] = data[0][i] + data[1][i];

	layer1->SetValuesAll(mixVec);//randVec);

	// Test without adaptation turned on

	layer1->SwitchOnOff(true);
	//eHebb->SetEtaHebb(0.0);
	eBCM->SwitchOnOff(false);
	eBcpnn->SwitchOnOff(false);

	for(int i=0;i<nrIters;i++)
	{
		for(unsigned int j=0;j<data.size();j++)
		{
			layer1->SetValuesAll(mixVec);//data[j]);
			for(int k=0;k<stimuliOn; k++)
			{	
				network->Simulate();
			}
		}
	}

	// Turn on adaptation
	//eAdaptation->SetParameters(10,0.2);
	eAdaptation->SetParameters(0.2f);

	for(int i=0;i<nrIters;i++)
	{
		for(unsigned int j=0;j<data.size();j++)
		{
			layer1->SetValuesAll(mixVec);
			for(int k=0;k<stimuliOn; k++)
			{	
				network->Simulate();
			}
		}
	}

	network->RecordAll();

	// check in the recorded activity that the network switches between the two stored patterns
}
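
The exact dynamics of PopulationModifierAdaptation2 are not shown here. The switching this test looks for typically comes from a slow fatigue variable that penalizes recently active units, so that under the ambiguous mixed input the WTA alternates between the two stored patterns. The sketch below illustrates that kind of firing-rate adaptation; AdaptationStep, adaptStrength and tau are made-up names, with adaptStrength playing the role of the 0.2 passed to SetParameters above.

#include <cstddef>
#include <vector>

// Hypothetical sketch of adaptation dynamics that produce switching between
// stored attractors under an ambiguous (mixed) input. a[j] is a slow fatigue
// trace of unit j; it is subtracted from the unit's support before the WTA
// step, so a persistently winning pattern gradually loses its advantage and
// the competing pattern takes over.
void AdaptationStep(const std::vector<float>& activity, // current unit outputs
                    std::vector<float>& a,              // adaptation traces
                    std::vector<float>& support,        // inputs to the WTA
                    float adaptStrength,                // e.g. the 0.2 used above
                    float tau = 10.0f)                  // adaptation time constant
{
	for(size_t j=0;j<a.size();j++)
	{
		a[j] += (activity[j] - a[j]) / tau;      // slow build-up / decay of fatigue
		support[j] -= adaptStrength * a[j];      // penalize recently active units
	}
}
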