Example #1
Neuron::Neuron(int inputNum) : m_inputNum(inputNum)
{
    //we need an additional weight for the bias hence the +1
    for (int i=0; i<inputNum + 1; i++)
    {
        //set up the weights with an initial random value
        m_weights.push_back(RandomWeight());
    }
}
Example #2
TNNet::TNeuron::TNeuron(TInt OutputsN, TInt MyId, TTFunc TransFunc){

    TFuncNm = TransFunc;
    for(int c = 0; c < OutputsN; ++c){
        // define the outgoing edges: element 0 is the target node id, element 1 is the weight, element 2 is the weight delta
        TFlt RandWeight = RandomWeight();
        OutEdgeV.Add(TIntFltFltTr(c, RandWeight, 0.0));
        SumDeltaWeight.Add(0.0);
    }

    Id = MyId;
}
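
Neither Example #1 nor Example #2 shows the body of the scalar RandomWeight() they call; only the call sites survive. A minimal sketch, assuming the common convention of a uniform value in [-1, 1] drawn with rand() (both the range and the generator are assumptions, not taken from these sources):

#include <stdlib.h>

/* hypothetical helper: scalar random weight, uniform in [-1, 1] (assumed range) */
double RandomWeight(void)
{
	/* rand()/RAND_MAX lies in [0, 1]; scale and shift to [-1, 1] */
	return 2.0 * (double)rand() / (double)RAND_MAX - 1.0;
}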
Example #3
/**
 * Train an ELM (extreme learning machine)
 * @param elm_type - 0 for regression; 1 for classification (both binary and multi-class)
 */
void ELMTrain(int elm_type)
{
	double starttime,endtime;
	double TrainingAccuracy;
	int i,j,k = 0;
	float **input,**weight,*biase,**tranpI,**tempH,**H;
	float **PIMatrix;
	float **train_set;
	float **T,**Y;
	float **out;
	
	train_set = (float **)calloc(DATASET,sizeof(float *));
	tranpI = (float **)calloc(INPUT_NEURONS,sizeof(float *));
	input = (float **)calloc(DATASET,sizeof(float *));        		/*datasize * INPUT_NEURONS*/
	weight = (float **)calloc(HIDDEN_NEURONS,sizeof(float *));		/*HIDDEN_NEURONS * INPUT_NEURONS*/
	biase = (float *)calloc(HIDDEN_NEURONS,sizeof(float)); 			/*HIDDEN_NEURONS*/
	tempH = (float **)calloc(HIDDEN_NEURONS,sizeof(float *)); 		/*HIDDEN_NEURONS * datasize*/
	PIMatrix = (float **)calloc(HIDDEN_NEURONS,sizeof(float *));
	H = (float **)calloc(DATASET,sizeof(float *));
	T = (float **)calloc(DATASET,sizeof(float *));
	Y = (float **)calloc(DATASET,sizeof(float *));
	out = (float **)calloc(HIDDEN_NEURONS,sizeof(float *));
	
	for(i=0;i<DATASET;i++){
		train_set[i] = (float *)calloc(NUMROWS,sizeof(float));
		input[i] = (float *)calloc(INPUT_NEURONS,sizeof(float));
		H[i] = (float *)calloc(HIDDEN_NEURONS,sizeof(float));
	}
	for(i=0;i<DATASET;i++)
	{
		T[i] = (float *)calloc(OUTPUT_NEURONS,sizeof(float));
		Y[i] = (float *)calloc(OUTPUT_NEURONS,sizeof(float));
	}
	for(i=0;i<INPUT_NEURONS;i++)
		tranpI[i] = (float *)calloc(DATASET,sizeof(float));
	
	for(i=0;i<HIDDEN_NEURONS;i++)
	{
		weight[i] = (float *)calloc(INPUT_NEURONS,sizeof(float));
		tempH[i] = (float *)calloc(DATASET,sizeof(float));
		out[i] = (float *)calloc(OUTPUT_NEURONS,sizeof(float));
		PIMatrix[i] = (float *)calloc(DATASET,sizeof(float));
	}
	
	printf("begin to random weight and biase...\n");
	/*generate the random biases and weights*/
	RandomWeight(weight,HIDDEN_NEURONS,INPUT_NEURONS);
	RandomBiase(biase,HIDDEN_NEURONS);
	SaveMatrix(weight,"./result/weight",HIDDEN_NEURONS,INPUT_NEURONS);
	SaveMatrix_s(biase,"./result/biase",1,HIDDEN_NEURONS);
	/*load the dataset into memory*/
	printf("begin to load input from the file...\n");
	if(LoadMatrix(train_set,"./sample/frieman",DATASET,NUMROWS) == 0){
		printf("load input file error!!!\n");	
		return;
	}
	
	/*split the dataset into inputs and outputs*/
	if(elm_type == 0){ 	//regression
		/*
			the first column is the output of the dataset
		*/
		for(i = 0;i < DATASET ;i++)
		{
			T[k++][0] = train_set[i][0];
			for(j = 1;j <= INPUT_NEURONS;j++)
			{
				input[i][j-1] = train_set[i][j];
			}
		}
	}else{				//classification
		/*
			the first column holds the class label of the dataset
		*/
		InitMatrix(T,DATASET,OUTPUT_NEURONS,-1);
		for(i = 0;i < DATASET ;i++)
		{
			//read the class label from the first column of the row
			T[k++][0] = train_set[i][0] - 1;//class labels start at 1 in the file, so subtract one to make them zero-based
			for(j = 1;j <= INPUT_NEURONS;j++)
			{
				input[i][j-1] = train_set[i][j];
			}
		}
		for(i = 0;i < DATASET ;i++)
		{
			for(j = 0;j < OUTPUT_NEURONS;j++)
			{
				k =  T[i][0];
				if(k < OUTPUT_NEURONS && k >= 0){
					T[i][k] = 1;
				}
				if(k != 0){
					T[i][0] = -1;
				}
			}
		}
	}
	/*ELM*/
	printf("begin to compute...\n");
	starttime = omp_get_wtime();	
	TranspositionMatrix(input,tranpI,DATASET,INPUT_NEURONS);
	printf("begin to compute step 1...\n");
	MultiplyMatrix(weight,HIDDEN_NEURONS,INPUT_NEURONS, tranpI,INPUT_NEURONS,DATASET,tempH);
	printf("begin to compute setp 2...\n");
	AddMatrix_bais(tempH,biase,HIDDEN_NEURONS,DATASET);
	printf("begin to compute step 3...\n");
	SigmoidHandle(tempH,HIDDEN_NEURONS,DATASET);
	printf("begin to compute step 4...\n");
	TranspositionMatrix(tempH,H,HIDDEN_NEURONS,DATASET);
	PseudoInverseMatrix(H,DATASET,HIDDEN_NEURONS,PIMatrix);
	MultiplyMatrix(PIMatrix,HIDDEN_NEURONS,DATASET,T,DATASET,OUTPUT_NEURONS,out);

	//SaveMatrix(H,"./result/H",DATASET,HIDDEN_NEURONS);
	//SaveMatrix(PIMatrix,"./result/PIMatrix",HIDDEN_NEURONS,DATASET);
	printf("begin to compute step 5...\n");
	endtime = omp_get_wtime();
	//save the output weights
	SaveMatrix(out,"./result/result",HIDDEN_NEURONS,OUTPUT_NEURONS);
	MultiplyMatrix(H,DATASET,HIDDEN_NEURONS,out,HIDDEN_NEURONS,OUTPUT_NEURONS,Y);
	printf("use time :%f\n",endtime - starttime);
	printf("train complete...\n");

	if(elm_type == 0){
	//check the training accuracy
		double MSE = 0;
		for(i = 0;i< DATASET;i++)
		{
			MSE += (Y[i][0] - T[i][0])*(Y[i][0] - T[i][0]);
		}
		TrainingAccuracy = sqrt(MSE/DATASET);
		printf("Regression/trainning accuracy :%f\n",TrainingAccuracy);
	}else{
		//count misclassified samples: compare the argmax of the prediction Y with the argmax of the target T
		float MissClassificationRate_Training=0;
		double maxtag1,maxtag2;
		int tag1 = 0,tag2 = 0;
		for (i = 0; i < DATASET; i++) {
			maxtag1 = Y[i][0];
			tag1 = 0;
			maxtag2 = T[i][0];
			tag2 = 0;
			for (j = 1; j < OUTPUT_NEURONS; j++) {
				if(Y[i][j] > maxtag1){
					maxtag1 = Y[i][j];
					tag1 = j;
				}
				if(T[i][j] > maxtag2){
					maxtag2 = T[i][j];
					tag2 = j;
				}
			}
			if(tag1 != tag2)
				MissClassificationRate_Training++;
		}
		TrainingAccuracy = 1 - MissClassificationRate_Training*1.0f/DATASET;
		printf("Classification/training accuracy :%f\n",TrainingAccuracy);
	}
	FreeMatrix(train_set,DATASET);
	FreeMatrix(tranpI,INPUT_NEURONS);
	FreeMatrix(input,DATASET);
	FreeMatrix(weight,HIDDEN_NEURONS);
	free(biase);
	FreeMatrix(tempH,HIDDEN_NEURONS);
	FreeMatrix(PIMatrix,HIDDEN_NEURONS);
	FreeMatrix(H,DATASET);
	FreeMatrix(T,DATASET);
	FreeMatrix(Y,DATASET);
	FreeMatrix(out,HIDDEN_NEURONS);
}
Example #4
//regression
void ELMTrain()
{
	double starttime,endtime;

	int i,j,k = 0;
	double MSE = 0,TrainingAccuracy;
	float **input,**weight,*biase,**tranpI,**tempH,**H;
	float **PIMatrix;
	float **train_set;
	float **T,**Y;
	float **out;
	
	train_set = (float **)calloc(DATASET,sizeof(float *));
	tranpI = (float **)calloc(INPUT_NEURONS,sizeof(float *));
	input = (float **)calloc(DATASET,sizeof(float *));        		/*datasize * INPUT_NEURONS*/
	weight = (float **)calloc(HIDDEN_NEURONS,sizeof(float *));		/*HIDDEN_NEURONS * INPUT_NEURONS*/
	biase = (float *)calloc(HIDDEN_NEURONS,sizeof(float)); 			/*HIDDEN_NEURONS*/
	tempH = (float **)calloc(HIDDEN_NEURONS,sizeof(float *)); 		/*HIDDEN_NEURONS * datasize*/
	PIMatrix = (float **)calloc(HIDDEN_NEURONS,sizeof(float *));
	H = (float **)calloc(DATASET,sizeof(float *));
	T = (float **)calloc(DATASET,sizeof(float *));
	Y = (float **)calloc(DATASET,sizeof(float *));
	out = (float **)calloc(HIDDEN_NEURONS,sizeof(float *));
	
	for(i=0;i<DATASET;i++){
		train_set[i] = (float *)calloc(NUMROWS,sizeof(float));
		input[i] = (float *)calloc(INPUT_NEURONS,sizeof(float));
		H[i] = (float *)calloc(HIDDEN_NEURONS,sizeof(float));
	}
	for(i=0;i<DATASET;i++)
	{
		T[i] = (float *)calloc(OUTPUT_NEURONS,sizeof(float));
		Y[i] = (float *)calloc(OUTPUT_NEURONS,sizeof(float));
	}
	for(i=0;i<INPUT_NEURONS;i++)
		tranpI[i] = (float *)calloc(DATASET,sizeof(float));
	
	for(i=0;i<HIDDEN_NEURONS;i++)
	{
		weight[i] = (float *)calloc(INPUT_NEURONS,sizeof(float));
		tempH[i] = (float *)calloc(DATASET,sizeof(float));
		out[i] = (float *)calloc(OUTPUT_NEURONS,sizeof(float));
		PIMatrix[i] = (float *)calloc(DATASET,sizeof(float));
	}
	
	printf("begin to random weight and biase...\n");
	/*generate the random biases and weights*/
	RandomWeight(weight,HIDDEN_NEURONS,INPUT_NEURONS);
	RandomBiase(biase,HIDDEN_NEURONS);
	
	/*load the dataset into memory*/
	printf("begin to load input from the file...\n");
	if(LoadMatrix(train_set,"../TrainingDataSet/covtype",DATASET,NUMROWS,1) == 0){
		printf("load input file error!!!\n");	
		return;
	}
	
	/*split the dataset into inputs and outputs*/
	for(i = 0;i < DATASET ;i++)
	{
		T[k++][0] = train_set[i][0];
		for(j = 1;j <= INPUT_NEURONS;j++)
		{
			input[i][j-1] = train_set[i][j];
		}
	}
	SaveMatrix(input,"./result/input",DATASET,INPUT_NEURONS);
	/*ELM*/
	printf("begin to compute...\n");
	starttime = omp_get_wtime();	
	TranspositionMatrix(input,tranpI,DATASET,INPUT_NEURONS);
	printf("begin to compute step 1...\n");
	MultiplyMatrix(weight,HIDDEN_NEURONS,INPUT_NEURONS, tranpI,INPUT_NEURONS,DATASET,tempH);
	printf("begin to compute setp 2...\n");
	AddMatrix_bais(tempH,biase,HIDDEN_NEURONS,DATASET);
	printf("begin to compute step 3...\n");
	SigmoidHandle(tempH,HIDDEN_NEURONS,DATASET);
	printf("begin to compute step 4...\n");
	TranspositionMatrix(tempH,H,HIDDEN_NEURONS,DATASET);
	PseudoInverseMatrix(H,DATASET,HIDDEN_NEURONS,PIMatrix);
	MultiplyMatrix(PIMatrix,HIDDEN_NEURONS,DATASET,T,DATASET,OUTPUT_NEURONS,out);

	//SaveMatrix(H,"./result/H",DATASET,HIDDEN_NEURONS);
	//SaveMatrix(PIMatrix,"./result/PIMatrix",HIDDEN_NEURONS,DATASET);

	
	printf("begin to compute step 5...\n");
	endtime = omp_get_wtime();
	//save the output weights
	SaveMatrix(out,"./result/result",HIDDEN_NEURONS,OUTPUT_NEURONS);
	
	//check the training accuracy
	MultiplyMatrix(H,DATASET,HIDDEN_NEURONS,out,HIDDEN_NEURONS,OUTPUT_NEURONS,Y);
	for(i = 0;i< DATASET;i++)
	{
		MSE += (Y[i][0] - T[i][0])*(Y[i][0] - T[i][0]);
	}
	SaveMatrix(T,"./result/t",DATASET,OUTPUT_NEURONS);
	SaveMatrix(Y,"./result/y",DATASET,OUTPUT_NEURONS);
	TrainingAccuracy = sqrt(MSE/DATASET);
	
	printf("use time :%f\n",endtime - starttime);
	printf("trainning accuracy :%f\n",TrainingAccuracy);
	
	printf("train complete...\n");
	//print(PIMatrix,DATASET,HIDDEN_NEURONS);
}
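
Examples #3 and #4 above also rely on a matrix form of RandomWeight plus a RandomBiase helper, neither of which is shown here. A minimal sketch, assuming both simply fill their targets with uniform values in [0, 1] (the range and the rand()-based generation are assumptions, not taken from these sources):

#include <stdlib.h>

/* hypothetical helpers: fill a rows*cols weight matrix and a bias vector with random values */
void RandomWeight(float **weight, int rows, int cols)
{
	int i, j;
	for (i = 0; i < rows; i++)
		for (j = 0; j < cols; j++)
			weight[i][j] = (float)rand() / (float)RAND_MAX;	/* uniform in [0, 1], assumed */
}

void RandomBiase(float *biase, int n)
{
	int i;
	for (i = 0; i < n; i++)
		biase[i] = (float)rand() / (float)RAND_MAX;
}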