Exemplo n.º 1
0
/*
 * Build a FANN network (struct fann) from the project's NeuralNet representation.
 *
 * NN  - source network description (layer sizes, weights, scaling, activation).
 * Returns a newly created struct fann allocated in CurTransactionContext;
 * caller/transaction owns it.
 */
struct fann *nn_fann_NeuralNet2Fann(NeuralNet *NN) {

    struct fann_connection *ConnectionsANN;
    struct fann_connection *ConnectionsNN;
    PGM_Vetor_Int Neurons;
    struct fann *ANN;
    /* create the network in the transaction context so it outlives this call;
       NOTE(review): presumably callers expect transaction lifetime — confirm */
    MemoryContext contextoAnterior = MemoryContextSwitchTo( CurTransactionContext );

    /* NNeurons is reinterpreted as int*; layer counts are assumed to fit in int */
    Neurons.n_elems = NN->NLayers;
    Neurons.valor = (int*) NN->NNeurons;

    ANN = nn_fann_train_create_standard_array(&Neurons,NN->Steepness,NN->FunctionActivation);

    nn_fann_parse_fann_set_scaling_default(ANN);

    MemoryContextSwitchTo( contextoAnterior );

    /* scratch arrays used only to transfer weights into ANN */
    ConnectionsANN = (struct fann_connection*) pgm_malloc(sizeof(struct fann_connection) * ANN->total_connections);
    ConnectionsNN  = (struct fann_connection*) pgm_malloc(sizeof(struct fann_connection) * ANN->total_connections);

    fann_get_connection_array(ANN,ConnectionsANN);
    nn_fann_parse_nn_get_connection2(NN->NLayers,(int*)NN->NNeurons,ConnectionsNN);

    nn_fann_parse_adjust_get_weightByNeuralNet(NN, ANN, ConnectionsANN,ConnectionsNN);

    nn_fann_parse_fann_set_scaling(ANN,NN->InputMin,NN->InputMax,NN->OutputMin,NN->OutputMax);
    nn_fann_parse_setBihiperbolicParam(ANN,NN->BihiperbolicLambda,NN->BihiperbolicT1,NN->BihiperbolicT2);

    /* fix: both connection arrays were leaked — the weights are already copied
       into ANN, so the scratch buffers can be released */
    pgm_free(ConnectionsANN);
    pgm_free(ConnectionsNN);

    return ANN;
}
Exemplo n.º 2
0
/*
 * Convert a FANN network (struct fann) into the project's NeuralNet form.
 *
 * ANN - source FANN network.
 * Returns a newly created NeuralNet; caller owns it.
 */
NeuralNet *nn_fann_Fann2NeuralNet(struct fann *ANN ) {
    unsigned int ANN_NLayers = fann_get_num_layers(ANN);
    unsigned int *BiasArray = (unsigned int* ) pgm_malloc (sizeof(unsigned int)*ANN_NLayers);
    unsigned int *ANN_NNeurons = (unsigned int* ) pgm_malloc (sizeof(unsigned int)*ANN_NLayers);
    double *ANN_Weights;
    NeuralNet *NN;

    fann_get_bias_array(ANN,BiasArray);

    fann_get_layer_array(ANN,ANN_NNeurons);

    ANN_Weights = nn_fann_parse_get_weights(ANN,ANN_NLayers,ANN_NNeurons);
    NN = nn_NeuralNetCreate(
             ANN_NLayers,
             // In FANN every neuron has its own activation function. NeuralNet keeps a
             // single one; the first layer has no activation function, so we take it
             // from the first neuron of the second layer.
             (ANN->first_layer+1)->first_neuron->activation_function,
             ANN->BiperbolicLambda,ANN->BiperbolicT1,ANN->BiperbolicT2,
             ANN_NNeurons,
             ANN->input_min,ANN->input_max,ANN->output_min,ANN->output_max,
             // Likewise for steepness: the first layer has none, so we take the
             // steepness of the first neuron of the second layer.
             (ANN->first_layer+1)->first_neuron->activation_steepness,
             // FANN has one bias per layer; NeuralNet only uses the first layer's bias.
             BiasArray[0],
             ANN_Weights
         );

    // fix: BiasArray was leaked — only BiasArray[0] was consumed (by value) above.
    // ANN_NNeurons / ANN_Weights are handed to nn_NeuralNetCreate, which may retain
    // them, so they are deliberately NOT freed here — TODO confirm ownership.
    pgm_free(BiasArray);

    NN->MSE = fann_get_MSE(ANN);

    return NN;
}
Exemplo n.º 3
0
/*
 * Parse the "#n1 n2 ... nk#" neurons-per-layer section of a NeuralNet string.
 *
 * st       - in/out cursor into the string; advanced past the parsed section.
 * n_layers - expected number of layer counts; mismatch raises a parse error.
 * Returns a pgm_malloc'd array of MAX_LAYERS unsigned ints (first n_layers filled);
 * caller owns it. On any malformed input nn_parse_NeuralNetParseError is called
 * (which does not return).
 */
unsigned int *nn_parse_ReadNeurons(char **st, unsigned short n_layers){
	int i;
	unsigned int *neurons;

	neurons = (unsigned int*) pgm_malloc(sizeof(unsigned int)*MAX_LAYERS);

	/* advance to the '#' opening this section, then step past it */
	while( **st != '#') (*st)++;
	(*st)++;

	for(i = 0; **st != '#' ; i++){
		while( **st == ' ' || **st =='\t' || **st =='\r' || **st =='\n') (*st)++;

		/* fix: guard against writing past the MAX_LAYERS-sized buffer */
		if(i >= MAX_LAYERS)
			nn_parse_NeuralNetParseError("Numero de neuronios incorretos",*st);

		if((**st >= '0' && **st <= '9') || **st == '+')
			sscanf(*st,"%u",neurons+i);	/* fix: %u — target is unsigned int (%d was UB) */
		else if(**st == '-')
			nn_parse_NeuralNetParseError("Numero negativo na camada",*st);
		else
			nn_parse_NeuralNetParseError("String mal formatada",*st);

		/* fix: a leading '+' was never consumed, which made the digit/whitespace
		   skips below no-ops and looped forever on input like "+5" */
		if(**st == '+') (*st)++;
		while(**st >= '0' && **st <= '9') (*st)++;
		while( **st == ' ' || **st =='\t' || **st =='\r' || **st =='\n') (*st)++;

	}
	if(i != n_layers)
		nn_parse_NeuralNetParseError("Numero de neuronios incorretos",*st);

	return neurons;
}
Exemplo n.º 4
0
/*
 * Parse the activation-function section of a NeuralNet string.
 *
 * st        - in/out cursor; advanced past the parsed section.
 * bhLambida, bhT1, bhT2 - out parameters for the bihiperbolic activation's
 *             lambda/T1/T2; filled only when bhLambida != NULL and the parsed
 *             function is NN_BIHIPERBOLIC. Missing values fall back to the
 *             NN_BI_HIPERBOLIC_* defaults (a missing T2 defaults to T1).
 * Returns the activation-function code, or FUNCTION_ACTIVATION_DEFAULT when the
 * section is empty. Unknown names raise nn_parse_NeuralNetParseError.
 */
unsigned short nn_parse_ReadFunctionActivation(char **st, double *bhLambida, double *bhT1, double *bhT2){
	unsigned short function_activation = FUNCTION_ACTIVATION_DEFAULT;
	while( **st != '#') (*st)++;
	(*st)++;
	while( **st == ' ' || **st =='\t' || **st =='\r' || **st =='\n') (*st)++;
	if(**st == '#')
		return function_activation;
	else{
		int i = -1;
		char *pt, *word;
		unsigned int len_word;
		pt = *st;

		*st += 2;/* skip two chars (original comment: past the '#' and the space) —
		            NOTE(review): cursor is already past '#' here; len_word below is
		            measured from pt, so the extracted word is still complete */

		while( **st != ' ' && **st !='\t' && **st !='\r' && **st !='\n' && **st !='#') (*st)++; /* consume the whole word */

		len_word = *st - pt;
		word = (char*) pgm_malloc (sizeof(char)*(len_word+1));
		word[len_word] = '\0';
		strncpy(word,pt,len_word);

		/* uppercase in place; cast avoids UB for negative char values */
		while(word[++i]) word[i] = toupper((unsigned char)word[i]);

		if((function_activation = nn_FindFunctionActivation(word)) == 0xFFFF)
			nn_parse_NeuralNetParseError("Não foi encontrada essa função de ativação",pt);

		if(bhLambida != NULL && function_activation == NN_BIHIPERBOLIC){
			/* optional lambda */
			while( **st == ' ' || **st =='\t' || **st =='\r' || **st =='\n') (*st)++;
			if( **st == '#'){
				*bhLambida = NN_BI_HIPERBOLIC_LAMBDA_DEFAULT;
				*bhT1 = NN_BI_HIPERBOLIC_T1_DEFAULT;
				*bhT2 = NN_BI_HIPERBOLIC_T2_DEFAULT;
			}else if(sscanf(*st,"%lf",bhLambida) != 1)
				nn_parse_NeuralNetParseError("Não foi possivel ler o valor de lambda",pt);

			/* optional T1 */
			while( **st != ' ' && **st !='\t' && **st !='\r' && **st !='\n' && **st !='#') (*st)++;
			while( **st == ' ' || **st =='\t' || **st =='\r' || **st =='\n') (*st)++;
			if( **st == '#'){
				*bhT1 = NN_BI_HIPERBOLIC_T1_DEFAULT;
				*bhT2 = NN_BI_HIPERBOLIC_T2_DEFAULT;
			}else if(sscanf(*st,"%lf",bhT1) != 1)
				/* fix: message said "lambda" (copy-paste) */
				nn_parse_NeuralNetParseError("Não foi possivel ler o valor de T1",pt);

			/* optional T2 (defaults to T1 when absent) */
			while( **st != ' ' && **st !='\t' && **st !='\r' && **st !='\n' && **st !='#') (*st)++;
			while( **st == ' ' || **st =='\t' || **st =='\r' || **st =='\n') (*st)++;
			if( **st == '#'){
				*bhT2 = *bhT1;
			}else if(sscanf(*st,"%lf",bhT2) != 1)
				/* fix: message said "lambda" (copy-paste) */
				nn_parse_NeuralNetParseError("Não foi possivel ler o valor de T2",pt);
		}
		return function_activation;
	}
}
Exemplo n.º 5
0
/* Allocate n_bytes of uninitialized memory.
 * On Windows builds with CONFIG_USE_HEAPALLOC this uses the process heap and
 * HEAP_GENERATE_EXCEPTIONS (raises an SEH exception on failure instead of
 * returning NULL); otherwise it delegates to pgm_malloc.
 * NOTE(review): failure behavior therefore differs between the two builds —
 * callers must not rely on a NULL return under HeapAlloc. */
static inline
void*
_pgm_heap_alloc (
	const size_t	n_bytes
	)
{
#       ifdef CONFIG_USE_HEAPALLOC
	return HeapAlloc (GetProcessHeap(), HEAP_GENERATE_EXCEPTIONS, n_bytes);
#	else
	return pgm_malloc (n_bytes);
#	endif
}
Exemplo n.º 6
0
/*
 * Report a NeuralNet-string parse error via elog(ERROR) (does not return).
 *
 * msg - error description.
 * pos - position inside the global StringError where the error occurred; a
 *       caret line points at it in the output.
 * The buffer is not freed: elog(ERROR) aborts via longjmp and the memory
 * context cleans it up — TODO confirm pgm_malloc allocates from a context.
 */
void nn_parse_NeuralNetParseError(char *msg, char *pos){

	unsigned int len_0_at_pos = pos - StringError, i;
	char *str = (char*) pgm_malloc (sizeof(char) * (strlen(msg)+strlen(StringError)+250+2*len_0_at_pos));
	/* fix: the old code did sprintf(str, "%s...", str, ...) — passing the
	   destination as a source argument is undefined behavior. Build the
	   message with an append cursor instead; output bytes are identical. */
	char *p = str;
	p += sprintf(p,"%s\n",msg);
	p += sprintf(p,"%s\n",StringError);
	for(i = 0; i < len_0_at_pos; i++)
		*p++ = ' ';
	p += sprintf(p,"^\n");
	p += sprintf(p,"Formato da String\n");
	sprintf(p,"\t#NumeroDeCamadas#NeuroniosPorCamada#FunçãoDeAtivação#InputMin InputMax OutputMin OutputMax#Stepness#Bias#Pesos\n");
	elog(ERROR,"\n%s",str);
}
Exemplo n.º 7
0
/* PostgreSQL-callable entry point: train an SVM model with libsvm.
 * Args (by fmgr position): 0 = feature matrix, 1 = target vector,
 * 2..13 = libsvm parameters (svm_type, kernel_type, degree, gamma, coef0,
 * cache_size, eps, C, nu, p, shrinking, probability), 14 = n_fold.
 * Returns a pointer Datum to the trained svm_model, allocated in
 * CurTransactionContext. Cross-validation (n_fold >= 2) is not implemented
 * and raises an error. */
Datum pgm_svm_train(PG_FUNCTION_ARGS){

    PGM_Matriz_Double *matrix = (PGM_Matriz_Double*)PG_GETARG_POINTER(0);
    PGM_Vetor_Double *vector = (PGM_Vetor_Double*)PG_GETARG_POINTER(1);

    struct svm_parameter *param = (struct svm_parameter*) pgm_malloc (sizeof(struct svm_parameter));
    struct svm_problem* prob;
    //Cross Validation
    int cross_validation = 0,
        n_fold = PG_GETARG_INT32(14);

    // n_fold == 0 disables cross-validation; 1 is invalid; >= 2 is unimplemented
    if (n_fold < 2 && n_fold != 0) exit_with_help();
    else if( n_fold >= 2){
        cross_validation = 1;
        elog(ERROR,"CROSS VALIDATION NÃO IMPLEMENTADO");
    }

    //Mount Parameter Struct
    param->svm_type = PG_GETARG_INT32(2);
	param->kernel_type= PG_GETARG_INT32(3);
	param->degree= PG_GETARG_INT32(4);
	param->gamma= PG_GETARG_FLOAT8(5);
	param->coef0= PG_GETARG_FLOAT8(6);
	param->cache_size= PG_GETARG_FLOAT8(7);
	param->eps= PG_GETARG_FLOAT8(8);
	param->C= PG_GETARG_FLOAT8(9);
	// no per-class weighting is exposed through this interface
	param->nr_weight = 0;
	param->weight_label = NULL;
	param->weight = NULL;
	param->nu= PG_GETARG_FLOAT8(10);
	param->p= PG_GETARG_FLOAT8(11);
	param->shrinking= PG_GETARG_INT32(12);
	param->probability= PG_GETARG_INT32(13);

	prob = PGM_Matriz_Double2svm_problem(matrix,vector,param);

	// NOTE(review): unreachable in practice — the n_fold >= 2 branch above
	// already raised elog(ERROR) before cross_validation could be used here
	if (cross_validation){
        do_cross_validation(prob,param,n_fold);
        elog(ERROR,"CROSS VALIDATION NÃO IMPLEMENTADO"); // TODO: ask Filipe about this!
        PG_RETURN_VOID();
	}else{

        // train inside the transaction context so the model outlives this call
        MemoryContext contextoAnterior = MemoryContextSwitchTo( CurTransactionContext );
        struct svm_model *model = svm_train(prob,param);
        MemoryContextSwitchTo( contextoAnterior );
        PG_RETURN_POINTER(model);
	}
}
Exemplo n.º 8
0
/*
 * Parse the final "#w1 w2 ... wn" weights section of a NeuralNet string.
 *
 * st       - in/out cursor; advanced to the end of the string on success.
 * NWeights - exact number of weights expected.
 * Returns a pgm_malloc'd array of NWeights doubles; caller owns it. Any
 * shortage of values, unreadable number, or trailing garbage raises
 * nn_parse_NeuralNetParseError.
 */
double* nn_parse_ReadWeight(char **st, unsigned int NWeights){
	double *weights = (double*) pgm_malloc (sizeof(double)*(NWeights));
	unsigned int idx;
	char c;

	/* advance to the '#' that opens the section, then step past it */
	for(; **st != '#'; (*st)++)
		;
	(*st)++;

	for(idx = 0; idx < NWeights; idx++){
		/* skip inter-token whitespace */
		for(c = **st; c == ' ' || c == '\t' || c == '\r' || c == '\n'; c = **st)
			(*st)++;

		if(!(**st))
			nn_parse_NeuralNetParseError("Numero de pesos insuficiente",*st);
		if(sscanf(*st,"%lf",&weights[idx]) != 1)
			nn_parse_NeuralNetParseError("Não foi possivel ler o valor do peso",*st);

		/* step over the numeric token just parsed (digits, '.', exponent, signs) */
		for(c = **st; (c >= '0' && c <= '9') || c == '.' || c == 'e' || c == 'E' || c == '-' || c == '+'; c = **st)
			(*st)++;
	}

	/* only whitespace may remain after the last weight */
	for(c = **st; c == ' ' || c == '\t' || c == '\r' || c == '\n'; c = **st)
		(*st)++;

	if(**st) nn_parse_NeuralNetParseError("String mal formatada",*st);

	return weights;
}
Exemplo n.º 9
0
/*
 * Deserialize a libsvm svm_model from its '#'-delimited string encoding.
 *
 * str - serialized model: "#svm_type#kernel_type#degree#gamma#coef0#nr_class#l#"
 *       followed by rho values, labels, optional probA/probB/nSV sections
 *       (tagged 1/2/3, 0 terminates), then sv_coef rows and SV nodes.
 * Returns a pgm_malloc'd model (caller owns it and all nested arrays), or
 * NULL if the header cannot be parsed (the partial model is freed).
 */
struct svm_model* charptr2svm_model( char* str){
	int i,j;
	unsigned int n_nodes;
	int tem_prob = 0;
	char *ptr = str;
	struct svm_model *model = (struct svm_model*) pgm_malloc (sizeof(struct svm_model));
	model->rho = NULL;
	model->probA = NULL;
	model->probB = NULL;
	model->sv_indices = NULL;
	model->label = NULL;
	model->nSV = NULL;

	if(sscanf(ptr,"#%d#%d#%d#%lf#%lf#%d#%d",&model->param.svm_type,&model->param.kernel_type,&model->param.degree,&model->param.gamma,&model->param.coef0,&model->nr_class,&model->l) < 7){
		pgm_free(model);
		return NULL;
	}

	/* skip past the 8 delimiters of the header */
	for(i = 0; i < 8; i++)	ptr = next_delimeter(ptr);

	/* one rho per class pair */
	model->rho = (double*) pgm_malloc(sizeof(double)*(model->nr_class*(model->nr_class-1)/2));

	for(i = 0; i < model->nr_class*(model->nr_class-1)/2;i++){
		sscanf(ptr,"%lf",&model->rho[i]);
		ptr = next_delimeter(ptr);
	}

	model->label = (int*) pgm_malloc (sizeof(int)*model->nr_class);

	for(i=0;i<model->nr_class;i++){
		sscanf(ptr,"%d",&model->label[i]);
		ptr = next_delimeter(ptr);
	}

	/* optional sections, each introduced by a tag: 1=probA, 2=probB, 3=nSV, 0=done */
	do{
		sscanf(ptr,"%d",&tem_prob);
		ptr = next_delimeter(ptr);

		if(tem_prob == 1){
			model->probA = (double*) pgm_malloc (sizeof(double)*(model->nr_class*(model->nr_class-1)/2));
			for(i=0;i<(model->nr_class*(model->nr_class-1)/2);i++){
				sscanf(ptr,"%lf",&model->probA[i]);
				ptr = next_delimeter(ptr);
			}
		}else if(tem_prob == 2){
			model->probB = (double*) pgm_malloc (sizeof(double)*(model->nr_class*(model->nr_class-1)/2));
			for(i=0;i<(model->nr_class*(model->nr_class-1)/2);i++){
				sscanf(ptr,"%lf",&model->probB[i]);
				ptr = next_delimeter(ptr);
			}
		}else if(tem_prob == 3){
			/* fix: nSV holds one count per CLASS (the loop writes nr_class ints);
			   the old nr_class*(nr_class-1)/2 size under-allocates when nr_class == 2 */
			model->nSV = (int*) pgm_malloc(sizeof(int)*model->nr_class);
			for(i=0;i<model->nr_class;i++){
				sscanf(ptr,"%d",&model->nSV[i]);
				ptr = next_delimeter(ptr);
			}
		}
	}while(tem_prob);

	/* fix: parenthesize — we need (nr_class - 1) pointers, the old expression
	   computed (sizeof(double*) * nr_class) - 1 bytes */
	model->sv_coef = (double**) pgm_malloc (sizeof(double*)*(model->nr_class - 1));

	for(i = 0; i < model->nr_class - 1; i++)
		model->sv_coef[i] = (double*) pgm_malloc (sizeof(double)*model->l);

	/* fix: SV is an array of POINTERS — size by the pointer, not the struct */
	model->SV = (struct svm_node**) pgm_malloc (sizeof(struct svm_node*)*model->l);

	for(i=0;i<model->l;i++){
		for(j=0;j<model->nr_class-1;j++){
			sscanf(ptr,"%lf",&(model->sv_coef[j][i]));
			ptr = next_delimeter(ptr);
		}
		/* fix: %u — n_nodes is unsigned int (%d was a format mismatch) */
		sscanf(ptr,"%u",&n_nodes);
		ptr = next_delimeter(ptr);
		model->SV[i] = (struct svm_node*) pgm_malloc(sizeof(struct svm_node)*n_nodes);
		for(j = 0; j < (int)n_nodes; j++){
			sscanf(ptr,"%d#%lf#",&(model->SV[i][j].index),&(model->SV[i][j].value));
			ptr = next_delimeter(ptr);
			ptr = next_delimeter(ptr);
		}
	}

	return model;
}
Exemplo n.º 10
0
/* INTERNAL FUNCTION
   Populate the error information
 */
void fann_error(struct fann_error *errdat, const enum fann_errno_enum errno_f, ...)
{
	va_list ap;
	char *errstr;

	if(errdat != NULL)
		errdat->errno_f = errno_f;

	if(errdat != NULL && errdat->errstr != NULL)
	{
		errstr = errdat->errstr;
	}
	else
	{
		errstr = (char *) pgm_malloc(FANN_ERRSTR_MAX);
		if(errstr == NULL)
		{
			//elog(ERROR, "fann_error: Unable to allocate memory.\n");
			return;
		}
	}

	va_start(ap, errno_f);
	switch (errno_f)
	{
	case FANN_E_NO_ERROR:
		break;
	case FANN_E_CANT_OPEN_CONFIG_R:
		vsprintf(errstr, "Unable to open configuration file \"%s\" for reading.\n", ap);
		break;
	case FANN_E_CANT_OPEN_CONFIG_W:
		vsprintf(errstr, "Unable to open configuration file \"%s\" for writing.\n", ap);
		break;
	case FANN_E_WRONG_CONFIG_VERSION:
		vsprintf(errstr,
				 "Wrong version of configuration file, aborting read of configuration file \"%s\".\n",
				 ap);
		break;
	case FANN_E_CANT_READ_CONFIG:
		vsprintf(errstr, "Error reading \"%s\" from configuration file \"%s\".\n", ap);
		break;
	case FANN_E_CANT_READ_NEURON:
		vsprintf(errstr, "Error reading neuron info from configuration file \"%s\".\n", ap);
		break;
	case FANN_E_CANT_READ_CONNECTIONS:
		vsprintf(errstr, "Error reading connections from configuration file \"%s\".\n", ap);
		break;
	case FANN_E_WRONG_NUM_CONNECTIONS:
		vsprintf(errstr, "ERROR connections_so_far=%d, total_connections=%d\n", ap);
		break;
	case FANN_E_CANT_OPEN_TD_W:
		vsprintf(errstr, "Unable to open train data file \"%s\" for writing.\n", ap);
		break;
	case FANN_E_CANT_OPEN_TD_R:
		vsprintf(errstr, "Unable to open train data file \"%s\" for writing.\n", ap);
		break;
	case FANN_E_CANT_READ_TD:
		vsprintf(errstr, "Error reading info from train data file \"%s\", line: %d.\n", ap);
		break;
	case FANN_E_CANT_ALLOCATE_MEM:
		sprintf(errstr, "Unable to allocate memory.\n");
		break;
	case FANN_E_CANT_TRAIN_ACTIVATION:
		sprintf(errstr, "Unable to train with the selected activation function.\n");
		break;
	case FANN_E_CANT_USE_ACTIVATION:
		sprintf(errstr, "Unable to use the selected activation function.\n");
		break;
	case FANN_E_TRAIN_DATA_MISMATCH:
		sprintf(errstr, "Training data must be of equivalent structure.\n");
		break;
	case FANN_E_CANT_USE_TRAIN_ALG:
		sprintf(errstr, "Unable to use the selected training algorithm.\n");
		break;
	case FANN_E_TRAIN_DATA_SUBSET:
		vsprintf(errstr, "Subset from %d of length %d not valid in training set of length %d.\n", ap);
		break;
	case FANN_E_INDEX_OUT_OF_BOUND:
		vsprintf(errstr, "Index %d is out of bound.\n", ap);
		break;
	case FANN_E_SCALE_NOT_PRESENT:
		sprintf(errstr, "Scaling parameters not present.\n");
		break;
    case FANN_E_INPUT_NO_MATCH:
    	vsprintf(errstr, "The number of input neurons in the ann (%d) and data (%d) don't match\n", ap);
    	break;
    case FANN_E_OUTPUT_NO_MATCH:
     	vsprintf(errstr, "The number of output neurons in the ann (%d) and data (%d) don't match\n", ap);
     	break;
	}
	va_end(ap);

	if(errdat != NULL)
	{
		errdat->errstr = errstr;
	}

     //elog( ERROR, "FANN Error %d: %s", errno_f, errstr );

}