Example 1
void test_network(Network* network){
    char* input_line = malloc(50*sizeof(char));
    Vector* testing_point_in = new_vec(DIMENSION_INPUT+1);
    Vector* testing_point_out; /* assigned by compute_output_network below; pre-allocating here would leak */
    size_t test_set_size = 0;
    
    while (scanf("%49s", input_line) == 1) { /* bound the read to the 50-byte buffer; == 1 also stops on EOF */
        test_set_size++;
#if REGRESSION
        sscanf(input_line, "%lf\n", &testing_point_in->scalars[0]);
        testing_point_in->scalars[0] = normalise_data(testing_point_in->scalars[0], MAX_VALUES_INPUT[0], MIN_VALUES_INPUT[0]);
        testing_point_in->scalars[DIMENSION_INPUT] = BIAS;
        testing_point_out = compute_output_network(network, testing_point_in);
        printf("%.7lf\n", denormalise_data(testing_point_out->scalars[0], MAX_VALUES_INPUT[1], MIN_VALUES_INPUT[1]));
#elif CLASSIFICATION
        sscanf(input_line, "%lf,%lf\n", &testing_point_in->scalars[0], &testing_point_in->scalars[1]);
        testing_point_in->scalars[DIMENSION_INPUT] = BIAS;
        testing_point_out = compute_output_network(network, testing_point_in);
        if (testing_point_out->scalars[0] >= 0) {
            printf("+1\n");
        }else{
            printf("-1\n");
        }
#endif
        delete_vec(testing_point_out);
    }
    
    delete_vec(testing_point_in);
    free(input_line);
}
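
Examples 1, 5, 14, 17, 26 and 27 all come from the same neural-network project, but the Vector type they share never appears on this page. The following is a minimal sketch, assuming only what the snippets actually touch (a scalars array, a length field, and new_vec/delete_vec as allocator and destructor); the project's real definition may differ.

#include <stdlib.h>

typedef struct {
    size_t length;
    double* scalars;
} Vector;

Vector* new_vec(size_t length) {
    Vector* v = malloc(sizeof(Vector));
    v->length = length;
    v->scalars = calloc(length, sizeof(double)); /* zero-initialised */
    return v;
}

void delete_vec(Vector* v) {
    free(v->scalars);
    free(v);
}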
Example 2
/* initialize global vectors V1,V2 for subsequent use */
void setupvectors(int len)
{
    int i;
    V1 = new_vec(len);
    V2 = new_vec(len);
    for (i = 0; i < len; i++)
    {
	set_vec_element(V1, i, i+1);   /* arbitrary values */
	set_vec_element(V2, i, len-i);
    }
}
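
Examples 2, 16, 20 and 21 look like the vector ADT from the CS:APP performance labs; set_vec_element and the struct behind new_vec are not shown on this page, and the variants differ slightly (Example 20 also reads a result field). A sketch under that assumption:

#include <stdlib.h>

typedef long data_t; /* the labs typedef this per experiment */

typedef struct {
    long len;
    data_t* data;
} vec_rec, *vec_ptr;

vec_ptr new_vec(long len) {
    vec_ptr v = malloc(sizeof(vec_rec));
    v->len = len;
    v->data = len > 0 ? calloc(len, sizeof(data_t)) : NULL;
    return v;
}

/* Store val at index; report failure instead of writing out of bounds. */
int set_vec_element(vec_ptr v, long index, data_t val) {
    if (index < 0 || index >= v->len) return 0;
    v->data[index] = val;
    return 1;
}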
Example 3
/*
** Open gcveclib
*/
LUALIB_API int luaopen_gcvec (lua_State *L) {
  // init mt
  luaL_newmetatable( L, "gcvec.vec" );
  lua_pushvalue( L, -1 );
  lua_setfield( L, -2, "__index" );	// metatable.__index = metatable
  luaL_register( L, NULL, gcveclib_m );

  luaL_register(L, LUA_GCVECLIBNAME, gcveclib_f);
  
  // numeric constants
  new_vec(L, 0, 0, 0, 0);
  lua_setfield(L, -2, "zero");
  new_vec(L, 1, 1, 1, 1);
  lua_setfield(L, -2, "one");
  return 1;
}
Example 4
static int gcvec_sub( lua_State* L )
{
  vec_t* v1 = checkvec( L, 1 );
  vec_t* v2 = checkvec( L, 2 );
  new_vec( L, v1->x - v2->x, v1->y - v2->y, v1->z - v2->z, v1->w - v2->w );
  return 1;
}
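
The gcvec examples (3, 4, 6, 7, 8, 9, 22, 29, 30) rely on two helpers that never appear on this page. A plausible sketch, not the library's actual source: new_vec pushes a fresh vec_t userdata carrying the "gcvec.vec" metatable registered in luaopen_gcvec, and checkvec validates an argument of that type.

#include <lua.h>
#include <lauxlib.h>

typedef struct { float x, y, z, w; } vec_t;

static void new_vec(lua_State* L, float x, float y, float z, float w) {
  vec_t* v = (vec_t*)lua_newuserdata(L, sizeof(vec_t));
  v->x = x; v->y = y; v->z = z; v->w = w;
  luaL_getmetatable(L, "gcvec.vec"); /* set up by luaopen_gcvec in Example 3 */
  lua_setmetatable(L, -2);
}

static vec_t* checkvec(lua_State* L, int idx) {
  return (vec_t*)luaL_checkudata(L, idx, "gcvec.vec");
}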
Example 5
Neuron* new_neuron(int id, int layer_id, size_t input_connections_count){
    Neuron* neuron = malloc(sizeof(Neuron));
    neuron->id = id;
    if (layer_id == 0) {
        neuron->neuron_function = neuron_function_input;
        neuron->neuron_function_derivative = neuron_function_input_derivative;
    } else if (layer_id == HIDDEN_LAYERS_COUNT+1) {
        neuron->neuron_function = neuron_function_output;
        neuron->neuron_function_derivative = neuron_function_output_derivative;
    } else {
        neuron->neuron_function = neuron_function_hidden;
        neuron->neuron_function_derivative = neuron_function_hidden_derivative;
    }
    neuron->stored_output = 0.0;
    size_t weights_size = input_connections_count+1; //+bias
    neuron->weights = new_vec(weights_size);
    for (size_t i = 0; i < weights_size; i++) {
        double random_weight = (double)rand() / ((double) RAND_MAX);
        if (random_weight > 0.01) { /* rand()/RAND_MAX is never negative, so the old "|| < -0.01" branch was dead code */
            random_weight /= 100.0;
        }
        if (layer_id == 0) {
            neuron->weights->scalars[i] = 1.0;
        } else {
            neuron->weights->scalars[i] = random_weight * (rand()&1 ? 1.0: -1.0);
        }
    }
    return neuron;
}
Example 6
static int gcvec_add( lua_State* L )
{
  vec_t* v1 = checkvec( L, 1 );
  vec_t* v2 = checkvec( L, 2 );
  new_vec( L, v1->x + v2->x, v1->y + v2->y, v1->z + v2->z, v1->w + v2->w );
  return 1;
}
Example 7
static int gcvec_new (lua_State *L) {
  float x = (float)lua_tonumber(L, 1);
  float y = (float)lua_tonumber(L, 2);
  float z = (float)lua_tonumber(L, 3);
  float w = (float)lua_tonumber(L, 4);
  new_vec(L, x, y, z, w);
  return 1;
}
Example 8
static int gcvec_div( lua_State* L )
{
  vec_t* v1 = checkvec( L, 1 );
  float s = (float)luaL_checknumber( L, 2 );
  luaL_argcheck( L, s != 0.0f, 2, "division by zero" );
  new_vec( L, v1->x / s, v1->y / s, v1->z / s, v1->w / s );
  return 1;
}
Example 9
static int gcvec_mul( lua_State* L )
{
  vec_t* v1 = checkvec( L, 1 );
  if( lua_isuserdata(L, 2) )
  {
    // vector * vector
    vec_t* v2 = checkvec( L, 2 );
    new_vec( L, v1->x * v2->x, v1->y * v2->y, v1->z * v2->z, v1->w * v2->w );
  }
  else
  {
    // vector * scalar
    float s = (float)luaL_checknumber( L, 2 );
    new_vec( L, v1->x * s, v1->y * s, v1->z * s, v1->w * s );
  }
  return 1;
}
Example 10
File: opt.c Project: Unode/ext_apps
void dfnmin(double p[], int n, double gtol, int itmax, int maxback,
	    int *iter, double *fret,
	    double ***hesinv,
	    double(*func)(double []), void (*dfunc)(double [], double []),
	    void (*ddfunc)(double [], double **))
{
  double *g, **A, **Ainv, *xi, *pnew;
  double sum,fp,fnew,lam,x;
  int loop,i,j,k;

  g=new_vec(n); xi=new_vec(n); pnew=new_vec(n);  
  A=new_mat(n,n); Ainv=new_mat(n,n);
  fp=(*func)(p); /* function */      
  for(loop=1;loop<=itmax;loop++) {
    (*dfunc)(p,g); /* derivative */
    (*ddfunc)(p,A); /* second derivative */
    luinverse(A,Ainv,n);
    x=sym_mat(Ainv,n);

    sum=0.0;
    for(i=0;i<n;i++) {
      x=0.0; for(j=0;j<n;j++) x+=Ainv[i][j]*g[j];
      sum+=g[i]*x; xi[i] = -x;
    }
    if(sum>=0.0) lam=1.0; else lam=-1.0;
    for(k=0;k<maxback;k++) {
      for(i=0;i<n;i++) pnew[i]=p[i]+lam*xi[i];
      fnew=(*func)(pnew); /* function */      
      mydprintf(3,"\n### dfnmin: lam=%g fnew=%g fp=%g",lam,fnew,fp);
      if(fnew < fp) break;
      lam *= 0.1;
    }
    if(k==maxback) break;
    fp=fnew;
    for(i=0;i<n;i++) p[i]=pnew[i];
    mydprintf(3,"\n### dfnmin: loop=%d sum=%g fp=%g",loop,sum,fp);
    if(sum>=0 && sum<gtol) break;
  }

  *fret=fp; /* function */
  *iter=loop;
  *hesinv=Ainv;

  free_mat(A); free_vec(g); free_vec(xi); free_vec(pnew);
}
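
For orientation: dfnmin is a damped Newton minimiser. Each iteration inverts the Hessian A via luinverse, takes the step xi = -A⁻¹g, and backtracks (lam *= 0.1, at most maxback attempts) until the objective actually decreases; the inverse Hessian of the final iterate is handed back through hesinv, which is why Ainv is deliberately not freed.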
Example 11
Vector3 Vector3::operator *(const Vector3& another)
{
    double n0,n1,n2;
    n0 = c[1]*another.c[2] - c[2]*another.c[1];
    n1 = -c[0]*another.c[2] + c[2]*another.c[0];
    n2 = c[0]*another.c[1] - c[1]*another.c[0];
    Vector3 new_vec(n0,n1,n2);
    return new_vec;
}
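
Note that this overload of operator* computes the cross product of the two vectors, not a component-wise or dot product; the scalar overload in Example 23 is ordinary scaling.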
Example 12
void CutOffPlaneSolver::_buildSubgradient( Vector &x )
{
   NumberSet I;

   _findMax(x, I);

   Vector subgradient;

   int i_max = I[0];

   if (i_max == -1)
   {
      subgradient = _helpfulFuncGrad(x);
   }
   else
   {
      subgradient = _T._phi_grad[i_max]->eval(x);
   }

   double b;
   
   if (i_max == -1)
      b = -_helpfulFunc(x) + subgradient * x;
   else
      b = -_T._phi[i_max]->eval(x) + subgradient * x;

   _Sk.b.pushBack(b);

   Vector new_vec(_Sk.m + 1);

   new_vec[new_vec.size() - 1] = 1.0;

   _Sk.A << new_vec;

   for (int i = 0; i != _Sk.n; i++)
   {
      double b = 0;
      _Sk.A[i].pushBack(b);
   }

   for (int i = 0; i != subgradient.size(); i++)
   {
      _Sk.A[i][_Sk.A.m() - 1] = subgradient[i];
      _Sk.A[i + subgradient.size()][_Sk.A.m() - 1] = -subgradient[i];
   }

   b = 0;
   _Sk.c.pushBack(b);

   _Sk.n = _Sk.A.n();
   _Sk.m = _Sk.A.m();

//   cout << _Sk;
}
Example 13
void
shrink_to_fit(
	std::vector<T, A> &_impl
)
{
	std::vector<T, A> new_vec(
		_impl
	);

	_impl.swap(
		new_vec
	);
}
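
This is the classic pre-C++11 copy-and-swap trick for trimming excess capacity: the copy constructor allocates just enough storage for size() elements, and swap() hands that tight buffer to _impl while the old oversized buffer is destroyed along with new_vec.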
Example 14
Vector* compute_output_layer(Layer* layer, Vector* input){
#if ASSERTIONS_ENABLED
    if (layer->id == 0) {
        assert(input->length-1 == DIMENSION_INPUT && layer->size == DIMENSION_INPUT); // -bias
    } else {
        assert(input->length == layer->neurons[0]->weights->length); //+bias
    }
#endif
    Vector* layer_output;
    if (layer->id == HIDDEN_LAYERS_COUNT+1) {
        layer_output = new_vec(layer->size);
    } else {
        layer_output = new_vec(layer->size+1);
        layer_output->scalars[layer->size] = BIAS;
    }
    for (size_t in_neuron_id = 0; in_neuron_id < layer->size; in_neuron_id++) {
        layer_output->scalars[in_neuron_id] =
            layer->neurons[in_neuron_id]->neuron_function(layer->neurons[in_neuron_id], input);
    }
//    print_detailed_layer(layer);
    return layer_output;
}
Example 15
File: opt.c Project: Unode/ext_apps
double *lsfit(double **X, double *Y, double *W,
	      int m, int n,
	      double *beta, double *rss, double ***acmat)
{
  int i,j,k;
  double x,y;
  static double **covmat=NULL, **invmat=NULL, *xyvec=NULL;
  static int m0=0;

  /* memory allocation */
  if(m0!=m){
    covmat=renew_mat(covmat,m,m);
    invmat=renew_mat(invmat,m,m);
    xyvec=renew_vec(xyvec,m);
    m0=m;
  }
  if(!beta) beta=new_vec(m);
  if(acmat) *acmat=invmat; /* reference only */

  /* getting covariances */
  for(i=0;i<m;i++) {
    for(x=0.0,k=0;k<n;k++) x+=X[i][k]*Y[k]*W[k];
    xyvec[i]=x;
    for(j=0;j<=i;j++) {
      for(x=0.0,k=0;k<n;k++) x+=X[i][k]*X[j][k]*W[k];
      covmat[i][j]=covmat[j][i]=x;
    }
  }

  /* invmat = inverse matrix of covmat */
  luinverse(covmat,invmat,m);
  x=sym_mat(invmat,m);
  if(x>1e-5) warning("lsfit: covmat singularity %g",x);
  if(x>1e-3) warning("lsfit: COVARIANCE MATRIX IS SINGULAR");

  /* calculate the beta */
  for(i=0;i<m;i++) {
    for(x=0.0,j=0;j<m;j++) x+=invmat[i][j]*xyvec[j];
    beta[i]=x;
  }

  /* obtain the rss */
  for(x=0.0,k=0;k<n;k++) {
    for(y=0.0,i=0;i<m;i++) y+=X[i][k]*beta[i];
    x+=W[k]*(Y[k]-y)*(Y[k]-y);
  }
  *rss=x;
  
  return beta;
}
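
In other words, lsfit solves the weighted normal equations: with the m rows of X as predictors it forms covmat = X W Xᵀ and xyvec = X W y, inverts covmat with luinverse, and returns beta = covmat⁻¹ xyvec along with the weighted residual sum of squares in *rss.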
Example 16
static void setup(int cnt)
{
    int i;
    data = new_vec(cnt);
    /* Initialize array  */
    for (i = 0; i < cnt; i++) 
#if 0
	/* This runs into overflow inefficiencies with FLOAT PROD */
	set_vec_element(data, i, (data_t) (i+1));
#else
    set_vec_element(data, i, (data_t) ((random() & 0x1) ? -1 : 1)); /* cast the chosen value, not the test bit */
#endif
    sink = (data_t) 0;
}
Example 17
Layer* new_layer(size_t number_of_neurons, size_t connections_per_neuron, int id, Layer* prev, Layer* next){
    Layer* layer = malloc(sizeof(Layer));
    layer->id = id;
    layer->size = number_of_neurons;
    layer->next_layer = next;
    layer->prev_layer = prev;
    layer->deltas = new_vec(number_of_neurons);
    layer->learning_rate = MAX_LEARNING_RATE - abs(id - 1)*0.002;
    layer->neurons = malloc(number_of_neurons*sizeof(Neuron*));
    for (int i = 0; i < number_of_neurons; i++) {
        layer->neurons[i] = new_neuron(i, id, prev != NULL? prev->size : connections_per_neuron);
    }
    
    return layer;
}
Example 18
unsigned char VoxelFile::get_closest_index(RGBColor c)
{
    load_palette();
    vec3 color_vec(c.r, c.g, c.b);
    bool is_set = false;
    float dist;
    unsigned char current_index = 0;
    for (int i = 0; i < 256; i++) {
        RGBColor & pal_color = global_palette[i];
        vec3 new_vec(pal_color.r, pal_color.g, pal_color.b);
        float new_dist = glm::distance(color_vec, new_vec);
        if (is_set && new_dist >= dist)
            continue;
        is_set = true;
        dist = new_dist;
        current_index = i;
    }
    return current_index;
}
Example 19
IntVec IntVec::append(const IntVec& rhs){
  IntVec new_vec(getAsString());

  //base case
  if(!rhs.first){return new_vec;}

  Node* current = rhs.first;
  new_vec.last->next = new Node;
  new_vec.last = new_vec.last->next;
  new_vec.last->i = current->i;
  while(current->next){
    current = current->next;
    new_vec.last->next = new Node;
    new_vec.last = new_vec.last->next;
    new_vec.last->i = current->i;
  }

  return new_vec;
}
Example 20
int main(int argc, char *argv[])
{
	long int num;
	struct vec *vec_ptr;
	srand(time(NULL));

	if (argc != 2) {
		printf("Usage: %s [ARRAY_ELEMENTS]\n", argv[0]);
		exit(EXIT_SUCCESS);
	}
	
	num = atol(argv[1]);
	printf("User entered %ld\n", num);
	vec_ptr = new_vec(num);
	combine1(vec_ptr);

    	printf("Final result %ld\n", vec_ptr->result);
	
	free(vec_ptr);
	return(EXIT_SUCCESS);
}
Example 21
int main(int argc, char *argv[])
{
  int *iterations;
  void SOR(vec_ptr v, int *iterations);
  void SOR_blocked(vec_ptr v, int *iterations, int b);

  long int i, j;
  long int block_size;
  uint64_t acc;
  long int MAXSIZE = N;

  // declare and initialize the vector structure
  vec_ptr v0 = new_vec(MAXSIZE);
  iterations = (int *) malloc(sizeof(int));

  //Get blocked SOR data
  for(i=1000; i<=N; i+=1000) {
    //long int this_size = i - ((i-2)%IDEAL_BLOCK);
    long int this_size = i;
    acc=0;
    for(j=0; j<ITERS; j++) {
      fprintf(stderr, "\n(%ld %ld)", this_size, j); /* both operands are long, so %ld */
      init_vector_rand(v0, this_size);
      set_vec_length(v0, this_size);
      tick();
      SOR(v0, iterations);
      //SOR_blocked(v0, iterations, IDEAL_BLOCK);
      tock();
      acc += get_execution_time();
    }
    //length, time(ns)
    printf("%ld, %llu\n", this_size, (unsigned long long)((acc + ITERS/2) / ITERS)); /* integer rounding; the old "+ 0.5" passed a double to %lld */
  }

  printf("\n");
  free(iterations);
  return 0;
}
Example 22
static int gcvec_cross (lua_State *L) {
  const vec_t* v1 = checkvec(L, 1);
  const vec_t* v2 = checkvec(L, 2);
  new_vec(L, v1->y * v2->z - v1->z * v2->y, v1->z * v2->x - v1->x * v2->z, v1->x * v2->y - v1->y * v2->x, 0.0f);
  return 1;
}
Example 23
Vector3 Vector3::operator*(double scl)
{
    Vector3 new_vec(c[0]*scl,c[1]*scl,c[2]*scl);
    return new_vec;
}
Example 24
Vector3 Vector3::getNormalized(void)
{
    Vector3 new_vec(*this);
    new_vec.toNormalized();
    return new_vec;
}
Example 25
Vector3 Vector3::operator - (const Vector3& another)
{
    Vector3 new_vec(c[0]-another.c[0],c[1]-another.c[1],c[2]-another.c[2]);
    return new_vec;
}
Example 26
size_t read_input(Vector* training_data[], Vector* teaching_data[]){
    
    size_t input_size = 0;
    char* input_line = malloc(50*sizeof(char));
    Vector* not_normalised_training[MAX_INPUT_LENGHT];
    Vector* not_normalised_teaching[MAX_INPUT_LENGHT];
    
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        not_normalised_training[i] = new_vec(DIMENSION_INPUT+1); // +bias
        not_normalised_teaching[i] = new_vec(DIMENSION_OUTPUT);
    }
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        if (scanf("%49s", input_line) != 1){ /* bound the read to the 50-byte buffer; != 1 also stops on EOF */
            break;
        }
        if (!strncmp(input_line, TERMINATING_STR, (int)strlen(TERMINATING_STR)-1)) {
            break;
        }
#if REGRESSION
        sscanf(input_line, "%lf,%lf\n",
               &not_normalised_training[i]->scalars[0],
               &not_normalised_teaching[i]->scalars[0]);
        not_normalised_training[i]->scalars[1] = BIAS;
#elif CLASSIFICATION
        sscanf(input_line, "%lf,%lf,%lf\n",
               &not_normalised_training[i]->scalars[0],
               &not_normalised_training[i]->scalars[1],
               &not_normalised_teaching[i]->scalars[0]);
        not_normalised_training[i]->scalars[2] = BIAS;
#endif
        input_size++;
    }
    
    double min = FLOAT_MAX;
    double max = FLOAT_MIN;
    for (int i = 0; i < input_size; i++) {
        if (not_normalised_training[i]->scalars[0] > max) {
            max = not_normalised_training[i]->scalars[0];
        }
        if (not_normalised_training[i]->scalars[0] < min) {
            min = not_normalised_training[i]->scalars[0];
        }
    }
    MAX_VALUES_INPUT[0] = max;
    MIN_VALUES_INPUT[0] = min;
    
#if REGRESSION
    min = FLOAT_MAX;
    max = FLOAT_MIN;
    for (int i = 0; i < input_size; i++) {
        if (not_normalised_teaching[i]->scalars[0] > max) {
            max = not_normalised_teaching[i]->scalars[0];
        }
        if (not_normalised_teaching[i]->scalars[0] < min) {
            min = not_normalised_teaching[i]->scalars[0];
        }
    }
    MAX_VALUES_INPUT[1] = max;
    MIN_VALUES_INPUT[1] = min;
#elif CLASSIFICATION
    min = FLOAT_MAX;
    max = FLOAT_MIN;
    for (int i = 0; i < input_size; i++) {
        if (not_normalised_training[i]->scalars[1] > max) {
            max = not_normalised_training[i]->scalars[1];
        }
        if (not_normalised_training[i]->scalars[1] < min) {
            min = not_normalised_training[i]->scalars[1];
        }
    }
    MAX_VALUES_INPUT[1] = max;
    MIN_VALUES_INPUT[1] = min;
#endif
    
    for (int i = 0; i < input_size; i++) {
#if REGRESSION
        
        training_data[i]->scalars[0] = normalise_data(not_normalised_training[i]->scalars[0], MAX_VALUES_INPUT[0], MIN_VALUES_INPUT[0]);
        training_data[i]->scalars[1] = BIAS;//normalised_training->scalars[1];
        teaching_data[i]->scalars[0] = normalise_data(not_normalised_teaching[i]->scalars[0], MAX_VALUES_INPUT[1], MIN_VALUES_INPUT[1]);
#elif CLASSIFICATION
        training_data[i]->scalars[0] = normalise_data(not_normalised_training[i]->scalars[0], MAX_VALUES_INPUT[0], MIN_VALUES_INPUT[0]);
        training_data[i]->scalars[1] = normalise_data(not_normalised_training[i]->scalars[1], MAX_VALUES_INPUT[1], MIN_VALUES_INPUT[1]);
        training_data[i]->scalars[2] = BIAS;//normalised_training->scalars[2];
        teaching_data[i]->scalars[0] = not_normalised_teaching[i]->scalars[0];
#endif
    }
    
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        delete_vec(not_normalised_training[i]);
        delete_vec(not_normalised_teaching[i]);
    }
    
    free(input_line);
    return input_size;
}
Example 27
// ------------------- MAIN ----------------- //
// ------------------------------------------ //
int main(){
    srand((unsigned int)time(NULL));
    
    Vector* training_data[MAX_INPUT_LENGHT];
    Vector* teaching_data[MAX_INPUT_LENGHT];
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        training_data[i] = new_vec(DIMENSION_INPUT+1);
        teaching_data[i] = new_vec(DIMENSION_OUTPUT);
    }
    size_t TRAINING_SET_SIZE = read_input(training_data, teaching_data);
    
    // in_layer, out_layer, hid_layer_count, hid_layers
    Network* network = new_network(DIMENSION_INPUT, DIMENSION_OUTPUT, 2, 4, 4);
//    print_network(network);
    
    Vector*** best_weights = malloc((network->hidden_layers_count+1) * sizeof(Vector**));
    for (size_t layer = 0; layer < network->hidden_layers_count; layer++) {
        best_weights[layer] = malloc(network->hidden_layers[layer]->size * sizeof(Vector*));
        for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) {
            best_weights[layer][neuron_id] = new_vec(network->hidden_layers[layer]->neurons[neuron_id]->weights->length);
        }
    }
    best_weights[network->hidden_layers_count] = malloc(network->output_layer->size * sizeof(Vector*));
    for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) {
        best_weights[network->hidden_layers_count][neuron_id] = new_vec(network->output_layer->neurons[neuron_id]->weights->length);
    }
    
    time_t time_at_beginning = time(0);
    
    double total_error_old = FLOAT_MAX;
    double total_error = 1.0;
    double minimum_error_achieved = FLOAT_MAX;
    double epsilon = 0.0001;
    size_t epoch_count = 0;
    
    while ((time(0) - time_at_beginning) < 30 && (total_error = error_total(network, training_data, teaching_data, TRAINING_SET_SIZE)) > epsilon) {
        if (minimum_error_achieved > total_error) {
            minimum_error_achieved = total_error;
            dump_weights(network, best_weights);
//            print_detailed_layer(network->hidden_layers[1]);
        }
        for (size_t i = 0; i < TRAINING_SET_SIZE; i++) {
            train_network_with_backprop(network, training_data[i], teaching_data[i]);
        }
        
        if (epoch_count % 1000 == 0) {
            
//            printf("Epochs count: %ld\n",epoch_count);
            if (fabs(total_error - total_error_old) < 0.001) {
//                printf("Shaking Weights!\n");
                shake_weights(network);
            }
            total_error_old = total_error;
//            printf("Total error: %.15lf\n", total_error);
        }
        update_learning_rate(network, ++epoch_count);
        scramble_data(training_data, teaching_data, TRAINING_SET_SIZE);
    }
    
//    printf("Network training finished with a total error: %.15lf\n", total_error);
//    printf("Network training achieved a minimum total error: %.15lf\n", minimum_error_achieved);
//    print_detailed_layer(network->hidden_layers[1]);
    load_weights(network, best_weights);
//    print_detailed_layer(network->input_layer);
//    print_detailed_layer(network->hidden_layers[0]);
//    print_detailed_layer(network->hidden_layers[1]);
//    print_detailed_layer(network->output_layer);
    test_network(network);
    
    for (size_t layer = 0; layer < network->hidden_layers_count; layer++) {
        for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) {
            delete_vec(best_weights[layer][neuron_id]);
        }
        free(best_weights[layer]); /* free the row arrays too, not just the vectors */
    }
    for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) {
        delete_vec(best_weights[network->hidden_layers_count][neuron_id]);
    }
    free(best_weights[network->hidden_layers_count]);
    free(best_weights);
    
    delete_network(network);
    
    for (int i = 0; i < MAX_INPUT_LENGHT; i++) {
        delete_vec(training_data[i]);
        delete_vec(teaching_data[i]);
    }
    
    
    return EXIT_SUCCESS;
}
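
A note on the training loop above: it runs for at most 30 wall-clock seconds or until the total error drops below epsilon, snapshots the best weights seen so far with dump_weights, perturbs the weights (shake_weights) whenever the error plateaus across 1000 epochs, and finally restores the best snapshot with load_weights before calling test_network.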
Example 28
int genrmt(char *infile, char *outfile)
{
  int i,j;
  FILE *fp;
  double x,t0,t1;
  char *cbuf,*fext;

  /* open file */
  switch(seqmode) {
  case SEQ_MOLPHY: fext=fext_molphy; break;
  case SEQ_PAML: fext=fext_paml; break;
  case SEQ_PAUP: fext=fext_paup; break;
  case SEQ_PUZZLE: fext=fext_puzzle; break;
  case SEQ_PHYML: fext=fext_phyml; break;
  case SEQ_MT: 
  default: fext=fext_mt; break;
  }
  if(infile) {
    fp=openfp(infile,fext,"r",&cbuf);
    printf("\n# reading %s",cbuf);
  } else {
    fp=STDIN;
    printf("\n# reading from stdin");
  }

  /* read file */
  mm=nn=0;
  switch(seqmode) {
  case SEQ_MOLPHY: 
    datmat = fread_mat_lls(fp, &mm, &nn); break;
  case SEQ_PAML: 
    datmat = fread_mat_lfh(fp, &mm, &nn); break;
  case SEQ_PAUP: 
    datmat = fread_mat_paup(fp, &mm, &nn); break;
  case SEQ_PUZZLE: 
    datmat = fread_mat_puzzle(fp, &mm, &nn); break;
  case SEQ_PHYML: 
    datmat = fread_mat_phyml(fp, &mm, &nn); break;
  case SEQ_MT: 
  default: 
    datmat = fread_mat(fp, &mm, &nn); break;  
  }
  if(infile) {fclose(fp);  FREE(cbuf);}
  printf("\n# M:%d N:%d",mm,nn);

  /* allocating buffers */
  datvec=new_vec(mm);
  bn=new_ivec(kk); rr1=new_vec(kk);

  /* calculate the log-likelihoods */
  for(i=0;i<mm;i++) {
    x=0; for(j=0;j<nn;j++) x+=datmat[i][j];
    datvec[i]=x;
  }
  
  /* calculate scales */
  for(i=0;i<kk;i++) {
    bn[i]=(int)(rr[i]*nn); /* sample size for bootstrap */
    rr1[i]=(double)bn[i]/nn; /* recalculate rr for integer adjustment */
  }

  /* open out file */
  if(outfile) {
    /* vt ascii write to file */
    fp=openfp(outfile,fext_vt,"w",&cbuf);
    printf("\n# writing %s",cbuf);
    fwrite_vec(fp,datvec,mm);
    fclose(fp); FREE(cbuf);
    /* rmt binary write to file */
    fp=openfp(outfile,fext_rmt,"wb",&cbuf);
    printf("\n# writing %s",cbuf);
    fwrite_bvec(fp,datvec,mm);
    fwrite_bvec(fp,rr1,kk);
    fwrite_bivec(fp,bb,kk);
    fwrite_bi(fp,kk);
  } else {
    /* rmt ascii write to stdout */
    printf("\n# writing to stdout");
    printf("\n# OBS:\n"); write_vec(datvec,mm);
    printf("\n# R:\n"); write_vec(rr1,kk);
    printf("\n# B:\n"); write_ivec(bb,kk);
    printf("\n# RMAT:\n");
    printf("%d\n",kk);
  }


  /* generating the replicates by resampling*/
  for(i=j=0;i<kk;i++) j+=bb[i];
  printf("\n# start generating total %d replicates for %d items",j,mm);
  fflush(STDOUT);
  t0=get_time();

  for(i=0;i<kk;i++) {
    repmat=new_lmat(mm,bb[i]);
    scaleboot(datmat,repmat,mm,nn,bn[i],bb[i]);
    if(outfile) {
      fwrite_bmat(fp,repmat,mm,bb[i]);
      putdot();
    } else {
      printf("\n## RMAT[%d]:\n",i); write_mat(repmat,mm,bb[i]);
    }
    free_lmat(repmat,mm);
  }

  t1=get_time();
  printf("\n# time elapsed for bootstrap t=%g sec",t1-t0);

  if(outfile) {
    fclose(fp); FREE(cbuf);
  }

  /* freeing buffers */
  free_vec(bn); free_vec(rr1); free_vec(datvec); free_mat(datmat);

  return 0;
}
Example 29
static int gcvec_negate( lua_State* L )
{
  vec_t* v = checkvec( L, 1 );
  new_vec( L, -v->x, -v->y, -v->z, -v->w );
  return 1;
}
Example 30
static int gcvec_normalize (lua_State *L) {
  const vec_t* v = checkvec(L, 1);
  float len = sqrtf(v->x*v->x + v->y*v->y + v->z*v->z + v->w*v->w); /* sqrtf, not sqrt: keep the math in float */
  luaL_argcheck(L, len != 0.0f, 1, "cannot normalize a zero-length vector");
  float s = 1.0f / len;
  new_vec(L, v->x*s, v->y*s, v->z*s, v->w*s);
  return 1;
}