Example #1
void BPN::ConstructDefault(int thrs) {
  unsigned sizes[] = {262, 66, 256, 1};                                  //  layer widths: input, two hidden, output
  outFunction functions[] = {sigmoid2, sigmoid2, sigmoid2, sigmoid2};    //  activation for each layer
  bool biases[] = {false, true, true, true};                             //  no bias on the input layer

  constructor(sizes, biases, functions, 4, 0.35f, 0.3f, 29744.0f, thrs);   //  4 layers, eta = 0.35, alpha = 0.3, scale factor = 29744
  InitializeWeights();
}
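For orientation, a minimal usage sketch. It relies on the file-based constructor shown in Example #5 below; the header name, file path, and thread count are assumptions, not part of the library.

#include "bpn.h"   //  assumed header name

int main() {
  //  Falls back to the default topology above if "net.bpn" cannot be opened (see Example #5).
  BPN net("net.bpn", 4);   //  4 worker threads; both arguments are illustrative
  return 0;
}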
Example #2
void NetGraph::InitializeWeights() {
	// Clear the traversal flags before the recursive pass
	for (NetGraphNode* node : nodes_)
		node->flag_bp_visited = false;

	// Initialize every node; the recursion follows backprop connections
	for (NetGraphNode* node : nodes_)
		InitializeWeights(node);

	// Leave the flags clean for later passes
	for (NetGraphNode* node : nodes_)
		node->flag_bp_visited = false;
}
Example #3
void NetGraph::InitializeWeights(NetGraphNode* node) {
	if (!node->flag_bp_visited) {
		// Post-order: initialize all backprop predecessors first
		std::vector<Layer*> layers_to_connect;
		for (NetGraphBackpropConnection backprop_connection : node->backprop_connections) {
			InitializeWeights(backprop_connection.node);
			layers_to_connect.push_back(backprop_connection.node->layer);
		}
		// Then connect this node's layer to its predecessors
		node->layer->OnLayerConnect(layers_to_connect);

		node->flag_bp_visited = true;
	}
}
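Examples #2 and #3 together implement a post-order traversal: each node's backprop predecessors are initialized before the node itself, and flag_bp_visited prevents revisiting shared predecessors. A minimal, self-contained sketch of the same pattern, using a hypothetical Node type rather than the library's actual classes:

#include <vector>

//  Hypothetical stand-in for NetGraphNode, reduced to what the traversal needs.
struct Node {
  std::vector<Node*> backprop_predecessors;
  bool visited = false;
};

//  Post-order walk: handle every predecessor first, then the node itself,
//  mirroring NetGraph::InitializeWeights(NetGraphNode*) above.
void InitializePostOrder(Node* node) {
  if (node->visited) return;
  for (Node* pred : node->backprop_predecessors)
    InitializePostOrder(pred);
  //  ... connect/initialize this node's layer against its predecessors here ...
  node->visited = true;
}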
Example #4
void BPN::constructor(unsigned *sizes, bool *biases, outFunction *functions, unsigned layersNumber, double et, double alph, double o_factor, int thrs) {
  pthread_mutex_init(&max_threads_mutex, NULL);
  max_threads = thrs < 0 ? 0 : thrs;   //  negative thread counts are clamped to zero
  minchunk = 64;
  size = layersNumber;
  alpha = alph;
  eta = et;
  scale_factor = o_factor;
  initial_scale = 0.03f;

  if(size > 0) {
    layers = new bpnLayer*[size];
    layers[0] = new bpnLayer(sizes[0], 0, false, functions[0], max_threads + 1);   //  input layer - no lower layer

    unsigned i;
    for(i=1; i<size; ++i) {
      layers[i] = new bpnLayer(sizes[i], sizes[i-1], biases[i], functions[i], max_threads + 1);
    }

    unsigned lowerSize = layers[size-1]->size;   //  width of the output layer

    if(lowerSize > 0) {
      train_output = new double[lowerSize];
      for(unsigned j=0; j < lowerSize; ++j) {
        train_output[j] = 0.0f;
      }
    }

    InitializeWeights();

    if(size > 2) {
      li_w = new LayerThread*[size - 2];
      for(i=0; i < size - 2; ++i) {
        li_w[i] = new LayerThread;
      }

      thread_ids_w = new pthread_t[size - 2];
    }
  }

  li = new LayerThread*[max_threads + 1];
  for(int j=0; j <= max_threads; ++j) {
    li[j] = new LayerThread;
  }

  thread_ids = new pthread_t[max_threads];
}
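The constructor owns several heap allocations (layers, train_output, li_w, thread_ids_w, li, thread_ids) plus the mutex. The library's actual destructor is not shown here; the following is only a hypothetical teardown, sketched to make the ownership explicit and assuming pointer members are null when their allocation branches were skipped.

BPN::~BPN() {
  pthread_mutex_destroy(&max_threads_mutex);

  delete[] thread_ids;
  for(int j=0; j <= max_threads; ++j) {
    delete li[j];
  }
  delete[] li;

  if(size > 2) {
    delete[] thread_ids_w;
    for(unsigned i=0; i < size - 2; ++i) {
      delete li_w[i];
    }
    delete[] li_w;
  }

  if(size > 0) {
    delete[] train_output;   //  assumes it is null when the output layer was empty
    for(unsigned i=0; i < size; ++i) {
      delete layers[i];
    }
    delete[] layers;
  }
}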
Example #5
BPN::BPN(const char* file, int thrs) {
  std::ifstream fin;
  fin.open(file, std::ifstream::in);

  if(!fin.is_open()) {
    ConstructDefault(thrs);
    return;
  }

  unsigned i;
  max_threads = thrs < 0 ? 0 : thrs;

  readString(fin);
  fin>>initial_scale;

  readString(fin);
  fin>>eta;

  readString(fin);
  fin>>alpha;

  readString(fin);
  fin>>scale_factor;

  readString(fin);
  fin>>size;

  if(size > 0) {
    unsigned f, layersize, prevsize=0;
    bool bias;

    //  input layer - no bias
    readString(fin);
    readString(fin);
    fin>>layersize;

    readString(fin);
    fin>>bias;

    readString(fin);
    fin>>f;

    bias = false;   //  the input layer never carries a bias, regardless of the file
    layers = new bpnLayer*[size];
    layers[0] = new bpnLayer(layersize, prevsize, bias, (outFunction) f, max_threads + 1);
    prevsize = layersize;

    for(i=1; i<size; ++i) {  //  iterate over all layers above the input
      readString(fin);
      readString(fin);
      fin>>layersize;

      readString(fin);
      fin>>bias;

      readString(fin);
      fin>>f;

      layers[i] = new bpnLayer(layersize, prevsize, bias, (outFunction) f, max_threads + 1);
      prevsize = layersize;
    }

    unsigned lowerSize = layers[size-1]->size;   //  width of the output layer

    if(lowerSize > 0) {
      train_output = new double[lowerSize];
      for(i=0; i < lowerSize; ++i) {
        train_output[i] = 0.0f;
      }
    }

    char ch;
    fin>>ch;

    if(ch == 'x') {  //  an 'x' marker means no weights were stored; initialize them instead
      fin.close();
      InitializeWeights();
      return;
    }

    for(i=0; i<size; ++i) {
      LoadLayer(layers[i], fin);
    }
  }
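Read back from the parsing order above, the file begins with a label/value pair for initial_scale, eta, alpha, scale_factor, and the layer count, followed by one block per layer (two label tokens, the layer width, a bias flag, and an activation id), and then either an 'x' marker (no stored weights, so InitializeWeights() runs) or the serialized weights consumed by LoadLayer. A plausible layout for the default network from Example #1; the label text and the numeric value of sigmoid2 are placeholders, since readString and the matching writer are not shown here.

initial_scale 0.03
eta 0.35
alpha 0.3
scale_factor 29744
layers 4
layer 0 262
bias 0
function <sigmoid2>
layer 1 66
bias 1
function <sigmoid2>
layer 2 256
bias 1
function <sigmoid2>
layer 3 1
bias 1
function <sigmoid2>
x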