Пример #1
0
// Propagate activations one layer up: hidden = f(W * visible + bias),
// where W and bias connect layer index_lower_layer to the layer above it.
// Layer sizes are read from the member array num_units_ineach_layer.
void dnn::forward_activation(int index_lower_layer, float ** local_weights, float * local_bias, float * visible, float * hidden)
{
  const int n_hidden = num_units_ineach_layer[index_lower_layer + 1];
  const int n_visible = num_units_ineach_layer[index_lower_layer];

  // Affine transform: hidden <- local_weights * visible + local_bias.
  matrix_vector_multiply(local_weights, visible, hidden, n_hidden, n_visible);
  add_vector(hidden, local_bias, n_hidden);

  // The topmost weight layer (index num_layers-2) gets log2ori; every
  // interior layer gets the logistic nonlinearity. The two conditions are
  // mutually exclusive, so branch order does not affect behavior.
  if (index_lower_layer == num_layers - 2)
    log2ori(hidden, n_hidden);
  else if (index_lower_layer < num_layers - 2)
    activate_logistic(hidden, n_hidden);
}
Пример #2
0
// RBM forward pass: hidden = logistic(weights * visible + hidden_bias).
// Unit counts come from the member fields num_hidden_units / num_visible_units.
void rbm::forward_activation(float ** weights, float * hidden_bias, float * visible, float * hidden){
    const int n_h = num_hidden_units;
    const int n_v = num_visible_units;
    // hidden <- weights * visible
    multiply_matrix_vector(weights, visible, hidden, n_h, n_v);
    // hidden <- hidden + hidden_bias
    add_vector(hidden, hidden_bias, n_h);
    // hidden <- sigmoid(hidden), element-wise
    activate_logistic(hidden, n_h);
}
Пример #3
0
// Evaluate the hidden layer for a fresh visible input, given the model's
// weights and hidden-layer bias. Writes the result into `hidden`, which the
// caller must have sized to num_hidden_units.
void compute_hidden(float ** weights, float * hidden_bias, float * visible, float * hidden, int num_hidden_units, int num_visible_units){
    const int n_h = num_hidden_units;
    // hidden <- sigmoid(weights * visible + hidden_bias)
    multiply_matrix_vector(weights, visible, hidden, n_h, num_visible_units);
    add_vector(hidden, hidden_bias, n_h);
    activate_logistic(hidden, n_h);
}