// Real-Time Recurrent Learning (RTRL): update each neuron's sensitivity
// matrix, where p_k[i][j] = d(activation of neuron k) / d(weight of the
// connection from neuron j to neuron i), via the standard recurrence
//   p_k[i][j] = f'(a_k) * ( sum_l w(l -> k) * p_l[i][j] + delta(k,i) * y_j )
void NeuralNetwork::RTRL_update_gradients()
{
	// for every non-input neuron k
	for (unsigned int k = m_num_inputs; k < m_neurons.size(); k++)
	{
		// for every possible connection: i is the target ("to") neuron,
		// j is the source ("from") neuron
		for (unsigned int i = m_num_inputs; i < m_neurons.size(); i++)
			for (unsigned int j = 0; j < m_neurons.size(); j++)
			{
				int t_idx = ConnectionExists(i, j);
				if (t_idx != -1)
				{
					// f'(a_k), computed from the stored activation value
					double t_derivative = 0;
					if (m_neurons[k].m_activation_function_type
							== NEAT::UNSIGNED_SIGMOID)
					{
						t_derivative = unsigned_sigmoid_derivative(
								m_neurons[k].m_activation);
					}
					else if (m_neurons[k].m_activation_function_type
							== NEAT::TANH)
					{
						t_derivative = tanh_derivative(
								m_neurons[k].m_activation);
					}
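					// any other activation type leaves t_derivative at 0,
					// which zeroes this sensitivity entry below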

					double t_sum = 0;
					// recurrent term: sum over all neurons l feeding into
					// neuron k of w(l -> k) * p_l[i][j]
					for (unsigned int l = 0; l < m_neurons.size(); l++)
					{
						int t_l_idx = ConnectionExists(k, l);
						if (t_l_idx != -1)
						{
							t_sum += m_connections[t_l_idx].m_weight
									* m_neurons[l].m_sensitivity_matrix[i][j];
						}
					}

					// Kronecker delta term: when i == k, the differentiated
					// weight feeds neuron k directly, so add the presynaptic
					// activation y_j
					if (i == k)
					{
						t_sum += m_neurons[j].m_activation;
					}
					m_neurons[k].m_sensitivity_matrix[i][j] = t_derivative
							* t_sum;
				}
				else
				{
					m_neurons[k].m_sensitivity_matrix[i][j] = 0;
				}
			}

	}

}
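Neither unsigned_sigmoid_derivative nor tanh_derivative is shown in this
snippet. A minimal sketch of what they presumably compute, assuming (as the
calls suggest) that m_activation stores the neuron's post-nonlinearity
output, so each derivative can be written in terms of the output itself:

// Sketch only: these bodies are assumptions based on the standard
// identities f' = f * (1 - f) for the logistic function and f' = 1 - f^2
// for tanh, evaluated at the stored activation (output) value a.
inline double unsigned_sigmoid_derivative(double a)
{
	return a * (1.0 - a);
}

inline double tanh_derivative(double a)
{
	return 1.0 - a * a;
}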
Example #2
/* Derivative of a unit's activation function at a given tick, expressed
   in terms of the unit's output.  primeOffset is a constant added to the
   derivative (a flat-spot correction). */
Real bptt_unit_derivative(Group *group, int unit, int tick)
{
  Real d;
  if (group->activationType == LOGISTIC_ACTIVATION)
    d = sigmoid_derivative(group->outputs[tick][unit],
                           group->temperature);
  else if (group->activationType == TANH_ACTIVATION)
    d = tanh_derivative(group->outputs[tick][unit],
                        group->temperature);
  else
    {
      Choke0("Group %s does not have legal activationType", group->name);
      exit(-1);
      return 0;  /* unreachable; keeps compilers from warning about a
                    missing return value */
    }
  d += group->primeOffset;
  return d;
}
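For context, a value like the one returned above is typically folded into
the error term during the backward pass of backpropagation through time:
delta = (error at the unit) * f'(output), with primeOffset keeping that
factor from vanishing when the unit saturates. A self-contained sketch of
that pattern (hypothetical names and values, not this library's API),
using the logistic derivative for concreteness:

#include <cstddef>
#include <cstdio>
#include <vector>

int main()
{
	const double primeOffset = 0.1;  // assumed flat-spot constant
	// assumed unit outputs at one tick; 0.01 and 0.99 are near saturation
	std::vector<double> outputs = {0.01, 0.5, 0.99};
	std::vector<double> errors = {0.3, 0.3, 0.3};  // assumed dE/dy values

	for (std::size_t u = 0; u < outputs.size(); ++u)
	{
		double y = outputs[u];
		double d = y * (1.0 - y) + primeOffset;  // logistic f' plus offset
		double delta = errors[u] * d;            // error term to propagate
		std::printf("unit %zu: f'=%.4f delta=%.4f\n", u, d, delta);
	}
	return 0;
}

Without the offset, the two near-saturated units would contribute almost
nothing to the propagated error, which is exactly the stall the constant
is meant to avoid.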