Code Example #1
void TestObjectiveFunctional::test_calculate_potential_evaluation(void)
{
   message += "test_calculate_potential_evaluation\n";

   MultilayerPerceptron mlp;

   Vector<double> parameters;

   MockObjectiveFunctional mof(&mlp);

   double evaluation;
   double potential_evaluation;

   // Only neural parameters

   mlp.set_network_architecture(1,1,1);
   mlp.set_independent_parameters_number(0);
   
   evaluation = mof.calculate_evaluation();

   parameters = mlp.get_parameters();

   potential_evaluation = mof.calculate_potential_evaluation(parameters);

   assert_true(evaluation == potential_evaluation, LOG);

   // Only independent parameters

   mlp.set_network_architecture(0,0,0);
   mlp.set_independent_parameters_number(1);
   
   evaluation = mof.calculate_evaluation();

   parameters = mlp.get_parameters();

   potential_evaluation = mof.calculate_potential_evaluation(parameters);

   assert_true(evaluation == potential_evaluation, LOG);

   // Both neural and independent parameters

   mlp.set_network_architecture(1,1,1);
   mlp.set_independent_parameters_number(1);
   
   evaluation = mof.calculate_evaluation();

   parameters = mlp.get_parameters();

   potential_evaluation = mof.calculate_potential_evaluation(parameters);

   assert_true(evaluation == potential_evaluation, LOG);
}
Code Example #2
void TestObjectiveFunctional::test_calculate_independent_parameters_gradient_central_differences(void)
{
   message += "test_calculate_independent_parameters_gradient_central_differences\n";

   MultilayerPerceptron mlp;
   MockObjectiveFunctional mof(&mlp);

   int independent_parameters_number;
   Vector<double> independent_parameters_gradient;

   // Test: empty multilayer perceptron, so there are no independent parameters

   mlp.set();
   independent_parameters_number = mlp.get_independent_parameters_number();   
   independent_parameters_gradient = mof.calculate_independent_parameters_gradient_central_differences();
   assert_true(independent_parameters_gradient.get_size() == independent_parameters_number, LOG);

   // Test: 1-1-1 network architecture only, still no independent parameters

   mlp.set(1,1,1);
   independent_parameters_number = mlp.get_independent_parameters_number();
   independent_parameters_gradient = mof.calculate_independent_parameters_gradient_central_differences();
   assert_true(independent_parameters_gradient.get_size() == independent_parameters_number, LOG);

   // Test: multilayer perceptron with a single independent parameter

   mlp.set(1);
   independent_parameters_number = mlp.get_independent_parameters_number();
   independent_parameters_gradient = mof.calculate_independent_parameters_gradient_central_differences();
   assert_true(independent_parameters_gradient.get_size() == independent_parameters_number, LOG);
}
Code Example #3
void TestEvolutionaryAlgorithm::test_set_default(void)
{
   message += "test_set_default\n";

   MultilayerPerceptron mlp;
   MockObjectiveFunctional mof(&mlp);
   EvolutionaryAlgorithm ea(&mof);

   // Test: empty multilayer perceptron, so the default population size is zero
   
   ea.set_default();
   assert_true(ea.get_population_size() == 0, LOG);

   // Test: one independent parameter, so the default population size is 10

   mlp.set(1);
   ea.set_default();
   assert_true(ea.get_population_size() == 10, LOG);
}
Code Example #4
void NormalizedSquaredErrorTest::test_calculate_performance(void)   
{
   message += "test_calculate_performance\n";

   Vector<double> parameters;

   NeuralNetwork nn(1,1);

   MultilayerPerceptron* mlpp = nn.get_multilayer_perceptron_pointer();

   mlpp->get_layer_pointer(0)->set_activation_function(Perceptron::Linear);

   mlpp->initialize_biases(0.0);
   mlpp->initialize_synaptic_weights(1.0);

   DataSet ds(1,1,2);

   Matrix<double> new_data(2, 2);
   new_data[0][0] = -1.0;
   new_data[0][1] = -1.0;
   new_data[1][0] = 1.0;
   new_data[1][1] = 1.0;

   ds.set_data(new_data);

   NormalizedSquaredError nse(&nn, &ds);
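
   // Test: a linear 1-1 network with zero biases and unit synaptic weights maps -1 to -1 and 1 to 1, so the normalized squared error is zero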

   assert_true(nse.calculate_performance() == 0.0, LOG);

   // Test: with random parameters and random data, the performance from the stored parameters equals the performance computed from an explicit parameter vector

   nn.set(1, 1);
   nn.randomize_parameters_normal();

   parameters = nn.arrange_parameters();

   ds.set(1, 1, 2);
   ds.randomize_data_normal();

   assert_true(nse.calculate_performance() == nse.calculate_performance(parameters), LOG);
}
Code Example #5
void TestObjectiveFunctional::test_calculate_potential_Hessian(void)
{
   message += "test_calculate_potential_Hessian\n";

   MultilayerPerceptron mlp;
   int parameters_number;
   Vector<double> parameters;
   
   MockObjectiveFunctional mof(&mlp);
   Matrix<double> potential_Hessian;
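
   // Only neural parameters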

   mlp.set_network_architecture(1, 1, 1);
   mlp.set_independent_parameters_number(0);

   mlp.initialize_parameters(0.0);

   parameters_number = mlp.get_parameters_number();
   parameters = mlp.get_parameters();

   potential_Hessian = mof.calculate_potential_Hessian(parameters);

   assert_true(potential_Hessian.get_rows_number() == parameters_number, LOG);
   assert_true(potential_Hessian.get_columns_number() == parameters_number, LOG);
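
   // Only independent parameters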

   mlp.set_network_architecture(0, 0, 0);
   mlp.set_independent_parameters_number(1);

   mlp.initialize_parameters(0.0);

   parameters_number = mlp.get_parameters_number();
   parameters = mlp.get_parameters();

   potential_Hessian = mof.calculate_potential_Hessian(parameters);

   assert_true(potential_Hessian.get_rows_number() == parameters_number, LOG);
   assert_true(potential_Hessian.get_columns_number() == parameters_number, LOG);
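
   // Both neural and independent parameters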

   mlp.set_network_architecture(1, 1, 1);
   mlp.set_independent_parameters_number(1);

   mlp.initialize_parameters(0.0);

   parameters_number = mlp.get_parameters_number();
   parameters = mlp.get_parameters();

   potential_Hessian = mof.calculate_potential_Hessian(parameters);

   assert_true(potential_Hessian.get_rows_number() == parameters_number, LOG);
   assert_true(potential_Hessian.get_columns_number() == parameters_number, LOG);
}
Code Example #6
void TestObjectiveFunctional::test_calculate_potential_gradient(void)
{
   message += "test_calculate_potential_gradient\n";
   
   MultilayerPerceptron mlp;

   MockObjectiveFunctional mof(&mlp);

   Vector<double> parameters;

   // Only neural parameters

   mlp.set_network_architecture(1, 1, 1);
   mlp.set_independent_parameters_number(0);

   mlp.initialize_parameters(0.0);

   parameters = mlp.get_parameters();

   assert_true(mof.calculate_potential_gradient(parameters) == 0.0, LOG);

   // Only independent parameters

   mlp.set_network_architecture(0, 0, 0);
   mlp.set_independent_parameters_number(1);

   mlp.initialize_parameters(0.0);

   parameters = mlp.get_parameters();

   assert_true(mof.calculate_potential_gradient(parameters) == 0.0, LOG);

   // Both neural and independent parameters

   mlp.set_network_architecture(1, 1, 1);
   mlp.set_independent_parameters_number(1);

   mlp.initialize_parameters(0.0);

   parameters = mlp.get_parameters();

   assert_true(mof.calculate_potential_gradient(parameters) == 0.0, LOG);
}