Example #1
void PerformanceFunctionalTest::test_calculate_terms_Jacobian(void) {
  message += "test_calculate_terms_Jacobian\n";

  DataSet ds;
  NeuralNetwork nn;
  PerformanceFunctional pf(&nn, &ds);

  pf.set_objective_type(PerformanceFunctional::SUM_SQUARED_ERROR_OBJECTIVE);

  Matrix<double> terms_Jacobian;

  // Test

  ds.set(1, 1, 3);
  ds.initialize_data(0.0);

  nn.set(1, 1);
  nn.initialize_parameters(0.0);

  terms_Jacobian = pf.calculate_terms_Jacobian();

  // 3 rows: one per training instance; 2 columns: one per parameter (weight + bias).
  assert_true(terms_Jacobian.get_rows_number() == 3, LOG);
  assert_true(terms_Jacobian.get_columns_number() == 2, LOG);
  assert_true(terms_Jacobian == 0.0, LOG);
}
Example #2
TEST(TestNeuralNetwork, getWeights)
{
    NeuralNetwork nn {400, 25, 10};
    EXPECT_EQ(401 * 25 + 26 * 10, nn.getWeights().size());

    for (auto w : nn.getWeights()) {
        EXPECT_FLOAT_EQ(0.0f, w);
    }
}
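
The expected size follows from one bias per receiving neuron: each of the 25 hidden units sees the 400 inputs plus a bias, and each of the 10 outputs sees the 25 hidden activations plus a bias, so

    (400 + 1) * 25 + (25 + 1) * 10 = 10025 + 260 = 10285

weights in total.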
Example #3
void test_video() {

	VideoCapture cap(CV_CAP_ANY);
	ImageProcessor processor;
	ImageLoader loader;
	NeuralNetwork net;
	net.load(NET_FILE_NAME);

	//net.visualize_hidden_units(1, 50);

	if (!cap.isOpened()) {
		cout << "Failed to initialize camera\n";
		return;
	}

	namedWindow("CameraCapture");
	namedWindow("ProcessedCapture");

	cv::Mat frame;
	while (true) {

		cap >> frame;
		if (frame.empty())   // skip dropped frames from the camera
			continue;

		cv::Mat processedFrame = processor.process_image(frame);

		if(processedFrame.rows * processedFrame.cols == INPUT_LAYER_SIZE) {

			mat input = loader.to_arma_mat(processedFrame);

			int label = net.predict(input);

			// Map the predicted vowel label to its letter and draw it on the frame.
			static const char* vowels[] = {"A", "E", "I", "O", "U"};
			if (label >= 0 && label <= 4)
				putText(frame, vowels[label], Point(500, 300), FONT_HERSHEY_SCRIPT_SIMPLEX, 2, Scalar::all(0), 3, 8);
		}

		imshow("CameraCapture", frame);
		imshow("ProcessedCapture", processedFrame);

		int key = waitKey(5);

		if (key == 13) {   // Enter: save a snapshot
			imwrite("captura.jpg", frame);
		}
		if (key == 27)     // Esc: quit
			break;
	}

	destroyAllWindows();
}
Example #4
void NeuralNetworkManager::resetNeuralNetworks() {
	TRACE("NeuralNetworkManager::resetNeuralNetworks");

	QMutexLocker locker(&mNetworkExecutionMutex);
	for(QListIterator<NeuralNetwork*> i(mNeuralNetworks); i.hasNext();) {
		NeuralNetwork *net = i.next();
		net->reset();
	}
}
Example #5
void NeuralNetworkTest::test_get_multilayer_perceptron_pointer(void) {
  message += "test_get_multilayer_perceptron_pointer\n";

  NeuralNetwork nn;

  // Test

  nn.set(1, 1);
  assert_true(nn.get_multilayer_perceptron_pointer() != NULL, LOG);
}
Example #6
TEST(TestNeuralNetwork, backpropagationConvergence)
{
    NeuralNetwork nn {2, 2, 2};
    vector<float> weights {0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f, 1.0f, 1.1f, 1.2f};

    nn.setWeights(weights);
    nn.setActivationFunction(ActivationFunction::tangent());

    testConvergence(nn, &NeuralNetwork::calcBackpropagationGradient, createXorEqSample);
}
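
The testConvergence helper is not shown in this listing. The following is a hypothetical sketch of what it might look like, assuming the NeuralNetwork API visible in the other examples here (setInput, calc, calcCost, getWeights, setWeights), a gradient member function passed as a pointer-to-member, and a sample generator with the same signature as createAndOrSample in Example #30. The learning rate and epoch count are illustrative values, not taken from the source.

// Hypothetical sketch of the testConvergence helper (assumptions noted above).
void testConvergence(NeuralNetwork& nn,
                     void (NeuralNetwork::*calcGradient)(const vector<float>&, vector<float>&),
                     bool (*createSample)(int, vector<float>&, vector<float>&))
{
    const float learningRate = 0.5f;   // illustrative, not from the source
    vector<float> input(2), expected(2), gradient;

    for (int epoch = 0; epoch < 1000; ++epoch) {
        int iter = 0;
        bool lastBatch;
        do {
            lastBatch = createSample(iter++, input, expected);
            nn.setInput(input);
            nn.calc();
            (nn.*calcGradient)(expected, gradient);

            // Plain gradient descent on the flat weight vector.
            vector<float> weights = nn.getWeights();
            for (size_t i = 0; i < weights.size(); ++i)
                weights[i] -= learningRate * gradient[i];
            nn.setWeights(weights);
        } while (!lastBatch);
    }

    EXPECT_NEAR(0.0f, nn.calcCost(expected), 1e-2f);  // cost should have collapsed
}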
Example #7
void NeuralNetworkTest::test_get_conditions_layer_pointer(void) {
  message += "test_get_conditions_layer_pointer\n";

  NeuralNetwork nn;

  nn.construct_conditions_layer();

  // Test

  assert_true(nn.get_conditions_layer_pointer() != NULL, LOG);
}
Example #8
TEST(TestNeuralNetwork, setWeights)
{
    NeuralNetwork nn {400, 25, 10};
    float curWeight = 0.0f;

    nn.setWeights([&curWeight] {return curWeight++;});

    for (int i = 0; i < nn.getWeights().size(); ++i) {
        EXPECT_FLOAT_EQ((float)i, nn.getWeights()[i]);
    }
}
Example #9
void NeuralNetworkTest::test_get_independent_parameters_pointer(void) {
  message += "test_get_independent_parameters_pointer\n";

  NeuralNetwork nn;

  nn.construct_independent_parameters();

  // Test

  assert_true(nn.get_independent_parameters_pointer() != NULL, LOG);
}
Example #10
void evaluate_net(vector<pair<mat, mat> > &test_data) {

	NeuralNetwork net;
	net.load(NET_FILE_NAME);
	//net.print();

	double corrects = net.evaluate(test_data);
	double percentage = (corrects / CANT_TEST_ELEM) * 100;
	cout << "corrects: " << corrects << ", percentage: " << percentage << "%" << endl;

	cout << "total cost: " << net.calcule_total_cost(test_data, 0) << endl;
}
Example #11
void LevenbergMarquardtAlgorithmTest::test_calculate_gradient(void)
{
   message += "test_calculate_gradient\n";

   DataSet ds;

   NeuralNetwork nn;

   PerformanceFunctional pf(&nn, &ds);

   Vector<double> terms;
   Matrix<double> terms_Jacobian;

   Vector<double> gradient;

   LevenbergMarquardtAlgorithm lma(&pf);

   // Test

//   ds.set(1, 1, 2);
//   ds.randomize_data_normal();

//   nn.set(1, 1);
//   nn.randomize_parameters_normal();

//   terms = pf.calculate_terms();

//   terms_Jacobian = pf.calculate_terms_Jacobian();

//   gradient = lma.calculate_gradient(terms, terms_Jacobian);

//   assert_true((gradient-pf.calculate_gradient()).calculate_absolute_value() < 1.0e-3, LOG);

   // Test

   nn.set(1, 1);

   nn.randomize_parameters_normal();

   MockErrorTerm* mptp = new MockErrorTerm(&nn);

   pf.set_user_error_pointer(mptp);

   terms = pf.calculate_terms();

   terms_Jacobian = pf.calculate_terms_Jacobian();

   gradient = lma.calculate_gradient(terms, terms_Jacobian);

   assert_true(gradient == pf.calculate_gradient(), LOG);
}
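
For reference, the gradient that calculate_gradient assembles from the terms vector and the terms Jacobian is the standard sum-of-squares identity: for an objective of the form

    f(w) = \sum_i \tau_i(w)^2,    \nabla f(w) = 2 J(w)^T \tau(w),

where J is the Jacobian of the terms with respect to the parameters. This is why the test only needs the terms and their Jacobian rather than a separate differentiation pass.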
Example #12
void EvolutionaryAlgorithmTest::test_from_XML(void)
{
   message += "test_from_XML\n";

   DataSet ds;

   NeuralNetwork nn;

   PerformanceFunctional pf(&nn, &ds);

   EvolutionaryAlgorithm ea(&pf);

   EvolutionaryAlgorithm ea1;
   EvolutionaryAlgorithm ea2;

   tinyxml2::XMLDocument* document;

   Matrix<double> population;

   // Test

   document = ea1.to_XML();

   ea2.from_XML(*document);

   delete document;

   assert_true(ea1 == ea2, LOG);

   // Test

   ds.set(1, 1, 1);
   ds.randomize_data_normal();

   nn.set(1, 1);

   ea.set_population_size(4);
   ea.set_elitism_size(0);

   ea.randomize_population_normal();
   population = ea.get_population();

   document = ea.to_XML();

   ea.initialize_population(0.0);

   ea.from_XML(*document);

   delete document;

   assert_true((ea.get_population() - population).calculate_absolute_value() < 1.0e-3, LOG);
}
Example #13
void NeuralNetworkTest::test_randomize_parameters_normal(void) {
  message += "test_randomize_parameters_normal\n";

  NeuralNetwork nn;
  Vector<double> network_parameters;

  // Test

  nn.set(1, 1, 1);
  // Mean 1.0 with standard deviation 0.0 forces every parameter to exactly 1.0.
  nn.randomize_parameters_normal(1.0, 0.0);
  network_parameters = nn.arrange_parameters();
  assert_true(network_parameters == 1.0, LOG);
}
Example #14
void NeuralNetworkManager::valueChanged(Value *value) {
	if(value == 0) {
		return;
	}
	if(value == mBypassNetworkValue) {
		QMutexLocker locker(&mNetworkExecutionMutex);

		for(QListIterator<NeuralNetwork*> i(mNeuralNetworks); i.hasNext();) {
			NeuralNetwork *net = i.next();
			net->bypassNetwork(mBypassNetworkValue->get());
		}
	}
}
Example #15
void test_function( NeuralNetwork &net, double *table, int rows, int cols, double err_thresh = 0.0 )
{
    vector< example > examples;
    load_examples( table, rows, cols, examples);
    print_examples( examples);

    cout << "Initial weights: \n";
    net.print_net();
    cout << endl;

    net.train( examples, err_thresh );
    print_results( examples, net );
    cout << endl;
}
Example #16
void NeuralNetworkTest::test_randomize_parameters_uniform(void) {
  message += "test_randomize_parameters_uniform\n";

  NeuralNetwork nn;
  Vector<double> parameters;

  // Test

  nn.set(1, 1, 1);
  nn.randomize_parameters_uniform();
  parameters = nn.arrange_parameters();
  // The default uniform initialization draws from [-1, 1].
  assert_true(parameters >= -1.0, LOG);
  assert_true(parameters <= 1.0, LOG);
}
Example #17
void NeuralNetworkTest::test_calculate_parameters_norm(void) {
  message += "test_calculate_parameters_norm\n";

  NeuralNetwork nn;
  double parameters_norm;

  // Test

  nn.set();

  parameters_norm = nn.calculate_parameters_norm();

  // An empty network has no parameters, so its norm is zero.
  assert_true(parameters_norm == 0.0, LOG);
}
Example #18
void NeuralNetworkManager::executeNeuralNetworks() {
	TRACE("NeuralNetworkManager::executeNeuralNetworks");

	if(!mDisableNetworkUpdate->get()) {
		//mNetworkEvaluationStarted is triggered as upstream event of NextStep.
		QMutexLocker locker(&mNetworkExecutionMutex);
		for(QListIterator<NeuralNetwork*> i(mNeuralNetworks); i.hasNext();) {
			NeuralNetwork *net = i.next();
			net->executeStep(mNumberOfNetworkUpdatesPerStep->get());
		}
	}
	//trigger evaluation completed even if the update was not triggered
	//because it may still be changed by a third-party plug-in, e.g. a playback device.
	mNetworkEvaluationCompleted->trigger();
}
Example #19
static void visualizeNeuron(const NeuralNetwork& network, Image& image, unsigned int outputNeuron)
{
	Matrix matrix;

	std::string solverClass = util::KnobDatabase::getKnobValue(
		"NeuronVisualizer::SolverClass", "Differentiable");
	
	if(solverClass == "Differentiable")
	{
		matrix = optimizeWithDerivative(&network, image, outputNeuron);
	}
	else if(solverClass == "NonDifferentiable")
	{
		matrix = optimizeWithoutDerivative(&network, image, outputNeuron);
	}
	else if(solverClass == "Analytical")
	{
		matrix = optimizeAnalytically(&network, image, outputNeuron);
	}
	else
	{
		throw std::runtime_error("Invalid neuron visializer solver class " + solverClass);
	}

	size_t x = 0;
	size_t y = 0;
	
	util::getNearestToSquareFactors(x, y, network.getInputBlockingFactor());

	updateImage(image, matrix, x, y);
}
Example #20
static NeuralNetwork extractTileFromNetwork(const NeuralNetwork& network,
	unsigned int outputNeuron)
{
	// Get the connected subgraph
	auto newNetwork = network.getSubgraphConnectedToThisOutput(outputNeuron); 

	util::log("NeuronVisualizer")
		<< "sliced out tile with shape: " << newNetwork.shapeString() << ".\n";
		
	// Remove all other connections from the final layer
	#if 1
	size_t block  = (outputNeuron % newNetwork.getOutputNeurons()) / newNetwork.getOutputBlockingFactor();
	size_t offset = (outputNeuron % newNetwork.getOutputNeurons()) % newNetwork.getOutputBlockingFactor();	

	auto& outputLayer = newNetwork.back();
	
	assert(block < outputLayer.blocks());
	
	Matrix weights = outputLayer[block].slice(0, offset, outputLayer.getInputBlockingFactor(), 1);
	Matrix bias    = outputLayer.at_bias(block).slice(0, offset, 1, 1);
	
	outputLayer.resize(1, outputLayer.getInputBlockingFactor(), 1);
	
	outputLayer[0]         = weights;
	outputLayer.at_bias(0) = bias;

	util::log("NeuronVisualizer")
		<< " trimmed to: " << newNetwork.shapeString() << ".\n";
	#endif
	return newNetwork;
}
Example #21
Vector<double> Car::calculate_final_solutions(const NeuralNetwork& neural_network) const
{ 
   Car car_copy(*this);

   car_copy.set_final_independent_variable(neural_network.get_independent_parameters_pointer()->get_parameter(0));

   switch(solution_method)
   {
      case RungeKutta:
      {
         return(car_copy.calculate_Runge_Kutta_final_solution(neural_network));
      }            
      break;

      case RungeKuttaFehlberg:
      {
         return(car_copy.calculate_Runge_Kutta_Fehlberg_final_solution(neural_network));
      }
      break;

      default:
      {
         std::ostringstream buffer;

         buffer << "OpenNN Exception: Car class\n"
                << "Vector<double> calculate_final_solutions(const NeuralNetwork&) const method.\n"               
                << "Unknown solution method.\n";

         throw std::logic_error(buffer.str());
      }
      break;
   }
}
Example #22
TYPE error_single(NeuralNetwork& nn, const values_t& input, const values_t& expected_output)
{
    values_t output;
    nn.process(input, output);
    TYPE res = abs(output - expected_output);
    return res*res/2;
}
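
The return value is the conventional half squared error. Since |x|^2 = x^2, the abs() is mathematically redundant here; the factor of 1/2 is kept so the derivative carries no stray constant:

    E = \frac{1}{2}(y - t)^2,    \frac{\partial E}{\partial y} = y - t.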
Example #23
TEST(TestNeuralNetwork, costFunction)
{
    int called = 0;
    CostFunction c([&called](float, float expected) {
        called++;
        return expected * 64.0f;
    });

    NeuralNetwork nn {2, 2};
    vector<float> expected {1.0f, 0.5f};

    nn.setCostFunction(c);

    // The stub runs once per output neuron: 1.0f * 64 + 0.5f * 64 = 64 + 32.
    EXPECT_FLOAT_EQ((64.0f + 32.0f), nn.calcCost(expected));
    EXPECT_EQ(2, called);
}
Example #24
TEST(StateTest, AllMethodsWithInit)
{
  typedef NeuralNetwork<double, StepActivationFunction<double>> network;
  network nn;
  ASSERT_NO_THROW(nn.init());
  ASSERT_THROW(nn.setEntries(2), network::WrongState);
  ASSERT_THROW(nn.setExits(2), network::WrongState);
  ASSERT_THROW(nn.setLayersCount(1), network::WrongState);
  ASSERT_THROW(nn.setEntries(2), network::WrongState);
  ASSERT_THROW(nn.setNeurons(1, 2), network::WrongState);
  ASSERT_THROW(nn.setNeurons(2, 2), network::WrongState);
  std::vector<double> ans;
  ans.assign(2, 1);
  ASSERT_NO_THROW(nn.learn(ans.begin(), ans.end()));
  ASSERT_NO_THROW(nn.calcOutput());
  ASSERT_NO_THROW(nn.stop());
}
Example #25
void SumSquaredErrorTest::test_calculate_squared_errors(void)
{
   message += "test_calculate_squared_errors\n";

   NeuralNetwork nn;

   DataSet ds;

   SumSquaredError sse(&nn, &ds);

   Vector<double> squared_errors;

   double objective;

   // Test 

   nn.set(1,1,1);

   nn.initialize_parameters(0.0);

   ds.set(1,1,1);

   ds.initialize_data(0.0);

   squared_errors = sse.calculate_squared_errors();

   assert_true(squared_errors.size() == 1, LOG);
   assert_true(squared_errors == 0.0, LOG);   

   // Test

   nn.set(2,2,2);

   nn.randomize_parameters_normal();

   ds.set(2,2,2);

   ds.randomize_data_normal();

   squared_errors = sse.calculate_squared_errors();

   objective = sse.calculate_error();

   assert_true(fabs(squared_errors.calculate_sum() - objective) < 1.0e-12, LOG);
}
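
The final assertion checks the defining decomposition of this error term: the value returned by calculate_error() equals the sum of the per-instance squared errors,

    E = \sum_{i=1}^{N} \lVert y(x_i) - t_i \rVert^2,

with a 1.0e-12 tolerance to absorb floating-point rounding.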
Example #26
void saveNetwork(const NeuralNetwork<TDevice> &nn, const std::string &filename)
{
    rapidjson::Document jsonDoc;
    jsonDoc.SetObject();
    nn.exportLayers (&jsonDoc);
    nn.exportWeights(&jsonDoc);

    FILE *file = fopen(filename.c_str(), "w");
    if (!file)
        throw std::runtime_error("Cannot open file");

    rapidjson::FileStream os(file);
    rapidjson::PrettyWriter<rapidjson::FileStream> writer(os);
    jsonDoc.Accept(writer);

    fclose(file);
}
Example #27
void NeuralNetworkTest::test_from_XML(void) {
  message += "test_from_XML\n";

  NeuralNetwork nn;

  tinyxml2::XMLDocument* document;

  // Test

  nn.initialize_random();

  document = nn.to_XML();

  nn.from_XML(*document);

  delete document;
}
Example #28
void TrainingStrategyTest::test_perform_training(void)
{
   message += "test_perform_training\n";

   NeuralNetwork nn;
   DataSet ds;
   PerformanceFunctional pf(&nn, &ds);
   TrainingStrategy ts(&pf);

   // Test

   nn.set(1, 1);
   ds.set(1, 1, 2);

//   ts.perform_training();
}
Example #29
void Physics::testPhysics() {
	int box3 = createBox(895,95,395);

	int box4 = createBox(195,195,195);
	createJoint(box3, box4, 0, 50, 50, 2, 50, 50, 50, 30, 30, 30);

	/*
	int box = createBox(85,385,185);
	createJoint(box3, box, 0,50, 50, 3, 50, 50, 3, 0,0,0);

	int box5 = createBox(95,95,395);
	createJoint(box, box5, 0,50, 50,5, 50, 50, 0, 0,0,0);
	*/

	//	createSensor(box2, pressure);

	//NN test
	std::vector<NeuralNode*> inputs;

	for (int i = 0; i < (int) sensors.size(); i++) {
		inputs.push_back(new NeuralNode(&sensors.at(i)));
	}

	inputs.push_back(new NeuralNode(1)); //index 3
	inputs.push_back(new NeuralNode(-2)); //index 4
	float* testPoint = new float;
	*testPoint = 5;
	inputs.push_back(new NeuralNode(testPoint)); //index 5
	theNet = new NeuralNetwork(inputs);
	theNet->insertNode(SUM, 5, 3, 3, 1);
	theNet->insertNode(SIN, 1, 1);
	theNet->changeLayer();
	theNet->insertNode(PRODUCT, 0, 1, 2, 2);
	theNet->stopBuilding();

	NeuralNetwork* aNet = new NeuralNetwork(theNet->getLastLayer());
	aNet->insertNode(PRODUCT, 0, 0, 10000, 10000);
	aNet->stopBuilding();
	subnets.push_back(aNet);

	effectorNNindex.push_back(0);
	effectorNNindex.push_back(1);
	effectorNNindex.push_back(2);

	solveGroundConflicts();
}
Example #30
TEST(TestNeuralNetwork, testIfBackpropGradientIsEqToNumerical)
{
    NeuralNetwork nn {2, 2};
    vector<float> weights {0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f};

    vector<float> input;
    vector<float> expected;
    vector<float> backpropGradient;
    vector<float> numericalGradient;

    int iter = 0;
    bool lastBatch;

    input.resize(2);
    expected.resize(2);

    nn.setWeights(weights);
    nn.setActivationFunction(ActivationFunction::sigmoid());

    do {
        lastBatch = createAndOrSample(iter++, input, expected);
        nn.setInput(input);
        nn.calc();
        nn.calcNumericalGradient(expected, numericalGradient);
        nn.calcBackpropagationGradient(expected, backpropGradient);

        ASSERT_EQ(numericalGradient.size(), backpropGradient.size());
        for (size_t i = 0; i < numericalGradient.size(); ++i) {
            EXPECT_NEAR(numericalGradient[i], backpropGradient[i], 0.0001f);
            // Clear both buffers before the next sample.
            numericalGradient[i] = 0.0f;
            backpropGradient[i] = 0.0f;
        }
    } while(!lastBatch);
}
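
The numerical gradient that backpropagation is checked against here is presumably a finite-difference approximation; with a central difference over each weight w_i it reads

    \frac{\partial C}{\partial w_i} \approx \frac{C(w + \varepsilon e_i) - C(w - \varepsilon e_i)}{2 \varepsilon},

where e_i is the unit vector for weight i and \varepsilon is a small step. The 0.0001f tolerance in EXPECT_NEAR is in line with the truncation error such an approximation leaves behind.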