#include <igl/read_triangle_mesh.h>
#include <igl/cotmatrix.h>
#include <igl/massmatrix.h>
#include <igl/eigs.h>
#include <igl/parula.h>
#include <igl/viewer/Viewer.h>
#include <Eigen/Core>
#include <Eigen/Sparse>
#include <iostream>

// Assumed global state (declared outside the original excerpt): mesh,
// eigenvectors, and display flags shared between main and the key callback.
Eigen::MatrixXd V, U;
Eigen::MatrixXi F;
double bbd = 1.0;
bool twod = false;
int c = 0;

int main(int argc, char * argv[])
{
  using namespace Eigen;
  using namespace std;
  using namespace igl;
  VectorXd D;
  if(!read_triangle_mesh("../shared/beetle.off",V,F))
  {
    cout<<"failed to load mesh"<<endl;
  }
  twod = V.col(2).minCoeff()==V.col(2).maxCoeff();
  bbd = (V.colwise().maxCoeff()-V.colwise().minCoeff()).norm();
  // Discrete Laplacian (cotangent weights, sign-flipped) and mass matrix
  SparseMatrix<double> L,M;
  cotmatrix(V,F,L);
  L = (-L).eval();
  massmatrix(V,F,MASSMATRIX_TYPE_DEFAULT,M);
  // Smallest-magnitude generalized eigenvectors of (L,M)
  const size_t k = 5;
  if(!eigs(L,M,k+1,EIGS_TYPE_SM,U,D))
  {
    cout<<"failed."<<endl;
  }
  // Normalize eigenvectors to [0,1] for visualization
  U = ((U.array()-U.minCoeff())/(U.maxCoeff()-U.minCoeff())).eval();

  igl::viewer::Viewer viewer;
  viewer.callback_key_down =
    [&](igl::viewer::Viewer & viewer,unsigned char key,int)->bool
  {
    switch(key)
    {
      default:
        return false;
      case ' ':
      {
        U = U.rightCols(k).eval();
        // Rescale eigen vectors for visualization
        VectorXd Z = bbd*0.5*U.col(c);
        Eigen::MatrixXd C;
        igl::parula(U.col(c).eval(),false,C);
        c = (c+1)%U.cols();
        if(twod)
        {
          V.col(2) = Z;
        }
        viewer.data.set_mesh(V,F);
        viewer.data.compute_normals();
        viewer.data.set_colors(C);
        return true;
      }
    }
  };
  // Show the first eigenfunction immediately
  viewer.callback_key_down(viewer,' ',0);
  viewer.core.show_lines = false;
  viewer.launch();
}
void ActivationFunctionsTestCase::linear()
{
  const int N = 1000;
  Eigen::MatrixXd a = Eigen::VectorXd::Random(N) * 10.0;
  Eigen::MatrixXd z = Eigen::VectorXd::Zero(N);
  OpenANN::linear(a, z);
  // The identity activation must not change the range of the input
  ASSERT_EQUALS(a.minCoeff(), z.minCoeff());
  ASSERT_EQUALS(a.maxCoeff(), z.maxCoeff());

  Eigen::MatrixXd gd = Eigen::VectorXd::Zero(N);
  Eigen::MatrixXd expected = Eigen::VectorXd::Ones(N);
  OpenANN::linearDerivative(gd);
  // The derivative of the identity is 1 everywhere
  ASSERT_EQUALS(gd.sum(), expected.sum());
}
void ActivationFunctionsTestCase::normaltanh()
{
  const int N = 1000;
  Eigen::MatrixXd a = Eigen::VectorXd::Random(N) * 10.0;
  Eigen::MatrixXd z = Eigen::VectorXd::Zero(N);
  OpenANN::normaltanh(a, z);
  // tanh saturates within (-1, 1)
  ASSERT_WITHIN(z.minCoeff(), -1.0, -0.5);
  ASSERT_WITHIN(z.maxCoeff(), 0.5, 1.0);

  Eigen::MatrixXd gd = Eigen::VectorXd::Zero(N);
  OpenANN::normaltanhDerivative(z, gd);
  ASSERT_WITHIN(gd.minCoeff(), 0.0, 1.0);
  ASSERT_WITHIN(gd.maxCoeff(), 0.0, 1.0);
}
void ActivationFunctionsTestCase::logistic()
{
  const int N = 1000;
  Eigen::MatrixXd a = Eigen::VectorXd::Random(N) * 10.0;
  Eigen::MatrixXd z = Eigen::VectorXd::Zero(N);
  OpenANN::logistic(a, z);
  ASSERT_WITHIN(z.minCoeff(), 0.0, 0.2);
  ASSERT_WITHIN(z.maxCoeff(), 0.8, 1.0);

  Eigen::MatrixXd gd = Eigen::VectorXd::Zero(N);
  OpenANN::logisticDerivative(z, gd);
  ASSERT_WITHIN(gd.minCoeff(), 0.0, 1.0);
  ASSERT_WITHIN(gd.maxCoeff(), 0.0, 1.0);
}
void ActivationFunctionsTestCase::softmax()
{
  const int N = 1000;
  Eigen::MatrixXd a = Eigen::VectorXd::Random(N).transpose();
  OpenANN::softmax(a);
  ASSERT_EQUALS_DELTA(1.0, a.sum(), 1e-3);
  ASSERT_WITHIN(a.minCoeff(), 0.0, 1.0);
  ASSERT_WITHIN(a.maxCoeff(), 0.0, 1.0);
}
void scaleData(Eigen::MatrixXd& data, double min, double max)
{
  if(min >= max)
    throw OpenANNException("Scaling failed: max has to be greater than min!");
  const double minData = data.minCoeff();
  const double maxData = data.maxCoeff();
  const double dataRange = maxData - minData;
  const double desiredRange = max - min;
  const double scaling = desiredRange / dataRange;
  // Affine map of all entries from [minData, maxData] to [min, max]
  data = data.array() * scaling + (min - minData * scaling);
}
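// Minimal usage sketch for scaleData. Assumptions: the function is declared
// in OpenANN/Preprocessing.h inside the OpenANN namespace; adjust the include
// if the project layout differs. It rescales every entry of a matrix so that
// the smallest value maps to min and the largest to max.
#include <OpenANN/Preprocessing.h>
#include <Eigen/Core>
#include <iostream>

int main()
{
  // Random entries in [-1, 1], rescaled to [0, 10]
  Eigen::MatrixXd data = Eigen::MatrixXd::Random(3, 4);
  OpenANN::scaleData(data, 0.0, 10.0);
  // After scaling, the smallest entry is 0 and the largest is 10
  std::cout << data.minCoeff() << " " << data.maxCoeff() << std::endl;
  return 0;
}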
void ActivationFunctionsTestCase::rectifier()
{
  const int N = 1000;
  Eigen::MatrixXd a = Eigen::MatrixXd::Random(1, N) * 10.0;
  Eigen::MatrixXd z = Eigen::MatrixXd::Zero(1, N);
  OpenANN::rectifier(a, z);
  // ReLU clamps negative activations to zero and keeps positive ones
  ASSERT_EQUALS(0.0, z.minCoeff());
  ASSERT_EQUALS(a.maxCoeff(), z.maxCoeff());

  Eigen::MatrixXd gd = Eigen::MatrixXd::Zero(1, N);
  Eigen::MatrixXd expected = Eigen::MatrixXd::Ones(1, N);
  // The derivative is 1 where the output is positive, 0 otherwise
  for(int i = 0; i < N; i++)
    expected(i) *= (double)(z(i) > 0.0);
  OpenANN::rectifierDerivative(z, gd);
  ASSERT_EQUALS(gd.sum(), expected.sum());
}