const MatrixX& Jacobian::GetNullspace()
{
    if(computeNullSpace_)
    {
        computeNullSpace_ = false;
        /*jacobianInverseNoDls_ = jacobian_;
        PseudoInverse(jacobianInverseNoDls_); // tmp while figuring out how to choose lambda*/
        MatrixX id = MatrixX::Identity(jacobian_.cols(), jacobian_.cols());
        ComputeSVD();
        // Accumulate the outer products of the right singular vectors that span
        // the row space of the Jacobian. Columns of V whose singular value is
        // (near) zero already lie in the nullspace and must be skipped; otherwise
        // those directions would be wrongly removed from the projector.
        MatrixX res = MatrixX::Zero(id.rows(), id.cols());
        for(int i = 0; i < svd_.matrixV().cols(); ++i)
        {
            if(svd_.singularValues()(i) > 1e-10) // tolerance for a zero singular value
            {
                VectorX v = svd_.matrixV().col(i);
                res += v * v.transpose();
            }
        }
        // N = I - V V^T projects any vector onto the nullspace of the Jacobian.
        Identitymin_ = id - res;
        //Identitymin_ = id - (jacobianInverseNoDls_ * jacobian_);
    }
    return Identitymin_;
}
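// For context: the projector built above is N = I - sum_i v_i v_i^T over the
// right singular vectors with non-zero singular values, so J * N ~= 0. Below is
// a minimal standalone sketch of the same construction using plain Eigen types
// (Eigen::MatrixXd etc. are assumptions here; the library's MatrixX/VectorX
// typedefs may differ).
#include <Eigen/Dense>
#include <iostream>

int main()
{
    // A wide Jacobian (more columns than rows) generically has a nullspace.
    Eigen::MatrixXd J = Eigen::MatrixXd::Random(3, 6);

    Eigen::JacobiSVD<Eigen::MatrixXd> svd(J, Eigen::ComputeThinU | Eigen::ComputeThinV);
    const Eigen::MatrixXd& V = svd.matrixV();            // 6 x 3 thin V
    Eigen::MatrixXd N = Eigen::MatrixXd::Identity(6, 6);
    for(int i = 0; i < V.cols(); ++i)
    {
        if(svd.singularValues()(i) > 1e-10)              // keep only the row space
            N -= V.col(i) * V.col(i).transpose();
    }

    // Any vector pushed through N lands in the nullspace of J.
    Eigen::VectorXd q = N * Eigen::VectorXd::Random(6);
    std::cout << "||J * q|| = " << (J * q).norm() << std::endl; // ~0
    return 0;
}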
TYPED_TEST(TestSecondOrderMultinomialLogisticRegression, MinimizerOverfitSmall) {
    MatrixX<TypeParam> X(2, 10);
    VectorXi y(10);

    X << 0.6097662,   0.53395565,  0.9499446,   0.67289898,  0.94173948,
         0.56675891,  0.80363783,  0.85303565,  0.15903886,  0.99518533,
         0.41655682,  0.29256121,  0.36103228,  0.29899503,  0.4957268,
        -0.04277318, -0.28038614, -0.12334621, -0.17497722,  0.1492248;
    y << 0, 0, 0, 0, 0, 1, 1, 1, 1, 1;

    // Per-sample covariances: a * a^T is symmetric positive semidefinite by
    // construction, so each X_var entry is a valid (rank-one) covariance.
    std::vector<MatrixX<TypeParam> > X_var;
    for (int i = 0; i < 10; i++) {
        VectorX<TypeParam> a = VectorX<TypeParam>::Random(2).array() * 0.01;
        X_var.push_back(a * a.transpose());
    }

    SecondOrderLogisticRegressionApproximation<TypeParam> mlr(X, X_var, y, 0);
    MatrixX<TypeParam> eta = MatrixX<TypeParam>::Zero(2, 3);

    // Run plain gradient descent with an Armijo line search for a fixed
    // budget of 5000 iterations.
    GradientDescent<SecondOrderLogisticRegressionApproximation<TypeParam>,
                    MatrixX<TypeParam>> minimizer(
        std::make_shared<ArmijoLineSearch<
            SecondOrderLogisticRegressionApproximation<TypeParam>,
            MatrixX<TypeParam>>>(),
        [](TypeParam value, TypeParam gradNorm, size_t iterations) {
            return iterations < 5000;
        });
    minimizer.minimize(mlr, eta);

    // The model should overfit this tiny dataset to a very small loss.
    EXPECT_GT(0.1, mlr.value(eta));
}
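// Side note on the X_var construction above: the per-sample covariances are
// built as outer products a * a^T because an outer product is symmetric
// positive semidefinite by construction, which an arbitrary random 2x2 matrix
// is not. A quick standalone check (plain Eigen types; a hypothetical snippet,
// not part of the test suite):
#include <Eigen/Dense>
#include <iostream>

int main()
{
    Eigen::Vector2d a = Eigen::Vector2d::Random() * 0.01;
    Eigen::Matrix2d cov = a * a.transpose();

    // Eigenvalues of a PSD matrix are all >= 0 (here: one zero and one
    // non-negative, since a rank-one outer product has rank at most one).
    Eigen::SelfAdjointEigenSolver<Eigen::Matrix2d> es(cov);
    std::cout << "eigenvalues: " << es.eigenvalues().transpose() << std::endl;
    return 0;
}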