/* Solve Ax = b using the conjugate gradient method. */
Matrix conjugateGradient(Matrix& A, Matrix& b)
{
    double error_tol = .5;          // relative error tolerance
    int max_iter = 200;             // maximum number of iterations

    ColumnVector x(A.rows());       // the solution we will iteratively arrive at

    int i = 0;
    ColumnVector r = static_cast<ColumnVector>(b - A*x);    // initial residual r_0 = b - A*x_0
    ColumnVector d = r;                                      // initial search direction

    double sigma_old = 0;                                    // will be used later on, in the loop
    double sigma_new = (r.transpose() * r)(0,0);             // sigma = r^T r
    double sigma_0 = sigma_new;

    while (i < max_iter && sigma_new > error_tol * error_tol * sigma_0) {
        ColumnVector q = A * d;
        double alpha = sigma_new / (d.transpose() * q)(0,0); // step length along d
        x = x + alpha * d;

        // Every 50 iterations, recompute the residual exactly to limit
        // accumulated floating-point error; otherwise update it cheaply.
        if (i % 50 == 0) {
            r = static_cast<ColumnVector>(b - A*x);
        } else {
            r = r - alpha * q;
        }

        sigma_old = sigma_new;
        sigma_new = (r.transpose() * r)(0,0);
        double beta = sigma_new / sigma_old;
        d = r + beta * d;                                    // next search direction
        i++;
    }

    shared_ptr<Matrix> final_x(new Matrix(static_cast<Matrix>(x)));
    return *final_x;
}
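/* A minimal, self-contained sketch of the same conjugate gradient iteration,
   written against std::vector<double> instead of the Matrix/ColumnVector
   classes assumed above. It is a standalone illustrative program, not part of
   the functions in this file: the 2x2 system, the tolerance, and the matVec/dot
   helpers are assumptions chosen for the example; the expected solution of this
   particular system is roughly (0.090909, 0.636364). */
#include <cstdio>
#include <vector>
#include <cmath>

using Vec = std::vector<double>;
using Mat = std::vector<Vec>;

static Vec matVec(const Mat& A, const Vec& v) {
    Vec out(A.size(), 0.0);
    for (size_t i = 0; i < A.size(); ++i)
        for (size_t j = 0; j < v.size(); ++j)
            out[i] += A[i][j] * v[j];
    return out;
}

static double dot(const Vec& a, const Vec& b) {
    double s = 0.0;
    for (size_t i = 0; i < a.size(); ++i) s += a[i] * b[i];
    return s;
}

int main() {
    Mat A = {{4.0, 1.0}, {1.0, 3.0}};   // symmetric positive definite
    Vec b = {1.0, 2.0};

    Vec x(b.size(), 0.0);               // initial guess: all zeros
    Vec r = b;                          // r_0 = b - A*x_0 = b when x_0 = 0
    Vec d = r;                          // first search direction
    double sigma_new = dot(r, r);

    for (int i = 0; i < 200 && std::sqrt(sigma_new) > 1e-10; ++i) {
        Vec q = matVec(A, d);
        double alpha = sigma_new / dot(d, q);                   // step length
        for (size_t k = 0; k < x.size(); ++k) x[k] += alpha * d[k];
        for (size_t k = 0; k < r.size(); ++k) r[k] -= alpha * q[k];
        double sigma_old = sigma_new;
        sigma_new = dot(r, r);
        double beta = sigma_new / sigma_old;
        for (size_t k = 0; k < d.size(); ++k) d[k] = r[k] + beta * d[k];
    }

    std::printf("x = (%f, %f)\n", x[0], x[1]);   // expect roughly (0.090909, 0.636364)
    return 0;
}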
/* Solve Ax = b using the Method of Steepest Descent. */
Matrix steepestDescent(Matrix& A, Matrix& b)
{
    // The Method of Steepest Descent *requires* a symmetric matrix.
    if (!isSymmetric(A)) {
        shared_ptr<Matrix> nullMat(new Matrix(0,0));
        return *nullMat;
    }

    /* STEP 1: Start with a guess. Our guess is all ones. */
    ColumnVector x(A.cols());
    fill(x.begin(), x.end(), 1);

    /* This is NOT an infinite loop. There's a break statement inside. */
    while (true) {
        /* STEP 2: Calculate the residual r_0 = b - A*x_0 */
        ColumnVector r = static_cast<ColumnVector>(b - A*x);
        if (r.length() < .01) break;   // stop once the residual is small enough

        /* STEP 3: Calculate alpha = (r^T r) / (r^T A r) */
        double alpha = (r.transpose() * r)(0,0) / (r.transpose() * A * r)(0,0);

        /* STEP 4: Calculate the new x_1, where x_1 = x_0 + alpha*r_0 */
        x = x + alpha * r;
    }

    shared_ptr<Matrix> final_x(new Matrix(static_cast<Matrix>(x)));
    return *final_x;
}
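/* A minimal, self-contained sketch of the steepest-descent iteration on the
   same illustrative 2x2 system used in the conjugate gradient sketch above.
   Again a standalone program: the matrix, tolerance, and iteration cap are
   assumptions for the example, not values taken from steepestDescent(). Unlike
   conjugate gradient, every step simply moves along the current residual, so on
   ill-conditioned systems this loop typically needs far more iterations. */
#include <cstdio>
#include <cmath>

int main() {
    // A = [[4, 1], [1, 3]] (symmetric positive definite), b = (1, 2)
    const double a11 = 4, a12 = 1, a21 = 1, a22 = 3;
    const double b1 = 1, b2 = 2;

    double x1 = 1, x2 = 1;                         // initial guess: all ones
    for (int i = 0; i < 1000; ++i) {
        double r1 = b1 - (a11*x1 + a12*x2);        // residual r = b - A*x
        double r2 = b2 - (a21*x1 + a22*x2);
        if (std::sqrt(r1*r1 + r2*r2) < 1e-10) break;

        double Ar1 = a11*r1 + a12*r2;              // A*r
        double Ar2 = a21*r1 + a22*r2;
        double alpha = (r1*r1 + r2*r2) / (r1*Ar1 + r2*Ar2);   // (r^T r)/(r^T A r)

        x1 += alpha * r1;                          // x = x + alpha*r
        x2 += alpha * r2;
    }
    std::printf("x = (%f, %f)\n", x1, x2);         // expect roughly (0.090909, 0.636364)
    return 0;
}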