void TestBasicFunctionality() throw (Exception)
    {
        /*
         * We need to make sure that the matrix is loaded with the appropriate parallel layout. PETSc's
         * default would put 1331 rows on each of two processors. This wouldn't be possible in a real
         * bidomain simulation because it implies that the equations for V_665 and Phi_e_665 are solved
         * on different processors.
         */
        unsigned num_nodes = 1331;
        DistributedVectorFactory factory(num_nodes);
        Vec parallel_layout = factory.CreateVec(2);
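        // CreateVec(2) interleaves two unknowns per node, so both rows belonging to node i
        // (V_i and Phi_e_i) are guaranteed to live on the same process.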

        Mat system_matrix;
        PetscTools::ReadPetscObject(system_matrix, "linalg/test/data/matrices/cube_6000elems_half_activated.mat", parallel_layout);

        PetscTools::Destroy(parallel_layout);

        // Set rhs = A * [1 0 1 0 ... 1 0]'
        Vec one_zeros = factory.CreateVec(2);
        Vec rhs = factory.CreateVec(2);

        for (unsigned node_index=0; node_index<2*num_nodes; node_index+=2)
        {
            PetscVecTools::SetElement(one_zeros, node_index, 1.0);
            PetscVecTools::SetElement(one_zeros, node_index+1, 0.0);
        }
        PetscVecTools::Finalise(one_zeros);

        MatMult(system_matrix, one_zeros, rhs);
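        // rhs now equals A*[1 0 ... 1 0]', so x = [1 0 ... 1 0]' is one solution of A*x = rhs
        // (one of infinitely many, since the system is singular; see the comment in the loop below).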
        PetscTools::Destroy(one_zeros);

        LinearSystem ls = LinearSystem(rhs, system_matrix);

        ls.SetAbsoluteTolerance(1e-9);
        ls.SetKspType("cg");
        ls.SetPcType("none");

        ls.AssembleFinalLinearSystem();

        Vec solution = ls.Solve();

        DistributedVector distributed_solution = factory.CreateDistributedVector(solution);
        DistributedVector::Stripe vm(distributed_solution, 0);
        DistributedVector::Stripe phi_e(distributed_solution, 1);
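        // Each Stripe views every second entry of the stride-2 vector: stripe 0 is V, stripe 1 is Phi_e.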

        for (DistributedVector::Iterator index = distributed_solution.Begin();
             index != distributed_solution.End();
             ++index)
        {
            /*
             * Although we're trying to enforce the solution to be [1 0 ... 1 0], the system is singular and
             * therefore has infinitely many solutions. I (migb) have found that different preconditioners
             * lead to different solutions ([0.8 -0.2 ... 0.8 -0.2], [0.5 -0.5 ... 0.5 -0.5], ...)
             *
             * If we were using PETSc's null-space support, it would find the solution that satisfies x'*v=0,
             * where v spans the null space of the system (v=[1 1 ... 1]').
             */
            TS_ASSERT_DELTA(vm[index] - phi_e[index], 1.0, 1e-6);
        }
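
        /*
         * For reference, a minimal sketch (not executed here, and assuming direct access to the
         * Mat rather than going through Chaste's LinearSystem wrapper) of how PETSc's null-space
         * support would be attached, so the KSP returns the solution satisfying x'*v=0:
         *
         *     MatNullSpace nullsp;
         *     MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nullsp); // PETSC_TRUE: null space is the constant vector
         *     MatSetNullSpace(system_matrix, nullsp); // KSPSetNullSpace() in PETSc before 3.6
         *     MatNullSpaceDestroy(&nullsp);
         */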

        // Coverage (setting PC type after first solve)
        ls.SetPcType("blockdiagonal");

        PetscTools::Destroy(system_matrix);
        PetscTools::Destroy(rhs);
        PetscTools::Destroy(solution);

#if (PETSC_VERSION_MAJOR == 3 && PETSC_VERSION_MINOR <= 3) //PETSc 3.0 to PETSc 3.3
        //The PETSc developers changed this one, but later changed it back again!
        const PCType pc;
#else
        PCType pc;
#endif
        PC prec;
        KSPGetPC(ls.mKspSolver, &prec);
        PCGetType(prec, &pc);
        // Although we call it "blockdiagonal", PETSc considers this PC a generic SHELL preconditioner
        TS_ASSERT(strcmp(pc, "shell") == 0);
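
        /*
         * For reference, a SHELL preconditioner such as Chaste's block diagonal PC is wired up
         * roughly as follows (a sketch; MyPCApply and my_context are hypothetical user-provided
         * pieces, with PetscErrorCode MyPCApply(PC pc, Vec x, Vec y) applying the preconditioner):
         *
         *     PCSetType(prec, PCSHELL);
         *     PCShellSetContext(prec, &my_context);
         *     PCShellSetApply(prec, MyPCApply);
         */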

    }
    void TestChebyshevVsCG() throw (Exception)
    {
        unsigned num_nodes = 1331;
        DistributedVectorFactory factory(num_nodes);
        Vec parallel_layout = factory.CreateVec(2);

        unsigned cg_its;
        unsigned chebyshev_its;

        Timer::Reset();
        {
            Mat system_matrix;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_matrix, "linalg/test/data/matrices/cube_6000elems_half_activated.mat", parallel_layout);

            Vec system_rhs;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_rhs, "linalg/test/data/matrices/cube_6000elems_half_activated.vec", parallel_layout);

            LinearSystem ls = LinearSystem(system_rhs, system_matrix);

            ls.SetMatrixIsSymmetric(); // lets PETSc use KSP methods that require symmetry (CG, Chebyshev)
            ls.SetAbsoluteTolerance(1e-9);
            ls.SetKspType("cg");
            ls.SetPcType("bjacobi");

            Vec solution = ls.Solve();

            cg_its = ls.GetNumIterations();

            PetscTools::Destroy(system_matrix);
            PetscTools::Destroy(system_rhs);
            PetscTools::Destroy(solution);
        }
        Timer::PrintAndReset("CG");

        {
            Mat system_matrix;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_matrix, "linalg/test/data/matrices/cube_6000elems_half_activated.mat", parallel_layout);

            Vec system_rhs;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_rhs, "linalg/test/data/matrices/cube_6000elems_half_activated.vec", parallel_layout);

            LinearSystem ls = LinearSystem(system_rhs, system_matrix);

            ls.SetMatrixIsSymmetric();
            ls.SetAbsoluteTolerance(1e-9);
            ls.SetKspType("chebychev");
            ls.SetPcType("bjacobi");

            Vec solution = ls.Solve();

            chebyshev_its = ls.GetNumIterations();

            PetscTools::Destroy(system_matrix);
            PetscTools::Destroy(system_rhs);
            PetscTools::Destroy(solution);
        }
        Timer::Print("Chebyshev");

        TS_ASSERT_LESS_THAN(cg_its, 15u); // Takes 14 iterations with 16 cores
        TS_ASSERT_LESS_THAN(chebyshev_its, 17u); // Takes 16 iterations with 16 cores
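
        /*
         * Chebyshev usually needs a few more iterations than CG (16 vs 14 above) but performs no
         * inner products, so it avoids CG's global reductions and can scale better at high core
         * counts. In recent PETSc the spectrum bounds it needs can be supplied directly (a sketch;
         * ksp is the underlying KSP handle, which LinearSystem does not expose here, and the
         * bounds are made-up values for illustration):
         *
         *     KSPChebyshevSetEigenvalues(ksp, 10.0, 0.1); // emax=10.0, emin=0.1
         */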

        PetscTools::Destroy(parallel_layout);
    }
    void TestBetterThanNoPreconditioning() throw (Exception)
    {
        unsigned num_nodes = 1331;
        DistributedVectorFactory factory(num_nodes);
        Vec parallel_layout = factory.CreateVec(2);

        unsigned no_precond_its;
        unsigned block_diag_its;

        Timer::Reset();
        {
            Mat system_matrix;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_matrix, "linalg/test/data/matrices/cube_6000elems_half_activated.mat", parallel_layout);

            Vec system_rhs;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_rhs, "linalg/test/data/matrices/cube_6000elems_half_activated.vec", parallel_layout);

            LinearSystem ls = LinearSystem(system_rhs, system_matrix);

            ls.SetAbsoluteTolerance(1e-9);
            ls.SetKspType("cg");
            ls.SetPcType("none");

            Vec solution = ls.Solve();

            no_precond_its = ls.GetNumIterations();

            PetscTools::Destroy(system_matrix);
            PetscTools::Destroy(system_rhs);
            PetscTools::Destroy(solution);
        }
        Timer::PrintAndReset("No preconditioning");

        {
            Mat system_matrix;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_matrix, "linalg/test/data/matrices/cube_6000elems_half_activated.mat", parallel_layout);

            Vec system_rhs;
            // Note that this test deadlocks if the file's not on the disk
            PetscTools::ReadPetscObject(system_rhs, "linalg/test/data/matrices/cube_6000elems_half_activated.vec", parallel_layout);

            LinearSystem ls = LinearSystem(system_rhs, system_matrix);

            ls.SetAbsoluteTolerance(1e-9);
            ls.SetKspType("cg");
            ls.SetPcType("blockdiagonal");

            Vec solution = ls.Solve();

            block_diag_its = ls.GetNumIterations();

            // Coverage (setting the PC type again after solving with blockdiagonal)
            ls.SetPcType("blockdiagonal");

            PetscTools::Destroy(system_matrix);
            PetscTools::Destroy(system_rhs);
            PetscTools::Destroy(solution);
        }
        Timer::Print("Block diagonal preconditioner");

        std::cout << "Block diagonal: " << block_diag_its << " iterations; no preconditioning: " << no_precond_its << " iterations" << std::endl;
        TS_ASSERT_LESS_THAN_EQUALS(block_diag_its, no_precond_its);
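
        /*
         * Intuition for the assertion above: for the bidomain block system
         * A = [A_vv, A_ve; A_ev, A_ee], the block diagonal PC applies (approximately) the
         * inverse of [A_vv, 0; 0, A_ee], which captures far more of A than the identity that
         * "none" effectively uses, so it should not need more iterations.
         */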

        PetscTools::Destroy(parallel_layout);
    }