* object. This is done for performance reasons. The solver and
* preconditioner can be reset by calling reset().
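+ *
+ * For example, one solver object can be reused for several linear
+ * systems, discarding the cached state in between (a sketch: the
+ * concrete solver class, @p control, and the matrix/vector objects are
+ * placeholders set up elsewhere):
+ * @code
+ * PETScWrappers::SolverCG solver(control);
+ * solver.solve(A, x, b, preconditioner);      // creates and caches the KSP
+ * solver.reset();                             // drop solver and preconditioner
+ * solver.solve(A2, x2, b2, preconditioner2);  // starts from scratch
+ * @endcode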
*
- * One of the gotchas of PETSc is that -- in particular in MPI mode -- it
- * often does not produce very helpful error messages. To save other
- * users the time of tracking down such an error, here is one common
- * situation and the error message it produces: not specifying an MPI
- * communicator in your solver's constructor. In this case, each of your
- * parallel processes will emit an error of the following form:
- * @verbatim
- * [1]PETSC ERROR: PCSetVector() line 1173 in src/ksp/pc/interface/precon.c
- * [1]PETSC ERROR: Arguments must have same communicators!
- * [1]PETSC ERROR: Different communicators in the two objects: Argument # 1 and 2!
- * [1]PETSC ERROR: KSPSetUp() line 195 in src/ksp/ksp/interface/itfunc.c
- * @endverbatim
- *
- * This error, which can take a very long time to track down, results
- * from not specifying an MPI communicator. Note that the communicator
- * @em must match that of the matrix and all vectors in the linear
- * system we want to solve. Aggravating the situation is the fact that
- * the default argument to the solver classes, @p PETSC_COMM_SELF, is
- * the appropriate argument for the sequential case (which is why it is
- * the default argument), so this error only shows up in parallel mode.
- *
* @ingroup PETScWrappers
*/
class SolverBase
{
public:
/**
- * Constructor. Takes the solver control object and the MPI communicator
- * over which parallel computations are to happen.
- *
- * Note that the communicator used here must match the communicator of
- * the system matrix, solution vector, and right hand side vector of the
- * linear system to be solved with this solver. Otherwise, PETSc will
- * generate hard-to-track-down errors; see the documentation of the
- * SolverBase class.
+ * Constructor. In contrast to the previous version of this class, no
+ * MPI communicator is passed in here: the solver instead uses the
+ * communicator of the matrix or preconditioner handed to solve().
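+ *
+ * Typical use then looks as follows (a sketch: @p A, @p x, @p b, and
+ * @p preconditioner are placeholders set up elsewhere; the communicator
+ * argument of the derived solver classes is now ignored):
+ * @code
+ * SolverControl control(1000, 1e-10);
+ * PETScWrappers::SolverCG solver(control, MPI_COMM_WORLD);
+ * solver.solve(A, x, b, preconditioner); // KSP created on A's communicator
+ * @endcode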
*/
- SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator);
+ SolverBase(SolverControl &cn);
/**
* Destructor.
*/
SolverControl &solver_control;
- /**
- * Copy of the MPI communicator object to be used for the solver.
- */
- const MPI_Comm mpi_communicator;
-
/**
 * %Function that takes a Krylov subspace solver (KSP) context object and
 * sets the solver type requested by the derived class.
- SolverBase::SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator)
+ SolverBase::SolverBase(SolverControl &cn)
: solver_control(cn)
- , mpi_communicator(mpi_communicator)
{}
{
solver_data = std::make_unique<SolverData>();
- PetscErrorCode ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+ PetscErrorCode ierr =
+ KSPCreate(A.get_mpi_communicator(), &solver_data->ksp);
AssertThrow(ierr == 0, ExcPETScError(ierr));
// let derived classes set the solver
solver_data = std::make_unique<SolverData>();
- ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+ ierr = KSPCreate(preconditioner.get_mpi_communicator(), &solver_data->ksp);
AssertThrow(ierr == 0, ExcPETScError(ierr));
// let derived classes set the solver
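For context: both call sites above now obtain the communicator from the
object they are handed rather than from a member stored at construction
time. A minimal sketch of what such a get_mpi_communicator() accessor
presumably does, using PETSc's PetscObjectGetComm() (the `matrix` member
below is a placeholder; the error handling mirrors the idiom used in the
surrounding code):

  MPI_Comm get_mpi_communicator() const
  {
    // Every PETSc object records the communicator it was created on;
    // PetscObjectGetComm() simply retrieves it.
    MPI_Comm comm;
    const PetscErrorCode ierr =
      PetscObjectGetComm(reinterpret_cast<PetscObject>(matrix), &comm);
    AssertThrow(ierr == 0, ExcPETScError(ierr));
    return comm;
  }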
- SolverRichardson::SolverRichardson(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverRichardson::SolverRichardson(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverChebychev ------------------------ */
- SolverChebychev::SolverChebychev(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverChebychev::SolverChebychev(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverCG ------------------------ */
- SolverCG::SolverCG(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverCG::SolverCG(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverBiCG ------------------------ */
- SolverBiCG::SolverBiCG(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverBiCG::SolverBiCG(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
- SolverGMRES::SolverGMRES(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverGMRES::SolverGMRES(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverBicgstab ------------------------ */
- SolverBicgstab::SolverBicgstab(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverBicgstab::SolverBicgstab(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverCGS ------------------------ */
- SolverCGS::SolverCGS(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverCGS::SolverCGS(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverTFQMR ------------------------ */
- SolverTFQMR::SolverTFQMR(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverTFQMR::SolverTFQMR(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverTCQMR ------------------------ */
- SolverTCQMR::SolverTCQMR(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverTCQMR::SolverTCQMR(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverCR ------------------------ */
- SolverCR::SolverCR(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverCR::SolverCR(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverLSQR ------------------------ */
- SolverLSQR::SolverLSQR(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverLSQR::SolverLSQR(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
/* ---------------------- SolverPreOnly ------------------------ */
- SolverPreOnly::SolverPreOnly(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SolverPreOnly::SolverPreOnly(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
{}
}
- SparseDirectMUMPS::SparseDirectMUMPS(SolverControl & cn,
- const MPI_Comm & mpi_communicator,
+ SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &cn,
+ const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn, mpi_communicator)
+ : SolverBase(cn)
, additional_data(data)
, symmetric_mode(false)
{}
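Because every KSPCreate() call now receives the communicator of the
matrix or preconditioner itself, the communicator-mismatch failure that
the removed SolverBase documentation warned about can no longer arise
from the solver side. A sketch (control, A, x, b, and preconditioner are
placeholders):

  // Previously, a mismatch like this triggered "Arguments must have same
  // communicators!" at solve time; now the second argument is ignored
  // and the KSP is created on A's communicator inside solve().
  PETScWrappers::SolverCG solver(control, PETSC_COMM_SELF);
  solver.solve(A, x, b, preconditioner);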
* creates the default KSP context and puts it in the location
* solver_data->ksp
*/
- PetscErrorCode ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+ PetscErrorCode ierr =
+ KSPCreate(A.get_mpi_communicator(), &solver_data->ksp);
AssertThrow(ierr == 0, ExcPETScError(ierr));
/*