From: Wolfgang Bangerth
Date: Wed, 16 Nov 2022 21:52:55 +0000 (-0700)
Subject: Remove the need to store the MPI communicator in PETScWrappers::SolverBase.
X-Git-Tag: v9.5.0-rc1~841^2~6
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=24e4361c2e5ab800ef31efe9c395fc064d59de79;p=dealii.git

Remove the need to store the MPI communicator in PETScWrappers::SolverBase.
---

diff --git a/include/deal.II/lac/petsc_precondition.h b/include/deal.II/lac/petsc_precondition.h
index c5fb9d702d..16df0d8039 100644
--- a/include/deal.II/lac/petsc_precondition.h
+++ b/include/deal.II/lac/petsc_precondition.h
@@ -164,7 +164,6 @@ namespace PETScWrappers
      */
     PreconditionJacobi() = default;
 
-
     /**
      * Constructor. Take the matrix which is used to form the preconditioner,
      * and additional flags if there are any.
diff --git a/include/deal.II/lac/petsc_solver.h b/include/deal.II/lac/petsc_solver.h
index 4347bd8a62..e767324e81 100644
--- a/include/deal.II/lac/petsc_solver.h
+++ b/include/deal.II/lac/petsc_solver.h
@@ -83,45 +83,15 @@ namespace PETScWrappers
    * object. This is done for performance reasons. The solver and
    * preconditioner can be reset by calling reset().
    *
-   * One of the gotchas of PETSc is that -- in particular in MPI mode -- it
-   * often does not produce very helpful error messages. In order to save
-   * other users some time in searching a hard to track down error, here is
-   * one situation and the error message one gets there: when you don't
-   * specify an MPI communicator to your solver's constructor. In this case,
-   * you will get an error of the following form from each of your parallel
-   * processes:
-   * @verbatim
-   * [1]PETSC ERROR: PCSetVector() line 1173 in src/ksp/pc/interface/precon.c
-   * [1]PETSC ERROR: Arguments must have same communicators!
-   * [1]PETSC ERROR: Different communicators in the two objects: Argument #
-   * 1 and 2! [1]PETSC ERROR: KSPSetUp() line 195 in
-   * src/ksp/ksp/interface/itfunc.c
-   * @endverbatim
-   *
-   * This error, on which one can spend a very long time figuring out what
-   * exactly goes wrong, results from not specifying an MPI communicator. Note
-   * that the communicator @em must match that of the matrix and all vectors
-   * in the linear system which we want to solve. Aggravating the situation is
-   * the fact that the default argument to the solver classes, @p
-   * PETSC_COMM_SELF, is the appropriate argument for the sequential case
-   * (which is why it is the default argument), so this error only shows up in
-   * parallel mode.
-   *
    * @ingroup PETScWrappers
    */
   class SolverBase
   {
   public:
     /**
-     * Constructor. Takes the solver control object and the MPI communicator
-     * over which parallel computations are to happen.
-     *
-     * Note that the communicator used here must match the communicator used
-     * in the system matrix, solution, and right hand side object of the solve
-     * to be done with this solver. Otherwise, PETSc will generate hard to
-     * track down errors, see the documentation of the SolverBase class.
+     * Constructor.
      */
-    SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator);
+    SolverBase(SolverControl &cn);
 
     /**
      * Destructor.
@@ -180,11 +150,6 @@ namespace PETScWrappers
      */
     SolverControl &solver_control;
 
-    /**
-     * Copy of the MPI communicator object to be used for the solver.
-     */
-    const MPI_Comm mpi_communicator;
-
     /**
      * %Function that takes a Krylov Subspace Solver context object, and sets
      * the type of solver that is requested by the derived class.
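For readers of the header change above, a minimal sketch of what the new
SolverBase interface looks like from a derived solver class. The class name
MyKSPSolver is hypothetical and only illustrates the pattern; set_solver_type()
is the protected virtual hook referred to in the last hunk, and the assumed
signature is taken from the surrounding documentation, not from this commit.

#include <deal.II/lac/exceptions.h>
#include <deal.II/lac/petsc_solver.h>
#include <deal.II/lac/solver_control.h>

#include <petscksp.h>

using namespace dealii;

// Hypothetical derived solver, for illustration only: the base class is now
// initialized from the SolverControl object alone; no communicator is stored.
class MyKSPSolver : public PETScWrappers::SolverBase
{
public:
  explicit MyKSPSolver(SolverControl &cn)
    : PETScWrappers::SolverBase(cn) // previously: SolverBase(cn, mpi_communicator)
  {}

protected:
  // Choose the PETSc Krylov method for the KSP context created by the base
  // class; plain conjugate gradients is used here purely as an example.
  virtual void set_solver_type(KSP &ksp) const override
  {
    const PetscErrorCode ierr = KSPSetType(ksp, KSPCG);
    AssertThrow(ierr == 0, ExcPETScError(ierr));
  }
};
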
diff --git a/source/lac/petsc_solver.cc b/source/lac/petsc_solver.cc
index 2bb04ffe55..2e5dd099b7 100644
--- a/source/lac/petsc_solver.cc
+++ b/source/lac/petsc_solver.cc
@@ -42,9 +42,8 @@ namespace PETScWrappers
 
 
 
-  SolverBase::SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator)
+  SolverBase::SolverBase(SolverControl &cn)
     : solver_control(cn)
-    , mpi_communicator(mpi_communicator)
   {}
 
 
@@ -74,7 +73,8 @@ namespace PETScWrappers
       {
         solver_data = std::make_unique<SolverData>();
 
-        PetscErrorCode ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+        PetscErrorCode ierr =
+          KSPCreate(A.get_mpi_communicator(), &solver_data->ksp);
         AssertThrow(ierr == 0, ExcPETScError(ierr));
 
         // let derived classes set the solver
@@ -207,7 +207,7 @@ namespace PETScWrappers
 
     solver_data = std::make_unique<SolverData>();
 
-    ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+    ierr = KSPCreate(preconditioner.get_mpi_communicator(), &solver_data->ksp);
     AssertThrow(ierr == 0, ExcPETScError(ierr));
 
     // let derived classes set the solver
@@ -246,10 +246,10 @@ namespace PETScWrappers
 
 
 
-  SolverRichardson::SolverRichardson(SolverControl & cn,
-                                     const MPI_Comm & mpi_communicator,
+  SolverRichardson::SolverRichardson(SolverControl &cn,
+                                     const MPI_Comm &,
                                      const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -294,10 +294,10 @@ namespace PETScWrappers
   /* ---------------------- SolverChebychev ------------------------ */
 
 
-  SolverChebychev::SolverChebychev(SolverControl & cn,
-                                   const MPI_Comm & mpi_communicator,
+  SolverChebychev::SolverChebychev(SolverControl &cn,
+                                   const MPI_Comm &,
                                    const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -318,10 +318,10 @@ namespace PETScWrappers
   /* ---------------------- SolverCG ------------------------ */
 
 
-  SolverCG::SolverCG(SolverControl & cn,
-                     const MPI_Comm & mpi_communicator,
+  SolverCG::SolverCG(SolverControl &cn,
+                     const MPI_Comm &,
                      const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -342,10 +342,10 @@ namespace PETScWrappers
   /* ---------------------- SolverBiCG ------------------------ */
 
 
-  SolverBiCG::SolverBiCG(SolverControl & cn,
-                         const MPI_Comm & mpi_communicator,
+  SolverBiCG::SolverBiCG(SolverControl &cn,
+                         const MPI_Comm &,
                          const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -375,10 +375,10 @@ namespace PETScWrappers
 
 
 
-  SolverGMRES::SolverGMRES(SolverControl & cn,
-                           const MPI_Comm & mpi_communicator,
+  SolverGMRES::SolverGMRES(SolverControl &cn,
+                           const MPI_Comm &,
                            const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -409,10 +409,10 @@ namespace PETScWrappers
   /* ---------------------- SolverBicgstab ------------------------ */
 
 
-  SolverBicgstab::SolverBicgstab(SolverControl & cn,
-                                 const MPI_Comm & mpi_communicator,
+  SolverBicgstab::SolverBicgstab(SolverControl &cn,
+                                 const MPI_Comm &,
                                  const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -433,10 +433,10 @@ namespace PETScWrappers
   /* ---------------------- SolverCGS ------------------------ */
 
 
-  SolverCGS::SolverCGS(SolverControl & cn,
-                       const MPI_Comm & mpi_communicator,
+  SolverCGS::SolverCGS(SolverControl &cn,
+                       const MPI_Comm &,
                        const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -457,10 +457,10 @@ namespace PETScWrappers
   /* ---------------------- SolverTFQMR ------------------------ */
 
 
-  SolverTFQMR::SolverTFQMR(SolverControl & cn,
-                           const MPI_Comm & mpi_communicator,
+  SolverTFQMR::SolverTFQMR(SolverControl &cn,
+                           const MPI_Comm &,
                            const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -481,10 +481,10 @@ namespace PETScWrappers
   /* ---------------------- SolverTCQMR ------------------------ */
 
 
-  SolverTCQMR::SolverTCQMR(SolverControl & cn,
-                           const MPI_Comm & mpi_communicator,
+  SolverTCQMR::SolverTCQMR(SolverControl &cn,
+                           const MPI_Comm &,
                            const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -505,10 +505,10 @@ namespace PETScWrappers
   /* ---------------------- SolverCR ------------------------ */
 
 
-  SolverCR::SolverCR(SolverControl & cn,
-                     const MPI_Comm & mpi_communicator,
+  SolverCR::SolverCR(SolverControl &cn,
+                     const MPI_Comm &,
                      const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -529,10 +529,10 @@ namespace PETScWrappers
   /* ---------------------- SolverLSQR ------------------------ */
 
 
-  SolverLSQR::SolverLSQR(SolverControl & cn,
-                         const MPI_Comm & mpi_communicator,
+  SolverLSQR::SolverLSQR(SolverControl &cn,
+                         const MPI_Comm &,
                          const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -553,10 +553,10 @@ namespace PETScWrappers
   /* ---------------------- SolverPreOnly ------------------------ */
 
 
-  SolverPreOnly::SolverPreOnly(SolverControl & cn,
-                               const MPI_Comm & mpi_communicator,
+  SolverPreOnly::SolverPreOnly(SolverControl &cn,
+                               const MPI_Comm &,
                                const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -595,10 +595,10 @@ namespace PETScWrappers
   }
 
 
-  SparseDirectMUMPS::SparseDirectMUMPS(SolverControl & cn,
-                                       const MPI_Comm & mpi_communicator,
+  SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &cn,
+                                       const MPI_Comm &,
                                        const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
     , symmetric_mode(false)
   {}
 
@@ -668,7 +668,8 @@ namespace PETScWrappers
      * creates the default KSP context and puts it in the location
      * solver_data->ksp
      */
-    PetscErrorCode ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+    PetscErrorCode ierr =
+      KSPCreate(A.get_mpi_communicator(), &solver_data->ksp);
     AssertThrow(ierr == 0, ExcPETScError(ierr));
 
     /*
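Taken together, the change means the KSP context is now created on the
communicator of the matrix (or preconditioner) handed to solve(), so it always
matches the linear system being solved. A minimal usage sketch follows,
assuming a typical parallel deal.II program; the function name solve_system and
the concrete solver/preconditioner choices are illustrative and not part of
this commit.

#include <deal.II/lac/petsc_precondition.h>
#include <deal.II/lac/petsc_solver.h>
#include <deal.II/lac/petsc_sparse_matrix.h>
#include <deal.II/lac/petsc_vector.h>
#include <deal.II/lac/solver_control.h>

using namespace dealii;

// Illustrative helper (not from the patch): solve A x = b with CG and a
// Jacobi preconditioner.
void solve_system(PETScWrappers::MPI::SparseMatrix &A,
                  PETScWrappers::MPI::Vector &      x,
                  const PETScWrappers::MPI::Vector &b)
{
  SolverControl control(1000, 1e-12);

  // At this point in the patch series the derived solver constructors still
  // accept a communicator, but it is no longer stored: internally the solver
  // now creates its KSP object on A.get_mpi_communicator().
  PETScWrappers::SolverCG solver(control, A.get_mpi_communicator());

  PETScWrappers::PreconditionJacobi preconditioner(A);
  solver.solve(A, x, b, preconditioner);
}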