https://gitweb.dealii.org/ - dealii.git/commitdiff
Remove the need to store the MPI communicator in PETScWrappers::SolverBase.
author    Wolfgang Bangerth <bangerth@colostate.edu>
          Wed, 16 Nov 2022 21:52:55 +0000 (14:52 -0700)
committer Wolfgang Bangerth <bangerth@colostate.edu>
          Thu, 17 Nov 2022 15:44:39 +0000 (08:44 -0700)
include/deal.II/lac/petsc_precondition.h
include/deal.II/lac/petsc_solver.h
source/lac/petsc_solver.cc
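
With this patch, a PETSc solver no longer needs to be told up front on which
MPI communicator it operates: SolverBase stops storing a communicator, and the
KSP context is instead created on the communicator of the matrix (or
preconditioner) the solver is given. The following sketch illustrates the
intended usage after this change; it assumes a matrix and vectors that have
already been assembled on some communicator, and the object and function names
are purely illustrative:

    #include <deal.II/lac/petsc_precondition.h>
    #include <deal.II/lac/petsc_solver.h>
    #include <deal.II/lac/petsc_sparse_matrix.h>
    #include <deal.II/lac/petsc_vector.h>
    #include <deal.II/lac/solver_control.h>

    using namespace dealii;

    // The matrix and vectors are assumed to be assembled elsewhere, all on
    // the same communicator (e.g. MPI_COMM_WORLD).
    void solve_system(PETScWrappers::MPI::SparseMatrix &system_matrix,
                      PETScWrappers::MPI::Vector &      solution,
                      const PETScWrappers::MPI::Vector &rhs)
    {
      SolverControl control(1000, 1e-12);

      // The derived solver classes still accept a communicator argument, but
      // after this patch SolverBase neither stores nor uses it: the KSP is
      // created on system_matrix.get_mpi_communicator() inside solve().
      PETScWrappers::SolverCG           solver(control, MPI_COMM_WORLD);
      PETScWrappers::PreconditionJacobi preconditioner(system_matrix);

      solver.solve(system_matrix, solution, rhs, preconditioner);
    }

In particular, the communicator-mismatch error described in the documentation
block removed below can no longer originate from the solver, since the
solver's communicator is by construction that of the matrix it is handed.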

index c5fb9d702d992d071b4fe68a50100d381a820952..16df0d8039f66d777e2cb13f35b5f79a6b1b30e3 100644 (file)
@@ -164,7 +164,6 @@ namespace PETScWrappers
      */
     PreconditionJacobi() = default;
 
-
     /**
      * Constructor. Take the matrix which is used to form the preconditioner,
      * and additional flags if there are any.
index 4347bd8a624bfef7fb235c081d02769a232758df..e767324e8150750ea6acec068b8f435675a9bc89 100644 (file)
@@ -83,45 +83,15 @@ namespace PETScWrappers
    * object. This is done for performance reasons. The solver and
    * preconditioner can be reset by calling reset().
    *
-   * One of the gotchas of PETSc is that -- in particular in MPI mode -- it
-   * often does not produce very helpful error messages. In order to save
-   * other users some time in searching a hard to track down error, here is
-   * one situation and the error message one gets there: when you don't
-   * specify an MPI communicator to your solver's constructor. In this case,
-   * you will get an error of the following form from each of your parallel
-   * processes:
-   * @verbatim
-   *   [1]PETSC ERROR: PCSetVector() line 1173 in src/ksp/pc/interface/precon.c
-   *   [1]PETSC ERROR:   Arguments must have same communicators!
-   *   [1]PETSC ERROR:   Different communicators in the two objects: Argument #
-   * 1 and 2! [1]PETSC ERROR: KSPSetUp() line 195 in
-   * src/ksp/ksp/interface/itfunc.c
-   * @endverbatim
-   *
-   * This error, on which one can spend a very long time figuring out what
-   * exactly goes wrong, results from not specifying an MPI communicator. Note
-   * that the communicator @em must match that of the matrix and all vectors
-   * in the linear system which we want to solve. Aggravating the situation is
-   * the fact that the default argument to the solver classes, @p
-   * PETSC_COMM_SELF, is the appropriate argument for the sequential case
-   * (which is why it is the default argument), so this error only shows up in
-   * parallel mode.
-   *
    * @ingroup PETScWrappers
    */
   class SolverBase
   {
   public:
     /**
-     * Constructor. Takes the solver control object and the MPI communicator
-     * over which parallel computations are to happen.
-     *
-     * Note that the communicator used here must match the communicator used
-     * in the system matrix, solution, and right hand side object of the solve
-     * to be done with this solver. Otherwise, PETSc will generate hard to
-     * track down errors, see the documentation of the SolverBase class.
+     * Constructor.
      */
-    SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator);
+    SolverBase(SolverControl &cn);
 
     /**
      * Destructor.
@@ -180,11 +150,6 @@ namespace PETScWrappers
      */
     SolverControl &solver_control;
 
-    /**
-     * Copy of the MPI communicator object to be used for the solver.
-     */
-    const MPI_Comm mpi_communicator;
-
     /**
      * %Function that takes a Krylov Subspace Solver context object, and sets
      * the type of solver that is requested by the derived class.
index 2bb04ffe55ae48b5336419143db5e7e8840df95a..2e5dd099b720efd1b5f2a25ee1e05ff630966348 100644 (file)
@@ -42,9 +42,8 @@ namespace PETScWrappers
 
 
 
-  SolverBase::SolverBase(SolverControl &cn, const MPI_Comm &mpi_communicator)
+  SolverBase::SolverBase(SolverControl &cn)
     : solver_control(cn)
-    , mpi_communicator(mpi_communicator)
   {}
 
 
@@ -74,7 +73,8 @@ namespace PETScWrappers
       {
         solver_data = std::make_unique<SolverData>();
 
-        PetscErrorCode ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+        PetscErrorCode ierr =
+          KSPCreate(A.get_mpi_communicator(), &solver_data->ksp);
         AssertThrow(ierr == 0, ExcPETScError(ierr));
 
         // let derived classes set the solver
@@ -207,7 +207,7 @@ namespace PETScWrappers
 
     solver_data = std::make_unique<SolverData>();
 
-    ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+    ierr = KSPCreate(preconditioner.get_mpi_communicator(), &solver_data->ksp);
     AssertThrow(ierr == 0, ExcPETScError(ierr));
 
     // let derived classes set the solver
@@ -246,10 +246,10 @@ namespace PETScWrappers
 
 
 
-  SolverRichardson::SolverRichardson(SolverControl &       cn,
-                                     const MPI_Comm &      mpi_communicator,
+  SolverRichardson::SolverRichardson(SolverControl &cn,
+                                     const MPI_Comm &,
                                      const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -294,10 +294,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverChebychev ------------------------ */
 
-  SolverChebychev::SolverChebychev(SolverControl &       cn,
-                                   const MPI_Comm &      mpi_communicator,
+  SolverChebychev::SolverChebychev(SolverControl &cn,
+                                   const MPI_Comm &,
                                    const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -318,10 +318,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverCG ------------------------ */
 
-  SolverCG::SolverCG(SolverControl &       cn,
-                     const MPI_Comm &      mpi_communicator,
+  SolverCG::SolverCG(SolverControl &cn,
+                     const MPI_Comm &,
                      const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -342,10 +342,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverBiCG ------------------------ */
 
-  SolverBiCG::SolverBiCG(SolverControl &       cn,
-                         const MPI_Comm &      mpi_communicator,
+  SolverBiCG::SolverBiCG(SolverControl &cn,
+                         const MPI_Comm &,
                          const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -375,10 +375,10 @@ namespace PETScWrappers
 
 
 
-  SolverGMRES::SolverGMRES(SolverControl &       cn,
-                           const MPI_Comm &      mpi_communicator,
+  SolverGMRES::SolverGMRES(SolverControl &cn,
+                           const MPI_Comm &,
                            const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -409,10 +409,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverBicgstab ------------------------ */
 
-  SolverBicgstab::SolverBicgstab(SolverControl &       cn,
-                                 const MPI_Comm &      mpi_communicator,
+  SolverBicgstab::SolverBicgstab(SolverControl &cn,
+                                 const MPI_Comm &,
                                  const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -433,10 +433,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverCGS ------------------------ */
 
-  SolverCGS::SolverCGS(SolverControl &       cn,
-                       const MPI_Comm &      mpi_communicator,
+  SolverCGS::SolverCGS(SolverControl &cn,
+                       const MPI_Comm &,
                        const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -457,10 +457,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverTFQMR ------------------------ */
 
-  SolverTFQMR::SolverTFQMR(SolverControl &       cn,
-                           const MPI_Comm &      mpi_communicator,
+  SolverTFQMR::SolverTFQMR(SolverControl &cn,
+                           const MPI_Comm &,
                            const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -481,10 +481,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverTCQMR ------------------------ */
 
-  SolverTCQMR::SolverTCQMR(SolverControl &       cn,
-                           const MPI_Comm &      mpi_communicator,
+  SolverTCQMR::SolverTCQMR(SolverControl &cn,
+                           const MPI_Comm &,
                            const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -505,10 +505,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverCR ------------------------ */
 
-  SolverCR::SolverCR(SolverControl &       cn,
-                     const MPI_Comm &      mpi_communicator,
+  SolverCR::SolverCR(SolverControl &cn,
+                     const MPI_Comm &,
                      const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -529,10 +529,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverLSQR ------------------------ */
 
-  SolverLSQR::SolverLSQR(SolverControl &       cn,
-                         const MPI_Comm &      mpi_communicator,
+  SolverLSQR::SolverLSQR(SolverControl &cn,
+                         const MPI_Comm &,
                          const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -553,10 +553,10 @@ namespace PETScWrappers
 
   /* ---------------------- SolverPreOnly ------------------------ */
 
-  SolverPreOnly::SolverPreOnly(SolverControl &       cn,
-                               const MPI_Comm &      mpi_communicator,
+  SolverPreOnly::SolverPreOnly(SolverControl &cn,
+                               const MPI_Comm &,
                                const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
   {}
 
@@ -595,10 +595,10 @@ namespace PETScWrappers
   }
 
 
-  SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &       cn,
-                                       const MPI_Comm &      mpi_communicator,
+  SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &cn,
+                                       const MPI_Comm &,
                                        const AdditionalData &data)
-    : SolverBase(cn, mpi_communicator)
+    : SolverBase(cn)
     , additional_data(data)
     , symmetric_mode(false)
   {}
@@ -668,7 +668,8 @@ namespace PETScWrappers
          * creates the default KSP context and puts it in the location
          * solver_data->ksp
          */
-        PetscErrorCode ierr = KSPCreate(mpi_communicator, &solver_data->ksp);
+        PetscErrorCode ierr =
+          KSPCreate(A.get_mpi_communicator(), &solver_data->ksp);
         AssertThrow(ierr == 0, ExcPETScError(ierr));
 
         /*

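The hunk that creates the KSP from preconditioner.get_mpi_communicator() sits
in the preconditioner-only set-up path, SolverBase::initialize(), used for
example by the SLEPc wrappers. A minimal sketch of that path, assuming a
preconditioner that has already been initialized with a matrix (names are
again illustrative):

    // The preconditioner is assumed to have been set up with a matrix and
    // therefore already knows its communicator.
    void setup_from_preconditioner(
      const PETScWrappers::PreconditionJacobi &preconditioner)
    {
      SolverControl                control(1000, 1e-12);
      PETScWrappers::SolverPreOnly solver(control, MPI_COMM_WORLD);

      // The KSP created inside initialize() lives on
      // preconditioner.get_mpi_communicator(); the communicator passed to
      // the constructor above is no longer used.
      solver.initialize(preconditioner);
    }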