From: Wolfgang Bangerth
Date: Thu, 17 Nov 2022 00:13:10 +0000 (-0700)
Subject: Deprecate the use of PETScWrappers::Solver* constructors that take an MPI communicator.
X-Git-Tag: v9.5.0-rc1~841^2~4
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=cd2a84c33d4e2cc1eb138f7c82b7c9a664d48434;p=dealii.git

Deprecate the use of PETScWrappers::Solver* constructors that take an MPI communicator.
---
diff --git a/include/deal.II/lac/petsc_solver.h b/include/deal.II/lac/petsc_solver.h index e767324e81..a687966709 100644 --- a/include/deal.II/lac/petsc_solver.h +++ b/include/deal.II/lac/petsc_solver.h @@ -251,23 +251,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverRichardson(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverRichardson(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -302,23 +303,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverChebychev(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverChebychev(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -352,23 +354,23 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object.
However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverCG(SolverControl &cn, const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverCG(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -402,23 +404,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverBiCG(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverBiCG(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -469,23 +472,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverGMRES(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. 
* - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverGMRES(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -520,23 +524,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverBicgstab(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverBicgstab(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -552,6 +557,8 @@ namespace PETScWrappers set_solver_type(KSP &ksp) const override; }; + + /** * An implementation of the solver interface using the PETSc CG Squared * solver. @@ -569,23 +576,23 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverCGS(SolverControl &cn, const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. 
+ * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverCGS(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -619,23 +626,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverTFQMR(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverTFQMR(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -674,23 +682,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverTCQMR(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverTCQMR(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -724,23 +733,23 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. 
* * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverCR(SolverControl &cn, const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverCR(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -775,23 +784,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverLSQR(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverLSQR(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -830,23 +840,24 @@ namespace PETScWrappers /** * Constructor. In contrast to deal.II's own solvers, there is no need to - * give a vector memory object. However, PETSc solvers want to have an MPI - * communicator context over which computations are parallelized. By - * default, @p PETSC_COMM_SELF is used here, but you can change this. Note - * that for single processor (non-MPI) versions, this parameter does not - * have any effect. + * give a vector memory object. * * The last argument takes a structure with additional, solver dependent * flags for tuning. + */ + SolverPreOnly(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. * - * Note that the communicator used here must match the communicator used - * in the system matrix, solution, and right hand side object of the solve - * to be done with this solver. Otherwise, PETSc will generate hard to - * track down errors, see the documentation of the SolverBase class. 
+ * @deprecated */ + DEAL_II_DEPRECATED_EARLY SolverPreOnly(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, - const AdditionalData &data = AdditionalData()); + const MPI_Comm & mpi_communicator, + const AdditionalData &data = AdditionalData()); protected: /** @@ -893,11 +904,22 @@ namespace PETScWrappers */ struct AdditionalData {}; + /** - * Constructor + * Constructor. + */ + SparseDirectMUMPS(SolverControl & cn, + const AdditionalData &data = AdditionalData()); + + /** + * Constructor. This constructor is deprecated and ignores the MPI + * communicator argument. Use the other constructor instead. + * + * @deprecated */ + DEAL_II_DEPRECATED_EARLY SparseDirectMUMPS(SolverControl & cn, - const MPI_Comm & mpi_communicator = PETSC_COMM_SELF, + const MPI_Comm & mpi_communicator, const AdditionalData &data = AdditionalData()); /** diff --git a/source/lac/petsc_solver.cc b/source/lac/petsc_solver.cc index 2e5dd099b7..33b9e23292 100644 --- a/source/lac/petsc_solver.cc +++ b/source/lac/petsc_solver.cc @@ -200,6 +200,8 @@ namespace PETScWrappers return 0; } + + void SolverBase::initialize(const PreconditionBase &preconditioner) { @@ -246,14 +248,21 @@ namespace PETScWrappers - SolverRichardson::SolverRichardson(SolverControl &cn, - const MPI_Comm &, + SolverRichardson::SolverRichardson(SolverControl & cn, const AdditionalData &data) : SolverBase(cn) , additional_data(data) {} + + SolverRichardson::SolverRichardson(SolverControl &cn, + const MPI_Comm &, + const AdditionalData &data) + : SolverRichardson(cn, data) + {} + + void SolverRichardson::set_solver_type(KSP &ksp) const { @@ -294,14 +303,20 @@ namespace PETScWrappers /* ---------------------- SolverChebychev ------------------------ */ - SolverChebychev::SolverChebychev(SolverControl &cn, - const MPI_Comm &, + SolverChebychev::SolverChebychev(SolverControl & cn, const AdditionalData &data) : SolverBase(cn) , additional_data(data) {} + SolverChebychev::SolverChebychev(SolverControl &cn, + const MPI_Comm &, + const AdditionalData &data) + : SolverChebychev(cn, data) + {} + + void SolverChebychev::set_solver_type(KSP &ksp) const { @@ -318,11 +333,16 @@ namespace PETScWrappers /* ---------------------- SolverCG ------------------------ */ + SolverCG::SolverCG(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverCG::SolverCG(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverCG(cn, data) {} @@ -342,11 +362,16 @@ namespace PETScWrappers /* ---------------------- SolverBiCG ------------------------ */ + SolverBiCG::SolverBiCG(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverBiCG::SolverBiCG(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverBiCG(cn, data) {} @@ -375,11 +400,16 @@ namespace PETScWrappers + SolverGMRES::SolverGMRES(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverGMRES::SolverGMRES(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverGMRES(cn, data) {} @@ -409,11 +439,16 @@ namespace PETScWrappers /* ---------------------- SolverBicgstab ------------------------ */ + SolverBicgstab::SolverBicgstab(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverBicgstab::SolverBicgstab(SolverControl 
&cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverBicgstab(cn, data) {} @@ -433,11 +468,16 @@ namespace PETScWrappers /* ---------------------- SolverCGS ------------------------ */ + SolverCGS::SolverCGS(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverCGS::SolverCGS(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverCGS(cn, data) {} @@ -457,11 +497,16 @@ namespace PETScWrappers /* ---------------------- SolverTFQMR ------------------------ */ + SolverTFQMR::SolverTFQMR(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverTFQMR::SolverTFQMR(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverTFQMR(cn, data) {} @@ -481,11 +526,16 @@ namespace PETScWrappers /* ---------------------- SolverTCQMR ------------------------ */ + SolverTCQMR::SolverTCQMR(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverTCQMR::SolverTCQMR(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverTCQMR(cn, data) {} @@ -505,11 +555,16 @@ namespace PETScWrappers /* ---------------------- SolverCR ------------------------ */ + SolverCR::SolverCR(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverCR::SolverCR(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverCR(cn, data) {} @@ -529,14 +584,21 @@ namespace PETScWrappers /* ---------------------- SolverLSQR ------------------------ */ + SolverLSQR::SolverLSQR(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + + SolverLSQR::SolverLSQR(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverLSQR(cn, data) {} + void SolverLSQR::set_solver_type(KSP &ksp) const { @@ -553,11 +615,16 @@ namespace PETScWrappers /* ---------------------- SolverPreOnly ------------------------ */ + SolverPreOnly::SolverPreOnly(SolverControl &cn, const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + {} + + SolverPreOnly::SolverPreOnly(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) + : SolverPreOnly(cn, data) {} @@ -587,23 +654,31 @@ namespace PETScWrappers /* ---------------------- SparseDirectMUMPS------------------------ */ - SparseDirectMUMPS::SolverDataMUMPS::~SolverDataMUMPS() - { - destroy_krylov_solver(ksp); - // the 'pc' object is owned by the 'ksp' object, and consequently - // does not have to be destroyed explicitly here - } + SparseDirectMUMPS::SparseDirectMUMPS(SolverControl & cn, + const AdditionalData &data) + : SolverBase(cn) + , additional_data(data) + , symmetric_mode(false) + {} + SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &cn, const MPI_Comm &, const AdditionalData &data) - : SolverBase(cn) - , additional_data(data) - , symmetric_mode(false) + : SparseDirectMUMPS(cn, data) {} + + SparseDirectMUMPS::SolverDataMUMPS::~SolverDataMUMPS() + { + destroy_krylov_solver(ksp); + // the 'pc' object is owned by the 'ksp' object, and consequently + // does not have to be destroyed explicitly here + } + + void SparseDirectMUMPS::set_solver_type(KSP 
&ksp) const { @@ -803,6 +878,8 @@ namespace PETScWrappers # endif } + + PetscErrorCode SparseDirectMUMPS::convergence_test(KSP /*ksp*/, const PetscInt iteration, @@ -840,6 +917,8 @@ namespace PETScWrappers return 0; } + + void SparseDirectMUMPS::set_symmetric_mode(const bool flag) {
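
For downstream code, the migration implied by these deprecations is simply to drop the communicator argument when constructing a solver; the deprecated overloads still compile but ignore it. A minimal sketch of the new usage, assuming the usual PETScWrappers::MPI matrix and vector types and the existing SolverBase::solve() interface (the function and object names are placeholders):

#include <deal.II/lac/petsc_precondition.h>
#include <deal.II/lac/petsc_solver.h>
#include <deal.II/lac/petsc_sparse_matrix.h>
#include <deal.II/lac/petsc_vector.h>
#include <deal.II/lac/solver_control.h>

using namespace dealii;

void solve_system(const PETScWrappers::MPI::SparseMatrix &system_matrix,
                  PETScWrappers::MPI::Vector &            solution,
                  const PETScWrappers::MPI::Vector &      system_rhs)
{
  SolverControl solver_control(1000, 1e-12);

  // Deprecated by this patch:
  //   PETScWrappers::SolverCG solver(solver_control, mpi_communicator);
  // New form -- no communicator argument:
  PETScWrappers::SolverCG solver(solver_control);

  PETScWrappers::PreconditionJacobi preconditioner(system_matrix);
  solver.solve(system_matrix, solution, system_rhs, preconditioner);
}

The same one-argument form applies to every solver class touched by the patch, including SparseDirectMUMPS.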
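On the implementation side, each deprecated constructor is kept only as a thin forwarding overload, as the .cc hunks above show. A generic, self-contained illustration of that delegating-constructor pattern follows; DEAL_II_DEPRECATED_EARLY plays roughly the role that the standard [[deprecated]] attribute plays here, and the class and types below are made up for the illustration:

#include <iostream>

// Stand-in for MPI_Comm so the example is self-contained.
struct DummyComm
{};

class ExampleSolver
{
public:
  // The constructor that does the actual setup.
  explicit ExampleSolver(const int max_iterations)
    : max_iterations(max_iterations)
  {}

  // Backward-compatible overload: marked deprecated and simply delegating to
  // the constructor above while ignoring the communicator -- the same pattern
  // the patch applies to every PETScWrappers solver class.
  [[deprecated("the communicator argument is ignored; use ExampleSolver(int)")]]
  ExampleSolver(const int max_iterations, const DummyComm & /*communicator*/)
    : ExampleSolver(max_iterations)
  {}

  void print_settings() const
  {
    std::cout << "max_iterations = " << max_iterations << std::endl;
  }

private:
  int max_iterations;
};

int main()
{
  ExampleSolver preferred(100);            // new, preferred form
  ExampleSolver legacy(100, DummyComm{});  // compiles, but warns at compile time
  preferred.print_settings();
  legacy.print_settings();
}

Delegating to the new constructor keeps the two overloads from drifting apart while the deprecated one is still supported.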