/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverRichardson(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverRichardson(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
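A minimal usage sketch of the two constructor variants. The matrix A, the
vectors x and b, and the concrete tolerance are assumptions made only for
this example; they are not part of the patch:

    #include <deal.II/lac/petsc_precondition.h>
    #include <deal.II/lac/petsc_solver.h>
    #include <deal.II/lac/solver_control.h>

    using namespace dealii;

    // ... assume PETScWrappers::MPI::SparseMatrix A and
    //     PETScWrappers::MPI::Vector x, b have already been assembled ...

    SolverControl                     control(1000, 1e-10);
    PETScWrappers::PreconditionJacobi preconditioner(A);

    // New form: no communicator argument; the communicator context is
    // expected to come from the objects handed to solve().
    PETScWrappers::SolverRichardson solver(control);
    solver.solve(A, x, b, preconditioner);

    // Deprecated form: still compiles, but the communicator is ignored and
    // may trigger a deprecation warning.
    PETScWrappers::SolverRichardson old_style(control, MPI_COMM_WORLD);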
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverChebychev(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverChebychev(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverCG(SolverControl &cn, const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverCG(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverBiCG(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverBiCG(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverGMRES(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverGMRES(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
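Tuning flags continue to be passed through the solver-specific AdditionalData
structure with the new, communicator-free constructor. A short sketch,
assuming the existing GMRES AdditionalData interface (restart_parameter,
right_preconditioning); the values are arbitrary:

    SolverControl control(1000, 1e-12);

    // Restart GMRES every 50 iterations and use right preconditioning.
    PETScWrappers::SolverGMRES::AdditionalData gmres_data(
      /*restart_parameter=*/50,
      /*right_preconditioning=*/true);

    PETScWrappers::SolverGMRES gmres(control, gmres_data);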
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverBicgstab(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverBicgstab(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
set_solver_type(KSP &ksp) const override;
};
+
+
/**
* An implementation of the solver interface using the PETSc CG Squared
* solver.
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverCGS(SolverControl &cn, const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverCGS(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverTFQMR(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverTFQMR(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverTCQMR(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverTCQMR(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverCR(SolverControl &cn, const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverCR(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverLSQR(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverLSQR(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
/**
* Constructor. In contrast to deal.II's own solvers, there is no need to
- * give a vector memory object. However, PETSc solvers want to have an MPI
- * communicator context over which computations are parallelized. By
- * default, @p PETSC_COMM_SELF is used here, but you can change this. Note
- * that for single processor (non-MPI) versions, this parameter does not
- * have any effect.
+ * give a vector memory object.
*
* The last argument takes a structure with additional, solver dependent
* flags for tuning.
+ */
+ SolverPreOnly(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
*
- * Note that the communicator used here must match the communicator used
- * in the system matrix, solution, and right hand side object of the solve
- * to be done with this solver. Otherwise, PETSc will generate hard to
- * track down errors, see the documentation of the SolverBase class.
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SolverPreOnly(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
- const AdditionalData &data = AdditionalData());
+ const MPI_Comm & mpi_communicator,
+ const AdditionalData &data = AdditionalData());
protected:
/**
*/
struct AdditionalData
{};
+
/**
- * Constructor
+ * Constructor.
+ */
+ SparseDirectMUMPS(SolverControl & cn,
+ const AdditionalData &data = AdditionalData());
+
+ /**
+ * Constructor. This constructor is deprecated and ignores the MPI
+ * communicator argument. Use the other constructor instead.
+ *
+ * @deprecated
*/
+ DEAL_II_DEPRECATED_EARLY
SparseDirectMUMPS(SolverControl & cn,
- const MPI_Comm & mpi_communicator = PETSC_COMM_SELF,
+ const MPI_Comm & mpi_communicator,
const AdditionalData &data = AdditionalData());
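A brief sketch of the new constructor in combination with the existing
solve() and set_symmetric_mode() members; the matrix A and the vectors x
and b are again assumed to exist:

    SolverControl control; // stopping criteria matter little for a direct solver

    PETScWrappers::SparseDirectMUMPS mumps(control);
    mumps.set_symmetric_mode(true); // only if A is actually symmetric
    mumps.solve(A, x, b);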
/**
return 0;
}
+
+
void
SolverBase::initialize(const PreconditionBase &preconditioner)
{
- SolverRichardson::SolverRichardson(SolverControl &cn,
- const MPI_Comm &,
+ SolverRichardson::SolverRichardson(SolverControl & cn,
const AdditionalData &data)
: SolverBase(cn)
, additional_data(data)
{}
+
+ SolverRichardson::SolverRichardson(SolverControl &cn,
+ const MPI_Comm &,
+ const AdditionalData &data)
+ : SolverRichardson(cn, data)
+ {}
+
+
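The deprecated overload above simply forwards to the new primary constructor
via a C++11 delegating constructor, so the ignored communicator is discarded
in exactly one place. The same idiom, reduced to a standalone sketch with
purely illustrative names:

    struct ExampleSolver
    {
      explicit ExampleSolver(const int max_iterations)
        : max_iterations(max_iterations)
      {}

      // Deprecated overload: delegate to the primary constructor and
      // discard the extra argument.
      [[deprecated]] ExampleSolver(const int max_iterations,
                                   const int /*communicator*/)
        : ExampleSolver(max_iterations)
      {}

      int max_iterations;
    };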
void
SolverRichardson::set_solver_type(KSP &ksp) const
{
/* ---------------------- SolverChebychev ------------------------ */
- SolverChebychev::SolverChebychev(SolverControl &cn,
- const MPI_Comm &,
+ SolverChebychev::SolverChebychev(SolverControl & cn,
const AdditionalData &data)
: SolverBase(cn)
, additional_data(data)
{}
+
+  SolverChebychev::SolverChebychev(SolverControl &cn,
+ const MPI_Comm &,
+ const AdditionalData &data)
+ : SolverChebychev(cn, data)
+ {}
+
+
void
SolverChebychev::set_solver_type(KSP &ksp) const
{
/* ---------------------- SolverCG ------------------------ */
+ SolverCG::SolverCG(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverCG::SolverCG(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverCG(cn, data)
{}
/* ---------------------- SolverBiCG ------------------------ */
+ SolverBiCG::SolverBiCG(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverBiCG::SolverBiCG(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverBiCG(cn, data)
{}
+ SolverGMRES::SolverGMRES(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverGMRES::SolverGMRES(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverGMRES(cn, data)
{}
/* ---------------------- SolverBicgstab ------------------------ */
+ SolverBicgstab::SolverBicgstab(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverBicgstab::SolverBicgstab(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverBicgstab(cn, data)
{}
/* ---------------------- SolverCGS ------------------------ */
+ SolverCGS::SolverCGS(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverCGS::SolverCGS(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverCGS(cn, data)
{}
/* ---------------------- SolverTFQMR ------------------------ */
+ SolverTFQMR::SolverTFQMR(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverTFQMR::SolverTFQMR(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverTFQMR(cn, data)
{}
/* ---------------------- SolverTCQMR ------------------------ */
+ SolverTCQMR::SolverTCQMR(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverTCQMR::SolverTCQMR(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverTCQMR(cn, data)
{}
/* ---------------------- SolverCR ------------------------ */
+ SolverCR::SolverCR(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverCR::SolverCR(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverCR(cn, data)
{}
/* ---------------------- SolverLSQR ------------------------ */
+ SolverLSQR::SolverLSQR(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverLSQR::SolverLSQR(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverLSQR(cn, data)
{}
+
void
SolverLSQR::set_solver_type(KSP &ksp) const
{
/* ---------------------- SolverPreOnly ------------------------ */
+ SolverPreOnly::SolverPreOnly(SolverControl &cn, const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ {}
+
+
SolverPreOnly::SolverPreOnly(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
+ : SolverPreOnly(cn, data)
{}
/* ---------------------- SparseDirectMUMPS------------------------ */
- SparseDirectMUMPS::SolverDataMUMPS::~SolverDataMUMPS()
- {
- destroy_krylov_solver(ksp);
- // the 'pc' object is owned by the 'ksp' object, and consequently
- // does not have to be destroyed explicitly here
- }
+ SparseDirectMUMPS::SparseDirectMUMPS(SolverControl & cn,
+ const AdditionalData &data)
+ : SolverBase(cn)
+ , additional_data(data)
+ , symmetric_mode(false)
+ {}
+
SparseDirectMUMPS::SparseDirectMUMPS(SolverControl &cn,
const MPI_Comm &,
const AdditionalData &data)
- : SolverBase(cn)
- , additional_data(data)
- , symmetric_mode(false)
+ : SparseDirectMUMPS(cn, data)
{}
+
+ SparseDirectMUMPS::SolverDataMUMPS::~SolverDataMUMPS()
+ {
+ destroy_krylov_solver(ksp);
+ // the 'pc' object is owned by the 'ksp' object, and consequently
+ // does not have to be destroyed explicitly here
+ }
+
+
void
SparseDirectMUMPS::set_solver_type(KSP &ksp) const
{
# endif
}
+
+
PetscErrorCode
SparseDirectMUMPS::convergence_test(KSP /*ksp*/,
const PetscInt iteration,
return 0;
}
+
+
void
SparseDirectMUMPS::set_symmetric_mode(const bool flag)
{