//---------------------------------------------------------------------------
// $Id$
-// Author: Toby D. Young, Polish Academy of Sciences, 2008, 2009
+// Author: Toby D. Young, Polish Academy of Sciences, 2008-2013
//
-// Copyright (C) 2009, 2010, 2012 by the deal.II authors
+// Copyright (C) 2009-2013 by the deal.II authors
//
// This file is subject to QPL and may not be distributed
// without copyright and license information. Please refer
#ifndef __deal2__slepc_solver_h
#define __deal2__slepc_solver_h
-
#include <deal.II/base/config.h>
#ifdef DEAL_II_USE_SLEPC
* interface to SLEPc solvers that handle both of these problem sets.
*
* SLEPcWrappers can be implemented in application codes in the
- * following way:
- @verbatim
- SolverControl solver_control (1000, 1e-9);
- SolverArnoldi system (solver_control, mpi_communicator);
- system.solve (A, B, lambda, x, size_of_spectrum);
- @endverbatim
- * for the generalized eigenvalue problem $Ax=B\lambda x$, where the
+ * following way:
+ * @verbatim
+ *  SolverControl solver_control (1000, 1e-9);
+ *  SolverArnoldi system (solver_control, mpi_communicator);
+ *  system.solve (A, B, lambda, x, size_of_spectrum);
+ * @endverbatim
+ * for the generalized eigenvalue problem $Ax=B\lambda x$, where the
* variable <code>const unsigned int size_of_spectrum</code> tells
 * SLEPc the number of eigenvector/eigenvalue pairs to solve for. See
* also <code>step-36</code> for a hands-on example.
*
* @ingroup SLEPcWrappers
*
- * @author Toby D. Young 2008, 2009, 2010; and Rickard Armiento 2008.
+ * @author Toby D. Young 2008, 2009, 2010, 2011, 2013; and Rickard
+ * Armiento 2008.
*
 * @note Various tweaks and enhancements contributed by Eloy Romero and
* Jose E. Roman 2009, 2010.
{
/**
- * Base class for solver classes
- * using the SLEPc solvers. Since
- * solvers in SLEPc are selected
- * based on flags passed to a
- * generic solver object, basically
- * all the actual solver calls
- * happen in this class, and
- * derived classes simply set the
- * right flags to select one solver
- * or another, or to set certain
- * parameters for individual
- * solvers.
+ * Base class for solver classes using the SLEPc solvers. Since
+ * solvers in SLEPc are selected based on flags passed to a generic
+ * solver object, basically all the actual solver calls happen in
+ * this class, and derived classes simply set the right flags to
+ * select one solver or another, or to set certain parameters for
+ * individual solvers.
*/
class SolverBase
{
public:
/**
- * Constructor. Takes the MPI
- * communicator over which parallel
+ * Constructor. Takes the MPI communicator over which parallel
* computations are to happen.
*/
SolverBase (SolverControl &cn,
virtual ~SolverBase ();
/**
- * Composite method that solves the
- * eigensystem $Ax=\lambda x$. The
- * eigenvector sent in has to have
- * at least one element that we can
- * use as a template when resizing,
- * since we do not know the
- * parameters of the specific
- * vector class used
- * (i.e. local_dofs for MPI
- * vectors). However, while copying
- * eigenvectors, at least twice the
- * memory size of <tt>vr</tt> is
- * being used (and can be more). To
- * avoid doing this, the fairly
- * standard calling sequence
- * executed here is used:
- * Initialise; Set up matrices for
- * solving; Actually solve the
- * system; Gather the solution(s);
- * and reset.
+ * Composite method that solves the eigensystem $Ax=\lambda
+ * x$. The eigenvector sent in has to have at least one element
+ * that we can use as a template when resizing, since we do not
+ * know the parameters of the specific vector class used
+ * (i.e. local_dofs for MPI vectors). However, while copying
+ * eigenvectors, at least twice the memory size of <tt>vr</tt> is
+ * being used (and can be more). To avoid doing this, the fairly
+ * standard calling sequence executed here is used: Initialise;
+ * Set up matrices for solving; Actually solve the system; Gather
+ * the solution(s); and reset.
*
- * Note that the number of
- * converged eigenvectors can be
- * larger than the number of
- * eigenvectors requested; this is
- * due to a round off error
- * (success) of the eigenproblem
- * solver context. If this is found
- * to be the case we simply do not
- * bother with more eigenpairs than
- * requested, but handle that it
- * may be more than specified by
- * ignoring any extras. By default
- * one eigenvector/eigenvalue pair
- * is computed.
+ * Note that the number of converged eigenvectors can be larger
+ * than the number of eigenvectors requested; this is due to a
+ * round off error (success) of the eigenproblem solver
+ * context. If this is found to be the case we simply do not
+ * bother with more eigenpairs than requested, but handle that it
+ * may be more than specified by ignoring any extras. By default
+ * one eigenvector/eigenvalue pair is computed.
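+ *
+ * A minimal sketch of this composite call for the standard problem
+ * (the names <code>A</code>, <code>lambda</code>, <code>x</code>,
+ * <code>solver_control</code> and <code>mpi_communicator</code> are
+ * assumed to be set up as in the class documentation, with the
+ * matrix $B$ omitted) might read:
+ * @verbatim
+ *  SLEPcWrappers::SolverArnoldi system (solver_control, mpi_communicator);
+ *  system.solve (A, lambda, x, size_of_spectrum);
+ * @endverbatim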
*/
template <typename OutputVector>
void
const unsigned int n_eigenvectors);
/**
- * Same as above, but here a
- * composite method for solving the
+ * Same as above, but here a composite method for solving the
* system $A x=\lambda B x$.
*/
template <typename OutputVector>
const unsigned int n_eigenvectors);
/**
- * Initialize solver for the linear
- * system $Ax=\lambda x$. (Note:
- * this is required before calling
- * solve ())
+ * Initialize solver for the linear system $Ax=\lambda x$. (Note:
+ * this is required before calling solve ())
*/
void
set_matrices (const PETScWrappers::MatrixBase &A);
/**
- * Same as above, but here
- * initialize solver for the linear
- * system $A x=\lambda B x$.
+ * Same as above, but here initialize solver for the linear system
+ * $A x=\lambda B x$.
*/
void
set_matrices (const PETScWrappers::MatrixBase &A,
const PETScWrappers::MatrixBase &B);
/**
- * Set the initial vector for the
- * solver.
+ * Set the initial vector for the solver.
*/
void
set_initial_vector
(const PETScWrappers::VectorBase &this_initial_vector);
/**
- * Set the spectral transformation
- * to be used.
+ * Set the spectral transformation to be used.
*/
void
set_transformation (SLEPcWrappers::TransformationBase &this_transformation);
/**
- * Indicate which part of the
- * spectrum is to be computed. By
- * default largest magnitude
- * eigenvalues are computed. For
- * other allowed values see the
- * SLEPc documentation.
+ * Indicate which part of the spectrum is to be computed. By
+ * default largest magnitude eigenvalues are computed. For other
+ * allowed values see the SLEPc documentation.
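+ *
+ * For example (a sketch only; <code>system</code> is assumed to be
+ * a solver object as in the class documentation), the eigenvalues
+ * of smallest real part could be requested with:
+ * @verbatim
+ *  system.set_which_eigenpairs (EPS_SMALLEST_REAL);
+ * @endverbatim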
*/
void
set_which_eigenpairs (EPSWhich set_which);
/**
- * Solve the linear system for
- * n_eigenvectors
- * eigenstates. Parameter
- * <code>n_converged</code>
- * contains the actual number of
- * eigenstates that have .
- * converged; this can be both
- * fewer or more than
- * n_eigenvectors, depending on the
- * SLEPc eigensolver used.
+ * Solve the linear system for <code>n_eigenvectors</code>
+ * eigenstates. Parameter <code>n_converged</code> contains the
+ * actual number of eigenstates that have converged; this can be
+ * either fewer or more than <code>n_eigenvectors</code>, depending
+ * on the SLEPc eigensolver used.
*/
void
solve (const unsigned int n_eigenvectors, unsigned int *n_converged);
-
/**
- * Access the solutions for a
- * solved eigenvector problem, pair
- * index solutions,
- * $\text{index}\,\in\,0\hdots
+ * Access the solutions for a solved eigenvector problem by pair
+ * index, $\text{index}\,\in\,0\dots
 * \text{n\_converged}-1$.
*/
void
PETScWrappers::VectorBase &vr);
/**
- * Reset the solver, and return
- * memory for eigenvectors
+ * Reset the solver, and release memory for eigenvectors.
*/
void
reset();
/**
- * Retrieve the SLEPc solver object
- * that is internally used.
+ * Retrieve the SLEPc solver object that is internally used.
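+ *
+ * This gives access to raw SLEPc functionality; for example (a
+ * sketch only, assuming a solver object <code>solver</code> that
+ * has already been set up, with error checking omitted), the solver
+ * settings could be printed with:
+ * @verbatim
+ *  EPSView (*solver.get_EPS (), PETSC_VIEWER_STDOUT_WORLD);
+ * @endverbatim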
*/
EPS *
get_EPS ();
-
/**
- * Access to the object that
- * controls convergence.
+ * Access to the object that controls convergence.
*/
SolverControl &control () const;
protected:
/**
- * Reference to the object that
- * controls convergence of the
+ * Reference to the object that controls convergence of the
* iterative solver.
*/
SolverControl &solver_control;
/**
- * Copy of the MPI communicator
- * object to be used for the
- * solver.
+ * Copy of the MPI communicator object to be used for the solver.
*/
const MPI_Comm mpi_communicator;
/**
- * Function that takes an
- * Eigenvalue Problem Solver
- * context object, and sets the
- * type of solver that is requested
- * by the derived class.
+ * Function that takes an Eigenvalue Problem Solver context
+ * object, and sets the type of solver that is requested by the
+ * derived class.
*/
virtual void set_solver_type (EPS &eps) const = 0;
/**
- * Which portion of the spectrum to
- * solve from.
+ * Which portion of the spectrum to solve from.
*/
EPSWhich set_which;
/**
- * The matrix $A$ of the
- * generalized eigenvalue problem
- * $Ax=B\lambda x$, or the standard
- * eigenvalue problem $Ax=\lambda
+ * The matrix $A$ of the generalized eigenvalue problem
+ * $Ax=B\lambda x$, or the standard eigenvalue problem $Ax=\lambda
* x$.
*/
const PETScWrappers::MatrixBase *opA;
/**
- * The matrix $B$ of the
- * generalized eigenvalue problem
+ * The matrix $B$ of the generalized eigenvalue problem
* $Ax=B\lambda x$.
*/
const PETScWrappers::MatrixBase *opB;
+ /**
+ * An initial vector used to "feed" some SLEPc solvers.
+ */
const PETScWrappers::VectorBase *initial_vector;
/**
- * Pointer to an an object that
- * describes transformations that
- * can be applied to the eigenvalue
- * problem.
+ * Pointer to an object that describes transformations that can
+ * be applied to the eigenvalue problem.
*/
SLEPcWrappers::TransformationBase *transformation;
private:
/**
- * A function that is used in SLEPc
- * as a callback to check on
- * convergence. It takes the
- * information provided from SLEPc
- * and checks it against deal.II's
- * own SolverControl objects to see
- * if convergence has been reached.
+ * A function that is used in SLEPc as a callback to check on
+ * convergence. It takes the information provided from SLEPc and
+ * checks it against deal.II's own SolverControl objects to see if
+ * convergence has been reached.
*/
static
int
void *solver_control);
/**
- * Objects of this type are
- * explicitly created, but are
- * destroyed when the surrounding
- * solver object goes out of scope,
- * or when we assign a new value to
- * the pointer to this object. The
- * respective Destroy functions are
- * therefore written into the
- * destructor of this object, even
- * though the object does not have
+ * Objects of this type are explicitly created, but are destroyed
+ * when the surrounding solver object goes out of scope, or when
+ * we assign a new value to the pointer to this object. The
+ * respective Destroy functions are therefore written into the
+ * destructor of this object, even though the object does not have
* a constructor.
*/
struct SolverData
~SolverData ();
/**
- * Objects for Eigenvalue Problem
- * Solver.
+ * Objects for Eigenvalue Problem Solver.
*/
EPS eps;
};
+ /**
+ * Pointer to the <code>SolverData</code> object.
+ */
std_cxx1x::shared_ptr<SolverData> solver_data;
};
public:
/**
- * Standardized data struct to pipe
- * additional data to the solver,
+ * Standardized data struct to pipe additional data to the solver,
* should it be needed.
*/
struct AdditionalData
{};
/**
- * SLEPc solvers will want to have
- * an MPI communicator context over
- * which computations are
- * parallelized. By default, this
- * carries the same behaviour has
- * the PETScWrappers, but you can
+ * SLEPc solvers will want to have an MPI communicator context
+ * over which computations are parallelized. By default, this
+ * carries the same behaviour as the PETScWrappers, but you can
* change that.
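+ *
+ * For example (a sketch only; <code>solver_control</code> is
+ * assumed to exist), a solver parallelized over all processes
+ * could be created with:
+ * @verbatim
+ *  SLEPcWrappers::SolverKrylovSchur solver (solver_control, MPI_COMM_WORLD);
+ * @endverbatim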
*/
SolverKrylovSchur (SolverControl &cn,
protected:
/**
- * Store a copy of the flags for
- * this particular solver.
+ * Store a copy of the flags for this particular solver.
*/
const AdditionalData additional_data;
/**
- * Function that takes a Eigenvalue
- * Problem Solver context object,
- * and sets the type of solver that
- * is appropriate for this class.
+ * Function that takes an Eigenvalue Problem Solver context object,
+ * and sets the type of solver that is appropriate for this class.
*/
virtual void set_solver_type (EPS &eps) const;
};
{
public:
/**
- * Standardized data struct to pipe
- * additional data to the solver,
+ * Standardized data struct to pipe additional data to the solver,
* should it be needed.
*/
struct AdditionalData
{
/**
- * Constructor. By default, set the
- * option of delayed
- * reorthogonalization to false,
- * i.e. don't do it.
+ * Constructor. By default, set the option of delayed
+ * reorthogonalization to false, i.e. don't do it.
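+ *
+ * A sketch of how this flag might be switched on (assuming
+ * <code>solver_control</code> and <code>mpi_communicator</code>
+ * exist as in the class documentation):
+ * @verbatim
+ *  SLEPcWrappers::SolverArnoldi solver
+ *    (solver_control, mpi_communicator,
+ *     SLEPcWrappers::SolverArnoldi::AdditionalData (true));
+ * @endverbatim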
*/
AdditionalData (const bool delayed_reorthogonalization = false);
/**
- * Flag for delayed
- * reorthogonalization.
+ * Flag for delayed reorthogonalization.
*/
bool delayed_reorthogonalization;
};
/**
- * SLEPc solvers will want to have
- * an MPI communicator context over
- * which computations are
- * parallelized. By default, this
- * carries the same behaviour has
- * the PETScWrappers, but you can
+ * SLEPc solvers will want to have an MPI communicator context
+ * over which computations are parallelized. By default, this
+ * carries the same behaviour as the PETScWrappers, but you can
* change that.
*/
SolverArnoldi (SolverControl &cn,
protected:
/**
- * Store a copy of the flags for
- * this particular solver.
+ * Store a copy of the flags for this particular solver.
*/
const AdditionalData additional_data;
/**
- * Function that takes a Eigenvalue
- * Problem Solver context object,
- * and sets the type of solver that
- * is appropriate for this class.
+ * Function that takes an Eigenvalue Problem Solver context object,
+ * and sets the type of solver that is appropriate for this class.
*/
virtual void set_solver_type (EPS &eps) const;
{
public:
/**
- * Standardized data struct to pipe
- * additional data to the solver,
+ * Standardized data struct to pipe additional data to the solver,
* should it be needed.
*/
struct AdditionalData
{};
/**
- * SLEPc solvers will want to have
- * an MPI communicator context over
- * which computations are
- * parallelized. By default, this
- * carries the same behaviour has
- * the PETScWrappers, but you can
+ * SLEPc solvers will want to have an MPI communicator context
+ * over which computations are parallelized. By default, this
+ * carries the same behaviour as the PETScWrappers, but you can
* change that.
*/
SolverLanczos (SolverControl &cn,
protected:
/**
- * Store a copy of the flags for
- * this particular solver.
+ * Store a copy of the flags for this particular solver.
*/
const AdditionalData additional_data;
/**
- * Function that takes a Eigenvalue
- * Problem Solver context object,
- * and sets the type of solver that
- * is appropriate for this class.
+ * Function that takes an Eigenvalue Problem Solver context object,
+ * and sets the type of solver that is appropriate for this class.
*/
virtual void set_solver_type (EPS &eps) const;
/**
* An implementation of the solver interface using the SLEPc Power
- * solver. Usage: Largest values of spectrum only, all problem types,
- * complex.
+ * solver. Usage: Largest values of spectrum only, all problem
+ * types, complex.
*
* @ingroup SLEPcWrappers
* @author Toby D. Young 2010
{
public:
/**
- * Standardized data struct to pipe
- * additional data to the solver,
+ * Standardized data struct to pipe additional data to the solver,
* should it be needed.
*/
struct AdditionalData
{};
/**
- * SLEPc solvers will want to have
- * an MPI communicator context over
- * which computations are
- * parallelized. By default, this
- * carries the same behaviour has
- * the PETScWrappers, but you can
+ * SLEPc solvers will want to have an MPI communicator context
+ * over which computations are parallelized. By default, this
+ * carries the same behaviour as the PETScWrappers, but you can
* change that.
*/
SolverPower (SolverControl &cn,
protected:
/**
- * Store a copy of the flags for
- * this particular solver.
+ * Store a copy of the flags for this particular solver.
*/
const AdditionalData additional_data;
/**
- * Function that takes a Eigenvalue
- * Problem Solver context object,
- * and sets the type of solver that
- * is appropriate for this class.
+ * Function that takes an Eigenvalue Problem Solver context object,
+ * and sets the type of solver that is appropriate for this class.
*/
virtual void set_solver_type (EPS &eps) const;
};
-#if DEAL_II_PETSC_VERSION_GTE(3,1,0)
- /**
- * An implementation of the solver interface using the SLEPc
- * Generalized Davidson solver. Usage: All problem types.
- *
- * @deprecated The use of this class has been superceded by the
- * class SolverGeneralizedDavidson.
- *
- * @ingroup SLEPcWrappers
- * @author Toby D. Young 2011
- */
-#define SolverDavidson SolverGeneralizedDavidson;
-
/**
* An implementation of the solver interface using the SLEPc
* Generalized Davidson solver. Usage: All problem types.
*
- * @ingroup SLEPcWrappers
- * @author Toby D. Young 2013
+ * @ingroup SLEPcWrappers
+ * @author Toby D. Young 2013
*/
class SolverGeneralizedDavidson : public SolverBase
{
public:
/**
- * Standardized data struct to pipe
- * additional data to the solver,
+ * Standardized data struct to pipe additional data to the solver,
* should it be needed.
*/
struct AdditionalData
{};
/**
- * SLEPc solvers will want to have
- * an MPI communicator context over
- * which computations are
- * parallelized. By default, this
- * carries the same behaviour has
- * the PETScWrappers, but you can
+ * SLEPc solvers will want to have an MPI communicator context
+ * over which computations are parallelized. By default, this
+ * carries the same behaviour as the PETScWrappers, but you can
* change that.
*/
SolverGeneralizedDavidson (SolverControl &cn,
protected:
/**
- * Store a copy of the flags for
- * this particular solver.
+ * Store a copy of the flags for this particular solver.
*/
const AdditionalData additional_data;
/**
- * Function that takes a Eigenvalue
- * Problem Solver context object,
- * and sets the type of solver that
- * is appropriate for this class.
+ * Function that takes an Eigenvalue Problem Solver context object,
+ * and sets the type of solver that is appropriate for this class.
*/
virtual void set_solver_type (EPS &eps) const;
* An implementation of the solver interface using the SLEPc
* Jacobi-Davidson solver. Usage: All problem types.
*
- * @ingroup SLEPcWrappers
- * @author Toby D. Young 2013
+ * @ingroup SLEPcWrappers
+ * @author Toby D. Young 2013
*/
class SolverJacobiDavidson : public SolverBase
{
public:
/**
- * Standardized data struct to pipe
- * additional data to the solver,
+ * Standardized data struct to pipe additional data to the solver,
* should it be needed.
*/
struct AdditionalData
{};
/**
- * SLEPc solvers will want to have
- * an MPI communicator context over
- * which computations are
- * parallelized. By default, this
- * carries the same behaviour has
- * the PETScWrappers, but you can
+ * SLEPc solvers will want to have an MPI communicator context
+ * over which computations are parallelized. By default, this
+ * carries the same behaviour as the PETScWrappers, but you can
* change that.
*/
SolverJacobiDavidson (SolverControl &cn,
protected:
/**
- * Store a copy of the flags for
- * this particular solver.
+ * Store a copy of the flags for this particular solver.
*/
const AdditionalData additional_data;
/**
- * Function that takes a Eigenvalue
- * Problem Solver context object,
- * and sets the type of solver that
- * is appropriate for this class.
+ * Function that takes an Eigenvalue Problem Solver context object,
+ * and sets the type of solver that is appropriate for this class.
*/
virtual void set_solver_type (EPS &eps) const;
};
-#endif
-
-
// --------------------------- inline and template functions -----------
/**
//---------------------------------------------------------------------------
// $Id$
// Version: $Name$
-// Author: Toby D. Young, Polish Academy of Sciences, 2008, 2009
+// Author: Toby D. Young, Polish Academy of Sciences, 2008-2013
//
-// Copyright (C) 2009, 2012 by the deal.II authors
+// Copyright (C) 2009-2013 by the deal.II authors
//
// This file is subject to QPL and may not be distributed
// without copyright and license information. Please refer
AssertThrow (solver_data.get() == 0, ExcSLEPcWrappersUsageError());
solver_data.reset (new SolverData());
- // create eigensolver context and
- // set operators
+ // create eigensolver context and set operators
ierr = EPSCreate (mpi_communicator, &solver_data->eps);
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // set eigenspectrum problem type
- // (general/standard)
+ // set eigenspectrum problem type (general/standard)
AssertThrow (opA, ExcSLEPcWrappersUsageError());
if (opB)
ierr = EPSSetOperators (solver_data->eps, *opA, *opB);
// set runtime options
set_solver_type (solver_data->eps);
- // set which portion of the
- // eigenspectrum to solve for
+ // set which portion of the eigenspectrum to solve for
ierr = EPSSetWhichEigenpairs (solver_data->eps, set_which);
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // set number of eigenvectors to
- // compute
+ // set number of eigenvectors to compute
ierr = EPSSetDimensions (solver_data->eps, n_eigenvectors,
PETSC_DECIDE, PETSC_DECIDE);
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // set the solve options to the
- // eigenvalue problem solver
- // context
+ // set the solve options to the eigenvalue problem solver context
ierr = EPSSetFromOptions (solver_data->eps);
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
ierr = EPSSolve (solver_data->eps);
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // get number of converged
- // eigenstates
+ // get number of converged eigenstates
ierr = EPSGetConverged (solver_data->eps,
#ifdef PETSC_USE_64BIT_INDICES
reinterpret_cast<PetscInt *>(n_converged));
}
/* ---------------------- SolverKrylovSchur ------------------------ */
-
SolverKrylovSchur::SolverKrylovSchur (SolverControl &cn,
const MPI_Comm &mpi_communicator,
const AdditionalData &data)
ierr = EPSSetType (eps, const_cast<char *>(EPSKRYLOVSCHUR));
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // hand over the absolute
- // tolerance and the maximum
- // number of iteration steps to
- // the SLEPc convergence
- // criterion.
+ // hand over the absolute tolerance and the maximum number of
+ // iteration steps to the SLEPc convergence criterion.
ierr = EPSSetTolerances(eps, this->solver_control.tolerance(),
this->solver_control.max_steps());
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
}
/* ---------------------- SolverArnoldi ------------------------ */
-
SolverArnoldi::AdditionalData::
AdditionalData (const bool delayed_reorthogonalization)
:
ierr = EPSSetType (eps, const_cast<char *>(EPSARNOLDI));
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // hand over the absolute
- // tolerance and the maximum
- // number of iteration steps to
- // the SLEPc convergence
- // criterion.
+ // hand over the absolute tolerance and the maximum number of
+ // iteration steps to the SLEPc convergence criterion.
ierr = EPSSetTolerances(eps, this->solver_control.tolerance(),
this->solver_control.max_steps());
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // if requested, set delayed
- // reorthogonalization in the
- // Arnoldi iteration.
+ // if requested, set delayed reorthogonalization in the Arnoldi
+ // iteration.
if (additional_data.delayed_reorthogonalization)
{
ierr = EPSArnoldiSetDelayed (eps, PETSC_TRUE);
}
/* ---------------------- Lanczos ------------------------ */
-
SolverLanczos::SolverLanczos (SolverControl &cn,
const MPI_Comm &mpi_communicator,
const AdditionalData &data)
ierr = EPSSetType (eps, const_cast<char *>(EPSLANCZOS));
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // hand over the absolute
- // tolerance and the maximum
- // number of iteration steps to
- // the SLEPc convergence
- // criterion.
+ // hand over the absolute tolerance and the maximum number of
+ // iteration steps to the SLEPc convergence criterion.
ierr = EPSSetTolerances (eps, this->solver_control.tolerance(),
this->solver_control.max_steps());
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
}
/* ----------------------- Power ------------------------- */
-
SolverPower::SolverPower (SolverControl &cn,
const MPI_Comm &mpi_communicator,
const AdditionalData &data)
ierr = EPSSetType (eps, const_cast<char *>(EPSPOWER));
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // hand over the absolute
- // tolerance and the maximum
- // number of iteration steps to
- // the SLEPc convergence
- // criterion.
+ // hand over the absolute tolerance and the maximum number of
+ // iteration steps to the SLEPc convergence criterion.
ierr = EPSSetTolerances (eps, this->solver_control.tolerance(),
this->solver_control.max_steps());
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
}
-#if DEAL_II_PETSC_VERSION_GTE(3,1,0)
-
/* ---------------- Generalized Davidson ----------------- */
SolverGeneralizedDavidson::SolverGeneralizedDavidson (SolverControl &cn,
const MPI_Comm &mpi_communicator,
void
SolverGeneralizedDavidson::set_solver_type (EPS &eps) const
{
+#if DEAL_II_PETSC_VERSION_GTE(3,1,0)
int ierr;
ierr = EPSSetType (eps, const_cast<char *>(EPSGD));
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // hand over the absolute
- // tolerance and the maximum
- // number of iteration steps to
- // the SLEPc convergence
- // criterion.
+ // hand over the absolute tolerance and the maximum number of
+ // iteration steps to the SLEPc convergence criterion.
ierr = EPSSetTolerances (eps, this->solver_control.tolerance(),
this->solver_control.max_steps());
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
+#else
+  // PETSc/SLEPc version must be at least 3.1.0 (suppress the compiler
+  // warning by performing a void operation on eps).
+  Assert (false,
+          ExcMessage ("Your SLEPc installation does not include a copy of the "
+                      "Generalized Davidson solver. SLEPc version 3.1.0 or newer is required."));
+ Assert (eps, ExcSLEPcWrappersUsageError());
+#endif
}
/* ------------------ Jacobi Davidson -------------------- */
-
SolverJacobiDavidson::SolverJacobiDavidson (SolverControl &cn,
const MPI_Comm &mpi_communicator,
const AdditionalData &data)
void
SolverJacobiDavidson::set_solver_type (EPS &eps) const
{
+#if DEAL_II_PETSC_VERSION_GTE(3,1,0)
int ierr;
ierr = EPSSetType (eps, const_cast<char *>(EPSJD));
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- // hand over the absolute
- // tolerance and the maximum
- // number of iteration steps to
- // the SLEPc convergence
- // criterion.
+ // hand over the absolute tolerance and the maximum number of
+ // iteration steps to the SLEPc convergence criterion.
ierr = EPSSetTolerances (eps, this->solver_control.tolerance(),
this->solver_control.max_steps());
AssertThrow (ierr == 0, ExcSLEPcError(ierr));
- }
+#else
+  // PETSc/SLEPc version must be at least 3.1.0 (suppress the compiler
+  // warning by performing a void operation on eps).
+  Assert (false,
+          ExcMessage ("Your SLEPc installation does not include a copy of the "
+                      "Jacobi-Davidson solver. SLEPc version 3.1.0 or newer is required."));
+ Assert (eps, ExcSLEPcWrappersUsageError());
#endif
+ }
}
DEAL_II_NAMESPACE_CLOSE
#else
+
// On gcc2.95 on Alpha OSF1, the native assembler does not like empty
// files, so provide some dummy code
namespace