From a2baa75e8fa8e27541d3783fcf847ff2457aa766 Mon Sep 17 00:00:00 2001
From: wolf
Date: Tue, 23 Mar 2004 19:58:56 +0000
Subject: [PATCH] Name these classes the same as those in PETScWrappers, but
 move them into a namespace MPI.

git-svn-id: https://svn.dealii.org/trunk@8852 0785d39b-7218-0410-832d-ea1e28bc413d
---
 .../lac/include/lac/petsc_parallel_vector.h | 231 +++++++++---------
 deal.II/lac/source/petsc_parallel_vector.cc |  84 ++++---
 2 files changed, 165 insertions(+), 150 deletions(-)

diff --git a/deal.II/lac/include/lac/petsc_parallel_vector.h b/deal.II/lac/include/lac/petsc_parallel_vector.h
index 9faf31ef19..cd72d93d65 100644
--- a/deal.II/lac/include/lac/petsc_parallel_vector.h
+++ b/deal.II/lac/include/lac/petsc_parallel_vector.h
@@ -26,6 +26,15 @@
 namespace PETScWrappers
 {
+/**
+ * Namespace for PETSc classes that work in parallel over MPI, such as
+ * distributed vectors and matrices.
+ *
+ * @author Wolfgang Bangerth, 2004
+ */
+  namespace MPI
+  {
+
 /**
  * Implementation of a parallel vector class based on PETSc and using MPI
  * communication to synchronise distributed operations. All the functionality
@@ -38,132 +47,134 @@
  *
  * @author Wolfgang Bangerth, 2004
  */
-  class ParallelVector : public VectorBase
-  {
-    public:
-      /**
-       * Default constructor. Initialize the
-       * vector as empty.
-       */
-      ParallelVector ();
+    class Vector : public VectorBase
+    {
+      public:
+        /**
+         * Default constructor. Initialize the
+         * vector as empty.
+         */
+        Vector ();
 
-      /**
-       * Constructor. Set dimension to
-       * @p{n} and initialize all
-       * elements with zero.
-       *
-       * The constructor is made explicit to
-       * avoid accidents like this:
-       * @p{v=0;}. Presumably, the user wants
-       * to set every element of the vector to
-       * zero, but instead, what happens is
-       * this call: @p{v=Vector(0);},
-       * i.e. the vector is replaced by one of
-       * length zero.
-       */
-      explicit ParallelVector (const unsigned int n,
-                               const unsigned int local_size,
-                               const MPI_Comm    &communicator);
+        /**
+         * Constructor. Set dimension to
+         * @p{n} and initialize all
+         * elements with zero.
+         *
+         * The constructor is made explicit to
+         * avoid accidents like this:
+         * @p{v=0;}. Presumably, the user wants
+         * to set every element of the vector to
+         * zero, but instead, what happens is
+         * this call: @p{v=Vector(0);},
+         * i.e. the vector is replaced by one of
+         * length zero.
+         */
+        explicit Vector (const unsigned int n,
+                         const unsigned int local_size,
+                         const MPI_Comm    &communicator);
 
-      /**
-       * Copy-constructor from deal.II
-       * vectors. Sets the dimension to that
-       * of the given vector, and copies all
-       * elements.
-       */
-      template <typename number>
-      explicit ParallelVector (const ::Vector<number> &v,
-                               const unsigned int      local_size,
-                               const MPI_Comm         &communicator);
-
-      /**
-       * Copy-constructor the
-       * values from a PETSc wrapper vector
-       * class.
-       */
-      explicit ParallelVector (const VectorBase  &v,
-                               const unsigned int local_size,
-                               const MPI_Comm    &communicator);
-
-      /**
-       * Set all components of the vector to
-       * the given number @p{s}. Simply pass
-       * this down to the base class, but we
-       * still need to declare this function
-       * to make the example given in the
-       * discussion about making the
-       * constructor explicit work.
-       */
-      ParallelVector & operator = (const PetscScalar s);
-
-      /**
-       * Copy the values of a deal.II vector
-       * (as opposed to those of the PETSc
-       * vector wrapper class) into this
-       * object.
-       */
-      template <typename number>
-      ParallelVector & operator = (const ::Vector<number> &v);
+        /**
+         * Copy-constructor from deal.II
+         * vectors.
+         * Sets the dimension to that
+         * of the given vector, and copies all
+         * elements.
+         */
+        template <typename number>
+        explicit Vector (const ::Vector<number> &v,
+                         const unsigned int      local_size,
+                         const MPI_Comm         &communicator);
+
+        /**
+         * Copy-constructor taking the
+         * values from a PETSc wrapper vector
+         * class.
+         */
+        explicit Vector (const VectorBase  &v,
+                         const unsigned int local_size,
+                         const MPI_Comm    &communicator);
+
+        /**
+         * Set all components of the vector to
+         * the given number @p{s}. Simply pass
+         * this down to the base class, but we
+         * still need to declare this function
+         * to make the example given in the
+         * discussion about making the
+         * constructor explicit work.
+         */
+        Vector & operator = (const PetscScalar s);
+
+        /**
+         * Copy the values of a deal.II vector
+         * (as opposed to those of the PETSc
+         * vector wrapper class) into this
+         * object.
+         */
+        template <typename number>
+        Vector & operator = (const ::Vector<number> &v);
 
-    protected:
-      /**
-       * Create a vector of length @p{n}. For
-       * this class, we create a parallel
-       * vector. @arg n denotes the total
-       * size of the vector to be
-       * created. @arg local_size denotes how
-       * many of these elements shall be
-       * stored locally. The last argument is
-       * ignored for sequential vectors.
-       */
-      virtual void create_vector (const unsigned int n,
-                                  const unsigned int local_size);
-
-    private:
-      /**
-       * Copy of the communicator object to
-       * be used for this parallel vector.
-       */
-      MPI_Comm communicator;
-  };
+      protected:
+        /**
+         * Create a vector of length @p{n}. For
+         * this class, we create a parallel
+         * vector. @arg n denotes the total
+         * size of the vector to be
+         * created. @arg local_size denotes how
+         * many of these elements shall be
+         * stored locally. The last argument is
+         * ignored for sequential vectors.
+         */
+        virtual void create_vector (const unsigned int n,
+                                    const unsigned int local_size);
+
+      private:
+        /**
+         * Copy of the communicator object to
+         * be used for this parallel vector.
+         */
+        MPI_Comm communicator;
+    };
 
 
 // ------------------ template and inline functions -------------
 
 
-  template <typename number>
-  ParallelVector::ParallelVector (const ::Vector<number> &v,
-                                  const unsigned int      local_size,
-                                  const MPI_Comm         &communicator)
-                  :
-                  communicator (communicator)
-  {
-    ParallelVector::create_vector (v.size(), local_size);
-
-    VectorBase::operator = (v);
-  }
+    template <typename number>
+    Vector::Vector (const ::Vector<number> &v,
+                    const unsigned int      local_size,
+                    const MPI_Comm         &communicator)
+                    :
+                    communicator (communicator)
+    {
+      Vector::create_vector (v.size(), local_size);
+
+      VectorBase::operator = (v);
+    }
 
 
-  inline
-  ParallelVector &
-  ParallelVector::operator = (const PetscScalar s)
-  {
-    VectorBase::operator = (s);
-
-    return *this;
-  }
+    inline
+    Vector &
+    Vector::operator = (const PetscScalar s)
+    {
+      VectorBase::operator = (s);
+
+      return *this;
+    }
 
 
-  template <typename number>
-  inline
-  ParallelVector &
-  ParallelVector::operator = (const ::Vector<number> &v)
-  {
-    VectorBase::operator = (v);
-
-    return *this;
-  }
+    template <typename number>
+    inline
+    Vector &
+    Vector::operator = (const ::Vector<number> &v)
+    {
+      VectorBase::operator = (v);
+
+      return *this;
+    }
+  }
 }

diff --git a/deal.II/lac/source/petsc_parallel_vector.cc b/deal.II/lac/source/petsc_parallel_vector.cc
index 7cc8a18451..93371a5689 100644
--- a/deal.II/lac/source/petsc_parallel_vector.cc
+++ b/deal.II/lac/source/petsc_parallel_vector.cc
@@ -21,59 +21,63 @@
 
 namespace PETScWrappers
 {
-
-
-  ParallelVector::ParallelVector ()
+  namespace MPI
   {
-    // this is an invalid empty vector, so we
-    // can just as well create a sequential
-    // one to avoid all the overhead incurred
-    // by parallelism
-    const int n = 0;
-    const int ierr
-      = VecCreateSeq (PETSC_COMM_SELF, n, &vector);
-    AssertThrow (ierr == 0, ExcPETScError(ierr));
-  }
-
-
-  ParallelVector::ParallelVector (const unsigned int n,
-                                  const unsigned int local_size,
-                                  const MPI_Comm    &communicator)
-    :
-    communicator (communicator)
-  {
-    ParallelVector::create_vector (n, local_size);
-  }
+    Vector::Vector ()
+    {
+      // this is an invalid empty vector, so we
+      // can just as well create a sequential
+      // one to avoid all the overhead incurred
+      // by parallelism
+      const int n = 0;
+      const int ierr
+        = VecCreateSeq (PETSC_COMM_SELF, n, &vector);
+      AssertThrow (ierr == 0, ExcPETScError(ierr));
+    }
+
+
+
+    Vector::Vector (const unsigned int n,
+                    const unsigned int local_size,
+                    const MPI_Comm    &communicator)
+      :
+      communicator (communicator)
+    {
+      Vector::create_vector (n, local_size);
+    }
 
 
-  ParallelVector::ParallelVector (const VectorBase  &v,
-                                  const unsigned int local_size,
-                                  const MPI_Comm    &communicator)
-    :
-    communicator (communicator)
-  {
-    ParallelVector::create_vector (v.size(), local_size);
+    Vector::Vector (const VectorBase  &v,
+                    const unsigned int local_size,
+                    const MPI_Comm    &communicator)
+      :
+      communicator (communicator)
+    {
+      Vector::create_vector (v.size(), local_size);
 
-    VectorBase::operator = (v);
-  }
+      VectorBase::operator = (v);
+    }
 
 
-  void
-  ParallelVector::create_vector (const unsigned int n,
-                                 const unsigned int local_size)
-  {
-    Assert (local_size < n, ExcIndexRange (local_size, 0, n));
+    void
+    Vector::create_vector (const unsigned int n,
+                           const unsigned int local_size)
+    {
+      Assert (local_size <= n, ExcIndexRange (local_size, 0, n+1));
+
+      const int ierr
+        = VecCreateMPI (communicator, local_size, n, &vector);
+      AssertThrow (ierr == 0, ExcPETScError(ierr));
+    }
 
-    const int ierr
-      = VecCreateMPI (PETSC_COMM_SELF, local_size, n, &vector);
-    AssertThrow (ierr == 0, ExcPETScError(ierr));
   }
+
 }
 
 #else
 
 // On gcc2.95 on Alpha OSF1, the native assembler does not like empty
 // files, so provide some dummy code
-namespace { void dummy () {} }
+  namespace { void dummy () {} }
 
 #endif // DEAL_II_USE_PETSC
-- 
2.39.5
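
Editor's note, not part of the patch: below is a minimal usage sketch of the
renamed class. It is written under assumptions the patch itself does not
establish: PetscInitialize() has already been called (which also initializes
MPI), the header is reachable as <lac/petsc_parallel_vector.h>, and the
global size divides evenly among the processes of the communicator. The
function name "example" and the variable "n_local" are hypothetical, chosen
only for illustration.

    #include <lac/petsc_parallel_vector.h>

    void example (const MPI_Comm &communicator)
    {
      int n_processes;
      MPI_Comm_size (communicator, &n_processes);

      // global size of the vector, and the (assumed even) share of
      // elements stored on this process
      const unsigned int n       = 100;
      const unsigned int n_local = n / n_processes;

      // the class formerly known as PETScWrappers::ParallelVector:
      // n elements in total, n_local of them stored locally
      PETScWrappers::MPI::Vector v (n, n_local, communicator);

      // set every element to 1; forwarded to VectorBase::operator=
      v = 1.;
    }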