From: Marc Fehling
Date: Fri, 2 Dec 2022 21:11:20 +0000 (-0700)
Subject: Choose to initialize ghost elements with reinit(partitioner).
X-Git-Tag: v9.5.0-rc1~176^2
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=0d962c827dbbb283f4f641aa040b9bb607ffd19c;p=dealii.git

Choose to initialize ghost elements with reinit(partitioner).
---

diff --git a/doc/news/changes/minor/20221205Fehling b/doc/news/changes/minor/20221205Fehling
new file mode 100644
index 0000000000..bd7eba9946
--- /dev/null
+++ b/doc/news/changes/minor/20221205Fehling
@@ -0,0 +1,5 @@
+New: TrilinosWrappers::MPI::Vector and PETScWrappers::MPI::Vector now both have
+reinit functions that take a Utilities::MPI::Partitioner as an argument, so
+their interface is compatible with LinearAlgebra::distributed::Vector.
+
+(Marc Fehling, 2022/12/05)

diff --git a/include/deal.II/lac/la_parallel_vector.h b/include/deal.II/lac/la_parallel_vector.h
index 3ba62fd927..3111356866 100644
--- a/include/deal.II/lac/la_parallel_vector.h
+++ b/include/deal.II/lac/la_parallel_vector.h
@@ -405,6 +405,18 @@ namespace LinearAlgebra
         const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner,
         const MPI_Comm &comm_sm = MPI_COMM_SELF);
 
+      /**
+       * This function exists purely for reasons of compatibility with the
+       * PETScWrappers::MPI::Vector and TrilinosWrappers::MPI::Vector classes.
+       *
+       * It calls the function above, and ignores the parameter @p make_ghosted.
+       */
+      void
+      reinit(
+        const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner,
+        const bool make_ghosted,
+        const MPI_Comm &comm_sm = MPI_COMM_SELF);
+
       /**
        * Initialize vector with @p local_size locally-owned and @p ghost_size
        * ghost degrees of freedoms.
diff --git a/include/deal.II/lac/la_parallel_vector.templates.h b/include/deal.II/lac/la_parallel_vector.templates.h
index 596855e072..ded3d50546 100644
--- a/include/deal.II/lac/la_parallel_vector.templates.h
+++ b/include/deal.II/lac/la_parallel_vector.templates.h
@@ -671,6 +671,18 @@ namespace LinearAlgebra
 
 
 
+    template <typename Number, typename MemorySpace>
+    void
+    Vector<Number, MemorySpace>::reinit(
+      const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner_in,
+      const bool /*make_ghosted*/,
+      const MPI_Comm &comm_sm)
+    {
+      this->reinit(partitioner_in, comm_sm);
+    }
+
+
+
     template <typename Number, typename MemorySpace>
     Vector<Number, MemorySpace>::Vector()
       : partitioner(std::make_shared<Utilities::MPI::Partitioner>())
diff --git a/include/deal.II/lac/petsc_vector.h b/include/deal.II/lac/petsc_vector.h
index 842ec2dee4..0f654eea20 100644
--- a/include/deal.II/lac/petsc_vector.h
+++ b/include/deal.II/lac/petsc_vector.h
@@ -343,10 +343,14 @@ namespace PETScWrappers
       /**
        * Initialize the vector given to the parallel partitioning described in
        * @p partitioner.
+       *
+       * You can decide whether your vector will contain ghost elements with
+       * @p make_ghosted.
        */
       void
       reinit(
-        const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner);
+        const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner,
+        const bool make_ghosted = true);
 
       /**
        * Return a reference to the MPI communicator object in use with this
diff --git a/include/deal.II/lac/trilinos_vector.h b/include/deal.II/lac/trilinos_vector.h
index fb0232102b..63081cfe56 100644
--- a/include/deal.II/lac/trilinos_vector.h
+++ b/include/deal.II/lac/trilinos_vector.h
@@ -604,10 +604,17 @@ namespace TrilinosWrappers
       /**
        * Initialize the vector given to the parallel partitioning described in
        * @p partitioner using the function above.
+       *
+       * You can decide whether your vector will contain ghost elements with
+       * @p make_ghosted.
+       *
+       * The parameter @p vector_writable only has effect on ghosted vectors
+       * and is ignored for non-ghosted vectors.
       */
       void
       reinit(
         const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner,
+        const bool make_ghosted    = true,
         const bool vector_writable = false);
 
       /**
diff --git a/source/lac/petsc_parallel_vector.cc b/source/lac/petsc_parallel_vector.cc
index 87242539d7..97ccf14e13 100644
--- a/source/lac/petsc_parallel_vector.cc
+++ b/source/lac/petsc_parallel_vector.cc
@@ -236,11 +236,24 @@ namespace PETScWrappers
 
     void
     Vector::reinit(
-      const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner)
+      const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner,
+      const bool make_ghosted)
     {
-      this->reinit(partitioner->locally_owned_range(),
-                   partitioner->ghost_indices(),
-                   partitioner->get_mpi_communicator());
+      if (make_ghosted)
+        {
+          Assert(partitioner->ghost_indices_initialized(),
+                 ExcMessage("You asked to create a ghosted vector, but the "
+                            "partitioner does not provide ghost indices."));
+
+          this->reinit(partitioner->locally_owned_range(),
+                       partitioner->ghost_indices(),
+                       partitioner->get_mpi_communicator());
+        }
+      else
+        {
+          this->reinit(partitioner->locally_owned_range(),
+                       partitioner->get_mpi_communicator());
+        }
     }
 
 
diff --git a/source/lac/trilinos_vector.cc b/source/lac/trilinos_vector.cc
index 58063a4a15..8162488900 100644
--- a/source/lac/trilinos_vector.cc
+++ b/source/lac/trilinos_vector.cc
@@ -410,12 +410,25 @@ namespace TrilinosWrappers
     void
     Vector::reinit(
       const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner,
+      const bool make_ghosted,
       const bool vector_writable)
     {
-      this->reinit(partitioner->locally_owned_range(),
-                   partitioner->ghost_indices(),
-                   partitioner->get_mpi_communicator(),
-                   vector_writable);
+      if (make_ghosted)
+        {
+          Assert(partitioner->ghost_indices_initialized(),
+                 ExcMessage("You asked to create a ghosted vector, but the "
+                            "partitioner does not provide ghost indices."));
+
+          this->reinit(partitioner->locally_owned_range(),
+                       partitioner->ghost_indices(),
+                       partitioner->get_mpi_communicator(),
+                       vector_writable);
+        }
+      else
+        {
+          this->reinit(partitioner->locally_owned_range(),
+                       partitioner->get_mpi_communicator());
+        }
     }
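---

Editor's note: to illustrate what this change enables, below is a minimal usage sketch
(not part of the commit) of the now-uniform reinit(partitioner, make_ghosted) interface.
It assumes a deal.II build configured with both PETSc and Trilinos; the helper
make_ghosted_vector and the ten-entries-per-rank partitioning are invented purely for
illustration.

#include <deal.II/base/index_set.h>
#include <deal.II/base/mpi.h>
#include <deal.II/base/partitioner.h>

#include <deal.II/lac/la_parallel_vector.h>
#include <deal.II/lac/petsc_vector.h>
#include <deal.II/lac/trilinos_vector.h>

#include <memory>

using namespace dealii;

// One helper serves all three vector types, because they now share the
// reinit(partitioner, make_ghosted) signature added by this patch.
// LinearAlgebra::distributed::Vector simply ignores the make_ghosted flag.
template <typename VectorType>
void
make_ghosted_vector(
  VectorType                                               &vector,
  const std::shared_ptr<const Utilities::MPI::Partitioner> &partitioner)
{
  vector.reinit(partitioner, /*make_ghosted=*/true);
}

int
main(int argc, char *argv[])
{
  Utilities::MPI::MPI_InitFinalize mpi_init(argc, argv, 1);

  const MPI_Comm     comm    = MPI_COMM_WORLD;
  const unsigned int n_procs = Utilities::MPI::n_mpi_processes(comm);
  const unsigned int rank    = Utilities::MPI::this_mpi_process(comm);

  // Hypothetical partitioning: ten contiguous entries per process; each
  // process additionally sees the first entry of its right neighbor as a
  // ghost entry.
  const types::global_dof_index n_global = 10 * n_procs;

  IndexSet owned(n_global);
  owned.add_range(10 * rank, 10 * (rank + 1));

  IndexSet ghosts(n_global);
  if (rank + 1 < n_procs)
    ghosts.add_index(10 * (rank + 1));

  const auto partitioner =
    std::make_shared<const Utilities::MPI::Partitioner>(owned, ghosts, comm);

  // The same partitioner object initializes all three kinds of vectors.
  TrilinosWrappers::MPI::Vector              trilinos_vector;
  PETScWrappers::MPI::Vector                 petsc_vector;
  LinearAlgebra::distributed::Vector<double> la_vector;

  make_ghosted_vector(trilinos_vector, partitioner);
  make_ghosted_vector(petsc_vector, partitioner);
  make_ghosted_vector(la_vector, partitioner);
}

This interchangeability is what the news entry above means by the interfaces now being
compatible.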