From efb96ff1ea58b2ea3e538736d39009268a6b9c43 Mon Sep 17 00:00:00 2001
From: Wolfgang Bangerth
Date: Sat, 22 Jul 2023 12:08:49 -0600
Subject: [PATCH] Allow compilation with PETSc but without MPI.

---
 source/lac/petsc_communication_pattern.cc | 80 +++++++++++++++++++++--
 1 file changed, 76 insertions(+), 4 deletions(-)

diff --git a/source/lac/petsc_communication_pattern.cc b/source/lac/petsc_communication_pattern.cc
index 78d60402a3..c2eb6c87b6 100644
--- a/source/lac/petsc_communication_pattern.cc
+++ b/source/lac/petsc_communication_pattern.cc
@@ -37,11 +37,15 @@ namespace PETScWrappers
     : sf(nullptr)
   {}
 
+
+
   CommunicationPattern::~CommunicationPattern()
   {
     clear();
   }
 
+
+
   void
   CommunicationPattern::reinit(const types::global_dof_index local_size,
                                const IndexSet &              ghost_indices,
@@ -78,6 +82,8 @@ namespace PETScWrappers
     AssertPETSc(PetscLayoutDestroy(&layout));
   }
 
+
+
   void
   CommunicationPattern::reinit(const IndexSet &locally_owned_indices,
                                const IndexSet &ghost_indices,
@@ -96,6 +102,8 @@ namespace PETScWrappers
     this->do_reinit(in_petsc, dummy, out_petsc, dummy, communicator);
   }
 
+
+
   void
   CommunicationPattern::reinit(
     const std::vector<types::global_dof_index> &indices_has,
@@ -149,6 +157,8 @@ namespace PETScWrappers
                   communicator);
   }
 
+
+
   void
   CommunicationPattern::do_reinit(const std::vector<PetscInt> &inidx,
                                   const std::vector<PetscInt> &inloc,
@@ -243,50 +253,80 @@ namespace PETScWrappers
     AssertPETSc(PetscSFDestroy(&sf2));
   }
 
+
+
   void
   CommunicationPattern::clear()
   {
     AssertPETSc(PetscSFDestroy(&sf));
   }
 
+
+
   MPI_Comm
   CommunicationPattern::get_mpi_communicator() const
   {
     return PetscObjectComm(reinterpret_cast<PetscObject>(sf));
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::export_to_ghosted_array_start(
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+# ifdef DEAL_II_WITH_MPI
     auto datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
-# if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
+#  if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
     AssertPETSc(PetscSFBcastBegin(sf, datatype, src.data(), dst.data()));
-# else
+#  else
     AssertPETSc(
       PetscSFBcastBegin(sf, datatype, src.data(), dst.data(), MPI_REPLACE));
+#  endif
+
+# else
+
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
 # endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::export_to_ghosted_array_finish(
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+# ifdef DEAL_II_WITH_MPI
     auto datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
-# if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
+#  if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
     AssertPETSc(PetscSFBcastEnd(sf, datatype, src.data(), dst.data()));
-# else
+#  else
     AssertPETSc(
       PetscSFBcastEnd(sf, datatype, src.data(), dst.data(), MPI_REPLACE));
+#  endif
+
+# else
+
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
 # endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::export_to_ghosted_array(
@@ -297,6 +337,8 @@ namespace PETScWrappers
     export_to_ghosted_array_finish(src, dst);
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::import_from_ghosted_array_start(
@@ -304,13 +346,26 @@ namespace PETScWrappers
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+# ifdef DEAL_II_WITH_MPI
     MPI_Op mpiop = (op == VectorOperation::insert) ? MPI_REPLACE : MPI_SUM;
     auto datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
     AssertPETSc(
       PetscSFReduceBegin(sf, datatype, src.data(), dst.data(), mpiop));
+
+# else
+
+    (void)op;
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
+# endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::import_from_ghosted_array_finish(
@@ -318,12 +373,25 @@ namespace PETScWrappers
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+# ifdef DEAL_II_WITH_MPI
     MPI_Op mpiop = (op == VectorOperation::insert) ? MPI_REPLACE : MPI_SUM;
     auto datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
     AssertPETSc(PetscSFReduceEnd(sf, datatype, src.data(), dst.data(), mpiop));
+
+# else
+
+    (void)op;
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
+# endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::import_from_ghosted_array(
@@ -335,6 +403,8 @@ namespace PETScWrappers
     import_from_ghosted_array_finish(op, src, dst);
   }
 
+
+
   // Partitioner
 
   Partitioner::Partitioner()
@@ -345,6 +415,8 @@ namespace PETScWrappers
     , n_ghost_indices_larger(numbers::invalid_dof_index)
   {}
 
+
+
   void
   Partitioner::reinit(const IndexSet &locally_owned_indices,
                       const IndexSet &ghost_indices,
-- 
2.39.5
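
Every hunk in this patch applies the same guard: the PetscSF calls, which require an MPI-enabled PETSc, are compiled only under #ifdef DEAL_II_WITH_MPI, and the serial branch casts its arguments to void (to avoid unused-parameter warnings) and trips an assertion, since a run without MPI has nothing to import or export. Below is a minimal standalone sketch of that pattern, assuming stand-in names (HAVE_MPI for DEAL_II_WITH_MPI, a free function export_values() for the CommunicationPattern members, and plain assert() for Assert()/ExcMessage()); it is not deal.II code.

// Standalone sketch of the guard pattern used in the patch above; the
// names HAVE_MPI, export_values(), and assert() are illustrative stand-ins.
#include <cassert>
#include <cstddef>
#include <vector>

void
export_values(const std::vector<double> &src, std::vector<double> &dst)
{
#ifdef HAVE_MPI
  // MPI build: this is where the real exchange (PetscSFBcastBegin/End in
  // the patch) would run; a local copy stands in for it here.
  for (std::size_t i = 0; i < dst.size() && i < src.size(); ++i)
    dst[i] = src[i];
#else
  // Serial build: there are no ghost entries to fill, so consume the
  // arguments to silence unused-parameter warnings and fail loudly if the
  // function is ever reached.
  (void)src;
  (void)dst;
  assert(false && "running without MPI: nothing to import or export");
#endif
}

int
main()
{
  std::vector<double> owned = {1.0, 2.0, 3.0, 4.0};
  std::vector<double> ghosts(2, 0.0);
#ifdef HAVE_MPI
  export_values(owned, ghosts); // meaningful only in an MPI-enabled build
#else
  (void)owned;  // in a serial build the exchange is never requested,
  (void)ghosts; // mirroring the assertion branch added by the patch
#endif
  return 0;
}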