From: Wolfgang Bangerth
Date: Sat, 22 Jul 2023 18:08:49 +0000 (-0600)
Subject: Allow compilation with PETSc but without MPI.
X-Git-Tag: v9.5.2~6^2
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=refs%2Fpull%2F15788%2Fhead;p=dealii.git

Allow compilation with PETSc but without MPI.
---

diff --git a/source/lac/petsc_communication_pattern.cc b/source/lac/petsc_communication_pattern.cc
index 78d60402a3..c2eb6c87b6 100644
--- a/source/lac/petsc_communication_pattern.cc
+++ b/source/lac/petsc_communication_pattern.cc
@@ -37,11 +37,15 @@ namespace PETScWrappers
     : sf(nullptr)
   {}
 
+
+
   CommunicationPattern::~CommunicationPattern()
   {
     clear();
   }
 
+
+
   void
   CommunicationPattern::reinit(const types::global_dof_index local_size,
                                const IndexSet &              ghost_indices,
@@ -78,6 +82,8 @@ namespace PETScWrappers
     AssertPETSc(PetscLayoutDestroy(&layout));
   }
 
+
+
   void
   CommunicationPattern::reinit(const IndexSet &locally_owned_indices,
                                const IndexSet &ghost_indices,
@@ -96,6 +102,8 @@ namespace PETScWrappers
     this->do_reinit(in_petsc, dummy, out_petsc, dummy, communicator);
   }
 
+
+
   void
   CommunicationPattern::reinit(
     const std::vector<types::global_dof_index> &indices_has,
@@ -149,6 +157,8 @@ namespace PETScWrappers
       communicator);
   }
 
+
+
   void
   CommunicationPattern::do_reinit(const std::vector<PetscInt> &inidx,
                                   const std::vector<PetscInt> &inloc,
@@ -243,50 +253,80 @@ namespace PETScWrappers
     AssertPETSc(PetscSFDestroy(&sf2));
   }
 
+
+
   void
   CommunicationPattern::clear()
   {
     AssertPETSc(PetscSFDestroy(&sf));
   }
 
+
+
   MPI_Comm
   CommunicationPattern::get_mpi_communicator() const
   {
     return PetscObjectComm(reinterpret_cast<PetscObject>(sf));
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::export_to_ghosted_array_start(
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+#  ifdef DEAL_II_WITH_MPI
     auto datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
-#  if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
+#    if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
     AssertPETSc(PetscSFBcastBegin(sf, datatype, src.data(), dst.data()));
-#  else
+#    else
     AssertPETSc(
       PetscSFBcastBegin(sf, datatype, src.data(), dst.data(), MPI_REPLACE));
+#    endif
+
+#  else
+
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
 #  endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::export_to_ghosted_array_finish(
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+#  ifdef DEAL_II_WITH_MPI
     auto datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
-#  if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
+#    if DEAL_II_PETSC_VERSION_LT(3, 15, 0)
     AssertPETSc(PetscSFBcastEnd(sf, datatype, src.data(), dst.data()));
-#  else
+#    else
     AssertPETSc(
       PetscSFBcastEnd(sf, datatype, src.data(), dst.data(), MPI_REPLACE));
+#    endif
+
+#  else
+
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
 #  endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::export_to_ghosted_array(
@@ -297,6 +337,8 @@ namespace PETScWrappers
     export_to_ghosted_array_finish(src, dst);
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::import_from_ghosted_array_start(
@@ -304,13 +346,26 @@ namespace PETScWrappers
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+#  ifdef DEAL_II_WITH_MPI
     MPI_Op mpiop = (op == VectorOperation::insert) ? MPI_REPLACE : MPI_SUM;
     auto   datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
     AssertPETSc(
       PetscSFReduceBegin(sf, datatype, src.data(), dst.data(), mpiop));
+
+#  else
+
+    (void)op;
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
+#  endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::import_from_ghosted_array_finish(
@@ -318,12 +373,25 @@ namespace PETScWrappers
     const ArrayView<const Number> &src,
     const ArrayView<Number> &      dst) const
   {
+#  ifdef DEAL_II_WITH_MPI
     MPI_Op mpiop = (op == VectorOperation::insert) ? MPI_REPLACE : MPI_SUM;
     auto   datatype = Utilities::MPI::mpi_type_id_for_type<Number>;
 
     AssertPETSc(PetscSFReduceEnd(sf, datatype, src.data(), dst.data(), mpiop));
+
+#  else
+
+    (void)op;
+    (void)src;
+    (void)dst;
+    Assert(false,
+           ExcMessage("This program is running without MPI. There should "
+                      "not be anything to import or export!"));
+#  endif
   }
 
+
+
   template <typename Number>
   void
   CommunicationPattern::import_from_ghosted_array(
@@ -335,6 +403,8 @@ namespace PETScWrappers
     import_from_ghosted_array_finish(op, src, dst);
   }
 
+
+
   // Partitioner
 
   Partitioner::Partitioner()
@@ -345,6 +415,8 @@ namespace PETScWrappers
     , n_ghost_indices_larger(numbers::invalid_dof_index)
   {}
 
+
+
   void
   Partitioner::reinit(const IndexSet &locally_owned_indices,
                       const IndexSet &ghost_indices,
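
The patch applies the same compile-time guard to every MPI-dependent member function: when deal.II is configured without MPI, the body is replaced by (void) casts that silence unused-parameter warnings plus an assertion that the code path is never reached. Below is a minimal, self-contained sketch of that pattern outside of deal.II; the macro HAVE_MPI and the functions broadcast() and report_error() are illustrative placeholders, not symbols from this commit or from the deal.II library.

// Sketch of the guard pattern used throughout the patch (illustrative only).
#include <cstdlib>
#include <iostream>
#include <vector>

#ifdef HAVE_MPI
#  include <mpi.h>
#endif

// Stand-in for deal.II's Assert(false, ExcMessage(...)): print and abort.
void report_error(const char *message)
{
  std::cerr << message << std::endl;
  std::abort();
}

// Broadcast `values` from rank 0 to all ranks. The MPI-dependent body is
// compiled only when MPI support is available; a serial build keeps the same
// interface but must never actually reach this function.
void broadcast(std::vector<double> &values)
{
#ifdef HAVE_MPI
  MPI_Bcast(values.data(),
            static_cast<int>(values.size()),
            MPI_DOUBLE,
            0,
            MPI_COMM_WORLD);
#else
  (void)values; // silence unused-parameter warnings, as the patch does
  report_error("This program is running without MPI. There should "
               "not be anything to communicate!");
#endif
}

int main()
{
#ifdef HAVE_MPI
  MPI_Init(nullptr, nullptr);
  std::vector<double> values = {1.0, 2.0, 3.0};
  broadcast(values);
  MPI_Finalize();
#else
  // A serial run has no ghost data to exchange, so broadcast() is never
  // called; calling it anyway would trigger report_error().
#endif
}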