From: Stefano Zampini
Date: Sun, 16 Apr 2023 13:34:09 +0000 (+0300)
Subject: PETScWrappers: Fix block objects update operations
X-Git-Tag: v9.5.0-rc1~307^2~4
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=b45f02367aa16cd6a4d73b8527307eee80f57a06;p=dealii.git

PETScWrappers: Fix block objects update operations
---

diff --git a/include/deal.II/lac/block_vector_base.h b/include/deal.II/lac/block_vector_base.h
index 858adac8f7..01bff8f4ef 100644
--- a/include/deal.II/lac/block_vector_base.h
+++ b/include/deal.II/lac/block_vector_base.h
@@ -498,7 +498,7 @@ public:
   collect_sizes();
 
   /**
-   * Call the compress() function on all the subblocks of the matrix.
+   * Call the compress() function on all the subblocks of the vector.
    *
    * This functionality only needs to be called if using MPI based vectors and
    * exists in other objects for compatibility.
diff --git a/include/deal.II/lac/petsc_block_sparse_matrix.h b/include/deal.II/lac/petsc_block_sparse_matrix.h
index 7b9a67ff95..397725d162 100644
--- a/include/deal.II/lac/petsc_block_sparse_matrix.h
+++ b/include/deal.II/lac/petsc_block_sparse_matrix.h
@@ -260,6 +260,20 @@ namespace PETScWrappers
       void
       collect_sizes();
 
+      /**
+       * Call the compress() function on all the subblocks of the matrix
+       * and update the internal state of the PETSc object.
+       *
+       * This functionality is needed because PETSc may use this information
+       * to decide whether or not to rebuild the preconditioner.
+       *
+       * See
+       * @ref GlossCompress "Compressing distributed objects"
+       * for more information.
+       */
+      void
+      compress(VectorOperation::values operation);
+
       /**
        * Return the partitioning of the domain space of this matrix, i.e., the
        * partitioning of the vectors this matrix has to be multiplied with.
@@ -326,6 +340,20 @@ namespace PETScWrappers
        * blocks are the blocks of this matrix.
        */
       Mat petsc_nest_matrix = nullptr;
+
+      /**
+       * Utility to set up the MATNEST object.
+       */
+      void
+      setup_nest_mat();
+
+      /**
+       * A utility method to populate empty blocks with actual objects.
+       * This is needed because MATNEST supports nullptr as a block,
+       * while the BlockMatrixBase class does not.
+       */
+      void
+      create_empty_matrices_if_needed();
     };
 
diff --git a/include/deal.II/lac/petsc_block_vector.h b/include/deal.II/lac/petsc_block_vector.h
index ed85850912..17e27e3b87 100644
--- a/include/deal.II/lac/petsc_block_vector.h
+++ b/include/deal.II/lac/petsc_block_vector.h
@@ -249,6 +249,20 @@ namespace PETScWrappers
       void
       collect_sizes();
 
+      /**
+       * Call the compress() function on all the subblocks of the vector
+       * and update the internal state of the nested PETSc vector.
+       *
+       * This functionality is needed because PETSc may cache vector norms for
+       * performance reasons.
+       *
+       * See
+       * @ref GlossCompress "Compressing distributed objects"
+       * for more information.
+       */
+      void
+      compress(VectorOperation::values operation);
+
       /**
        * Change the number of blocks to num_blocks. The individual
        * blocks will get initialized with zero size, so it is assumed that the
@@ -327,10 +341,19 @@ namespace PETScWrappers
       DeclException0(ExcNonMatchingBlockVectors);
 
     private:
+      /**
+       * A PETSc Vec object that describes the entire block vector.
+       * Internally, this is done by creating a "nested" vector using
+       * PETSc's VECNEST object whose individual blocks are the blocks
+       * of this vector.
+       */
+      Vec petsc_nest_vector;
+
+      /**
+       * Utility to set up the VECNEST object.
+       */
       void
       setup_nest_vec();
-
-      Vec petsc_nest_vector;
     };
 
     /** @} */
 
diff --git a/source/lac/petsc_parallel_block_sparse_matrix.cc b/source/lac/petsc_parallel_block_sparse_matrix.cc
index fdaa00f15f..a8faa324eb 100644
--- a/source/lac/petsc_parallel_block_sparse_matrix.cc
+++ b/source/lac/petsc_parallel_block_sparse_matrix.cc
@@ -18,6 +18,9 @@
 
 #ifdef DEAL_II_WITH_PETSC
 
+// For PetscObjectStateIncrease
+#  include <petsc/private/petscimpl.h>
+
 namespace
 {
   // A dummy utility routine to create an empty matrix in case we import
@@ -157,7 +160,7 @@ namespace PETScWrappers
 
 
     void
-    BlockSparseMatrix::collect_sizes()
+    BlockSparseMatrix::create_empty_matrices_if_needed()
     {
       auto m = this->n_block_rows();
       auto n = this->n_block_cols();
@@ -225,15 +228,35 @@ namespace PETScWrappers
                 }
             }
         }
+    }
+
+    void
+    BlockSparseMatrix::collect_sizes()
+    {
+      this->create_empty_matrices_if_needed();
 
       BaseClass::collect_sizes();
+      this->setup_nest_mat();
+    }
+
+    void
+    BlockSparseMatrix::setup_nest_mat()
+    {
+      auto m = this->n_block_rows();
+      auto n = this->n_block_cols();
+      PetscErrorCode ierr;
+
+      MPI_Comm comm = PETSC_COMM_SELF;
 
       ierr = destroy_matrix(petsc_nest_matrix);
       AssertThrow(ierr == 0, ExcPETScError(ierr));
       std::vector<Mat> psub_objects(m * n);
       for (unsigned int r = 0; r < m; r++)
         for (unsigned int c = 0; c < n; c++)
-          psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
+          {
+            comm = this->sub_objects[r][c]->get_mpi_communicator();
+            psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
+          }
       ierr = MatCreateNest(
         comm, m, nullptr, n, nullptr, psub_objects.data(), &petsc_nest_matrix);
       AssertThrow(ierr == 0, ExcPETScError(ierr));
@@ -241,6 +264,17 @@ namespace PETScWrappers
 
 
 
+    void
+    BlockSparseMatrix::compress(VectorOperation::values operation)
+    {
+      BaseClass::compress(operation);
+      PetscErrorCode ierr = PetscObjectStateIncrease(
+        reinterpret_cast<PetscObject>(petsc_nest_matrix));
+      AssertThrow(ierr == 0, ExcPETScError(ierr));
+    }
+
+
+
     std::vector<IndexSet>
     BlockSparseMatrix::locally_owned_domain_indices() const
     {
@@ -318,6 +352,7 @@ namespace PETScWrappers
                                      &isnest);
       AssertThrow(ierr == 0, ExcPETScError(ierr));
       std::vector<Mat> mats;
+      bool need_empty_matrices = false;
       if (isnest)
         {
           ierr = MatNestGetSize(A, &nr, &nc);
@@ -329,6 +364,8 @@ namespace PETScWrappers
                   Mat sA;
                   ierr = MatNestGetSubMat(A, i, j, &sA);
                   mats.push_back(sA);
+                  if (!sA)
+                    need_empty_matrices = true;
                 }
             }
         }
@@ -352,8 +389,22 @@ namespace PETScWrappers
               this->sub_objects[i][j] = nullptr;
             }
         }
+      if (need_empty_matrices)
+        this->create_empty_matrices_if_needed();
 
-      this->collect_sizes();
+      BaseClass::collect_sizes();
+      if (need_empty_matrices || !isnest)
+        {
+          setup_nest_mat();
+        }
+      else
+        {
+          ierr = PetscObjectReference(reinterpret_cast<PetscObject>(A));
+          AssertThrow(ierr == 0, ExcPETScError(ierr));
+          PetscErrorCode ierr = MatDestroy(&petsc_nest_matrix);
+          AssertThrow(ierr == 0, ExcPETScError(ierr));
+          petsc_nest_matrix = A;
+        }
     }
 
   } // namespace MPI
diff --git a/source/lac/petsc_parallel_block_vector.cc b/source/lac/petsc_parallel_block_vector.cc
index 3cb802877c..27ff0a5db2 100644
--- a/source/lac/petsc_parallel_block_vector.cc
+++ b/source/lac/petsc_parallel_block_vector.cc
@@ -17,6 +17,9 @@
 
 #ifdef DEAL_II_WITH_PETSC
 
+// For PetscObjectStateIncrease
+#  include <petsc/private/petscimpl.h>
+
 DEAL_II_NAMESPACE_OPEN
 
 namespace PETScWrappers
@@ -84,7 +87,17 @@ namespace PETScWrappers
             this->components[i].reinit(sv[i]);
         }
 
-      this->collect_sizes();
+      BlockVectorBase::collect_sizes();
+      if (!isnest)
+        setup_nest_vec();
+      else
+        {
+          ierr = PetscObjectReference(reinterpret_cast<PetscObject>(v));
+          AssertThrow(ierr == 0, ExcPETScError(ierr));
+          PetscErrorCode ierr = VecDestroy(&petsc_nest_vector);
+          AssertThrow(ierr == 0, ExcPETScError(ierr));
+          petsc_nest_vector = v;
+        }
     }
 
     Vec &
@@ -105,6 +118,15 @@ namespace PETScWrappers
       setup_nest_vec();
     }
 
+    void
+    BlockVector::compress(VectorOperation::values operation)
+    {
+      BlockVectorBase::compress(operation);
+      PetscErrorCode ierr = PetscObjectStateIncrease(
+        reinterpret_cast<PetscObject>(petsc_nest_vector));
+      AssertThrow(ierr == 0, ExcPETScError(ierr));
+    }
+
     void
     BlockVector::setup_nest_vec()
     {
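
A minimal usage sketch (not taken from this commit; the function name
assemble_and_compress and the assembly placeholder are illustrative only):
after filling the blocks of a PETScWrappers::MPI::BlockSparseMatrix or
BlockVector, call compress() on the block object itself. Besides calling
compress() on every sub-block, the new overloads increase the PETSc object
state of the wrapped MATNEST/VECNEST object, so that PETSc notices the
change, e.g. when deciding whether to rebuild a preconditioner or whether
a cached vector norm is still valid.

    #include <deal.II/lac/petsc_block_sparse_matrix.h>
    #include <deal.II/lac/petsc_block_vector.h>
    #include <deal.II/lac/vector_operation.h>

    using namespace dealii;

    void assemble_and_compress(PETScWrappers::MPI::BlockSparseMatrix &matrix,
                               PETScWrappers::MPI::BlockVector &      rhs)
    {
      // ... add entries to matrix.block(i, j) and rhs.block(i) here ...

      // Flush MPI communication for all sub-blocks and bump the state of
      // the nested PETSc objects wrapped by the block matrix/vector.
      matrix.compress(VectorOperation::add);
      rhs.compress(VectorOperation::add);
    }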