From 49a247a8dfa132b2b42e86c4918d5f3f9a948d68 Mon Sep 17 00:00:00 2001
From: Stefano Zampini
Date: Mon, 10 Apr 2023 16:49:46 +0300
Subject: [PATCH] PetscWrappers::BlockSparseMatrix: add constructor from an
 array of matrices

Fix the case of NULL blocks and add tests.
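A minimal usage sketch of the new constructor (hypothetical example:
A00 and A11 stand for previously assembled PETSc Mat handles of
compatible sizes; a nullptr entry denotes an empty block):

    std::array<std::array<Mat, 2>, 2> blocks = {{{{A00, nullptr}},
                                                 {{nullptr, A11}}}};
    PETScWrappers::MPI::BlockSparseMatrix B(blocks);

The Mat objects are wrapped, not copied, and nullptr entries are
replaced by empty matrices of matching sizes in collect_sizes().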
---
 .../deal.II/lac/petsc_block_sparse_matrix.h   |  24 ++
 .../lac/petsc_parallel_block_sparse_matrix.cc | 103 +++++++++++++++---
 tests/petsc/block_matrices_01.cc              |  82 ++++++++++++++
 3 files changed, 194 insertions(+), 15 deletions(-)

diff --git a/include/deal.II/lac/petsc_block_sparse_matrix.h b/include/deal.II/lac/petsc_block_sparse_matrix.h
index 33d3ef92a6..7b9a67ff95 100644
--- a/include/deal.II/lac/petsc_block_sparse_matrix.h
+++ b/include/deal.II/lac/petsc_block_sparse_matrix.h
@@ -111,6 +111,13 @@ namespace PETScWrappers
      */
     explicit BlockSparseMatrix(const Mat &);
 
+    /**
+     * Create a BlockSparseMatrix with an array of PETSc matrices.
+     */
+    template <std::size_t block_rows, std::size_t block_columns>
+    BlockSparseMatrix(
+      const std::array<std::array<Mat, block_columns>, block_rows> &);
+
     /**
      * Destructor.
      */
@@ -333,6 +340,23 @@ namespace PETScWrappers
     this->reinit(A);
   }
 
+  template <std::size_t block_rows, std::size_t block_columns>
+  inline BlockSparseMatrix::BlockSparseMatrix(
+    const std::array<std::array<Mat, block_columns>, block_rows> &arrayA)
+  {
+    this->reinit(block_rows, block_columns);
+    this->sub_objects.reinit(block_rows, block_columns);
+    for (std::size_t r = 0; r < block_rows; ++r)
+      for (std::size_t c = 0; c < block_columns; ++c)
+        {
+          if (arrayA[r][c])
+            this->sub_objects[r][c] = new BlockType(arrayA[r][c]);
+          else
+            this->sub_objects[r][c] = nullptr;
+        }
+    this->collect_sizes();
+  }
+
   inline BlockSparseMatrix &
   BlockSparseMatrix::operator=(const double d)
   {
diff --git a/source/lac/petsc_parallel_block_sparse_matrix.cc b/source/lac/petsc_parallel_block_sparse_matrix.cc
index 5b97e58acb..ef3d58a2b3 100644
--- a/source/lac/petsc_parallel_block_sparse_matrix.cc
+++ b/source/lac/petsc_parallel_block_sparse_matrix.cc
@@ -18,6 +18,38 @@
 
 #ifdef DEAL_II_WITH_PETSC
 
+// A dummy utility routine to create an empty matrix in case we import
+// a MATNEST with NULL blocks
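+// (PETSc's MATNEST itself tolerates NULL sub-blocks; the empty
+// placeholder matrices created by this routine are only needed by the
+// deal.II base class, which expects a valid matrix object in every
+// block.)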
+static void
+createDummyMat(MPI_Comm comm,
+               PetscInt lr,
+               PetscInt gr,
+               PetscInt lc,
+               PetscInt gc,
+               Mat *    dummy)
+{
+  PetscErrorCode ierr;
+
+  ierr = MatCreate(comm, dummy);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatSetSizes(*dummy, lr, lc, gr, gc);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatSetType(*dummy, MATAIJ);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatSeqAIJSetPreallocation(*dummy, 0, nullptr);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatMPIAIJSetPreallocation(*dummy, 0, nullptr, 0, nullptr);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatSetUp(*dummy);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatSetOption(*dummy, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatAssemblyBegin(*dummy, MAT_FINAL_ASSEMBLY);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+  ierr = MatAssemblyEnd(*dummy, MAT_FINAL_ASSEMBLY);
+  AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+}
+
 DEAL_II_NAMESPACE_OPEN
 
 namespace PETScWrappers
@@ -107,7 +139,7 @@ namespace PETScWrappers
           this->sub_objects[r][c] = p;
         }
 
-      collect_sizes();
+      this->collect_sizes();
     }
 
     void
@@ -123,24 +155,63 @@ namespace PETScWrappers
     void
     BlockSparseMatrix::collect_sizes()
     {
-      BaseClass::collect_sizes();
+      auto           m = this->n_block_rows();
+      auto           n = this->n_block_cols();
+      PetscErrorCode ierr;
+
+      // Create empty matrices if needed.
+      // This is needed by the base class, not by MATNEST.
+      std::vector<size_type> row_sizes(m);
+      std::vector<size_type> col_sizes(n);
+      std::vector<size_type> row_local_sizes(m);
+      std::vector<size_type> col_local_sizes(n);
+      MPI_Comm               comm = MPI_COMM_NULL;
+      for (size_type r = 0; r < m; r++)
+        {
+          for (size_type c = 0; c < n; c++)
+            {
+              if (this->sub_objects[r][c])
+                {
+                  comm = this->sub_objects[r][c]->get_mpi_communicator();
+                  row_sizes[r]       = this->sub_objects[r][c]->m();
+                  col_sizes[c]       = this->sub_objects[r][c]->n();
+                  row_local_sizes[r] = this->sub_objects[r][c]->local_size();
+                  col_local_sizes[c] =
+                    this->sub_objects[r][c]->local_domain_size();
+                }
+            }
+        }
+      for (size_type r = 0; r < m; r++)
+        {
+          for (size_type c = 0; c < n; c++)
+            {
+              if (!this->sub_objects[r][c])
+                {
+                  Mat dummy;
+                  createDummyMat(comm,
+                                 static_cast<PetscInt>(row_local_sizes[r]),
+                                 static_cast<PetscInt>(row_sizes[r]),
+                                 static_cast<PetscInt>(col_local_sizes[c]),
+                                 static_cast<PetscInt>(col_sizes[c]),
+                                 &dummy);
+                  this->sub_objects[r][c] = new BlockType(dummy);
+                  ierr = MatDestroy(&dummy);
+                  AssertThrow(ierr == 0, ExcPETScError(ierr));
+                }
+            }
+        }
 
-      auto m = this->n_block_cols();
-      auto n = this->n_block_cols();
+      BaseClass::collect_sizes();
 
-      PetscErrorCode ierr = destroy_matrix(petsc_nest_matrix);
+      ierr = destroy_matrix(petsc_nest_matrix);
       AssertThrow(ierr == 0, ExcPETScError(ierr));
       std::vector<Mat> psub_objects(m * n);
       for (unsigned int r = 0; r < m; r++)
         for (unsigned int c = 0; c < n; c++)
           psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
-      ierr = MatCreateNest(get_mpi_communicator(),
-                           m,
-                           nullptr,
-                           n,
-                           nullptr,
-                           psub_objects.data(),
-                           &petsc_nest_matrix);
+      ierr = MatCreateNest(
+        comm, m, nullptr, n, nullptr, psub_objects.data(), &petsc_nest_matrix);
       AssertThrow(ierr == 0, ExcPETScError(ierr));
     }
 
@@ -251,12 +322,14 @@ namespace PETScWrappers
         {
           for (PetscInt j = 0; j < nc; ++j)
             {
-              // TODO: MATNEST supports NULL blocks
-              this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
+              if (mats[i * nc + j])
+                this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
+              else
+                this->sub_objects[i][j] = nullptr;
             }
         }
 
-      collect_sizes();
+      this->collect_sizes();
     }
 
   } // namespace MPI
diff --git a/tests/petsc/block_matrices_01.cc b/tests/petsc/block_matrices_01.cc
index d2a2fb9f81..aca37f1457 100644
--- a/tests/petsc/block_matrices_01.cc
+++ b/tests/petsc/block_matrices_01.cc
@@ -91,6 +91,88 @@ test()
                  ExcInternalError());
         }
     }
+
+  // Extract the PETSc MATNEST, create an array of PETSc matrices, and assign
+  // it to a new BlockSparseMatrix
+  std::array<Mat, 2> arrayRows0 = {
+    {tmp2.block(0, 0).petsc_matrix(), tmp2.block(0, 1).petsc_matrix()}};
+  std::array<Mat, 2> arrayRows1 = {
+    {tmp2.block(1, 0).petsc_matrix(), tmp2.block(1, 1).petsc_matrix()}};
+  std::array<std::array<Mat, 2>, 2> arrayMat = {{arrayRows0, arrayRows1}};
+
+  PETScWrappers::MPI::BlockSparseMatrix tmp3(arrayMat);
+  Assert(tmp3.n_block_rows() == pbsm.n_block_rows(), ExcInternalError());
+  Assert(tmp3.n_block_cols() == pbsm.n_block_cols(), ExcInternalError());
+  Assert(tmp3.m() == pbsm.m(), ExcInternalError());
+  Assert(tmp3.n() == pbsm.n(), ExcInternalError());
+  for (unsigned int blr = 0; blr < 2; ++blr)
+    {
+      for (unsigned int blc = 0; blc < 2; ++blc)
+        {
+          Assert(tmp3.block(blr, blc).m() == pbsm.block(blr, blc).m(),
+                 ExcInternalError());
+          Assert(tmp3.block(blr, blc).n() == pbsm.block(blr, blc).n(),
+                 ExcInternalError());
+          Assert(tmp3.block(blr, blc).petsc_matrix() ==
+                   pbsm.block(blr, blc).petsc_matrix(),
+                 ExcInternalError());
+        }
+    }
+
+  // Now pass empty blocks
+  std::array<Mat, 2> arrayRows0Empty = {
+    {nullptr, tmp2.block(0, 1).petsc_matrix()}};
+  std::array<Mat, 2> arrayRows1Empty = {
+    {tmp2.block(1, 0).petsc_matrix(), nullptr}};
+  std::array<std::array<Mat, 2>, 2> arrayMatEmpty = {
+    {arrayRows0Empty, arrayRows1Empty}};
+
+  PETScWrappers::MPI::BlockSparseMatrix tmp4(arrayMatEmpty);
+  Assert(tmp4.n_block_rows() == pbsm.n_block_rows(), ExcInternalError());
+  Assert(tmp4.n_block_cols() == pbsm.n_block_cols(), ExcInternalError());
+  Assert(tmp4.m() == pbsm.m(), ExcInternalError());
+  Assert(tmp4.n() == pbsm.n(), ExcInternalError());
+  Assert(tmp4.block(0, 1).m() == pbsm.block(0, 1).m(), ExcInternalError());
+  Assert(tmp4.block(0, 1).n() == pbsm.block(0, 1).n(), ExcInternalError());
+  Assert(tmp4.block(0, 1).petsc_matrix() == pbsm.block(0, 1).petsc_matrix(),
+         ExcInternalError());
+  Assert(tmp4.block(1, 0).m() == pbsm.block(1, 0).m(), ExcInternalError());
+  Assert(tmp4.block(1, 0).n() == pbsm.block(1, 0).n(), ExcInternalError());
+  Assert(tmp4.block(1, 0).petsc_matrix() == pbsm.block(1, 0).petsc_matrix(),
+         ExcInternalError());
+
+  // Check the rectangular cases
+  std::array<std::array<Mat, 2>, 1> arrayMatRow = {{arrayRows0}};
+  std::array<std::array<Mat, 1>, 2> arrayMatCol = {
+    {{{arrayMat[0][0]}}, {{arrayMat[1][0]}}}};
+
+  PETScWrappers::MPI::BlockSparseMatrix tmp5(arrayMatRow);
+  Assert(tmp5.n_block_cols() == pbsm.n_block_cols(), ExcInternalError());
+  Assert(tmp5.n() == pbsm.n(), ExcInternalError());
+  for (unsigned int blc = 0; blc < 2; ++blc)
+    {
+      Assert(tmp5.block(0, blc).m() == pbsm.block(0, blc).m(),
+             ExcInternalError());
+      Assert(tmp5.block(0, blc).n() == pbsm.block(0, blc).n(),
+             ExcInternalError());
+      Assert(tmp5.block(0, blc).petsc_matrix() ==
+               pbsm.block(0, blc).petsc_matrix(),
+             ExcInternalError());
+    }
+
+  PETScWrappers::MPI::BlockSparseMatrix tmp6(arrayMatCol);
+  Assert(tmp6.n_block_rows() == pbsm.n_block_rows(), ExcInternalError());
+  Assert(tmp6.m() == pbsm.m(), ExcInternalError());
+  for (unsigned int blr = 0; blr < 2; ++blr)
+    {
+      Assert(tmp6.block(blr, 0).m() == pbsm.block(blr, 0).m(),
+             ExcInternalError());
+      Assert(tmp6.block(blr, 0).n() == pbsm.block(blr, 0).n(),
+             ExcInternalError());
+      Assert(tmp6.block(blr, 0).petsc_matrix() ==
+               pbsm.block(blr, 0).petsc_matrix(),
+             ExcInternalError());
+    }
 }
-- 
2.39.5