*/
explicit BlockSparseMatrix(const Mat &);
+ /**
+ * Create a BlockSparseMatrix from a two-dimensional array of PETSc Mat
+ * objects; entries that are nullptr are treated as empty blocks.
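+ *
+ * A minimal usage sketch, assuming @p A00, @p A01, @p A10 and @p A11 are
+ * placeholder names for previously created PETSc Mat handles of compatible
+ * sizes:
+ * @code
+ * std::array<Mat, 2>                row0   = {{A00, A01}};
+ * std::array<Mat, 2>                row1   = {{A10, A11}};
+ * std::array<std::array<Mat, 2>, 2> blocks = {{row0, row1}};
+ * PETScWrappers::MPI::BlockSparseMatrix B(blocks);
+ * @endcode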
+ */
+ template <size_t block_rows, size_t block_columns>
+ BlockSparseMatrix(
+ const std::array<std::array<Mat, block_columns>, block_rows> &);
+
/**
* Destructor.
*/
this->reinit(A);
}
+ template <size_t block_rows, size_t block_columns>
+ inline BlockSparseMatrix::BlockSparseMatrix(
+ const std::array<std::array<Mat, block_columns>, block_rows> &arrayA)
+ {
+ this->reinit(block_rows, block_columns);
+ this->sub_objects.reinit(block_rows, block_columns);
+ for (std::size_t r = 0; r < block_rows; ++r)
+ for (std::size_t c = 0; c < block_columns; ++c)
+ {
+ if (arrayA[r][c])
+ this->sub_objects[r][c] = new BlockType(arrayA[r][c]);
+ else
+ this->sub_objects[r][c] = nullptr;
+ }
+ this->collect_sizes();
+ }
+
inline BlockSparseMatrix &
BlockSparseMatrix::operator=(const double d)
{
#ifdef DEAL_II_WITH_PETSC
+// A utility routine to create an empty dummy matrix, used in case we import
+// a MATNEST with NULL blocks
+static void
+createDummyMat(MPI_Comm comm,
+ PetscInt lr,
+ PetscInt gr,
+ PetscInt lc,
+ PetscInt gc,
+ Mat * dummy)
+{
+ PetscErrorCode ierr;
+
+ ierr = MatCreate(comm, dummy);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatSetSizes(*dummy, lr, lc, gr, gc);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatSetType(*dummy, MATAIJ);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
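+ // Preallocate zero entries: the placeholder carries only its sizes and
+ // never stores values.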
+ ierr = MatSeqAIJSetPreallocation(*dummy, 0, nullptr);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatMPIAIJSetPreallocation(*dummy, 0, nullptr, 0, nullptr);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatSetUp(*dummy);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatSetOption(*dummy, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatAssemblyBegin(*dummy, MAT_FINAL_ASSEMBLY);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+ ierr = MatAssemblyEnd(*dummy, MAT_FINAL_ASSEMBLY);
+ AssertThrow(ierr == 0, dealii::ExcPETScError(ierr));
+}
+
DEAL_II_NAMESPACE_OPEN
namespace PETScWrappers
this->sub_objects[r][c] = p;
}
- collect_sizes();
+ this->collect_sizes();
}
void
BlockSparseMatrix::collect_sizes()
{
- BaseClass::collect_sizes();
+ auto m = this->n_block_rows();
+ auto n = this->n_block_cols();
+ PetscErrorCode ierr;
+
+ // Create empty matrices where needed: the base class requires a valid
+ // matrix in every block, even though MATNEST itself supports NULL blocks
+ std::vector<size_type> row_sizes(m);
+ std::vector<size_type> col_sizes(n);
+ std::vector<size_type> row_local_sizes(m);
+ std::vector<size_type> col_local_sizes(n);
+ MPI_Comm comm = MPI_COMM_NULL;
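+ // Gather the communicator and the global/local block sizes from the
+ // blocks that are present.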
+ for (size_type r = 0; r < m; r++)
+ {
+ for (size_type c = 0; c < n; c++)
+ {
+ if (this->sub_objects[r][c])
+ {
+ comm = this->sub_objects[r][c]->get_mpi_communicator();
+ row_sizes[r] = this->sub_objects[r][c]->m();
+ col_sizes[c] = this->sub_objects[r][c]->n();
+ row_local_sizes[r] = this->sub_objects[r][c]->local_size();
+ col_local_sizes[c] =
+ this->sub_objects[r][c]->local_domain_size();
+ }
+ }
+ }
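+ // Replace every missing block with an empty placeholder matrix of the
+ // correct size, so that the base class sees a complete set of blocks.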
+ for (size_type r = 0; r < m; r++)
+ {
+ for (size_type c = 0; c < n; c++)
+ {
+ if (!this->sub_objects[r][c])
+ {
+ Mat dummy;
+ createDummyMat(comm,
+ static_cast<PetscInt>(row_local_sizes[r]),
+ static_cast<PetscInt>(row_sizes[r]),
+ static_cast<PetscInt>(col_local_sizes[c]),
+ static_cast<PetscInt>(col_sizes[c]),
+ &dummy);
+ this->sub_objects[r][c] = new BlockType(dummy);
+ ierr = MatDestroy(&dummy);
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ }
+ }
+ }
- auto m = this->n_block_cols();
- auto n = this->n_block_cols();
+ BaseClass::collect_sizes();
- PetscErrorCode ierr = destroy_matrix(petsc_nest_matrix);
+ ierr = destroy_matrix(petsc_nest_matrix);
AssertThrow(ierr == 0, ExcPETScError(ierr));
std::vector<Mat> psub_objects(m * n);
for (unsigned int r = 0; r < m; r++)
for (unsigned int c = 0; c < n; c++)
psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
- ierr = MatCreateNest(get_mpi_communicator(),
- m,
- nullptr,
- n,
- nullptr,
- psub_objects.data(),
- &petsc_nest_matrix);
+ ierr = MatCreateNest(
+ comm, m, nullptr, n, nullptr, psub_objects.data(), &petsc_nest_matrix);
AssertThrow(ierr == 0, ExcPETScError(ierr));
}
{
for (PetscInt j = 0; j < nc; ++j)
{
- // TODO: MATNEST supports NULL blocks
- this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
+ if (mats[i * nc + j])
+ this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
+ else
+ this->sub_objects[i][j] = nullptr;
}
}
- collect_sizes();
+ this->collect_sizes();
}
} // namespace MPI
ExcInternalError());
}
}
+
+ // Extract the blocks of the PETSc MATNEST, collect them in an array of
+ // PETSc matrices, and use it to construct a new BlockSparseMatrix
+ std::array<Mat, 2> arrayRows0 = {
+ {tmp2.block(0, 0).petsc_matrix(), tmp2.block(0, 1).petsc_matrix()}};
+ std::array<Mat, 2> arrayRows1 = {
+ {tmp2.block(1, 0).petsc_matrix(), tmp2.block(1, 1).petsc_matrix()}};
+ std::array<std::array<Mat, 2>, 2> arrayMat = {{arrayRows0, arrayRows1}};
+
+ PETScWrappers::MPI::BlockSparseMatrix tmp3(arrayMat);
+ Assert(tmp3.n_block_rows() == pbsm.n_block_rows(), ExcInternalError());
+ Assert(tmp3.n_block_cols() == pbsm.n_block_cols(), ExcInternalError());
+ Assert(tmp3.m() == pbsm.m(), ExcInternalError());
+ Assert(tmp3.n() == pbsm.n(), ExcInternalError());
+ for (unsigned int blr = 0; blr < 2; ++blr)
+ {
+ for (unsigned int blc = 0; blc < 2; ++blc)
+ {
+ Assert(tmp3.block(blr, blc).m() == pbsm.block(blr, blc).m(),
+ ExcInternalError());
+ Assert(tmp3.block(blr, blc).n() == pbsm.block(blr, blc).n(),
+ ExcInternalError());
+ Assert(tmp3.block(blr, blc).petsc_matrix() ==
+ pbsm.block(blr, blc).petsc_matrix(),
+ ExcInternalError());
+ }
+ }
+
+ // Now pass empty blocks
+ std::array<Mat, 2> arrayRows0Empty = {
+ {nullptr, tmp2.block(0, 1).petsc_matrix()}};
+ std::array<Mat, 2> arrayRows1Empty = {
+ {tmp2.block(1, 0).petsc_matrix(), nullptr}};
+ std::array<std::array<Mat, 2>, 2> arrayMatEmpty = {
+ {arrayRows0Empty, arrayRows1Empty}};
+
+ PETScWrappers::MPI::BlockSparseMatrix tmp4(arrayMatEmpty);
+ Assert(tmp4.n_block_rows() == pbsm.n_block_rows(), ExcInternalError());
+ Assert(tmp4.n_block_cols() == pbsm.n_block_cols(), ExcInternalError());
+ Assert(tmp4.m() == pbsm.m(), ExcInternalError());
+ Assert(tmp4.n() == pbsm.n(), ExcInternalError());
+ Assert(tmp4.block(0, 1).m() == pbsm.block(0, 1).m(), ExcInternalError());
+ Assert(tmp4.block(0, 1).n() == pbsm.block(0, 1).n(), ExcInternalError());
+ Assert(tmp4.block(0, 1).petsc_matrix() == pbsm.block(0, 1).petsc_matrix(),
+ ExcInternalError());
+ Assert(tmp4.block(1, 0).m() == pbsm.block(1, 0).m(), ExcInternalError());
+ Assert(tmp4.block(1, 0).n() == pbsm.block(1, 0).n(), ExcInternalError());
+ Assert(tmp4.block(1, 0).petsc_matrix() == pbsm.block(1, 0).petsc_matrix(),
+ ExcInternalError());
+
+ // Check the rectangular cases
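+ // (a 1x2 block row made of the first row of blocks, and a 2x1 block
+ // column made of the first column)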
+ std::array<std::array<Mat, 2>, 1> arrayMatRow = {{arrayRows0}};
+ std::array<std::array<Mat, 1>, 2> arrayMatCol = {
+ {{{arrayMat[0][0]}}, {{arrayMat[1][0]}}}};
+
+ PETScWrappers::MPI::BlockSparseMatrix tmp5(arrayMatRow);
+ Assert(tmp5.n_block_cols() == pbsm.n_block_cols(), ExcInternalError());
+ Assert(tmp5.n() == pbsm.n(), ExcInternalError());
+ for (unsigned int blc = 0; blc < 2; ++blc)
+ {
+ Assert(tmp5.block(0, blc).m() == pbsm.block(0, blc).m(),
+ ExcInternalError());
+ Assert(tmp5.block(0, blc).n() == pbsm.block(0, blc).n(),
+ ExcInternalError());
+ Assert(tmp5.block(0, blc).petsc_matrix() ==
+ pbsm.block(0, blc).petsc_matrix(),
+ ExcInternalError());
+ }
+
+ PETScWrappers::MPI::BlockSparseMatrix tmp6(arrayMatCol);
+ Assert(tmp6.n_block_rows() == pbsm.n_block_rows(), ExcInternalError());
+ Assert(tmp6.m() == pbsm.m(), ExcInternalError());
+ for (unsigned int blr = 0; blr < 2; ++blr)
+ {
+ Assert(tmp6.block(blr, 0).m() == pbsm.block(blr, 0).m(),
+ ExcInternalError());
+ Assert(tmp6.block(blr, 0).n() == pbsm.block(blr, 0).n(),
+ ExcInternalError());
+ Assert(tmp6.block(blr, 0).petsc_matrix() ==
+ pbsm.block(blr, 0).petsc_matrix(),
+ ExcInternalError());
+ }
}