void
collect_sizes();
+ /**
+ * Call the compress() function on all the subblocks of the matrix
+ * and update the internal state of the PETSc object.
+ *
+ * This functionality is needed because PETSc may use this information
+ * to decide whether or not to rebuild the preconditioner.
+ *
+ * See
+ * @ref GlossCompress "Compressing distributed objects"
+ * for more information.
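+ *
+ * A minimal usage sketch (the object name and the assembly step are
+ * placeholders, not part of this class):
+ * @code
+ * BlockSparseMatrix block_matrix;
+ * // ... reinit() the blocks and add entries to them ...
+ * block_matrix.compress(VectorOperation::add);
+ * @endcode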
+ */
+ void
+ compress(VectorOperation::values operation);
+
/**
* Return the partitioning of the domain space of this matrix, i.e., the
* partitioning of the vectors this matrix has to be multiplied with.
*/
std::vector<IndexSet>
locally_owned_domain_indices() const;

/**
* A PETSc Mat object that describes the entire block matrix.
* Internally, this is done by creating
* a "nested" matrix using PETSc's MATNEST object whose individual
* blocks are the blocks of this matrix.
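+ *
+ * As an illustration only (a sketch that uses PETSc's C API directly, not
+ * part of this class's interface), individual blocks of the nested matrix
+ * can be retrieved with
+ * @code
+ * Mat sub;
+ * MatNestGetSubMat(petsc_nest_matrix, 0, 0, &sub);
+ * @endcode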
*/
Mat petsc_nest_matrix = nullptr;
+
+ /**
+ * Utility to set up the MATNEST object.
+ */
+ void
+ setup_nest_mat();
+
+ /**
+ * A utility method to populate empty blocks with actual objects.
+ * This is needed because MATNEST supports nullptr as a block,
+ * while the BlockMatrixBase class does not.
+ */
+ void
+ create_empty_matrices_if_needed();
};
void
collect_sizes();
+ /**
+ * Call the compress() function on all the subblocks of the vector
+ * and update the internal state of the nested PETSc vector.
+ *
+ * This functionality is needed because PETSc may cache vector norms for
+ * performance reasons.
+ *
+ * See
+ * @ref GlossCompress "Compressing distributed objects"
+ * for more information.
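+ *
+ * A minimal usage sketch (the object name and the assembly step are
+ * placeholders, not part of this class):
+ * @code
+ * BlockVector block_vector;
+ * // ... reinit() the blocks and write entries into them ...
+ * block_vector.compress(VectorOperation::insert);
+ * @endcode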
+ */
+ void
+ compress(VectorOperation::values operation);
+
/**
* Change the number of blocks to <tt>num_blocks</tt>. The individual
* blocks will get initialized with zero size, so it is assumed that the
DeclException0(ExcNonMatchingBlockVectors);
private:
+ /**
+ * A PETSc Vec object that describes the entire block vector.
+ * Internally, this is done by creating
+ * a "nested" vector using PETSc's VECNEST object whose individual
+ * blocks are the blocks of this vector.
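+ *
+ * As an illustration only (a sketch that uses PETSc's C API directly, not
+ * part of this class's interface), individual blocks of the nested vector
+ * can be retrieved with
+ * @code
+ * Vec sub;
+ * VecNestGetSubVec(petsc_nest_vector, 0, &sub);
+ * @endcode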
+ */
+ Vec petsc_nest_vector;
+
+ /**
+ * Utility to set up the VECNEST object.
+ */
void
setup_nest_vec();
-
- Vec petsc_nest_vector;
};
/** @} */
#ifdef DEAL_II_WITH_PETSC
+// For PetscObjectStateIncrease
+# include <petsc/private/petscimpl.h>
+
namespace
{
// A dummy utility routine to create an empty matrix in case we import
void
- BlockSparseMatrix::collect_sizes()
+ BlockSparseMatrix::create_empty_matrices_if_needed()
{
auto m = this->n_block_rows();
auto n = this->n_block_cols();
}
}
}
+ }
+
+ void
+ BlockSparseMatrix::collect_sizes()
+ {
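+ // Fill any still-empty blocks first, then let the base class collect the
+ // block sizes, and finally (re)create the MATNEST object.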
+ this->create_empty_matrices_if_needed();
BaseClass::collect_sizes();
+ this->setup_nest_mat();
+ }
+
+ void
+ BlockSparseMatrix::setup_nest_mat()
+ {
+ auto m = this->n_block_rows();
+ auto n = this->n_block_cols();
+ PetscErrorCode ierr;
+
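+ // Default to a serial communicator; it is replaced below by the
+ // communicator of the sub-blocks.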
+ MPI_Comm comm = PETSC_COMM_SELF;
ierr = destroy_matrix(petsc_nest_matrix);
AssertThrow(ierr == 0, ExcPETScError(ierr));
std::vector<Mat> psub_objects(m * n);
for (unsigned int r = 0; r < m; r++)
for (unsigned int c = 0; c < n; c++)
- psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
+ {
+ comm = this->sub_objects[r][c]->get_mpi_communicator();
+ psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
+ }
ierr = MatCreateNest(
comm, m, nullptr, n, nullptr, psub_objects.data(), &petsc_nest_matrix);
AssertThrow(ierr == 0, ExcPETScError(ierr));
+ void
+ BlockSparseMatrix::compress(VectorOperation::values operation)
+ {
+ BaseClass::compress(operation);
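+ // Tell PETSc that the state of the nested matrix has changed, so that
+ // cached information (e.g. whether the preconditioner needs to be
+ // rebuilt) is invalidated.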
+ PetscErrorCode ierr = PetscObjectStateIncrease(
+ reinterpret_cast<PetscObject>(petsc_nest_matrix));
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ }
+
+
+
std::vector<IndexSet>
BlockSparseMatrix::locally_owned_domain_indices() const
{
&isnest);
AssertThrow(ierr == 0, ExcPETScError(ierr));
std::vector<Mat> mats;
+ bool need_empty_matrices = false;
if (isnest)
{
ierr = MatNestGetSize(A, &nr, &nc);
Mat sA;
ierr = MatNestGetSubMat(A, i, j, &sA);
mats.push_back(sA);
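+ // MATNEST allows empty (nullptr) blocks; remember whether placeholder
+ // matrices have to be created for them below.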
+ if (!sA)
+ need_empty_matrices = true;
}
}
}
this->sub_objects[i][j] = nullptr;
}
}
+ if (need_empty_matrices)
+ this->create_empty_matrices_if_needed();
- this->collect_sizes();
+ BaseClass::collect_sizes();
+ if (need_empty_matrices || !isnest)
+ {
+ setup_nest_mat();
+ }
+ else
+ {
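+ // A is already a MATNEST with all blocks set: take an additional
+ // reference to it, release the previously stored nested matrix, and
+ // use A directly.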
+ ierr = PetscObjectReference(reinterpret_cast<PetscObject>(A));
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ ierr = MatDestroy(&petsc_nest_matrix);
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ petsc_nest_matrix = A;
+ }
}
} // namespace MPI
#ifdef DEAL_II_WITH_PETSC
+// For PetscObjectStateIncrease
+# include <petsc/private/petscimpl.h>
+
DEAL_II_NAMESPACE_OPEN
namespace PETScWrappers
this->components[i].reinit(sv[i]);
}
- this->collect_sizes();
+ BlockVectorBase::collect_sizes();
+ if (!isnest)
+ setup_nest_vec();
+ else
+ {
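+ // v is already a VECNEST: take an additional reference to it, release
+ // the previously stored nested vector, and use v directly.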
+ ierr = PetscObjectReference(reinterpret_cast<PetscObject>(v));
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ ierr = VecDestroy(&petsc_nest_vector);
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ petsc_nest_vector = v;
+ }
}
Vec &
setup_nest_vec();
}
+ void
+ BlockVector::compress(VectorOperation::values operation)
+ {
+ BlockVectorBase::compress(operation);
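+ // Tell PETSc that the state of the nested vector has changed, so that
+ // cached quantities (e.g. vector norms) are recomputed.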
+ PetscErrorCode ierr = PetscObjectStateIncrease(
+ reinterpret_cast<PetscObject>(petsc_nest_vector));
+ AssertThrow(ierr == 0, ExcPETScError(ierr));
+ }
+
void
BlockVector::setup_nest_vec()
{