void reinit (const size_type n_block_rows,
const size_type n_block_columns);
- * Note that each IndexSet needs to contiguous. For a symmetric structure
+
+ /**
+ * Efficiently reinit the block matrix for a parallel computation.
+ * Only the BlockSparsityPattern of the Simple type can efficiently
+ * store large sparsity patterns in parallel, so this is the only
+ * supported argument.
+ * The IndexSets describe the locally owned range of DoFs for each block.
- * Same as above but only for a symmetric structure only.
++ * Note that each IndexSet needs to be contiguous. For a symmetric structure
+ * hand in the same vector for the first two arguments.
+ */
+ void reinit(const std::vector<IndexSet> &rows,
+ const std::vector<IndexSet> &cols,
+ const BlockCompressedSimpleSparsityPattern &bcsp,
+ const MPI_Comm &com);
+
+
+ /**
++ * Same as above but for a symmetric structure only.
+ */
+ void reinit(const std::vector<IndexSet> &sizes,
+ const BlockCompressedSimpleSparsityPattern &bcsp,
+ const MPI_Comm &com);
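// Illustrative sketch, not part of this patch: one possible call sequence
// for the two reinit() overloads above, assuming they live in
// PETScWrappers::MPI::BlockSparseMatrix and that 'owned_partitioning'
// (one contiguous IndexSet of locally owned DoFs per block), 'bcsp'
// (a BlockCompressedSimpleSparsityPattern built on that partitioning),
// and 'mpi_communicator' are set up by the caller.
PETScWrappers::MPI::BlockSparseMatrix system_matrix;
system_matrix.reinit(owned_partitioning,   // row blocks
                     owned_partitioning,   // column blocks
                     bcsp,
                     mpi_communicator);
// Shorthand for the symmetric case, equivalent to the call above:
system_matrix.reinit(owned_partitioning, bcsp, mpi_communicator);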
+
+
+
/**
* Matrix-vector multiplication:
* let $dst = M*src$ with $M$ being this matrix.
*/
explicit Vector (const MPI_Comm &communicator,
const IndexSet &local,
- const IndexSet &ghost);
+ const IndexSet &ghost) DEAL_II_DEPRECATED;
+
+ /**
+ * Constructs a new parallel ghosted PETSc
+ * vector from an IndexSet. Note that
+ * @p local must be contiguous and
+ * the global size of the vector is
+ * determined by local.size(). The
+ * global indices in @p ghost are
+ * supplied as ghost indices that can
+ * also be read locally.
+ *
+ * Note that the @p ghost IndexSet
+ * may be empty and that any indices
+ * already contained in @p local are
+ * ignored during construction. That
+ * way, the ghost parameter can equal
+ * the set of locally relevant
+ * degrees of freedom, see step-32.
+ *
+ * @note This operation always creates a ghosted
+ * vector.
+ */
- explicit Vector (const IndexSet &local,
++ Vector (const IndexSet &local,
+ const IndexSet &ghost,
- const MPI_Comm &communicator = MPI_COMM_WORLD);
++ const MPI_Comm &communicator);
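// Illustrative sketch, not part of this patch: constructing a ghosted
// vector in the step-32 style; 'dof_handler' and 'mpi_communicator' are
// assumed to exist in the calling code.
IndexSet locally_owned = dof_handler.locally_owned_dofs();
IndexSet locally_relevant;
DoFTools::extract_locally_relevant_dofs(dof_handler, locally_relevant);
PETScWrappers::MPI::Vector ghosted(locally_owned,
                                   locally_relevant,
                                   mpi_communicator);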
/**
* Constructs a new parallel PETSc
* vector from an IndexSet. This creates a
* non-ghosted vector.
*/
- explicit Vector (const MPI_Comm &communicator,
- const IndexSet &local);
+ explicit Vector (const MPI_Comm &communicator,
+ const IndexSet &local) DEAL_II_DEPRECATED;
+ /**
+ * Constructs a new parallel PETSc
+ * vector from an IndexSet. This creates a
+ * non-ghosted vector.
+ */
+ explicit Vector (const IndexSet &local,
- const MPI_Comm &communicator = MPI_COMM_WORLD);
++ const MPI_Comm &communicator);
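// Illustrative sketch, not part of this patch: the non-ghosted
// counterpart, reusing 'locally_owned' and 'mpi_communicator' from the
// sketch above.
PETScWrappers::MPI::Vector owned(locally_owned, mpi_communicator);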
/**
* Copy the given vector. Resize the
*/
void reinit (const MPI_Comm &communicator,
const IndexSet &local,
- const IndexSet &ghost);
+ const IndexSet &ghost) DEAL_II_DEPRECATED;
+ /**
+ * Reinit as a ghosted vector. See the
+ * constructor with the same signature
+ * for more details.
+ */
+ void reinit (const IndexSet &local,
+ const IndexSet &ghost,
- const MPI_Comm &communicator = MPI_COMM_WORLD);
++ const MPI_Comm &communicator);
/**
* Reinit as a vector without ghost elements. See
* for more details.
*/
void reinit (const MPI_Comm &communicator,
- const IndexSet &local);
+ const IndexSet &local) DEAL_II_DEPRECATED;
+ /**
+ * Reinit as a vector without ghost elements. See
+ * constructor with the same signature
+ * for more details.
+ */
+ void reinit (const IndexSet &local,
- const MPI_Comm &communicator = MPI_COMM_WORLD);
++ const MPI_Comm &communicator);
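// Illustrative sketch, not part of this patch: reinitializing an existing
// vector with the new argument order; names as in the sketches above.
PETScWrappers::MPI::Vector v;
v.reinit(locally_owned, mpi_communicator);                    // non-ghosted
v.reinit(locally_owned, locally_relevant, mpi_communicator);  // ghosted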
/**
* Return a reference to the MPI