*/
explicit BlockVector(Vec v);
+ /**
+ * Create a BlockVector with an array of PETSc vectors.
+ */
+ template <std::size_t num_blocks>
+ BlockVector(const std::array<Vec, num_blocks> &);
+
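A minimal usage sketch for this array-based constructor, assuming two already-created PETSc Vec handles v0 and v1 (hypothetical names, not from the patch):

// Wrap two existing PETSc Vec handles in a deal.II block vector.
// Each block wraps its Vec rather than copying it, which is what the
// test further below checks by comparing the petsc_vector() handles.
std::array<Vec, 2> raw_blocks = {{v0, v1}};
PETScWrappers::MPI::BlockVector bv(raw_blocks);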
/**
* Destructor. Clears memory.
*/
/*--------------------- Inline functions --------------------------------*/
inline BlockVector::BlockVector()
- : petsc_nest_vector(nullptr)
+ : BlockVectorBase<Vector>()
+ , petsc_nest_vector(nullptr)
{}
const MPI_Comm & communicator,
const size_type block_size,
const size_type locally_owned_size)
- : petsc_nest_vector(nullptr)
+ : BlockVector()
{
reinit(n_blocks, communicator, block_size, locally_owned_size);
}
const std::vector<size_type> &block_sizes,
const MPI_Comm & communicator,
const std::vector<size_type> &local_elements)
- : petsc_nest_vector(nullptr)
+ : BlockVector()
{
reinit(block_sizes, communicator, local_elements, false);
}
+
inline BlockVector::BlockVector(const BlockVector &v)
- : BlockVectorBase<Vector>()
- , petsc_nest_vector(nullptr)
+ : BlockVector()
{
this->block_indices = v.block_indices;
this->collect_sizes();
}
+
+
inline BlockVector::BlockVector(
const std::vector<IndexSet> &parallel_partitioning,
const MPI_Comm & communicator)
- : petsc_nest_vector(nullptr)
+ : BlockVector()
{
reinit(parallel_partitioning, communicator);
}
+
+
inline BlockVector::BlockVector(
const std::vector<IndexSet> &parallel_partitioning,
const std::vector<IndexSet> &ghost_indices,
const MPI_Comm & communicator)
- : petsc_nest_vector(nullptr)
+ : BlockVector()
{
reinit(parallel_partitioning, ghost_indices, communicator);
}
+
+
inline BlockVector::BlockVector(Vec v)
- : BlockVectorBase<Vector>()
- , petsc_nest_vector(nullptr)
+ : BlockVector()
{
this->reinit(v);
}
+
+
+ template <std::size_t num_blocks>
+ inline BlockVector::BlockVector(const std::array<Vec, num_blocks> &arrayV)
+ : BlockVector()
+ {
+ this->block_indices.reinit(num_blocks, 0);
+
+ this->components.resize(num_blocks);
+ for (std::size_t i = 0; i < num_blocks; ++i)
+ this->components[i].reinit(arrayV[i]);
+ this->collect_sizes();
+ }
+
+
+
inline BlockVector &
BlockVector::operator=(const value_type s)
{
return *this;
}
+
+
inline BlockVector &
BlockVector::operator=(const BlockVector &v)
{
this->collect_sizes();
}
+
+
inline void
BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
const MPI_Comm & communicator)
this->collect_sizes();
}
+
+
inline void
BlockVector::reinit(const std::vector<IndexSet> &parallel_partitioning,
const std::vector<IndexSet> &ghost_entries,
return comm;
}
+
+
inline bool
BlockVector::has_ghost_elements() const
{
}
+
inline void
BlockVector::swap(BlockVector &v)
{
PETScWrappers::MPI::BlockVector vb2(v.petsc_vector());
Assert(vb2.n_blocks() == v.n_blocks(), ExcInternalError());
Assert(vb2.size() == v.size(), ExcInternalError());
+ Assert(vb2.petsc_vector() == v.petsc_vector(), ExcInternalError());
for (unsigned int bl = 0; bl < 2; ++bl)
{
Assert(vb2.block(bl).size() == v.block(bl).size(), ExcInternalError());
ExcInternalError());
}
+ // Create new block vector from an array of PETSc vectors
+ std::array<Vec, 2> arrayVecs = {
+ {vb.block(0).petsc_vector(), vb.block(1).petsc_vector()}};
+ PETScWrappers::MPI::BlockVector vb3(arrayVecs);
+ Assert(vb3.n_blocks() == vb.n_blocks(), ExcInternalError());
+ Assert(vb3.size() == vb.size(), ExcInternalError());
+ for (unsigned int bl = 0; bl < 2; ++bl)
+ {
+ Assert(vb3.block(bl).size() == vb.block(bl).size(), ExcInternalError());
+ Assert(vb3.block(bl).petsc_vector() == vb.block(bl).petsc_vector(),
+ ExcInternalError());
+ }
+
+
// Test swap
auto old_v_vb2 = vb2.petsc_vector();
auto old_v_vb = vb.petsc_vector();