#include <base/memory_consumption.h>
#include <lac/sparsity_pattern.h>
#include <lac/compressed_sparsity_pattern.h>
+#include <lac/compressed_set_sparsity_pattern.h>
#include <lac/vector.h>
#include <lac/block_vector.h>
#include <lac/sparse_matrix.h>
+void ConstraintMatrix::condense_fast (CompressedSparsityPattern &sparsity) const
+{
+ // NOTE: the body below is a commented-out draft that was never finished;
+ // as written, this function does nothing.
+ /*
+ Assert (sorted == true, ExcMatrixNotClosed());
+ Assert (sparsity.n_rows() == sparsity.n_cols(),
+ ExcNotQuadratic());
+
+ // store for each index whether it must be
+ // distributed or not. If the entry is
+ // deal_II_numbers::invalid_unsigned_int,
+ // no distribution is necessary.
+ // Otherwise, the number states which line
+ // in the constraint matrix handles this
+ // index.
+ std::vector<unsigned int> distribute(sparsity.n_rows(),
+ deal_II_numbers::invalid_unsigned_int);
+
+ for (unsigned int c=0; c<lines.size(); ++c)
+ distribute[lines[c].line] = c;
+
+ const unsigned int n_rows = sparsity.n_rows();
+ for (unsigned int row=0; row<n_rows; ++row)
+ {
+ // First copy the entries of this row into a local cache data structure.
+ std::set<unsigned int> row_cache;
+ for (unsigned int j=0; j<sparsity.row_length(row); ++j)
+ row_cache.insert (sparsity.column_number(row,j));
+
+ if (distribute[row] == deal_II_numbers::invalid_unsigned_int)
+ {
+ // regular line. loop over
+ // cols. note that as we
+ // proceed to distribute
+ // cols, the loop may get
+ // longer
+ std::set<unsigned int>::iterator col_num = row_cache.begin ();
+
+ for (; col_num != row_cache.end (); ++col_num)
+ {
+ const unsigned int column = *col_num;
+
+ if (distribute[column] != deal_II_numbers::invalid_unsigned_int)
+ {
+ // the column is constrained: distribute
+ // its entries into this row
+ for (unsigned int q=0;
+ q!=lines[distribute[column]].entries.size();
+ ++q)
+ {
+ const unsigned int
+ new_col = lines[distribute[column]].entries[q].first;
+
+ sparsity.add (row, new_col);
+ };
+ };
+ }
+ }
+ else
+ // row must be distributed
+ {
+ std::set<unsigned int>::iterator col_num = row_cache.begin ();
+
+ for (; col_num != row_cache.end (); ++col_num)
+ {
+ const unsigned int column = *col_num;
+
+ if (distribute[column] == deal_II_numbers::invalid_unsigned_int)
+ // distribute entry at irregular
+ // row @p{row} and regular column
+ // @p{column}
+ for (unsigned int q=0;
+ q!=lines[distribute[row]].entries.size(); ++q)
+ sparsity.add (lines[distribute[row]].entries[q].first,
+ column);
+ else
+ // distribute entry at irregular
+ // row @p{row} and irregular column
+ // @p{column}
+ for (unsigned int p=0; p!=lines[distribute[row]].entries.size(); ++p)
+ for (unsigned int q=0;
+ q!=lines[distribute[column]]
+ .entries.size(); ++q)
+ sparsity.add (lines[distribute[row]].entries[p].first,
+ lines[distribute[column]]
+ .entries[q].first);
+ };
+ }
+
+ // Now copy the cached entries back into the sparsity pattern. This step
+ // was never implemented in this draft; the loop below is an empty stub.
+ std::set<unsigned int>::iterator col_num = row_cache.begin ();
+ for (; col_num != row_cache.end (); ++col_num)
+ {
+ }
+ };
+ */
+}
+
+
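+// A minimal usage sketch (hedged; names are placeholders). It assumes a
+// DoFHandler `dof_handler`, a ConstraintMatrix `constraints` that has
+// already been close()d, and that the corresponding make_sparsity_pattern
+// instantiation for CompressedSetSparsityPattern is available:
+//
+//   CompressedSetSparsityPattern csp (dof_handler.n_dofs(),
+//                                     dof_handler.n_dofs());
+//   DoFTools::make_sparsity_pattern (dof_handler, csp);
+//   constraints.condense (csp);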
+void ConstraintMatrix::condense (CompressedSetSparsityPattern &sparsity) const
+{
+ Assert (sorted == true, ExcMatrixNotClosed());
+ Assert (sparsity.n_rows() == sparsity.n_cols(),
+ ExcNotQuadratic());
+
+ // store for each index whether it must be
+ // distributed or not. If the entry is
+ // deal_II_numbers::invalid_unsigned_int,
+ // no distribution is necessary.
+ // Otherwise, the number states which line
+ // in the constraint matrix handles this
+ // index.
+ std::vector<unsigned int> distribute(sparsity.n_rows(),
+ deal_II_numbers::invalid_unsigned_int);
+
+ for (unsigned int c=0; c<lines.size(); ++c)
+ distribute[lines[c].line] = c;
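+ // As a small illustrative example (hypothetical indices): if DoF 3 is
+ // handled by constraint line 0 and DoF 7 by constraint line 1, then
+ // distribute[3]==0, distribute[7]==1, and all other entries stay
+ // deal_II_numbers::invalid_unsigned_int; rows and columns 3 and 7 are
+ // then expanded below using lines[0].entries and lines[1].entries.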
+
+ const unsigned int n_rows = sparsity.n_rows();
+ for (unsigned int row=0; row<n_rows; ++row)
+ {
+ if (distribute[row] == deal_II_numbers::invalid_unsigned_int)
+ {
+ // regular line. loop over
+ // cols. note that as we
+ // proceed to distribute
+ // cols, the loop may get
+ // longer
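+ // (This is safe for the set-based pattern: assuming each row is stored
+ // as a std::set, as the class name suggests, sparsity.add() below does
+ // not invalidate the iterator, and entries inserted past the current
+ // position are still visited by this loop.)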
+ CompressedSetSparsityPattern::CSSPIterator col_num = sparsity.row_begin (row);
+
+ for (; col_num != sparsity.row_end (row); ++col_num)
+ {
+ const unsigned int column = *col_num;
+
+ if (distribute[column] != deal_II_numbers::invalid_unsigned_int)
+ {
+ // the column is constrained: distribute
+ // the entry at regular row @p{row} and
+ // irregular column @p{column}
+ for (unsigned int q=0;
+ q!=lines[distribute[column]].entries.size();
+ ++q)
+ {
+ const unsigned int
+ new_col = lines[distribute[column]].entries[q].first;
+
+ sparsity.add (row, new_col);
+ };
+ };
+ }
+ }
+ else
+ // row must be distributed
+ {
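+ // Illustrative example (hypothetical indices): if row 3 is constrained
+ // to DoFs 1 and 5, an unconstrained column 8 in row 3 yields entries
+ // (1,8) and (5,8); if column 8 were itself constrained to DoFs 2 and 6,
+ // the four product entries (1,2), (1,6), (5,2), (5,6) are added instead.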
+ CompressedSetSparsityPattern::CSSPIterator col_num = sparsity.row_begin (row);
+
+ for (; col_num != sparsity.row_end (row); ++col_num)
+ {
+ const unsigned int column = *col_num;
+
+ if (distribute[column] == deal_II_numbers::invalid_unsigned_int)
+ // distribute entry at irregular
+ // row @p{row} and regular column
+ // @p{column}
+ for (unsigned int q=0;
+ q!=lines[distribute[row]].entries.size(); ++q)
+ sparsity.add (lines[distribute[row]].entries[q].first,
+ column);
+ else
+ // distribute entry at irregular
+ // row @p{row} and irregular column
+ // @p{column}
+ for (unsigned int p=0; p!=lines[distribute[row]].entries.size(); ++p)
+ for (unsigned int q=0;
+ q!=lines[distribute[column]]
+ .entries.size(); ++q)
+ sparsity.add (lines[distribute[row]].entries[p].first,
+ lines[distribute[column]]
+ .entries[q].first);
+ };
+ }
+ };
+}
+
+
+
void ConstraintMatrix::condense (BlockSparsityPattern &sparsity) const
{
Assert (sorted == true, ExcMatrixNotClosed());
CompressedSparsityPattern &,
const bool) const;
template void ConstraintMatrix::
+add_entries_local_to_global<CompressedSetSparsityPattern> (const std::vector<unsigned int> &,
+ CompressedSetSparsityPattern &,
+ const bool) const;
+
+template void ConstraintMatrix::
add_entries_local_to_global<BlockSparsityPattern> (const std::vector<unsigned int> &,
BlockSparsityPattern &,
const bool) const;
#include <dofs/dof_tools.h>
#include <lac/sparsity_pattern.h>
#include <lac/compressed_sparsity_pattern.h>
+#include <lac/compressed_set_sparsity_pattern.h>
#include <lac/block_sparsity_pattern.h>
#include <lac/vector.h>
// explicit instantiations
+// TODO: Add missing instantiations for CompressedSetSparsityPattern
template void
DoFTools::make_sparsity_pattern<hp::DoFHandler<deal_II_dimension>,
SparsityPattern>
(const hp::DoFHandler<deal_II_dimension> &dof,
SparsityPattern &sparsity,
const ConstraintMatrix &);
+
+
template void
DoFTools::make_sparsity_pattern<hp::DoFHandler<deal_II_dimension>,
CompressedSparsityPattern>
(const hp::DoFHandler<deal_II_dimension> &dof,
CompressedSparsityPattern &sparsity,
const ConstraintMatrix &);
+
+template void
+DoFTools::make_sparsity_pattern<hp::DoFHandler<deal_II_dimension>,
+ CompressedSetSparsityPattern>
+(const hp::DoFHandler<deal_II_dimension> &dof,
+ CompressedSetSparsityPattern &sparsity,
+ const ConstraintMatrix &);
+
+
template void
DoFTools::make_sparsity_pattern<hp::DoFHandler<deal_II_dimension>,
BlockSparsityPattern>