From 6e9560b36e63ab870479258e29d4082b62137826 Mon Sep 17 00:00:00 2001
From: David Wells
Date: Wed, 3 May 2017 20:52:39 -0400
Subject: [PATCH] Make PETScWrappers::apply_boundary_values more generic.

---
 include/deal.II/numerics/matrix_tools.h |  29 +--
 source/numerics/matrix_tools_once.cc    | 229 ++++++++++--------------
 2 files changed, 104 insertions(+), 154 deletions(-)

diff --git a/include/deal.II/numerics/matrix_tools.h b/include/deal.II/numerics/matrix_tools.h
index e89b4280ff..6a86068070 100644
--- a/include/deal.II/numerics/matrix_tools.h
+++ b/include/deal.II/numerics/matrix_tools.h
@@ -58,13 +58,11 @@ namespace hp
 #ifdef DEAL_II_WITH_PETSC
 namespace PETScWrappers
 {
-  class SparseMatrix;
-  class Vector;
+  class MatrixBase;
+  class VectorBase;
   namespace MPI
   {
-    class SparseMatrix;
     class BlockSparseMatrix;
-    class Vector;
     class BlockVector;
   }
 }
@@ -799,16 +797,6 @@ namespace MatrixTools
    * this namespace.)
    *
    * This function is used in step-17 and step-18.
-   */
-  void
-  apply_boundary_values (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
-                         PETScWrappers::SparseMatrix  &matrix,
-                         PETScWrappers::Vector        &solution,
-                         PETScWrappers::Vector        &right_hand_side,
-                         const bool                    eliminate_columns = true);
-
-  /**
-   * Same function as above, but for parallel PETSc matrices.
    *
    * @note If the matrix is stored in parallel across multiple processors
    * using MPI, this function only touches rows that are locally stored and
@@ -827,14 +815,15 @@ namespace MatrixTools
    * rows.
    */
   void
-  apply_boundary_values (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
-                         PETScWrappers::MPI::SparseMatrix  &matrix,
-                         PETScWrappers::MPI::Vector        &solution,
-                         PETScWrappers::MPI::Vector        &right_hand_side,
-                         const bool                         eliminate_columns = true);
+  apply_boundary_values
+  (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
+   PETScWrappers::MatrixBase  &matrix,
+   PETScWrappers::VectorBase  &solution,
+   PETScWrappers::VectorBase  &right_hand_side,
+   const bool                  eliminate_columns = true);
 
   /**
-   * Same as above but for BlockSparseMatrix.
+   * Same as above but for the parallel BlockSparseMatrix.
    */
   void
   apply_boundary_values (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
diff --git a/source/numerics/matrix_tools_once.cc b/source/numerics/matrix_tools_once.cc
index 887a267ba9..cea0b6260c 100644
--- a/source/numerics/matrix_tools_once.cc
+++ b/source/numerics/matrix_tools_once.cc
@@ -35,11 +35,10 @@
 #include
 
 #ifdef DEAL_II_WITH_PETSC
-#  include
-#  include
-#  include
-#  include
+#  include
+#  include
 #  include
+#  include
 #endif
 
 #ifdef DEAL_II_WITH_TRILINOS
@@ -50,7 +49,6 @@
 #endif
 
 #include
-#include
 
 #include
 
@@ -60,139 +58,102 @@
 namespace MatrixTools
 {
 #ifdef DEAL_II_WITH_PETSC
-
-  namespace internal
+  void
+  apply_boundary_values
+  (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
+   PETScWrappers::MatrixBase  &matrix,
+   PETScWrappers::VectorBase  &solution,
+   PETScWrappers::VectorBase  &right_hand_side,
+   const bool                  eliminate_columns)
   {
-    namespace PETScWrappers
-    {
-      template <typename PETScMatrix, typename PETScVector>
-      void
-      apply_boundary_values (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
-                             PETScMatrix      &matrix,
-                             PETScVector      &solution,
-                             PETScVector      &right_hand_side,
-                             const bool        eliminate_columns)
-      {
-        (void)eliminate_columns;
-        Assert (eliminate_columns == false, ExcNotImplemented());
-
-        Assert (matrix.n() == right_hand_side.size(),
-                ExcDimensionMismatch(matrix.n(), right_hand_side.size()));
-        Assert (matrix.n() == solution.size(),
-                ExcDimensionMismatch(matrix.n(), solution.size()));
-
-        // if no boundary values are to be applied, then
-        // jump straight to the compress() calls that we still have
-        // to perform because they are collective operations
-        if (boundary_values.size() > 0)
-          {
-            const std::pair<types::global_dof_index, types::global_dof_index> local_range
-              = matrix.local_range();
-            Assert (local_range == right_hand_side.local_range(),
-                    ExcInternalError());
-            Assert (local_range == solution.local_range(),
-                    ExcInternalError());
-
-            // determine the first nonzero diagonal
-            // entry from within the part of the
-            // matrix that we can see. if we can't
-            // find such an entry, take one
-            PetscScalar average_nonzero_diagonal_entry = 1;
-            for (types::global_dof_index i=local_range.first; i<local_range.second; ++i)
-              if (matrix.diag_element(i) != PetscScalar())
-                {
-                  average_nonzero_diagonal_entry = std::abs(matrix.diag_element(i));
-                  break;
-                }
-
-            // figure out which rows of the matrix we
-            // have to eliminate on this processor
-            std::vector<types::global_dof_index> constrained_rows;
-            for (std::map<types::global_dof_index,PetscScalar>::const_iterator
-                 dof  = boundary_values.begin();
-                 dof != boundary_values.end();
-                 ++dof)
-              if ((dof->first >= local_range.first) &&
-                  (dof->first < local_range.second))
-                constrained_rows.push_back (dof->first);
+    (void)eliminate_columns;
+    Assert (eliminate_columns == false, ExcNotImplemented());
+
+    Assert (matrix.n() == right_hand_side.size(),
+            ExcDimensionMismatch(matrix.n(), right_hand_side.size()));
+    Assert (matrix.n() == solution.size(),
+            ExcDimensionMismatch(matrix.n(), solution.size()));
 
-            // then eliminate these rows and set
-            // their diagonal entry to what we have
-            // determined above. note that for petsc
-            // matrices interleaving read with write
-            // operations is very expensive. thus, we
-            // here always replace the diagonal
-            // element, rather than first checking
-            // whether it is nonzero and in that case
-            // preserving it. this is different from
-            // the case of deal.II sparse matrices
-            // treated in the other functions.
-            matrix.clear_rows (constrained_rows, average_nonzero_diagonal_entry);
+    // if no boundary values are to be applied, then
+    // jump straight to the compress() calls that we still have
+    // to perform because they are collective operations
+    if (boundary_values.size() > 0)
+      {
+        const std::pair<types::global_dof_index, types::global_dof_index> local_range
+          = matrix.local_range();
+        Assert (local_range == right_hand_side.local_range(),
+                ExcInternalError());
+        Assert (local_range == solution.local_range(),
+                ExcInternalError());
+
+        // determine the first nonzero diagonal
+        // entry from within the part of the
+        // matrix that we can see. if we can't
+        // find such an entry, take one
+        PetscScalar average_nonzero_diagonal_entry = 1;
+        for (types::global_dof_index i=local_range.first; i<local_range.second; ++i)
+          if (matrix.diag_element(i) != PetscScalar())
+            {
+              average_nonzero_diagonal_entry = std::abs(matrix.diag_element(i));
+              break;
+            }
 
-            std::vector<types::global_dof_index> indices;
-            std::vector<PetscScalar>             solution_values;
-            for (std::map<types::global_dof_index,PetscScalar>::const_iterator
-                 dof  = boundary_values.begin();
-                 dof != boundary_values.end();
-                 ++dof)
-              if ((dof->first >= local_range.first) &&
-                  (dof->first < local_range.second))
-                {
-                  indices.push_back (dof->first);
-                  solution_values.push_back (dof->second);
-                }
-            solution.set (indices, solution_values);
+        // figure out which rows of the matrix we
+        // have to eliminate on this processor
+        std::vector<types::global_dof_index> constrained_rows;
+        for (std::map<types::global_dof_index,PetscScalar>::const_iterator
+             dof  = boundary_values.begin();
+             dof != boundary_values.end();
+             ++dof)
+          if ((dof->first >= local_range.first) &&
+              (dof->first < local_range.second))
+            constrained_rows.push_back (dof->first);
 
-            // now also set appropriate values for
-            // the rhs
-            for (unsigned int i=0; i<solution_values.size(); ++i)
-              solution_values[i] *= average_nonzero_diagonal_entry;
+        // then eliminate these rows and set
+        // their diagonal entry to what we have
+        // determined above. note that for petsc
+        // matrices interleaving read with write
+        // operations is very expensive. thus, we
+        // here always replace the diagonal
+        // element, rather than first checking
+        // whether it is nonzero and in that case
+        // preserving it. this is different from
+        // the case of deal.II sparse matrices
+        // treated in the other functions.
+        matrix.clear_rows (constrained_rows, average_nonzero_diagonal_entry);
+
+        std::vector<types::global_dof_index> indices;
+        std::vector<PetscScalar>             solution_values;
+        for (std::map<types::global_dof_index,PetscScalar>::const_iterator
+             dof  = boundary_values.begin();
+             dof != boundary_values.end();
+             ++dof)
+          if ((dof->first >= local_range.first) &&
+              (dof->first < local_range.second))
+            {
+              indices.push_back (dof->first);
+              solution_values.push_back (dof->second);
+            }
+        solution.set (indices, solution_values);
 
-            right_hand_side.set (indices, solution_values);
-          }
-        else
-          {
-            // clear_rows() is a collective operation so we still have to call
-            // it:
-            std::vector<types::global_dof_index> constrained_rows;
-            matrix.clear_rows (constrained_rows, 1.);
-          }
+        // now also set appropriate values for
+        // the rhs
+        for (unsigned int i=0; i<solution_values.size(); ++i)
+          solution_values[i] *= average_nonzero_diagonal_entry;
+
+        right_hand_side.set (indices, solution_values);
+      }
+    else
+      {
+        // clear_rows() is a collective operation so we still have to call
+        // it:
+        std::vector<types::global_dof_index> constrained_rows;
+        matrix.clear_rows (constrained_rows, 1.);
+      }
 
-        // clean up
-        solution.compress (VectorOperation::insert);
-        right_hand_side.compress (VectorOperation::insert);
-      }
-    }
-  }
-
-
-
-  void
-  apply_boundary_values (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
-                         PETScWrappers::SparseMatrix  &matrix,
-                         PETScWrappers::Vector        &solution,
-                         PETScWrappers::Vector        &right_hand_side,
-                         const bool                    eliminate_columns)
-  {
-    // simply redirect to the generic function
-    // used for both petsc matrix types
-    internal::PETScWrappers::apply_boundary_values (boundary_values, matrix, solution,
-                                                    right_hand_side, eliminate_columns);
-  }
-
-
-
-  void
-  apply_boundary_values (const std::map<types::global_dof_index,PetscScalar> &boundary_values,
-                         PETScWrappers::MPI::SparseMatrix  &matrix,
-                         PETScWrappers::MPI::Vector        &solution,
-                         PETScWrappers::MPI::Vector        &right_hand_side,
-                         const bool                         eliminate_columns)
-  {
-    // simply redirect to the generic function
-    // used for both petsc matrix types
-    internal::PETScWrappers::apply_boundary_values (boundary_values, matrix, solution,
-                                                    right_hand_side, eliminate_columns);
+    // clean up
+    solution.compress (VectorOperation::insert);
+    right_hand_side.compress (VectorOperation::insert);
   }
 
 
@@ -239,11 +200,11 @@ namespace MatrixTools
     // the diagonal subblocks and the
     // solution/rhs.
     for (unsigned int block=0; block
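
Usage note (not part of the commit): after this change a single overload accepts any
PETSc matrix and vector types derived from PETScWrappers::MatrixBase and
PETScWrappers::VectorBase, sequential or MPI-parallel. The sketch below is a minimal
illustration of a typical caller in the spirit of step-17; it assumes a real-valued
PETSc build (PetscScalar == double), and the helper name constrain_boundary as well
as the boundary id 0 are hypothetical. Column elimination is not implemented for
PETSc matrices, so the last argument has to be false.

    #include <deal.II/base/function.h>
    #include <deal.II/dofs/dof_handler.h>
    #include <deal.II/lac/petsc_matrix_base.h>
    #include <deal.II/lac/petsc_vector_base.h>
    #include <deal.II/numerics/matrix_tools.h>
    #include <deal.II/numerics/vector_tools.h>

    using namespace dealii;

    // Apply homogeneous Dirichlet values on boundary id 0 to an already
    // assembled PETSc linear system. The same code now works whether the
    // objects are PETScWrappers::SparseMatrix/Vector or the
    // PETScWrappers::MPI::* types, since all derive from the base classes
    // used in the new signature.
    template <int dim>
    void constrain_boundary (const DoFHandler<dim>     &dof_handler,
                             PETScWrappers::MatrixBase &system_matrix,
                             PETScWrappers::VectorBase &solution,
                             PETScWrappers::VectorBase &system_rhs)
    {
      std::map<types::global_dof_index, PetscScalar> boundary_values;
      VectorTools::interpolate_boundary_values (dof_handler,
                                                0,
                                                Functions::ZeroFunction<dim>(),
                                                boundary_values);
      // eliminate_columns == true is not implemented for PETSc matrices.
      MatrixTools::apply_boundary_values (boundary_values,
                                          system_matrix,
                                          solution,
                                          system_rhs,
                                          false);
    }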