Add interface for creating matrices of MATIS type.
author     Nicolas Barnafi <nabw91@gmail.com>
           Wed, 6 Apr 2022 13:41:27 +0000 (15:41 +0200)
committer  Nicolas Barnafi <nabw91@gmail.com>
           Wed, 6 Apr 2022 13:42:12 +0000 (15:42 +0200)
include/deal.II/lac/petsc_sparse_matrix.h
source/lac/petsc_parallel_sparse_matrix.cc

include/deal.II/lac/petsc_sparse_matrix.h
index 3b4daed7706369fbc77f986963332e478cce5429..d3b81b0c3715c8050e0cf42b23d40a34f673e3f8 100644
@@ -498,6 +498,24 @@ namespace PETScWrappers
       void
       reinit(const SparseMatrix &other);
 
+      /**
+       * Create a matrix of PETSc type MATIS. The size() of the IndexSets
+       * determines the global number of rows and columns, while the entries
+       * of @p local_rows and @p local_columns give the rows and columns
+       * owned by the calling processor. Note that only ascending, 1:1
+       * IndexSets are supported for these. The additional IndexSets
+       * @p local_active_rows and @p local_active_columns (typically obtained
+       * via DoFTools::extract_locally_active_dofs()) are needed to build the
+       * local-to-global mappings required by a matrix of type IS.
+       * Matrices of this type are required by the BDDC preconditioner.
+       */
+      template <typename SparsityPatternType>
+      void
+      reinit_IS(const IndexSet &           local_rows,
+                const IndexSet &           local_active_rows,
+                const IndexSet &           local_columns,
+                const IndexSet &           local_active_columns,
+                const SparsityPatternType &sparsity_pattern,
+                const MPI_Comm &           communicator);
+
       /**
        * Return a reference to the MPI communicator object in use with this
        * matrix.
@@ -613,6 +631,17 @@ namespace PETScWrappers
                 const IndexSet &           local_columns,
                 const SparsityPatternType &sparsity_pattern);
 
+      /**
+       * Same as the previous do_reinit() functions, but additionally taking
+       * the locally active rows and columns into account in order to create
+       * a matrix of type MATIS.
+       */
+      template <typename SparsityPatternType>
+      void
+      do_reinit_IS(const IndexSet &           local_rows,
+                   const IndexSet &           local_active_rows,
+                   const IndexSet &           local_columns,
+                   const IndexSet &           local_active_columns,
+                   const SparsityPatternType &sparsity_pattern);
+
       // To allow calling protected prepare_add() and prepare_set().
       friend class BlockMatrixBase<SparseMatrix>;
     };
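
Usage note (editorial, not part of the patch): a minimal sketch of how the
reinit_IS() interface declared above might be driven for a parallel
DoFHandler. The 2D DoFHandler, the explicitly passed communicator, and the
use of the same index sets for rows and columns are illustrative
assumptions.

    #include <deal.II/dofs/dof_handler.h>
    #include <deal.II/dofs/dof_tools.h>
    #include <deal.II/lac/dynamic_sparsity_pattern.h>
    #include <deal.II/lac/petsc_sparse_matrix.h>

    void setup_matis_matrix(const dealii::DoFHandler<2> &dof_handler,
                            MPI_Comm                     communicator)
    {
      using namespace dealii;

      // Locally owned rows/columns: ascending and 1:1 across ranks.
      const IndexSet &locally_owned_dofs = dof_handler.locally_owned_dofs();

      // Locally active rows/columns: owned DoFs plus those on the subdomain
      // interface; these define the local-to-global map of the IS matrix.
      IndexSet locally_active_dofs;
      DoFTools::extract_locally_active_dofs(dof_handler, locally_active_dofs);

      // Sparsity pattern whose stored rows are the locally active ones.
      DynamicSparsityPattern dsp(locally_active_dofs);
      DoFTools::make_sparsity_pattern(dof_handler, dsp);

      // Create the MATIS matrix; assembly then proceeds with the usual
      // set()/add() calls followed by compress().
      PETScWrappers::MPI::SparseMatrix system_matrix;
      system_matrix.reinit_IS(locally_owned_dofs,
                              locally_active_dofs,
                              locally_owned_dofs,
                              locally_active_dofs,
                              dsp,
                              communicator);
    }
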
source/lac/petsc_parallel_sparse_matrix.cc
index 0b5e0492dd1cc8230ac5c67e691c67168c812bb5..76b7419099bd8f6e8124295075a11456ecaaed9c 100644
@@ -85,6 +85,28 @@ namespace PETScWrappers
       AssertThrow(ierr == 0, ExcPETScError(ierr));
     }
 
+    template <typename SparsityPatternType>
+    void
+    SparseMatrix::reinit_IS(const IndexSet &           local_rows,
+                            const IndexSet &           local_active_rows,
+                            const IndexSet &           local_columns,
+                            const IndexSet &           local_active_columns,
+                            const SparsityPatternType &sparsity_pattern,
+                            const MPI_Comm &           communicator)
+    {
+      this->communicator = communicator;
+
+      // get rid of old matrix and generate a new one
+      const PetscErrorCode ierr = destroy_matrix(matrix);
+      AssertThrow(ierr == 0, ExcPETScError(ierr));
+
+      do_reinit_IS(local_rows,
+                   local_active_rows,
+                   local_columns,
+                   local_active_columns,
+                   sparsity_pattern);
+    }
+
 
     SparseMatrix &
     SparseMatrix::operator=(const value_type d)
@@ -406,6 +428,203 @@ namespace PETScWrappers
         }
     }
 
+    // Variant of do_reinit() that creates a matrix of type MATIS, as
+    // needed by the BDDC preconditioner.
+    template <typename SparsityPatternType>
+    void
+    SparseMatrix::do_reinit_IS(const IndexSet &           local_rows,
+                               const IndexSet &           local_active_rows,
+                               const IndexSet &           local_columns,
+                               const IndexSet &           local_active_columns,
+                               const SparsityPatternType &sparsity_pattern)
+    {
+      Assert(sparsity_pattern.n_rows() == local_rows.size(),
+             ExcMessage(
+               "SparsityPattern and IndexSet have different number of rows"));
+      Assert(
+        sparsity_pattern.n_cols() == local_columns.size(),
+        ExcMessage(
+          "SparsityPattern and IndexSet have different number of columns"));
+      Assert(local_rows.is_contiguous() && local_columns.is_contiguous(),
+             ExcMessage("PETSc only supports contiguous row/column ranges"));
+      Assert(local_rows.is_ascending_and_one_to_one(communicator),
+             ExcNotImplemented());
+
+#  ifdef DEBUG
+      {
+        // check indexsets
+        types::global_dof_index row_owners =
+          Utilities::MPI::sum(local_rows.n_elements(), communicator);
+        types::global_dof_index col_owners =
+          Utilities::MPI::sum(local_columns.n_elements(), communicator);
+        Assert(row_owners == sparsity_pattern.n_rows(),
+               ExcMessage(
+                 std::string(
+                   "Each row has to be owned by exactly one owner (n_rows()=") +
+                 std::to_string(sparsity_pattern.n_rows()) +
+                 " but sum(local_rows.n_elements())=" +
+                 std::to_string(row_owners) + ")"));
+        Assert(
+          col_owners == sparsity_pattern.n_cols(),
+          ExcMessage(
+            std::string(
+              "Each column has to be owned by exactly one owner (n_cols()=") +
+            std::to_string(sparsity_pattern.n_cols()) +
+            " but sum(local_columns.n_elements())=" +
+            std::to_string(col_owners) + ")"));
+      }
+#  endif
+
+      // Create the local-to-global mappings as arrays of global indices.
+      const IndexSet::size_type n_l2g_row = local_active_rows.n_elements();
+      const IndexSet::size_type n_l2g_col = local_active_columns.n_elements();
+      std::vector<PetscInt>     idx_glob_row(n_l2g_row);
+      std::vector<PetscInt>     idx_glob_col(n_l2g_col);
+      for (IndexSet::size_type k = 0; k < n_l2g_row; ++k)
+        idx_glob_row[k] = local_active_rows.nth_index_in_set(k);
+      for (IndexSet::size_type k = 0; k < n_l2g_col; ++k)
+        idx_glob_col[k] = local_active_columns.nth_index_in_set(k);
+
+
+      IS is_glob_row, is_glob_col;
+      // Create row index set
+      ISLocalToGlobalMapping l2gmap_row;
+      ISCreateGeneral(communicator,
+                      n_l2g_row,
+                      idx_glob_row.data(),
+                      PETSC_COPY_VALUES,
+                      &is_glob_row);
+      ISLocalToGlobalMappingCreateIS(is_glob_row, &l2gmap_row);
+      ISDestroy(&is_glob_row);
+      ISLocalToGlobalMappingViewFromOptions(l2gmap_row, NULL, "-view_map");
+
+      // Create column index set
+      ISLocalToGlobalMapping l2gmap_col;
+      ISCreateGeneral(communicator,
+                      n_l2g_col,
+                      idx_glob_col.data(),
+                      PETSC_COPY_VALUES,
+                      &is_glob_col);
+      ISLocalToGlobalMappingCreateIS(is_glob_col, &l2gmap_col);
+      ISDestroy(&is_glob_col);
+      ISLocalToGlobalMappingViewFromOptions(l2gmap_col, NULL, "-view_map");
+
+      // create the matrix with the IS constructor.
+      PetscErrorCode ierr = MatCreateIS(communicator,
+                                        1,
+                                        local_rows.n_elements(),
+                                        local_columns.n_elements(),
+                                        sparsity_pattern.n_rows(),
+                                        sparsity_pattern.n_cols(),
+                                        l2gmap_row,
+                                        l2gmap_col,
+                                        &matrix);
+      AssertThrow(ierr == 0, ExcPETScError(ierr));
+      ISLocalToGlobalMappingDestroy(&l2gmap_row);
+      ISLocalToGlobalMappingDestroy(&l2gmap_col);
+
+      // Next, preset the nonzero locations of the matrix. This does not
+      // avoid any memory allocations, but it avoids some searches later on:
+      // the key is that we can use routines that set an entire row at once
+      // instead of a single entry at a time.
+      //
+      // In the MATIS case, the preallocation is done on the local
+      // (per-subdomain) matrix rather than on the global one.
+      Mat local_matrix;
+      MatISGetLocalMat(matrix, &local_matrix);
+      MatSetType(local_matrix,
+                 MATSEQAIJ); // sequential, as it is local.
+                             // TODO: allow for OpenMP parallelization
+                             // within the local node.
+      if (local_rows.n_elements() > 0)
+        {
+          // MatSEQAIJSetPreallocationCSR
+          // can be used to allocate the sparsity
+          // pattern of a matrix. Local matrices start from 0 (MATIS).
+          const PetscInt local_row_start = 0;
+          const PetscInt local_row_end   = local_active_rows.n_elements();
+
+          // first set up the column number
+          // array for the rows to be stored
+          // on the local processor.
+          std::vector<PetscInt> rowstart_in_window(local_row_end -
+                                                     local_row_start + 1,
+                                                   0),
+            colnums_in_window;
+          types::global_dof_index global_row_index = 0;
+          {
+            unsigned int n_cols = 0;
+            for (PetscInt i = local_row_start; i < local_row_end; ++i)
+              {
+                global_row_index = local_active_rows.nth_index_in_set(i);
+                const PetscInt row_length =
+                  sparsity_pattern.row_length(global_row_index);
+                rowstart_in_window[i + 1 - local_row_start] =
+                  rowstart_in_window[i - local_row_start] + row_length;
+                n_cols += row_length;
+              }
+            colnums_in_window.resize(n_cols + 1, -1);
+          }
+
+
+          // Now copy over the information from the sparsity pattern. For
+          // this, first invert the column index set, i.e., map each global
+          // column index to its position within local_active_columns.
+          std::map<types::global_dof_index, PetscInt> loc_act_cols_inv;
+          for (IndexSet::size_type i = 0;
+               i < local_active_columns.n_elements();
+               ++i)
+            loc_act_cols_inv[local_active_columns.nth_index_in_set(i)] = i;
+
+          {
+            PetscInt *ptr = colnums_in_window.data();
+            for (PetscInt i = local_row_start; i < local_row_end; ++i)
+              {
+                global_row_index = local_active_rows.nth_index_in_set(i);
+                for (typename SparsityPatternType::iterator p =
+                       sparsity_pattern.begin(global_row_index);
+                     p != sparsity_pattern.end(global_row_index);
+                     ++p, ++ptr)
+                  *ptr = loc_act_cols_inv[p->column()];
+              }
+          }
+
+          // then call the petsc function
+          // that summarily allocates these
+          // entries:
+          ierr = MatSeqAIJSetPreallocationCSR(local_matrix,
+                                              rowstart_in_window.data(),
+                                              colnums_in_window.data(),
+                                              nullptr);
+          AssertThrow(ierr == 0, ExcPETScError(ierr));
+        }
+      else
+        {
+          PetscInt i = 0;
+          ierr = MatSeqAIJSetPreallocationCSR(local_matrix, &i, &i, nullptr);
+          AssertThrow(ierr == 0, ExcPETScError(ierr));
+        }
+      compress(dealii::VectorOperation::insert);
+
+      close_matrix(local_matrix);
+      set_keep_zero_rows(local_matrix);
+      MatISRestoreLocalMat(matrix, &local_matrix);
+    }
+
 #  ifndef DOXYGEN
     // explicit instantiations
     //
@@ -471,6 +690,34 @@ namespace PETScWrappers
     SparseMatrix::do_reinit(const IndexSet &,
                             const IndexSet &,
                             const DynamicSparsityPattern &);
+
+    template void
+    SparseMatrix::reinit_IS(const IndexSet &,
+                            const IndexSet &,
+                            const IndexSet &,
+                            const IndexSet &,
+                            const SparsityPattern &,
+                            const MPI_Comm &);
+    template void
+    SparseMatrix::reinit_IS(const IndexSet &,
+                            const IndexSet &,
+                            const IndexSet &,
+                            const IndexSet &,
+                            const DynamicSparsityPattern &,
+                            const MPI_Comm &);
+
+    template void
+    SparseMatrix::do_reinit_IS(const IndexSet &,
+                               const IndexSet &,
+                               const IndexSet &,
+                               const IndexSet &,
+                               const SparsityPattern &);
+    template void
+    SparseMatrix::do_reinit_IS(const IndexSet &,
+                               const IndexSet &,
+                               const IndexSet &,
+                               const IndexSet &,
+                               const DynamicSparsityPattern &);
 #  endif
 
 

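
Editorial note on the motivation: PETSc's balancing domain decomposition by
constraints preconditioner (PCBDDC) requires its operator to be of type
MATIS, which is exactly what the new reinit_IS() provides. The following is
a hedged sketch of the corresponding raw PETSc solver setup; A is assumed to
be an assembled MATIS matrix, b and x compatible vectors, and error checking
is omitted for brevity.

    #include <petscksp.h>

    // Solve A x = b with BDDC. A is assumed to be an assembled matrix of
    // type MATIS; b and x live on the same communicator.
    void solve_with_bddc(MPI_Comm comm, Mat A, Vec b, Vec x)
    {
      KSP ksp;
      PC  pc;

      KSPCreate(comm, &ksp);
      KSPSetOperators(ksp, A, A); // PCBDDC requires a MATIS operator
      KSPGetPC(ksp, &pc);
      PCSetType(pc, PCBDDC);      // select the BDDC preconditioner
      KSPSetFromOptions(ksp);     // allow -pc_bddc_* runtime tuning
      KSPSolve(ksp, b, x);
      KSPDestroy(&ksp);
    }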