From: Timo Heister
Date: Thu, 18 Apr 2013 23:21:13 +0000 (+0000)
Subject: make step-40 more pretty (trilinos and petsc working)
X-Git-Tag: v8.0.0~120^2~72
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=1e744e5ce76113a6e8c7b449f0ab9bd6e52c67f1;p=dealii.git

make step-40 more pretty (trilinos and petsc working)

git-svn-id: https://svn.dealii.org/branches/branch_unify_linear_algebra@29339 0785d39b-7218-0410-832d-ea1e28bc413d
---

diff --git a/deal.II/examples/step-40/step-40.cc b/deal.II/examples/step-40/step-40.cc
index f8a90f5a9e..9e4b3334f3 100644
--- a/deal.II/examples/step-40/step-40.cc
+++ b/deal.II/examples/step-40/step-40.cc
@@ -245,11 +245,9 @@ namespace Step40
     // locally owned cells (of course the linear solvers will read from it,
     // but they do not care about the geometric location of degrees of
     // freedom).
-    locally_relevant_solution.reinit (mpi_communicator,
-                                      locally_owned_dofs,
-                                      locally_relevant_dofs);
-    system_rhs.reinit (mpi_communicator,
-                       locally_owned_dofs);
+    locally_relevant_solution.reinit (locally_owned_dofs,
+                                      locally_relevant_dofs, mpi_communicator);
+    system_rhs.reinit (locally_owned_dofs, mpi_communicator);

     system_rhs = 0;

@@ -302,30 +300,19 @@ namespace Step40
     // entries that will exist in that part of the finite element matrix that
     // it will own. The final step is to initialize the matrix with the
     // sparsity pattern.
-    CompressedSimpleSparsityPattern csp (dof_handler.n_dofs(),
-                                         dof_handler.n_dofs(),
-                                         locally_relevant_dofs);
-    DoFTools::make_sparsity_pattern (dof_handler,
-                                     csp,
+    CompressedSimpleSparsityPattern csp (locally_relevant_dofs);
+
+    DoFTools::make_sparsity_pattern (dof_handler, csp,
                                      constraints, false);
     SparsityTools::distribute_sparsity_pattern (csp,
                                                 dof_handler.n_locally_owned_dofs_per_processor(),
                                                 mpi_communicator,
                                                 locally_relevant_dofs);
-#ifdef USE_PETSC_LA
-    system_matrix.reinit (mpi_communicator,
-                          csp,
-                          dof_handler.n_locally_owned_dofs_per_processor(),
-                          dof_handler.n_locally_owned_dofs_per_processor(),
-                          Utilities::MPI::this_mpi_process(mpi_communicator));
-#else
     system_matrix.reinit (locally_owned_dofs,
                           locally_owned_dofs,
                           csp,
-                          mpi_communicator,
-                          false);
-#endif
+                          mpi_communicator);
   }



@@ -453,8 +440,7 @@ namespace Step40
   void LaplaceProblem<dim>::solve ()
   {
     LA::MPI::Vector
-      completely_distributed_solution (mpi_communicator,
-                                       locally_owned_dofs);
+      completely_distributed_solution (locally_owned_dofs, mpi_communicator);

     SolverControl solver_control (dof_handler.n_dofs(), 1e-12);
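
A minimal sketch of how the reinitialized objects fit together after this change, using only the calls visible in the hunks above; the class members (dof_handler, constraints, locally_owned_dofs, locally_relevant_dofs, mpi_communicator) and the LA::MPI::Vector / LA::MPI::SparseMatrix wrappers are assumed from the rest of step-40 and are not part of this patch:

    // Sketch, assuming the step-40 class members and LA::MPI wrapper types.

    // Ghosted solution vector: owned range plus relevant ghost entries; the
    // communicator now comes last for both the PETSc and Trilinos backends.
    locally_relevant_solution.reinit (locally_owned_dofs,
                                      locally_relevant_dofs,
                                      mpi_communicator);

    // The right-hand side only needs the locally owned range.
    system_rhs.reinit (locally_owned_dofs, mpi_communicator);

    // Build the sparsity pattern on the locally relevant DoFs, then
    // distribute it so each process knows the entries of the rows it owns.
    CompressedSimpleSparsityPattern csp (locally_relevant_dofs);
    DoFTools::make_sparsity_pattern (dof_handler, csp, constraints, false);
    SparsityTools::distribute_sparsity_pattern (csp,
                                                dof_handler.n_locally_owned_dofs_per_processor(),
                                                mpi_communicator,
                                                locally_relevant_dofs);

    // A single reinit call now covers both linear algebra backends, so the
    // #ifdef USE_PETSC_LA branch removed above is no longer needed.
    system_matrix.reinit (locally_owned_dofs,
                          locally_owned_dofs,
                          csp,
                          mpi_communicator);

Putting the index sets first and the communicator last gives the PETSc and Trilinos wrappers the same call signature, which appears to be what lets the preprocessor branch go away.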