From e5471763639d0cbd7633d2e77bd2f32305ecfb3a Mon Sep 17 00:00:00 2001
From: Denis Davydov
Date: Tue, 1 Nov 2016 16:35:25 +0100
Subject: [PATCH] add PArpackSolver::reinit(const VectorType &)

---
 doc/news/changes.h                            |   7 +
 include/deal.II/lac/parpack_solver.h          |  19 +-
 tests/arpack/step-36_parpack_mf.cc            | 241 ++++++++++++++++++
 ...h_arpack_with_parpack=true.mpirun=3.output |   6 +
 4 files changed, 271 insertions(+), 2 deletions(-)
 create mode 100644 tests/arpack/step-36_parpack_mf.cc
 create mode 100644 tests/arpack/step-36_parpack_mf.with_mpi=true.with_arpack_with_parpack=true.mpirun=3.output

diff --git a/doc/news/changes.h b/doc/news/changes.h
index 03e342ca6d..707f71548b 100644
--- a/doc/news/changes.h
+++ b/doc/news/changes.h
@@ -405,6 +405,13 @@ inconvenience this causes.

Specific improvements

+
+ New: Add PArpackSolver::reinit(const VectorType &distributed_vector) to
+ initialize internal data structures based on a vector. This makes PArpack
+ usable with MatrixFree operators.
+
+ (Denis Davydov, 2016/10/31)
+
 New: Add MatrixFreeOperators::LaplaceOperator representing a Laplace matrix.
 (Denis Davydov, 2016/10/30)
diff --git a/include/deal.II/lac/parpack_solver.h b/include/deal.II/lac/parpack_solver.h
index 950edc9ece..1ff63b949b 100644
--- a/include/deal.II/lac/parpack_solver.h
+++ b/include/deal.II/lac/parpack_solver.h
@@ -245,7 +245,7 @@ public:
                  const AdditionalData &data = AdditionalData());
 
   /**
-   * Initialise internal variables.
+   * Initialize internal variables.
    */
   void reinit(const IndexSet &locally_owned_dofs );
 
@@ -258,6 +258,11 @@ public:
   void reinit(const IndexSet &locally_owned_dofs,
               const std::vector<IndexSet> &partitioning);
 
+  /**
+   * Initialize internal variables from the input @p distributed_vector.
+   */
+  void reinit(const VectorType &distributed_vector);
+
   /**
    * Set initial vector for building Krylov space.
    */
@@ -601,9 +606,20 @@ void PArpackSolver<VectorType>::reinit(const IndexSet &locally_owned_dofs)
   src.reinit (locally_owned_dofs,mpi_communicator);
   dst.reinit (locally_owned_dofs,mpi_communicator);
   tmp.reinit (locally_owned_dofs,mpi_communicator);
+}
+
+template <typename VectorType>
+void PArpackSolver<VectorType>::reinit(const VectorType &distributed_vector)
+{
+  internal_reinit(distributed_vector.locally_owned_elements());
+
+  // deal.II vectors:
+  src.reinit (distributed_vector);
+  dst.reinit (distributed_vector);
+  tmp.reinit (distributed_vector);
 }
 
+
 template <typename VectorType>
 void PArpackSolver<VectorType>::reinit(const IndexSet &locally_owned_dofs,
                                        const std::vector<IndexSet> &partitioning)
@@ -614,7 +630,6 @@ void PArpackSolver<VectorType>::reinit(const IndexSet &locally_owned_dofs,
   src.reinit (partitioning,mpi_communicator);
   dst.reinit (partitioning,mpi_communicator);
   tmp.reinit (partitioning,mpi_communicator);
-
 }
 
 template <typename VectorType>
diff --git a/tests/arpack/step-36_parpack_mf.cc b/tests/arpack/step-36_parpack_mf.cc
new file mode 100644
index 0000000000..d0e6f587de
--- /dev/null
+++ b/tests/arpack/step-36_parpack_mf.cc
@@ -0,0 +1,241 @@
+/* ---------------------------------------------------------------------
+ *
+ * Copyright (C) 2016 by the deal.II authors
+ *
+ * This file is part of the deal.II library.
+ *
+ * The deal.II library is free software; you can use it, redistribute
+ * it, and/or modify it under the terms of the GNU Lesser General
+ * Public License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ * The full text of the license can be found in the file LICENSE at
+ * the top level of the deal.II distribution.
+ *
+ * ---------------------------------------------------------------------
+
+ *
+ * This file tests the PARPACK interface for a symmetric operator taken from step-36
+ * using matrix-free operators.
+ *
+ * We test that the computed vectors are eigenvectors and mass-orthonormal, i.e.
+ * a) (A*x_i-\lambda*B*x_i).L2() == 0
+ * b) x_j*B*x_i = \delta_{i,j}
+ *
+ */
+
+#include "../tests.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+
+#include
+#include
+#include
+#include
+
+#include
+
+#include
+#include
+
+
+const unsigned int dim = 2;
+
+using namespace dealii;
+
+const double eps = 1e-10;
+
+const unsigned int fe_degree = 1;
+
+void test ()
+{
+  const unsigned int global_mesh_refinement_steps = 5;
+  const unsigned int number_of_eigenvalues = 5;
+
+  MPI_Comm mpi_communicator = MPI_COMM_WORLD;
+  const unsigned int n_mpi_processes = Utilities::MPI::n_mpi_processes(mpi_communicator);
+  const unsigned int this_mpi_process = Utilities::MPI::this_mpi_process(mpi_communicator);
+
+  parallel::distributed::Triangulation<dim> triangulation (mpi_communicator);
+  GridGenerator::hyper_cube (triangulation, -1, 1);
+  triangulation.refine_global (global_mesh_refinement_steps);
+
+
+  DoFHandler<dim> dof_handler(triangulation);
+  FE_Q<dim> fe(fe_degree);
+  dof_handler.distribute_dofs (fe);
+
+
+  IndexSet locally_relevant_dofs;
+  DoFTools::extract_locally_relevant_dofs (dof_handler,
+                                           locally_relevant_dofs);
+  ConstraintMatrix constraints;
+  constraints.reinit (locally_relevant_dofs);
+  DoFTools::make_hanging_node_constraints (dof_handler, constraints);
+  VectorTools::interpolate_boundary_values (dof_handler,
+                                            0,
+                                            ZeroFunction<dim> (),
+                                            constraints);
+  constraints.close ();
+
+  MatrixFree<dim,double> mf_data;
+  {
+    const QGauss<1> quad (fe_degree+1);
+    typename MatrixFree<dim,double>::AdditionalData data;
+    data.mpi_communicator = mpi_communicator;
+    data.tasks_parallel_scheme =
+      MatrixFree<dim,double>::AdditionalData::partition_color;
+    data.mapping_update_flags = update_values | update_gradients | update_JxW_values;
+    mf_data.reinit (dof_handler, constraints, quad, data);
+  }
+
+
+  std::vector<LinearAlgebra::distributed::Vector<double> > eigenfunctions;
+  std::vector<double> eigenvalues;
+  MatrixFreeOperators::MassOperator<dim,fe_degree> mass;
+  MatrixFreeOperators::LaplaceOperator<dim,fe_degree> laplace;
+  mass.initialize(mf_data);
+  laplace.initialize(mf_data);
+
+  eigenfunctions.resize (number_of_eigenvalues);
+  eigenvalues.resize (number_of_eigenvalues);
+  for (unsigned int i=0; i<eigenfunctions.size(); ++i)
+    mf_data.initialize_dof_vector (eigenfunctions[i]);
+
+  {
+    std::vector<std::complex<double> > lambda(number_of_eigenvalues);
+
+    SolverControl solver_control     (dof_handler.n_dofs(), 1e-9,/*log_history*/false,/*log_results*/false);
+    SolverControl solver_control_lin (dof_handler.n_dofs(), 1e-10,/*log_history*/false,/*log_results*/false);
+
+
+    PreconditionIdentity preconditioner;
+    IterativeInverse<LinearAlgebra::distributed::Vector<double> > shift_and_invert;
+    shift_and_invert.initialize(laplace,preconditioner);
+    shift_and_invert.solver.select("cg");
+    static ReductionControl inner_control_c(dof_handler.n_dofs(), 0.0, 1.e-13);
+    shift_and_invert.solver.set_control(inner_control_c);
+
+    const unsigned int num_arnoldi_vectors = 2*eigenvalues.size() + 2;
+
+    PArpackSolver<LinearAlgebra::distributed::Vector<double> >::AdditionalData
+    additional_data(num_arnoldi_vectors,
+                    PArpackSolver<LinearAlgebra::distributed::Vector<double> >::largest_magnitude,
+                    true);
+
+    PArpackSolver<LinearAlgebra::distributed::Vector<double> > eigensolver
+    (solver_control,
+     mpi_communicator,
+     additional_data);
+    eigensolver.reinit(eigenfunctions[0]);
+    // make sure initial vector is orthogonal to the space due to constraints
+    {
+      LinearAlgebra::distributed::Vector<double> init_vector;
+      mf_data.initialize_dof_vector(init_vector);
+      init_vector = 1.;
+      constraints.set_zero(init_vector);
+      eigensolver.set_initial_vector(init_vector);
+    }
+    // avoid output of iterative solver:
+    const unsigned int previous_depth = deallog.depth_file(0);
+    eigensolver.solve (laplace,
+                       mass,
+                       shift_and_invert,
+                       lambda,
+                       eigenfunctions,
+                       eigenvalues.size());
+    deallog.depth_file(previous_depth);
+
+    for (unsigned int i = 0; i < lambda.size(); i++)
+      eigenvalues[i] = lambda[i].real();
+
+    for (unsigned int i=0; i < eigenvalues.size(); i++)
+      deallog << eigenvalues[i] << std::endl;
+
+    // make sure that we have eigenvectors and they are mass-orthonormal:
+    // a) (A*x_i-\lambda*B*x_i).L2() == 0
+    // b) x_j*B*x_i=\delta_{ij}
+    {
+      const double precision = 1e-7;
+      LinearAlgebra::distributed::Vector<double> Ax(eigenfunctions[0]), Bx(eigenfunctions[0]);
+      for (unsigned int i=0; i < eigenfunctions.size(); ++i)
+        {
+          mass.vmult(Bx,eigenfunctions[i]);
+
+          for (unsigned int j=0; j < eigenfunctions.size(); j++)
+            Assert( std::abs( eigenfunctions[j] * Bx - (i==j))< precision,
+                    ExcMessage("Eigenvectors " +
+                               Utilities::int_to_string(i) +
+                               " and " +
+                               Utilities::int_to_string(j) +
+                               " are not orthonormal!"));
+
+          laplace.vmult(Ax,eigenfunctions[i]);
+          Ax.add(-1.0*eigenvalues[i],Bx);
+          Assert (Ax.l2_norm() < precision,
+                  ExcMessage("Returned vector " +
+                             Utilities::int_to_string(i) +
+                             " is not an eigenvector!"));
+        }
+    }
+  }
+
+
+  dof_handler.clear ();
+  deallog << "Ok"<<std::endl;
+}
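
Usage note (illustrative only, not part of the patch): the sketch below shows how the new reinit(const VectorType &) overload is intended to be used when the parallel partitioning comes from a MatrixFree object rather than from an IndexSet, mirroring the test above. The helper function name, the operator/inverse template parameters, and the solver tolerances are assumptions made for this example; MPI is assumed to be initialized by the caller.

// Illustrative sketch -- not part of the patch. The operator and inverse
// types and the helper name below are assumptions for the example.
#include <deal.II/base/mpi.h>
#include <deal.II/lac/la_parallel_vector.h>
#include <deal.II/lac/parpack_solver.h>
#include <deal.II/lac/solver_control.h>
#include <deal.II/matrix_free/matrix_free.h>

#include <complex>
#include <vector>

template <int dim, typename OperatorType, typename InverseType>
void eigensolve_matrix_free (const dealii::MatrixFree<dim,double> &mf_data,
                             const OperatorType                   &laplace,
                             const OperatorType                   &mass,
                             const InverseType                    &shift_and_invert,
                             const MPI_Comm                        mpi_communicator,
                             const unsigned int                    n_eigenvalues)
{
  typedef dealii::LinearAlgebra::distributed::Vector<double> VectorType;

  std::vector<VectorType>            eigenfunctions (n_eigenvalues);
  std::vector<std::complex<double> > lambda (n_eigenvalues);
  for (unsigned int i = 0; i < eigenfunctions.size(); ++i)
    mf_data.initialize_dof_vector (eigenfunctions[i]);   // parallel layout from MatrixFree

  dealii::SolverControl solver_control (1000, 1e-9);
  dealii::PArpackSolver<VectorType> eigensolver (solver_control, mpi_communicator);

  // New in this patch: size the internal src/dst/tmp work vectors from an
  // already-initialized distributed vector instead of an IndexSet.
  eigensolver.reinit (eigenfunctions[0]);

  eigensolver.solve (laplace, mass, shift_and_invert,
                     lambda, eigenfunctions, n_eigenvalues);
}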