From 54d1f8c5f2cf007d8d209aee55fc0038c8ace10b Mon Sep 17 00:00:00 2001
From: Timo Heister
Date: Sat, 3 Nov 2012 03:27:29 +0000
Subject: [PATCH] MPI_InitFinalize can now also initialize PETSc.

git-svn-id: https://svn.dealii.org/trunk@27328 0785d39b-7218-0410-832d-ea1e28bc413d
---
 deal.II/doc/news/changes.h          |  5 +++++
 deal.II/examples/step-17/step-17.cc | 12 +++++-------
 deal.II/examples/step-18/step-18.cc |  5 +----
 deal.II/examples/step-40/step-40.cc | 13 +++++--------
 deal.II/include/deal.II/base/mpi.h  |  6 ++++++
 deal.II/source/base/mpi.cc          | 19 +++++++++++++++++++
 6 files changed, 41 insertions(+), 19 deletions(-)

diff --git a/deal.II/doc/news/changes.h b/deal.II/doc/news/changes.h
index 9ff00210fb..ad8e51d12e 100644
--- a/deal.II/doc/news/changes.h
+++ b/deal.II/doc/news/changes.h
@@ -108,6 +108,11 @@ never working correctly and it is not used.
    +
+<li> The class Utilities::MPI::MPI_InitFinalize now also initializes
+PETSc, when PETSc is installed.
+<br>
+(Timo Heister, 2012/11/02)
 
 <li> step-6 now uses ConstraintMatrix::distribute_local_to_global()
 instead of condense(), which is the preferred way to use a
 ConstraintMatrix (and the only sensible way in parallel).

diff --git a/deal.II/examples/step-17/step-17.cc b/deal.II/examples/step-17/step-17.cc
index a93c3849a8..bdb6faaed6 100644
--- a/deal.II/examples/step-17/step-17.cc
+++ b/deal.II/examples/step-17/step-17.cc
@@ -1226,14 +1226,14 @@ int main (int argc, char **argv)
       // Here is the only real difference:
       // PETSc requires that we initialize it
       // at the beginning of the program, and
-      // un-initialize it at the end. So we
-      // call PetscInitialize and
-      // PetscFinalize. The original code
+      // un-initialize it at the end. The
+      // class MPI_InitFinalize takes care
+      // of that. The original code
       // sits in between, enclosed in braces
       // to make sure that the
       // elastic_problem variable goes
       // out of scope (and is destroyed)
-      // before we call
+      // before PETSc is closed with
       // PetscFinalize. (If we wouldn't
       // use braces, the destructor of
       // elastic_problem would run after
@@ -1241,7 +1241,7 @@ int main (int argc, char **argv)
       // destructor involves calls to PETSc
       // functions, we would get strange
       // error messages from PETSc.)
-      PetscInitialize(&argc,&argv,0,0);
+      Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv);
 
       {
         deallog.depth_console (0);
@@ -1249,8 +1249,6 @@
         ElasticProblem<2> elastic_problem;
         elastic_problem.run ();
       }
-
-      PetscFinalize();
     }
   catch (std::exception &exc)
     {
diff --git a/deal.II/examples/step-18/step-18.cc b/deal.II/examples/step-18/step-18.cc
index bc051f79c6..e7e1cd304b 100644
--- a/deal.II/examples/step-18/step-18.cc
+++ b/deal.II/examples/step-18/step-18.cc
@@ -2821,16 +2821,13 @@ int main (int argc, char **argv)
       using namespace dealii;
       using namespace Step18;
 
-      PetscInitialize(&argc,&argv,0,0);
-
+      Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv);
       {
         deallog.depth_console (0);
 
         TopLevel<3> elastic_problem;
         elastic_problem.run ();
       }
-
-      PetscFinalize();
     }
   catch (std::exception &exc)
     {
diff --git a/deal.II/examples/step-40/step-40.cc b/deal.II/examples/step-40/step-40.cc
index 3b201990e5..229b562d29 100644
--- a/deal.II/examples/step-40/step-40.cc
+++ b/deal.II/examples/step-40/step-40.cc
@@ -881,17 +881,16 @@ namespace Step40
   // step-6. Like in the other programs
   // that use PETSc, we have to
   // inialize and finalize PETSc, which
-  // also initializes and finalizes the
-  // MPI subsystem.
+  // is done using the helper object
+  // MPI_InitFinalize.
   //
   // Note how we enclose the use the
   // use of the LaplaceProblem class in
   // a pair of braces. This makes sure
   // that all member variables of the
   // object are destroyed by the time
-  // we hit the
-  // PetscFinalize
-  // call. Not doing this will lead to
+  // we destroy the mpi_initialization
+  // object. Not doing this will lead to
   // strange and hard to debug errors
   // when PetscFinalize
   // first deletes all PETSc vectors
@@ -906,15 +905,13 @@ int main(int argc, char *argv[])
       using namespace dealii;
       using namespace Step40;
 
-      PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);
+      Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv);
       deallog.depth_console (0);
 
       {
         LaplaceProblem<2> laplace_problem_2d;
         laplace_problem_2d.run ();
       }
-
-      PetscFinalize();
     }
   catch (std::exception &exc)
     {
diff --git a/deal.II/include/deal.II/base/mpi.h b/deal.II/include/deal.II/base/mpi.h
index 2fdb14da05..6cf6a981ae 100644
--- a/deal.II/include/deal.II/base/mpi.h
+++ b/deal.II/include/deal.II/base/mpi.h
@@ -267,6 +267,12 @@ namespace Utilities
      * program and to shut it down again at
      * the end.
      *
+     * If deal.II is configured with PETSc,
+     * the library will also be initialized
+     * at the beginning and finalized at the
+     * end automatically (internally by calling
+     * PetscInitialize() and PetscFinalize()).
+     *
      * If a program uses MPI one would
      * typically just create an object of
      * this type at the beginning of
diff --git a/deal.II/source/base/mpi.cc b/deal.II/source/base/mpi.cc
index fe721b947e..c72910dc30 100644
--- a/deal.II/source/base/mpi.cc
+++ b/deal.II/source/base/mpi.cc
@@ -27,6 +27,13 @@
 # endif
 #endif
 
+#ifdef DEAL_II_USE_PETSC
+# ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+#  include <petscsys.h>
+# endif
+#endif
+
+
 DEAL_II_NAMESPACE_OPEN
 
 
@@ -299,6 +306,11 @@ namespace Utilities
                            "in a program since it initializes the MPI system."));
 
 #ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+      // if we have PETSc, we will initialize it and let it handle MPI.
+      // Otherwise, we will do it.
+#ifdef DEAL_II_USE_PETSC
+      PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL);
+#else
       int MPI_has_been_started = 0;
       MPI_Initialized(&MPI_has_been_started);
       AssertThrow (MPI_has_been_started == 0,
@@ -308,6 +320,7 @@ namespace Utilities
       mpi_err = MPI_Init (&argc, &argv);
       AssertThrow (mpi_err == 0,
                    ExcMessage ("MPI could not be initialized."));
+#endif
 #else
       // make sure the compiler doesn't warn
       // about these variables
@@ -350,6 +363,11 @@ namespace Utilities
         ::release_unused_memory ();
 # endif
 
+#ifdef DEAL_II_USE_PETSC
+      PetscFinalize();
+#else
+
+
       int mpi_err = 0;
 
       int MPI_has_been_started = 0;
@@ -371,6 +389,7 @@ namespace Utilities
 
       AssertThrow (mpi_err == 0,
                    ExcMessage ("An error occurred while calling MPI_Finalize()"));
+#endif
 #endif
     }
-- 
2.39.5
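
Usage sketch (editorial addition, not part of the commit): with this patch, a
PETSc-based deal.II program no longer calls PetscInitialize() and
PetscFinalize() itself. A single Utilities::MPI::MPI_InitFinalize object at
the top of main() initializes MPI and, when deal.II is configured with PETSc,
PETSc as well; its destructor finalizes both. The braces are still needed so
that any object holding PETSc data is destroyed before that destructor runs.
MyProblem below is a hypothetical stand-in for a solver class such as
ElasticProblem or LaplaceProblem from the examples above:

    #include <deal.II/base/mpi.h>

    // Hypothetical stand-in for a solver class that owns PETSc objects.
    struct MyProblem
    {
      void run () {}
    };

    int main (int argc, char **argv)
    {
      // Initializes MPI (and PETSc, if available); the destructor, which
      // runs at the end of main(), finalizes both.
      dealii::Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv);

      {
        MyProblem problem;
        problem.run ();
      }   // 'problem' is destroyed here, before PETSc is finalized
    }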