From 804bc488fc236a611d3d3ffbf103d0f9a282e898 Mon Sep 17 00:00:00 2001
From: kronbichler
Date: Fri, 24 Oct 2008 10:36:50 +0000
Subject: [PATCH] Wrote a TrilinosTools class that makes the use of Trilinos a
 bit easier.

git-svn-id: https://svn.dealii.org/trunk@17332 0785d39b-7218-0410-832d-ea1e28bc413d
---
 deal.II/base/include/base/utilities.h | 112 ++++++++++++++++++++++++++
 deal.II/base/source/utilities.cc      |  78 +++++++++++++++++-
 deal.II/examples/step-31/step-31.cc   |  13 +--
 deal.II/examples/step-32/step-32.cc   |  65 +++++----------
 4 files changed, 212 insertions(+), 56 deletions(-)

diff --git a/deal.II/base/include/base/utilities.h b/deal.II/base/include/base/utilities.h
index 439cda2b0e..bc86d128c8 100644
--- a/deal.II/base/include/base/utilities.h
+++ b/deal.II/base/include/base/utilities.h
@@ -26,6 +26,16 @@ typedef int MPI_Comm;
 #endif
 
+#ifdef DEAL_II_USE_TRILINOS
+#  include <Epetra_Comm.h>
+#  include <Teuchos_RCP.hpp>
+#  ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+#    include <Epetra_MpiComm.h>
+#  else
+#    include <Epetra_SerialComm.h>
+#  endif
+#endif
+
 
 DEAL_II_NAMESPACE_OPEN
 
@@ -222,6 +232,108 @@ namespace Utilities
    */
   unsigned int get_this_mpi_process (const MPI_Comm &mpi_communicator);
 }
+
+
+
+  /**
+   * This class provides the basic structures for the use of the
+   * Trilinos classes such as matrices, vectors, and preconditioners.
+   * The most important function in this class is comm(), which is
+   * needed for the initialization of Trilinos Epetra_Maps, which
+   * describe the parallel distribution of vectors and matrices.
+   * Moreover, this class provides a unified interface to both serial
+   * and parallel implementations of Trilinos, sets up the MPI
+   * communicator in case the program is run in parallel, and
+   * correctly terminates all processes when the destructor is
+   * called. An example usage of this class is shown in
+   * @ref step_32 step-32.
+   */
+#ifdef DEAL_II_USE_TRILINOS
+  class TrilinosTools
+  {
+    public:
+      /**
+       * Constructor. Takes the arguments from the command line (in
+       * case of MPI, the number of processes is specified there), and
+       * sets up a respective communicator by calling MPI_Init().
+       */
+      TrilinosTools (int    *argc,
+                     char ***argv);
+
+      /**
+       * Copy constructor. Takes the Trilinos communicator from the
+       * input object and copies all content. Note that the copy
+       * cannot own the MPI process, and hence the original object
+       * needs to stay around as long as the copy.
+       */
+      TrilinosTools (const TrilinosTools &InputTrilinos);
+
+      /**
+       * Destructor. Calls MPI_Finalize() in case this class owns the
+       * MPI process.
+       */
+      ~TrilinosTools ();
+
+      /**
+       * Returns a Trilinos Epetra_Comm object needed for the creation
+       * of Epetra_Maps.
+       */
+      const Epetra_Comm& comm () const;
+
+      /**
+       * Returns whether we are using an MPI version or a serial
+       * version of Trilinos.
+       */
+      bool trilinos_uses_mpi () const;
+
+    private:
+      /**
+       * This flag tells the class whether it owns the MPI process
+       * (i.e., it has been constructed using the argc/argv input
+       * rather than been copied). In the former case,
+       * MPI_Finalize() will be called at destruction.
+       */
+      const bool owns_mpi;
+
+      /**
+       * This flag tells whether we use MPI or not.
+       */
+      const bool use_mpi;
+
+      /**
+       * The actual communicator object. If the program is run in
+       * serial, this is a different communicator type than when
+       * running in parallel.
+       */
+#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+      Teuchos::RCP<Epetra_MpiComm>    communicator;
+#else
+      Teuchos::RCP<Epetra_SerialComm> communicator;
+#endif
+  };
+
+#endif
+
 }
 
diff --git a/deal.II/base/source/utilities.cc b/deal.II/base/source/utilities.cc
index 23076cd993..f65386d5e3 100644
--- a/deal.II/base/source/utilities.cc
+++ b/deal.II/base/source/utilities.cc
@@ -459,7 +459,83 @@ namespace Utilities
     }
 #endif
   }
-
+
+
+
+#ifdef DEAL_II_USE_TRILINOS
+
+
+  TrilinosTools::TrilinosTools (int *argc, char ***argv)
+    :
+    owns_mpi (true),
+#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+    use_mpi (true)
+#else
+    use_mpi (false)
+#endif
+  {
+#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+    int MPI_has_been_started = 0;
+    MPI_Initialized (&MPI_has_been_started);
+    Assert (MPI_has_been_started == 0,
+            ExcMessage ("MPI error. You can only start MPI once!"));
+
+    int mpi_err;
+    mpi_err = MPI_Init (argc, argv);
+    Assert (mpi_err == 0,
+            ExcMessage ("MPI could not be initialized."));
+
+    communicator = Teuchos::rcp (new Epetra_MpiComm (MPI_COMM_WORLD), true);
+#else
+    // Epetra_SerialComm takes no arguments; without MPI support there
+    // is no MPI_COMM_WORLD to pass.
+    communicator = Teuchos::rcp (new Epetra_SerialComm (), true);
+#endif
+  }
+
+
+
+  TrilinosTools::TrilinosTools (const TrilinosTools &InputTrilinos)
+    :
+    owns_mpi (false),
+#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+    use_mpi (true),
+#else
+    use_mpi (false),
+#endif
+    // Create a non-owning pointer to the communicator of the original
+    // object, which consequently must outlive this copy.
+    communicator (&*InputTrilinos.communicator, false)
+  {}
+
+
+
+  TrilinosTools::~TrilinosTools ()
+  {
+    int mpi_err = 0;
+
+#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
+    if (use_mpi == true && owns_mpi == true)
+      mpi_err = MPI_Finalize ();
+#endif
+
+    Assert (mpi_err == 0,
+            ExcMessage ("An error occurred while calling MPI_Finalize()"));
+  }
+
+
+
+  const Epetra_Comm&
+  TrilinosTools::comm () const
+  {
+    return *communicator;
+  }
+
+
+
+  bool
+  TrilinosTools::trilinos_uses_mpi () const
+  {
+    return use_mpi;
+  }
+
+#endif
+
 }
 
 DEAL_II_NAMESPACE_CLOSE
 
diff --git a/deal.II/examples/step-31/step-31.cc b/deal.II/examples/step-31/step-31.cc
index e1ca873c0a..7bea85ab70 100644
--- a/deal.II/examples/step-31/step-31.cc
+++ b/deal.II/examples/step-31/step-31.cc
@@ -2744,17 +2744,12 @@ void BoussinesqFlowProblem<dim>::run ()
 // in serial).
 int main (int argc, char *argv[])
 {
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-  MPI_Init (&argc,&argv);
-#else
-  (void)argc;
-  (void)argv;
-#endif
-
   try
     {
       deallog.depth_console (0);
 
+      Utilities::TrilinosTools trilinos (&argc, &argv);
+
       BoussinesqFlowProblem<2> flow_problem;
       flow_problem.run ();
     }
@@ -2782,10 +2777,6 @@ int main (int argc, char *argv[])
                 << std::endl;
       return 1;
     }
-
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-  MPI_Finalize();
-#endif
 
   return 0;
 }
diff --git a/deal.II/examples/step-32/step-32.cc b/deal.II/examples/step-32/step-32.cc
index 55426bf18a..01b0ebd959 100644
--- a/deal.II/examples/step-32/step-32.cc
+++ b/deal.II/examples/step-32/step-32.cc
@@ -54,12 +54,6 @@
 #include
 #include
 
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-#  include <Epetra_MpiComm.h>
-#else
-#  include <Epetra_SerialComm.h>
-#endif
-
 #include
 #include
 
@@ -264,7 +258,7 @@ template <int dim>
 class BoussinesqFlowProblem
 {
   public:
-    BoussinesqFlowProblem ();
+    BoussinesqFlowProblem (Utilities::TrilinosTools &trilinos_tools);
     void run ();
 
   private:
@@ -297,11 +291,7 @@ class BoussinesqFlowProblem
                                 const double old_time_step);
 
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-    Epetra_MpiComm trilinos_communicator;
-#else
-    Epetra_SerialComm trilinos_communicator;
-#endif
+    Utilities::TrilinosTools trilinos_tools;
 
     ConditionalOStream pcout;
 
@@ -353,12 +343,10 @@ class BoussinesqFlowProblem
 // @sect4{BoussinesqFlowProblem::BoussinesqFlowProblem}
 
 template <int dim>
-BoussinesqFlowProblem<dim>::BoussinesqFlowProblem ()
+BoussinesqFlowProblem<dim>::BoussinesqFlowProblem (Utilities::TrilinosTools &trilinos_tools)
                 :
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-                trilinos_communicator (MPI_COMM_WORLD),
-#endif
-                pcout (std::cout, trilinos_communicator.MyPID()==0),
+                trilinos_tools (trilinos_tools),
+                pcout (std::cout, trilinos_tools.comm().MyPID()==0),
 
                 triangulation (Triangulation<dim>::maximum_smoothing),
 
@@ -371,7 +359,7 @@ BoussinesqFlowProblem<dim>::BoussinesqFlowProblem ()
                 temperature_fe (temperature_degree),
                 temperature_dof_handler (triangulation),
 
-                temperature_partitioner (0, 0, trilinos_communicator),
+                temperature_partitioner (0, 0, trilinos_tools.comm()),
 
                 time_step (0),
                 old_time_step (0),
@@ -401,7 +389,7 @@ double BoussinesqFlowProblem<dim>::get_maximal_velocity () const
     cell = stokes_dof_handler.begin_active(),
     endc = stokes_dof_handler.end();
   for (; cell!=endc; ++cell)
-    if (cell->subdomain_id() == (unsigned int)trilinos_communicator.MyPID())
+    if (cell->subdomain_id() == (unsigned int)trilinos_tools.comm().MyPID())
       {
         fe_values.reinit (cell);
         fe_values.get_function_values (localized_stokes_solution, stokes_values);
@@ -568,7 +556,7 @@ void BoussinesqFlowProblem<dim>::setup_dofs ()
   std::vector<unsigned int> stokes_sub_blocks (dim+1,0);
   stokes_sub_blocks[dim] = 1;
-  GridTools::partition_triangulation (trilinos_communicator.NumProc(),
+  GridTools::partition_triangulation (trilinos_tools.comm().NumProc(),
                                       triangulation);
 
   {
@@ -623,7 +611,7 @@ void BoussinesqFlowProblem<dim>::setup_dofs ()
     std::vector<unsigned int> local_dofs (dim+1);
     DoFTools::
       count_dofs_with_subdomain_association (stokes_dof_handler,
-                                             trilinos_communicator.MyPID(),
+                                             trilinos_tools.comm().MyPID(),
                                              local_dofs);
     unsigned int n_local_velocities = 0;
     for (unsigned int c=0; c<dim; ++c)
       n_local_velocities += local_dofs[c];
 
     const unsigned int n_local_pressures = local_dofs[dim];
 
-    Epetra_Map map_u(n_u, n_local_velocities, 0, trilinos_communicator);
+    Epetra_Map map_u(n_u, n_local_velocities, 0, trilinos_tools.comm());
     stokes_partitioner.push_back (map_u);
-    Epetra_Map map_p(n_p, n_local_pressures, 0, trilinos_communicator);
+    Epetra_Map map_p(n_p, n_local_pressures, 0, trilinos_tools.comm());
     stokes_partitioner.push_back (map_p);
   }
   {
 
@@ -695,9 +683,9 @@ void BoussinesqFlowProblem<dim>::setup_dofs ()
       = Epetra_Map (n_T,
                     DoFTools::count_dofs_with_subdomain_association
                       (temperature_dof_handler,
-                       trilinos_communicator.MyPID()),
+                       trilinos_tools.comm().MyPID()),
                     0,
-                    trilinos_communicator);
+                    trilinos_tools.comm());
 
   {
     temperature_mass_matrix.clear ();
     temperature_stiffness_matrix.clear ();
@@ -751,7 +739,7 @@ BoussinesqFlowProblem<dim>::assemble_stokes_preconditioner ()
     cell = stokes_dof_handler.begin_active(),
     endc = stokes_dof_handler.end();
   for (; cell!=endc; ++cell)
-    if (cell->subdomain_id() == (unsigned int)trilinos_communicator.MyPID())
+    if (cell->subdomain_id() == (unsigned int)trilinos_tools.comm().MyPID())
      {
        stokes_fe_values.reinit (cell);
 
        local_matrix = 0;
@@ -876,7 +864,7 @@ void BoussinesqFlowProblem<dim>::assemble_stokes_system ()
     temperature_cell = temperature_dof_handler.begin_active();
 
   for (; cell!=endc; ++cell, ++temperature_cell)
-    if (cell->subdomain_id() == (unsigned int)trilinos_communicator.MyPID())
+    if (cell->subdomain_id() == (unsigned int)trilinos_tools.comm().MyPID())
       {
         stokes_fe_values.reinit (cell);
         temperature_fe_values.reinit (temperature_cell);
@@ -977,7 +965,7 @@ void BoussinesqFlowProblem<dim>::assemble_temperature_matrix ()
     cell = temperature_dof_handler.begin_active(),
     endc = temperature_dof_handler.end();
   for (; cell!=endc; ++cell)
-    if (cell->subdomain_id() == (unsigned int)trilinos_communicator.MyPID())
+    if (cell->subdomain_id() == (unsigned int)trilinos_tools.comm().MyPID())
       {
         local_mass_matrix = 0;
         local_stiffness_matrix = 0;
@@ -1094,7 +1082,7 @@ void BoussinesqFlowProblem<dim>::assemble_temperature_system ()
     stokes_cell = stokes_dof_handler.begin_active();
 
   for (; cell!=endc; ++cell, ++stokes_cell)
-    if (cell->subdomain_id() == (unsigned int)trilinos_communicator.MyPID() )
+    if (cell->subdomain_id() == (unsigned int)trilinos_tools.comm().MyPID() )
       {
         local_rhs = 0;
 
@@ -1363,7 +1351,7 @@ void BoussinesqFlowProblem<dim>::output_results () const
 
   DataOut<dim> data_out;
 
-  if (trilinos_communicator.MyPID() == 0)
+  if (trilinos_tools.comm().MyPID() == 0)
     {
       data_out.attach_dof_handler (joint_dof_handler);
 
@@ -1513,20 +1501,13 @@ void BoussinesqFlowProblem<dim>::run ()
 // @sect3{The main function}
 int main (int argc, char *argv[])
 {
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-  MPI_Init (&argc,&argv);
-#else
-  (void)argc;
-  (void)argv;
-#endif
-
-  //sleep (20);
-
   try
     {
       deallog.depth_console (0);
 
+      Utilities::TrilinosTools trilinos (&argc, &argv);
+
-      BoussinesqFlowProblem<2> flow_problem;
+      BoussinesqFlowProblem<2> flow_problem (trilinos);
       flow_problem.run ();
     }
   catch (std::exception &exc)
@@ -1554,9 +1535,5 @@ int main (int argc, char *argv[])
       return 1;
     }
 
-#ifdef DEAL_II_COMPILER_SUPPORTS_MPI
-  MPI_Finalize();
-#endif
-
   return 0;
 }
-- 
2.39.5
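
Usage note: the step-31 and step-32 changes above show the intended
pattern; the following is a minimal standalone sketch of it. The
include paths and the map size of 100 global elements are illustrative
assumptions; only TrilinosTools, comm(), and the standard Epetra_Map
constructor come from the patch itself:

    #include <base/utilities.h>   // declares Utilities::TrilinosTools
    #include <Epetra_Map.h>

    int main (int argc, char *argv[])
    {
      // Calls MPI_Init() when deal.II was configured with MPI support
      // and creates an Epetra_MpiComm; otherwise an Epetra_SerialComm.
      Utilities::TrilinosTools trilinos (&argc, &argv);

      // comm() hands out the Epetra_Comm that an Epetra_Map needs to
      // distribute 100 global elements (hypothetical size) across all
      // participating processes.
      Epetra_Map partitioning (100, 0, trilinos.comm());

      // No explicit MPI_Finalize(): the destructor of `trilinos` calls
      // it, since this object was built from argc/argv and therefore
      // owns the MPI process.
      return 0;
    }

Because the copy constructor produces a non-owning copy, a class such as
step-32's BoussinesqFlowProblem can keep a TrilinosTools member by value
while the instance created in main() remains responsible for calling
MPI_Finalize().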