From 7d052b2e42481537bcde7171f4745bd054190830 Mon Sep 17 00:00:00 2001
From: Daniel Arndt
Date: Wed, 4 Oct 2017 00:53:21 +0200
Subject: [PATCH] Reduce number of versions of internal::all_reduce

---
 include/deal.II/base/mpi.h           |  24 ++--
 include/deal.II/base/mpi.templates.h | 161 ++++++++++-----------------
 source/base/mpi.inst.in              |  12 +-
 3 files changed, 84 insertions(+), 113 deletions(-)

diff --git a/include/deal.II/base/mpi.h b/include/deal.II/base/mpi.h
index 1c42e24a16..8afff6e2c5 100644
--- a/include/deal.II/base/mpi.h
+++ b/include/deal.II/base/mpi.h
@@ -17,6 +17,7 @@
 #define dealii_mpi_h
 
 #include <deal.II/base/config.h>
+#include <deal.II/base/array_view.h>
 
 #include <vector>
 #include <map>
@@ -163,6 +164,11 @@ namespace Utilities
               const MPI_Comm &mpi_communicator,
               T (&sums)[N]);
 
+    template <typename T>
+    void sum (const ArrayView<const T> &values,
+              const MPI_Comm &mpi_communicator,
+              const ArrayView<T> &sums);
+
     /**
      * Like the previous function, but take the sums over the elements of a
      * std::vector. In other words, the i-th element of the results array is
@@ -481,11 +487,10 @@ namespace Utilities
   namespace internal
   {
     template <typename T>
-    void all_reduce (const MPI_Op &mpi_op,
-                     const T *const values,
-                     const MPI_Comm &mpi_communicator,
-                     T *output,
-                     const std::size_t size);
+    void all_reduce (const MPI_Op &mpi_op,
+                     const ArrayView<const T> &values,
+                     const MPI_Comm &mpi_communicator,
+                     const ArrayView<T> &output);
   }
 
   // Since these depend on N they must live in the header file
@@ -494,7 +499,8 @@
             const MPI_Comm &mpi_communicator,
             T (&sums)[N])
   {
-    internal::all_reduce(MPI_SUM, values, mpi_communicator, sums, N);
+    internal::all_reduce(MPI_SUM, ArrayView<const T>(values, N),
+                         mpi_communicator, ArrayView<T>(sums, N));
   }
 
   template <typename T, unsigned int N>
@@ -502,7 +508,8 @@
             const MPI_Comm &mpi_communicator,
             T (&maxima)[N])
   {
-    internal::all_reduce(MPI_MAX, values, mpi_communicator, maxima, N);
+    internal::all_reduce(MPI_MAX, ArrayView<const T>(values, N),
+                         mpi_communicator, ArrayView<T>(maxima, N));
  }
 
   template <typename T, unsigned int N>
@@ -510,7 +517,8 @@
             const MPI_Comm &mpi_communicator,
             T (&minima)[N])
   {
-    internal::all_reduce(MPI_MIN, values, mpi_communicator, minima, N);
+    internal::all_reduce(MPI_MIN, ArrayView<const T>(values, N),
+                         mpi_communicator, ArrayView<T>(minima, N));
   }
 #endif
 } // end of namespace MPI
diff --git a/include/deal.II/base/mpi.templates.h b/include/deal.II/base/mpi.templates.h
index f316f2de85..f1ca394020 100644
--- a/include/deal.II/base/mpi.templates.h
+++ b/include/deal.II/base/mpi.templates.h
@@ -95,14 +95,13 @@ namespace Utilities
 #endif
 
 
-      template <typename T>
-      void all_reduce (const MPI_Op &mpi_op,
-                       const T *const values,
-                       const MPI_Comm &mpi_communicator,
-                       T *output,
-                       const std::size_t size)
+      template <typename T>
+      void all_reduce (const MPI_Op &mpi_op,
+                       const ArrayView<const T> &values,
+                       const MPI_Comm &mpi_communicator,
+                       const ArrayView<T> &output)
       {
+        AssertDimension(values.size(), output.size());
 #ifdef DEAL_II_WITH_MPI
         if (job_supports_mpi())
           {
@@ -114,12 +113,12 @@
                // implementations of MPI-2. It is not needed as
                // of MPI-3 and we should remove it at some
                // point in the future.
-               const_cast<void *>(static_cast<const void *>(values))
+               const_cast<void *>(static_cast<const void *>(values.begin()))
                :
                MPI_IN_PLACE,
-               static_cast<void *>(output),
-               static_cast<int>(size),
-               internal::mpi_type_id(values),
+               static_cast<void *>(output.begin()),
+               static_cast<int>(values.size()),
+               internal::mpi_type_id(values.begin()),
                mpi_op,
                mpi_communicator);
             AssertThrowMPI(ierr);
@@ -129,24 +128,23 @@
           {
             (void)mpi_op;
             (void)mpi_communicator;
-            for (std::size_t i=0; i<size; ++i)
+            for (std::size_t i=0; i<values.size(); ++i)
               output[i] = values[i];
           }
       }
 
 
 
-      template <typename T>
-      void all_reduce (const MPI_Op &mpi_op,
-                       const std::complex<T> *const values,
-                       const MPI_Comm &mpi_communicator,
-                       std::complex<T> *output,
-                       const std::size_t size)
+      template <typename T>
+      void all_reduce (const MPI_Op &mpi_op,
+                       const ArrayView<const std::complex<T>> &values,
+                       const MPI_Comm &mpi_communicator,
+                       const ArrayView<std::complex<T>> &output)
       {
+        AssertDimension(values.size(), output.size());
 #ifdef DEAL_II_WITH_MPI
         if (job_supports_mpi())
           {
-            T dummy_selector;
             const int ierr = MPI_Allreduce
               (values != output
                ?
@@ -155,12 +153,12 @@
                // implementations of MPI-2. It is not needed as
                // of MPI-3 and we should remove it at some
                // point in the future.
-               const_cast<void *>(static_cast<const void *>(values))
+               const_cast<void *>(static_cast<const void *>(values.begin()))
                :
                MPI_IN_PLACE,
-               static_cast<void *>(output),
-               static_cast<int>(size*2),
-               internal::mpi_type_id(&dummy_selector),
+               static_cast<void *>(output.begin()),
+               static_cast<int>(values.size()*2),
+               internal::mpi_type_id(static_cast<T *>(nullptr)),
                mpi_op,
                mpi_communicator);
             AssertThrowMPI(ierr);
@@ -170,79 +168,10 @@
           {
             (void)mpi_op;
             (void)mpi_communicator;
-            for (std::size_t i=0; i<size; ++i)
+            for (std::size_t i=0; i<values.size(); ++i)
               output[i] = values[i];
           }
       }
 
 
-
-      template <typename T>
-      T all_reduce (const MPI_Op &mpi_op,
-                    const T &t,
-                    const MPI_Comm &mpi_communicator)
-      {
-        T output;
-        all_reduce(mpi_op, &t, mpi_communicator, &output, 1);
-        return output;
-      }
-
-
-
-      template <typename T>
-      void all_reduce (const MPI_Op &mpi_op,
-                       const std::vector<T> &values,
-                       const MPI_Comm &mpi_communicator,
-                       std::vector<T> &output)
-      {
-        Assert(values.size() == output.size(),
-               ExcDimensionMismatch(values.size(), output.size()));
-        all_reduce(mpi_op, &values[0], mpi_communicator, &output[0], values.size());
-      }
-
-
-
-      template <typename T>
-      void all_reduce (const MPI_Op &mpi_op,
-                       const Vector<T> &values,
-                       const MPI_Comm &mpi_communicator,
-                       Vector<T> &output)
-      {
-        Assert(values.size() == output.size(),
-               ExcDimensionMismatch(values.size(), output.size()));
-        all_reduce(mpi_op, values.begin(), mpi_communicator, output.begin(), values.size());
-      }
-
-
-
-      template <typename T>
-      void all_reduce (const MPI_Op &mpi_op,
-                       const FullMatrix<T> &values,
-                       const MPI_Comm &mpi_communicator,
-                       FullMatrix<T> &output)
-      {
-        Assert(values.m() == output.m(),
-               ExcDimensionMismatch(values.m(), output.m()));
-        Assert(values.n() == output.n(),
-               ExcDimensionMismatch(values.n(), output.n()));
-        all_reduce(mpi_op, &values[0][0], mpi_communicator, &output[0][0], values.m() * values.n());
-      }
-
-
-
-      template <typename T>
-      void all_reduce (const MPI_Op &mpi_op,
-                       const LAPACKFullMatrix<T> &values,
-                       const MPI_Comm &mpi_communicator,
-                       LAPACKFullMatrix<T> &output)
-      {
-        Assert(values.m() == output.m(),
-               ExcDimensionMismatch(values.m(), output.m()));
-        Assert(values.n() == output.n(),
-               ExcDimensionMismatch(values.n(), output.n()));
-        all_reduce(mpi_op, &values(0,0), mpi_communicator, &output(0,0), values.m() * values.n());
-      }
     }
 
 
@@ -251,7 +180,10 @@
   T sum (const T &t,
          const MPI_Comm &mpi_communicator)
   {
-    return internal::all_reduce(MPI_SUM, t, mpi_communicator);
+    T return_value;
+    internal::all_reduce(MPI_SUM, ArrayView<const T>(&t,1),
+                         mpi_communicator, ArrayView<T>(&return_value,1));
+    return return_value;
   }
 
 
@@ -260,6 +192,17 @@
   void sum (const std::vector<T> &values,
             const MPI_Comm &mpi_communicator,
             std::vector<T> &sums)
+  {
+    internal::all_reduce(MPI_SUM, ArrayView<const T>(values),
+                         mpi_communicator, ArrayView<T>(sums));
+  }
+
+
+
+  template <typename T>
+  void sum (const ArrayView<const T> &values,
+            const MPI_Comm &mpi_communicator,
+            const ArrayView<T> &sums)
   {
     internal::all_reduce(MPI_SUM, values, mpi_communicator, sums);
   }
 
 
@@ -271,7 +214,9 @@
             const MPI_Comm &mpi_communicator,
             Vector<T> &sums)
   {
-    internal::all_reduce(MPI_SUM, values, mpi_communicator, sums);
+    const auto &size = values.size();
+    internal::all_reduce(MPI_SUM, ArrayView<const T>(values.begin(), size),
+                         mpi_communicator, ArrayView<T>(sums.begin(), size));
   }
 
 
@@ -281,7 +226,10 @@
             const MPI_Comm &mpi_communicator,
             FullMatrix<T> &sums)
   {
-    internal::all_reduce(MPI_SUM, values, mpi_communicator, sums);
+    const auto size_values = values.n()*values.m();
+    const auto size_sums = sums.n()*sums.m();
+    internal::all_reduce(MPI_SUM, ArrayView<const T>(&values[0][0], size_values),
+                         mpi_communicator, ArrayView<T>(&sums[0][0], size_sums));
   }
 
 
@@ -291,7 +239,10 @@
             const MPI_Comm &mpi_communicator,
             LAPACKFullMatrix<T> &sums)
   {
-    internal::all_reduce(MPI_SUM, values, mpi_communicator, sums);
+    const auto size_values = values.n()*values.m();
+    const auto size_sums = sums.n()*sums.m();
+    internal::all_reduce(MPI_SUM, ArrayView<const T>(&values(0,0), size_values),
+                         mpi_communicator, ArrayView<T>(&sums(0,0), size_sums));
   }
 
 
@@ -346,7 +297,10 @@
   T max (const T &t,
          const MPI_Comm &mpi_communicator)
   {
-    return internal::all_reduce(MPI_MAX, t, mpi_communicator);
+    T return_value;
+    internal::all_reduce(MPI_MAX, ArrayView<const T>(&t,1),
+                         mpi_communicator, ArrayView<T>(&return_value,1));
+    return return_value;
   }
 
 
@@ -356,7 +310,8 @@
             const MPI_Comm &mpi_communicator,
             std::vector<T> &maxima)
   {
-    internal::all_reduce(MPI_MAX, values, mpi_communicator, maxima);
+    internal::all_reduce(MPI_MAX, ArrayView<const T>(values),
+                         mpi_communicator, ArrayView<T>(maxima));
   }
 
 
@@ -365,7 +320,10 @@
   T min (const T &t,
          const MPI_Comm &mpi_communicator)
   {
-    return internal::all_reduce(MPI_MIN, t, mpi_communicator);
+    T return_value;
+    internal::all_reduce(MPI_MIN, ArrayView<const T>(&t,1),
+                         mpi_communicator, ArrayView<T>(&return_value,1));
+    return return_value;
   }
 
 
@@ -375,7 +333,8 @@
             const MPI_Comm &mpi_communicator,
             std::vector<T> &minima)
   {
-    internal::all_reduce(MPI_MIN, values, mpi_communicator, minima);
+    internal::all_reduce(MPI_MIN, ArrayView<const T>(values),
+                         mpi_communicator, ArrayView<T>(minima));
   }
 } // end of namespace MPI
 } // end of namespace Utilities
diff --git a/source/base/mpi.inst.in b/source/base/mpi.inst.in
index 504a385c58..bbbc0b8807 100644
--- a/source/base/mpi.inst.in
+++ b/source/base/mpi.inst.in
@@ -27,6 +27,9 @@ for (S : MPI_SCALARS)
     template
     void sum (const FullMatrix<S> &, const MPI_Comm &, FullMatrix<S> &);
 
+    template
+    void sum (const ArrayView<const S> &, const MPI_Comm &, const ArrayView<S> &);
+
     template
     S sum (const S &, const MPI_Comm &);
 
@@ -61,10 +64,9 @@ for (S : MPI_SCALARS)
    // seem to inline it with the '-march=native' flag.
    template
    void Utilities::MPI::internal::all_reduce (const MPI_Op &,
-                                              const S *const,
+                                              const ArrayView<const S> &,
                                               const MPI_Comm &,
-                                              S *,
-                                              const std::size_t);
+                                              const ArrayView<S> &);
 }
 
@@ -81,8 +83,10 @@
 for (S : COMPLEX_SCALARS)
 {
     template
     void sum (const std::vector<S> &, const MPI_Comm &, std::vector<S> &);
-}
+    template
+    void sum (const ArrayView<const S> &, const MPI_Comm &, const ArrayView<S> &);
+}
 
 for (S : REAL_SCALARS; rank: RANKS; dim : SPACE_DIMENSIONS)
 {
-- 
2.39.5
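
Note (not part of the patch): below is a minimal sketch of how the ArrayView-based
sum() overload introduced by this change can be called from user code. It assumes a
deal.II build with MPI enabled; the variable names and the use of MPI_COMM_WORLD are
illustrative only.

    #include <deal.II/base/array_view.h>
    #include <deal.II/base/mpi.h>

    #include <vector>

    int main(int argc, char **argv)
    {
      // Initialize MPI; it is finalized automatically when this object goes out of scope.
      dealii::Utilities::MPI::MPI_InitFinalize mpi_init(argc, argv, 1);

      // Each rank contributes three local values; the element-wise sum over all
      // ranks is written into 'sums' through the ArrayView interface, without
      // any intermediate copies.
      std::vector<double> values = {1.0, 2.0, 3.0};
      std::vector<double> sums(values.size());

      dealii::Utilities::MPI::sum(
        dealii::ArrayView<const double>(values.data(), values.size()),
        MPI_COMM_WORLD,
        dealii::ArrayView<double>(sums.data(), sums.size()));

      return 0;
    }

Because the fixed-size array, std::vector, Vector, FullMatrix, and LAPACKFullMatrix
overloads above all forward to the same ArrayView-based internal::all_reduce, this one
entry point is the only code path that actually issues the MPI_Allreduce call.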