From: Sebastian Proell
Date: Thu, 6 Oct 2022 16:15:29 +0000 (+0200)
Subject: Fix bug in FieldTransfer for ghosted vectors
X-Git-Tag: v9.5.0-rc1~903^2
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=refs%2Fpull%2F14340%2Fhead;p=dealii.git

Fix bug in FieldTransfer for ghosted vectors
---

diff --git a/source/distributed/field_transfer.cc b/source/distributed/field_transfer.cc
index 33d747c637..2c8366d38a 100644
--- a/source/distributed/field_transfer.cc
+++ b/source/distributed/field_transfer.cc
@@ -120,7 +120,7 @@ namespace parallel
 
 
         // Communicate the results.
-        out.compress(dealii::VectorOperation::min);
+        out.compress(dealii::VectorOperation::insert);
 
         // Treat hanging nodes
         std::vector<types::global_dof_index> dof_indices;
diff --git a/tests/hp/field_transfer_04.cc b/tests/hp/field_transfer_04.cc
new file mode 100644
index 0000000000..3efdb4fb07
--- /dev/null
+++ b/tests/hp/field_transfer_04.cc
@@ -0,0 +1,124 @@
+// ---------------------------------------------------------------------
+//
+// Copyright (C) 2021 by the deal.II authors
+//
+// This file is part of the deal.II library.
+//
+// The deal.II library is free software; you can use it, redistribute
+// it, and/or modify it under the terms of the GNU Lesser General
+// Public License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+// The full text of the license can be found in the file LICENSE.md at
+// the top level directory of deal.II.
+//
+// ---------------------------------------------------------------------
+
+// Check that we can use FieldTransfer when multiple cells are changed from
+// FE_Nothing to FE_Q
+
+#include <deal.II/distributed/field_transfer.h>
+
+#include <deal.II/dofs/dof_handler.h>
+#include <deal.II/dofs/dof_tools.h>
+
+#include <deal.II/fe/fe_nothing.h>
+#include <deal.II/fe/fe_q.h>
+
+#include <deal.II/grid/grid_generator.h>
+#include <deal.II/hp/fe_collection.h>
+#include <deal.II/lac/la_parallel_vector.h>
+
+#include <deal.II/distributed/tria.h>
+
+#include "../tests.h"
+
+// FieldTransfer with ghosted vector
+
+int
+main(int argc, char *argv[])
+{
+  Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);
+
+  MPILogInitAll mpi_log;
+
+  parallel::distributed::Triangulation<2> triangulation(MPI_COMM_WORLD);
+  GridGenerator::hyper_cube(triangulation);
+  triangulation.refine_global(2);
+
+  hp::FECollection<2> fe_collection;
+  fe_collection.push_back(FE_Q<2>(1));
+  fe_collection.push_back(FE_Nothing<2>());
+
+  DoFHandler<2> dof_handler(triangulation);
+
+  // Assign FE_Nothing to half of the domain
+  for (auto cell : dof_handler.active_cell_iterators())
+    {
+      if (cell->is_locally_owned())
+        {
+          if (cell->center()[1] < 0.5)
+            {
+              cell->set_active_fe_index(0);
+            }
+          else
+            {
+              cell->set_active_fe_index(1);
+            }
+        }
+    }
+
+  dof_handler.distribute_dofs(fe_collection);
+
+  // Initialize solution
+  auto locally_relevant_dofs =
+    DoFTools::extract_locally_relevant_dofs(dof_handler);
+  LinearAlgebra::distributed::Vector<double> solution(
+    dof_handler.locally_owned_dofs(), locally_relevant_dofs, MPI_COMM_WORLD);
+  const double old_value = 1.;
+  for (unsigned int i = 0; i < solution.local_size(); ++i)
+    solution.local_element(i) = old_value;
+
+  {
+    std::stringstream ss;
+    solution.print(ss);
+    deallog << ss.str() << std::endl;
+  }
+
+  parallel::distributed::experimental::
+    FieldTransfer<2, LinearAlgebra::distributed::Vector<double>>
+      field_transfer(dof_handler);
+  // Assign FE_Q to all the cells
+  for (auto cell : dof_handler.active_cell_iterators())
+    {
+      if (cell->is_locally_owned())
+        {
+          cell->set_future_fe_index(0);
+        }
+    }
+
+  triangulation.prepare_coarsening_and_refinement();
+
+  solution.update_ghost_values();
+  field_transfer.prepare_for_coarsening_and_refinement(solution, 1);
+
+  triangulation.execute_coarsening_and_refinement();
+
+  dof_handler.distribute_dofs(fe_collection);
+  const unsigned int dofs_per_cell =
+    dof_handler.get_fe_collection().max_dofs_per_cell();
+
+  AffineConstraints<double> affine_constraints;
+  const double new_value = 2.;
+
+  locally_relevant_dofs.clear();
+  locally_relevant_dofs = DoFTools::extract_locally_relevant_dofs(dof_handler);
+  LinearAlgebra::distributed::Vector<double> new_solution(
+    dof_handler.locally_owned_dofs(), locally_relevant_dofs, MPI_COMM_WORLD);
+  field_transfer.interpolate(new_value, affine_constraints, new_solution);
+
+  {
+    std::stringstream ss;
+    new_solution.print(ss);
+    deallog << ss.str() << std::endl;
+  }
+}
diff --git a/tests/hp/field_transfer_04.with_p4est=true.mpirun=2.output b/tests/hp/field_transfer_04.with_p4est=true.mpirun=2.output
new file mode 100644
index 0000000000..ccd1d62a05
--- /dev/null
+++ b/tests/hp/field_transfer_04.with_p4est=true.mpirun=2.output
@@ -0,0 +1,23 @@
+
+DEAL:0::Process #0
+Local range: [0, 15), global size: 15
+Vector data:
+1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00
+
+DEAL:0::Process #0
+Local range: [0, 15), global size: 25
+Vector data:
+1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00
+
+
+DEAL:1::Process #1
+Local range: [15, 15), global size: 15
+Vector data:
+
+
+DEAL:1::Process #1
+Local range: [15, 25), global size: 25
+Vector data:
+2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00
+
+
diff --git a/tests/hp/field_transfer_04.with_p4est=true.mpirun=3.output b/tests/hp/field_transfer_04.with_p4est=true.mpirun=3.output
new file mode 100644
index 0000000000..f1cd31a786
--- /dev/null
+++ b/tests/hp/field_transfer_04.with_p4est=true.mpirun=3.output
@@ -0,0 +1,35 @@
+
+DEAL:0::Process #0
+Local range: [0, 9), global size: 15
+Vector data:
+1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00
+
+DEAL:0::Process #0
+Local range: [0, 9), global size: 25
+Vector data:
+1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00
+
+
+DEAL:1::Process #1
+Local range: [9, 15), global size: 15
+Vector data:
+1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00
+
+DEAL:1::Process #1
+Local range: [9, 21), global size: 25
+Vector data:
+1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 1.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00 2.000e+00
+
+
+
+DEAL:2::Process #2
+Local range: [15, 15), global size: 15
+Vector data:
+
+
+DEAL:2::Process #2
+Local range: [21, 25), global size: 25
+Vector data:
+2.000e+00 2.000e+00 2.000e+00 2.000e+00
+
+
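
For context: the "ghosted vectors" in the subject line are LinearAlgebra::distributed::Vector objects that hold the locally owned DoFs of each process plus read-only copies of the locally relevant (ghost) DoFs of its neighbors; the new test hands such a vector to FieldTransfer after calling update_ghost_values(). The stand-alone sketch below is illustration only and not part of the patch: it reproduces just that ghosted-vector setup, mirroring the calls used in the test while leaving out the FE_Nothing/hp machinery and the deallog plumbing. Like the test, it assumes a deal.II build with MPI and p4est.

// Stand-alone sketch (illustration only, not part of the patch): build the
// same kind of ghosted vector that tests/hp/field_transfer_04.cc hands to
// FieldTransfer, without the FE_Nothing/hp machinery.

#include <deal.II/base/index_set.h>
#include <deal.II/base/mpi.h>

#include <deal.II/distributed/tria.h>

#include <deal.II/dofs/dof_handler.h>
#include <deal.II/dofs/dof_tools.h>

#include <deal.II/fe/fe_q.h>

#include <deal.II/grid/grid_generator.h>

#include <deal.II/lac/la_parallel_vector.h>

#include <iostream>

using namespace dealii;

int
main(int argc, char *argv[])
{
  Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);

  // Small distributed mesh with a plain FE_Q(1) space.
  parallel::distributed::Triangulation<2> triangulation(MPI_COMM_WORLD);
  GridGenerator::hyper_cube(triangulation);
  triangulation.refine_global(2);

  const FE_Q<2> fe(1);
  DoFHandler<2> dof_handler(triangulation);
  dof_handler.distribute_dofs(fe);

  // "Ghosted" layout: locally owned DoFs plus the locally relevant ones.
  const IndexSet locally_relevant_dofs =
    DoFTools::extract_locally_relevant_dofs(dof_handler);
  LinearAlgebra::distributed::Vector<double> solution(
    dof_handler.locally_owned_dofs(), locally_relevant_dofs, MPI_COMM_WORLD);

  // Fill the locally owned range, then import the ghost copies. This is the
  // state the test puts the vector in before it calls
  // prepare_for_coarsening_and_refinement().
  for (unsigned int i = 0; i < solution.local_size(); ++i)
    solution.local_element(i) = 1.;
  solution.update_ghost_values();

  std::cout << "rank " << Utilities::MPI::this_mpi_process(MPI_COMM_WORLD)
            << ": owns " << dof_handler.locally_owned_dofs().n_elements()
            << " DoFs, sees " << locally_relevant_dofs.n_elements()
            << " relevant DoFs" << std::endl;
}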