template <int dim, int spacedim>
std::map<unsigned int, types::global_vertex_index>
compute_local_to_global_vertex_index_map(
-  const parallel::distributed::Triangulation<dim, spacedim> &triangulation)
+  const Triangulation<dim, spacedim> &triangulation)
{
  std::map<unsigned int, types::global_vertex_index>
    local_to_global_vertex_index;
#ifndef DEAL_II_WITH_MPI
-  // without MPI, this function doesn't make sense because on cannot
-  // use parallel::distributed::Triangulation in any meaningful
-  // way
-  (void)triangulation;
-  Assert(false,
-         ExcMessage("This function does not make any sense "
-                    "for parallel::distributed::Triangulation "
-                    "objects if you do not have MPI enabled."));
+  // Without MPI there is only one process, which sees every vertex, so
+  // the local-to-global map is simply the identity.
+  for (unsigned int i = 0; i < triangulation.n_vertices(); ++i)
+    local_to_global_vertex_index[i] = i;
#else
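
With the relaxed signature the function accepts any `Triangulation`, and in a build without MPI it now degenerates to the identity map built above instead of aborting. A minimal sketch of what this enables for a plain serial mesh (the `main` below is illustrative and not part of the patch; it assumes a deal.II build without MPI, so the identity branch is taken):

```cpp
#include <deal.II/grid/grid_generator.h>
#include <deal.II/grid/grid_tools.h>
#include <deal.II/grid/tria.h>

#include <map>

int main()
{
  using namespace dealii;

  // An ordinary serial triangulation -- no parallel machinery involved.
  Triangulation<2> tria;
  GridGenerator::hyper_cube(tria);
  tria.refine_global(2);

  // This call only compiles with the generalized signature. In a build
  // without MPI it returns the identity map over all vertices.
  const std::map<unsigned int, types::global_vertex_index> v_map =
    GridTools::compute_local_to_global_vertex_index_map(tria);

  // Sanity check: every local vertex index maps to itself.
  for (const auto &p : v_map)
    if (p.first != p.second)
      return 1;

  return 0;
}
```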
--- /dev/null
+// ------------------------------------------------------------------------
+//
+// SPDX-License-Identifier: LGPL-2.1-or-later
+// Copyright (C) 2015 - 2018 by the deal.II authors
+//
+// This file is part of the deal.II library.
+//
+// Part of the source code is dual licensed under Apache-2.0 WITH
+// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
+// governing the source code and code contributions can be found in
+// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
+//
+// ------------------------------------------------------------------------
+
+
+
+#include <deal.II/distributed/fully_distributed_tria.h>
+
+#include <deal.II/grid/cell_id.h>
+#include <deal.II/grid/grid_generator.h>
+#include <deal.II/grid/grid_out.h>
+#include <deal.II/grid/grid_tools.h>
+
+#include <map>
+#include <vector>
+
+#include "../tests.h"
+
+// Test GridTools::compute_local_to_global_vertex_index_map() for a
+// parallel::fullydistributed::Triangulation (p::fd::T).
+
+void
+test()
+{
+  const MPI_Comm            mpi_comm = MPI_COMM_WORLD;
+  const types::subdomain_id rank = Utilities::MPI::this_mpi_process(mpi_comm);
+
+  // Build a serial simplex mesh and repartition it into a fully
+  // distributed triangulation.
+  Triangulation<3> serial_tria;
+  GridGenerator::subdivided_hyper_cube_with_simplices(
+    serial_tria, 4, 0.0, 1.0, true);
+  parallel::fullydistributed::Triangulation<3> tria(mpi_comm);
+  tria.copy_triangulation(serial_tria);
+
+  const std::map<unsigned int, types::global_vertex_index> local_to_global_id =
+    GridTools::compute_local_to_global_vertex_index_map(tria);
+
+  deallog << "Number of vertices = " << tria.n_vertices() << std::endl;
+  deallog << "Number of id pairs = " << local_to_global_id.size() << std::endl;
+
+  // Every vertex of a locally owned cell must appear in the map.
+  for (const auto &cell : tria.active_cell_iterators())
+    if (cell->is_locally_owned())
+      for (const auto v_no : cell->vertex_indices())
+        AssertThrow(local_to_global_id.find(cell->vertex_index(v_no)) !=
+                      local_to_global_id.end(),
+                    ExcMessage("Every local vertex should be in the map."));
+
+  // Verify that the numbering is globally consistent: whenever two ranks
+  // know the same global vertex index, it must refer to the same physical
+  // coordinates.
+  std::map<types::global_vertex_index, Point<3>> vertices;
+  for (const auto &pair : local_to_global_id)
+    {
+      AssertIndexRange(pair.first, tria.n_vertices());
+      vertices[pair.second] = tria.get_vertices()[pair.first];
+    }
+
+  const auto all_vertices = Utilities::MPI::all_gather(mpi_comm, vertices);
+  for (types::subdomain_id other_rank = 0; other_rank < all_vertices.size();
+       ++other_rank)
+    {
+      unsigned int n_matches = 0;
+      for (const auto &pair : all_vertices[other_rank])
+        {
+          const auto it = vertices.find(pair.first);
+          if (it != vertices.end())
+            {
+              AssertThrow(it->second == pair.second,
+                          ExcMessage("Global vertices should match."));
+              ++n_matches;
+            }
+        }
+      deallog << "Found " << n_matches << " matching vertices between ranks "
+              << rank << " and " << other_rank << std::endl;
+    }
+}
+
+
+int
+main(int argc, char *argv[])
+{
+  Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);
+  MPILogInitAll                    log;
+
+  test();
+
+  deallog << "OK" << std::endl;
+
+  return 0;
+}
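
As context for how such a map gets used downstream, the sketch below collects the global vertex indices known to this rank into a locally relevant `IndexSet`, e.g. for sizing distributed vectors over vertices. It is a hypothetical helper, not deal.II API: the name `locally_relevant_global_vertices` and the max-based global sizing are assumptions made here for illustration.

```cpp
#include <deal.II/base/index_set.h>
#include <deal.II/base/mpi.h>

#include <algorithm>
#include <map>

// Hypothetical helper: gather the global vertex indices seen by this rank
// into an IndexSet whose size is the largest global index on any rank + 1.
dealii::IndexSet
locally_relevant_global_vertices(
  const std::map<unsigned int, dealii::types::global_vertex_index> &v_map,
  const MPI_Comm comm)
{
  dealii::types::global_vertex_index n_global = 0;
  for (const auto &p : v_map)
    n_global = std::max(n_global, p.second + 1);

  // All ranks must agree on the global size of the index space.
  n_global = dealii::Utilities::MPI::max(n_global, comm);

  dealii::IndexSet relevant(n_global);
  for (const auto &p : v_map)
    relevant.add_index(p.second);
  return relevant;
}
```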
--- /dev/null
+
+DEAL:0::Number of vertices = 96
+DEAL:0::Number of id pairs = 53
+DEAL:0::Found 53 matching vertices between ranks 0 and 0
+DEAL:0::Found 22 matching vertices between ranks 0 and 1
+DEAL:0::Found 8 matching vertices between ranks 0 and 2
+DEAL:0::Found 17 matching vertices between ranks 0 and 3
+DEAL:0::OK
+
+DEAL:1::Number of vertices = 105
+DEAL:1::Number of id pairs = 61
+DEAL:1::Found 22 matching vertices between ranks 1 and 0
+DEAL:1::Found 61 matching vertices between ranks 1 and 1
+DEAL:1::Found 37 matching vertices between ranks 1 and 2
+DEAL:1::Found 17 matching vertices between ranks 1 and 3
+DEAL:1::OK
+
+DEAL:2::Number of vertices = 92
+DEAL:2::Number of id pairs = 57
+DEAL:2::Found 8 matching vertices between ranks 2 and 0
+DEAL:2::Found 37 matching vertices between ranks 2 and 1
+DEAL:2::Found 57 matching vertices between ranks 2 and 2
+DEAL:2::Found 24 matching vertices between ranks 2 and 3
+DEAL:2::OK
+
+DEAL:3::Number of vertices = 96
+DEAL:3::Number of id pairs = 55
+DEAL:3::Found 17 matching vertices between ranks 3 and 0
+DEAL:3::Found 17 matching vertices between ranks 3 and 1
+DEAL:3::Found 24 matching vertices between ranks 3 and 2
+DEAL:3::Found 55 matching vertices between ranks 3 and 3
+DEAL:3::OK
+