From: tcclevenger
Date: Thu, 9 Feb 2017 13:38:27 +0000 (-0500)
Subject: fixed bug in write_mesh_per_processor_as_vtu, added test
X-Git-Tag: v8.5.0-rc1~127^2
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=469828e6da9164aea875406b8fac18bf6a5798e4;p=dealii.git

fixed bug in write_mesh_per_processor_as_vtu, added test

The previous implementation did not account for a multilevel hierarchy that
is not distributed, nor for active cells whose level_subdomain_id differs
from their subdomain_id.

Made requested changes.

Fixed a typo in the changes entry: GridOut function, not GridTools.
---
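Note (not part of the patch): a minimal usage sketch of the fixed function, assuming a
deal.II build with MPI; the output base name "mesh" and the 2d/refinement choices are
arbitrary. The function writes one .procXXXX.vtu file per rank plus a .pvtu record on
rank 0, as exercised by the test below.

#include <deal.II/base/mpi.h>
#include <deal.II/distributed/shared_tria.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/grid/grid_out.h>

using namespace dealii;

int main (int argc, char *argv[])
{
  Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv, 1);

  // A shared (i.e. non-distributed) triangulation keeps the whole multilevel
  // hierarchy on every process -- one of the cases this commit fixes.
  parallel::shared::Triangulation<2> tria (MPI_COMM_WORLD);
  GridGenerator::hyper_cube (tria);
  tria.refine_global (2);

  GridOut grid_out;
  // view_levels = true writes cells on all levels, not just active ones;
  // include_artificial = false skips artificial cells on each rank.
  grid_out.write_mesh_per_processor_as_vtu (tria, "mesh",
                                            /*view_levels=*/true,
                                            /*include_artificial=*/false);
}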
diff --git a/doc/news/changes/minor/20170209ConradClevenger b/doc/news/changes/minor/20170209ConradClevenger
new file mode 100644
index 0000000000..b92ce515b4
--- /dev/null
+++ b/doc/news/changes/minor/20170209ConradClevenger
@@ -0,0 +1,3 @@
+Fixed: The GridOut::write_mesh_per_processor_as_vtu() function now works for a mesh whose multilevel hierarchy is not distributed, as well as for a mesh whose level_subdomain_ids do not necessarily match its subdomain_ids for every active cell.
+
+(Conrad Clevenger, 2017/02/09)
diff --git a/source/grid/grid_out.cc b/source/grid/grid_out.cc
index e8eb42d969..00d3fc68ab 100644
--- a/source/grid/grid_out.cc
+++ b/source/grid/grid_out.cc
@@ -2548,11 +2548,24 @@ void GridOut::write_mesh_per_processor_as_vtu (const Triangulation<dim,spacedim>
   for (cell=tria.begin(), endc=tria.end(); cell != endc; ++cell)
     {
-      if (!include_artificial && cell->level_subdomain_id() ==
-          numbers::artificial_subdomain_id)
-        continue;
-      if (!view_levels && cell->has_children())
-        continue;
+      if (!view_levels)
+        {
+          if (cell->has_children())
+            continue;
+          if (!include_artificial &&
+              cell->subdomain_id() == numbers::artificial_subdomain_id)
+            continue;
+        }
+      else if (!include_artificial)
+        {
+          if (cell->has_children() &&
+              cell->level_subdomain_id() == numbers::artificial_subdomain_id)
+            continue;
+          else if (!cell->has_children() &&
+                   cell->level_subdomain_id() == numbers::artificial_subdomain_id &&
+                   cell->subdomain_id() == numbers::artificial_subdomain_id)
+            continue;
+        }
 
       DataOutBase::Patch<dim,spacedim> patch;
       patch.data.reinit(n_datasets, n_q_points);
diff --git a/tests/grid/grid_out_per_processor_vtu_02.cc b/tests/grid/grid_out_per_processor_vtu_02.cc
new file mode 100644
index 0000000000..d4254505dd
--- /dev/null
+++ b/tests/grid/grid_out_per_processor_vtu_02.cc
@@ -0,0 +1,103 @@
+// ---------------------------------------------------------------------
+//
+// Copyright (C) 2009 - 2016 by the deal.II authors
+//
+// This file is part of the deal.II library.
+//
+// The deal.II library is free software; you can use it, redistribute
+// it, and/or modify it under the terms of the GNU Lesser General
+// Public License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+// The full text of the license can be found in the file LICENSE at
+// the top level of the deal.II distribution.
+//
+// ---------------------------------------------------------------------
+
+
+// check GridOut::write_mesh_per_processor_as_vtu() when level_subdomain_id
+// differs from subdomain_id for a particular cell
+
+#include "../tests.h"
+#include <deal.II/base/mpi.h>
+#include <deal.II/base/utilities.h>
+#include <deal.II/base/logstream.h>
+#include <deal.II/grid/tria.h>
+#include <deal.II/grid/tria_accessor.h>
+#include <deal.II/grid/tria_iterator.h>
+#include <deal.II/grid/grid_generator.h>
+#include <deal.II/grid/grid_out.h>
+#include <deal.II/distributed/shared_tria.h>
+
+#include <fstream>
+
+template <int dim>
+void output(const parallel::shared::Triangulation<dim> &tr,
+            const std::string &filename,
+            const bool view_levels,
+            const bool include_artificial)
+{
+  GridOut out;
+  out.write_mesh_per_processor_as_vtu(tr, filename, view_levels, include_artificial);
+
+  // copy the .pvtu and .vtu files
+  // into the logstream
+  int myid = Utilities::MPI::this_mpi_process (MPI_COMM_WORLD);
+  if (myid==0)
+    {
+      cat_file((std::string(filename) + ".pvtu").c_str());
+      cat_file((std::string(filename) + ".proc0000.vtu").c_str());
+    }
+  else if (myid==1)
+    cat_file((std::string(filename) + ".proc0001.vtu").c_str());
+  else
+    AssertThrow(false, ExcNotImplemented());
+}
+
+template <int dim>
+void test()
+{
+  unsigned int myid = Utilities::MPI::this_mpi_process (MPI_COMM_WORLD);
+  if (myid == 0)
+    deallog << "hyper_cube" << std::endl;
+
+  parallel::shared::Triangulation<dim> tr(MPI_COMM_WORLD);
+  GridGenerator::hyper_cube(tr);
+  tr.refine_global(1);
+  typename Triangulation<dim>::active_cell_iterator
+  cell=tr.begin_active(), endc=tr.end();
+  for (; cell!=endc; ++cell)
+    {
+      if (cell->index() < 2)
+        cell->set_subdomain_id(cell->index());
+      else
+        cell->set_subdomain_id(numbers::artificial_subdomain_id);
+
+      if (cell->index() == 0 || cell->index() == 2)
+        cell->set_level_subdomain_id(numbers::artificial_subdomain_id);
+      else if (cell->index() == 1)
+        cell->set_level_subdomain_id(0);
+      else if (cell->index() == 3)
+        cell->set_level_subdomain_id(1);
+    }
+
+  output(tr, "file1", true, false);
+}
+
+
+int main(int argc, char *argv[])
+{
+  Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv, 1);
+
+  MPILogInitAll init;
+
+  deallog.push("2d");
+  test<2>();
+  deallog.pop();
+}
diff --git a/tests/grid/grid_out_per_processor_vtu_02.mpirun=2.with_zlib=on.output b/tests/grid/grid_out_per_processor_vtu_02.mpirun=2.with_zlib=on.output
new file mode 100644
index 0000000000..4303e9a9fc
--- /dev/null
+++ b/tests/grid/grid_out_per_processor_vtu_02.mpirun=2.with_zlib=on.output
@@ -0,0 +1,100 @@
+
+DEAL:0:2d::hyper_cube
+[The remaining lines of this expected-output file are the .pvtu record and the two per-rank .vtu files: zlib-compressed, base64-encoded VTU data arrays whose surrounding XML markup was lost in extraction.]