From: bangerth
Date: Tue, 21 May 2013 12:30:22 +0000 (+0000)
Subject: Add a test.
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=749a595a0eed501602c9f7316e1c1a5839dc7645;p=dealii-svn.git

Add a test.

git-svn-id: https://svn.dealii.org/trunk@29532 0785d39b-7218-0410-832d-ea1e28bc413d
---

diff --git a/tests/mpi/petsc_condense_02.cc b/tests/mpi/petsc_condense_02.cc
new file mode 100644
index 0000000000..4499b9b583
--- /dev/null
+++ b/tests/mpi/petsc_condense_02.cc
@@ -0,0 +1,170 @@
+//---------------------------------------------------------------------------
+//    $Id$
+//    Version: $Name$
+//
+//    Copyright (C) 2009, 2010, 2012, 2013 by the deal.II authors
+//
+//    This file is subject to QPL and may not be distributed
+//    without copyright and license information. Please refer
+//    to the file deal.II/doc/license.html for the text and
+//    further information on this license.
+//
+//---------------------------------------------------------------------------
+
+
+// check ConstraintMatrix.distribute() for a petsc vector
+//
+// we do this by creating a vector where each processor has 100
+// elements but no ghost elements. then we add constraints on each
+// processor that constrain elements within each processor's local
+// range to ones outside. these have to be added on all
+// processors. then call distribute() and verify that the result is
+// true.
+//
+// we use constraints of the form x_i = x_j with sequentially growing
+// x_j's so that we can verify the correctness analytically
+//
+// compared to the _01 test, here the ConstraintMatrix object acts on an index
+// set that only includes the locally owned vector elements, without any
+// overlap. this verifies that we really only need to know about the *sources*
+// of constraints locally, not the *targets*.
+
+
+#include "../tests.h"
+#include <deal.II/base/utilities.h>
+#include <deal.II/base/index_set.h>
+#include <deal.II/lac/constraint_matrix.h>
+
+#include <deal.II/lac/petsc_parallel_vector.h>
+#include <fstream>
+
+
+
+void test()
+{
+  const unsigned int myid = Utilities::MPI::this_mpi_process (MPI_COMM_WORLD);
+  const unsigned int n_processes = Utilities::MPI::n_mpi_processes (MPI_COMM_WORLD);
+
+  // create a vector that consists of elements indexed from 0 to n
+  PETScWrappers::MPI::Vector vec (MPI_COMM_WORLD, 100 * n_processes, 100);
+  Assert (vec.local_size() == 100, ExcInternalError());
+  Assert (vec.local_range().first == 100*myid, ExcInternalError());
+  Assert (vec.local_range().second == 100*myid+100, ExcInternalError());
+  for (unsigned int i=vec.local_range().first; i<vec.local_range().second; ++i)
+    vec(i) = i;
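
The hunk above is truncated before the constraint setup and the output checks, so what follows is a minimal sketch, not part of the commit, of the pattern the file's header comment describes: a ConstraintMatrix built on an IndexSet that contains only the locally owned vector elements, filled with a constraint of the form x_i = x_j whose target lives on a neighboring processor, and then applied to a PETSc vector via distribute(). The function name sketch_distribute, the particular choice of constrained indices, and the assumption of a deal.II build with MPI and PETSc enabled are illustrative, not taken from the actual test.

#include <deal.II/base/index_set.h>
#include <deal.II/base/utilities.h>
#include <deal.II/lac/constraint_matrix.h>
#include <deal.II/lac/petsc_parallel_vector.h>

using namespace dealii;

// Illustrative sketch: every processor owns 100 consecutive vector elements
// and constrains its first locally owned element to an element owned by the
// next processor, i.e. x_i = x_j with j = i + 100.
void sketch_distribute ()
{
  const unsigned int myid        = Utilities::MPI::this_mpi_process (MPI_COMM_WORLD);
  const unsigned int n_processes = Utilities::MPI::n_mpi_processes (MPI_COMM_WORLD);

  // 100 locally owned elements per processor, no ghost elements
  PETScWrappers::MPI::Vector vec (MPI_COMM_WORLD, 100*n_processes, 100);
  for (unsigned int i=vec.local_range().first; i<vec.local_range().second; ++i)
    vec(i) = i;
  vec.compress ();

  // an index set that holds only the locally owned range
  IndexSet locally_owned (vec.size());
  locally_owned.add_range (vec.local_range().first, vec.local_range().second);

  // the ConstraintMatrix is told only about locally owned elements, i.e.
  // about the *sources* of constraints, not about their *targets*
  ConstraintMatrix constraints (locally_owned);
  if (myid+1 < n_processes)
    {
      constraints.add_line (100*myid);                     // x_{100*myid}
      constraints.add_entry (100*myid, 100*(myid+1), 1.);  //   = x_{100*(myid+1)}
    }
  constraints.close ();

  // overwrite each constrained entry with the value of the entry it is
  // constrained to; this has to import data from other processors
  constraints.distribute (vec);
}

Note that the constraint target 100*(myid+1) is not part of locally_owned; that distribute() nevertheless fetches the required off-processor value is precisely the property the test above is meant to verify.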