From: heister
Date: Tue, 22 Feb 2011 21:22:02 +0000 (+0000)
Subject: PETSc test for SparseMatrix::clear_rows. Fails right now.
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=7f486b7cb6f80f6520fe05dd147e8988cfd106fa;p=dealii-svn.git

PETSc test for SparseMatrix::clear_rows. Fails right now.

git-svn-id: https://svn.dealii.org/trunk@23420 0785d39b-7218-0410-832d-ea1e28bc413d
---

diff --git a/tests/mpi/petsc_01.cc b/tests/mpi/petsc_01.cc
new file mode 100644
index 0000000000..aa18a866ae
--- /dev/null
+++ b/tests/mpi/petsc_01.cc
@@ -0,0 +1,101 @@
+//---------------------------------------------------------------------------
+//    $Id: simple_mpi_01.cc 23327 2011-02-11 03:19:07Z bangerth $
+//    Version: $Name$
+//
+//    Copyright (C) 2009 by the deal.II authors
+//
+//    This file is subject to QPL and may not be distributed
+//    without copyright and license information. Please refer
+//    to the file deal.II/doc/license.html for the text and
+//    further information on this license.
+//
+//---------------------------------------------------------------------------
+
+
+// PETScWrappers: document a bug with PETSc SparseMatrix and clear_rows():
+// at present, clear_rows() also removes the emptied rows from the
+// PETSc-internal sparsity pattern. This results in errors when the
+// matrix is reused later.
+
+#include "../tests.h"
+
+#include <deal.II/base/utilities.h>
+#include <deal.II/base/logstream.h>
+#include <deal.II/lac/compressed_simple_sparsity_pattern.h>
+#include <deal.II/lac/petsc_parallel_sparse_matrix.h>
+
+#include <fstream>
+//#include
+
+
+void test()
+{
+  unsigned int myid = Utilities::System::get_this_mpi_process (MPI_COMM_WORLD);
+  const unsigned int numprocs = Utilities::System::get_n_mpi_processes (MPI_COMM_WORLD);
+
+  if (myid==0)
+    deallog << "Running on " << numprocs << " CPU(s)." << std::endl;
+
+  // each process owns two consecutive rows; the sparsity pattern contains
+  // the diagonal plus the off-diagonal entry (1,0) written below
+  CompressedSimpleSparsityPattern csp(2*numprocs);
+  for (unsigned int i=0; i<2*numprocs; ++i)
+    csp.add(i,i);
+  csp.add(1,0);
+
+  PETScWrappers::MPI::SparseMatrix mat;
+  std::vector<unsigned int> local_rows(numprocs,2);
+
+  mat.reinit(MPI_COMM_WORLD, csp, local_rows, local_rows, myid);
+
+  mat.add(2*myid,2*myid,1.0);
+  mat.add(2*myid+1,2*myid+1,1.0);
+  mat.add(1,0,42.0);
+
+  mat.add((2*myid+2)%(2*numprocs),(2*myid+2)%(2*numprocs),0.1);
+
+  mat.compress();
+
+//  std::vector<unsigned int> rows(1,1);
+//  mat.clear_rows(rows);
+
+//  mat.write_ascii();
+  if (myid==0)
+    deallog << "2nd try" << std::endl;
+
+  mat = 0;
+  mat.add(1,0,42.0);
+  mat.add(2*myid,2*myid,1.0);
+  mat.add(2*myid+1,2*myid+1,1.0);
+
+  mat.add((2*myid+2)%(2*numprocs),(2*myid+2)%(2*numprocs),0.1);
+
+  mat.compress();
+//  mat.write_ascii();
+
+  if (myid==0)
+    deallog << "done" << std::endl;
+
+}
+
+
+int main(int argc, char *argv[])
+{
+  PetscInitialize(&argc,&argv,0,0);
+
+  if (Utilities::System::get_this_mpi_process (MPI_COMM_WORLD) == 0)
+    {
+      std::ofstream logfile(output_file_for_mpi("petsc_01").c_str());
+//      deallog.attach(logfile);
+//      deallog.depth_console(0);
+      deallog.threshold_double(1.e-10);
+
+      test();
+    }
+  else
+    test();
+
+  PetscFinalize();
+}
diff --git a/tests/mpi/petsc_01/ncpu_10/cmp/generic b/tests/mpi/petsc_01/ncpu_10/cmp/generic
new file mode 100644
index 0000000000..30b5ea49cd
--- /dev/null
+++ b/tests/mpi/petsc_01/ncpu_10/cmp/generic
@@ -0,0 +1,12 @@
+
+DEAL:mpi::Running on 10 CPU(s).
+DEAL:mpi::got message '1' from CPU 2!
+DEAL:mpi::got message '2' from CPU 3!
+DEAL:mpi::got message '3' from CPU 4!
+DEAL:mpi::got message '4' from CPU 5!
+DEAL:mpi::got message '5' from CPU 6!
+DEAL:mpi::got message '6' from CPU 7!
+DEAL:mpi::got message '7' from CPU 8!
+DEAL:mpi::got message '8' from CPU 9!
+DEAL:mpi::got message '9' from CPU 10!
+DEAL:mpi::done
diff --git a/tests/mpi/petsc_01/ncpu_3/cmp/generic b/tests/mpi/petsc_01/ncpu_3/cmp/generic
new file mode 100644
index 0000000000..588f4ae9eb
--- /dev/null
+++ b/tests/mpi/petsc_01/ncpu_3/cmp/generic
@@ -0,0 +1,5 @@
+
+DEAL:mpi::Running on 3 CPU(s).
+DEAL:mpi::got message '1' from CPU 2!
+DEAL:mpi::got message '2' from CPU 3!
+DEAL:mpi::done
diff --git a/tests/mpi/petsc_01/ncpu_4/cmp/generic b/tests/mpi/petsc_01/ncpu_4/cmp/generic
new file mode 100644
index 0000000000..8dcacbe8b3
--- /dev/null
+++ b/tests/mpi/petsc_01/ncpu_4/cmp/generic
@@ -0,0 +1,6 @@
+
+DEAL:mpi::Running on 4 CPU(s).
+DEAL:mpi::got message '1' from CPU 2!
+DEAL:mpi::got message '2' from CPU 3!
+DEAL:mpi::got message '3' from CPU 4!
+DEAL:mpi::done
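
For reference, below is a minimal standalone sketch, at the level of the raw PETSc API, of the behavior the comment in petsc_01.cc describes: MatZeroRows() (the operation clear_rows() maps onto) drops the zeroed entries from the matrix' nonzero pattern unless MAT_KEEP_NONZERO_PATTERN is set, so refilling those rows afterwards either triggers new allocations or fails, depending on the matrix options. This is only an illustration and not part of the commit: the file name is made up, the MatZeroRows() and MatDestroy() calls use the newer PETSc signatures (older releases take fewer arguments), and the option has been called MAT_KEEP_ZEROED_ROWS in some older releases.

// clear_rows_sketch.cc -- hypothetical illustration, not part of this commit
#include <petscmat.h>

int main(int argc, char *argv[])
{
  PetscInitialize(&argc, &argv, 0, 0);

  // small sequential AIJ matrix with a fixed sparsity pattern
  Mat mat;
  MatCreateSeqAIJ(PETSC_COMM_SELF, 4, 4, 2, NULL, &mat);

  // without this option, MatZeroRows() removes the zeroed entries from the
  // nonzero pattern; with it, the pattern is kept so the rows can be
  // refilled later without new allocations
  MatSetOption(mat, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE);

  for (PetscInt i=0; i<4; ++i)
    MatSetValue(mat, i, i, 1.0, INSERT_VALUES);
  MatSetValue(mat, 1, 0, 42.0, INSERT_VALUES);
  MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);

  // zero out row 1 -- this is what clear_rows() does underneath
  const PetscInt row = 1;
  MatZeroRows(mat, 1, &row, 0.0, NULL, NULL);

  // refill the same locations; without MAT_KEEP_NONZERO_PATTERN this is
  // the step that goes wrong when the matrix is reused
  MatSetValue(mat, 1, 0, 42.0, ADD_VALUES);
  MatSetValue(mat, 1, 1, 1.0, ADD_VALUES);
  MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);

  MatDestroy(&mat);
  PetscFinalize();
  return 0;
}

Whether deal.II eventually sets such an option inside the wrappers is outside the scope of this commit; the test added above only documents the failure.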