// function.
IndexSet tmp_index_set(stokes_matrix->block(1,1).m());
tmp_index_set.add_range(0,stokes_matrix->block(1,1).m());
- tmp.reinit(tmp_index_set);
+ tmp.reinit(tmp_index_set, MPI_COMM_WORLD);
}
// right hand sides to their correct sizes and block structure:
IndexSet temperature_partitioning (n_T);
temperature_partitioning.add_range(0,n_T);
- stokes_solution.reinit (stokes_block_sizes);
- old_stokes_solution.reinit (stokes_block_sizes);
- stokes_rhs.reinit (stokes_block_sizes);
+ stokes_solution.reinit (stokes_block_sizes, MPI_COMM_WORLD);
+ old_stokes_solution.reinit (stokes_block_sizes, MPI_COMM_WORLD);
+ stokes_rhs.reinit (stokes_block_sizes, MPI_COMM_WORLD);
- temperature_solution.reinit (temperature_partitioning);
- old_temperature_solution.reinit (temperature_partitioning);
- old_old_temperature_solution.reinit (temperature_partitioning);
+ temperature_solution.reinit (temperature_partitioning, MPI_COMM_WORLD);
+ old_temperature_solution.reinit (temperature_partitioning, MPI_COMM_WORLD);
+ old_old_temperature_solution.reinit (temperature_partitioning, MPI_COMM_WORLD);
- temperature_rhs.reinit (temperature_partitioning);
+ temperature_rhs.reinit (temperature_partitioning, MPI_COMM_WORLD);
}
Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv,
numbers::invalid_unsigned_int);
+ // This program can only be run in serial. Otherwise, throw an exception.
+ int size;
+ MPI_Comm_size(MPI_COMM_WORLD,&size);
+ AssertThrow(size==1, ExcMessage("This program can only be run in serial,"
+ " use mpirun -np 1 ./step-31"));
+
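  // As a side note: the same guard could be written with deal.II's own MPI
  // utilities instead of the raw MPI call. This is only an illustrative
  // sketch, not part of the patch:
  //
  //   AssertThrow(Utilities::MPI::n_mpi_processes(MPI_COMM_WORLD) == 1,
  //               ExcMessage("This program can only be run in serial,"
  //                          " use mpirun -np 1 ./step-31"));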
BoussinesqFlowProblem<2> flow_problem;
flow_problem.run ();
}
// The next step is to take care of the eigenspectrum. In this case, the
// outputs are eigenvalues and eigenfunctions, so we set the size of the
// list of eigenfunctions and eigenvalues to be as large as we asked for
  // in the input file. When using a PETScWrappers::MPI::Vector, the Vector
  // is initialized using an IndexSet. The IndexSet is used not only to resize the
  // PETScWrappers::MPI::Vector but also to associate each index in the
  // PETScWrappers::MPI::Vector with a degree of freedom (see step-40 for a
  // more detailed explanation). This association is done by the add_range()
// function:
IndexSet eigenfunction_index_set(dof_handler.n_dofs ());
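  // A minimal sketch of how this is used, assuming `n_eigenfunctions` stands
  // in for the number of eigenpairs requested in the input file (the name is
  // illustrative, not from the original code):
  //
  //   eigenfunction_index_set.add_range(0, dof_handler.n_dofs());
  //   for (unsigned int i = 0; i < n_eigenfunctions; ++i)
  //     eigenfunctions[i].reinit(eigenfunction_index_set, MPI_COMM_WORLD);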
using namespace Step36;
Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);
+
+ // This program can only be run in serial. Otherwise, throw an exception.
+ int size;
+ MPI_Comm_size(MPI_COMM_WORLD,&size);
+ AssertThrow(size==1, ExcMessage("This program can only be run in serial,"
+ " use mpirun -np 1 ./step-36"));
+
{
deallog.depth_console (0);
solution_index_set.set_size(dof_handler.n_dofs());
solution_index_set.add_range(0, dof_handler.n_dofs());
- solution.reinit (solution_index_set);
- system_rhs.reinit (solution_index_set);
- complete_system_rhs.reinit (solution_index_set);
- contact_force.reinit (solution_index_set);
+ solution.reinit (solution_index_set, MPI_COMM_WORLD);
+ system_rhs.reinit (solution_index_set, MPI_COMM_WORLD);
+ complete_system_rhs.reinit (solution_index_set, MPI_COMM_WORLD);
+ contact_force.reinit (solution_index_set, MPI_COMM_WORLD);
// The only other thing to do here is to compute the factors in the $B$
// matrix which is used to scale the residual. As discussed in the
Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv,
numbers::invalid_unsigned_int);
+ // This program can only be run in serial. Otherwise, throw an exception.
+ int size;
+ MPI_Comm_size(MPI_COMM_WORLD,&size);
+ AssertThrow(size==1, ExcMessage("This program can only be run in serial,"
+ " use mpirun -np 1 ./step-41"));
+
ObstacleProblem<2> obstacle_problem;
obstacle_problem.run ();
}
a_preconditioner (Apreconditioner)
{
IndexSet tmp_index_set(darcy_matrix->block(1,1).m());
  tmp_index_set.add_range(0,darcy_matrix->block(1,1).m());
- tmp.reinit(tmp_index_set);
+ tmp.reinit(tmp_index_set, MPI_COMM_WORLD);
}
darcy_index_set[1].set_size(n_p);
darcy_index_set[0].add_range(0,n_u);
darcy_index_set[1].add_range(0,n_p);
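  // (collect_sizes() below refreshes the block vector's cached block offsets
  // after the individual blocks have been resized by reinit().)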
- darcy_solution.reinit (darcy_index_set);
+ darcy_solution.reinit (darcy_index_set, MPI_COMM_WORLD);
darcy_solution.collect_sizes ();
- last_computed_darcy_solution.reinit (darcy_index_set);
+ last_computed_darcy_solution.reinit (darcy_index_set, MPI_COMM_WORLD);
last_computed_darcy_solution.collect_sizes ();
- second_last_computed_darcy_solution.reinit (darcy_index_set);
+ second_last_computed_darcy_solution.reinit (darcy_index_set, MPI_COMM_WORLD);
second_last_computed_darcy_solution.collect_sizes ();
- darcy_rhs.reinit (darcy_index_set);
+ darcy_rhs.reinit (darcy_index_set, MPI_COMM_WORLD);
darcy_rhs.collect_sizes ();
saturation_index_set.clear();
saturation_index_set.set_size(n_s);
saturation_index_set.add_range(0,n_s);
- saturation_solution.reinit (saturation_index_set);
- old_saturation_solution.reinit (saturation_index_set);
- old_old_saturation_solution.reinit (saturation_index_set);
+ saturation_solution.reinit (saturation_index_set, MPI_COMM_WORLD);
+ old_saturation_solution.reinit (saturation_index_set, MPI_COMM_WORLD);
+ old_old_saturation_solution.reinit (saturation_index_set, MPI_COMM_WORLD);
- saturation_matching_last_computed_darcy_solution.reinit (saturation_index_set);
+ saturation_matching_last_computed_darcy_solution.reinit (saturation_index_set,
+ MPI_COMM_WORLD);
- saturation_rhs.reinit (saturation_index_set);
+ saturation_rhs.reinit (saturation_index_set, MPI_COMM_WORLD);
}
Utilities::MPI::MPI_InitFinalize mpi_initialization (argc, argv,
numbers::invalid_unsigned_int);
+ // This program can only be run in serial. Otherwise, throw an exception.
+ int size;
+ MPI_Comm_size(MPI_COMM_WORLD,&size);
+ AssertThrow(size==1, ExcMessage("This program can only be run in serial,"
+ " use mpirun -np 1 ./step-43"));
+
TwoPhaseFlowProblem<2> two_phase_flow_problem(1);
two_phase_flow_problem.run ();
}