https://gitweb.dealii.org/ - dealii-svn.git/commitdiff
Fix up boundary values.
author wolf <wolf@0785d39b-7218-0410-832d-ea1e28bc413d>
Fri, 8 Apr 2005 22:16:26 +0000 (22:16 +0000)
committer wolf <wolf@0785d39b-7218-0410-832d-ea1e28bc413d>
Fri, 8 Apr 2005 22:16:26 +0000 (22:16 +0000)
git-svn-id: https://svn.dealii.org/trunk@10456 0785d39b-7218-0410-832d-ea1e28bc413d

deal.II/examples/step-17/step-17.cc

index 7d61aab1e111e87db57397afe9a3414dc8793e3d..8df87caa1ea9a43cc1076e42f3e3ef0dbf83ac97 100644 (file)
@@ -779,23 +779,68 @@ void ElasticProblem<dim>::assemble_system ()
                                          // a few cycles during this
                                          // operation.
 
-                                         // The second task is to take care of
-                                         // hanging node constraints. This is
-                                         // a little more complicated, since
-                                         // the rows and columns of
-                                         // constrained nodes have to be
-                                         // distributed to the rows and
-                                         // columns of those nodes to which
-                                         // they are constrained. This can't
-                                         // be done on a purely local basis,
-                                         // but it can be done while
-                                         // distributing the local system to
-                                         // the global one. This is what the
-                                         // following two calls do, i.e. they
-                                         // distribute to the global objects
-                                         // and at the same time make sure
-                                         // that hanging node constraints are
-                                         // taken care of:
+                                         // The second task is to take
+                                         // care of hanging node
+                                         // constraints. This is a
+                                         // little more complicated,
+                                         // since the rows and columns
+                                         // of constrained nodes have
+                                         // to be distributed to the
+                                         // rows and columns of those
+                                         // nodes to which they are
+                                         // constrained. This can't be
+                                         // done on a purely local
+                                         // basis, but it can be done
+                                         // while distributing the
+                                         // local system to the global
+                                         // one. This is what the
+                                         // following two calls do,
+                                         // i.e. they distribute to
+                                         // the global objects and at
+                                         // the same time make sure
+                                         // that hanging node
+                                         // constraints are taken care
+                                         // of. It turns out,
+                                         // unfortunately, that these
+                                         // functions again interfere
+                                         // with boundary values, and
+                                         // that there are a few nasty
+                                         // cases where a node may
+                                         // even be both constrained
+                                         // to other nodes and fixed
+                                         // to certain boundary
+                                         // values: these cases happen
+                                         // in 3d when one cell on the
+                                         // boundary is refined, but a
+                                         // neighboring boundary cell
+                                         // isn't. In this case, the
+                                         // functions we will be
+                                         // calling here need
+                                         // knowledge about which of
+                                         // the degrees of freedom are
+                                         // actually fixed, for which
+                                         // we have to pass the third
+                                         // argument. To make things
+                                         // worse, however, this is
+                                         // still not enough: we now
+                                         // have all constrained nodes
+                                         // right, and we have also
+                                         // eliminated the rows and
+                                         // columns of boundary nodes
+                                         // above, but the
+                                         // values of boundary nodes
+                                         // that also carry
+                                         // constraints will come out
+                                         // wrong. The solution to
+                                         // this problem is to fix
+                                         // them up again after the
+                                         // matrix is complete, which
+                                         // we will do at the end of
+                                         // this function. First, let
+                                         // us just transfer
+                                         // everything into the global
+                                         // matrix:
         hanging_node_constraints
           .distribute_local_to_global (cell_matrix,
                                        local_dof_indices,
@@ -826,6 +871,98 @@ void ElasticProblem<dim>::assemble_system ()
                                    // PETSc holds for them:
   system_matrix.compress ();
   system_rhs.compress ();
+
+                                  // As mentioned above, this is not
+                                  // yet all: The matrix and right
+                                  // hand side entries of boundary
+                                  // nodes may still be wrong. The
+                                  // reason for this is that the
+                                  // ``MatrixTools::local_apply_boundary_values''
+                                  // function removes the rows and
+                                  // columns of nodes that are fixed
+                                  // to their boundary values, except
+                                  // for the diagonal element of the
+                                  // matrix, and the
+                                  // ``ConstraintMatrix::distribute_local_to_global''
+                                  // functions handle hanging
+                                  // nodes. However, here is the
+                                  // problem: Since the row of a
+                                  // fixed node is empty except for
+                                  // the diagonal entry, its solution
+                                  // value equals the corresponding
+                                  // value in the right hand side
+                                  // vector divided by the diagonal
+                                  // element of the matrix. In other
+                                  // words, when we treat boundary
+                                  // nodes in the matrix, we not only
+                                  // have to make sure that we zero
+                                  // out the rows and columns of
+                                  // these degrees of freedom, but we
+                                  // also have to make sure that the
+                                  // diagonal entry of the matrix and
+                                  // the corresponding value of the
+                                  // right hand side are in
+                                  // synch. But the two calls to
+                                  // ``ConstraintMatrix::distribute_local_to_global''
+                                  // can't do that because they each
+                                  // only know about either the right
+                                  // hand side vector or the matrix,
+                                  // but not both. And even if we
+                                  // merged them into a single
+                                  // function that knows about both,
+                                  // that would not help much:
+                                  // PETSc's model of parallel
+                                  // computations is very much
+                                  // tailored to the concept of doing
+                                  // things in batches -- doing lots
+                                  // of additions to matrix or vector
+                                  // entries, doing lots of sets to
+                                  // matrix or vector entries, then
+                                  // calling ``compress'' and
+                                  // possibly reading some. Switching
+                                  // from one kind of operation to
+                                  // another usually triggers global
+                                  // communication between the
+                                  // parallel processes, making the
+                                  // program very slow. If we tried
+                                  // to keep matrix diagonal and
+                                  // right hand side vector elements
+                                  // in synch at all times, we could
+                                  // not do this in batch mode: we would
+                                  // add to the diagonal entry of the
+                                  // matrix, but then we would have
+                                  // to read its new value and set
+                                  // the corresponding value of the
+                                  // right hand side vector. That's
+                                  // inefficient. What we should do
+                                  // instead is keep adding entries
+                                  // throughout, and only at the very
+                                  // end fix up the few affected
+                                  // entries: for each fixed node we
+                                  // set the right hand side value to
+                                  // the boundary value times the
+                                  // matrix diagonal, so that the
+                                  // solution of that row comes out
+                                  // as the boundary value. This is how
+                                  // this is done (note that we only
+                                  // have to consider those entries
+                                  // of the matrix/right hand side
+                                  // vector that are handled on the
+                                  // present processor, since the
+                                  // other processors will take care
+                                  // of the rest; we add a test for a
+                                  // nonzero matrix entry just to be
+                                  // really sure that everything is
+                                  // ok):
+  for (std::map<unsigned int, double>::const_iterator
+        boundary_value = boundary_values.begin();
+       boundary_value != boundary_values.end(); ++boundary_value)
+    if ((boundary_value->first >= system_matrix.local_range().first)
+       &&
+       (boundary_value->first < system_matrix.local_range().second))
+      {
+       Assert (system_matrix.diag_element (boundary_value->first) != 0,
+               ExcInternalError());
+       
+       system_rhs(boundary_value->first)
+         = (boundary_value->second *
+            system_matrix.diag_element (boundary_value->first));
+      }
 }
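
For context, the call to ``MatrixTools::local_apply_boundary_values'' that the comments above refer to happens earlier in assemble_system() and is not part of the hunks shown in this commit. The following is a minimal, hypothetical sketch of how the per-cell steps fit together; the helper function name, the variable names, the include paths (pre-8.0 layout, no dealii namespace) and the choice of the eliminate-columns flag are assumptions made for illustration, and only the two library calls themselves are taken from the comments in this commit:

// Hypothetical sketch, not the actual step-17 code: eliminate boundary
// values on the local contributions of one cell and then distribute them
// to the global matrix and right hand side, resolving hanging node
// constraints on the way. Include paths follow the pre-8.0 source layout
// used at the time of this commit; newer releases use <deal.II/...>
// paths and put everything into namespace dealii.
#include <lac/full_matrix.h>
#include <lac/vector.h>
#include <lac/petsc_parallel_sparse_matrix.h>
#include <lac/petsc_parallel_vector.h>
#include <dofs/dof_constraints.h>
#include <numerics/matrices.h>

#include <map>
#include <vector>

void distribute_cell_contributions
  (FullMatrix<double>                  &cell_matrix,
   Vector<double>                      &cell_rhs,
   const std::vector<unsigned int>     &local_dof_indices,
   const std::map<unsigned int,double> &boundary_values,
   const ConstraintMatrix              &hanging_node_constraints,
   PETScWrappers::MPI::SparseMatrix    &system_matrix,
   PETScWrappers::MPI::Vector          &system_rhs)
{
                                   // Zero out the row of each locally
                                   // fixed node except for its diagonal
                                   // entry and adjust the local right
                                   // hand side accordingly; the last
                                   // argument asks for the corresponding
                                   // columns to be eliminated as well
                                   // (whether the actual program does
                                   // this is not visible here).
  MatrixTools::local_apply_boundary_values (boundary_values,
                                            local_dof_indices,
                                            cell_matrix,
                                            cell_rhs,
                                            true);

                                   // Transfer the local contributions
                                   // into the global objects. These are
                                   // the "two calls" mentioned in the
                                   // comments: they distribute rows and
                                   // columns of hanging nodes to the
                                   // nodes they are constrained to.
  hanging_node_constraints
    .distribute_local_to_global (cell_matrix,
                                 local_dof_indices,
                                 system_matrix);
  hanging_node_constraints
    .distribute_local_to_global (cell_rhs,
                                 local_dof_indices,
                                 system_rhs);
}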
 
 
