From: Wolfgang Bangerth
Date: Wed, 17 Jan 2024 00:20:43 +0000 (-0700)
Subject: Work around warnings about invalid array accesses.
X-Git-Tag: relicensing~150^2
X-Git-Url: https://gitweb.dealii.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=e085ee7c222f27d35e049fbc3ed144c9438499ef;p=dealii.git

Work around warnings about invalid array accesses.
---

diff --git a/source/base/polynomials_rannacher_turek.cc b/source/base/polynomials_rannacher_turek.cc
index b6836910df..d85986634d 100644
--- a/source/base/polynomials_rannacher_turek.cc
+++ b/source/base/polynomials_rannacher_turek.cc
@@ -69,34 +69,42 @@ Tensor<1, dim>
 PolynomialsRannacherTurek<dim>::compute_grad(const unsigned int i,
                                              const Point<dim> &p) const
 {
-  Assert(dim == 2, ExcNotImplemented());
-  Tensor<1, dim> grad;
-  if (i == 0)
-    {
-      grad[0] = -2.5 + 3 * p(0);
-      grad[1] = 1.5 - 3 * p(1);
-    }
-  else if (i == 1)
-    {
-      grad[0] = -0.5 + 3.0 * p(0);
-      grad[1] = 1.5 - 3.0 * p(1);
-    }
-  else if (i == 2)
-    {
-      grad[0] = 1.5 - 3.0 * p(0);
-      grad[1] = -2.5 + 3.0 * p(1);
-    }
-  else if (i == 3)
+  if constexpr (dim == 2)
     {
-      grad[0] = 1.5 - 3.0 * p(0);
-      grad[1] = -0.5 + 3.0 * p(1);
+      Tensor<1, dim> grad;
+      if (i == 0)
+        {
+          grad[0] = -2.5 + 3 * p(0);
+          grad[1] = 1.5 - 3 * p(1);
+        }
+      else if (i == 1)
+        {
+          grad[0] = -0.5 + 3.0 * p(0);
+          grad[1] = 1.5 - 3.0 * p(1);
+        }
+      else if (i == 2)
+        {
+          grad[0] = 1.5 - 3.0 * p(0);
+          grad[1] = -2.5 + 3.0 * p(1);
+        }
+      else if (i == 3)
+        {
+          grad[0] = 1.5 - 3.0 * p(0);
+          grad[1] = -0.5 + 3.0 * p(1);
+        }
+      else
+        {
+          Assert(false, ExcNotImplemented());
+        }
+
+      return grad;
     }
+
   else
     {
       Assert(false, ExcNotImplemented());
+      return {};
     }
-
-  return grad;
 }



@@ -107,37 +115,45 @@ PolynomialsRannacherTurek<dim>::compute_grad_grad(
   const unsigned int i,
   const Point<dim> & /*p*/) const
 {
-  Assert(dim == 2, ExcNotImplemented());
-  Tensor<2, dim> grad_grad;
-  if (i == 0)
+  if constexpr (dim == 2)
     {
-      grad_grad[0][0] = 3;
-      grad_grad[0][1] = 0;
-      grad_grad[1][0] = 0;
-      grad_grad[1][1] = -3;
-    }
-  else if (i == 1)
-    {
-      grad_grad[0][0] = 3;
-      grad_grad[0][1] = 0;
-      grad_grad[1][0] = 0;
-      grad_grad[1][1] = -3;
-    }
-  else if (i == 2)
-    {
-      grad_grad[0][0] = -3;
-      grad_grad[0][1] = 0;
-      grad_grad[1][0] = 0;
-      grad_grad[1][1] = 3;
+      Tensor<2, dim> grad_grad;
+      if (i == 0)
+        {
+          grad_grad[0][0] = 3;
+          grad_grad[0][1] = 0;
+          grad_grad[1][0] = 0;
+          grad_grad[1][1] = -3;
+        }
+      else if (i == 1)
+        {
+          grad_grad[0][0] = 3;
+          grad_grad[0][1] = 0;
+          grad_grad[1][0] = 0;
+          grad_grad[1][1] = -3;
+        }
+      else if (i == 2)
+        {
+          grad_grad[0][0] = -3;
+          grad_grad[0][1] = 0;
+          grad_grad[1][0] = 0;
+          grad_grad[1][1] = 3;
+        }
+      else if (i == 3)
+        {
+          grad_grad[0][0] = -3;
+          grad_grad[0][1] = 0;
+          grad_grad[1][0] = 0;
+          grad_grad[1][1] = 3;
+        }
+      return grad_grad;
     }
-  else if (i == 3)
+
+  else
     {
-      grad_grad[0][0] = -3;
-      grad_grad[0][1] = 0;
-      grad_grad[1][0] = 0;
-      grad_grad[1][1] = 3;
+      Assert(false, ExcNotImplemented());
+      return {};
     }
-  return grad_grad;
 }
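
Background note on the pattern (not part of the commit): the old code relied on a run-time Assert(dim == 2, ...), but the compiler still instantiates the 2d-only statements for every dimension, so for dim == 1 the accesses grad[1], p(1), and grad_grad[1][1] refer to entries that do not exist in a Tensor<1, 1> or Point<1>, which is presumably what triggered the invalid-array-access warnings. With if constexpr, the 2d branch is discarded at compile time for all other dimensions, so those accesses are never instantiated. A minimal self-contained sketch of the same pattern, with hypothetical names (gradient_like) and the standard assert standing in for deal.II's Assert:

    #include <array>
    #include <cassert>

    // Sketch of the pattern used in the commit: keep dimension-specific array
    // accesses inside an `if constexpr` branch so that instantiations for
    // other dimensions never contain them.
    template <int dim>
    std::array<double, dim> gradient_like(const double x, const double y)
    {
      // Old pattern (for comparison): a run-time assert alone does not stop
      // the compiler from seeing g[1] in the dim == 1 instantiation, which is
      // what produces out-of-bounds warnings such as -Warray-bounds:
      //
      //   assert(dim == 2);
      //   std::array<double, dim> g;
      //   g[0] = -2.5 + 3 * x;
      //   g[1] = 1.5 - 3 * y;   // index 1 is past the end when dim == 1
      //   return g;

      // New pattern: the dim != 2 instantiations never contain the g[1] access.
      if constexpr (dim == 2)
        {
          std::array<double, dim> g;
          g[0] = -2.5 + 3 * x;
          g[1] = 1.5 - 3 * y;
          return g;
        }
      else
        {
          assert(false); // not implemented for this dimension
          return {};
        }
    }

    // The 1d instantiation now compiles without the warning; calling it would
    // still fail the assertion at run time, mirroring ExcNotImplemented().
    template std::array<double, 1> gradient_like<1>(double, double);

    int main()
    {
      const auto g = gradient_like<2>(0.25, 0.75);
      (void)g;
      return 0;
    }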