diff options
Diffstat (limited to 'stokes.hpp')
-rw-r--r-- | stokes.hpp | 50 |
1 file changed, 44 insertions(+), 6 deletions(-)
@@ -59,6 +59,36 @@ class Rope { return z.size() - 2; } + double length() const { + double l = 0; + + for (unsigned i = 0; i < z.size() - 1; i++) { + l += (z[i + 1] - z[i]).norm(); + } + + return l; + } + + template <int p> + double error(const Tensor<Scalar, p>& J) const { + Scalar H0, HN; + std::tie(H0, std::ignore, std::ignore) = hamGradHess(J, z.front()); + std::tie(HN, std::ignore, std::ignore) = hamGradHess(J, z.back()); + + double ImH = imag((H0 + HN) / 2.0); + + double err = 0; + + for (unsigned i = 1; i < z.size() - 1; i++) { + Scalar Hi; + std::tie(Hi, std::ignore, std::ignore) = hamGradHess(J, z[i]); + + err += pow(imag(Hi) - ImH, 2); + } + + return sqrt(err); + } + Rope(unsigned N, const Vector<Scalar>& z1, const Vector<Scalar>& z2) : z(N + 2) { for (unsigned i = 0; i < N + 2; i++) { z[i] = normalize(z1 + (z2 - z1) * ((double)i / (N + 1.0))); @@ -86,6 +116,16 @@ class Rope { δz[i] = variation(z[i], dz(i), ddz(i), dH, ddH); } + // We return a δz with average norm of one. + double mag = 0; + for (unsigned i = 1; i < z.size() - 1; i++) { + mag += δz[i].norm(); + } + + for (unsigned i = 1; i < z.size() - 1; i++) { + δz[i] /= mag / n(); + } + return δz; } @@ -95,10 +135,12 @@ class Rope { Rope rNew = *this; double δ = δ0; + // We rescale the step size by the distance between points. + double Δl = length() / (n() + 1); while (rNew.cost(J) >= cost(J)) { for (unsigned i = 1; i < z.size() - 1; i++) { - rNew.z[i] = normalize(z[i] - δ * Δz[i].conjugate()); + rNew.z[i] = normalize(z[i] - (δ * Δl) * Δz[i].conjugate()); } δ /= 2; @@ -114,11 +156,7 @@ class Rope { } void spread() { - double l = 0; - - for (unsigned i = 0; i < z.size() - 1; i++) { - l += (z[i + 1] - z[i]).norm(); - } + double l = length(); double a = 0; unsigned pos = 0; |