From 1369382dcab07078e48e929e76802cdc4a1ceeca Mon Sep 17 00:00:00 2001
From: Jaron Kent-Dobias
Date: Mon, 8 Nov 2021 23:31:58 +0100
Subject: Removed a lot more old code, including old Rope class.

---
 langevin.cpp |   6 +-
 stokes.hpp   | 280 -----------------------------------------------------------
 2 files changed, 3 insertions(+), 283 deletions(-)

diff --git a/langevin.cpp b/langevin.cpp
index 9aec7c8..86aeb9e 100644
--- a/langevin.cpp
+++ b/langevin.cpp
@@ -1,9 +1,9 @@
 #include 
 #include 
-#include 
 #include 
 #include 
+#include "Eigen/Dense"
 #include "Eigen/src/Eigenvalues/ComplexEigenSolver.h"
 #include "Eigen/src/Eigenvalues/SelfAdjointEigenSolver.h"
 #include "complex_normal.hpp"
@@ -196,8 +196,8 @@ int main(int argc, char* argv[]) {
 
   J = exp(Complex(0, φ)) * J;
 
-  Cord test(J, zSaddle, zSaddleNext, 5);
-  test.relaxNewton(J, 25, 1, 1e4);
+  Cord test(J, zSaddle, zSaddleNext, 3);
+  test.relaxNewton(J, 10, 1, 1e4);
 
   std::cout << test.z0.transpose() << std::endl;
   std::cout << test.z1.transpose() << std::endl;
diff --git a/stokes.hpp b/stokes.hpp
index cf68d9b..b3ae90b 100644
--- a/stokes.hpp
+++ b/stokes.hpp
@@ -4,232 +4,6 @@
 #include "complex_normal.hpp"
 #include "dynamics.hpp"
 
-#include 
-
-class ropeRelaxationStallException: public std::exception {
-  virtual const char* what() const throw() {
-    return "Gradient descent stalled.";
-  }
-};
-
-template 
-class Rope {
-  public:
-    std::vector> z;
-
-    Rope(unsigned N) : z(N + 2) {}
-
-    template 
-    Rope(unsigned N, const Vector& z1, const Vector& z2, const Tensor& J) : z(N + 2) {
-      Scalar H1, H2;
-      std::tie(H1, std::ignore, std::ignore) = hamGradHess(J, z1);
-      std::tie(H2, std::ignore, std::ignore) = hamGradHess(J, z2);
-
-      if (real(H1) > real(H2)) {
-        z.front() = z1;
-        z.back() = z2;
-      } else {
-        z.front() = z2;
-        z.back() = z1;
-      }
-
-      for (unsigned i = 0; i < N + 2; i++) {
-        z[i] = normalize(z.front() + (z.back() - z.front()) * ((Real)i / (N + 1.0)));
-      }
-    }
-
-    unsigned n() const {
-      return z.size() - 2;
-    }
-
-    Real length() const {
-      Real l = 0;
-
-      for (unsigned i = 0; i < z.size() - 1; i++) {
-        l += (z[i + 1] - z[i]).norm();
-      }
-
-      return l;
-    }
-
-    Vector dz(unsigned i) const {
-      return z[i + 1] - z[i - 1];
-    }
-
-    template 
-    std::vector> generateDiscreteGradientδz(const Tensor& J, Real γ) const {
-      std::vector> δz(z.size());
-
-      std::vector> ż(z.size());
-      std::vector> dH(z.size());
-      std::vector> ddH(z.size());
-
-#pragma omp parallel for
-      for (unsigned i = 1; i < z.size() - 1; i++) {
-        std::tie(std::ignore, dH[i], ddH[i]) = hamGradHess(J, z[i]);
-        ż[i] = zDot(z[i], dH[i]);
-      }
-
-#pragma omp parallel for
-      for (unsigned i = 1; i < z.size() - 1; i++) {
-        Real z² = z[i].squaredNorm();
-        Matrix dż = (dH[i].conjugate() - (dH[i].dot(z[i]) / z²) * z[i].conjugate()) * z[i].adjoint() / z²;
-        Matrix dżc = -ddH[i] + (ddH[i] * z[i].conjugate()) * z[i].transpose() / z²
-          + (z[i].dot(dH[i]) / z²) * (
-              Matrix::Identity(ddH[i].rows(), ddH[i].cols()) - z[i].conjugate() * z[i].transpose() / z²
-            );
-
-        Vector dC = -0.5 * (dżc * dz(i) + dż * dz(i).conjugate() - (dżc * ż[i] + dż * ż[i].conjugate()) * real(ż[i].dot(dz(i))) / ż[i].squaredNorm()) / (ż[i].norm() * dz(i).norm());
-
-        if (i > 1) {
-          dC += -0.5 * (ż[i - 1].conjugate() - dz(i - 1).conjugate() * real(ż[i - 1].dot(dz(i - 1))) / dz(i - 1).squaredNorm()) / (ż[i - 1].norm() * dz(i - 1).norm());
-        }
-
-        if (i < z.size() - 2) {
-          dC += 0.5 * (ż[i + 1].conjugate() - dz(i + 1).conjugate() * real(ż[i + 1].dot(dz(i + 1))) / dz(i + 1).squaredNorm()) / (ż[i + 1].norm() * dz(i + 1).norm());
-        }
-
-        dC += γ * (2 * z[i] - z[i - 1] - z[i + 1]).conjugate();
-
-        δz[i] = dC.conjugate();
-
-        δz[i] -= z[i].conjugate() * z[i].conjugate().dot(δz[i]) / z²;
-      }
-
-      return δz;
-    }
-
-    void spread() {
-      Real l = length();
-
-      Real a = 0;
-      unsigned pos = 0;
-
-      std::vector> zNew = z;
-
-      for (unsigned i = 1; i < z.size() - 1; i++) {
-        Real b = i * l / (z.size() - 1);
-
-        while (b > a) {
-          pos++;
-          a += (z[pos] - z[pos - 1]).norm();
-        }
-
-        Vector δz = z[pos] - z[pos - 1];
-
-        zNew[i] = normalize(z[pos] - (a - b) / δz.norm() * δz);
-      }
-
-      z = zNew;
-    }
-
-    template 
-    Real perturb(const Tensor& J, Real δ₀, const std::vector>& δz, Real γ = 0) {
-      Rope rNew = *this;
-      Real δ = δ₀;
-      // We rescale the step size by the distance between points.
-      Real Δl = length() / (n() + 1);
-
-      while (true) {
-        for (unsigned i = 1; i < z.size() - 1; i++) {
-          rNew.z[i] = normalize(z[i] - (δ * Δl) * δz[i]);
-        }
-
-        if (rNew.cost(J, γ) < cost(J, γ)) {
-          break;
-        } else {
-          δ /= 2;
-        }
-
-        if (δ < 1e-15) {
-          throw ropeRelaxationStallException();
-        }
-      }
-
-// rNew.spread();
-
-      z = rNew.z;
-
-      return δ;
-    }
-
-    template 
-    void relaxDiscreteGradient(const Tensor& J, unsigned N, Real δ0, Real γ) {
-      Real δ = δ0;
-      try {
-        for (unsigned i = 0; i < N; i++) {
-          std::vector> δz = generateDiscreteGradientδz(J, γ);
-          double stepSize = 0;
-          for (const Vector& v : δz) {
-            stepSize += v.norm();
-          }
-          if (stepSize / δz.size() < 1e-6) {
-            break;
-          }
-          std::cout << cost(J) << " " << stepSize / δz.size() << std::endl;
-          δ = 2 * perturb(J, δ, δz, γ);
-        }
-      } catch (std::exception& e) {
-      }
-    }
-
-    template 
-    Real cost(const Tensor& J, Real γ = 0) const {
-      Real c = 0;
-
-      for (unsigned i = 1; i < z.size() - 1; i++) {
-        Vector dH;
-        std::tie(std::ignore, dH, std::ignore) = hamGradHess(J, z[i]);
-
-        Vector zD = zDot(z[i], dH);
-
-        c += 1.0 - real(zD.dot(dz(i))) / zD.norm() / dz(i).norm();
-      }
-
-      for (unsigned i = 0; i < z.size() - 1; i++) {
-        c += γ * (z[i + 1] - z[i]).squaredNorm();
-      }
-
-      return c;
-    }
-
-    Rope interpolate() const {
-      Rope r(2 * n() + 1);
-
-      for (unsigned i = 0; i < z.size(); i++) {
-        r.z[2 * i] = z[i];
-      }
-
-      for (unsigned i = 0; i < z.size() - 1; i++) {
-        r.z[2 * i + 1] = normalize(((z[i] + z[i + 1]) / 2.0));
-      }
-
-      return r;
-    }
-};
-
-template 
-bool stokesLineTest(const Tensor& J, const Vector& z1, const Vector& z2, unsigned n0, unsigned steps) {
-  Rope stokes(n0, z1, z2, J);
-
-  Real oldCost = stokes.cost(J);
-
-  for (unsigned i = 0; i < steps; i++) {
-    stokes.relaxDiscreteGradient(J, 1e6, 1, pow(2, steps));
-
-    Real newCost = stokes.cost(J);
-
-    if (newCost > oldCost) {
-      return false;
-    }
-
-    oldCost = newCost;
-
-    stokes = stokes.interpolate();
-  }
-  return true;
-}
-
 template 
 class Cord {
 public:
@@ -509,59 +283,6 @@ public:
     return {δ / 2, stepSize};
   }
 
-  template 
-  std::pair relaxStep(const Tensor& J, unsigned nt, Real δ₀) {
-    std::vector> dgsTot(gs.size(), Vector::Zero(z0.size()));
-
-    for (unsigned i = 0; i < nt; i++) {
-      Real t = (i + 1.0) / (nt + 1.0);
-      std::vector> dgsI = dgs(J, t);
-
-      for (unsigned j = 0; j < gs.size(); j++) {
-        dgsTot[j] += dgsI[j] / nt;
-      }
-    }
-
-    Real stepSize = 0;
-    for (const Vector& dgi : dgsTot) {
-      stepSize += dgi.squaredNorm();
-    }
-    stepSize = sqrt(stepSize);
-
-    Cord cNew(*this);
-
-    Real δ = δ₀;
-    Real oldCost = totalCost(J, nt);
-    Real newCost = std::numeric_limits::infinity();
-
-    while (newCost > oldCost) {
-      for (unsigned i = 0; i < gs.size(); i++) {
-        cNew.gs[i] = gs[i] - δ * dgsTot[i];
-      }
-
-      newCost = cNew.totalCost(J, nt);
-
-      δ /= 2;
-    }
-
-    gs = cNew.gs;
-
-    return {2 * δ, stepSize};
-  }
-
-  template 
-  void relax(const Tensor& J, unsigned nt, Real δ₀, unsigned maxSteps) {
-    Real δ = δ₀;
-    Real stepSize = std::numeric_limits::infinity();
-    unsigned steps = 0;
-    while (stepSize > 1e-7 && steps < maxSteps) {
-      std::tie(δ, stepSize) = relaxStep(J, nt, δ);
-      std::cout << totalCost(J, nt) << " " << δ << " " << stepSize << std::endl;
-      δ *= 2;
-      steps++;
-    }
-  }
-
   template 
   void relaxNewton(const Tensor& J, unsigned nt, Real δ₀, unsigned maxSteps) {
     Real δ = δ₀;
@@ -569,7 +290,6 @@ public:
     unsigned steps = 0;
     while (stepSize > 1e-7 && steps < maxSteps) {
       std::tie(δ, stepSize) = relaxStepNewton(J, nt, δ);
-      std::cout << totalCost(J, nt) << " " << δ << " " << stepSize << std::endl;
      δ /= 3;
       steps++;
     }
-- 
cgit v1.2.3-70-g09d2