author     Jaron Kent-Dobias <jaron@kent-dobias.com>   2021-11-08 17:37:53 +0100
committer  Jaron Kent-Dobias <jaron@kent-dobias.com>   2021-11-08 17:37:53 +0100
commit     897df7986e9cafcfa0c5f60a424d5a452dceca07 (patch)
tree       6fb1634c97568209799ed4e8da2f4d2f3f534bac
parent     5715f5fbd77b86edbf8ff78f511fe88870e7b37e (diff)
Progress towards minimizer for real models.
-rw-r--r--  dynamics.hpp  84
-rw-r--r--  langevin.cpp  14
2 files changed, 76 insertions(+), 22 deletions(-)
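
This commit adds a damped-Newton (Levenberg-Marquardt-style) minimizer, findMinimum, for models with real couplings, along with a randomMinimum wrapper that retries random starting points, and it rewrites findSaddle to solve a single N x N Newton system on the sphere-projected gradient instead of the doubled system in ż and its conjugate. The langevin.cpp driver then locates a random minimum of a real coupling tensor and prints the spectrum of the projected Hessian. A rough usage sketch of the new entry points, assuming the repository's Rng, Tensor/Vector aliases, generateCouplings, and hamGradHess; the values of N, σ, and ε are placeholders:

// Sketch only (not part of the commit). Draw Gaussian couplings, then retry
// random starting points on the sphere until findMinimum either reaches
// |g| / N < ε or its damping parameter λ saturates.
Rng r;
Tensor<Real, p> J = generateCouplings<Real, p>(N, std::normal_distribution<Real>(0, σ), r.engine());
Vector<Real> zMin = randomMinimum(J, std::normal_distribution<Real>(0, 1), r, ε);

// Energy, gradient, and Hessian at the point that was found.
auto [H, dH, ddH] = hamGradHess(J, zMin);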
diff --git a/dynamics.hpp b/dynamics.hpp
index 0597f35..639803b 100644
--- a/dynamics.hpp
+++ b/dynamics.hpp
@@ -44,8 +44,45 @@ std::tuple<Real, Vector<Scalar>> gradientDescent(const Tensor<Scalar, p>& J, con
return {W, z};
}
+template <class Real, int p>
+Vector<Real> findMinimum(const Tensor<Real, p>& J, const Vector<Real>& z0, Real ε) {
+ Vector<Real> z = z0;
+ Real λ = 10;
+
+ auto [H, dH, ddH] = hamGradHess(J, z);
+
+ Vector<Real> g = dH - z.dot(dH) * z / (Real)z.size();
+ Matrix<Real> M = ddH - (dH * z.transpose() + z.dot(dH) * Matrix<Real>::Identity(z.size(), z.size()) + (ddH * z) * z.transpose()) / (Real)z.size() + 2.0 * z * z.transpose();
+
+ while (g.norm() / z.size() > ε && λ < 1e8) {
+ Vector<Real> dz = (M + λ * Matrix<Real>::Identity(z.size(), z.size())).partialPivLu().solve(g);
+ dz -= z.dot(dz) * z / (Real)z.size();
+ Vector<Real> zNew = normalize(z - dz);
+
+ auto [HNew, dHNew, ddHNew] = hamGradHess(J, zNew);
+
+ if (HNew <= H * 1.01) {
+ z = zNew;
+ H = HNew;
+ dH = dHNew;
+ ddH = ddHNew;
+
+ g = dH - z.dot(dH) * z / (Real)z.size();
+ M = ddH - (dH * z.transpose() + z.dot(dH) * Matrix<Real>::Identity(z.size(), z.size()) + (ddH * z) * z.transpose()) / (Real)z.size() + 2.0 * z * z.transpose();
+
+ λ /= 1.001;
+ } else {
+ λ *= 1.5;
+ }
+
+ std::cout << "error : " << H << " " << g.norm() / z.size() << " " << λ << std::endl;
+ }
+
+ return z;
+}
+
template <class Real, class Scalar, int p>
-Vector<Scalar> findSaddle(const Tensor<Scalar, p>& J, const Vector<Scalar>& z0, Real ε, Real δW = 2, Real γ0 = 1, Real δγ = 2) {
+Vector<Scalar> findSaddle(const Tensor<Scalar, p>& J, const Vector<Scalar>& z0, Real ε) {
Vector<Scalar> z = z0;
Vector<Scalar> dH;
@@ -53,33 +90,21 @@ Vector<Scalar> findSaddle(const Tensor<Scalar, p>& J, const Vector<Scalar>& z0,
std::tie(std::ignore, dH, ddH) = hamGradHess(J, z);
Scalar zz = z.transpose() * z;
- Vector<Scalar> ż = zDot(z, dH) + z * (zz - (Real)z.size());
- Matrix<Scalar> dż = dzDot(z, dH) + Matrix<Scalar>::Identity(z.size(), z.size()) * (zz - (Real)z.size()) + 2.0 * z * z.transpose();
- Matrix<Scalar> dżc = dzDotConjugate(z, dH, ddH);
-
- Vector<Scalar> b(2 * z.size());
- Matrix<Scalar> M(2 * z.size(), 2 * z.size());
+ Vector<Scalar> g = dH - (z.transpose() * dH) * z / (Real)z.size() + z * (zz - (Real)z.size());
+ Matrix<Scalar> M = ddH - (dH * z.transpose() + (z.transpose() * dH) * Matrix<Scalar>::Identity(z.size(), z.size()) + (ddH * z) * z.transpose()) / (Real)z.size() + Matrix<Scalar>::Identity(z.size(), z.size()) * (zz - (Real)z.size()) + 2.0 * z * z.transpose();
- b << ż.conjugate(), ż;
- M << dż.conjugate(), dżc, dżc.conjugate(), dż;
-
- while (ż.norm() > ε) {
- Vector<Scalar> dz = M.partialPivLu().solve(b).tail(z.size());
+ while (g.norm() / z.size() > ε) {
+ Vector<Scalar> dz = M.partialPivLu().solve(g);
dz -= z.conjugate().dot(dz) / z.squaredNorm() * z.conjugate();
z = normalize(z - dz);
- std::cout << "error : " << z.transpose() * z << " "<< ż.norm() << " " << dz.norm() << std::endl;
- getchar();
-
std::tie(std::ignore, dH, ddH) = hamGradHess(J, z);
- zz = z.transpose() * z;
- ż = zDot(z, dH) + z * (zz - (Real)z.size());
- dż = dzDot(z, dH) + Matrix<Scalar>::Identity(z.size(), z.size()) * (zz - (Real)z.size()) + 2.0 * z * z.transpose();
- dżc = dzDotConjugate(z, dH, ddH);
+ std::cout << "error : " << g.norm() / z.size() << std::endl;
- b << ż.conjugate(), ż;
- M << dż.conjugate(), dżc, dżc.conjugate(), dż;
+ zz = z.transpose() * z;
+ g = dH - (z.transpose() * dH) * z / (Real)z.size() + z * (zz - (Real)z.size());
+ M = ddH - (dH * z.transpose() + (z.transpose() * dH) * Matrix<Scalar>::Identity(z.size(), z.size()) + (ddH * z) * z.transpose()) / (Real)z.size() + Matrix<Scalar>::Identity(z.size(), z.size()) * (zz - (Real)z.size()) + 2.0 * z * z.transpose();
}
return z;
@@ -120,6 +145,23 @@ std::tuple<Real, Vector<Scalar>> metropolis(const Tensor<Scalar, p>& J, const Ve
return {E, z};
}
+template <class Real, int p, class Distribution, class Generator>
+Vector<Real> randomMinimum(const Tensor<Real, p>& J, Distribution d, Generator& r, Real ε) {
+ Vector<Real> zSaddle;
+ bool foundSaddle = false;
+
+ while (!foundSaddle) {
+ Vector<Real> z0 = normalize(randomVector<Real>(J.dimension(0), d, r.engine()));
+
+ try {
+ zSaddle = findMinimum(J, z0, ε);
+ foundSaddle = true;
+ } catch (std::exception& e) {}
+ }
+
+ return zSaddle;
+}
+
template <class Real, class Scalar, int p, class Distribution, class Generator>
Vector<Scalar> randomSaddle(const Tensor<Scalar, p>& J, Distribution d, Generator& r, Real ε) {
Vector<Scalar> zSaddle;
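
In findMinimum above, both the gradient g and the Newton step dz are kept tangent to the constraint sphere z·z = N through the same inline expression, v - (z·v) z / N. A minimal sketch of that operation as a standalone helper in plain Eigen types; the name tangentProject and the free-function form are hypothetical, not something the commit introduces:

#include <Eigen/Dense>

// Hypothetical helper: project v onto the tangent space of the sphere
// z^T z = N at z. Since normalize() enforces z^T z = N, v - (z.v) z / N is
// the orthogonal projection v - (z.v / z.z) z that findMinimum writes inline.
template <class Real>
Eigen::Matrix<Real, Eigen::Dynamic, 1>
tangentProject(const Eigen::Matrix<Real, Eigen::Dynamic, 1>& z,
               const Eigen::Matrix<Real, Eigen::Dynamic, 1>& v) {
  return v - (z.dot(v) / (Real)z.size()) * z;
}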
diff --git a/langevin.cpp b/langevin.cpp
index adb5212..a775774 100644
--- a/langevin.cpp
+++ b/langevin.cpp
@@ -5,6 +5,7 @@
#include <list>
#include "Eigen/src/Eigenvalues/ComplexEigenSolver.h"
+#include "Eigen/src/Eigenvalues/SelfAdjointEigenSolver.h"
#include "complex_normal.hpp"
#include "p-spin.hpp"
#include "dynamics.hpp"
@@ -139,6 +140,16 @@ int main(int argc, char* argv[]) {
Rng r;
+ Tensor<Real, p> ReJ = generateCouplings<Real, p>(N, std::normal_distribution<Real>(0, σ), r.engine());
+
+ std::normal_distribution<Real> Red(0, 1);
+
+ Vector<Real> zMin = randomMinimum(ReJ, Red, r, ε);
+ auto [Hr, dHr, ddHr] = hamGradHess(ReJ, zMin);
+ Eigen::EigenSolver<Matrix<Real>> eigenS(ddHr - ((ddHr * zMin) * zMin.transpose()) / (Real)zMin.size());
+ std::cout << eigenS.eigenvalues().transpose() << std::endl;
+ getchar();
+
complex_normal_distribution<Real> d(0, 1, 0);
ComplexTensor J = generateCouplings<Complex, p>(N, complex_normal_distribution<Real>(0, σ, κ), r.engine());
@@ -161,6 +172,7 @@ int main(int argc, char* argv[]) {
zSaddleNext = findSaddle(J, z0, ε);
Real saddleDistance = (zSaddleNext - zSaddle).norm();
if (saddleDistance / N > 1e-2) {
+ std::cout << saddleDistance << std::endl;
foundSaddle = true;
}
} catch (std::exception& e) {}
@@ -222,7 +234,7 @@ int main(int argc, char* argv[]) {
*/
Cord test(J, zSaddle, zSaddleNext, 5);
- test.relaxNewton(J, 20, 1, 1e4);
+ test.relaxNewton(J, 25, 1, 1e4);
std::cout << test.z0.transpose() << std::endl;
std::cout << test.z1.transpose() << std::endl;
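
The new block in main() diagonalizes ddHr - (ddHr zMin) zMin^T / N with the general Eigen::EigenSolver, while the newly included SelfAdjointEigenSolver header suggests a symmetric route. A minimal sketch of that alternative, assuming the repository's Matrix/Vector aliases and a symmetric Hessian from hamGradHess; the helper name printTangentSpectrum is hypothetical:

#include <iostream>
#include <Eigen/Dense>

// Hypothetical variant: project the Hessian on both sides with the
// orthogonal projector P = I - z z^T / N (valid because z^T z = N).
// P ddH P stays symmetric, so SelfAdjointEigenSolver returns a real, sorted
// spectrum with a single trivial zero mode along z itself.
template <class Real>
void printTangentSpectrum(const Matrix<Real>& ddH, const Vector<Real>& z) {
  Matrix<Real> P = Matrix<Real>::Identity(z.size(), z.size())
                 - z * z.transpose() / (Real)z.size();
  Eigen::SelfAdjointEigenSolver<Matrix<Real>> es(P * ddH * P);
  std::cout << es.eigenvalues().transpose() << std::endl;
}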