diff options
Diffstat (limited to 'p-spin.hpp')
-rw-r--r-- | p-spin.hpp | 21 |
1 files changed, 14 insertions, 7 deletions
@@ -19,12 +19,13 @@ using Tensor = Eigen::Tensor<Scalar, PSPIN_P>; std::tuple<Scalar, Vector, Matrix> hamGradHess(const Tensor& J, const Vector& z) { Matrix Jz = contractDown(J, z); // Contracts J into p - 2 copies of z. Vector Jzz = Jz * z; + Scalar Jzzz = Jzz.transpose() * z; - double f = factorial(p); + double pBang = factorial(p); - Matrix hessian = ((p - 1) * p / f) * Jz; - Vector gradient = (p / f) * Jzz; - Scalar hamiltonian = (1 / f) * Jzz.transpose() * z; + Matrix hessian = ((p - 1) * p / pBang) * Jz; + Vector gradient = (p / pBang) * Jzz; + Scalar hamiltonian = Jzzz / pBang; return {hamiltonian, gradient, hessian}; } @@ -34,10 +35,16 @@ std::tuple<double, Vector> WdW(const Tensor& J, const Vector& z) { Matrix hessian; std::tie(std::ignore, gradient, hessian) = hamGradHess(J, z); - Vector projectedGradient = (gradient - ((gradient.transpose() * z) * z / (double)z.size())).conjugate(); + Scalar zGrad = gradient.transpose() * z; + double N = z.size(); - double W = projectedGradient.cwiseAbs2().sum(); - Vector dW = hessian * projectedGradient - ((z.transpose() * gradient) * projectedGradient + (z.transpose() * projectedGradient) * (gradient + hessian * z)) / (double)z.size(); + Vector projGrad = gradient - (zGrad / N) * z; + Vector projGradConj = projGrad.conjugate(); + + Scalar zProjGrad = z.transpose() * projGradConj; + + double W = projGrad.squaredNorm(); + Vector dW = hessian * (projGradConj - (zProjGrad / N) * z) - (zGrad * projGradConj + zProjGrad * gradient) / N; return {W, dW}; } |