author | Jaron Kent-Dobias <jaron@kent-dobias.com> | 2021-01-05 18:15:00 +0100 |
---|---|---|
committer | Jaron Kent-Dobias <jaron@kent-dobias.com> | 2021-01-05 18:15:00 +0100 |
commit | 252ad65ae0af4249bc089cd18aad4ab739b80d70 (patch) | |
tree | d34813ed2f8bfeedce467151be0f80ad44fedaa0 | |
parent | 5ee6815f0734b2089c5b4c068cc21f2983bdba24 (diff) | |
Small clean-up.
-rw-r--r-- | p-spin.hpp | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
```diff
@@ -34,7 +34,7 @@ std::tuple<double, Vector> WdW(const Tensor& J, const Vector& z) {
   Matrix hessian;
   std::tie(std::ignore, gradient, hessian) = hamGradHess(J, z);
 
-  Vector projectedGradient = (gradient - ((Scalar)(gradient.transpose() * z) / (double)z.size()) * z).conjugate();
+  Vector projectedGradient = (gradient - ((gradient.transpose() * z) * z / (double)z.size())).conjugate();
 
   double W = projectedGradient.cwiseAbs2().sum();
   Vector dW = hessian * projectedGradient - ((z.transpose() * gradient) * projectedGradient + (z.transpose() * projectedGradient) * (gradient + hessian * z)) / (double)z.size();
```
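The clean-up drops the explicit `Scalar` cast: instead of casting the inner product `gradient.transpose() * z` to a scalar, dividing it by `N = z.size()`, and then scaling `z`, the new expression scales `z` by the inner product first and divides the resulting vector by `N` afterwards. Both groupings compute the same quantity, the conjugate of `g - (gᵀz / N) z`, so the change is purely syntactic. Below is a minimal standalone sketch, not part of the repository, checking that the two groupings agree numerically; it assumes `Vector` and `Scalar` are aliases for Eigen's `VectorXcd` and `std::complex<double>`, consistent with the `transpose()`/`conjugate()` usage in the diff.

```cpp
// Standalone sketch, not from the repository: assumes Eigen with
// Vector = VectorXcd and Scalar = std::complex<double>, mirroring the
// aliases that p-spin.hpp appears to use.
#include <Eigen/Dense>
#include <complex>
#include <iostream>

using Scalar = std::complex<double>;
using Vector = Eigen::VectorXcd;

int main() {
  const int n = 8;
  Vector z = Vector::Random(n);
  Vector gradient = Vector::Random(n);

  // Unconjugated inner product g^T z, as in the diff's
  // gradient.transpose() * z; .value() extracts the 1x1 result.
  Scalar gz = (gradient.transpose() * z).value();

  // Old grouping: divide the scalar by N first, then scale z.
  Vector oldForm = (gradient - (gz / (double)n) * z).conjugate();

  // New grouping: scale z by the scalar, divide the vector by N afterwards.
  Vector newForm = (gradient - gz * z / (double)n).conjugate();

  // The two expressions differ only in where the division by N happens,
  // so any difference should sit at floating-point roundoff level.
  std::cout << "max |old - new| = "
            << (oldForm - newForm).cwiseAbs().maxCoeff() << "\n";
}
```

Compiled against Eigen 3.3 or later, the printed difference should be at machine-epsilon scale, confirming that the commit changes grouping and casting but not the projected gradient itself.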