author     Jaron Kent-Dobias <jaron@kent-dobias.com>   2021-01-05 18:15:00 +0100
committer  Jaron Kent-Dobias <jaron@kent-dobias.com>   2021-01-05 18:15:00 +0100
commit     252ad65ae0af4249bc089cd18aad4ab739b80d70 (patch)
tree       d34813ed2f8bfeedce467151be0f80ad44fedaa0
parent     5ee6815f0734b2089c5b4c068cc21f2983bdba24 (diff)
Small clean-up.
-rw-r--r--  p-spin.hpp  |  2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/p-spin.hpp b/p-spin.hpp
index a522aee..b90d80b 100644
--- a/p-spin.hpp
+++ b/p-spin.hpp
@@ -34,7 +34,7 @@ std::tuple<double, Vector> WdW(const Tensor& J, const Vector& z) {
 Matrix hessian;
 std::tie(std::ignore, gradient, hessian) = hamGradHess(J, z);
- Vector projectedGradient = (gradient - ((Scalar)(gradient.transpose() * z) / (double)z.size()) * z).conjugate();
+ Vector projectedGradient = (gradient - ((gradient.transpose() * z) * z / (double)z.size())).conjugate();
 double W = projectedGradient.cwiseAbs2().sum();
 Vector dW = hessian * projectedGradient - ((z.transpose() * gradient) * projectedGradient + (z.transpose() * projectedGradient) * (gradient + hessian * z)) / (double)z.size();
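
For context (not part of the commit): the changed line removes the component of gradient along z, normalized by z.size(), and conjugates the result. The old and new expressions compute the same vector; the new one simply drops the explicit Scalar cast and regroups so the division by z.size() comes last. Below is a minimal, self-contained sketch of that projection. It is not taken from p-spin.hpp: the aliases Scalar = std::complex<double> and Vector = Eigen::VectorXcd are assumptions consistent with the Eigen-style calls in the hunk (transpose, conjugate, cwiseAbs2), and the inner product gradient^T z is written out here as an explicit Scalar for clarity.

#include <Eigen/Dense>
#include <complex>
#include <iostream>

// Assumed aliases; the real definitions live elsewhere in the repository.
using Scalar = std::complex<double>;
using Vector = Eigen::VectorXcd;

int main() {
  const int N = 6;                      // placeholder system size
  Vector z = Vector::Random(N);         // stand-in for the current point
  Vector gradient = Vector::Random(N);  // stand-in for the gradient at z

  // Unconjugated inner product gradient^T z, taken as an explicit Scalar.
  Scalar overlap = (gradient.array() * z.array()).sum();

  // Subtract the component along z (normalized by z.size()), then conjugate,
  // mirroring the "+" line of the hunk above.
  Vector projectedGradient = (gradient - (overlap / (double)z.size()) * z).conjugate();

  // Squared norm of the projected gradient, as in the following line of WdW.
  double W = projectedGradient.cwiseAbs2().sum();

  std::cout << W << std::endl;
  return 0;
}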