author     Jaron Kent-Dobias <jaron@kent-dobias.com>    2025-02-11 15:26:41 -0300
committer  Jaron Kent-Dobias <jaron@kent-dobias.com>    2025-02-11 15:26:41 -0300
commit     8331cf653f6ac80ebb9c96c9c844803ce0278d43 (patch)
tree       7a92c9e0cce3eec16c2c05008921a9cc64c5aa5b /fits.cpp
parent     01a22225f2d207f04df595290e0e5c742a29ccee (diff)
Split up code, worked on presentation slides.
Diffstat (limited to 'fits.cpp')
-rw-r--r--  fits.cpp  | 140
1 file changed, 1 insertion(+), 139 deletions(-)
diff --git a/fits.cpp b/fits.cpp
index 09f9be5..682886a 100644
--- a/fits.cpp
+++ b/fits.cpp
@@ -1,145 +1,7 @@
#include <getopt.h>
#include <iomanip>
-#include "pcg-cpp/include/pcg_random.hpp"
-#include "randutils/randutils.hpp"
-
-#include "eigen/Eigen/Dense"
-
-using Rng = randutils::random_generator<pcg32>;
-
-using Real = double;
-using Vector = Eigen::Matrix<Real, Eigen::Dynamic, 1>;
-using Matrix = Eigen::Matrix<Real, Eigen::Dynamic, Eigen::Dynamic>;
-using Data = std::vector<std::tuple<Real, Real>>;
-
-#ifndef FITS_ABS_BASIS
-inline Real basisFunctions(unsigned N, unsigned i, Real x) {
- return std::legendre(i, 2.0 * (x - 0.5));
-}
-#endif
-
-#ifdef FITS_ABS_BASIS
-inline Real basisFunctions(unsigned N, unsigned i, Real x) {
- return abs(x - i / (N - 1.0));
-}
-#endif
-
-Real value(const Vector& coeffs, Real x) {
- Real v = 0;
-
- for (unsigned j = 0; j < coeffs.size(); j++) {
- v += coeffs[j] * basisFunctions(coeffs.size(), j, x);
- }
-
- return v;
-}
-
-Real cost(Data::const_iterator batchBegin, Data::const_iterator batchEnd, const Vector& coeffs) {
- Real c = 0;
-
-#pragma omp parallel for reduction(+:c)
- for (Data::const_iterator it = batchBegin; it != batchEnd; it++) {
- Real x = std::get<0>(*it);
- Real y = std::get<1>(*it);
- c += 0.5 * pow(value(coeffs, x) - y, 2);
- }
-
- return c;
-}
-
-Vector dCost(Data::const_iterator batchBegin, Data::const_iterator batchEnd, const Vector& coeffs) {
- Vector dC = Vector::Zero(coeffs.size());
-
- for (Data::const_iterator it = batchBegin; it != batchEnd; it++) {
- Real x = std::get<0>(*it);
- Real y = std::get<1>(*it);
- Real Δc = value(coeffs, x) - y;
-
-#pragma omp parallel for
- for (unsigned j = 0; j < coeffs.size(); j++) {
- dC[j] += Δc * basisFunctions(coeffs.size(), j, x);
- }
- }
-
- return dC;
-}
-
-Vector underSolve(const Data& data, unsigned N) {
- unsigned M = data.size();
- Matrix A(M, N);
- Vector b(M);
- for (unsigned i = 0; i < M; i++) {
- Real x = std::get<0>(data[i]);
- Real y = std::get<1>(data[i]);
- b[i] = y;
- for (unsigned j = 0; j < N; j++) {
- A(i, j) = basisFunctions(N, j, x);
- }
- }
-
- return A.colPivHouseholderQr().solve(b);
-}
-
-Vector stochasticGradientDescent(const Data& data, const Vector& a₀, unsigned nBatches, long unsigned maxSteps, Real ε = 1e-12) {
- Vector xₜ = a₀;
- Real Hₜ;
- Real α = 1.0;
- Real m;
- Vector ∇H;
- long unsigned steps = 0;
-
- unsigned batchSize = data.size() / nBatches;
- Data::const_iterator batchBegin = data.begin();
- Data::const_iterator batchEnd = data.begin() + batchSize;
- Data::const_iterator effectiveEnd = data.begin() + batchSize * nBatches;
-
- while (
- Hₜ = cost(batchBegin, batchEnd, xₜ),
- ∇H = dCost(batchBegin, batchEnd, xₜ),
- m = ∇H.squaredNorm(),
- m > ε && steps < maxSteps
- ) {
- Vector xₜ₊₁;
- Real Hₜ₊₁;
-
- while (
- xₜ₊₁ = xₜ - α * ∇H, Hₜ₊₁ = cost(batchBegin, batchEnd, xₜ₊₁),
- Hₜ₊₁ > Hₜ - 0.25 * α * m
- ) {
- α /= 2;
- }
-
- xₜ = xₜ₊₁;
- α *= 1.25;
- steps++;
-
- if (batchEnd == data.end()) {
- batchBegin = data.begin();
- batchEnd = data.begin() + batchSize;
- } else if (batchEnd == effectiveEnd) {
- batchBegin = effectiveEnd;
- batchEnd = data.end();
- } else {
- batchBegin += batchSize;
- batchEnd += batchSize;
- }
- }
-
- return xₜ;
-}
-
-Data generateData(Real(*f)(Real), unsigned M, Real σ, Rng& r) {
- Data data;
- data.reserve(M);
-
- for (unsigned i = 0; i < M; i++) {
- Real x = ((Real)i) / (M - 1.0);
- data.push_back({x, f(x) + r.variate<Real, std::normal_distribution>(0, σ)});
- }
-
- return data;
-}
+#include "fits.hpp"
int main(int argc, char* argv[]) {
unsigned Nend = 1;
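
The definitions removed above are replaced by a single #include "fits.hpp", so that header presumably now carries the shared types and function declarations. The following is a minimal sketch of what fits.hpp would need to expose for the trimmed fits.cpp to keep compiling; the signatures are copied from the code removed in this diff, but the header itself is not shown in this commit, so treat this as an assumption rather than its actual contents.

// Hypothetical fits.hpp: declarations inferred from the definitions removed
// in this diff; the actual header added in this commit is not shown here.
#pragma once

#include <tuple>
#include <vector>

#include "pcg-cpp/include/pcg_random.hpp"
#include "randutils/randutils.hpp"

#include "eigen/Eigen/Dense"

using Rng = randutils::random_generator<pcg32>;

using Real = double;
using Vector = Eigen::Matrix<Real, Eigen::Dynamic, 1>;
using Matrix = Eigen::Matrix<Real, Eigen::Dynamic, Eigen::Dynamic>;
using Data = std::vector<std::tuple<Real, Real>>;

// The inline basisFunctions(N, i, x) definitions (Legendre polynomials by
// default, |x − i/(N−1)| when FITS_ABS_BASIS is defined) would also move
// into this header.

// Value of the fit with coefficients `coeffs` at the point x.
Real value(const Vector& coeffs, Real x);

// Half the sum of squared residuals over a batch of (x, y) samples.
Real cost(Data::const_iterator batchBegin, Data::const_iterator batchEnd, const Vector& coeffs);

// Gradient of the cost with respect to the coefficients.
Vector dCost(Data::const_iterator batchBegin, Data::const_iterator batchEnd, const Vector& coeffs);

// Least-squares fit of N basis coefficients to the data via QR decomposition.
Vector underSolve(const Data& data, unsigned N);

// Minibatch gradient descent with a backtracking line search on each step.
Vector stochasticGradientDescent(const Data& data, const Vector& a₀, unsigned nBatches, long unsigned maxSteps, Real ε = 1e-12);

// M evenly spaced samples of f on [0, 1] with Gaussian noise of width σ.
Data generateData(Real(*f)(Real), unsigned M, Real σ, Rng& r);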