From b14314b424e85d5bdf7746c1794682610a35c876 Mon Sep 17 00:00:00 2001
From: Fabian Joswig
Date: Wed, 2 Mar 2022 15:10:07 +0000
Subject: [PATCH] fix: Improved error handling for fits and root finding when
 numpy is used instead of autograd.numpy. Tests added.

---
 pyerrors/fits.py    | 10 ++++++++--
 pyerrors/roots.py   |  5 ++++-
 tests/fits_test.py  | 36 +++++++++++++++++++++++++++++-------
 tests/roots_test.py | 12 ++++++++++++
 4 files changed, 53 insertions(+), 10 deletions(-)

diff --git a/pyerrors/fits.py b/pyerrors/fits.py
index e290dbfb..4e9c80d3 100644
--- a/pyerrors/fits.py
+++ b/pyerrors/fits.py
@@ -260,7 +260,10 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
                 output.chisquare_by_expected_chisquare)
 
     fitp = out.beta
-    hess_inv = np.linalg.pinv(jacobian(jacobian(odr_chisquare))(np.concatenate((fitp, out.xplus.ravel()))))
+    try:
+        hess_inv = np.linalg.pinv(jacobian(jacobian(odr_chisquare))(np.concatenate((fitp, out.xplus.ravel()))))
+    except TypeError:
+        raise Exception("It is required to use autograd.numpy instead of numpy within fit functions, see the documentation for details.") from None
 
     def odr_chisquare_compact_x(d):
         model = func(d[:n_parms], d[n_parms:n_parms + m].reshape(x_shape))
@@ -537,7 +540,10 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
                 output.chisquare_by_expected_chisquare)
 
     fitp = fit_result.x
-    hess_inv = np.linalg.pinv(jacobian(jacobian(chisqfunc))(fitp))
+    try:
+        hess_inv = np.linalg.pinv(jacobian(jacobian(chisqfunc))(fitp))
+    except TypeError:
+        raise Exception("It is required to use autograd.numpy instead of numpy within fit functions, see the documentation for details.") from None
 
     if kwargs.get('correlated_fit') is True:
         def chisqfunc_compact(d):
diff --git a/pyerrors/roots.py b/pyerrors/roots.py
index e26b44e7..5f6134b5 100644
--- a/pyerrors/roots.py
+++ b/pyerrors/roots.py
@@ -31,7 +31,10 @@ def find_root(d, func, guess=1.0, **kwargs):
 
     # Error propagation as detailed in arXiv:1809.01289
     dx = jacobian(func)(root[0], d.value)
-    da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
+    try:
+        da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
+    except TypeError:
+        raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
     deriv = - da / dx
 
     res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (d.value + np.finfo(np.float64).eps) * root[0], [d], man_grad=[deriv])
diff --git a/tests/fits_test.py b/tests/fits_test.py
index 4367ac65..da43b4ec 100644
--- a/tests/fits_test.py
+++ b/tests/fits_test.py
@@ -1,4 +1,5 @@
-import autograd.numpy as np
+import numpy as np
+import autograd.numpy as anp
 import math
 import scipy.optimize
 from scipy.odr import ODR, Model, RealData
@@ -41,12 +42,12 @@ def test_least_squares():
         oy.append(pe.pseudo_Obs(y[i], yerr[i], str(i)))
 
     def f(x, a, b):
-        return a * np.exp(-b * x)
+        return a * anp.exp(-b * x)
 
     popt, pcov = scipy.optimize.curve_fit(f, x, y, sigma=[o.dvalue for o in oy], absolute_sigma=True)
 
     def func(a, x):
-        y = a[0] * np.exp(-a[1] * x)
+        y = a[0] * anp.exp(-a[1] * x)
         return y
 
     out = pe.least_squares(x, oy, func, expected_chisquare=True, resplot=True, qqplot=True)
@@ -95,7 +96,7 @@ def test_alternative_solvers():
         oy.append(pe.pseudo_Obs(y[i], yerr[i], 'test'))
 
     def func(a, x):
-        y = a[0] * np.exp(-a[1] * x)
+        y = a[0] * anp.exp(-a[1] * x)
         return y
 
     chisquare_values = []
@@ -145,7 +146,7 @@ def test_correlated_fit():
                 return p[1] + p[0] * x
         else:
            def fitf(p, x):
-                return p[1] * np.exp(-p[0] * x)
+                return p[1] * anp.exp(-p[0] * x)
 
     fitp = pe.least_squares(x, data, fitf, expected_chisquare=True)
@@ -172,10 +173,10 @@ def test_total_least_squares():
         oy.append(pe.pseudo_Obs(y[i], yerr[i], str(i)))
 
     def f(x, a, b):
-        return a * np.exp(-b * x)
+        return a * anp.exp(-b * x)
 
     def func(a, x):
-        y = a[0] * np.exp(-a[1] * x)
+        y = a[0] * anp.exp(-a[1] * x)
         return y
 
     data = RealData([o.value for o in ox], [o.value for o in oy], sx=[o.dvalue for o in ox], sy=[o.dvalue for o in oy])
@@ -336,6 +337,27 @@ def test_error_band():
     pe.fits.error_band(x, f, fitp.fit_parameters)
 
 
+def test_fit_no_autograd():
+    dim = 10
+    x = np.arange(dim)
+    y = 2 * np.exp(-0.08 * x) + np.random.normal(0.0, 0.15, dim)
+    yerr = 0.1 + 0.1 * np.random.rand(dim)
+
+    oy = []
+    for i, item in enumerate(x):
+        oy.append(pe.pseudo_Obs(y[i], yerr[i], str(i)))
+
+    def func(a, x):
+        y = a[0] * np.exp(-a[1] * x)
+        return y
+
+    with pytest.raises(Exception):
+        pe.least_squares(x, oy, func)
+
+    with pytest.raises(Exception):
+        pe.total_least_squares(oy, oy, func)
+
+
 def test_ks_test():
     def f(a, x):
         y = a[0] + a[1] * x
diff --git a/tests/roots_test.py b/tests/roots_test.py
index dbb27bbe..b6e9bdb0 100644
--- a/tests/roots_test.py
+++ b/tests/roots_test.py
@@ -30,3 +30,15 @@ def test_root_linear_idl():
 
     difference = my_obs - my_root
     assert difference.is_zero()
+
+
+def test_root_no_autograd():
+
+    def root_function(x, d):
+        return x - np.log(np.exp(d))
+
+    value = np.random.normal(0, 100)
+    my_obs = pe.pseudo_Obs(value, 0.1, 't')
+
+    with pytest.raises(Exception):
+        my_root = pe.roots.find_root(my_obs, root_function)
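
Usage note: the snippet below is a minimal sketch of the user-facing behaviour this patch addresses. The toy data and the helper names func_plain and func_autograd are illustrative assumptions (they mirror the new test_fit_no_autograd); pe.pseudo_Obs, pe.least_squares and the error message itself come from the patch and tests above.

    import numpy as np
    import autograd.numpy as anp
    import pyerrors as pe

    # Illustrative toy data, set up like the new test_fit_no_autograd fixture.
    dim = 10
    x = np.arange(dim)
    y = 2 * np.exp(-0.08 * x) + np.random.normal(0.0, 0.15, dim)
    yerr = 0.1 + 0.1 * np.random.rand(dim)
    oy = [pe.pseudo_Obs(y[i], yerr[i], str(i)) for i in range(dim)]

    def func_plain(a, x):
        # plain numpy: autograd cannot differentiate this, so the Hessian
        # computation inside the fit fails with a TypeError
        return a[0] * np.exp(-a[1] * x)

    def func_autograd(a, x):
        # autograd.numpy: fully differentiable, fits work as before
        return a[0] * anp.exp(-a[1] * x)

    # With this patch the plain-numpy variant raises a descriptive Exception
    # ("It is required to use autograd.numpy instead of numpy within fit
    # functions, ...") instead of the opaque TypeError:
    # pe.least_squares(x, oy, func_plain)

    fit_result = pe.least_squares(x, oy, func_autograd)

Catching the TypeError only around the Hessian evaluation keeps the fit path itself unchanged, and raising "from None" suppresses the long autograd traceback so the actionable message is the first thing the user sees.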