feat: least_squares fit error propagation can now also be performed via numerical derivatives.
Fabian Joswig 2022-10-05 17:44:38 +01:00
parent c111189522
commit ee4149e498

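For context, a minimal usage sketch of the new option. Everything except the num_grad keyword is illustrative: the exponential model, the pseudo-data built with pe.pseudo_Obs, and the ensemble name are made up and not part of this commit.

import autograd.numpy as anp  # autograd's numpy so the default (automatic differentiation) path can differentiate func
import pyerrors as pe

def func(a, x):
    # simple two-parameter exponential model, purely for illustration
    return a[0] * anp.exp(-a[1] * x)

x = anp.arange(1., 6.)
y = [pe.pseudo_Obs(2.0 * anp.exp(-0.3 * xi) + 0.05, 0.05, 'ensemble1') for xi in x]
[o.gamma_method() for o in y]  # make sure the pseudo-data carries error estimates

fit_auto = pe.fits.least_squares(x, y, func)                 # error propagation via autograd (default)
fit_num = pe.fits.least_squares(x, y, func, num_grad=True)   # error propagation via numdifftools (new)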

@@ -9,8 +9,9 @@ import matplotlib.pyplot as plt
 from matplotlib import gridspec
 from scipy.odr import ODR, Model, RealData
 import iminuit
-from autograd import jacobian
+from autograd import jacobian as auto_jacobian
 from autograd import elementwise_grad as egrad
+from numdifftools import Jacobian as num_jacobian
 from .obs import Obs, derived_observable, covariance, cov_Obs
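The import swap works because both wrappers follow the same call pattern: each takes a function and returns a callable that evaluates the Jacobian at a point, so the rest of the module can bind either one to a single jacobian name. A standalone sketch with a made-up toy function f:

import autograd.numpy as anp
from autograd import jacobian as auto_jacobian
from numdifftools import Jacobian as num_jacobian

def f(p):
    # toy vector-valued function of the parameters, for illustration only
    return anp.array([p[0] ** 2 + p[1], anp.sin(p[0] * p[1])])

p0 = anp.array([0.5, 1.5])
J_auto = auto_jacobian(f)(p0)  # exact derivatives from automatic differentiation
J_num = num_jacobian(f)(p0)    # finite-difference approximation of the same matrix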
@@ -114,6 +115,8 @@ def least_squares(x, y, func, priors=None, silent=False, **kwargs):
         If True, a plot which displays fit, data and residuals is generated (default False).
     qqplot : bool
         If True, a quantile-quantile plot of the fit result is generated (default False).
+    num_grad : bool
+        Use numerical differentiation instead of automatic differentiation to perform the error propagation (default False).
     '''
     if priors is not None:
         return _prior_fit(x, y, func, priors, silent=silent, **kwargs)
@@ -174,6 +177,8 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
     x_shape = x.shape
+    jacobian = auto_jacobian
     if not callable(func):
         raise TypeError('func has to be a function.')
@@ -318,6 +323,11 @@ def _prior_fit(x, y, func, priors, silent=False, **kwargs):
     x = np.asarray(x)
+    if kwargs.get('num_grad') is True:
+        jacobian = num_jacobian
+    else:
+        jacobian = auto_jacobian
     if not callable(func):
         raise TypeError('func has to be a function.')
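The lines added here (and repeated in _standard_fit below) read the flag from **kwargs, so existing calls are unaffected: only an explicit num_grad=True switches the error propagation to finite differences. Condensed into a hypothetical helper, not part of the commit, just to mirror the pattern:

from autograd import jacobian as auto_jacobian
from numdifftools import Jacobian as num_jacobian

def _select_jacobian(**kwargs):
    # hypothetical helper mirroring the added lines: default to autograd,
    # fall back to numdifftools only when num_grad=True is passed explicitly
    return num_jacobian if kwargs.get('num_grad') is True else auto_jacobian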
@@ -441,6 +451,11 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
     x = np.asarray(x)
+    if kwargs.get('num_grad') is True:
+        jacobian = num_jacobian
+    else:
+        jacobian = auto_jacobian
     if x.shape[-1] != len(y):
         raise Exception('x and y input have to have the same length')
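Since only the differentiation backend changes, both modes should produce the same central fit values and closely matching propagated errors. A quick consistency check, reusing the illustrative x, y and func from the sketch near the top of this page:

fit_auto = pe.fits.least_squares(x, y, func)                 # autograd backend (default)
fit_num = pe.fits.least_squares(x, y, func, num_grad=True)   # numdifftools backend

for p_a, p_n in zip(fit_auto.fit_parameters, fit_num.fit_parameters):
    p_a.gamma_method()
    p_n.gamma_method()
    # central values are identical (same minimizer); the errors should agree
    # up to the accuracy of the finite-difference approximation
    print(p_a, p_n)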