refactor: logic in least square fits simplified.

Co-authored-by: Simon Kuberski <simon.kuberski@uni-muenster.de>
commit 2ac38515b6
parent a140b2ab39
Author: Fabian Joswig
Date: 2023-03-02 18:39:57 +00:00


@@ -580,6 +580,13 @@ def _combined_fit(x, y, func, silent=False, **kwargs):
     else:
         x0 = [0.1] * n_parms
 
+    def general_chisqfunc_uncorr(p, ivars):
+        model = anp.concatenate([anp.array(funcd[key](p, anp.asarray(xd[key]))).reshape(-1) for key in key_ls])
+        return ((ivars - model) / dy_f)
+
+    def chisqfunc_uncorr(p):
+        return anp.sum(general_chisqfunc_uncorr(p, y_f) ** 2)
+
     if kwargs.get('correlated_fit') is True:
         corr = covariance(y_all, correlation=True, **kwargs)
         covdiag = np.diag(1 / np.asarray(dy_f))
@@ -591,26 +598,15 @@ def _combined_fit(x, y, func, silent=False, **kwargs):
         chol = np.linalg.cholesky(corr)
         chol_inv = scipy.linalg.solve_triangular(chol, covdiag, lower=True)
 
-        def general_chisqfunc_corr(p, ivars):
+        def general_chisqfunc(p, ivars):
             model = anp.concatenate([anp.array(funcd[key](p, anp.asarray(xd[key]))).reshape(-1) for key in key_ls])
             return anp.dot(chol_inv, (ivars - model))
 
-    def general_chisqfunc(p, ivars):
-        model = anp.concatenate([anp.array(funcd[key](p, anp.asarray(xd[key]))).reshape(-1) for key in key_ls])
-        return ((ivars - model) / dy_f)
-
-    if kwargs.get('correlated_fit') is True:
-        def chisqfunc_residuals_corr(p):
-            return general_chisqfunc_corr(p, y_f)
-
-        def chisqfunc_corr(p):
-            return anp.sum(chisqfunc_residuals_corr(p) ** 2)
-
-    def chisqfunc_residuals(p):
-        return general_chisqfunc(p, y_f)
-
-    def chisqfunc(p):
-        return anp.sum(chisqfunc_residuals(p) ** 2)
+        def chisqfunc(p):
+            return anp.sum(general_chisqfunc(p, y_f) ** 2)
+    else:
+        general_chisqfunc = general_chisqfunc_uncorr
+        chisqfunc = chisqfunc_uncorr
 
     output.method = kwargs.get('method', 'Levenberg-Marquardt')
     if not silent:
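
Taken together, the two hunks above implement a branch-once pattern: the uncorrelated residual function is defined unconditionally, the correlated branch only swaps in a Cholesky-whitened variant, and every later caller can use general_chisqfunc / chisqfunc without checking correlated_fit again. A minimal, self-contained sketch of that pattern (illustrative only; model_func, x_vals, y_vals, dy and corr are made-up names, not the pyerrors internals):

import numpy as np
import scipy.linalg

def make_chisq_funcs(model_func, x_vals, y_vals, dy, corr=None):
    # Uncorrelated residuals, defined once for every fit mode.
    def general_chisqfunc_uncorr(p, ivars):
        return (ivars - model_func(p, x_vals)) / dy  # residuals normalized by the errors

    def chisqfunc_uncorr(p):
        return np.sum(general_chisqfunc_uncorr(p, y_vals) ** 2)

    if corr is not None:
        # Correlated case: whiten the residuals with the inverse Cholesky factor
        # of the correlation matrix, scaled by the inverse errors.
        covdiag = np.diag(1 / np.asarray(dy))
        chol = np.linalg.cholesky(corr)
        chol_inv = scipy.linalg.solve_triangular(chol, covdiag, lower=True)

        def general_chisqfunc(p, ivars):
            return chol_inv @ (ivars - model_func(p, x_vals))

        def chisqfunc(p):
            return np.sum(general_chisqfunc(p, y_vals) ** 2)
    else:
        # Uncorrelated case: re-bind the general names instead of duplicating logic,
        # so downstream code never needs to branch on the fit mode.
        general_chisqfunc = general_chisqfunc_uncorr
        chisqfunc = chisqfunc_uncorr

    return general_chisqfunc_uncorr, chisqfunc_uncorr, general_chisqfunc, chisqfunc

Since the covariance factorizes as C = D·corr·D with D = diag(dy) and corr = L·Lᵀ, whitening with chol_inv = L⁻¹·D⁻¹ turns the correlated chi² into a plain sum of squared residuals.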
@@ -621,17 +617,17 @@ def _combined_fit(x, y, func, silent=False, **kwargs):
         tolerance = 1e-4  # default value of 1e-1 set by iminuit can be problematic
         if 'tol' in kwargs:
             tolerance = kwargs.get('tol')
-        fit_result = iminuit.minimize(chisqfunc, x0, tol=tolerance)  # Stopping criterion 0.002 * tol * errordef
+        fit_result = iminuit.minimize(chisqfunc_uncorr, x0, tol=tolerance)  # Stopping criterion 0.002 * tol * errordef
         if kwargs.get('correlated_fit') is True:
-            fit_result = iminuit.minimize(chisqfunc_corr, fit_result.x, tol=tolerance)
+            fit_result = iminuit.minimize(chisqfunc, fit_result.x, tol=tolerance)
         output.iterations = fit_result.nfev
     else:
         tolerance = 1e-12
         if 'tol' in kwargs:
             tolerance = kwargs.get('tol')
-        fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'), tol=tolerance)
+        fit_result = scipy.optimize.minimize(chisqfunc_uncorr, x0, method=kwargs.get('method'), tol=tolerance)
         if kwargs.get('correlated_fit') is True:
-            fit_result = scipy.optimize.minimize(chisqfunc_corr, fit_result.x, method=kwargs.get('method'), tol=tolerance)
+            fit_result = scipy.optimize.minimize(chisqfunc, fit_result.x, method=kwargs.get('method'), tol=tolerance)
         output.iterations = fit_result.nit
 
     chisquare = fit_result.fun
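
With chisqfunc and chisqfunc_uncorr bound once, the minimizer calls above follow a single scheme for both backends: minimize the uncorrelated chi² first, then, if a correlated fit was requested, refine from that minimum. A hedged sketch using scipy only (the iminuit path is analogous; the names are assumptions):

import scipy.optimize

def run_fit(chisqfunc_uncorr, chisqfunc, x0, correlated_fit, method='BFGS', tol=1e-12):
    # Stage 1: the uncorrelated chi^2 provides a cheap, robust starting point.
    fit_result = scipy.optimize.minimize(chisqfunc_uncorr, x0, method=method, tol=tol)
    if correlated_fit:
        # Stage 2: refine with the correlated chi^2, warm-started at the stage-1 minimum.
        fit_result = scipy.optimize.minimize(chisqfunc, fit_result.x, method=method, tol=tol)
    return fit_result

When correlated_fit is False, chisqfunc is the same object as chisqfunc_uncorr, so the second stage is simply skipped.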
@@ -640,15 +636,19 @@ def _combined_fit(x, y, func, silent=False, **kwargs):
         if 'tol' in kwargs:
             print('tol cannot be set for Levenberg-Marquardt')
-        fit_result = scipy.optimize.least_squares(chisqfunc_residuals, x0, method='lm', ftol=1e-15, gtol=1e-15, xtol=1e-15)
+
+        def chisqfunc_residuals_uncorr(p):
+            return general_chisqfunc_uncorr(p, y_f)
+
+        fit_result = scipy.optimize.least_squares(chisqfunc_residuals_uncorr, x0, method='lm', ftol=1e-15, gtol=1e-15, xtol=1e-15)
         if kwargs.get('correlated_fit') is True:
-            fit_result = scipy.optimize.least_squares(chisqfunc_residuals_corr, fit_result.x, method='lm', ftol=1e-15, gtol=1e-15, xtol=1e-15)
+
+            def chisqfunc_residuals(p):
+                return general_chisqfunc(p, y_f)
+
+            fit_result = scipy.optimize.least_squares(chisqfunc_residuals, fit_result.x, method='lm', ftol=1e-15, gtol=1e-15, xtol=1e-15)
 
         chisquare = np.sum(fit_result.fun ** 2)
-        if kwargs.get('correlated_fit') is True:
-            assert np.isclose(chisquare, chisqfunc_corr(fit_result.x), atol=1e-14)
-        else:
-            assert np.isclose(chisquare, chisqfunc(fit_result.x), atol=1e-14)
+        assert np.isclose(chisquare, chisqfunc(fit_result.x), atol=1e-14)
 
         output.iterations = fit_result.nfev
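
The Levenberg-Marquardt branch is the same two-stage scheme, except that scipy.optimize.least_squares works on the residual vector rather than the scalar chi², so thin wrappers fix the data argument of the general functions; the final assert verifies that the summed squared residuals reproduce chisqfunc at the minimum. A sketch under the same assumptions as above:

import numpy as np
import scipy.optimize

def run_lm_fit(general_chisqfunc_uncorr, general_chisqfunc, chisqfunc, y_f, x0, correlated_fit):
    # LM needs the residual vector, so bind the data vector y_f into wrappers.
    def chisqfunc_residuals_uncorr(p):
        return general_chisqfunc_uncorr(p, y_f)

    fit_result = scipy.optimize.least_squares(chisqfunc_residuals_uncorr, x0, method='lm',
                                              ftol=1e-15, gtol=1e-15, xtol=1e-15)
    if correlated_fit:
        def chisqfunc_residuals(p):
            return general_chisqfunc(p, y_f)

        fit_result = scipy.optimize.least_squares(chisqfunc_residuals, fit_result.x, method='lm',
                                                  ftol=1e-15, gtol=1e-15, xtol=1e-15)

    chisquare = np.sum(fit_result.fun ** 2)
    # Consistency check: the residual-based chi^2 must match the scalar chisqfunc,
    # which is the correlated or uncorrelated one depending on the fit mode.
    assert np.isclose(chisquare, chisqfunc(fit_result.x), atol=1e-14)
    return fit_result, chisquare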
@@ -695,19 +695,12 @@ def _combined_fit(x, y, func, silent=False, **kwargs):
         raise Exception('No y errors available, run the gamma method first.')
 
     try:
-        if kwargs.get('correlated_fit') is True:
-            hess = hessian(chisqfunc_corr)(fitp)
-        else:
-            hess = hessian(chisqfunc)(fitp)
+        hess = hessian(chisqfunc)(fitp)
     except TypeError:
         raise Exception("It is required to use autograd.numpy instead of numpy within fit functions, see the documentation for details.") from None
 
-    if kwargs.get('correlated_fit') is True:
-        def chisqfunc_compact(d):
-            return anp.sum(general_chisqfunc_corr(d[:n_parms], d[n_parms:]) ** 2)
-
-    else:
-        def chisqfunc_compact(d):
-            return anp.sum(general_chisqfunc(d[:n_parms], d[n_parms:]) ** 2)
+    def chisqfunc_compact(d):
+        return anp.sum(general_chisqfunc(d[:n_parms], d[n_parms:]) ** 2)
 
     jac_jac_y = hessian(chisqfunc_compact)(np.concatenate((fitp, y_f)))
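
The last hunk applies the same simplification to the error propagation: the Hessian of chisqfunc and the compact chi² in the concatenated (parameters, data) vector both use the single general_chisqfunc. A hedged sketch of how such a mixed Hessian is typically turned into parameter-versus-data derivatives (autograd assumed, as in the fit functions; the solve step shown here is illustrative and not part of this hunk):

import autograd.numpy as anp
import numpy as np
from autograd import hessian

def parameter_derivatives(general_chisqfunc, fitp, y_f):
    # general_chisqfunc must be written with autograd.numpy so it can be differentiated.
    n_parms = len(fitp)

    def chisqfunc_compact(d):
        return anp.sum(general_chisqfunc(d[:n_parms], d[n_parms:]) ** 2)

    # Full Hessian in (parameters, data): the upper-left block is d2chi2/dp dp,
    # the upper-right block is the mixed derivative d2chi2/dp dy.
    jac_jac_y = hessian(chisqfunc_compact)(np.concatenate((fitp, y_f)))
    hess = jac_jac_y[:n_parms, :n_parms]
    mixed = jac_jac_y[:n_parms, n_parms:]
    # Implicit function theorem at the chi^2 minimum: dp/dy = -H^{-1} d2chi2/dp dy.
    return -np.linalg.solve(hess, mixed)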