feat: double jacobian in standard fit replaced by hessian

This greatly improves performance for numerical derivatives and helps
with readability.
Fabian Joswig 2022-10-06 10:44:06 +01:00
parent f22614f999
commit b84ea1cc3b

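For context on the change (a sketch, not part of the commit): with autograd, hessian(f) yields the same matrix as the nested jacobian(jacobian(f)) call it replaces, so the autograd path gains mainly readability; the performance gain comes from numdifftools, whose dedicated Hessian estimator replaces two nested numerical Jacobian passes. A minimal toy check of the equivalence, with chisq standing in for the fit's chisqfunc:

    # Toy check (not from the commit): the old and new second-derivative
    # calls agree; 'chisq' is a stand-in for _standard_fit's chisqfunc.
    import autograd.numpy as anp
    from autograd import jacobian, hessian

    def chisq(p):
        return anp.sum((p - anp.array([1.0, 2.0])) ** 2)

    p0 = anp.array([0.5, 0.5])
    nested = jacobian(jacobian(chisq))(p0)  # old: double jacobian
    direct = hessian(chisq)(p0)             # new: single hessian call
    assert anp.allclose(nested, direct)     # same (2, 2) matrix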

@@ -10,8 +10,10 @@ from matplotlib import gridspec
 from scipy.odr import ODR, Model, RealData
 import iminuit
 from autograd import jacobian as auto_jacobian
+from autograd import hessian as auto_hessian
 from autograd import elementwise_grad as egrad
 from numdifftools import Jacobian as num_jacobian
+from numdifftools import Hessian as num_hessian
 from .obs import Obs, derived_observable, covariance, cov_Obs
@@ -458,8 +460,10 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
     if kwargs.get('num_grad') is True:
         jacobian = num_jacobian
+        hessian = num_hessian
     else:
         jacobian = auto_jacobian
+        hessian = auto_hessian
 
     if x.shape[-1] != len(y):
         raise Exception('x and y input have to have the same length')
@@ -591,13 +595,11 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
     fitp = fit_result.x
     try:
         if kwargs.get('correlated_fit') is True:
-            hess = jacobian(jacobian(chisqfunc_corr))(fitp)
+            hess = hessian(chisqfunc_corr)(fitp)
         else:
-            hess = jacobian(jacobian(chisqfunc))(fitp)
+            hess = hessian(chisqfunc)(fitp)
     except TypeError:
         raise Exception("It is required to use autograd.numpy instead of numpy within fit functions, see the documentation for details.") from None
-    if kwargs.get('num_grad') is True:
-        hess = hess[0]
 
     if kwargs.get('correlated_fit') is True:
         def chisqfunc_compact(d):
@@ -611,9 +613,7 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
             chisq = anp.sum(((d[n_parms:] - model) / dy_f) ** 2)
             return chisq
 
-        jac_jac = jacobian(jacobian(chisqfunc_compact))(np.concatenate((fitp, y_f)))
-        if kwargs.get('num_grad') is True:
-            jac_jac = jac_jac[0]
+        jac_jac = hessian(chisqfunc_compact)(np.concatenate((fitp, y_f)))
 
         # Compute hess^{-1} @ jac_jac[:n_parms, n_parms:] using LAPACK dgesv
         try:
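A note on the dropped [0] indexing in the numerical branch (my reading of the change, not stated in the diff): nesting numdifftools Jacobians around the scalar chi-square left an extra leading axis that the old code stripped by hand, whereas numdifftools' Hessian returns the square matrix directly. A rough sketch with a toy function:

    # Rough sketch of the numerical path before and after (toy function,
    # not from pyerrors): the nested-Jacobian result needed the [0] index
    # that this commit removes; nd.Hessian gives the matrix directly.
    import numpy as np
    import numdifftools as nd

    def chisq(p):
        return np.sum((p - np.array([1.0, 2.0])) ** 2)

    p0 = np.zeros(2)
    hess_old = nd.Jacobian(nd.Jacobian(chisq))(p0)[0]  # old path + [0]
    hess_new = nd.Hessian(chisq)(p0)                   # new path
    print(np.allclose(hess_old, hess_new))  # expected: True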