Mirror of https://github.com/fjosw/pyerrors.git (synced 2025-05-14 19:43:41 +02:00)
feat: hessian added to prior fit and odr fit routines.
parent 8cf15b651f
commit 58b8383c1a
1 changed file with 15 additions and 8 deletions
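The diff below replaces the nested `jacobian(jacobian(...))` pattern with a dedicated Hessian helper in both `total_least_squares` and `_prior_fit`, and wires the new `num_grad` option through to it. A minimal sketch of how the four helper names are presumably aliased (the import lines are not part of this diff; autograd and numdifftools both provide these entry points):

    # Assumed aliases, not shown in this diff:
    from autograd import jacobian as auto_jacobian      # automatic differentiation
    from autograd import hessian as auto_hessian
    from numdifftools import Jacobian as num_jacobian   # numerical differentiation
    from numdifftools import Hessian as num_hessian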
@@ -165,6 +165,8 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
         corrected by effects caused by correlated input data.
         This can take a while as the full correlation matrix
         has to be calculated (default False).
+    num_grad : bool
+        Use numerical differentiation instead of automatic differentiation to perform the error propagation (default False).
 
     Notes
     -----
@@ -179,7 +181,12 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
 
     x_shape = x.shape
 
-    jacobian = auto_jacobian
+    if kwargs.get('num_grad') is True:
+        jacobian = num_jacobian
+        hessian = num_hessian
+    else:
+        jacobian = auto_jacobian
+        hessian = auto_hessian
 
     if not callable(func):
         raise TypeError('func has to be a function.')
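The `num_grad` branch keeps the call syntax uniform: `numdifftools.Jacobian` and `numdifftools.Hessian` return callable objects, so `hessian(func)(x)` works the same way for both backends. A toy sketch under the aliasing assumed above:

    import numpy as np
    import numdifftools as nd

    def chisq(p):  # stand-in for the real chisquare function
        return np.sum(p ** 2)

    num_hessian = nd.Hessian  # assumed alias
    hess = num_hessian(chisq)(np.array([1.0, 2.0]))  # numerically 2 * identity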
@@ -275,7 +282,7 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
 
     fitp = out.beta
     try:
-        hess = jacobian(jacobian(odr_chisquare))(np.concatenate((fitp, out.xplus.ravel())))
+        hess = hessian(odr_chisquare)(np.concatenate((fitp, out.xplus.ravel())))
     except TypeError:
         raise Exception("It is required to use autograd.numpy instead of numpy within fit functions, see the documentation for details.") from None
 
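For a scalar-valued function, autograd's `hessian` computes the same matrix as two nested `jacobian` calls, so the rewrite above changes no numbers in the autograd branch. A quick self-contained check:

    import autograd.numpy as anp
    from autograd import jacobian, hessian

    def chisq(p):  # toy scalar function standing in for odr_chisquare
        return anp.sum(p ** 2) + p[0] * p[1]

    p0 = anp.array([1.0, 2.0])
    assert anp.allclose(hessian(chisq)(p0), jacobian(jacobian(chisq))(p0))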
@@ -284,7 +291,7 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
         chisq = anp.sum(((y_f - model) / dy_f) ** 2) + anp.sum(((d[n_parms + m:].reshape(x_shape) - d[n_parms:n_parms + m].reshape(x_shape)) / dx_f) ** 2)
         return chisq
 
-    jac_jac_x = jacobian(jacobian(odr_chisquare_compact_x))(np.concatenate((fitp, out.xplus.ravel(), x_f.ravel())))
+    jac_jac_x = hessian(odr_chisquare_compact_x)(np.concatenate((fitp, out.xplus.ravel(), x_f.ravel())))
 
     # Compute hess^{-1} @ jac_jac_x[:n_parms + m, n_parms + m:] using LAPACK dgesv
     try:
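The retained comment refers to solving a linear system rather than forming `hess^{-1}` explicitly; `scipy.linalg.solve` dispatches to LAPACK's gesv routines for general matrices. A hedged sketch with stand-in shapes (`k` playing the role of `n_parms + m`, `r` the flattened data length):

    import numpy as np
    from scipy.linalg import solve  # uses LAPACK *gesv under the hood

    rng = np.random.default_rng(0)
    k, r = 4, 3  # stand-in dimensions, not the real fit sizes
    hess = np.eye(k) + 0.1 * rng.standard_normal((k, k))
    jac_jac_x = rng.standard_normal((k + r, k + r))

    # hess^{-1} @ jac_jac_x[:k, k:] without an explicit matrix inverse:
    deriv_x = solve(hess, jac_jac_x[:k, k:])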
@@ -297,7 +304,7 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
         chisq = anp.sum(((d[n_parms + m:] - model) / dy_f) ** 2) + anp.sum(((x_f - d[n_parms:n_parms + m].reshape(x_shape)) / dx_f) ** 2)
         return chisq
 
-    jac_jac_y = jacobian(jacobian(odr_chisquare_compact_y))(np.concatenate((fitp, out.xplus.ravel(), y_f)))
+    jac_jac_y = hessian(odr_chisquare_compact_y)(np.concatenate((fitp, out.xplus.ravel(), y_f)))
 
     # Compute hess^{-1} @ jac_jac_y[:n_parms + m, n_parms + m:] using LAPACK dgesv
     try:
@@ -326,9 +333,9 @@ def _prior_fit(x, y, func, priors, silent=False, **kwargs):
     x = np.asarray(x)
 
     if kwargs.get('num_grad') is True:
-        jacobian = num_jacobian
+        hessian = num_hessian
     else:
-        jacobian = auto_jacobian
+        hessian = auto_hessian
 
     if not callable(func):
         raise TypeError('func has to be a function.')
@@ -418,7 +425,7 @@ def _prior_fit(x, y, func, priors, silent=False, **kwargs):
     if not m.fmin.is_valid:
         raise Exception('The minimization procedure did not converge.')
 
-    hess = jacobian(jacobian(chisqfunc))(params)
+    hess = hessian(chisqfunc)(params)
     if kwargs.get('num_grad') is True:
         hess = hess[0]
     hess_inv = np.linalg.pinv(hess)
@@ -428,7 +435,7 @@ def _prior_fit(x, y, func, priors, silent=False, **kwargs):
         chisq = anp.sum(((d[n_parms: n_parms + len(x)] - model) / dy_f) ** 2) + anp.sum(((d[n_parms + len(x):] - d[:n_parms]) / dp_f) ** 2)
         return chisq
 
-    jac_jac = jacobian(jacobian(chisqfunc_compact))(np.concatenate((params, y_f, p_f)))
+    jac_jac = hessian(chisqfunc_compact)(np.concatenate((params, y_f, p_f)))
     if kwargs.get('num_grad') is True:
         jac_jac = jac_jac[0]
 
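Both routines rely on the same propagation step: at the minimum the gradient of the chisquare with respect to the parameters vanishes, so the implicit function theorem gives dparams/ddata = -hess^{-1} @ (mixed second-derivative block), which is why only the off-diagonal block of `jac_jac` enters the error propagation. A toy end-to-end check of that identity (plain numpy, hypothetical one-parameter least-squares fit):

    import numpy as np

    # chisq(p, y) = sum((y - p)**2) has its minimum at p = mean(y).
    y = np.array([1.0, 2.0, 3.0])

    hess = np.array([[2.0 * len(y)]])      # d2(chisq)/dp2
    mixed = -2.0 * np.ones((1, len(y)))    # d2(chisq)/(dp dy_i)
    deriv = -np.linalg.pinv(hess) @ mixed  # dp/dy_i, expected 1/len(y) each
    assert np.allclose(deriv, 1.0 / len(y))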