diff --git a/docs/pyerrors/fits.html b/docs/pyerrors/fits.html
index 3760e6b7..d5f38b1c 100644
--- a/docs/pyerrors/fits.html
+++ b/docs/pyerrors/fits.html
@@ -417,7 +417,7 @@
     result = []
     for i in range(n_parms):
-        result.append(derived_observable(lambda my_var, **kwargs: my_var[0] / x.ravel()[0].value * out.beta[i], list(x.ravel()) + list(y), man_grad=list(deriv_x[i]) + list(deriv_y[i])))
+        result.append(derived_observable(lambda my_var, **kwargs: (my_var[0] + np.finfo(np.float64).eps) / (x.ravel()[0].value + np.finfo(np.float64).eps) * out.beta[i], list(x.ravel()) + list(y), man_grad=list(deriv_x[i]) + list(deriv_y[i])))
 
     output.fit_parameters = result + const_par
 
@@ -534,7 +534,7 @@
     result = []
     for i in range(n_parms):
-        result.append(derived_observable(lambda x, **kwargs: x[0] / y[0].value * params[i], list(y) + list(loc_priors), man_grad=list(deriv[i])))
+        result.append(derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (y[0].value + np.finfo(np.float64).eps) * params[i], list(y) + list(loc_priors), man_grad=list(deriv[i])))
 
     output.fit_parameters = result
 
     output.chisquare = chisqfunc(np.asarray(params))
@@ -728,7 +728,7 @@
     result = []
     for i in range(n_parms):
-        result.append(derived_observable(lambda x, **kwargs: x[0] / y[0].value * fit_result.x[i], list(y), man_grad=list(deriv[i])))
+        result.append(derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (y[0].value + np.finfo(np.float64).eps) * fit_result.x[i], list(y), man_grad=list(deriv[i])))
 
     output.fit_parameters = result + const_par
 
@@ -1451,7 +1451,7 @@ List of N Obs that are used to constrain the last N fit parameters of func.
     result = []
     for i in range(n_parms):
-        result.append(derived_observable(lambda my_var, **kwargs: my_var[0] / x.ravel()[0].value * out.beta[i], list(x.ravel()) + list(y), man_grad=list(deriv_x[i]) + list(deriv_y[i])))
+        result.append(derived_observable(lambda my_var, **kwargs: (my_var[0] + np.finfo(np.float64).eps) / (x.ravel()[0].value + np.finfo(np.float64).eps) * out.beta[i], list(x.ravel()) + list(y), man_grad=list(deriv_x[i]) + list(deriv_y[i])))
 
     output.fit_parameters = result + const_par
diff --git a/docs/pyerrors/roots.html b/docs/pyerrors/roots.html
index 3f5f78af..1cc2b1dd 100644
--- a/docs/pyerrors/roots.html
+++ b/docs/pyerrors/roots.html
@@ -69,7 +69,8 @@
 View Source
-import scipy.optimize
+import numpy as np
+import scipy.optimize
 from autograd import jacobian
 from .obs import derived_observable
 
@@ -104,7 +105,7 @@
     da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
     deriv = - da / dx
 
-    res = derived_observable(lambda x, **kwargs: x[0] / d.value * root[0], [d], man_grad=[deriv])
+    res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (d.value + np.finfo(np.float64).eps) * root[0], [d], man_grad=[deriv])
     return res
 
@@ -151,7 +152,7 @@
     da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
     deriv = - da / dx
 
-    res = derived_observable(lambda x, **kwargs: x[0] / d.value * root[0], [d], man_grad=[deriv])
+    res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (d.value + np.finfo(np.float64).eps) * root[0], [d], man_grad=[deriv])
     return res
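
Note on the change: every hunk above applies the same one-line guard. derived_observable is handed a dummy function of the form x[0] / central * parameter together with explicit derivatives (man_grad), so the lambda evidently only has to reproduce the parameter's central value, and the unguarded ratio becomes 0.0 / 0.0 == nan whenever the input observable's central value is exactly zero. Adding np.finfo(np.float64).eps (machine epsilon for float64, about 2.2e-16) to both numerator and denominator keeps the ratio equal to exactly 1 at the central value, while shifting nonzero inputs only at the level of floating-point rounding. A minimal standalone sketch of the failure mode and the guard, with hypothetical helper names that are not part of pyerrors:

import numpy as np

eps = np.finfo(np.float64).eps  # spacing between 1.0 and the next float64, ~2.22e-16

def unguarded(x0, central, param):
    # Old dummy function, as in x[0] / d.value * root[0]: nan when both are zero.
    return x0 / central * param

def guarded(x0, central, param):
    # New dummy function from the diff: (0 + eps) / (0 + eps) == 1 exactly.
    return (x0 + eps) / (central + eps) * param

zero = np.float64(0.0)               # numpy scalars return nan instead of raising
print(unguarded(zero, zero, 1.5))    # nan, with a RuntimeWarning
print(guarded(zero, zero, 1.5))      # 1.5 -- the central value survives

The eps shift is deliberately applied to both sides of the division so that the guarded ratio is exactly 1 whenever x0 equals central, which is the only case the central-value evaluation exercises.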