From aceee249e4790f235472ed48900a3289ef75143a Mon Sep 17 00:00:00 2001
From: fjosw
Date: Tue, 10 Jan 2023 10:35:32 +0000
Subject: [PATCH] Documentation updated

---
 docs/pyerrors/roots.html | 48 ++++++++++++++++++++++------------------
 1 file changed, 26 insertions(+), 22 deletions(-)

diff --git a/docs/pyerrors/roots.html b/docs/pyerrors/roots.html
index 6ebabaad..fdfde017 100644
--- a/docs/pyerrors/roots.html
+++ b/docs/pyerrors/roots.html
@@ -105,18 +105,20 @@
 27     Obs
 28         `Obs` valued root of the function.
 29     '''
-30     root = scipy.optimize.fsolve(func, guess, d.value)
+30     d_val = np.vectorize(lambda x: x.value)(np.array(d))
 31
-32     # Error propagation as detailed in arXiv:1809.01289
-33     dx = jacobian(func)(root[0], d.value)
-34     try:
-35         da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
-36     except TypeError:
-37         raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
-38     deriv = - da / dx
-39
-40     res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (d.value + np.finfo(np.float64).eps) * root[0], [d], man_grad=[deriv])
-41     return res
+32     root = scipy.optimize.fsolve(func, guess, d_val)
+33
+34     # Error propagation as detailed in arXiv:1809.01289
+35     dx = jacobian(func)(root[0], d_val)
+36     try:
+37         da = jacobian(lambda u, v: func(v, u))(d_val, root[0])
+38     except TypeError:
+39         raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
+40     deriv = - da / dx
+41     res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (np.array(d).reshape(-1)[0].value + np.finfo(np.float64).eps) * root[0],
+42                              np.array(d).reshape(-1), man_grad=np.array(deriv).reshape(-1))
+43     return res
@@ -155,18 +157,20 @@
 28     Obs
 29         `Obs` valued root of the function.
 30     '''
-31     root = scipy.optimize.fsolve(func, guess, d.value)
+31     d_val = np.vectorize(lambda x: x.value)(np.array(d))
 32
-33     # Error propagation as detailed in arXiv:1809.01289
-34     dx = jacobian(func)(root[0], d.value)
-35     try:
-36         da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
-37     except TypeError:
-38         raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
-39     deriv = - da / dx
-40
-41     res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (d.value + np.finfo(np.float64).eps) * root[0], [d], man_grad=[deriv])
-42     return res
+33     root = scipy.optimize.fsolve(func, guess, d_val)
+34
+35     # Error propagation as detailed in arXiv:1809.01289
+36     dx = jacobian(func)(root[0], d_val)
+37     try:
+38         da = jacobian(lambda u, v: func(v, u))(d_val, root[0])
+39     except TypeError:
+40         raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
+41     deriv = - da / dx
+42     res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (np.array(d).reshape(-1)[0].value + np.finfo(np.float64).eps) * root[0],
+43                              np.array(d).reshape(-1), man_grad=np.array(deriv).reshape(-1))
+44     return res
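
Note for reviewers: the error propagation in the patched code follows the implicit
function theorem, as in the cited arXiv:1809.01289. For a root x(d) of f(x, d) = 0
one has dx/dd = -(df/dd)/(df/dx), which is exactly the `deriv = - da / dx` line.
The sketch below exercises the new array-valued `d` path introduced by this patch.
It is a minimal illustration only: it assumes the documented signature
find_root(d, func, guess=1.0), and the ensemble name and synthetic data are
hypothetical, not part of this patch.

    import autograd.numpy as anp  # plain numpy triggers the TypeError handled above
    import numpy as np
    import pyerrors as pe

    # Synthetic observables; ensemble name and samples are illustrative only.
    obs_a = pe.Obs([np.random.normal(1.0, 0.05, 1000)], ['test_ensemble'])
    obs_b = pe.Obs([np.random.normal(0.3, 0.05, 1000)], ['test_ensemble'])


    def root_func(x, d):
        # Array-valued d, exercising the np.vectorize/reshape(-1) path added
        # in this patch: find x with d[0] * x - exp(-d[1] * x) = 0.
        return d[0] * x - anp.exp(-d[1] * x)


    my_root = pe.roots.find_root([obs_a, obs_b], root_func, guess=1.0)
    my_root.gamma_method()  # estimate the propagated uncertainty
    print(my_root)

The `np.finfo(np.float64).eps` offsets in the wrapper passed to derived_observable
appear to keep the expression finite when a central value or the root is zero; the
actual derivative enters through `man_grad`, which now carries one entry per
observable in the flattened `d`.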