Mirror of https://github.com/fjosw/pyerrors.git, synced 2025-03-15 14:50:25 +01:00
Merge pull request #142 from fjosw/feat/root_of_multi_parameter_functions

Root of multi parameter functions

Commit 569bf8c2f1

2 changed files with 17 additions and 5 deletions
@@ -27,15 +27,17 @@ def find_root(d, func, guess=1.0, **kwargs):
     Obs
         `Obs` valued root of the function.
     '''
-    root = scipy.optimize.fsolve(func, guess, d.value)
+    d_val = np.vectorize(lambda x: x.value)(np.array(d))
+
+    root = scipy.optimize.fsolve(func, guess, d_val)
 
     # Error propagation as detailed in arXiv:1809.01289
-    dx = jacobian(func)(root[0], d.value)
+    dx = jacobian(func)(root[0], d_val)
     try:
-        da = jacobian(lambda u, v: func(v, u))(d.value, root[0])
+        da = jacobian(lambda u, v: func(v, u))(d_val, root[0])
     except TypeError:
         raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
     deriv = - da / dx
 
-    res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (d.value + np.finfo(np.float64).eps) * root[0], [d], man_grad=[deriv])
+    res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (np.array(d).reshape(-1)[0].value + np.finfo(np.float64).eps) * root[0],
+                             np.array(d).reshape(-1), man_grad=np.array(deriv).reshape(-1))
     return res
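A note on the error propagation that this change generalises from a single parameter to a vector of parameters: `deriv = - da / dx` is the implicit-function-theorem gradient described in arXiv:1809.01289. Writing the root condition as f(x*, d) = 0, with d the values of the input observables (the symbols x* and d_i are introduced here for illustration and do not appear in the code), differentiating with respect to each d_i gives

    \frac{\partial x^\ast}{\partial d_i}
        = -\left.\frac{\partial f/\partial d_i}{\partial f/\partial x}\right|_{x = x^\ast}

which is what the code evaluates: `dx` is the derivative of `func` with respect to x at the root, `da` collects the derivatives with respect to the parameters, and the resulting vector is handed to `derived_observable` as `man_grad`, one component per input observable.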
@@ -42,3 +42,13 @@ def test_root_no_autograd():
 
     with pytest.raises(Exception):
         my_root = pe.roots.find_root(my_obs, root_function)
+
+
+def test_root_multi_parameter():
+    o1 = pe.pseudo_Obs(1.1, 0.1, "test")
+    o2 = pe.pseudo_Obs(1.3, 0.12, "test")
+
+    f2 = lambda x, d: d[0] + d[1] * x
+
+    assert f2(-o1 / o2, [o1, o2]) == 0
+    assert pe.find_root([o1, o2], f2) == -o1 / o2
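The new test uses the fact that for f2(x, d) = d[0] + d[1] * x the exact root is -d[0] / d[1], which is why the result of `find_root` is compared directly with -o1 / o2. For a nonlinear case, here is a minimal usage sketch of the multi-parameter interface (not part of this commit; the root function, the values and the ensemble name are made up, and any numpy call inside the root function has to go through autograd.numpy, as the exception raised in `find_root` points out):

    import autograd.numpy as anp  # autograd-wrapped numpy is required inside root functions
    import pyerrors as pe

    # two hypothetical observables on the same, made-up ensemble
    a = pe.pseudo_Obs(0.8, 0.05, "ens1")
    b = pe.pseudo_Obs(1.5, 0.10, "ens1")

    # root condition f(x, d) = 0 with two Obs-valued parameters d[0] and d[1]
    def root_func(x, d):
        return anp.exp(-d[0] * x) - d[1] * x

    root = pe.find_root([a, b], root_func, guess=0.5)
    root.gamma_method()  # estimate the propagated uncertainty of the root
    print(root)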