Matmul overloaded for correlator class. (#199)

* feat: matmul method added to correlator class.

* feat: corr, corr matmul and correlator matrix trace added.

* tests: tests for matmul and trace added.

* tests: tolerance slightly reduced and good guess vs. bad guess test added.

* feat: rmatmul added and __array_priority__ set.

* tests: additional tests for rmatmul added.

* tests: one more test for rmatmul added.

* docs: docstring added to Corr.trace.

* tests: associative property test added for complex Corr matmul.

* fix: Corr.roll method now also works for correlator matrices by
explicitly specifying the axis.

Co-authored-by: Matteo Di Carlo <matteo.dicarlo93@gmail.com>

* feat: exception type changed for the trace of a one-dimensional correlator.

* tests: trace N=1 exception tested.

---------

Co-authored-by: Matteo Di Carlo <matteo.dicarlo93@gmail.com>
Fabian Joswig 2023-07-17 11:48:57 +01:00 committed by GitHub
parent 7d1858f6c4
commit f1150f09c8
3 changed files with 185 additions and 6 deletions
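
For orientation, a minimal usage sketch of the operations introduced by this commit (not part of the changeset itself; the ensemble name, the correlator content and the construction of a matrix correlator from an array of Corr objects are assumptions of the example):

import numpy as np
import pyerrors as pe

# Sketch only: values and ensemble name are illustrative.
c1 = pe.Corr([pe.pseudo_Obs(np.exp(-0.2 * t), 0.01, 'ens') for t in range(8)])
c2 = pe.Corr([pe.pseudo_Obs(0.5 * np.exp(-0.2 * t), 0.01, 'ens') for t in range(8)])

# 2x2 correlator matrix (assumed construction from an array of Corr objects).
mat = pe.Corr(np.array([[c1, c2], [c2, c1]]))

prod = mat @ mat             # __matmul__: matrix product on every time slice
left = np.identity(2) @ mat  # ndarray @ Corr dispatches to __rmatmul__ via __array_priority__
tr = prod.trace()            # trace over the matrix indices, yielding an N=1 correlator
rolled = mat.roll(2)         # roll along the time axis now also works for correlator matrices
# tr.trace() raises an exception, since the trace of a one-dimensional correlator is not defined.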


@@ -235,12 +235,12 @@ def test_fit_corr_independent():
 def test_linear_fit_guesses():
-    for err in [10, 0.1, 0.001]:
+    for err in [1.2, 0.1, 0.001]:
         xvals = []
         yvals = []
         for x in range(1, 8, 2):
             xvals.append(x)
-            yvals.append(pe.pseudo_Obs(x + np.random.normal(0.0, err), err, 'test1') + pe.pseudo_Obs(0, err / 100, 'test2', samples=87))
+            yvals.append(pe.pseudo_Obs(x + np.random.normal(0.0, err), err, 'test1') + pe.pseudo_Obs(0, err / 97, 'test2', samples=87))
         lin_func = lambda a, x: a[0] + a[1] * x
         with pytest.raises(Exception):
             pe.least_squares(xvals, yvals, lin_func)
@@ -251,7 +251,7 @@ def test_linear_fit_guesses():
         bad_guess = pe.least_squares(xvals, yvals, lin_func, initial_guess=[999, 999])
         good_guess = pe.least_squares(xvals, yvals, lin_func, initial_guess=[0, 1])
         assert np.isclose(bad_guess.chisquare, good_guess.chisquare, atol=1e-8)
-        assert np.all([(go - ba).is_zero(atol=1e-6) for (go, ba) in zip(good_guess, bad_guess)])
+        assert np.all([(go - ba).is_zero(atol=5e-5) for (go, ba) in zip(good_guess, bad_guess)])


 def test_total_least_squares():