Mirror of https://github.com/fjosw/pyerrors.git, synced 2025-03-15 23:00:25 +01:00

Commit 71fe86b8ba: merge with develop
59 changed files with 5367 additions and 1798 deletions
Changed paths:
- .github/workflows
- README.md
- data/sfcf_test/data_o (test_r0, test_r1/cfg1, test_r2/cfg1)
- examples (01_basic_example.ipynb, 02_correlators.ipynb, 03_pcac_example.ipynb, 04_fit_example.ipynb, 05_matrix_operations.ipynb, 06_gevp.ipynb)
- data
- pyerrors
- pytest.ini
- setup.py
- tests
.github/workflows/pytest.yml (12 changes, vendored)

@@ -11,11 +11,17 @@ on:
 jobs:
   pytest:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: true
       matrix:
-        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
+        os: [ubuntu-latest]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        include:
+          - os: macos-latest
+            python-version: 3.9
+          - os: windows-latest
+            python-version: 3.9

     steps:
       - name: Checkout source
README.md

@@ -1,9 +1,9 @@
-[](https://github.com/fjosw/pyerrors/actions/workflows/flake8.yml) [](https://github.com/fjosw/pyerrors/actions/workflows/pytest.yml) [](https://github.com/fjosw/pyerrors/actions/workflows/docs.yml) [](https://www.python.org/downloads/) [](https://opensource.org/licenses/MIT)
+[](https://github.com/fjosw/pyerrors/actions/workflows/flake8.yml) [](https://github.com/fjosw/pyerrors/actions/workflows/pytest.yml) [](https://github.com/fjosw/pyerrors/actions/workflows/docs.yml) [](https://www.python.org/downloads/) [](https://opensource.org/licenses/MIT)
 # pyerrors
 `pyerrors` is a python package for error computation and propagation of Markov chain Monte Carlo data.

 - **Documentation:** https://fjosw.github.io/pyerrors/pyerrors.html
-- **Examples**: https://github.com/fjosw/pyerrors/tree/develop/examples (Do not work properly at the moment)
+- **Examples**: https://github.com/fjosw/pyerrors/tree/develop/examples
 - **Contributing:** https://github.com/fjosw/pyerrors/blob/develop/CONTRIBUTING.md
 - **Bug reports:** https://github.com/fjosw/pyerrors/issues
data/sfcf_test/data_o/test_r0/cfg2/F_V0 — new file, 230 lines
data/sfcf_test/data_o/test_r0/cfg2/f_1 — new file, 194 lines
data/sfcf_test/data_o/test_r0/cfg2/f_A — new file, 80 lines

Each of these SFCF test data files begins with the same [run] header:

[run]

version 2.1
date 2022-01-19 11:04:00 +0100
host r04n07.palma.wwu
dir /scratch/tmp/j_kuhl19
user j_kuhl19
gauge_name /unity
gauge_md5 1ea28326e4090996111a320b8372811d
param_name sfcf_unity_test.in
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
param_hash 686af5e712ee2902180f5428af94c6e7
data_name ./output_10519905/data_oF_V0

(data_name reads ./output_10519905/data_of_1 and ./output_10519905/data_of_A in the f_1 and f_A files.)

The header is followed by one [correlator] block per offset/wave-function combination: F_V0 and f_1 carry two wave functions (wf and wf_2, each 0-2) for offsets 0 and 1, while f_A carries a single wf (0-2) for offsets 0 and 1. F_V0 and f_A list three timeslices under corr_t; f_1 holds a single value under corr. A representative block:

[correlator]

name F_V0
quarks lquark lquark
offset 0
wf 0
wf_2 0
corr_t
1 +6.8367760900851147e+02 +3.0531839956225539e-10
2 +6.6131885855823339e+02 +3.9736225045852382e-12
3 +6.8367760900810049e+02 -2.5611665964422843e-10
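The block layout shown above is plain text and easy to parse directly. pyerrors ships its own readers for SFCF output under pyerrors.input, so the following is only an illustrative sketch of the file structure; the function name parse_sfcf_file and the returned dictionary layout are choices made here, not part of any pyerrors API.

```python
import re
from pathlib import Path


def parse_sfcf_file(path):
    """Split an SFCF test data file (like the F_V0, f_1 and f_A files above)
    into its [run] header and a list of [correlator] blocks."""
    sections = []
    current = None
    for raw in Path(path).read_text().splitlines():
        line = raw.strip()
        if not line:
            continue  # blank separator lines between blocks
        if line in ("[run]", "[correlator]"):
            current = {"section": line, "fields": {}, "data": []}
            sections.append(current)
        elif current is not None:
            # Numerical rows ("1 +6.83...e+02 +3.05...e-10" under corr_t, or
            # "+3.51...e+02 +6.76...e-15" under corr) start with a digit or a
            # sign; everything else is a "key value" metadata line.
            if current["section"] == "[correlator]" and re.match(r"[+\-0-9]", line):
                current["data"].append([float(x) for x in line.split()])
            else:
                key, _, value = line.partition(" ")
                current["fields"][key] = value.strip()
    return sections


# Example: collect the corr_t rows of all F_V0 correlator blocks in one file.
blocks = parse_sfcf_file("data/sfcf_test/data_o/test_r0/cfg2/F_V0")
f_v0_rows = [b["data"] for b in blocks if b["fields"].get("name") == "F_V0"]
```

For the f_A file above, such a parser would return one [run] section followed by six [correlator] sections, each with three corr_t rows.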
230
data/sfcf_test/data_o/test_r0/cfg3/F_V0
Normal file
230
data/sfcf_test/data_o/test_r0/cfg3/F_V0
Normal file
|
@ -0,0 +1,230 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_oF_V0
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900851147e+02 +3.0531839956225539e-10
|
||||
2 +6.6131885855823339e+02 +3.9736225045852382e-12
|
||||
3 +6.8367760900810049e+02 -2.5611665964422843e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168793060e+02 +3.0532966356282939e-10
|
||||
2 +6.6134325636407561e+02 +3.9737690976212336e-12
|
||||
3 +6.8370283168751973e+02 -2.5612610847134760e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437212463e+02 +3.0533056232915147e-10
|
||||
2 +6.6134520322615822e+02 +3.9737807346122766e-12
|
||||
3 +6.8370484437171353e+02 -2.5612686251836130e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168792889e+02 +3.0532890521977295e-10
|
||||
2 +6.6134325636407402e+02 +3.9730355551484655e-12
|
||||
3 +6.8370283168751791e+02 -2.5612686681440218e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529787934e+02 +3.0534016954586185e-10
|
||||
2 +6.6136765507001564e+02 +3.9731820664935325e-12
|
||||
3 +6.8372805529746825e+02 -2.5613631608015786e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805632656e+02 +3.0534106842445933e-10
|
||||
2 +6.6136960200392332e+02 +3.9731937804440792e-12
|
||||
3 +6.8373006805591558e+02 -2.5613707007587266e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437212156e+02 +3.0532220084664646e-10
|
||||
2 +6.6134520322615526e+02 +3.9656927030717790e-12
|
||||
3 +6.8370484437171069e+02 -2.5613522400086377e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805632599e+02 +3.0533346499198999e-10
|
||||
2 +6.6136960200392275e+02 +3.9658390079382195e-12
|
||||
3 +6.8373006805591490e+02 -2.5614467350834153e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082069789e+02 +3.0533436384459901e-10
|
||||
2 +6.6137154894356127e+02 +3.9658506942251639e-12
|
||||
3 +6.8373208082028680e+02 -2.5614542753491032e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900918211e+02 -9.5149222536505804e-10
|
||||
2 +6.6131885855810310e+02 +3.2960434859595585e-10
|
||||
3 +6.8367760900806934e+02 -2.3744430846347533e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168860135e+02 -9.5152732859532533e-10
|
||||
2 +6.6134325636394544e+02 +3.2961650841969937e-10
|
||||
3 +6.8370283168748858e+02 -2.3745306857315358e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437279526e+02 -9.5153012965274573e-10
|
||||
2 +6.6134520322602793e+02 +3.2961747879154288e-10
|
||||
3 +6.8370484437168250e+02 -2.3745376753897864e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168859953e+02 -9.5151770701795658e-10
|
||||
2 +6.6134325636394351e+02 +3.2962581533640458e-10
|
||||
3 +6.8370283168748665e+02 -2.3744344699578737e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529855032e+02 -9.5155281099707234e-10
|
||||
2 +6.6136765506988547e+02 +3.2963797613709602e-10
|
||||
3 +6.8372805529743755e+02 -2.3745220688244645e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805699742e+02 -9.5155561220425917e-10
|
||||
2 +6.6136960200379315e+02 +3.2963894649982994e-10
|
||||
3 +6.8373006805588454e+02 -2.3745290592048597e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437279174e+02 -9.5153224647433932e-10
|
||||
2 +6.6134520322602452e+02 +3.2961543119772646e-10
|
||||
3 +6.8370484437167897e+02 -2.3745588436057620e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805699617e+02 -9.5156735103669992e-10
|
||||
2 +6.6136960200379178e+02 +3.2962759157000606e-10
|
||||
3 +6.8373006805588329e+02 -2.3746464475292832e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082136819e+02 -9.5157015223999714e-10
|
||||
2 +6.6137154894343041e+02 +3.2962856194733528e-10
|
||||
3 +6.8373208082025531e+02 -2.3746534378088984e-10
|
||||
|
194
data/sfcf_test/data_o/test_r0/cfg3/f_1
Normal file
194
data/sfcf_test/data_o/test_r0/cfg3/f_1
Normal file
|
@ -0,0 +1,194 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_1
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545021e+02 +6.7620978057264750e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855339e+02 +6.5026340956203663e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177868e+02 +6.5443496235264788e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855515e+02 +6.9706500417651470e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609065e+02 +6.9516150897757419e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046199e+02 +6.9232860455434941e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177447e+02 +1.0849949614595719e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046182e+02 +1.0866063643253473e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098047e+02 +1.0827277318679030e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545038e+02 +3.0143306723935508e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855367e+02 +4.3340379505972648e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177902e+02 +3.9652247575094006e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855526e+02 -8.2540994138261318e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609082e+02 -9.7121215247039609e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046227e+02 -9.0872484903683497e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177453e+02 +5.1331372776616026e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046193e+02 +5.0816653044831932e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098064e+02 +5.1165649253001659e-15
|
||||
|
80
data/sfcf_test/data_o/test_r0/cfg3/f_A
Normal file
80
data/sfcf_test/data_o/test_r0/cfg3/f_A
Normal file
|
@ -0,0 +1,80 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_A
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727972304e+01 -6.1214214711790100e-12
|
||||
2 +1.0447210336915187e+00 +8.9219487930753188e-13
|
||||
3 -4.1025094911185178e+01 -4.8315634170546161e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722862705e+01 +2.0963356863957609e-13
|
||||
2 +1.0542820240851569e+00 +2.3989756974599379e-15
|
||||
3 -4.1024441815729936e+01 -5.7107484666182308e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269442847e+01 -6.6512260271334321e-14
|
||||
2 +1.0516822345055969e+00 -2.2935262162529075e-15
|
||||
3 -4.1025142768037746e+01 +3.7566377680004518e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727965909e+01 -1.6112786177915427e-11
|
||||
2 +1.0447210337411881e+00 -7.0387528705692678e-13
|
||||
3 -4.1025094911167137e+01 +4.6509152745618223e-13
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722842213e+01 -8.1976426690345305e-13
|
||||
2 +1.0542820240843382e+00 +2.1626370477046812e-13
|
||||
3 -4.1024441815730086e+01 -2.4147931196409923e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269443117e+01 +7.9192560386479701e-14
|
||||
2 +1.0516822345055870e+00 -1.2443038782429568e-14
|
||||
3 -4.1025142768037739e+01 +5.9315333178954509e-17
|
||||
|
230
data/sfcf_test/data_o/test_r0/cfg4/F_V0
Normal file
230
data/sfcf_test/data_o/test_r0/cfg4/F_V0
Normal file
|
@ -0,0 +1,230 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_oF_V0
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900851147e+02 +3.0531839956225539e-10
|
||||
2 +6.6131885855823339e+02 +3.9736225045852382e-12
|
||||
3 +6.8367760900810049e+02 -2.5611665964422843e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168793060e+02 +3.0532966356282939e-10
|
||||
2 +6.6134325636407561e+02 +3.9737690976212336e-12
|
||||
3 +6.8370283168751973e+02 -2.5612610847134760e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437212463e+02 +3.0533056232915147e-10
|
||||
2 +6.6134520322615822e+02 +3.9737807346122766e-12
|
||||
3 +6.8370484437171353e+02 -2.5612686251836130e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168792889e+02 +3.0532890521977295e-10
|
||||
2 +6.6134325636407402e+02 +3.9730355551484655e-12
|
||||
3 +6.8370283168751791e+02 -2.5612686681440218e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529787934e+02 +3.0534016954586185e-10
|
||||
2 +6.6136765507001564e+02 +3.9731820664935325e-12
|
||||
3 +6.8372805529746825e+02 -2.5613631608015786e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805632656e+02 +3.0534106842445933e-10
|
||||
2 +6.6136960200392332e+02 +3.9731937804440792e-12
|
||||
3 +6.8373006805591558e+02 -2.5613707007587266e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437212156e+02 +3.0532220084664646e-10
|
||||
2 +6.6134520322615526e+02 +3.9656927030717790e-12
|
||||
3 +6.8370484437171069e+02 -2.5613522400086377e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805632599e+02 +3.0533346499198999e-10
|
||||
2 +6.6136960200392275e+02 +3.9658390079382195e-12
|
||||
3 +6.8373006805591490e+02 -2.5614467350834153e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082069789e+02 +3.0533436384459901e-10
|
||||
2 +6.6137154894356127e+02 +3.9658506942251639e-12
|
||||
3 +6.8373208082028680e+02 -2.5614542753491032e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900918211e+02 -9.5149222536505804e-10
|
||||
2 +6.6131885855810310e+02 +3.2960434859595585e-10
|
||||
3 +6.8367760900806934e+02 -2.3744430846347533e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168860135e+02 -9.5152732859532533e-10
|
||||
2 +6.6134325636394544e+02 +3.2961650841969937e-10
|
||||
3 +6.8370283168748858e+02 -2.3745306857315358e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437279526e+02 -9.5153012965274573e-10
|
||||
2 +6.6134520322602793e+02 +3.2961747879154288e-10
|
||||
3 +6.8370484437168250e+02 -2.3745376753897864e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168859953e+02 -9.5151770701795658e-10
|
||||
2 +6.6134325636394351e+02 +3.2962581533640458e-10
|
||||
3 +6.8370283168748665e+02 -2.3744344699578737e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529855032e+02 -9.5155281099707234e-10
|
||||
2 +6.6136765506988547e+02 +3.2963797613709602e-10
|
||||
3 +6.8372805529743755e+02 -2.3745220688244645e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805699742e+02 -9.5155561220425917e-10
|
||||
2 +6.6136960200379315e+02 +3.2963894649982994e-10
|
||||
3 +6.8373006805588454e+02 -2.3745290592048597e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437279174e+02 -9.5153224647433932e-10
|
||||
2 +6.6134520322602452e+02 +3.2961543119772646e-10
|
||||
3 +6.8370484437167897e+02 -2.3745588436057620e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805699617e+02 -9.5156735103669992e-10
|
||||
2 +6.6136960200379178e+02 +3.2962759157000606e-10
|
||||
3 +6.8373006805588329e+02 -2.3746464475292832e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082136819e+02 -9.5157015223999714e-10
|
||||
2 +6.6137154894343041e+02 +3.2962856194733528e-10
|
||||
3 +6.8373208082025531e+02 -2.3746534378088984e-10
|
||||
|
194
data/sfcf_test/data_o/test_r0/cfg4/f_1
Normal file
194
data/sfcf_test/data_o/test_r0/cfg4/f_1
Normal file
|
@ -0,0 +1,194 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_1
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545021e+02 +6.7620978057264750e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855339e+02 +6.5026340956203663e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177868e+02 +6.5443496235264788e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855515e+02 +6.9706500417651470e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609065e+02 +6.9516150897757419e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046199e+02 +6.9232860455434941e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177447e+02 +1.0849949614595719e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046182e+02 +1.0866063643253473e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098047e+02 +1.0827277318679030e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545038e+02 +3.0143306723935508e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855367e+02 +4.3340379505972648e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177902e+02 +3.9652247575094006e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855526e+02 -8.2540994138261318e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609082e+02 -9.7121215247039609e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046227e+02 -9.0872484903683497e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177453e+02 +5.1331372776616026e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046193e+02 +5.0816653044831932e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098064e+02 +5.1165649253001659e-15
|
||||
|
80
data/sfcf_test/data_o/test_r0/cfg4/f_A
Normal file
80
data/sfcf_test/data_o/test_r0/cfg4/f_A
Normal file
|
@ -0,0 +1,80 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_A
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727972304e+01 -6.1214214711790100e-12
|
||||
2 +1.0447210336915187e+00 +8.9219487930753188e-13
|
||||
3 -4.1025094911185178e+01 -4.8315634170546161e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722862705e+01 +2.0963356863957609e-13
|
||||
2 +1.0542820240851569e+00 +2.3989756974599379e-15
|
||||
3 -4.1024441815729936e+01 -5.7107484666182308e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269442847e+01 -6.6512260271334321e-14
|
||||
2 +1.0516822345055969e+00 -2.2935262162529075e-15
|
||||
3 -4.1025142768037746e+01 +3.7566377680004518e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727965909e+01 -1.6112786177915427e-11
|
||||
2 +1.0447210337411881e+00 -7.0387528705692678e-13
|
||||
3 -4.1025094911167137e+01 +4.6509152745618223e-13
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722842213e+01 -8.1976426690345305e-13
|
||||
2 +1.0542820240843382e+00 +2.1626370477046812e-13
|
||||
3 -4.1024441815730086e+01 -2.4147931196409923e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269443117e+01 +7.9192560386479701e-14
|
||||
2 +1.0516822345055870e+00 -1.2443038782429568e-14
|
||||
3 -4.1025142768037739e+01 +5.9315333178954509e-17
|
||||
|
230
data/sfcf_test/data_o/test_r0/cfg5/F_V0
Normal file
230
data/sfcf_test/data_o/test_r0/cfg5/F_V0
Normal file
|
@ -0,0 +1,230 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_oF_V0
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900851147e+02 +3.0531839956225539e-10
|
||||
2 +6.6131885855823339e+02 +3.9736225045852382e-12
|
||||
3 +6.8367760900810049e+02 -2.5611665964422843e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168793060e+02 +3.0532966356282939e-10
|
||||
2 +6.6134325636407561e+02 +3.9737690976212336e-12
|
||||
3 +6.8370283168751973e+02 -2.5612610847134760e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437212463e+02 +3.0533056232915147e-10
|
||||
2 +6.6134520322615822e+02 +3.9737807346122766e-12
|
||||
3 +6.8370484437171353e+02 -2.5612686251836130e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168792889e+02 +3.0532890521977295e-10
|
||||
2 +6.6134325636407402e+02 +3.9730355551484655e-12
|
||||
3 +6.8370283168751791e+02 -2.5612686681440218e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529787934e+02 +3.0534016954586185e-10
|
||||
2 +6.6136765507001564e+02 +3.9731820664935325e-12
|
||||
3 +6.8372805529746825e+02 -2.5613631608015786e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805632656e+02 +3.0534106842445933e-10
|
||||
2 +6.6136960200392332e+02 +3.9731937804440792e-12
|
||||
3 +6.8373006805591558e+02 -2.5613707007587266e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437212156e+02 +3.0532220084664646e-10
|
||||
2 +6.6134520322615526e+02 +3.9656927030717790e-12
|
||||
3 +6.8370484437171069e+02 -2.5613522400086377e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805632599e+02 +3.0533346499198999e-10
|
||||
2 +6.6136960200392275e+02 +3.9658390079382195e-12
|
||||
3 +6.8373006805591490e+02 -2.5614467350834153e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082069789e+02 +3.0533436384459901e-10
|
||||
2 +6.6137154894356127e+02 +3.9658506942251639e-12
|
||||
3 +6.8373208082028680e+02 -2.5614542753491032e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900918211e+02 -9.5149222536505804e-10
|
||||
2 +6.6131885855810310e+02 +3.2960434859595585e-10
|
||||
3 +6.8367760900806934e+02 -2.3744430846347533e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168860135e+02 -9.5152732859532533e-10
|
||||
2 +6.6134325636394544e+02 +3.2961650841969937e-10
|
||||
3 +6.8370283168748858e+02 -2.3745306857315358e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437279526e+02 -9.5153012965274573e-10
|
||||
2 +6.6134520322602793e+02 +3.2961747879154288e-10
|
||||
3 +6.8370484437168250e+02 -2.3745376753897864e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168859953e+02 -9.5151770701795658e-10
|
||||
2 +6.6134325636394351e+02 +3.2962581533640458e-10
|
||||
3 +6.8370283168748665e+02 -2.3744344699578737e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529855032e+02 -9.5155281099707234e-10
|
||||
2 +6.6136765506988547e+02 +3.2963797613709602e-10
|
||||
3 +6.8372805529743755e+02 -2.3745220688244645e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805699742e+02 -9.5155561220425917e-10
|
||||
2 +6.6136960200379315e+02 +3.2963894649982994e-10
|
||||
3 +6.8373006805588454e+02 -2.3745290592048597e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437279174e+02 -9.5153224647433932e-10
|
||||
2 +6.6134520322602452e+02 +3.2961543119772646e-10
|
||||
3 +6.8370484437167897e+02 -2.3745588436057620e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805699617e+02 -9.5156735103669992e-10
|
||||
2 +6.6136960200379178e+02 +3.2962759157000606e-10
|
||||
3 +6.8373006805588329e+02 -2.3746464475292832e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082136819e+02 -9.5157015223999714e-10
|
||||
2 +6.6137154894343041e+02 +3.2962856194733528e-10
|
||||
3 +6.8373208082025531e+02 -2.3746534378088984e-10
|
||||
|
194
data/sfcf_test/data_o/test_r0/cfg5/f_1
Normal file
194
data/sfcf_test/data_o/test_r0/cfg5/f_1
Normal file
|
@ -0,0 +1,194 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_1
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545021e+02 +6.7620978057264750e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855339e+02 +6.5026340956203663e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177868e+02 +6.5443496235264788e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855515e+02 +6.9706500417651470e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609065e+02 +6.9516150897757419e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046199e+02 +6.9232860455434941e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177447e+02 +1.0849949614595719e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046182e+02 +1.0866063643253473e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098047e+02 +1.0827277318679030e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545038e+02 +3.0143306723935508e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855367e+02 +4.3340379505972648e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177902e+02 +3.9652247575094006e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855526e+02 -8.2540994138261318e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609082e+02 -9.7121215247039609e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046227e+02 -9.0872484903683497e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177453e+02 +5.1331372776616026e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046193e+02 +5.0816653044831932e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098064e+02 +5.1165649253001659e-15
|
||||
|
80
data/sfcf_test/data_o/test_r0/cfg5/f_A
Normal file
80
data/sfcf_test/data_o/test_r0/cfg5/f_A
Normal file
|
@ -0,0 +1,80 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_A
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727972304e+01 -6.1214214711790100e-12
|
||||
2 +1.0447210336915187e+00 +8.9219487930753188e-13
|
||||
3 -4.1025094911185178e+01 -4.8315634170546161e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722862705e+01 +2.0963356863957609e-13
|
||||
2 +1.0542820240851569e+00 +2.3989756974599379e-15
|
||||
3 -4.1024441815729936e+01 -5.7107484666182308e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269442847e+01 -6.6512260271334321e-14
|
||||
2 +1.0516822345055969e+00 -2.2935262162529075e-15
|
||||
3 -4.1025142768037746e+01 +3.7566377680004518e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727965909e+01 -1.6112786177915427e-11
|
||||
2 +1.0447210337411881e+00 -7.0387528705692678e-13
|
||||
3 -4.1025094911167137e+01 +4.6509152745618223e-13
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722842213e+01 -8.1976426690345305e-13
|
||||
2 +1.0542820240843382e+00 +2.1626370477046812e-13
|
||||
3 -4.1024441815730086e+01 -2.4147931196409923e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269443117e+01 +7.9192560386479701e-14
|
||||
2 +1.0516822345055870e+00 -1.2443038782429568e-14
|
||||
3 -4.1025142768037739e+01 +5.9315333178954509e-17
|
||||
|
230
data/sfcf_test/data_o/test_r1/cfg1/F_V0
Normal file
230
data/sfcf_test/data_o/test_r1/cfg1/F_V0
Normal file
|
@ -0,0 +1,230 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_oF_V0
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900851147e+02 +3.0531839956225539e-10
|
||||
2 +6.6131885855823339e+02 +3.9736225045852382e-12
|
||||
3 +6.8367760900810049e+02 -2.5611665964422843e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168793060e+02 +3.0532966356282939e-10
|
||||
2 +6.6134325636407561e+02 +3.9737690976212336e-12
|
||||
3 +6.8370283168751973e+02 -2.5612610847134760e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437212463e+02 +3.0533056232915147e-10
|
||||
2 +6.6134520322615822e+02 +3.9737807346122766e-12
|
||||
3 +6.8370484437171353e+02 -2.5612686251836130e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168792889e+02 +3.0532890521977295e-10
|
||||
2 +6.6134325636407402e+02 +3.9730355551484655e-12
|
||||
3 +6.8370283168751791e+02 -2.5612686681440218e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529787934e+02 +3.0534016954586185e-10
|
||||
2 +6.6136765507001564e+02 +3.9731820664935325e-12
|
||||
3 +6.8372805529746825e+02 -2.5613631608015786e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805632656e+02 +3.0534106842445933e-10
|
||||
2 +6.6136960200392332e+02 +3.9731937804440792e-12
|
||||
3 +6.8373006805591558e+02 -2.5613707007587266e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437212156e+02 +3.0532220084664646e-10
|
||||
2 +6.6134520322615526e+02 +3.9656927030717790e-12
|
||||
3 +6.8370484437171069e+02 -2.5613522400086377e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805632599e+02 +3.0533346499198999e-10
|
||||
2 +6.6136960200392275e+02 +3.9658390079382195e-12
|
||||
3 +6.8373006805591490e+02 -2.5614467350834153e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082069789e+02 +3.0533436384459901e-10
|
||||
2 +6.6137154894356127e+02 +3.9658506942251639e-12
|
||||
3 +6.8373208082028680e+02 -2.5614542753491032e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900918211e+02 -9.5149222536505804e-10
|
||||
2 +6.6131885855810310e+02 +3.2960434859595585e-10
|
||||
3 +6.8367760900806934e+02 -2.3744430846347533e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168860135e+02 -9.5152732859532533e-10
|
||||
2 +6.6134325636394544e+02 +3.2961650841969937e-10
|
||||
3 +6.8370283168748858e+02 -2.3745306857315358e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437279526e+02 -9.5153012965274573e-10
|
||||
2 +6.6134520322602793e+02 +3.2961747879154288e-10
|
||||
3 +6.8370484437168250e+02 -2.3745376753897864e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168859953e+02 -9.5151770701795658e-10
|
||||
2 +6.6134325636394351e+02 +3.2962581533640458e-10
|
||||
3 +6.8370283168748665e+02 -2.3744344699578737e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529855032e+02 -9.5155281099707234e-10
|
||||
2 +6.6136765506988547e+02 +3.2963797613709602e-10
|
||||
3 +6.8372805529743755e+02 -2.3745220688244645e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805699742e+02 -9.5155561220425917e-10
|
||||
2 +6.6136960200379315e+02 +3.2963894649982994e-10
|
||||
3 +6.8373006805588454e+02 -2.3745290592048597e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437279174e+02 -9.5153224647433932e-10
|
||||
2 +6.6134520322602452e+02 +3.2961543119772646e-10
|
||||
3 +6.8370484437167897e+02 -2.3745588436057620e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805699617e+02 -9.5156735103669992e-10
|
||||
2 +6.6136960200379178e+02 +3.2962759157000606e-10
|
||||
3 +6.8373006805588329e+02 -2.3746464475292832e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082136819e+02 -9.5157015223999714e-10
|
||||
2 +6.6137154894343041e+02 +3.2962856194733528e-10
|
||||
3 +6.8373208082025531e+02 -2.3746534378088984e-10
|
||||
|
194
data/sfcf_test/data_o/test_r1/cfg1/f_1
Normal file
|
@ -0,0 +1,194 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_1
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545021e+02 +6.7620978057264750e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855339e+02 +6.5026340956203663e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177868e+02 +6.5443496235264788e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855515e+02 +6.9706500417651470e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609065e+02 +6.9516150897757419e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046199e+02 +6.9232860455434941e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177447e+02 +1.0849949614595719e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046182e+02 +1.0866063643253473e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098047e+02 +1.0827277318679030e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545038e+02 +3.0143306723935508e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855367e+02 +4.3340379505972648e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177902e+02 +3.9652247575094006e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855526e+02 -8.2540994138261318e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609082e+02 -9.7121215247039609e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046227e+02 -9.0872484903683497e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177453e+02 +5.1331372776616026e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046193e+02 +5.0816653044831932e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098064e+02 +5.1165649253001659e-15
|
||||
|
80
data/sfcf_test/data_o/test_r1/cfg1/f_A
Normal file
|
@ -0,0 +1,80 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_A
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727972304e+01 -6.1214214711790100e-12
|
||||
2 +1.0447210336915187e+00 +8.9219487930753188e-13
|
||||
3 -4.1025094911185178e+01 -4.8315634170546161e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722862705e+01 +2.0963356863957609e-13
|
||||
2 +1.0542820240851569e+00 +2.3989756974599379e-15
|
||||
3 -4.1024441815729936e+01 -5.7107484666182308e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269442847e+01 -6.6512260271334321e-14
|
||||
2 +1.0516822345055969e+00 -2.2935262162529075e-15
|
||||
3 -4.1025142768037746e+01 +3.7566377680004518e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727965909e+01 -1.6112786177915427e-11
|
||||
2 +1.0447210337411881e+00 -7.0387528705692678e-13
|
||||
3 -4.1025094911167137e+01 +4.6509152745618223e-13
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722842213e+01 -8.1976426690345305e-13
|
||||
2 +1.0542820240843382e+00 +2.1626370477046812e-13
|
||||
3 -4.1024441815730086e+01 -2.4147931196409923e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269443117e+01 +7.9192560386479701e-14
|
||||
2 +1.0516822345055870e+00 -1.2443038782429568e-14
|
||||
3 -4.1025142768037739e+01 +5.9315333178954509e-17
|
||||
|
230
data/sfcf_test/data_o/test_r2/cfg1/F_V0
Normal file
|
@ -0,0 +1,230 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_oF_V0
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900851147e+02 +3.0531839956225539e-10
|
||||
2 +6.6131885855823339e+02 +3.9736225045852382e-12
|
||||
3 +6.8367760900810049e+02 -2.5611665964422843e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168793060e+02 +3.0532966356282939e-10
|
||||
2 +6.6134325636407561e+02 +3.9737690976212336e-12
|
||||
3 +6.8370283168751973e+02 -2.5612610847134760e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437212463e+02 +3.0533056232915147e-10
|
||||
2 +6.6134520322615822e+02 +3.9737807346122766e-12
|
||||
3 +6.8370484437171353e+02 -2.5612686251836130e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168792889e+02 +3.0532890521977295e-10
|
||||
2 +6.6134325636407402e+02 +3.9730355551484655e-12
|
||||
3 +6.8370283168751791e+02 -2.5612686681440218e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529787934e+02 +3.0534016954586185e-10
|
||||
2 +6.6136765507001564e+02 +3.9731820664935325e-12
|
||||
3 +6.8372805529746825e+02 -2.5613631608015786e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805632656e+02 +3.0534106842445933e-10
|
||||
2 +6.6136960200392332e+02 +3.9731937804440792e-12
|
||||
3 +6.8373006805591558e+02 -2.5613707007587266e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437212156e+02 +3.0532220084664646e-10
|
||||
2 +6.6134520322615526e+02 +3.9656927030717790e-12
|
||||
3 +6.8370484437171069e+02 -2.5613522400086377e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805632599e+02 +3.0533346499198999e-10
|
||||
2 +6.6136960200392275e+02 +3.9658390079382195e-12
|
||||
3 +6.8373006805591490e+02 -2.5614467350834153e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082069789e+02 +3.0533436384459901e-10
|
||||
2 +6.6137154894356127e+02 +3.9658506942251639e-12
|
||||
3 +6.8373208082028680e+02 -2.5614542753491032e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8367760900918211e+02 -9.5149222536505804e-10
|
||||
2 +6.6131885855810310e+02 +3.2960434859595585e-10
|
||||
3 +6.8367760900806934e+02 -2.3744430846347533e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8370283168860135e+02 -9.5152732859532533e-10
|
||||
2 +6.6134325636394544e+02 +3.2961650841969937e-10
|
||||
3 +6.8370283168748858e+02 -2.3745306857315358e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8370484437279526e+02 -9.5153012965274573e-10
|
||||
2 +6.6134520322602793e+02 +3.2961747879154288e-10
|
||||
3 +6.8370484437168250e+02 -2.3745376753897864e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370283168859953e+02 -9.5151770701795658e-10
|
||||
2 +6.6134325636394351e+02 +3.2962581533640458e-10
|
||||
3 +6.8370283168748665e+02 -2.3744344699578737e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8372805529855032e+02 -9.5155281099707234e-10
|
||||
2 +6.6136765506988547e+02 +3.2963797613709602e-10
|
||||
3 +6.8372805529743755e+02 -2.3745220688244645e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373006805699742e+02 -9.5155561220425917e-10
|
||||
2 +6.6136960200379315e+02 +3.2963894649982994e-10
|
||||
3 +6.8373006805588454e+02 -2.3745290592048597e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr_t
|
||||
1 +6.8370484437279174e+02 -9.5153224647433932e-10
|
||||
2 +6.6134520322602452e+02 +3.2961543119772646e-10
|
||||
3 +6.8370484437167897e+02 -2.3745588436057620e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr_t
|
||||
1 +6.8373006805699617e+02 -9.5156735103669992e-10
|
||||
2 +6.6136960200379178e+02 +3.2962759157000606e-10
|
||||
3 +6.8373006805588329e+02 -2.3746464475292832e-10
|
||||
|
||||
[correlator]
|
||||
|
||||
name F_V0
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr_t
|
||||
1 +6.8373208082136819e+02 -9.5157015223999714e-10
|
||||
2 +6.6137154894343041e+02 +3.2962856194733528e-10
|
||||
3 +6.8373208082025531e+02 -2.3746534378088984e-10
|
||||
|
194
data/sfcf_test/data_o/test_r2/cfg1/f_1
Normal file
|
@ -0,0 +1,194 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_1
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545021e+02 +6.7620978057264750e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855339e+02 +6.5026340956203663e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177868e+02 +6.5443496235264788e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855515e+02 +6.9706500417651470e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609065e+02 +6.9516150897757419e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046199e+02 +6.9232860455434941e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177447e+02 +1.0849949614595719e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046182e+02 +1.0866063643253473e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098047e+02 +1.0827277318679030e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5119415254545038e+02 +3.0143306723935508e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5120703575855367e+02 +4.3340379505972648e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5120808902177902e+02 +3.9652247575094006e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120703575855526e+02 -8.2540994138261318e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122001235609082e+02 -9.7121215247039609e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122104108046227e+02 -9.0872484903683497e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 0
|
||||
corr
|
||||
+3.5120808902177453e+02 +5.1331372776616026e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 1
|
||||
corr
|
||||
+3.5122104108046193e+02 +5.0816653044831932e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_1
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
wf_2 2
|
||||
corr
|
||||
+3.5122207631098064e+02 +5.1165649253001659e-15
|
||||
|
80
data/sfcf_test/data_o/test_r2/cfg1/f_A
Normal file
|
@ -0,0 +1,80 @@
|
|||
[run]
|
||||
|
||||
version 2.1
|
||||
date 2022-01-19 11:04:00 +0100
|
||||
host r04n07.palma.wwu
|
||||
dir /scratch/tmp/j_kuhl19
|
||||
user j_kuhl19
|
||||
gauge_name /unity
|
||||
gauge_md5 1ea28326e4090996111a320b8372811d
|
||||
param_name sfcf_unity_test.in
|
||||
param_md5 d881e90d41188a33b8b0f1bd0bc53ea5
|
||||
param_hash 686af5e712ee2902180f5428af94c6e7
|
||||
data_name ./output_10519905/data_of_A
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727972304e+01 -6.1214214711790100e-12
|
||||
2 +1.0447210336915187e+00 +8.9219487930753188e-13
|
||||
3 -4.1025094911185178e+01 -4.8315634170546161e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722862705e+01 +2.0963356863957609e-13
|
||||
2 +1.0542820240851569e+00 +2.3989756974599379e-15
|
||||
3 -4.1024441815729936e+01 -5.7107484666182308e-15
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 0
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269442847e+01 -6.6512260271334321e-14
|
||||
2 +1.0516822345055969e+00 -2.2935262162529075e-15
|
||||
3 -4.1025142768037746e+01 +3.7566377680004518e-16
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 0
|
||||
corr_t
|
||||
1 +6.5471188727965909e+01 -1.6112786177915427e-11
|
||||
2 +1.0447210337411881e+00 -7.0387528705692678e-13
|
||||
3 -4.1025094911167137e+01 +4.6509152745618223e-13
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 1
|
||||
corr_t
|
||||
1 +6.5551520722842213e+01 -8.1976426690345305e-13
|
||||
2 +1.0542820240843382e+00 +2.1626370477046812e-13
|
||||
3 -4.1024441815730086e+01 -2.4147931196409923e-14
|
||||
|
||||
[correlator]
|
||||
|
||||
name f_A
|
||||
quarks lquark lquark
|
||||
offset 1
|
||||
wf 2
|
||||
corr_t
|
||||
1 +6.5529951269443117e+01 +7.9192560386479701e-14
|
||||
2 +1.0516822345055870e+00 -1.2443038782429568e-14
|
||||
3 -4.1025142768037739e+01 +5.9315333178954509e-17
|
||||
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -51,7 +51,7 @@
|
|||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The standard matrix product can be performed with @"
|
||||
"The standard matrix product can be performed with `@`"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -101,13 +101,38 @@
|
|||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Mathematical functions work elementwise"
|
||||
"For large matrices overloading the standard operator `@` can become inefficient as pyerrors has to perform a large number of elementary opeations. For these situations pyerrors provides the function `linalg.matmul` which optimizes the required automatic differentiation. The function can take an arbitray number of operands."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"[[Obs[78.12099999999998] Obs[-22.909999999999997]]\n",
|
||||
" [Obs[-22.909999999999997] Obs[7.1]]]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(pe.linalg.matmul(matrix, matrix, matrix)) # Equivalent to matrix @ matrix @ matrix but faster for large matrices"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Mathematical functions work elementwise"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
|
@ -126,12 +151,12 @@
|
|||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For a vector of `Obs`, we again use np.asarray to end up with the correct object"
|
||||
"For a vector of `Obs`, we again use `np.asarray` to end up with the correct object"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
|
@ -160,14 +185,14 @@
|
|||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"[Obs[7.2(1.7)] Obs[-1.00(45)]]\n"
|
||||
"[Obs[7.2(1.7)] Obs[-1.00(46)]]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
@ -182,40 +207,7 @@
|
|||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Matrix to scalar operations\n",
|
||||
"If we want to apply a numpy matrix function with a scalar return value we can use `scalar_mat_op`. __Here we need to use the autograd wrapped version of numpy__ (imported as anp) to use automatic differentiation."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"det \t Obs[3.10(28)]\n",
|
||||
"trace \t Obs[5.10(20)]\n",
|
||||
"norm \t Obs[4.45(19)]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import autograd.numpy as anp # Thinly-wrapped numpy\n",
|
||||
"funcs = [anp.linalg.det, anp.trace, anp.linalg.norm]\n",
|
||||
"\n",
|
||||
"for i, func in enumerate(funcs):\n",
|
||||
" res = pe.linalg.scalar_mat_op(func, matrix)\n",
|
||||
" res.gamma_method()\n",
|
||||
" print(func.__name__, '\\t', res)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For matrix operations which are not supported by autograd we can use numerical differentiation"
|
||||
"`pyerrors` provides the user with wrappers to the `numpy.linalg` functions which work on `Obs` valued matrices. We can for example calculate the determinant of the matrix via"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -227,26 +219,21 @@
|
|||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"cond \t Obs[6.23(58)]\n",
|
||||
"expm_cond \t Obs[4.45(19)]\n"
|
||||
"3.10(28)\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"funcs = [np.linalg.cond, scipy.linalg.expm_cond]\n",
|
||||
"\n",
|
||||
"for i, func in enumerate(funcs):\n",
|
||||
" res = pe.linalg.scalar_mat_op(func, matrix, num_grad=True)\n",
|
||||
" res.gamma_method()\n",
|
||||
" print(func.__name__, ' \\t', res)"
|
||||
"det = pe.linalg.det(matrix)\n",
|
||||
"det.gamma_method()\n",
|
||||
"print(det)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Matrix to matrix operations\n",
|
||||
"For matrix operations with a matrix as return value we can use another wrapper `mat_mat_op`. Take as an example the cholesky decompostion. __Here we need to use the autograd wrapped version of numpy__ (imported as anp) to use automatic differentiation."
|
||||
"The cholesky decomposition can be obtained as follows"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -259,12 +246,12 @@
|
|||
"output_type": "stream",
|
||||
"text": [
|
||||
"[[Obs[2.025(49)] Obs[0.0]]\n",
|
||||
" [Obs[-0.494(51)] Obs[0.870(29)]]]\n"
|
||||
" [Obs[-0.494(50)] Obs[0.870(29)]]]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"cholesky = pe.linalg.mat_mat_op(anp.linalg.cholesky, matrix)\n",
|
||||
"cholesky = pe.linalg.cholesky(matrix)\n",
|
||||
"for (i, j), entry in np.ndenumerate(cholesky):\n",
|
||||
" entry.gamma_method()\n",
|
||||
"print(cholesky)"
|
||||
|
@ -321,7 +308,7 @@
|
|||
}
|
||||
],
|
||||
"source": [
|
||||
"inv = pe.linalg.mat_mat_op(anp.linalg.inv, cholesky)\n",
|
||||
"inv = pe.linalg.inv(cholesky)\n",
|
||||
"for (i, j), entry in np.ndenumerate(inv):\n",
|
||||
" entry.gamma_method()\n",
|
||||
"print(inv)\n",
|
||||
|
@ -334,59 +321,14 @@
|
|||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Matrix to matrix operations which are not supported by autograd can also be computed with numeric differentiation"
|
||||
"## Eigenvalues and eigenvectors\n",
|
||||
"We can also compute eigenvalues and eigenvectors of symmetric matrices with a special wrapper `eigh`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"orth\n",
|
||||
"[[Obs[-0.9592(76)] Obs[0.283(26)]]\n",
|
||||
" [Obs[0.283(26)] Obs[0.9592(76)]]]\n",
|
||||
"expm\n",
|
||||
"[[Obs[75(15)] Obs[-21.4(4.1)]]\n",
|
||||
" [Obs[-21.4(4.1)] Obs[8.3(1.4)]]]\n",
|
||||
"logm\n",
|
||||
"[[Obs[1.334(57)] Obs[-0.496(61)]]\n",
|
||||
" [Obs[-0.496(61)] Obs[-0.203(50)]]]\n",
|
||||
"sinhm\n",
|
||||
"[[Obs[37.3(7.4)] Obs[-10.8(2.1)]]\n",
|
||||
" [Obs[-10.8(2.1)] Obs[3.94(68)]]]\n",
|
||||
"sqrtm\n",
|
||||
"[[Obs[1.996(51)] Obs[-0.341(37)]]\n",
|
||||
" [Obs[-0.341(37)] Obs[0.940(14)]]]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"funcs = [scipy.linalg.orth, scipy.linalg.expm, scipy.linalg.logm, scipy.linalg.sinhm, scipy.linalg.sqrtm]\n",
|
||||
"\n",
|
||||
"for i,func in enumerate(funcs):\n",
|
||||
" res = pe.linalg.mat_mat_op(func, matrix, num_grad=True)\n",
|
||||
" for (i, j), entry in np.ndenumerate(res):\n",
|
||||
" entry.gamma_method()\n",
|
||||
" print(func.__name__)\n",
|
||||
" print(res)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Eigenvalues and eigenvectors\n",
|
||||
"We can also compute eigenvalues and eigenvectors of symmetric matrices with a special wrapper `eigh`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
|
@ -395,8 +337,8 @@
|
|||
"Eigenvalues:\n",
|
||||
"[Obs[0.705(57)] Obs[4.39(19)]]\n",
|
||||
"Eigenvectors:\n",
|
||||
"[[Obs[-0.283(26)] Obs[-0.9592(76)]]\n",
|
||||
" [Obs[-0.9592(76)] Obs[0.283(26)]]]\n"
|
||||
"[[Obs[-0.283(26)] Obs[-0.9592(75)]]\n",
|
||||
" [Obs[-0.9592(75)] Obs[0.283(26)]]]\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
@ -421,7 +363,7 @@
|
|||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 15,
|
||||
"execution_count": 14,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
|
@ -451,7 +393,7 @@
|
|||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
|
@ -465,7 +407,7 @@
|
|||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.9"
|
||||
"version": "3.8.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
|
335
examples/06_gevp.ipynb
Normal file
File diff suppressed because one or more lines are too long
Binary file not shown.
BIN
examples/data/correlator_test.json.gz
Normal file
Binary file not shown.
BIN
examples/data/f_A.json.gz
Normal file
Binary file not shown.
BIN
examples/data/f_P.json.gz
Normal file
Binary file not shown.
BIN
examples/data/matrix_correlator.json.gz
Normal file
Binary file not shown.
BIN
examples/data/matrix_correlator_V1V1.json.gz
Normal file
Binary file not shown.
BIN
examples/data/matrix_correlator_V2V2.json.gz
Normal file
Binary file not shown.
BIN
examples/data/matrix_correlator_V3V3.json.gz
Normal file
Binary file not shown.
|
@ -161,7 +161,7 @@ Passing arguments to the `gamma_method` still dominates over the dictionaries.
|
|||
|
||||
## Irregular Monte Carlo chains
|
||||
|
||||
Irregular Monte Carlo chains can be initialized with the parameter `idl`.
|
||||
`Obs` objects defined on irregular Monte Carlo chains can be initialized with the parameter `idl`.
|
||||
|
||||
```python
|
||||
# Observable defined on configurations 20 to 519
|
||||
|
@ -187,12 +187,64 @@ obs3.details()
|
|||
|
||||
```
|
||||
|
||||
`Obs` objects defined on regular and irregular histories of the same ensemble can be computed with each other and the correct error propagation and estimation is automatically taken care of.
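As a minimal sketch (the ensemble name and the random samples below are made up for illustration), combining an observable on a regular history with one defined via `idl` might look like this:
```python
import numpy as np
import pyerrors as pe

# Observable on the regular configuration range 1 to 500 of a hypothetical ensemble
obs_regular = pe.Obs([np.random.normal(1.0, 0.1, 500)], ['ensemble1'])
# Observable on the irregular range 20 to 519 of the same ensemble
obs_irregular = pe.Obs([np.random.normal(1.0, 0.1, 500)], ['ensemble1'], idl=[range(20, 520)])

product = obs_regular * obs_irregular  # overlap of the two histories is handled automatically
product.gamma_method()
print(product)
```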
|
||||
|
||||
**Warning:** Irregular Monte Carlo chains can result in odd patterns in the autocorrelation functions.
|
||||
Make sure to check the autocorrelation time with e.g. `pyerrors.obs.Obs.plot_rho` or `pyerrors.obs.Obs.plot_tauint`.
|
||||
|
||||
For the full API see `pyerrors.obs.Obs`.
|
||||
|
||||
# Correlators
|
||||
When one is not interested in single observables but correlation functions, `pyerrors` offers the `Corr` class which simplifies the corresponding error propagation and provides the user with a set of standard methods. In order to initialize a `Corr` object one needs to arrange the data as a list of `Obs`
|
||||
```python
|
||||
my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3])
|
||||
print(my_corr)
|
||||
> x0/a Corr(x0/a)
|
||||
> ------------------
|
||||
> 0 0.7957(80)
|
||||
> 1 0.5156(51)
|
||||
> 2 0.3227(33)
|
||||
> 3 0.2041(21)
|
||||
```
|
||||
In case the correlation functions are not defined on the outermost timeslices, for example because of fixed boundary conditions, a padding can be introduced.
|
||||
```python
|
||||
my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3], padding=[1, 1])
|
||||
print(my_corr)
|
||||
> x0/a Corr(x0/a)
|
||||
> ------------------
|
||||
> 0
|
||||
> 1 0.7957(80)
|
||||
> 2 0.5156(51)
|
||||
> 3 0.3227(33)
|
||||
> 4 0.2041(21)
|
||||
> 5
|
||||
```
|
||||
The individual entries of a correlator can be accessed via slicing
|
||||
```python
|
||||
print(my_corr[3])
|
||||
> 0.3227(33)
|
||||
```
|
||||
Error propagation with the `Corr` class works very similarly to that of `Obs` objects. Mathematical operations are overloaded and `Corr` objects can be combined with other `Corr` objects, `Obs` objects, real numbers and integers.
|
||||
```python
|
||||
my_new_corr = 0.3 * my_corr[2] * my_corr * my_corr + 12 / my_corr
|
||||
```
|
||||
|
||||
`pyerrors` provides the user with a set of commonly used methods for the manipulation of correlator objects; a short usage sketch follows the list:
|
||||
- `Corr.gamma_method` applies the gamma method to all entries of the correlator.
|
||||
- `Corr.m_eff` to construct effective masses. Various variants for periodic and fixed temporal boundary conditions are available.
|
||||
- `Corr.deriv` returns the first derivative of the correlator as `Corr`. Different discretizations of the numerical derivative are available.
|
||||
- `Corr.second_deriv` returns the second derivative of the correlator as `Corr`. Different discretizations of the numerical derivative are available.
|
||||
- `Corr.symmetric` symmetrizes parity even correlation functions, assuming periodic boundary conditions.
|
||||
- `Corr.anti_symmetric` anti-symmetrizes parity odd correlation functions, assuming periodic boundary conditions.
|
||||
- `Corr.T_symmetry` averages a correlator with its time symmetry partner, assuming fixed boundary conditions.
|
||||
- `Corr.plateau` extracts a plateau value from the correlator in a given range.
|
||||
- `Corr.roll` periodically shifts the correlator.
|
||||
- `Corr.reverse` reverses the time ordering of the correlator.
|
||||
- `Corr.correlate` constructs a disconnected correlation function from the correlator and another `Corr` or `Obs` object.
|
||||
- `Corr.reweight` reweights the correlator.
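A minimal sketch of how some of these methods might be chained (`my_corr` is assumed here to be a correlator on a realistically sized periodic lattice, and the plateau range is made up):
```python
# Sketch only: my_corr and the plateau range [10, 15] are placeholders for illustration.
m_eff = my_corr.symmetric().m_eff(variant='cosh')  # effective mass from the symmetrized correlator
m_eff.gamma_method()
mass = m_eff.plateau([10, 15])                     # plateau average in the chosen range
mass.gamma_method()
print(mass)
```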
|
||||
|
||||
`pyerrors` can also handle matrices of correlation functions and extract energy states from these matrices via a generalized eigenvalue problem (see `pyerrors.correlators.Corr.GEVP`).
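As a rough sketch (the matrix correlator `matrix_corr` and the choices of `t0` and `ts` are placeholders, not values prescribed by the library):
```python
# matrix_corr is assumed to be an NxN matrix correlator (Corr with N > 1).
vec = matrix_corr.GEVP(t0=3, ts=6, state=0)  # eigenvector of G(ts) v = lambda G(t0) v for the ground state
ground_state = matrix_corr.projected(vec)    # project the matrix correlator onto this vector
m_eff = ground_state.m_eff()
m_eff.gamma_method()
```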
|
||||
|
||||
For the full API see `pyerrors.correlators.Corr`.
|
||||
|
||||
# Complex observables
|
||||
|
@ -223,17 +275,26 @@ print(my_derived_cobs)
|
|||
`pyerrors.roots`
|
||||
|
||||
# Matrix operations
|
||||
`pyerrors.linalg`
|
||||
`pyerrors` provides wrappers for `Obs`-valued matrix operations based on `numpy.linalg`. The supported functions include:
|
||||
- `inv` for the matrix inverse.
|
||||
- `cholesky` for the Cholesky decomposition.
|
||||
- `det` for the matrix determinant.
|
||||
- `eigh` for eigenvalues and eigenvectors of Hermitian matrices.
|
||||
- `eig` for eigenvalues of general matrices.
|
||||
- `pinv` for the Moore-Penrose pseudoinverse.
|
||||
- `svd` for the singular value decomposition.
|
||||
|
||||
For the full API see `pyerrors.linalg`.
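A minimal sketch of how these wrappers might be used (the `Obs` entries `obs11`, `obs12` and `obs22` are assumed to exist already):
```python
import numpy as np
import pyerrors as pe

# obs11, obs12 and obs22 are assumed to be existing Obs objects.
matrix = np.asarray([[obs11, obs12], [obs12, obs22]])

inverse = pe.linalg.inv(matrix)                      # matrix inverse with error propagation
eigenvalues, eigenvectors = pe.linalg.eigh(matrix)   # spectrum of the symmetric matrix
det = pe.linalg.det(matrix)
det.gamma_method()
print(det)
```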
|
||||
|
||||
# Export data
|
||||
The preferred exported file format within `pyerrors` is
|
||||
The preferred file format for exporting data within `pyerrors` is json.gz.
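A possible sketch (the routine names `pyerrors.input.json.dump_to_json` and `load_json`, as well as the observable being written, are assumptions and not confirmed by this text):
```python
# Sketch only: my_obs is an assumed Obs; the function names are assumptions.
pe.input.json.dump_to_json([my_obs], 'my_measurement')  # writes my_measurement.json.gz
reread = pe.input.json.load_json('my_measurement')
```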
|
||||
|
||||
## Jackknife samples
|
||||
For comparison with other analysis workflows `pyerrors` can generate jackknife samples from an `Obs` object or import jackknife samples into an `Obs` object.
|
||||
See `pyerrors.obs.Obs.export_jackknife` and `pyerrors.obs.import_jackknife` for details.
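A minimal sketch (the observable `my_obs` and its ensemble name are assumed):
```python
# my_obs is assumed to be an Obs defined on a single ensemble called 'ensemble1'.
jacks = my_obs.export_jackknife()                      # jacks[0]: mean value, jacks[1:]: jackknife samples
my_obs_rec = pe.import_jackknife(jacks, 'ensemble1')   # reconstruct an Obs from the samples
my_obs_rec.gamma_method()
```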
|
||||
|
||||
# Input
|
||||
`pyerrors.input`
|
||||
`pyerrors` includes an `input` submodule that contains input routines and parsers for the output of various numerical programs. For details see `pyerrors.input`.
|
||||
'''
|
||||
from .obs import *
|
||||
from .correlators import *
|
||||
|
|
|
@ -3,8 +3,8 @@ import numpy as np
|
|||
import autograd.numpy as anp
|
||||
import matplotlib.pyplot as plt
|
||||
import scipy.linalg
|
||||
from .obs import Obs, reweight, correlate
|
||||
from .misc import dump_object
|
||||
from .obs import Obs, reweight, correlate, CObs
|
||||
from .misc import dump_object, _assert_equal_properties
|
||||
from .fits import least_squares
|
||||
from .linalg import eigh, inv, cholesky
|
||||
from .roots import find_root
|
||||
|
@ -19,60 +19,99 @@ class Corr:
|
|||
to iterate over all timeslices for every operation. This is especially true, when dealing with smearing matrices.
|
||||
|
||||
The correlator can have two types of content: An Obs at every timeslice OR a GEVP
|
||||
smearing matrix at every timeslice. Other dependency (eg. spacial) are not supported.
|
||||
smearing matrix at every timeslice. Other dependencies (e.g. spatial) are not supported.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, data_input, padding_front=0, padding_back=0, prange=None):
|
||||
# All data_input should be a list of things at different timeslices. This needs to be verified
|
||||
def __init__(self, data_input, padding=[0, 0], prange=None):
|
||||
""" Initialize a Corr object.
|
||||
|
||||
if not isinstance(data_input, list):
|
||||
raise TypeError('Corr__init__ expects a list of timeslices.')
|
||||
# data_input can have multiple shapes. The simplest one is a list of Obs.
|
||||
# We check, if this is the case
|
||||
if all([isinstance(item, Obs) for item in data_input]):
|
||||
self.content = [np.asarray([item]) for item in data_input]
|
||||
# Wrapping the Obs in an array ensures that the data structure is consistent with smearing matrices.
|
||||
self.N = 1 # number of smearings
|
||||
Parameters
|
||||
----------
|
||||
data_input : list or array
|
||||
list of Obs or list of arrays of Obs or array of Corrs
|
||||
padding : list, optional
|
||||
List with two entries where the first labels the padding
|
||||
at the front of the correlator and the second the padding
|
||||
at the back.
|
||||
prange : list, optional
|
||||
List containing the first and last timeslice of the plateau
|
||||
region identified for this correlator.
|
||||
"""
|
||||
|
||||
# data_input in the form [np.array(Obs,NxN)]
|
||||
elif all([isinstance(item, np.ndarray) or item is None for item in data_input]) and any([isinstance(item, np.ndarray) for item in data_input]):
|
||||
self.content = data_input
|
||||
if isinstance(data_input, np.ndarray): # Input is an array of Corrs
|
||||
|
||||
noNull = [a for a in self.content if not (a is None)] # To check if the matrices are correct for all undefined elements
|
||||
self.N = noNull[0].shape[0]
|
||||
# The checks are now identical to the case above
|
||||
if self.N > 1 and noNull[0].shape[0] != noNull[0].shape[1]:
|
||||
raise Exception("Smearing matrices are not NxN")
|
||||
if (not all([item.shape == noNull[0].shape for item in noNull])):
|
||||
raise Exception("Items in data_input are not of identical shape." + str(noNull))
|
||||
else: # In case its a list of something else.
|
||||
raise Exception("data_input contains item of wrong type")
|
||||
# This only works, if the array fulfills the conditions below
|
||||
if not len(data_input.shape) == 2 and data_input.shape[0] == data_input.shape[1]:
|
||||
raise Exception("Incompatible array shape")
|
||||
if not all([isinstance(item, Corr) for item in data_input.flatten()]):
|
||||
raise Exception("If the input is an array, its elements must be of type pe.Corr")
|
||||
if not all([item.N == 1 for item in data_input.flatten()]):
|
||||
raise Exception("Can only construct matrix correlator from single valued correlators")
|
||||
if not len(set([item.T for item in data_input.flatten()])) == 1:
|
||||
raise Exception("All input Correlators must be defined over the same timeslices.")
|
||||
|
||||
T = data_input[0, 0].T
|
||||
N = data_input.shape[0]
|
||||
input_as_list = []
|
||||
for t in range(T):
|
||||
if any([(item.content[t][0] is None) for item in data_input.flatten()]):
|
||||
if not all([(item.content[t][0] is None) for item in data_input.flatten()]):
|
||||
warnings.warn("Input ill-defined at different timeslices. Conversion leads to data loss!", RuntimeWarning)
|
||||
input_as_list.append(None)
|
||||
else:
|
||||
array_at_timeslace = np.empty([N, N], dtype="object")
|
||||
for i in range(N):
|
||||
for j in range(N):
|
||||
array_at_timeslace[i, j] = data_input[i, j][t]
|
||||
input_as_list.append(array_at_timeslace)
|
||||
data_input = input_as_list
|
||||
|
||||
if isinstance(data_input, list):
|
||||
|
||||
if all([(isinstance(item, Obs) or isinstance(item, CObs)) for item in data_input]):
|
||||
_assert_equal_properties(data_input)
|
||||
self.content = [np.asarray([item]) for item in data_input]
|
||||
if all([(isinstance(item, Obs) or isinstance(item, CObs)) or item is None for item in data_input]):
|
||||
_assert_equal_properties([o for o in data_input if o is not None])
|
||||
self.content = [np.asarray([item]) if item is not None else None for item in data_input]
|
||||
self.N = 1
|
||||
|
||||
elif all([isinstance(item, np.ndarray) or item is None for item in data_input]) and any([isinstance(item, np.ndarray) for item in data_input]):
|
||||
self.content = data_input
|
||||
noNull = [a for a in self.content if not (a is None)] # To check if the matrices are correct for all undefined elements
|
||||
self.N = noNull[0].shape[0]
|
||||
if self.N > 1 and noNull[0].shape[0] != noNull[0].shape[1]:
|
||||
raise Exception("Smearing matrices are not NxN")
|
||||
if (not all([item.shape == noNull[0].shape for item in noNull])):
|
||||
raise Exception("Items in data_input are not of identical shape." + str(noNull))
|
||||
else:
|
||||
raise Exception("data_input contains item of wrong type")
|
||||
else:
|
||||
raise Exception("Data input was not given as list or correct array")
|
||||
|
||||
self.tag = None
|
||||
|
||||
# We now apply some padding to our list. In case that our list represents a correlator of length T but is not defined at every value.
|
||||
# An undefined timeslice is represented by the None object
|
||||
self.content = [None] * padding_front + self.content + [None] * padding_back
|
||||
self.T = len(self.content) # for convenience: will be used a lot
|
||||
self.content = [None] * padding[0] + self.content + [None] * padding[1]
|
||||
self.T = len(self.content)
|
||||
|
||||
# The attribute "range" [start,end] marks a range of two timeslices.
|
||||
# This is useful for keeping track of plateaus and fitranges.
|
||||
# The range can be inherited from other Corrs, if the operation should not alter a chosen range eg. multiplication with a constant.
|
||||
self.prange = prange
|
||||
|
||||
self.gamma_method()
|
||||
|
||||
def __getitem__(self, idx):
|
||||
"""Return the content of timeslice idx"""
|
||||
if len(self.content[idx]) == 1:
|
||||
if self.content[idx] is None:
|
||||
return None
|
||||
elif len(self.content[idx]) == 1:
|
||||
return self.content[idx][0]
|
||||
else:
|
||||
return self.content[idx]
|
||||
|
||||
@property
|
||||
def reweighted(self):
|
||||
bool_array = np.array([list(map(lambda x: x.reweighted, o)) for o in self.content])
|
||||
bool_array = np.array([list(map(lambda x: x.reweighted, o)) for o in [x for x in self.content if x is not None]])
|
||||
if np.all(bool_array == 1):
|
||||
return True
|
||||
elif np.all(bool_array == 0):
|
||||
|
@ -91,15 +130,15 @@ class Corr:
|
|||
for j in range(self.N):
|
||||
item[i, j].gamma_method(**kwargs)
|
||||
|
||||
# We need to project the Correlator with a Vector to get a single value at each timeslice.
|
||||
# The method can use one or two vectors.
|
||||
# If two are specified it returns v1@G@v2 (the order might be very important.)
|
||||
# By default it will return the lowest source, which usually means unsmeared-unsmeared (0,0), but it does not have to
|
||||
def projected(self, vector_l=None, vector_r=None):
|
||||
def projected(self, vector_l=None, vector_r=None, normalize=False):
|
||||
"""We need to project the Correlator with a Vector to get a single value at each timeslice.
|
||||
|
||||
The method can use one or two vectors.
|
||||
If two are specified it returns v1@G@v2 (the order might be very important.)
|
||||
By default it will return the lowest source, which usually means unsmeared-unsmeared (0,0), but it does not have to
|
||||
"""
|
||||
if self.N == 1:
|
||||
raise Exception("Trying to project a Corr, that already has N=1.")
|
||||
# This Exception is in no way necessary. One could just return self
|
||||
# But there is no scenario, where a user would want that to happen and the error message might be more informative.
|
||||
|
||||
self.gamma_method()
|
||||
|
||||
|
@ -107,31 +146,49 @@ class Corr:
|
|||
vector_l, vector_r = np.asarray([1.] + (self.N - 1) * [0.]), np.asarray([1.] + (self.N - 1) * [0.])
|
||||
elif(vector_r is None):
|
||||
vector_r = vector_l
|
||||
if isinstance(vector_l, list) and not isinstance(vector_r, list):
|
||||
if len(vector_l) != self.T:
|
||||
raise Exception("Length of vector list must be equal to T")
|
||||
vector_r = [vector_r] * self.T
|
||||
if isinstance(vector_r, list) and not isinstance(vector_l, list):
|
||||
if len(vector_r) != self.T:
|
||||
raise Exception("Length of vector list must be equal to T")
|
||||
vector_l = [vector_l] * self.T
|
||||
|
||||
if not vector_l.shape == vector_r.shape == (self.N,):
|
||||
raise Exception("Vectors are of wrong shape!")
|
||||
if not isinstance(vector_l, list):
|
||||
if not vector_l.shape == vector_r.shape == (self.N,):
|
||||
raise Exception("Vectors are of wrong shape!")
|
||||
if normalize:
|
||||
vector_l, vector_r = vector_l / np.sqrt((vector_l @ vector_l)), vector_r / np.sqrt(vector_r @ vector_r)
|
||||
# if (not (0.95 < vector_r @ vector_r < 1.05)) or (not (0.95 < vector_l @ vector_l < 1.05)):
|
||||
# print("Vectors are normalized before projection!")
|
||||
|
||||
# We always normalize before projecting! But we only raise a warning, when it is clear, they where not meant to be normalized.
|
||||
if (not (0.95 < vector_r @ vector_r < 1.05)) or (not (0.95 < vector_l @ vector_l < 1.05)):
|
||||
print("Vectors are normalized before projection!")
|
||||
newcontent = [None if (item is None) else np.asarray([vector_l.T @ item @ vector_r]) for item in self.content]
|
||||
|
||||
vector_l, vector_r = vector_l / np.sqrt((vector_l @ vector_l)), vector_r / np.sqrt(vector_r @ vector_r)
|
||||
else:
|
||||
# There are no checks here yet. There are so many possible scenarios, where this can go wrong.
|
||||
if normalize:
|
||||
for t in range(self.T):
|
||||
vector_l[t], vector_r[t] = vector_l[t] / np.sqrt((vector_l[t] @ vector_l[t])), vector_r[t] / np.sqrt(vector_r[t] @ vector_r[t])
|
||||
|
||||
newcontent = [None if (item is None) else np.asarray([vector_l.T @ item @ vector_r]) for item in self.content]
|
||||
newcontent = [None if (self.content[t] is None or vector_l[t] is None or vector_r[t] is None) else np.asarray([vector_l[t].T @ self.content[t] @ vector_r[t]]) for t in range(self.T)]
|
||||
return Corr(newcontent)
|
||||
|
||||
def sum(self):
|
||||
return np.sqrt(self.N) * self.projected(np.ones(self.N))
|
||||
|
||||
# For purposes of debugging and verification, one might want to see a single smearing level. smearing will return a Corr at the specified i,j. where both are integers 0<=i,j<N.
|
||||
def smearing(self, i, j):
|
||||
"""Picks the element [i,j] from every matrix and returns a correlator containing one Obs per timeslice.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
i : int
|
||||
First index to be picked.
|
||||
j : int
|
||||
Second index to be picked.
|
||||
"""
|
||||
if self.N == 1:
|
||||
raise Exception("Trying to pick smearing from projected Corr")
|
||||
newcontent = [None if(item is None) else item[i, j] for item in self.content]
|
||||
return Corr(newcontent)
|
||||
|
||||
# Obs and Matplotlib do not play nicely
|
||||
# We often want to retrieve x,y,y_err as lists to pass them to something like pyplot.errorbar
|
||||
def plottable(self):
|
||||
"""Outputs the correlator in a plotable format.
|
||||
|
||||
|
@ -139,16 +196,13 @@ class Corr:
|
|||
timeslice and the error on each timeslice.
|
||||
"""
|
||||
if self.N != 1:
|
||||
raise Exception("Can only make Corr[N=1] plottable") # We could also autoproject to the groundstate or expect vectors, but this is supposed to be a super simple function.
|
||||
raise Exception("Can only make Corr[N=1] plottable")
|
||||
x_list = [x for x in range(self.T) if not self.content[x] is None]
|
||||
y_list = [y[0].value for y in self.content if y is not None]
|
||||
y_err_list = [y[0].dvalue for y in self.content if y is not None]
|
||||
|
||||
return x_list, y_list, y_err_list
|
||||
|
||||
# symmetric returns a Corr, that has been symmetrized.
|
||||
# A symmetry checker is still to be implemented
|
||||
# The method will not delete any redundant timeslices (Bad for memory, Great for convenience)
|
||||
def symmetric(self):
|
||||
""" Symmetrize the correlator around x0=0."""
|
||||
if self.T % 2 != 0:
|
||||
|
@ -185,28 +239,72 @@ class Corr:
|
|||
raise Exception("Corr could not be symmetrized: No redundant values")
|
||||
return Corr(newcontent, prange=self.prange)
|
||||
|
||||
# This method will symmetrice the matrices and therefore make them positive definit.
|
||||
def smearing_symmetric(self):
|
||||
"""Symmetrizes the matrices and therefore make them positive definite."""
|
||||
if self.N > 1:
|
||||
transposed = [None if (G is None) else G.T for G in self.content]
|
||||
return 0.5 * (Corr(transposed) + self)
|
||||
if self.N == 1:
|
||||
raise Exception("Trying to symmetrize a smearing matrix, that already has N=1.")
|
||||
|
||||
# We also include a simple GEVP method based on Scipy.linalg
|
||||
def GEVP(self, t0, ts, state=1):
|
||||
if (self.content[t0] is None) or (self.content[ts] is None):
|
||||
raise Exception("Corr not defined at t0/ts")
|
||||
G0, Gt = np.empty([self.N, self.N], dtype="double"), np.empty([self.N, self.N], dtype="double")
|
||||
for i in range(self.N):
|
||||
for j in range(self.N):
|
||||
G0[i, j] = self.content[t0][i, j].value
|
||||
Gt[i, j] = self.content[ts][i, j].value
|
||||
def GEVP(self, t0, ts=None, state=0, sorted_list=None):
|
||||
"""Solve the general eigenvalue problem on the current correlator
|
||||
|
||||
sp_val, sp_vec = scipy.linalg.eig(Gt, G0)
|
||||
sp_vec = sp_vec[:, np.argsort(sp_val)[-state]] # We only want the eigenvector belonging to the selected state
|
||||
sp_vec = sp_vec / np.sqrt(sp_vec @ sp_vec)
|
||||
return sp_vec
|
||||
Parameters
|
||||
----------
|
||||
t0 : int
|
||||
The time t0 for G(t) v = lambda G(t_0) v
|
||||
ts : int
|
||||
fixed time for G(t_s) v = lambda G(t_0) v if sorted_list=None
|
||||
If sorted_list="Eigenvector" it gives a reference point for the sorting method.
|
||||
state : int
|
||||
The state one is interested in, ordered by energy. The lowest state is zero.
|
||||
sorted_list : string
|
||||
if this argument is set, a list of vectors (len=self.T) is returned. If it is left as None, only one vector is returned.
|
||||
"Eigenvalue" - The eigenvector is chosen according to which einvenvalue it belongs individually on every timeslice.
|
||||
"Eigenvector" - Use the method described in arXiv:2004.10472 [hep-lat] to find the set of v(t) belonging to the state.
|
||||
The reference state is identified by its eigenvalue at t=ts
|
||||
"""
|
||||
if sorted_list is None:
|
||||
if (ts is None):
|
||||
raise Exception("ts is required if sorted_list=None")
|
||||
if (self.content[t0] is None) or (self.content[ts] is None):
|
||||
raise Exception("Corr not defined at t0/ts")
|
||||
G0, Gt = np.empty([self.N, self.N], dtype="double"), np.empty([self.N, self.N], dtype="double")
|
||||
for i in range(self.N):
|
||||
for j in range(self.N):
|
||||
G0[i, j] = self.content[t0][i, j].value
|
||||
Gt[i, j] = self.content[ts][i, j].value
|
||||
|
||||
sp_vecs = _GEVP_solver(Gt, G0)
|
||||
sp_vec = sp_vecs[state]
|
||||
return sp_vec
|
||||
else:
|
||||
|
||||
all_vecs = []
|
||||
for t in range(self.T):
|
||||
try:
|
||||
G0, Gt = np.empty([self.N, self.N], dtype="double"), np.empty([self.N, self.N], dtype="double")
|
||||
for i in range(self.N):
|
||||
for j in range(self.N):
|
||||
G0[i, j] = self.content[t0][i, j].value
|
||||
Gt[i, j] = self.content[t][i, j].value
|
||||
|
||||
sp_vecs = _GEVP_solver(Gt, G0)
|
||||
if sorted_list == "Eigenvalue":
|
||||
sp_vec = sp_vecs[state]
|
||||
all_vecs.append(sp_vec)
|
||||
else:
|
||||
all_vecs.append(sp_vecs)
|
||||
except Exception:
|
||||
all_vecs.append(None)
|
||||
if sorted_list == "Eigenvector":
|
||||
if (ts is None):
|
||||
raise Exception("ts is required for the Eigenvector sorting method.")
|
||||
all_vecs = _sort_vectors(all_vecs, ts)
|
||||
all_vecs = [a[state] for a in all_vecs]
|
||||
|
||||
return all_vecs
|
||||
|
||||
def Eigenvalue(self, t0, state=1):
|
||||
G = self.smearing_symmetric()
|
||||
|
@ -217,13 +315,57 @@ class Corr:
|
|||
LTi = inv(LT)
|
||||
newcontent = []
|
||||
for t in range(self.T):
|
||||
Gt = G.content[t]
|
||||
M = Li @ Gt @ LTi
|
||||
eigenvalues = eigh(M)[0]
|
||||
eigenvalue = eigenvalues[-state]
|
||||
newcontent.append(eigenvalue)
|
||||
if self.content[t] is None:
|
||||
newcontent.append(None)
|
||||
else:
|
||||
Gt = G.content[t]
|
||||
M = Li @ Gt @ LTi
|
||||
eigenvalues = eigh(M)[0]
|
||||
eigenvalue = eigenvalues[-state]
|
||||
newcontent.append(eigenvalue)
|
||||
return Corr(newcontent)
|
||||
|
||||
def Hankel(self, N, periodic=False):
|
||||
"""Constructs an NxN Hankel matrix
|
||||
|
||||
c(t) c(t+1) ... c(t+n-1)
|
||||
c(t+1) c(t+2) ... c(t+n)
|
||||
.................
|
||||
c(t+(n-1)) c(t+n) ... c(t+2(n-1))
|
||||
|
||||
Parameters
|
||||
----------
|
||||
N : int
|
||||
Dimension of the Hankel matrix
|
||||
periodic : bool, optional
|
||||
determines whether the matrix is extended periodically
|
||||
"""
|
||||
|
||||
if self.N != 1:
|
||||
raise Exception("Multi-operator Prony not implemented!")
|
||||
|
||||
array = np.empty([N, N], dtype="object")
|
||||
new_content = []
|
||||
for t in range(self.T):
|
||||
new_content.append(array.copy())
|
||||
|
||||
def wrap(i):
|
||||
if i >= self.T:
|
||||
return i - self.T
|
||||
return i
|
||||
|
||||
for t in range(self.T):
|
||||
for i in range(N):
|
||||
for j in range(N):
|
||||
if periodic:
|
||||
new_content[t][i, j] = self.content[wrap(t + i + j)][0]
|
||||
elif (t + i + j) >= self.T:
|
||||
new_content[t] = None
|
||||
else:
|
||||
new_content[t][i, j] = self.content[t + i + j][0]
|
||||
|
||||
return Corr(new_content)
|
||||
|
||||
def roll(self, dt):
|
||||
"""Periodically shift the correlator by dt timeslices
|
||||
|
||||
|
@ -236,7 +378,7 @@ class Corr:
|
|||
|
||||
def reverse(self):
|
||||
"""Reverse the time ordering of the Corr"""
|
||||
return Corr(self.content[::-1])
|
||||
return Corr(self.content[:: -1])
|
||||
|
||||
def correlate(self, partner):
|
||||
"""Correlate the correlator with another correlator or Obs
|
||||
|
@ -258,7 +400,7 @@ class Corr:
|
|||
new_content.append(None)
|
||||
else:
|
||||
new_content.append(np.array([correlate(o, partner.content[x0][0]) for o in t_slice]))
|
||||
elif isinstance(partner, Obs):
|
||||
elif isinstance(partner, Obs): # Should this include CObs?
|
||||
new_content.append(np.array([correlate(o, partner) for o in t_slice]))
|
||||
else:
|
||||
raise Exception("Can only correlate with an Obs or a Corr.")
|
||||
|
@ -312,25 +454,16 @@ class Corr:
|
|||
|
||||
return (self + T_partner) / 2
|
||||
|
||||
def deriv(self, symmetric=True):
|
||||
def deriv(self, variant="symmetric"):
|
||||
"""Return the first derivative of the correlator with respect to x0.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
symmetric : bool
|
||||
decides whether symmetric of simple finite differences are used. Default: True
|
||||
variant : str
|
||||
decides which definition of the finite differences derivative is used.
|
||||
Available choices: symmetric, forward, backward, improved, default: symmetric
|
||||
"""
|
||||
if not symmetric:
|
||||
newcontent = []
|
||||
for t in range(self.T - 1):
|
||||
if (self.content[t] is None) or (self.content[t + 1] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append(self.content[t + 1] - self.content[t])
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("Derivative is undefined at all timeslices")
|
||||
return Corr(newcontent, padding_back=1)
|
||||
if symmetric:
|
||||
if variant == "symmetric":
|
||||
newcontent = []
|
||||
for t in range(1, self.T - 1):
|
||||
if (self.content[t - 1] is None) or (self.content[t + 1] is None):
|
||||
|
@ -339,19 +472,71 @@ class Corr:
|
|||
newcontent.append(0.5 * (self.content[t + 1] - self.content[t - 1]))
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception('Derivative is undefined at all timeslices')
|
||||
return Corr(newcontent, padding_back=1, padding_front=1)
|
||||
return Corr(newcontent, padding=[1, 1])
|
||||
elif variant == "forward":
|
||||
newcontent = []
|
||||
for t in range(self.T - 1):
|
||||
if (self.content[t] is None) or (self.content[t + 1] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append(self.content[t + 1] - self.content[t])
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("Derivative is undefined at all timeslices")
|
||||
return Corr(newcontent, padding=[0, 1])
|
||||
elif variant == "backward":
|
||||
newcontent = []
|
||||
for t in range(1, self.T):
|
||||
if (self.content[t - 1] is None) or (self.content[t] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append(self.content[t] - self.content[t - 1])
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("Derivative is undefined at all timeslices")
|
||||
return Corr(newcontent, padding=[1, 0])
|
||||
elif variant == "improved":
|
||||
newcontent = []
|
||||
for t in range(2, self.T - 2):
|
||||
if (self.content[t - 2] is None) or (self.content[t - 1] is None) or (self.content[t + 1] is None) or (self.content[t + 2] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append((1 / 12) * (self.content[t - 2] - 8 * self.content[t - 1] + 8 * self.content[t + 1] - self.content[t + 2]))
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception('Derivative is undefined at all timeslices')
|
||||
return Corr(newcontent, padding=[2, 2])
|
||||
else:
|
||||
raise Exception("Unknown variant.")
|
||||
|
||||
def second_deriv(self):
|
||||
"""Return the second derivative of the correlator with respect to x0."""
|
||||
newcontent = []
|
||||
for t in range(1, self.T - 1):
|
||||
if (self.content[t - 1] is None) or (self.content[t + 1] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append((self.content[t + 1] - 2 * self.content[t] + self.content[t - 1]))
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("Derivative is undefined at all timeslices")
|
||||
return Corr(newcontent, padding_back=1, padding_front=1)
|
||||
def second_deriv(self, variant="symmetric"):
|
||||
"""Return the second derivative of the correlator with respect to x0.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
variant : str
|
||||
decides which definition of the finite differences derivative is used.
|
||||
Available choices: symmetric, improved, default: symmetric
|
||||
"""
|
||||
if variant == "symmetric":
|
||||
newcontent = []
|
||||
for t in range(1, self.T - 1):
|
||||
if (self.content[t - 1] is None) or (self.content[t + 1] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append((self.content[t + 1] - 2 * self.content[t] + self.content[t - 1]))
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("Derivative is undefined at all timeslices")
|
||||
return Corr(newcontent, padding=[1, 1])
|
||||
elif variant == "improved":
|
||||
newcontent = []
|
||||
for t in range(2, self.T - 2):
|
||||
if (self.content[t - 2] is None) or (self.content[t - 1] is None) or (self.content[t] is None) or (self.content[t + 1] is None) or (self.content[t + 2] is None):
|
||||
newcontent.append(None)
|
||||
else:
|
||||
newcontent.append((1 / 12) * (-self.content[t + 2] + 16 * self.content[t + 1] - 30 * self.content[t] + 16 * self.content[t - 1] - self.content[t - 2]))
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("Derivative is undefined at all timeslices")
|
||||
return Corr(newcontent, padding=[2, 2])
|
||||
else:
|
||||
raise Exception("Unknown variant.")
|
||||
|
||||
def m_eff(self, variant='log', guess=1.0):
|
||||
"""Returns the effective mass of the correlator as correlator object
|
||||
|
@ -379,7 +564,7 @@ class Corr:
|
|||
if(all([x is None for x in newcontent])):
|
||||
raise Exception('m_eff is undefined at all timeslices')
|
||||
|
||||
return np.log(Corr(newcontent, padding_back=1))
|
||||
return np.log(Corr(newcontent, padding=[0, 1]))
|
||||
|
||||
elif variant in ['periodic', 'cosh', 'sinh']:
|
||||
if variant in ['periodic', 'cosh']:
|
||||
|
@ -402,7 +587,7 @@ class Corr:
|
|||
if(all([x is None for x in newcontent])):
|
||||
raise Exception('m_eff is undefined at all timeslices')
|
||||
|
||||
return Corr(newcontent, padding_back=1)
|
||||
return Corr(newcontent, padding=[0, 1])
|
||||
|
||||
elif variant == 'arccosh':
|
||||
newcontent = []
|
||||
|
@ -413,7 +598,7 @@ class Corr:
|
|||
newcontent.append((self.content[t + 1] + self.content[t - 1]) / (2 * self.content[t]))
|
||||
if(all([x is None for x in newcontent])):
|
||||
raise Exception("m_eff is undefined at all timeslices")
|
||||
return np.arccosh(Corr(newcontent, padding_back=1, padding_front=1))
|
||||
return np.arccosh(Corr(newcontent, padding=[1, 1]))
|
||||
|
||||
else:
|
||||
raise Exception('Unknown variant.')
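A short usage sketch for the effective mass variants, again with my_corr as a placeholder for a projected correlator:

m_log  = my_corr.m_eff()                   # 'log' variant (default), padding=[0, 1]
m_cosh = my_corr.m_eff(variant='cosh')     # periodic variant, solved numerically using `guess`
m_acsh = my_corr.m_eff(variant='arccosh')  # symmetric variant, padding=[1, 1]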
|
||||
|
@ -434,11 +619,6 @@ class Corr:
|
|||
if self.N != 1:
|
||||
raise Exception("Correlator must be projected before fitting")
|
||||
|
||||
# The default behavior is:
|
||||
# 1 use explicit fitrange
|
||||
# if none is provided, use the range of the corr
|
||||
# if this is also not set, use the whole length of the corr (This could come with a warning!)
|
||||
|
||||
if fitrange is None:
|
||||
if self.prange:
|
||||
fitrange = self.prange
|
||||
|
@ -520,7 +700,7 @@ class Corr:
|
|||
if self.N != 1:
|
||||
raise Exception("Correlator must be projected before plotting")
|
||||
if x_range is None:
|
||||
x_range = [0, self.T]
|
||||
x_range = [0, self.T - 1]
|
||||
|
||||
fig = plt.figure()
|
||||
ax1 = fig.add_subplot(111)
|
||||
|
@ -582,24 +762,45 @@ class Corr:
|
|||
|
||||
return
|
||||
|
||||
def dump(self, filename):
|
||||
"""Dumps the Corr into a pickle file
|
||||
|
||||
def dump(self, filename, datatype="json.gz", **kwargs):
|
||||
"""Dumps the Corr into a file of chosen type
|
||||
Parameters
|
||||
----------
|
||||
filename : str
|
||||
Name of the file
|
||||
Name of the file to be saved.
|
||||
datatype : str
|
||||
Format of the exported file. Supported formats include
|
||||
"json.gz" and "pickle"
|
||||
path : str
|
||||
specifies a custom path for the file (default '.')
|
||||
"""
|
||||
dump_object(self, filename)
|
||||
return
|
||||
if datatype == "json.gz":
|
||||
from .input.json import dump_to_json
|
||||
if 'path' in kwargs:
|
||||
file_name = kwargs.get('path') + '/' + filename
|
||||
else:
|
||||
file_name = filename
|
||||
dump_to_json(self, file_name)
|
||||
elif datatype == "pickle":
|
||||
dump_object(self, filename, **kwargs)
|
||||
else:
|
||||
raise Exception("Unknown datatype " + str(datatype))
|
||||
|
||||
def print(self, range=[0, None]):
|
||||
print(self.__repr__(range))
|
||||
|
||||
def __repr__(self, range=[0, None]):
|
||||
content_string = ""
|
||||
|
||||
content_string += "Corr T=" + str(self.T) + " N=" + str(self.N) + "\n" # +" filled with"+ str(type(self.content[0][0])) there should be a good solution here
|
||||
|
||||
if self.tag is not None:
|
||||
content_string += "Description: " + self.tag + "\n"
|
||||
if self.N != 1:
|
||||
return content_string
|
||||
# This avoids a crash for N>1. I do not know what else to do here. I like the list representation for N==1. We could print only one "smearing" or one matrix. Printing everything will just
|
||||
# be a wall of numbers.
|
||||
|
||||
if range[1]:
|
||||
range[1] += 1
|
||||
content_string += 'x0/a\tCorr(x0/a)\n------------------\n'
|
||||
|
@ -633,7 +834,7 @@ class Corr:
|
|||
newcontent.append(self.content[t] + y.content[t])
|
||||
return Corr(newcontent)
|
||||
|
||||
elif isinstance(y, Obs) or isinstance(y, int) or isinstance(y, float):
|
||||
elif isinstance(y, Obs) or isinstance(y, int) or isinstance(y, float) or isinstance(y, CObs):
|
||||
newcontent = []
|
||||
for t in range(self.T):
|
||||
if (self.content[t] is None):
|
||||
|
@ -656,7 +857,7 @@ class Corr:
|
|||
newcontent.append(self.content[t] * y.content[t])
|
||||
return Corr(newcontent)
|
||||
|
||||
elif isinstance(y, Obs) or isinstance(y, int) or isinstance(y, float):
|
||||
elif isinstance(y, Obs) or isinstance(y, int) or isinstance(y, float) or isinstance(y, CObs):
|
||||
newcontent = []
|
||||
for t in range(self.T):
|
||||
if (self.content[t] is None):
|
||||
|
@ -689,9 +890,14 @@ class Corr:
|
|||
raise Exception("Division returns completely undefined correlator")
|
||||
return Corr(newcontent)
|
||||
|
||||
elif isinstance(y, Obs):
|
||||
if y.value == 0:
|
||||
raise Exception('Division by zero will return undefined correlator')
|
||||
elif isinstance(y, Obs) or isinstance(y, CObs):
|
||||
if isinstance(y, Obs):
|
||||
if y.value == 0:
|
||||
raise Exception('Division by zero will return undefined correlator')
|
||||
if isinstance(y, CObs):
|
||||
if y.is_zero():
|
||||
raise Exception('Division by zero will return undefined correlator')
|
||||
|
||||
newcontent = []
|
||||
for t in range(self.T):
|
||||
if (self.content[t] is None):
|
||||
|
@ -721,7 +927,7 @@ class Corr:
|
|||
return self + (-y)
|
||||
|
||||
def __pow__(self, y):
|
||||
if isinstance(y, Obs) or isinstance(y, int) or isinstance(y, float):
|
||||
if isinstance(y, Obs) or isinstance(y, int) or isinstance(y, float) or isinstance(y, CObs):
|
||||
newcontent = [None if (item is None) else item**y for item in self.content]
|
||||
return Corr(newcontent, prange=self.prange)
|
||||
else:
|
||||
|
@ -802,3 +1008,70 @@ class Corr:
|
|||
|
||||
def __rtruediv__(self, y):
|
||||
return (self / y) ** (-1)
|
||||
|
||||
@property
|
||||
def real(self):
|
||||
def return_real(obs_OR_cobs):
|
||||
if isinstance(obs_OR_cobs, CObs):
|
||||
return obs_OR_cobs.real
|
||||
else:
|
||||
return obs_OR_cobs
|
||||
|
||||
return self._apply_func_to_corr(return_real)
|
||||
|
||||
@property
|
||||
def imag(self):
|
||||
def return_imag(obs_OR_cobs):
|
||||
if isinstance(obs_OR_cobs, CObs):
|
||||
return obs_OR_cobs.imag
|
||||
else:
|
||||
return obs_OR_cobs * 0 # So it stays the right type
|
||||
|
||||
return self._apply_func_to_corr(return_imag)
|
||||
|
||||
|
||||
def _sort_vectors(vec_set, ts):
|
||||
"""Helper function used to find a set of Eigenvectors consistent over all timeslices"""
|
||||
reference_sorting = np.array(vec_set[ts])
|
||||
N = reference_sorting.shape[0]
|
||||
sorted_vec_set = []
|
||||
for t in range(len(vec_set)):
|
||||
if vec_set[t] is None:
|
||||
sorted_vec_set.append(None)
|
||||
elif not t == ts:
|
||||
perms = permutation([i for i in range(N)])
|
||||
best_score = 0
|
||||
for perm in perms:
|
||||
current_score = 1
|
||||
for k in range(N):
|
||||
new_sorting = reference_sorting.copy()
|
||||
new_sorting[perm[k], :] = vec_set[t][k]
|
||||
current_score *= abs(np.linalg.det(new_sorting))
|
||||
if current_score > best_score:
|
||||
best_score = current_score
|
||||
best_perm = perm
|
||||
sorted_vec_set.append([vec_set[t][k] for k in best_perm])
|
||||
else:
|
||||
sorted_vec_set.append(vec_set[t])
|
||||
|
||||
return sorted_vec_set
|
||||
|
||||
|
||||
def permutation(lst): # Shamelessly copied
|
||||
if len(lst) == 1:
|
||||
return [lst]
|
||||
ll = []
|
||||
for i in range(len(lst)):
|
||||
m = lst[i]
|
||||
remLst = lst[:i] + lst[i + 1:]
|
||||
# Generating all permutations where m is first
|
||||
for p in permutation(remLst):
|
||||
ll.append([m] + p)
|
||||
return ll
|
||||
|
||||
|
||||
def _GEVP_solver(Gt, G0): # Just so normalization and sorting do not need to be repeated. Here we could later put in some checks
|
||||
sp_val, sp_vecs = scipy.linalg.eig(Gt, G0)
|
||||
sp_vecs = [sp_vecs[:, np.argsort(sp_val)[-i]] for i in range(1, sp_vecs.shape[0] + 1)]
|
||||
sp_vecs = [v / np.sqrt((v.T @ G0 @ v)) for v in sp_vecs]
|
||||
return sp_vecs
|
||||
|
|
|
@ -22,6 +22,30 @@ identity = np.array(
|
|||
dtype=complex)
|
||||
|
||||
|
||||
def epsilon_tensor(i, j, k):
|
||||
"""Rank-3 epsilon tensor
|
||||
|
||||
Based on https://codegolf.stackexchange.com/a/160375
|
||||
"""
|
||||
test_set = set((i, j, k))
|
||||
if not (test_set <= set((1, 2, 3)) or test_set <= set((0, 1, 2))):
|
||||
raise Exception("Unexpected input", i, j, k)
|
||||
|
||||
return (i - j) * (j - k) * (k - i) / 2
|
||||
|
||||
|
||||
def epsilon_tensor_rank4(i, j, k, o):
|
||||
"""Rank-4 epsilon tensor
|
||||
|
||||
Extension of https://codegolf.stackexchange.com/a/160375
|
||||
"""
|
||||
test_set = set((i, j, k, o))
|
||||
if not (test_set <= set((1, 2, 3, 4)) or test_set <= set((0, 1, 2, 3))):
|
||||
raise Exception("Unexpected input", i, j, k, o)
|
||||
|
||||
return (i - j) * (j - k) * (k - i) * (i - o) * (j - o) * (o - k) / 12
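A few worked values of the closed-form expressions above; the module path is assumed from the surrounding Grid_gamma context:

from pyerrors.dirac import epsilon_tensor, epsilon_tensor_rank4  # module path assumed

assert epsilon_tensor(1, 2, 3) == 1.0    # even permutation
assert epsilon_tensor(2, 1, 3) == -1.0   # odd permutation
assert epsilon_tensor(1, 1, 2) == 0.0    # repeated index
assert epsilon_tensor_rank4(1, 2, 3, 4) == 1.0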
|
||||
|
||||
|
||||
def Grid_gamma(gamma_tag):
|
||||
"""Returns gamma matrix in Grid labeling."""
|
||||
if gamma_tag == 'Identity':
|
||||
|
|
175
pyerrors/fits.py
|
@ -1,3 +1,4 @@
|
|||
import gc
|
||||
from collections.abc import Sequence
|
||||
import warnings
|
||||
import numpy as np
|
||||
|
@ -7,6 +8,7 @@ import scipy.stats
|
|||
import matplotlib.pyplot as plt
|
||||
from matplotlib import gridspec
|
||||
from scipy.odr import ODR, Model, RealData
|
||||
from scipy.stats import chi2
|
||||
import iminuit
|
||||
from autograd import jacobian
|
||||
from autograd import elementwise_grad as egrad
|
||||
|
@ -45,6 +47,8 @@ class Fit_result(Sequence):
|
|||
my_str += 'residual variance = ' + f'{self.residual_variance:2.6f}' + '\n'
|
||||
if hasattr(self, 'chisquare_by_expected_chisquare'):
|
||||
my_str += '\u03C7\u00b2/\u03C7\u00b2exp = ' + f'{self.chisquare_by_expected_chisquare:2.6f}' + '\n'
|
||||
if hasattr(self, 'p_value'):
|
||||
my_str += 'p-value = ' + f'{self.p_value:2.4f}' + '\n'
|
||||
my_str += 'Fit parameters:\n'
|
||||
for i_par, par in enumerate(self.fit_parameters):
|
||||
my_str += str(i_par) + '\t' + ' ' * int(par >= 0) + str(par).rjust(int(par < 0.0)) + '\n'
|
||||
|
@ -92,7 +96,7 @@ def least_squares(x, y, func, priors=None, silent=False, **kwargs):
|
|||
initial_guess : list
|
||||
can provide an initial guess for the input parameters. Relevant for
|
||||
non-linear fits with many parameters.
|
||||
method : str
|
||||
method : str, optional
|
||||
can be used to choose an alternative method for the minimization of chisquare.
|
||||
The possible methods are the ones which can be used for scipy.optimize.minimize and
|
||||
migrad of iminuit. If no method is specified, Levenberg-Marquardt is used.
|
||||
|
@ -109,8 +113,6 @@ def least_squares(x, y, func, priors=None, silent=False, **kwargs):
|
|||
correlated_fit : bool
|
||||
If true, use the full correlation matrix in the definition of the chisquare
|
||||
(only works for prior==None and when no method is given, at the moment).
|
||||
const_par : list, optional
|
||||
List of N Obs that are used to constrain the last N fit parameters of func.
|
||||
'''
|
||||
if priors is not None:
|
||||
return _prior_fit(x, y, func, priors, silent=silent, **kwargs)
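A hedged usage sketch for least_squares under the assumptions that the model is written with autograd-compatible numpy and that pseudo_Obs is only used to fabricate toy data; all names and values below are made up for illustration:

import numpy as np
import autograd.numpy as anp
import pyerrors as pe

def model(a, x):
    return a[0] * anp.exp(-a[1] * x)  # two fit parameters, inferred automatically

x_vals = np.arange(1, 8)
y_obs = [pe.pseudo_Obs(np.exp(-0.3 * x), 0.01, 'testens') for x in x_vals]  # toy data
fit_res = pe.fits.least_squares(x_vals, y_obs, model, method="migrad")
print(fit_res)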
|
||||
|
@ -156,8 +158,6 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
|
|||
corrected by effects caused by correlated input data.
|
||||
This can take a while as the full correlation matrix
|
||||
has to be calculated (default False).
|
||||
const_par : list, optional
|
||||
List of N Obs that are used to constrain the last N fit parameters of func.
|
||||
|
||||
Based on the orthogonal distance regression module of scipy
|
||||
'''
|
||||
|
@ -173,17 +173,6 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
|
|||
if not callable(func):
|
||||
raise TypeError('func has to be a function.')
|
||||
|
||||
func_aug = func
|
||||
if 'const_par' in kwargs:
|
||||
const_par = kwargs['const_par']
|
||||
if isinstance(const_par, Obs):
|
||||
const_par = [const_par]
|
||||
|
||||
def func(p, x):
|
||||
return func_aug(np.concatenate((p, [o.value for o in const_par])), x)
|
||||
else:
|
||||
const_par = []
|
||||
|
||||
for i in range(25):
|
||||
try:
|
||||
func(np.arange(i), x.T[0])
|
||||
|
@ -194,9 +183,7 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
|
|||
|
||||
n_parms = i
|
||||
if not silent:
|
||||
print('Fit with', n_parms, 'parameters')
|
||||
if(len(const_par) > 0):
|
||||
print('\t and %d constrained parameter%s' % (len(const_par), 's' if len(const_par) > 1 else ''), const_par)
|
||||
print('Fit with', n_parms, 'parameter' + 's' * (n_parms > 1))
|
||||
|
||||
x_f = np.vectorize(lambda o: o.value)(x)
|
||||
dx_f = np.vectorize(lambda o: o.dvalue)(x)
|
||||
|
@ -239,18 +226,12 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
|
|||
raise Exception('The minimization procedure did not converge.')
|
||||
|
||||
m = x_f.size
|
||||
n_parms_aug = n_parms + len(const_par)
|
||||
|
||||
def odr_chisquare(p):
|
||||
model = func(p[:n_parms], p[n_parms:].reshape(x_shape))
|
||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2) + anp.sum(((x_f - p[n_parms:].reshape(x_shape)) / dx_f) ** 2)
|
||||
return chisq
|
||||
|
||||
def odr_chisquare_aug(p):
|
||||
model = func_aug(np.concatenate((p[:n_parms_aug], [o.value for o in const_par])), p[n_parms_aug:].reshape(x_shape))
|
||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2) + anp.sum(((x_f - p[n_parms_aug:].reshape(x_shape)) / dx_f) ** 2)
|
||||
return chisq
|
||||
|
||||
if kwargs.get('expected_chisquare') is True:
|
||||
W = np.diag(1 / np.asarray(np.concatenate((dy_f.ravel(), dx_f.ravel()))))
|
||||
|
||||
|
@ -277,44 +258,40 @@ def total_least_squares(x, y, func, silent=False, **kwargs):
|
|||
print('chisquare/expected_chisquare:',
|
||||
output.chisquare_by_expected_chisquare)
|
||||
|
||||
fitp = np.concatenate((out.beta, [o.value for o in const_par]))
|
||||
hess_inv = np.linalg.pinv(jacobian(jacobian(odr_chisquare_aug))(np.concatenate((fitp, out.xplus.ravel()))))
|
||||
fitp = out.beta
|
||||
hess_inv = np.linalg.pinv(jacobian(jacobian(odr_chisquare))(np.concatenate((fitp, out.xplus.ravel()))))
|
||||
|
||||
def odr_chisquare_compact_x(d):
|
||||
model = func_aug(d[:n_parms_aug], d[n_parms_aug:n_parms_aug + m].reshape(x_shape))
|
||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2) + anp.sum(((d[n_parms_aug + m:].reshape(x_shape) - d[n_parms_aug:n_parms_aug + m].reshape(x_shape)) / dx_f) ** 2)
|
||||
model = func(d[:n_parms], d[n_parms:n_parms + m].reshape(x_shape))
|
||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2) + anp.sum(((d[n_parms + m:].reshape(x_shape) - d[n_parms:n_parms + m].reshape(x_shape)) / dx_f) ** 2)
|
||||
return chisq
|
||||
|
||||
jac_jac_x = jacobian(jacobian(odr_chisquare_compact_x))(np.concatenate((fitp, out.xplus.ravel(), x_f.ravel())))
|
||||
|
||||
deriv_x = -hess_inv @ jac_jac_x[:n_parms_aug + m, n_parms_aug + m:]
|
||||
deriv_x = -hess_inv @ jac_jac_x[:n_parms + m, n_parms + m:]
|
||||
|
||||
def odr_chisquare_compact_y(d):
|
||||
model = func_aug(d[:n_parms_aug], d[n_parms_aug:n_parms_aug + m].reshape(x_shape))
|
||||
chisq = anp.sum(((d[n_parms_aug + m:] - model) / dy_f) ** 2) + anp.sum(((x_f - d[n_parms_aug:n_parms_aug + m].reshape(x_shape)) / dx_f) ** 2)
|
||||
model = func(d[:n_parms], d[n_parms:n_parms + m].reshape(x_shape))
|
||||
chisq = anp.sum(((d[n_parms + m:] - model) / dy_f) ** 2) + anp.sum(((x_f - d[n_parms:n_parms + m].reshape(x_shape)) / dx_f) ** 2)
|
||||
return chisq
|
||||
|
||||
jac_jac_y = jacobian(jacobian(odr_chisquare_compact_y))(np.concatenate((fitp, out.xplus.ravel(), y_f)))
|
||||
|
||||
deriv_y = -hess_inv @ jac_jac_y[:n_parms_aug + m, n_parms_aug + m:]
|
||||
deriv_y = -hess_inv @ jac_jac_y[:n_parms + m, n_parms + m:]
|
||||
|
||||
result = []
|
||||
for i in range(n_parms):
|
||||
result.append(derived_observable(lambda my_var, **kwargs: (my_var[0] + np.finfo(np.float64).eps) / (x.ravel()[0].value + np.finfo(np.float64).eps) * out.beta[i], list(x.ravel()) + list(y), man_grad=list(deriv_x[i]) + list(deriv_y[i])))
|
||||
|
||||
output.fit_parameters = result + const_par
|
||||
output.fit_parameters = result
|
||||
|
||||
output.odr_chisquare = odr_chisquare(np.concatenate((out.beta, out.xplus.ravel())))
|
||||
output.dof = x.shape[-1] - n_parms
|
||||
output.p_value = 1 - chi2.cdf(output.odr_chisquare, output.dof)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def prior_fit(x, y, func, priors, silent=False, **kwargs):
|
||||
warnings.warn("prior_fit renamed to least_squares", DeprecationWarning)
|
||||
return least_squares(x, y, func, priors=priors, silent=silent, **kwargs)
|
||||
|
||||
|
||||
def _prior_fit(x, y, func, priors, silent=False, **kwargs):
|
||||
output = Fit_result()
|
||||
|
||||
|
@ -357,7 +334,7 @@ def _prior_fit(x, y, func, priors, silent=False, **kwargs):
|
|||
output.priors = loc_priors
|
||||
|
||||
if not silent:
|
||||
print('Fit with', n_parms, 'parameters')
|
||||
print('Fit with', n_parms, 'parameter' + 's' * (n_parms > 1))
|
||||
|
||||
y_f = [o.value for o in y]
|
||||
dy_f = [o.dvalue for o in y]
|
||||
|
@ -433,11 +410,6 @@ def _prior_fit(x, y, func, priors, silent=False, **kwargs):
|
|||
return output
|
||||
|
||||
|
||||
def standard_fit(x, y, func, silent=False, **kwargs):
|
||||
warnings.warn("standard_fit renamed to least_squares", DeprecationWarning)
|
||||
return least_squares(x, y, func, silent=silent, **kwargs)
|
||||
|
||||
|
||||
def _standard_fit(x, y, func, silent=False, **kwargs):
|
||||
|
||||
output = Fit_result()
|
||||
|
@ -455,17 +427,6 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
|
|||
if not callable(func):
|
||||
raise TypeError('func has to be a function.')
|
||||
|
||||
func_aug = func
|
||||
if 'const_par' in kwargs:
|
||||
const_par = kwargs['const_par']
|
||||
if isinstance(const_par, Obs):
|
||||
const_par = [const_par]
|
||||
|
||||
def func(p, x):
|
||||
return func_aug(np.concatenate((p, [o.value for o in const_par])), x)
|
||||
else:
|
||||
const_par = []
|
||||
|
||||
for i in range(25):
|
||||
try:
|
||||
func(np.arange(i), x.T[0])
|
||||
|
@ -477,9 +438,7 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
|
|||
n_parms = i
|
||||
|
||||
if not silent:
|
||||
print('Fit with', n_parms, 'parameters')
|
||||
if(len(const_par) > 0):
|
||||
print('\t and %d constrained parameter%s' % (len(const_par), 's' if len(const_par) > 1 else ''), const_par)
|
||||
print('Fit with', n_parms, 'parameter' + 's' * (n_parms > 1))
|
||||
|
||||
y_f = [o.value for o in y]
|
||||
dy_f = [o.dvalue for o in y]
|
||||
|
@ -512,42 +471,27 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
|
|||
model = func(p, x)
|
||||
chisq = anp.sum(anp.dot(chol_inv, (y_f - model)) ** 2)
|
||||
return chisq
|
||||
|
||||
def chisqfunc_aug(p):
|
||||
model = func_aug(np.concatenate((p, [o.value for o in const_par])), x)
|
||||
chisq = anp.sum(anp.dot(chol_inv, (y_f - model)) ** 2)
|
||||
return chisq
|
||||
|
||||
else:
|
||||
def chisqfunc(p):
|
||||
model = func(p, x)
|
||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2)
|
||||
return chisq
|
||||
|
||||
def chisqfunc_aug(p):
|
||||
model = func_aug(np.concatenate((p, [o.value for o in const_par])), x)
|
||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2)
|
||||
return chisq
|
||||
output.method = kwargs.get('method', 'Levenberg-Marquardt')
|
||||
if not silent:
|
||||
print('Method:', output.method)
|
||||
|
||||
if 'method' in kwargs:
|
||||
output.method = kwargs.get('method')
|
||||
if not silent:
|
||||
print('Method:', kwargs.get('method'))
|
||||
if kwargs.get('method') == 'migrad':
|
||||
fit_result = iminuit.minimize(chisqfunc, x0)
|
||||
fit_result = iminuit.minimize(chisqfunc, fit_result.x)
|
||||
if output.method != 'Levenberg-Marquardt':
|
||||
if output.method == 'migrad':
|
||||
fit_result = iminuit.minimize(chisqfunc, x0, tol=1e-4) # Stopping criterion 0.002 * tol * errordef
|
||||
output.iterations = fit_result.nfev
|
||||
else:
|
||||
fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'))
|
||||
fit_result = scipy.optimize.minimize(chisqfunc, fit_result.x, method=kwargs.get('method'), tol=1e-12)
|
||||
fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'), tol=1e-12)
|
||||
output.iterations = fit_result.nit
|
||||
|
||||
chisquare = fit_result.fun
|
||||
|
||||
output.iterations = fit_result.nit
|
||||
else:
|
||||
output.method = 'Levenberg-Marquardt'
|
||||
if not silent:
|
||||
print('Method: Levenberg-Marquardt')
|
||||
|
||||
if kwargs.get('correlated_fit') is True:
|
||||
def chisqfunc_residuals(p):
|
||||
model = func(p, x)
|
||||
|
@ -591,34 +535,34 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
|
|||
print('chisquare/expected_chisquare:',
|
||||
output.chisquare_by_expected_chisquare)
|
||||
|
||||
fitp = np.concatenate((fit_result.x, [o.value for o in const_par]))
|
||||
hess_inv = np.linalg.pinv(jacobian(jacobian(chisqfunc_aug))(fitp))
|
||||
fitp = fit_result.x
|
||||
hess_inv = np.linalg.pinv(jacobian(jacobian(chisqfunc))(fitp))
|
||||
|
||||
n_parms_aug = n_parms + len(const_par)
|
||||
if kwargs.get('correlated_fit') is True:
|
||||
def chisqfunc_compact(d):
|
||||
model = func_aug(d[:n_parms_aug], x)
|
||||
chisq = anp.sum(anp.dot(chol_inv, (d[n_parms_aug:] - model)) ** 2)
|
||||
model = func(d[:n_parms], x)
|
||||
chisq = anp.sum(anp.dot(chol_inv, (d[n_parms:] - model)) ** 2)
|
||||
return chisq
|
||||
|
||||
else:
|
||||
def chisqfunc_compact(d):
|
||||
model = func_aug(d[:n_parms_aug], x)
|
||||
chisq = anp.sum(((d[n_parms_aug:] - model) / dy_f) ** 2)
|
||||
model = func(d[:n_parms], x)
|
||||
chisq = anp.sum(((d[n_parms:] - model) / dy_f) ** 2)
|
||||
return chisq
|
||||
|
||||
jac_jac = jacobian(jacobian(chisqfunc_compact))(np.concatenate((fitp, y_f)))
|
||||
|
||||
deriv = -hess_inv @ jac_jac[:n_parms_aug, n_parms_aug:]
|
||||
deriv = -hess_inv @ jac_jac[:n_parms, n_parms:]
|
||||
|
||||
result = []
|
||||
for i in range(n_parms):
|
||||
result.append(derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (y[0].value + np.finfo(np.float64).eps) * fit_result.x[i], list(y), man_grad=list(deriv[i])))
|
||||
|
||||
output.fit_parameters = result + const_par
|
||||
output.fit_parameters = result
|
||||
|
||||
output.chisquare = chisqfunc(fit_result.x)
|
||||
output.dof = x.shape[-1] - n_parms
|
||||
output.p_value = 1 - chi2.cdf(output.chisquare, output.dof)
|
||||
|
||||
if kwargs.get('resplot') is True:
|
||||
residual_plot(x, y, func, result)
|
||||
|
@ -629,11 +573,6 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
|
|||
return output
|
||||
|
||||
|
||||
def odr_fit(x, y, func, silent=False, **kwargs):
|
||||
warnings.warn("odr_fit renamed to total_least_squares", DeprecationWarning)
|
||||
return total_least_squares(x, y, func, silent=silent, **kwargs)
|
||||
|
||||
|
||||
def fit_lin(x, y, **kwargs):
|
||||
"""Performs a linear fit to y = n + m * x and returns two Obs n, m.
|
||||
|
||||
|
@ -703,7 +642,7 @@ def residual_plot(x, y, func, fit_res):
|
|||
ax1.plot(x, residuals, 'ko', ls='none', markersize=5)
|
||||
ax1.tick_params(direction='out')
|
||||
ax1.tick_params(axis="x", bottom=True, top=True, labelbottom=True)
|
||||
ax1.axhline(y=0.0, ls='--', color='k')
|
||||
ax1.axhline(y=0.0, ls='--', color='k', marker=" ")
|
||||
ax1.fill_between(x_samples, -1.0, 1.0, alpha=0.1, facecolor='k')
|
||||
ax1.set_xlim([xstart, xstop])
|
||||
ax1.set_ylabel('Residuals')
|
||||
|
@ -740,3 +679,43 @@ def error_band(x, func, beta):
|
|||
err = np.array(err)
|
||||
|
||||
return err
|
||||
|
||||
|
||||
def ks_test(objects=None):
|
||||
"""Performs a Kolmogorov–Smirnov test for the p-values of all fit object.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
objects : list
|
||||
List of fit results to include in the analysis (optional).
|
||||
"""
|
||||
|
||||
if objects is None:
|
||||
obs_list = []
|
||||
for obj in gc.get_objects():
|
||||
if isinstance(obj, Fit_result):
|
||||
obs_list.append(obj)
|
||||
else:
|
||||
obs_list = objects
|
||||
|
||||
p_values = [o.p_value for o in obs_list]
|
||||
|
||||
bins = len(p_values)
|
||||
x = np.arange(0, 1.001, 0.001)
|
||||
plt.plot(x, x, 'k', zorder=1)
|
||||
plt.xlim(0, 1)
|
||||
plt.ylim(0, 1)
|
||||
plt.xlabel('p-value')
|
||||
plt.ylabel('Cumulative probability')
|
||||
plt.title(str(bins) + ' p-values')
|
||||
|
||||
n = np.arange(1, bins + 1) / np.float64(bins)
|
||||
Xs = np.sort(p_values)
|
||||
plt.step(Xs, n)
|
||||
diffs = n - Xs
|
||||
loc_max_diff = np.argmax(np.abs(diffs))
|
||||
loc = Xs[loc_max_diff]
|
||||
plt.annotate('', xy=(loc, loc), xytext=(loc, loc + diffs[loc_max_diff]), arrowprops=dict(arrowstyle='<->', shrinkA=0, shrinkB=0))
|
||||
plt.draw()
|
||||
|
||||
print(scipy.stats.kstest(p_values, 'uniform'))
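A minimal sketch of how ks_test could be called; fit_res_1 and fit_res_2 stand in for Fit_result objects produced by earlier fits:

import pyerrors as pe

pe.fits.ks_test([fit_res_1, fit_res_2])  # explicit list of fit results
pe.fits.ks_test()                        # or scan all Fit_result objects in memory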
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import os
|
||||
import h5py
|
||||
import numpy as np
|
||||
from collections import Counter
|
||||
from ..obs import Obs, CObs
|
||||
from ..correlators import Corr
|
||||
|
||||
|
@ -32,6 +33,10 @@ def _get_files(path, filestem, idl):
|
|||
filtered_files.append(line)
|
||||
cnfg_numbers.append(no)
|
||||
|
||||
if idl:
|
||||
if Counter(list(idl)) != Counter(cnfg_numbers):
|
||||
raise Exception("Not all configurations specified in idl found, configurations " + str(list(Counter(list(idl)) - Counter(cnfg_numbers))) + " are missing.")
|
||||
|
||||
# Check that configurations are evenly spaced
|
||||
dc = np.unique(np.diff(cnfg_numbers))
|
||||
if np.any(dc < 0):
|
||||
|
@ -58,16 +63,13 @@ def read_meson_hd5(path, filestem, ens_id, meson='meson_0', tree='meson', idl=No
|
|||
meson : str
|
||||
label of the meson to be extracted, standard value meson_0 which
|
||||
corresponds to the pseudoscalar pseudoscalar two-point function.
|
||||
tree : str
|
||||
Label of the upmost directory in the hdf5 file, default 'meson'
|
||||
for outputs of the Meson module. Can be altered to read input
|
||||
from other modules with similar structures.
|
||||
idl : range
|
||||
If specified only configurations in the given range are read in.
|
||||
"""
|
||||
|
||||
files, idx = _get_files(path, filestem, idl)
|
||||
|
||||
tree = meson.rsplit('_')[0]
|
||||
corr_data = []
|
||||
infos = []
|
||||
for hd5_file in files:
|
||||
|
@ -155,7 +157,7 @@ def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None):
|
|||
raw_data = file['ExternalLeg/corr'][0][0].view('complex')
|
||||
corr_data.append(raw_data)
|
||||
if mom is None:
|
||||
mom = np.array(str(file['ExternalLeg/info'].attrs['pIn'])[3:-2].strip().split(' '), dtype=int)
|
||||
mom = np.array(str(file['ExternalLeg/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
|
||||
file.close()
|
||||
corr_data = np.array(corr_data)
|
||||
|
||||
|
@ -200,9 +202,9 @@ def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
|
|||
raw_data = file['Bilinear/Bilinear_' + str(i) + '/corr'][0][0].view('complex')
|
||||
corr_data[name].append(raw_data)
|
||||
if mom_in is None:
|
||||
mom_in = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(' '), dtype=int)
|
||||
mom_in = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
|
||||
if mom_out is None:
|
||||
mom_out = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(' '), dtype=int)
|
||||
mom_out = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)
|
||||
|
||||
file.close()
|
||||
|
||||
|
@ -265,9 +267,9 @@ def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
|
|||
raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
|
||||
corr_data[name].append(raw_data)
|
||||
if mom_in is None:
|
||||
mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(' '), dtype=int)
|
||||
mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
|
||||
if mom_out is None:
|
||||
mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(' '), dtype=int)
|
||||
mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)
|
||||
|
||||
file.close()
|
||||
|
||||
|
|
|
@ -6,8 +6,11 @@ import socket
|
|||
import datetime
|
||||
import platform
|
||||
import warnings
|
||||
import re
|
||||
from ..obs import Obs
|
||||
from ..covobs import Covobs
|
||||
from ..correlators import Corr
|
||||
from ..misc import _assert_equal_properties
|
||||
from .. import version as pyerrorsversion
|
||||
|
||||
|
||||
|
@ -18,8 +21,8 @@ def create_json_string(ol, description='', indent=1):
|
|||
Parameters
|
||||
----------
|
||||
ol : list
|
||||
List of objects that will be exported. At the moments, these objects can be
|
||||
either of: Obs, list, numpy.ndarray.
|
||||
List of objects that will be exported. At the moment, these objects can be
|
||||
either of: Obs, list, numpy.ndarray, Corr.
|
||||
All Obs inside a structure have to be defined on the same set of configurations.
|
||||
description : str
|
||||
Optional string that describes the contents of the json file.
|
||||
|
@ -103,20 +106,6 @@ def create_json_string(ol, description='', indent=1):
|
|||
dl.append(ed)
|
||||
return dl
|
||||
|
||||
def _assert_equal_properties(ol, otype=Obs):
|
||||
for o in ol:
|
||||
if not isinstance(o, otype):
|
||||
raise Exception("Wrong data type in list.")
|
||||
for o in ol[1:]:
|
||||
if not ol[0].is_merged == o.is_merged:
|
||||
raise Exception("All Obs in list have to be defined on the same set of configs.")
|
||||
if not ol[0].reweighted == o.reweighted:
|
||||
raise Exception("All Obs in list have to have the same property 'reweighted'.")
|
||||
if not ol[0].e_content == o.e_content:
|
||||
raise Exception("All Obs in list have to be defined on the same set of configs.")
|
||||
if not ol[0].idl == o.idl:
|
||||
raise Exception("All Obs in list have to be defined on the same set of configurations.")
|
||||
|
||||
def write_Obs_to_dict(o):
|
||||
d = {}
|
||||
d['type'] = 'Obs'
|
||||
|
@ -173,12 +162,44 @@ def create_json_string(ol, description='', indent=1):
|
|||
d['cdata'] = cdata
|
||||
return d
|
||||
|
||||
def _nan_Obs_like(obs):
|
||||
samples = []
|
||||
names = []
|
||||
idl = []
|
||||
for key, value in obs.idl.items():
|
||||
samples.append([np.nan] * len(value))
|
||||
names.append(key)
|
||||
idl.append(value)
|
||||
my_obs = Obs(samples, names, idl)
|
||||
my_obs.reweighted = obs.reweighted
|
||||
my_obs.is_merged = obs.is_merged
|
||||
return my_obs
|
||||
|
||||
def write_Corr_to_dict(my_corr):
|
||||
first_not_none = next(i for i, j in enumerate(my_corr.content) if np.all(j))
|
||||
dummy_array = np.empty((my_corr.N, my_corr.N), dtype=object)
|
||||
dummy_array[:] = _nan_Obs_like(my_corr.content[first_not_none].ravel()[0])
|
||||
content = [o if o is not None else dummy_array for o in my_corr.content]
|
||||
dat = write_Array_to_dict(np.array(content, dtype=object))
|
||||
dat['type'] = 'Corr'
|
||||
corr_meta_data = str(my_corr.tag)
|
||||
if 'tag' in dat.keys():
|
||||
dat['tag'].append(corr_meta_data)
|
||||
else:
|
||||
dat['tag'] = [corr_meta_data]
|
||||
taglist = dat['tag']
|
||||
dat['tag'] = {} # tag is now a dictionary that contains the previous taglist in the key "tag"
|
||||
dat['tag']['tag'] = taglist
|
||||
if my_corr.prange is not None:
|
||||
dat['tag']['prange'] = my_corr.prange
|
||||
return dat
|
||||
|
||||
if not isinstance(ol, list):
|
||||
ol = [ol]
|
||||
|
||||
d = {}
|
||||
d['program'] = 'pyerrors %s' % (pyerrorsversion.__version__)
|
||||
d['version'] = '0.1'
|
||||
d['version'] = '0.2'
|
||||
d['who'] = getpass.getuser()
|
||||
d['date'] = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S %z')
|
||||
d['host'] = socket.gethostname() + ', ' + platform.platform()
|
||||
|
@ -193,6 +214,10 @@ def create_json_string(ol, description='', indent=1):
|
|||
d['obsdata'].append(write_List_to_dict(io))
|
||||
elif isinstance(io, np.ndarray):
|
||||
d['obsdata'].append(write_Array_to_dict(io))
|
||||
elif isinstance(io, Corr):
|
||||
d['obsdata'].append(write_Corr_to_dict(io))
|
||||
else:
|
||||
raise Exception("Unkown datatype.")
|
||||
|
||||
jsonstring = json.dumps(d, indent=indent, cls=my_encoder, ensure_ascii=False)
|
||||
|
||||
|
@ -212,6 +237,7 @@ def create_json_string(ol, description='', indent=1):
|
|||
return '\n'.join(split)
|
||||
|
||||
jsonstring = remove_quotationmarks(jsonstring)
|
||||
jsonstring = jsonstring.replace('nan', 'NaN')
|
||||
return jsonstring
|
||||
|
||||
|
||||
|
@ -221,8 +247,8 @@ def dump_to_json(ol, fname, description='', indent=1, gz=True):
|
|||
Parameters
|
||||
----------
|
||||
ol : list
|
||||
List of objects that will be exported. At the moments, these objects can be
|
||||
either of: Obs, list, numpy.ndarray.
|
||||
List of objects that will be exported. At the moment, these objects can be
|
||||
either of: Obs, list, numpy.ndarray, Corr.
|
||||
All Obs inside a structure have to be defined on the same set of configurations.
|
||||
fname : str
|
||||
Filename of the output file.
|
||||
|
@ -255,7 +281,7 @@ def dump_to_json(ol, fname, description='', indent=1, gz=True):
|
|||
def import_json_string(json_string, verbose=True, full_output=False):
|
||||
"""Reconstruct a list of Obs or structures containing Obs from a json string.
|
||||
|
||||
The following structures are supported: Obs, list, numpy.ndarray
|
||||
The following structures are supported: Obs, list, numpy.ndarray, Corr
|
||||
If the list contains only one element, it is unpacked from the list.
|
||||
|
||||
Parameters
|
||||
|
@ -374,6 +400,33 @@ def import_json_string(json_string, verbose=True, full_output=False):
|
|||
ret[-1].tag = taglist[i]
|
||||
return np.reshape(ret, layout)
|
||||
|
||||
def get_Corr_from_dict(o):
|
||||
if isinstance(o.get('tag'), list): # supports the old way
|
||||
taglist = o.get('tag') # This had to be modified to get the taglist from the dictionary
|
||||
temp_prange = None
|
||||
elif isinstance(o.get('tag'), dict):
|
||||
tagdic = o.get('tag')
|
||||
taglist = tagdic['tag']
|
||||
if 'prange' in tagdic:
|
||||
temp_prange = tagdic['prange']
|
||||
else:
|
||||
temp_prange = None
|
||||
else:
|
||||
raise Exception("The tag is not a list or dict")
|
||||
|
||||
corr_tag = taglist[-1]
|
||||
tmp_o = o
|
||||
tmp_o['tag'] = taglist[:-1]
|
||||
if len(tmp_o['tag']) == 0:
|
||||
del tmp_o['tag']
|
||||
dat = get_Array_from_dict(tmp_o)
|
||||
my_corr = Corr([None if np.isnan(o.ravel()[0].value) else o for o in list(dat)])
|
||||
if corr_tag != 'None':
|
||||
my_corr.tag = corr_tag
|
||||
|
||||
my_corr.prange = temp_prange
|
||||
return my_corr
|
||||
|
||||
json_dict = json.loads(json_string)
|
||||
|
||||
prog = json_dict.get('program', '')
|
||||
|
@ -400,6 +453,10 @@ def import_json_string(json_string, verbose=True, full_output=False):
|
|||
ol.append(get_List_from_dict(io))
|
||||
elif io['type'] == 'Array':
|
||||
ol.append(get_Array_from_dict(io))
|
||||
elif io['type'] == 'Corr':
|
||||
ol.append(get_Corr_from_dict(io))
|
||||
else:
|
||||
raise Exception("Unkown datatype.")
|
||||
|
||||
if full_output:
|
||||
retd = {}
|
||||
|
@ -420,9 +477,9 @@ def import_json_string(json_string, verbose=True, full_output=False):
|
|||
|
||||
|
||||
def load_json(fname, verbose=True, gz=True, full_output=False):
|
||||
"""Import a list of Obs or structures containing Obs from a .json.gz file.
|
||||
"""Import a list of Obs or structures containing Obs from a .json(.gz) file.
|
||||
|
||||
The following structures are supported: Obs, list, numpy.ndarray
|
||||
The following structures are supported: Obs, list, numpy.ndarray, Corr
|
||||
If the list contains only one element, it is unpacked from the list.
|
||||
|
||||
Parameters
|
||||
|
@ -451,3 +508,215 @@ def load_json(fname, verbose=True, gz=True, full_output=False):
|
|||
d = fin.read()
|
||||
|
||||
return import_json_string(d, verbose, full_output)
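With the new Corr support, a round trip through the json format could look like the following sketch; my_corr and the file name are placeholders:

from pyerrors.input.json import dump_to_json, load_json

dump_to_json([my_corr], "corr_export", description="GEVP ground state", gz=True)
reloaded = load_json("corr_export")  # a single element is unpacked from the list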
|
||||
|
||||
|
||||
def _ol_from_dict(ind, reps='DICTOBS'):
|
||||
"""Convert a dictionary of Obs objects to a list and a dictionary that contains
|
||||
placeholders instead of the Obs objects.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
ind : dict
|
||||
Dict of JSON valid structures and objects that will be exported.
|
||||
At the moment, these objects can be either of: Obs, list, numpy.ndarray, Corr.
|
||||
All Obs inside a structure have to be defined on the same set of configurations.
|
||||
reps : str
|
||||
Specify the structure of the placeholder in exported dict to be reps[0-9]+.
|
||||
"""
|
||||
|
||||
obstypes = (Obs, Corr, np.ndarray)
|
||||
|
||||
if not reps.isalnum():
|
||||
raise Exception('Placeholder string has to be alphanumeric!')
|
||||
ol = []
|
||||
counter = 0
|
||||
|
||||
def dict_replace_obs(d):
|
||||
nonlocal ol
|
||||
nonlocal counter
|
||||
x = {}
|
||||
for k, v in d.items():
|
||||
if isinstance(v, dict):
|
||||
v = dict_replace_obs(v)
|
||||
elif isinstance(v, list) and all([isinstance(o, Obs) for o in v]):
|
||||
v = obslist_replace_obs(v)
|
||||
elif isinstance(v, list):
|
||||
v = list_replace_obs(v)
|
||||
elif isinstance(v, obstypes):
|
||||
ol.append(v)
|
||||
v = reps + '%d' % (counter)
|
||||
counter += 1
|
||||
elif isinstance(v, str):
|
||||
if bool(re.match(r'%s[0-9]+' % (reps), v)):
|
||||
raise Exception('Dict contains string %s that matches the placeholder! %s Cannot be safely exported.' % (v, reps))
|
||||
x[k] = v
|
||||
return x
|
||||
|
||||
def list_replace_obs(li):
|
||||
nonlocal ol
|
||||
nonlocal counter
|
||||
x = []
|
||||
for e in li:
|
||||
if isinstance(e, list):
|
||||
e = list_replace_obs(e)
|
||||
elif isinstance(e, list) and all([isinstance(o, Obs) for o in e]):
|
||||
e = obslist_replace_obs(e)
|
||||
elif isinstance(e, dict):
|
||||
e = dict_replace_obs(e)
|
||||
elif isinstance(e, obstypes):
|
||||
ol.append(e)
|
||||
e = reps + '%d' % (counter)
|
||||
counter += 1
|
||||
elif isinstance(e, str):
|
||||
if bool(re.match(r'%s[0-9]+' % (reps), e)):
|
||||
raise Exception('Dict contains string %s that matches the placeholder! %s Cannot be safely exported.' % (e, reps))
|
||||
x.append(e)
|
||||
return x
|
||||
|
||||
def obslist_replace_obs(li):
|
||||
nonlocal ol
|
||||
nonlocal counter
|
||||
il = []
|
||||
for e in li:
|
||||
il.append(e)
|
||||
|
||||
ol.append(il)
|
||||
x = reps + '%d' % (counter)
|
||||
counter += 1
|
||||
return x
|
||||
|
||||
nd = dict_replace_obs(ind)
|
||||
|
||||
return ol, nd
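To illustrate the placeholder mechanism of this private helper, a sketch with two placeholder Obs objects obs_a and obs_b; the resulting structures assume the default reps='DICTOBS':

ol, nd = _ol_from_dict({"a": obs_a, "nested": {"b": obs_b}, "note": "text"})
# ol -> [obs_a, obs_b]
# nd -> {"a": "DICTOBS0", "nested": {"b": "DICTOBS1"}, "note": "text"}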
|
||||
|
||||
|
||||
def dump_dict_to_json(od, fname, description='', indent=1, reps='DICTOBS', gz=True):
|
||||
"""Export a dict of Obs or structures containing Obs to a .json(.gz) file
|
||||
|
||||
Parameters
|
||||
----------
|
||||
od : dict
|
||||
Dict of JSON valid structures and objects that will be exported.
|
||||
At the moment, these objects can be either of: Obs, list, numpy.ndarray, Corr.
|
||||
All Obs inside a structure have to be defined on the same set of configurations.
|
||||
fname : str
|
||||
Filename of the output file.
|
||||
description : str
|
||||
Optional string that describes the contents of the json file.
|
||||
indent : int
|
||||
Specify the indentation level of the json file. None or 0 is permissible and
|
||||
saves disk space.
|
||||
reps : str
|
||||
Specify the structure of the placeholder in exported dict to be reps[0-9]+.
|
||||
gz : bool
|
||||
If True, the output is a gzipped json. If False, the output is a json file.
|
||||
"""
|
||||
|
||||
if not isinstance(od, dict):
|
||||
raise Exception('od has to be a dictionary. Did you want to use dump_to_json?')
|
||||
|
||||
infostring = ('This JSON file contains a python dictionary that has been parsed to a list of structures. '
|
||||
'OBSDICT contains the dictionary, where Obs or other structures have been replaced by '
|
||||
'' + reps + '[0-9]+. The field description contains the additional description of this JSON file. '
|
||||
'This file may be parsed to a dict with the pyerrors routine load_json_dict.')
|
||||
|
||||
desc_dict = {'INFO': infostring, 'OBSDICT': {}, 'description': description}
|
||||
ol, desc_dict['OBSDICT'] = _ol_from_dict(od, reps=reps)
|
||||
|
||||
dump_to_json(ol, fname, description=desc_dict, indent=indent, gz=gz)
|
||||
|
||||
|
||||
def _od_from_list_and_dict(ol, ind, reps='DICTOBS'):
|
||||
"""Parse a list of Obs or structures containing Obs and an accompanying
|
||||
dict, where the structures have been replaced by placeholders to a
|
||||
dict that contains the structures.
|
||||
|
||||
The following structures are supported: Obs, list, numpy.ndarray, Corr
|
||||
|
||||
Parameters
|
||||
----------
|
||||
ol : list
|
||||
List of objects -
|
||||
At the moment, these objects can be either of: Obs, list, numpy.ndarray, Corr.
|
||||
All Obs inside a structure have to be defined on the same set of configurations.
|
||||
ind : dict
|
||||
Dict that defines the structure of the resulting dict and contains placeholders
|
||||
reps : str
|
||||
Specify the structure of the placeholder in imported dict to be reps[0-9]+.
|
||||
"""
|
||||
if not reps.isalnum():
|
||||
raise Exception('Placeholder string has to be alphanumeric!')
|
||||
|
||||
counter = 0
|
||||
|
||||
def dict_replace_string(d):
|
||||
nonlocal counter
|
||||
nonlocal ol
|
||||
x = {}
|
||||
for k, v in d.items():
|
||||
if isinstance(v, dict):
|
||||
v = dict_replace_string(v)
|
||||
elif isinstance(v, list):
|
||||
v = list_replace_string(v)
|
||||
elif isinstance(v, str) and bool(re.match(r'%s[0-9]+' % (reps), v)):
|
||||
index = int(v[len(reps):])
|
||||
v = ol[index]
|
||||
counter += 1
|
||||
x[k] = v
|
||||
return x
|
||||
|
||||
def list_replace_string(li):
|
||||
nonlocal counter
|
||||
nonlocal ol
|
||||
x = []
|
||||
for e in li:
|
||||
if isinstance(e, list):
|
||||
e = list_replace_string(e)
|
||||
elif isinstance(e, dict):
|
||||
e = dict_replace_string(e)
|
||||
elif isinstance(e, str) and bool(re.match(r'%s[0-9]+' % (reps), e)):
|
||||
index = int(e[len(reps):])
|
||||
e = ol[index]
|
||||
counter += 1
|
||||
x.append(e)
|
||||
return x
|
||||
|
||||
nd = dict_replace_string(ind)
|
||||
|
||||
if counter == 0:
|
||||
raise Exception('No placeholder has been replaced! Check if reps is set correctly.')
|
||||
|
||||
return nd
|
||||
|
||||
|
||||
def load_json_dict(fname, verbose=True, gz=True, full_output=False, reps='DICTOBS'):
|
||||
"""Import a dict of Obs or structures containing Obs from a .json(.gz) file.
|
||||
|
||||
The following structures are supported: Obs, list, numpy.ndarray, Corr
|
||||
|
||||
Parameters
|
||||
----------
|
||||
fname : str
|
||||
Filename of the input file.
|
||||
verbose : bool
|
||||
Print additional information that was written to the file.
|
||||
gz : bool
|
||||
If True, assumes that data is gzipped. If False, assumes JSON file.
|
||||
full_output : bool
|
||||
If True, a dict containing auxiliary information and the data is returned.
|
||||
If False, only the data is returned.
|
||||
reps : str
|
||||
Specify the structure of the placeholder in imported dict to be reps[0-9]+.
|
||||
"""
|
||||
indata = load_json(fname, verbose=verbose, gz=gz, full_output=True)
|
||||
description = indata['description']['description']
|
||||
indict = indata['description']['OBSDICT']
|
||||
ol = indata['obsdata']
|
||||
od = _od_from_list_and_dict(ol, indict, reps=reps)
|
||||
|
||||
if full_output:
|
||||
indata['description'] = description
|
||||
indata['obsdata'] = od
|
||||
return indata
|
||||
else:
|
||||
return od
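A hedged end-to-end sketch for the dictionary interface; obs_a and my_corr are placeholders for existing Obs and Corr objects and the file name is made up:

from pyerrors.input.json import dump_dict_to_json, load_json_dict

data = {"spectrum": {"pion": obs_a, "corr": my_corr}, "comment": "example"}
dump_dict_to_json(data, "analysis_results", description="dict export")
restored = load_json_dict("analysis_results")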
|
||||
|
|
|
@ -1,6 +1,3 @@
|
|||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import fnmatch
|
||||
import re
|
||||
|
@ -12,11 +9,12 @@ from ..obs import Obs
|
|||
def read_pbp(path, prefix, **kwargs):
|
||||
"""Read pbp format from given folder structure. Returns a list of length nrw
|
||||
|
||||
Keyword arguments
|
||||
-----------------
|
||||
r_start -- list which contains the first config to be read for each replicum
|
||||
r_stop -- list which contains the last config to be read for each replicum
|
||||
|
||||
Parameters
|
||||
----------
|
||||
r_start : list
|
||||
list which contains the first config to be read for each replicum
|
||||
r_stop : list
|
||||
list which contains the last config to be read for each replicum
|
||||
"""
|
||||
|
||||
extract_nfct = 1
|
||||
|
@ -66,7 +64,6 @@ def read_pbp(path, prefix, **kwargs):
|
|||
tmp_array = []
|
||||
with open(path + '/' + ls[rep], 'rb') as fp:
|
||||
|
||||
# header
|
||||
t = fp.read(4) # number of reweighting factors
|
||||
if rep == 0:
|
||||
nrw = struct.unpack('i', t)[0]
|
||||
|
@ -74,7 +71,7 @@ def read_pbp(path, prefix, **kwargs):
|
|||
deltas.append([])
|
||||
else:
|
||||
if nrw != struct.unpack('i', t)[0]:
|
||||
raise Exception('Error: different number of reweighting factors for replicum', rep)
|
||||
raise Exception('Error: different number of factors for replicum', rep)
|
||||
|
||||
for k in range(nrw):
|
||||
tmp_array.append([])
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import fnmatch
|
||||
import re
|
||||
import struct
|
||||
import numpy as np # Thinly-wrapped numpy
|
||||
import warnings
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib import gridspec
|
||||
from ..obs import Obs
|
||||
from ..fits import fit_lin
|
||||
|
||||
|
@ -15,16 +15,31 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
|
||||
Parameters
|
||||
----------
|
||||
path : str
|
||||
path that contains the data files
|
||||
prefix : str
|
||||
all files in path that start with prefix are considered as input files.
|
||||
May be used together postfix to consider only special file endings.
|
||||
Prefix is ignored, if the keyword 'files' is used.
|
||||
version : str
|
||||
version of openQCD, default 2.0
|
||||
names : list
|
||||
list of names that is assigned to the data according
|
||||
to the order in the file list. Use with care if you do not provide file names!
|
||||
r_start : list
|
||||
list which contains the first config to be read for each replicum
|
||||
r_stop : list
|
||||
list which contains the last config to be read for each replicum
|
||||
r_step : int
|
||||
integer that defines a fixed step size between two measurements (in units of configs)
|
||||
If not given, r_step=1 is assumed.
|
||||
postfix : str
|
||||
postfix of the file to read, e.g. '.ms1' for openQCD-files
|
||||
idl_offsets : list
|
||||
offsets to the idl range of obs. Useful for the case that the measurements of the reweighting factors only start at cfg 20
|
||||
files : list
|
||||
list which contains the filenames to be read. No automatic detection of
|
||||
files performed if given.
|
||||
print_err : bool
|
||||
Print additional information that is useful for debugging.
|
||||
"""
|
||||
known_oqcd_versions = ['1.4', '1.6', '2.0']
|
||||
if not (version in known_oqcd_versions):
|
||||
|
@ -44,7 +59,6 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
if 'files' in kwargs:
|
||||
ls = kwargs.get('files')
|
||||
else:
|
||||
# Exclude files with different names
|
||||
for exc in ls:
|
||||
if not fnmatch.fnmatch(exc, prefix + '*' + postfix + '.dat'):
|
||||
ls = list(set(ls) - set([exc]))
|
||||
|
@ -56,8 +70,7 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
r_start = kwargs.get('r_start')
|
||||
if len(r_start) != replica:
|
||||
raise Exception('r_start does not match number of replicas')
|
||||
# Adjust Configuration numbering to python index
|
||||
r_start = [o - 1 if o else None for o in r_start]
|
||||
r_start = [o if o else None for o in r_start]
|
||||
else:
|
||||
r_start = [None] * replica
|
||||
|
||||
|
@ -68,16 +81,22 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
else:
|
||||
r_stop = [None] * replica
|
||||
|
||||
if 'r_step' in kwargs:
|
||||
r_step = kwargs.get('r_step')
|
||||
else:
|
||||
r_step = 1
|
||||
|
||||
print('Read reweighting factors from', prefix[:-1], ',',
|
||||
replica, 'replica', end='')
|
||||
|
||||
# Adjust replica names to new bookmarking system
|
||||
if names is None:
|
||||
rep_names = []
|
||||
for entry in ls:
|
||||
truncated_entry = entry.split('.')[0]
|
||||
idx = truncated_entry.index('r')
|
||||
rep_names.append(truncated_entry[:idx] + '|' + truncated_entry[idx:])
|
||||
else:
|
||||
rep_names = names
|
||||
|
||||
print_err = 0
|
||||
if 'print_err' in kwargs:
|
||||
|
@ -86,11 +105,14 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
|
||||
deltas = []
|
||||
|
||||
configlist = []
|
||||
r_start_index = []
|
||||
r_stop_index = []
|
||||
|
||||
for rep in range(replica):
|
||||
tmp_array = []
|
||||
with open(path + '/' + ls[rep], 'rb') as fp:
|
||||
|
||||
# header
|
||||
t = fp.read(4) # number of reweighting factors
|
||||
if rep == 0:
|
||||
nrw = struct.unpack('i', t)[0]
|
||||
|
@ -99,7 +121,6 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
for k in range(nrw):
|
||||
deltas.append([])
|
||||
else:
|
||||
# little weird if-clause due to the /2 operation needed.
|
||||
if ((nrw != struct.unpack('i', t)[0] and (not version == '2.0')) or (nrw != struct.unpack('i', t)[0] / 2 and version == '2.0')):
|
||||
raise Exception('Error: different number of reweighting factors for replicum', rep)
|
||||
|
||||
|
@ -112,8 +133,6 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
for i in range(nrw):
|
||||
t = fp.read(4)
|
||||
nfct.append(struct.unpack('i', t)[0])
|
||||
# print('nfct: ', nfct) # Hasenbusch factor,
|
||||
# 1 for rat reweighting
|
||||
else:
|
||||
for i in range(nrw):
|
||||
nfct.append(1)
|
||||
|
@ -126,13 +145,13 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
if not struct.unpack('i', fp.read(4))[0] == 0:
|
||||
print('something is wrong!')
|
||||
|
||||
# body
|
||||
configlist.append([])
|
||||
while 0 < 1:
|
||||
t = fp.read(4)
|
||||
if len(t) < 4:
|
||||
break
|
||||
if print_err:
|
||||
config_no = struct.unpack('i', t)
|
||||
config_no = struct.unpack('i', t)[0]
|
||||
configlist[-1].append(config_no)
|
||||
for i in range(nrw):
|
||||
if(version == '2.0'):
|
||||
tmpd = _read_array_openQCD2(fp)
|
||||
|
@ -163,8 +182,32 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
print('Partial factor:', tmp_nfct)
|
||||
tmp_array[i].append(tmp_nfct)
|
||||
|
||||
if r_start[rep] is None:
|
||||
r_start_index.append(0)
|
||||
else:
|
||||
try:
|
||||
r_start_index.append(configlist[-1].index(r_start[rep]))
|
||||
except ValueError:
|
||||
raise Exception('Config %d not in file with range [%d, %d]' % (
|
||||
r_start[rep], configlist[-1][0], configlist[-1][-1])) from None
|
||||
|
||||
if r_stop[rep] is None:
|
||||
r_stop_index.append(len(configlist[-1]) - 1)
|
||||
else:
|
||||
try:
|
||||
r_stop_index.append(configlist[-1].index(r_stop[rep]))
|
||||
except ValueError:
|
||||
raise Exception('Config %d not in file with range [%d, %d]' % (
|
||||
r_stop[rep], configlist[-1][0], configlist[-1][-1])) from None
|
||||
|
||||
for k in range(nrw):
|
||||
deltas[k].append(tmp_array[k][r_start[rep]:r_stop[rep]])
|
||||
deltas[k].append(tmp_array[k][r_start_index[rep]:r_stop_index[rep]][::r_step])
|
||||
|
||||
if np.any([len(np.unique(np.diff(cl))) != 1 for cl in configlist]):
|
||||
raise Exception('Irregular spaced data in input file!', [len(np.unique(np.diff(cl))) for cl in configlist])
|
||||
stepsizes = [list(np.unique(np.diff(cl)))[0] for cl in configlist]
|
||||
if np.any([step != 1 for step in stepsizes]):
|
||||
warnings.warn('Stepsize between configurations is greater than one!' + str(stepsizes), RuntimeWarning)
|
||||
|
||||
print(',', nrw, 'reweighting factors with', nsrc, 'sources')
|
||||
if "idl_offsets" in kwargs:
|
||||
|
@ -172,15 +215,9 @@ def read_rwms(path, prefix, version='2.0', names=None, **kwargs):
|
|||
else:
|
||||
idl_offsets = np.ones(nrw, dtype=int)
|
||||
result = []
|
||||
idl = [range(configlist[rep][r_start_index[rep]], configlist[rep][r_stop_index[rep]], r_step) for rep in range(replica)]
|
||||
for t in range(nrw):
|
||||
idl = []
|
||||
for rep in range(replica):
|
||||
idl.append(range(idl_offsets[rep], len(deltas[t][rep] + idl_offsets[rep])))
|
||||
if names is None:
|
||||
result.append(Obs(deltas[t], rep_names, idl=idl))
|
||||
else:
|
||||
print(names)
|
||||
result.append(Obs(deltas[t], names, idl=idl))
|
||||
result.append(Obs(deltas[t], rep_names, idl=idl))
|
||||
return result
|
||||
|
||||
|
||||
|
@ -193,7 +230,12 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
The data around the zero crossing of t^2<E> - 0.3
|
||||
is fitted with a linear function
|
||||
from which the exact root is extracted.
|
||||
Only works with openQCD v 1.2.
|
||||
Only works with openQCD.
|
||||
|
||||
It is assumed that one measurement is performed for each config.
|
||||
If this is not the case, the resulting idl, as well as the handling
|
||||
of r_start, r_stop and r_step is wrong and the user has to correct
|
||||
this in the resulting observable.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
|
@ -215,10 +257,26 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
crossing to be included in the linear fit. (Default: 5)
|
||||
r_start : list
|
||||
list which contains the first config to be read for each replicum.
|
||||
r_stop: list
|
||||
r_stop : list
|
||||
list which contains the last config to be read for each replicum.
|
||||
r_step : int
|
||||
integer that defines a fixed step size between two measurements (in units of configs)
|
||||
If not given, r_step=1 is assumed.
|
||||
plaquette : bool
|
||||
If true extract the plaquette estimate of t0 instead.
|
||||
names : list
|
||||
list of names that is assigned to the data according
|
||||
to the order in the file list. Use with care if you do not provide file names!
|
||||
files : list
|
||||
list which contains the filenames to be read. No automatic detection of
|
||||
files performed if given.
|
||||
plot_fit : bool
|
||||
If true, the fit for the extraction of t0 is shown together with the data.
|
||||
assume_thermalization : bool
|
||||
If True: If the first record divided by the distance between two measurements is larger than
|
||||
1, it is assumed that this is due to thermalization and the first measurement belongs
|
||||
to the first config (default).
|
||||
If False: The config numbers are assumed to be traj_number // difference
|
||||
"""
|
||||
|
||||
ls = []
|
||||
|
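A hedged sketch of how the routine documented above might be called; all values are placeholders, and spatial_extent as well as fit_range are passed as keywords because the full positional signature is not visible in this hunk.

import pyerrors as pe

t0 = pe.input.openQCD.extract_t0('./gf_data', 'myens', dtr_read=3, xmin=2.0,
                                 spatial_extent=32, fit_range=5,
                                 r_step=2, plot_fit=True)
t0.gamma_method()  # the returned root -fit_result[0] / fit_result[1] is an Obs
print(t0)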
@ -229,20 +287,21 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
if not ls:
|
||||
raise Exception('Error, directory not found')
|
||||
|
||||
# Exclude files with different names
|
||||
for exc in ls:
|
||||
if not fnmatch.fnmatch(exc, prefix + '*.ms.dat'):
|
||||
ls = list(set(ls) - set([exc]))
|
||||
if len(ls) > 1:
|
||||
ls.sort(key=lambda x: int(re.findall(r'\d+', x[len(prefix):])[0]))
|
||||
if 'files' in kwargs:
|
||||
ls = kwargs.get('files')
|
||||
else:
|
||||
for exc in ls:
|
||||
if not fnmatch.fnmatch(exc, prefix + '*.ms.dat'):
|
||||
ls = list(set(ls) - set([exc]))
|
||||
if len(ls) > 1:
|
||||
ls.sort(key=lambda x: int(re.findall(r'\d+', x[len(prefix):])[0]))
|
||||
replica = len(ls)
|
||||
|
||||
if 'r_start' in kwargs:
|
||||
r_start = kwargs.get('r_start')
|
||||
if len(r_start) != replica:
|
||||
raise Exception('r_start does not match number of replicas')
|
||||
# Adjust Configuration numbering to python index
|
||||
r_start = [o - 1 if o else None for o in r_start]
|
||||
r_start = [o if o else None for o in r_start]
|
||||
else:
|
||||
r_start = [None] * replica
|
||||
|
||||
|
@ -253,14 +312,31 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
else:
|
||||
r_stop = [None] * replica
|
||||
|
||||
if 'r_step' in kwargs:
|
||||
r_step = kwargs.get('r_step')
|
||||
else:
|
||||
r_step = 1
|
||||
|
||||
print('Extract t0 from', prefix, ',', replica, 'replica')
|
||||
|
||||
if 'names' in kwargs:
|
||||
rep_names = kwargs.get('names')
|
||||
else:
|
||||
rep_names = []
|
||||
for entry in ls:
|
||||
truncated_entry = entry.split('.')[0]
|
||||
idx = truncated_entry.index('r')
|
||||
rep_names.append(truncated_entry[:idx] + '|' + truncated_entry[idx:])
|
||||
|
||||
Ysum = []
|
||||
|
||||
configlist = []
|
||||
r_start_index = []
|
||||
r_stop_index = []
|
||||
|
||||
for rep in range(replica):
|
||||
|
||||
with open(path + '/' + ls[rep], 'rb') as fp:
|
||||
# Read header
|
||||
t = fp.read(12)
|
||||
header = struct.unpack('iii', t)
|
||||
if rep == 0:
|
||||
|
@ -279,12 +355,13 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
|
||||
Ysl = []
|
||||
|
||||
# Read body
|
||||
configlist.append([])
|
||||
while 0 < 1:
|
||||
t = fp.read(4)
|
||||
if(len(t) < 4):
|
||||
break
|
||||
nc = struct.unpack('i', t)[0]
|
||||
configlist[-1].append(nc)
|
||||
|
||||
t = fp.read(8 * tmax * (nn + 1))
|
||||
if kwargs.get('plaquette'):
|
||||
|
@ -302,6 +379,38 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
current + tmax - xmin])
|
||||
for current in range(0, len(item), tmax)])
|
||||
|
||||
diffmeas = configlist[-1][-1] - configlist[-1][-2]
|
||||
configlist[-1] = [item // diffmeas for item in configlist[-1]]
|
||||
if kwargs.get('assume_thermalization', True) and configlist[-1][0] > 1:
|
||||
warnings.warn('Assume thermalization and that the first measurement belongs to the first config.')
|
||||
offset = configlist[-1][0] - 1
|
||||
configlist[-1] = [item - offset for item in configlist[-1]]
|
||||
|
||||
if r_start[rep] is None:
|
||||
r_start_index.append(0)
|
||||
else:
|
||||
try:
|
||||
r_start_index.append(configlist[-1].index(r_start[rep]))
|
||||
except ValueError:
|
||||
raise Exception('Config %d not in file with range [%d, %d]' % (
|
||||
r_start[rep], configlist[-1][0], configlist[-1][-1])) from None
|
||||
|
||||
if r_stop[rep] is None:
|
||||
r_stop_index.append(len(configlist[-1]) - 1)
|
||||
else:
|
||||
try:
|
||||
r_stop_index.append(configlist[-1].index(r_stop[rep]))
|
||||
except ValueError:
|
||||
raise Exception('Config %d not in file with range [%d, %d]' % (
|
||||
r_stop[rep], configlist[-1][0], configlist[-1][-1])) from None
|
||||
|
||||
if np.any([len(np.unique(np.diff(cl))) != 1 for cl in configlist]):
|
||||
raise Exception('Irregular spaced data in input file!', [len(np.unique(np.diff(cl))) for cl in configlist])
|
||||
stepsizes = [list(np.unique(np.diff(cl)))[0] for cl in configlist]
|
||||
if np.any([step != 1 for step in stepsizes]):
|
||||
warnings.warn('Stepsize between configurations is greater than one!' + str(stepsizes), RuntimeWarning)
|
||||
|
||||
idl = [range(configlist[rep][r_start_index[rep]], configlist[rep][r_stop_index[rep]], r_step) for rep in range(replica)]
|
||||
t2E_dict = {}
|
||||
for n in range(nn + 1):
|
||||
samples = []
|
||||
|
@ -309,8 +418,8 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
samples.append([])
|
||||
for cnfg in rep:
|
||||
samples[-1].append(cnfg[n])
|
||||
samples[-1] = samples[-1][r_start[nrep]:r_stop[nrep]]
|
||||
new_obs = Obs(samples, [(w.split('.'))[0] for w in ls])
|
||||
samples[-1] = samples[-1][r_start_index[nrep]:r_stop_index[nrep]][::r_step]
|
||||
new_obs = Obs(samples, rep_names, idl=idl)
|
||||
t2E_dict[n * dn * eps] = (n * dn * eps) ** 2 * new_obs / (spatial_extent ** 3) - 0.3
|
||||
|
||||
zero_crossing = np.argmax(np.array(
|
||||
|
@ -323,32 +432,62 @@ def extract_t0(path, prefix, dtr_read, xmin,
|
|||
[o.gamma_method() for o in y]
|
||||
|
||||
fit_result = fit_lin(x, y)
|
||||
|
||||
if kwargs.get('plot_fit'):
|
||||
plt.figure()
|
||||
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1], wspace=0.0, hspace=0.0)
|
||||
ax0 = plt.subplot(gs[0])
|
||||
xmore = list(t2E_dict.keys())[zero_crossing - fit_range - 2: zero_crossing + fit_range + 2]
|
||||
ymore = list(t2E_dict.values())[zero_crossing - fit_range - 2: zero_crossing + fit_range + 2]
|
||||
[o.gamma_method() for o in ymore]
|
||||
ax0.errorbar(xmore, [yi.value for yi in ymore], yerr=[yi.dvalue for yi in ymore], fmt='x')
|
||||
xplot = np.linspace(np.min(x), np.max(x))
|
||||
yplot = [fit_result[0] + fit_result[1] * xi for xi in xplot]
|
||||
[yi.gamma_method() for yi in yplot]
|
||||
ax0.fill_between(xplot, y1=[yi.value - yi.dvalue for yi in yplot], y2=[yi.value + yi.dvalue for yi in yplot])
|
||||
retval = (-fit_result[0] / fit_result[1])
|
||||
retval.gamma_method()
|
||||
ylim = ax0.get_ylim()
|
||||
ax0.fill_betweenx(ylim, x1=retval.value - retval.dvalue, x2=retval.value + retval.dvalue, color='gray', alpha=0.4)
|
||||
ax0.set_ylim(ylim)
|
||||
ax0.set_ylabel(r'$t^2 \langle E(t) \rangle - 0.3 $')
|
||||
xlim = ax0.get_xlim()
|
||||
|
||||
fit_res = [fit_result[0] + fit_result[1] * xi for xi in x]
|
||||
residuals = (np.asarray([o.value for o in y]) - [o.value for o in fit_res]) / np.asarray([o.dvalue for o in y])
|
||||
ax1 = plt.subplot(gs[1])
|
||||
ax1.plot(x, residuals, 'ko', ls='none', markersize=5)
|
||||
ax1.tick_params(direction='out')
|
||||
ax1.tick_params(axis="x", bottom=True, top=True, labelbottom=True)
|
||||
ax1.axhline(y=0.0, ls='--', color='k')
|
||||
ax1.fill_between(xlim, -1.0, 1.0, alpha=0.1, facecolor='k')
|
||||
ax1.set_xlim(xlim)
|
||||
ax1.set_ylabel('Residuals')
|
||||
ax1.set_xlabel(r'$t/a^2$')
|
||||
|
||||
plt.show()
|
||||
return -fit_result[0] / fit_result[1]
|
||||
|
||||
|
||||
def _parse_array_openQCD2(d, n, size, wa, quadrupel=False):
|
||||
arr = []
|
||||
if d == 2:
|
||||
tot = 0
|
||||
for i in range(n[d - 1] - 1):
|
||||
for i in range(n[0]):
|
||||
tmp = wa[i * n[1]:(i + 1) * n[1]]
|
||||
if quadrupel:
|
||||
tmp = wa[tot:n[d - 1]]
|
||||
tmp2 = []
|
||||
for i in range(len(tmp)):
|
||||
if i % 2 == 0:
|
||||
tmp2.append(tmp[i])
|
||||
for j in range(0, len(tmp), 2):
|
||||
tmp2.append(tmp[j])
|
||||
arr.append(tmp2)
|
||||
else:
|
||||
arr.append(np.asarray(wa[tot:n[d - 1]]))
|
||||
arr.append(np.asarray(tmp))
|
||||
|
||||
else:
|
||||
raise Exception('Only two-dimensional arrays supported!')
|
||||
|
||||
return arr
|
||||
|
||||
|
||||
# mimic the read_array routine of openQCD-2.0.
|
||||
# fp is the opened file handle
|
||||
# returns the dict array
|
||||
# at this point we only parse a 2d array
|
||||
# d = 2
|
||||
# n = [nfct[irw], 2*nsrc[irw]]
|
||||
def _read_array_openQCD2(fp):
|
||||
t = fp.read(4)
|
||||
d = struct.unpack('i', t)[0]
|
||||
|
@ -380,42 +519,40 @@ def read_qtop(path, prefix, c, dtr_cnfg=1, version="1.2", **kwargs):
|
|||
|
||||
Parameters
|
||||
----------
|
||||
path:
|
||||
path : str
|
||||
path of the measurement files
|
||||
prefix:
|
||||
prefix : str
|
||||
prefix of the measurement files, e.g. <prefix>_id0_r0.ms.dat
|
||||
c: double
|
||||
c : double
|
||||
Smearing radius in units of the lattice extent, c = sqrt(8 t0) / L
|
||||
dtr_cnfg: int
|
||||
dtr_cnfg : int
|
||||
(optional) parameter that specifies the number of trajectories
|
||||
between two configs.
|
||||
if it is not set, the distance between two measurements
|
||||
in the file is assumed to be
|
||||
the distance between two configurations.
|
||||
steps: int
|
||||
steps : int
|
||||
(optional) (maybe only necessary for openQCD2.0)
|
||||
nt step size, guessed if not given
|
||||
version: str
|
||||
version : str
|
||||
version string of the openQCD (sfqcd) version used to create
|
||||
the ensemble
|
||||
L: int
|
||||
L : int
|
||||
spatial length of the lattice in L/a.
|
||||
HAS to be set if version != sfqcd, since openQCD does not provide
|
||||
this in the header
|
||||
r_start: list
|
||||
r_start : list
|
||||
offset of the first ensemble, making it easier to match
|
||||
later on with other Obs
|
||||
r_stop: list
|
||||
r_stop : list
|
||||
last configurations that need to be read (per replicum)
|
||||
files: list
|
||||
files : list
|
||||
specify the exact files that need to be read
|
||||
from path, pratical if e.g. only one replicum is needed
|
||||
names: list
|
||||
from path, practical if e.g. only one replicum is needed
|
||||
names : list
|
||||
Alternative labeling for replicas/ensembles.
|
||||
Has to have the appropriate length
|
||||
"""
|
||||
# one could read L from the header in case of sfQCD
|
||||
# c = 0.35
|
||||
known_versions = ["1.0", "1.2", "1.4", "1.6", "2.0", "sfqcd"]
|
||||
|
||||
if version not in known_versions:
|
||||
|
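The parameter list above translates into calls like the following sketch; the path is a placeholder, the prefix and r_stop values mirror the example notebook removed further down in this commit, and c = 0.35 mirrors the commented-out value in the source.

import pyerrors.input.openQCD as qcdin

qtop = qcdin.read_qtop('./ms_data', 'T29L20k0.13719', c=0.35,
                       dtr_cnfg=10, version='1.2', L=20,
                       r_stop=[500, 440, 447, 410])
qtop.gamma_method()  # read_qtop returns a single Obs for the topological charge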
@ -435,11 +572,9 @@ def read_qtop(path, prefix, c, dtr_cnfg=1, version="1.2", **kwargs):
|
|||
r_start = kwargs.get("r_start")
|
||||
if "r_stop" in kwargs:
|
||||
r_stop = kwargs.get("r_stop")
|
||||
# if one wants to read specific files with this method...
|
||||
if "files" in kwargs:
|
||||
files = kwargs.get("files")
|
||||
else:
|
||||
# find files in path
|
||||
found = []
|
||||
files = []
|
||||
for (dirpath, dirnames, filenames) in os.walk(path + "/"):
|
||||
|
@ -450,14 +585,12 @@ def read_qtop(path, prefix, c, dtr_cnfg=1, version="1.2", **kwargs):
|
|||
if fnmatch.fnmatch(f, prefix + "*" + ".ms.dat"):
|
||||
files.append(f)
|
||||
print(files)
|
||||
# now that we found our files, we dechiffer them...
|
||||
rep_names = []
|
||||
|
||||
deltas = []
|
||||
idl = []
|
||||
for rep, file in enumerate(files):
|
||||
with open(path + "/" + file, "rb") as fp:
|
||||
# header
|
||||
t = fp.read(12)
|
||||
header = struct.unpack('<iii', t)
|
||||
# step size in integration steps "dnms"
|
||||
|
@ -486,7 +619,6 @@ def read_qtop(path, prefix, c, dtr_cnfg=1, version="1.2", **kwargs):
|
|||
Q = []
|
||||
ncs = []
|
||||
while 0 < 1:
|
||||
# int nt
|
||||
t = fp.read(4)
|
||||
if(len(t) < 4):
|
||||
break
|
||||
|
@ -533,8 +665,6 @@ def read_qtop(path, prefix, c, dtr_cnfg=1, version="1.2", **kwargs):
|
|||
if len(Q_round) != len(ncs) // dtr_cnfg:
|
||||
raise Exception("qtops and ncs dont have the same length")
|
||||
|
||||
# replica = len(files)
|
||||
|
||||
truncated_file = file[:-7]
|
||||
print(truncated_file)
|
||||
idl_start = 1
|
||||
|
@ -562,17 +692,24 @@ def read_qtop(path, prefix, c, dtr_cnfg=1, version="1.2", **kwargs):
|
|||
rep_names = names
|
||||
deltas.append(np.array(Q_round))
|
||||
idl.append(range(idl_start, idl_stop))
|
||||
# print(idl)
|
||||
result = Obs(deltas, rep_names, idl=idl)
|
||||
return result
|
||||
|
||||
|
||||
def read_qtop_sector(target=0, **kwargs):
|
||||
"""target: int
|
||||
specifies the topological sector to be reweighted to (default 0)
|
||||
q_top: Obs
|
||||
alternatively takes args of read_qtop method as kwargs
|
||||
"""Constructs reweighting factors to a specified topological sector.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
target : int
|
||||
Specifies the topological sector to be reweighted to (default 0)
|
||||
q_top : Obs
|
||||
Alternatively takes args of read_qtop method as kwargs
|
||||
"""
|
||||
|
||||
if not isinstance(target, int):
|
||||
raise Exception("'target' has to be an integer.")
|
||||
|
||||
if "q_top" in kwargs:
|
||||
qtop = kwargs.get("q_top")
|
||||
else:
|
||||
|
@ -603,7 +740,6 @@ def read_qtop_sector(target=0, **kwargs):
|
|||
dtr_cnfg = 1
|
||||
qtop = read_qtop(path, prefix, c, dtr_cnfg=dtr_cnfg,
|
||||
version=version, **kwargs)
|
||||
# unpack to original values, project onto target sector
|
||||
names = qtop.names
|
||||
print(names)
|
||||
print(qtop.deltas.keys())
|
||||
|
|
|
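A short sketch following the docstring of read_qtop_sector above: build the topological charge Obs first and hand it over via q_top (the reader arguments are placeholders as before).

import pyerrors.input.openQCD as qcdin

qtop = qcdin.read_qtop('./ms_data', 'T29L20k0.13719', c=0.35,
                       dtr_cnfg=10, version='1.2', L=20)
weights = qcdin.read_qtop_sector(target=0, q_top=qtop)  # reweighting factors for the Q=0 sector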
@ -2,11 +2,20 @@
|
|||
|
||||
|
||||
def check_idl(idl, che):
|
||||
"""Checks if list of configurations is contained in an idl
|
||||
|
||||
Parameters
|
||||
----------
|
||||
idl : range or list
|
||||
idl of the current replicum
|
||||
che : list
|
||||
list of configurations to be checked against
|
||||
"""
|
||||
missing = []
|
||||
for c in che:
|
||||
if c not in idl:
|
||||
missing.append(c)
|
||||
# print missing such that it can directly be parsed to slurm terminal
|
||||
# print missing configurations such that it can directly be parsed to slurm terminal
|
||||
if not (len(missing) == 0):
|
||||
print(len(missing), "configs missing")
|
||||
miss_str = str(missing[0])
|
||||
|
|
|
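A small sketch of the helper documented above; the import path is an assumption, the function only needs an idl and a list of configurations to check.

import numpy as np
import pyerrors as pe
from pyerrors.input.utils import check_idl  # assumed location of the helper

obs = pe.Obs([np.random.rand(50)], ['ensA'], idl=[range(1, 100, 2)])  # configs 1, 3, ..., 99
check_idl(obs.idl['ensA'], [1, 2, 3, 4, 5])  # reports configs 2 and 4 as missing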
@ -213,8 +213,6 @@ def _scalar_mat_op(op, obs, **kwargs):
|
|||
"""Computes the matrix to scalar operation op to a given matrix of Obs."""
|
||||
def _mat(x, **kwargs):
|
||||
dim = int(np.sqrt(len(x)))
|
||||
if np.sqrt(len(x)) != dim:
|
||||
raise Exception('Input has to have dim**2 entries')
|
||||
|
||||
mat = []
|
||||
for i in range(dim):
|
||||
|
@ -227,8 +225,6 @@ def _scalar_mat_op(op, obs, **kwargs):
|
|||
|
||||
if isinstance(obs, np.ndarray):
|
||||
raveled_obs = (1 * (obs.ravel())).tolist()
|
||||
elif isinstance(obs, list):
|
||||
raveled_obs = obs
|
||||
else:
|
||||
raise TypeError('Unproper type of input.')
|
||||
return derived_observable(_mat, raveled_obs, **kwargs)
|
||||
|
@ -248,10 +244,7 @@ def _mat_mat_op(op, obs, **kwargs):
|
|||
A[n, m] = entry
|
||||
B[n, m] = 0.0
|
||||
big_matrix = np.block([[A, -B], [B, A]])
|
||||
if kwargs.get('num_grad') is True:
|
||||
op_big_matrix = _num_diff_mat_mat_op(op, big_matrix, **kwargs)
|
||||
else:
|
||||
op_big_matrix = derived_observable(lambda x, **kwargs: op(x), [big_matrix], array_mode=True)[0]
|
||||
op_big_matrix = derived_observable(lambda x, **kwargs: op(x), [big_matrix], array_mode=True)[0]
|
||||
dim = op_big_matrix.shape[0]
|
||||
op_A = op_big_matrix[0: dim // 2, 0: dim // 2]
|
||||
op_B = op_big_matrix[dim // 2:, 0: dim // 2]
|
||||
|
@ -260,15 +253,11 @@ def _mat_mat_op(op, obs, **kwargs):
|
|||
res[n, m] = CObs(op_A[n, m], op_B[n, m])
|
||||
return res
|
||||
else:
|
||||
if kwargs.get('num_grad') is True:
|
||||
return _num_diff_mat_mat_op(op, obs, **kwargs)
|
||||
return derived_observable(lambda x, **kwargs: op(x), [obs], array_mode=True)[0]
|
||||
|
||||
|
||||
def eigh(obs, **kwargs):
|
||||
"""Computes the eigenvalues and eigenvectors of a given hermitian matrix of Obs according to np.linalg.eigh."""
|
||||
if kwargs.get('num_grad') is True:
|
||||
return _num_diff_eigh(obs, **kwargs)
|
||||
w = derived_observable(lambda x, **kwargs: anp.linalg.eigh(x)[0], obs)
|
||||
v = derived_observable(lambda x, **kwargs: anp.linalg.eigh(x)[1], obs)
|
||||
return w, v
|
||||
|
@ -276,232 +265,18 @@ def eigh(obs, **kwargs):
|
|||
|
||||
def eig(obs, **kwargs):
|
||||
"""Computes the eigenvalues of a given matrix of Obs according to np.linalg.eig."""
|
||||
if kwargs.get('num_grad') is True:
|
||||
return _num_diff_eig(obs, **kwargs)
|
||||
# Note: Automatic differentiation of eig is implemented in the git of autograd
|
||||
# but not yet released to PyPi (1.3)
|
||||
w = derived_observable(lambda x, **kwargs: anp.real(anp.linalg.eig(x)[0]), obs)
|
||||
return w
|
||||
|
||||
|
||||
def pinv(obs, **kwargs):
|
||||
"""Computes the Moore-Penrose pseudoinverse of a matrix of Obs."""
|
||||
if kwargs.get('num_grad') is True:
|
||||
return _num_diff_pinv(obs, **kwargs)
|
||||
return derived_observable(lambda x, **kwargs: anp.linalg.pinv(x), obs)
|
||||
|
||||
|
||||
def svd(obs, **kwargs):
|
||||
"""Computes the singular value decomposition of a matrix of Obs."""
|
||||
if kwargs.get('num_grad') is True:
|
||||
return _num_diff_svd(obs, **kwargs)
|
||||
u = derived_observable(lambda x, **kwargs: anp.linalg.svd(x, full_matrices=False)[0], obs)
|
||||
s = derived_observable(lambda x, **kwargs: anp.linalg.svd(x, full_matrices=False)[1], obs)
|
||||
vh = derived_observable(lambda x, **kwargs: anp.linalg.svd(x, full_matrices=False)[2], obs)
|
||||
return (u, s, vh)
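The wrappers above operate on matrices of Obs and propagate the errors through the decomposition; a minimal sketch (the pe.linalg namespace is inferred from the test suite further below, the numbers are arbitrary):

import numpy as np
import pyerrors as pe

a = pe.pseudo_Obs(4.0, 0.1, 'ensA')
b = pe.pseudo_Obs(1.0, 0.1, 'ensA')
c = pe.pseudo_Obs(3.0, 0.1, 'ensA')
mat = np.array([[a, b], [b, c]])  # symmetric 2x2 matrix of Obs

w, v = pe.linalg.eigh(mat)     # eigenvalues and eigenvectors with propagated errors
p = pe.linalg.pinv(mat)        # Moore-Penrose pseudoinverse
u, s, vh = pe.linalg.svd(mat)  # singular value decomposition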
|
||||
|
||||
|
||||
# Variants for numerical differentiation
|
||||
|
||||
def _num_diff_mat_mat_op(op, obs, **kwargs):
|
||||
"""Computes the matrix to matrix operation op to a given matrix of Obs elementwise
|
||||
which is suitable for numerical differentiation."""
|
||||
def _mat(x, **kwargs):
|
||||
dim = int(np.sqrt(len(x)))
|
||||
if np.sqrt(len(x)) != dim:
|
||||
raise Exception('Input has to have dim**2 entries')
|
||||
|
||||
mat = []
|
||||
for i in range(dim):
|
||||
row = []
|
||||
for j in range(dim):
|
||||
row.append(x[j + dim * i])
|
||||
mat.append(row)
|
||||
|
||||
return op(np.array(mat))[kwargs.get('i')][kwargs.get('j')]
|
||||
|
||||
if isinstance(obs, np.ndarray):
|
||||
raveled_obs = (1 * (obs.ravel())).tolist()
|
||||
elif isinstance(obs, list):
|
||||
raveled_obs = obs
|
||||
else:
|
||||
raise TypeError('Unproper type of input.')
|
||||
|
||||
dim = int(np.sqrt(len(raveled_obs)))
|
||||
|
||||
res_mat = []
|
||||
for i in range(dim):
|
||||
row = []
|
||||
for j in range(dim):
|
||||
row.append(derived_observable(_mat, raveled_obs, i=i, j=j, **kwargs))
|
||||
res_mat.append(row)
|
||||
|
||||
return np.array(res_mat) @ np.identity(dim)
|
||||
|
||||
|
||||
def _num_diff_eigh(obs, **kwargs):
|
||||
"""Computes the eigenvalues and eigenvectors of a given hermitian matrix of Obs according to np.linalg.eigh
|
||||
elementwise which is suitable for numerical differentiation."""
|
||||
def _mat(x, **kwargs):
|
||||
dim = int(np.sqrt(len(x)))
|
||||
if np.sqrt(len(x)) != dim:
|
||||
raise Exception('Input has to have dim**2 entries')
|
||||
|
||||
mat = []
|
||||
for i in range(dim):
|
||||
row = []
|
||||
for j in range(dim):
|
||||
row.append(x[j + dim * i])
|
||||
mat.append(row)
|
||||
|
||||
n = kwargs.get('n')
|
||||
res = np.linalg.eigh(np.array(mat))[n]
|
||||
|
||||
if n == 0:
|
||||
return res[kwargs.get('i')]
|
||||
else:
|
||||
return res[kwargs.get('i')][kwargs.get('j')]
|
||||
|
||||
if isinstance(obs, np.ndarray):
|
||||
raveled_obs = (1 * (obs.ravel())).tolist()
|
||||
elif isinstance(obs, list):
|
||||
raveled_obs = obs
|
||||
else:
|
||||
raise TypeError('Unproper type of input.')
|
||||
|
||||
dim = int(np.sqrt(len(raveled_obs)))
|
||||
|
||||
res_vec = []
|
||||
for i in range(dim):
|
||||
res_vec.append(derived_observable(_mat, raveled_obs, n=0, i=i, **kwargs))
|
||||
|
||||
res_mat = []
|
||||
for i in range(dim):
|
||||
row = []
|
||||
for j in range(dim):
|
||||
row.append(derived_observable(_mat, raveled_obs, n=1, i=i, j=j, **kwargs))
|
||||
res_mat.append(row)
|
||||
|
||||
return (np.array(res_vec) @ np.identity(dim), np.array(res_mat) @ np.identity(dim))
|
||||
|
||||
|
||||
def _num_diff_eig(obs, **kwargs):
|
||||
"""Computes the eigenvalues of a given matrix of Obs according to np.linalg.eig
|
||||
elementwise which is suitable for numerical differentiation."""
|
||||
def _mat(x, **kwargs):
|
||||
dim = int(np.sqrt(len(x)))
|
||||
if np.sqrt(len(x)) != dim:
|
||||
raise Exception('Input has to have dim**2 entries')
|
||||
|
||||
mat = []
|
||||
for i in range(dim):
|
||||
row = []
|
||||
for j in range(dim):
|
||||
row.append(x[j + dim * i])
|
||||
mat.append(row)
|
||||
|
||||
n = kwargs.get('n')
|
||||
res = np.linalg.eig(np.array(mat))[n]
|
||||
|
||||
if n == 0:
|
||||
# Discard imaginary part of eigenvalue here
|
||||
return np.real(res[kwargs.get('i')])
|
||||
else:
|
||||
return res[kwargs.get('i')][kwargs.get('j')]
|
||||
|
||||
if isinstance(obs, np.ndarray):
|
||||
raveled_obs = (1 * (obs.ravel())).tolist()
|
||||
elif isinstance(obs, list):
|
||||
raveled_obs = obs
|
||||
else:
|
||||
raise TypeError('Unproper type of input.')
|
||||
|
||||
dim = int(np.sqrt(len(raveled_obs)))
|
||||
|
||||
res_vec = []
|
||||
for i in range(dim):
|
||||
# Note: Automatic differentiation of eig is implemented in the git of autograd
|
||||
# but not yet released to PyPi (1.3)
|
||||
res_vec.append(derived_observable(_mat, raveled_obs, n=0, i=i, **kwargs))
|
||||
|
||||
return np.array(res_vec) @ np.identity(dim)
|
||||
|
||||
|
||||
def _num_diff_pinv(obs, **kwargs):
|
||||
"""Computes the Moore-Penrose pseudoinverse of a matrix of Obs elementwise which is suitable
|
||||
for numerical differentiation."""
|
||||
def _mat(x, **kwargs):
|
||||
shape = kwargs.get('shape')
|
||||
|
||||
mat = []
|
||||
for i in range(shape[0]):
|
||||
row = []
|
||||
for j in range(shape[1]):
|
||||
row.append(x[j + shape[1] * i])
|
||||
mat.append(row)
|
||||
|
||||
return np.linalg.pinv(np.array(mat))[kwargs.get('i')][kwargs.get('j')]
|
||||
|
||||
if isinstance(obs, np.ndarray):
|
||||
shape = obs.shape
|
||||
raveled_obs = (1 * (obs.ravel())).tolist()
|
||||
else:
|
||||
raise TypeError('Unproper type of input.')
|
||||
|
||||
res_mat = []
|
||||
for i in range(shape[1]):
|
||||
row = []
|
||||
for j in range(shape[0]):
|
||||
row.append(derived_observable(_mat, raveled_obs, shape=shape, i=i, j=j, **kwargs))
|
||||
res_mat.append(row)
|
||||
|
||||
return np.array(res_mat) @ np.identity(shape[0])
|
||||
|
||||
|
||||
def _num_diff_svd(obs, **kwargs):
|
||||
"""Computes the singular value decomposition of a matrix of Obs elementwise which
|
||||
is suitable for numerical differentiation."""
|
||||
def _mat(x, **kwargs):
|
||||
shape = kwargs.get('shape')
|
||||
|
||||
mat = []
|
||||
for i in range(shape[0]):
|
||||
row = []
|
||||
for j in range(shape[1]):
|
||||
row.append(x[j + shape[1] * i])
|
||||
mat.append(row)
|
||||
|
||||
res = np.linalg.svd(np.array(mat), full_matrices=False)
|
||||
|
||||
if kwargs.get('n') == 1:
|
||||
return res[1][kwargs.get('i')]
|
||||
else:
|
||||
return res[kwargs.get('n')][kwargs.get('i')][kwargs.get('j')]
|
||||
|
||||
if isinstance(obs, np.ndarray):
|
||||
shape = obs.shape
|
||||
raveled_obs = (1 * (obs.ravel())).tolist()
|
||||
else:
|
||||
raise TypeError('Unproper type of input.')
|
||||
|
||||
mid_index = min(shape[0], shape[1])
|
||||
|
||||
res_mat0 = []
|
||||
for i in range(shape[0]):
|
||||
row = []
|
||||
for j in range(mid_index):
|
||||
row.append(derived_observable(_mat, raveled_obs, shape=shape, n=0, i=i, j=j, **kwargs))
|
||||
res_mat0.append(row)
|
||||
|
||||
res_mat1 = []
|
||||
for i in range(mid_index):
|
||||
res_mat1.append(derived_observable(_mat, raveled_obs, shape=shape, n=1, i=i, **kwargs))
|
||||
|
||||
res_mat2 = []
|
||||
for i in range(mid_index):
|
||||
row = []
|
||||
for j in range(shape[1]):
|
||||
row.append(derived_observable(_mat, raveled_obs, shape=shape, n=2, i=i, j=j, **kwargs))
|
||||
res_mat2.append(row)
|
||||
|
||||
return (np.array(res_mat0) @ np.identity(mid_index), np.array(res_mat1) @ np.identity(mid_index), np.array(res_mat2) @ np.identity(shape[1]))
|
||||
|
|
|
@ -70,3 +70,19 @@ def gen_correlated_data(means, cov, name, tau=0.5, samples=1000):
|
|||
data.append(np.sqrt(1 - a ** 2) * rand[i] + a * data[-1])
|
||||
corr_data = np.array(data) - np.mean(data, axis=0) + means
|
||||
return [Obs([dat], [name]) for dat in corr_data.T]
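A quick sketch of how the generator above can be used to create synthetic, autocorrelated test data; the import path of the helper is an assumption and the numbers are arbitrary.

import numpy as np
from pyerrors.misc import gen_correlated_data  # assumed module of the helper

cov = 1e-4 * np.array([[4.0, 1.0], [1.0, 9.0]])
obs_a, obs_b = gen_correlated_data([1.0, 2.0], cov, 'synthetic_ens', tau=0.8, samples=1000)
obs_a.gamma_method()
obs_b.gamma_method()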
|
||||
|
||||
|
||||
def _assert_equal_properties(ol, otype=Obs):
|
||||
if not isinstance(ol[0], otype):
|
||||
raise Exception("Wrong data type in list.")
|
||||
for o in ol[1:]:
|
||||
if not isinstance(o, otype):
|
||||
raise Exception("Wrong data type in list.")
|
||||
if not ol[0].is_merged == o.is_merged:
|
||||
raise Exception("All Obs in list have to have the same state 'is_merged'.")
|
||||
if not ol[0].reweighted == o.reweighted:
|
||||
raise Exception("All Obs in list have to have the same property 'reweighted'.")
|
||||
if not ol[0].e_content == o.e_content:
|
||||
raise Exception("All Obs in list have to be defined on the same set of configs.")
|
||||
if not ol[0].idl == o.idl:
|
||||
raise Exception("All Obs in list have to be defined on the same set of configurations.")
|
||||
|
|
|
@ -4,6 +4,7 @@ import numpy as np
|
|||
import autograd.numpy as anp # Thinly-wrapped numpy
|
||||
from autograd import jacobian
|
||||
import matplotlib.pyplot as plt
|
||||
from scipy.stats import skew, skewtest, kurtosis, kurtosistest
|
||||
import numdifftools as nd
|
||||
from itertools import groupby
|
||||
from .covobs import Covobs
|
||||
|
@ -287,8 +288,6 @@ class Obs:
|
|||
_compute_drho(1)
|
||||
if self.tau_exp[e_name] > 0:
|
||||
texp = self.tau_exp[e_name]
|
||||
# if type(self.idl[e_name]) is range: # scale tau_exp according to step size
|
||||
# texp /= self.idl[e_name].step
|
||||
# Critical slowing down analysis
|
||||
if w_max // 2 <= 1:
|
||||
raise Exception("Need at least 8 samples for tau_exp error analysis")
|
||||
|
@ -434,10 +433,6 @@ class Obs:
|
|||
my_string_list.append(my_string)
|
||||
print('\n'.join(my_string_list))
|
||||
|
||||
def print(self, level=1):
|
||||
warnings.warn("Method 'print' renamed to 'details'", DeprecationWarning)
|
||||
self.details(level > 1)
|
||||
|
||||
def is_zero_within_error(self, sigma=1):
|
||||
"""Checks whether the observable is zero within 'sigma' standard errors.
|
||||
|
||||
|
@ -473,8 +468,8 @@ class Obs:
|
|||
if not hasattr(self, 'e_dvalue'):
|
||||
raise Exception('Run the gamma method first.')
|
||||
|
||||
fig = plt.figure()
|
||||
for e, e_name in enumerate(self.mc_names):
|
||||
fig = plt.figure()
|
||||
plt.xlabel(r'$W$')
|
||||
plt.ylabel(r'$\tau_\mathrm{int}$')
|
||||
length = int(len(self.e_n_tauint[e_name]))
|
||||
|
@ -499,13 +494,20 @@ class Obs:
|
|||
plt.ylim(bottom=0.0)
|
||||
plt.draw()
|
||||
if save:
|
||||
fig.savefig(save)
|
||||
fig.savefig(save + "_" + str(e))
|
||||
|
||||
def plot_rho(self):
|
||||
"""Plot normalized autocorrelation function time for each ensemble."""
|
||||
def plot_rho(self, save=None):
|
||||
"""Plot normalized autocorrelation function time for each ensemble.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
save : str
|
||||
saves the figure to a file named 'save' if given.
|
||||
"""
|
||||
if not hasattr(self, 'e_dvalue'):
|
||||
raise Exception('Run the gamma method first.')
|
||||
for e, e_name in enumerate(self.mc_names):
|
||||
fig = plt.figure()
|
||||
plt.xlabel('W')
|
||||
plt.ylabel('rho')
|
||||
length = int(len(self.e_drho[e_name]))
|
||||
|
@ -522,6 +524,8 @@ class Obs:
|
|||
plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1)
|
||||
plt.xlim(-0.5, xmax)
|
||||
plt.draw()
|
||||
if save:
|
||||
fig.savefig(save + "_" + str(e))
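With the new save argument the normalized autocorrelation function can be written to disk, one file per ensemble with the suffix "_<e>" appended as in the savefig call above; the file name below is a placeholder.

import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(1.0, 0.1, 1000)], ['ensA'])
obs.gamma_method()
obs.plot_rho(save='rho')  # writes 'rho_0' (in matplotlib's default format) for the first ensemble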
|
||||
|
||||
def plot_rep_dist(self):
|
||||
"""Plot replica distribution for each ensemble with more than one replicum."""
|
||||
|
@ -557,18 +561,24 @@ class Obs:
|
|||
plt.figure()
|
||||
r_length = []
|
||||
tmp = []
|
||||
tmp_expanded = []
|
||||
for r, r_name in enumerate(self.e_content[e_name]):
|
||||
tmp.append(self.deltas[r_name] + self.r_values[r_name])
|
||||
if expand:
|
||||
tmp.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name]) + self.r_values[r_name])
|
||||
tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name]) + self.r_values[r_name])
|
||||
r_length.append(len(tmp_expanded[-1]))
|
||||
else:
|
||||
tmp.append(self.deltas[r_name] + self.r_values[r_name])
|
||||
r_length.append(len(tmp[-1]))
|
||||
r_length.append(len(tmp[-1]))
|
||||
e_N = np.sum(r_length)
|
||||
x = np.arange(e_N)
|
||||
y = np.concatenate(tmp, axis=0)
|
||||
y_test = np.concatenate(tmp, axis=0)
|
||||
if expand:
|
||||
y = np.concatenate(tmp_expanded, axis=0)
|
||||
else:
|
||||
y = y_test
|
||||
plt.errorbar(x, y, fmt='.', markersize=3)
|
||||
plt.xlim(-0.5, e_N - 0.5)
|
||||
plt.title(e_name)
|
||||
plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})')
|
||||
plt.draw()
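The hunk above belongs to the Monte Carlo history plot (plot_history in current pyerrors; the method name itself is not visible in this hunk). A hedged sketch of the new expand switch:

import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(1.0, 0.1, 500)], ['ensA'], idl=[range(1, 1000, 2)])
obs.gamma_method()
obs.plot_history(expand=True)   # pad the gaps in the idl so the x-axis counts configurations
obs.plot_history(expand=False)  # plot only the stored samples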
|
||||
|
||||
def plot_piechart(self):
|
||||
|
@ -587,22 +597,32 @@ class Obs:
|
|||
|
||||
return dict(zip(self.e_names, sizes))
|
||||
|
||||
def dump(self, name, **kwargs):
|
||||
"""Dump the Obs to a pickle file 'name'.
|
||||
def dump(self, filename, datatype="json.gz", **kwargs):
|
||||
"""Dump the Obs to a file 'name' of chosen format.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name : str
|
||||
filename : str
|
||||
name of the file to be saved.
|
||||
datatype : str
|
||||
Format of the exported file. Supported formats include
|
||||
"json.gz" and "pickle"
|
||||
path : str
|
||||
specifies a custom path for the file (default '.')
|
||||
"""
|
||||
if 'path' in kwargs:
|
||||
file_name = kwargs.get('path') + '/' + name + '.p'
|
||||
file_name = kwargs.get('path') + '/' + filename
|
||||
else:
|
||||
file_name = name + '.p'
|
||||
with open(file_name, 'wb') as fb:
|
||||
pickle.dump(self, fb)
|
||||
file_name = filename
|
||||
|
||||
if datatype == "json.gz":
|
||||
from .input.json import dump_to_json
|
||||
dump_to_json([self], file_name)
|
||||
elif datatype == "pickle":
|
||||
with open(file_name + '.p', 'wb') as fb:
|
||||
pickle.dump(self, fb)
|
||||
else:
|
||||
raise Exception("Unknown datatype " + str(datatype))
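Given the reworked dump method above, both export formats round-trip as follows; the file name is a placeholder and load_object / load_json are the counterparts used in the test suite below.

import pyerrors as pe

obs = pe.pseudo_Obs(1.0, 0.1, 'ensA')
obs.dump('my_obs')                      # default datatype: writes my_obs.json.gz
obs.dump('my_obs', datatype="pickle")   # writes my_obs.p
from_json = pe.input.json.load_json('my_obs')
from_pickle = pe.load_object('my_obs.p')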
|
||||
|
||||
def export_jackknife(self):
|
||||
"""Export jackknife samples from the Obs
|
||||
|
@ -799,9 +819,6 @@ class Obs:
|
|||
def arctanh(self):
|
||||
return derived_observable(lambda x: anp.arctanh(x[0]), [self])
|
||||
|
||||
def sinc(self):
|
||||
return derived_observable(lambda x: anp.sinc(x[0]), [self])
|
||||
|
||||
|
||||
class CObs:
|
||||
"""Class for a complex valued observable."""
|
||||
|
@ -1106,14 +1123,7 @@ def derived_observable(func, data, array_mode=False, **kwargs):
|
|||
raise Exception('Multi mode currently not supported for numerical derivative')
|
||||
options = {
|
||||
'base_step': 0.1,
|
||||
'step_ratio': 2.5,
|
||||
'num_steps': None,
|
||||
'step_nom': None,
|
||||
'offset': None,
|
||||
'num_extrap': None,
|
||||
'use_exact_steps': None,
|
||||
'check_num_steps': None,
|
||||
'scale': None}
|
||||
'step_ratio': 2.5}
|
||||
for key in options.keys():
|
||||
kwarg = kwargs.get(key)
|
||||
if kwarg is not None:
|
||||
|
|
|
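After the cleanup above only base_step and step_ratio remain as preset numdifftools options; they can still be overridden through kwargs when numerical differentiation is requested. A sketch mirroring the calls in the test suite:

import autograd.numpy as anp
import pyerrors as pe

a = pe.pseudo_Obs(2.0, 0.1, 'ensA')
o_ad = pe.derived_observable(lambda x, **kw: anp.sin(x[0] ** 2), [a])  # automatic derivative
o_fd = pe.derived_observable(lambda x, **kw: anp.sin(x[0] ** 2), [a],
                             num_grad=True, base_step=0.1, step_ratio=2.5)  # numdifftools
o_ad.gamma_method()
o_fd.gamma_method()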
@ -1 +1 @@
|
|||
__version__ = "2.0.0"
|
||||
__version__ = "2.0.0+dev"
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
[pytest]
|
||||
filterwarnings =
|
||||
ignore::RuntimeWarning:autograd.*:
|
||||
ignore::RuntimeWarning:numdifftools.*:
|
2
setup.py
|
@ -3,7 +3,7 @@
|
|||
from setuptools import setup, find_packages
|
||||
|
||||
setup(name='pyerrors',
|
||||
version='2.0.0',
|
||||
version='2.0.0+dev',
|
||||
description='Error analysis for lattice QCD',
|
||||
author='Fabian Joswig',
|
||||
author_email='fabian.joswig@ed.ac.uk',
|
||||
|
|
|
@ -31,6 +31,13 @@ def test_function_overloading():
|
|||
assert np.isclose(con[0].dvalue, t2.dvalue)
|
||||
assert np.allclose(con[0].deltas['t'], t2.deltas['t'])
|
||||
|
||||
np.arcsin(corr_a)
|
||||
np.arccos(corr_a)
|
||||
np.arctan(corr_a)
|
||||
np.arcsinh(corr_a)
|
||||
np.arccosh(corr_a + 1.1)
|
||||
np.arctanh(corr_a)
|
||||
|
||||
|
||||
def test_modify_correlator():
|
||||
corr_content = []
|
||||
|
@ -38,24 +45,58 @@ def test_modify_correlator():
|
|||
exponent = np.random.normal(3, 5)
|
||||
corr_content.append(pe.pseudo_Obs(2 + 10 ** exponent, 10 ** (exponent - 1), 't'))
|
||||
|
||||
corr = pe.correlators.Corr(corr_content)
|
||||
corr = pe.Corr(corr_content)
|
||||
|
||||
with pytest.warns(RuntimeWarning):
|
||||
corr.symmetric()
|
||||
with pytest.warns(RuntimeWarning):
|
||||
corr.anti_symmetric()
|
||||
corr.roll(np.random.randint(100))
|
||||
corr.deriv(symmetric=True)
|
||||
corr.deriv(symmetric=False)
|
||||
corr.second_deriv()
|
||||
|
||||
for pad in [0, 2]:
|
||||
corr = pe.Corr(corr_content, padding=[pad, pad])
|
||||
corr.roll(np.random.randint(100))
|
||||
corr.deriv(variant="forward")
|
||||
corr.deriv(variant="symmetric")
|
||||
corr.deriv(variant="improved")
|
||||
corr.deriv().deriv()
|
||||
corr.second_deriv(variant="symmetric")
|
||||
corr.second_deriv(variant="improved")
|
||||
corr.second_deriv().second_deriv()
|
||||
|
||||
for i, e in enumerate(corr.content):
|
||||
corr.content[i] = None
|
||||
|
||||
for func in [pe.Corr.deriv, pe.Corr.second_deriv]:
|
||||
for variant in ["symmetric", "improved", "forward", "gibberish", None]:
|
||||
with pytest.raises(Exception):
|
||||
func(corr, variant=variant)
|
||||
|
||||
|
||||
def test_deriv():
|
||||
corr_content = []
|
||||
for t in range(24):
|
||||
exponent = 1.2
|
||||
corr_content.append(pe.pseudo_Obs(2 + t ** exponent, 0.2, 't'))
|
||||
|
||||
corr = pe.Corr(corr_content)
|
||||
|
||||
forward = corr.deriv(variant="forward")
|
||||
backward = corr.deriv(variant="backward")
|
||||
sym = corr.deriv(variant="symmetric")
|
||||
assert np.all([o == 0 for o in (0.5 * (forward + backward) - sym)[1:-1]])
|
||||
assert np.all([o == 0 for o in (corr.deriv('forward').deriv('backward') - corr.second_deriv())[1:-1]])
|
||||
assert np.all([o == 0 for o in (corr.deriv('backward').deriv('forward') - corr.second_deriv())[1:-1]])
|
||||
|
||||
|
||||
def test_m_eff():
|
||||
my_corr = pe.correlators.Corr([pe.pseudo_Obs(10, 0.1, 't'), pe.pseudo_Obs(9, 0.05, 't'), pe.pseudo_Obs(8, 0.1, 't'), pe.pseudo_Obs(7, 0.05, 't')])
|
||||
my_corr.m_eff('log')
|
||||
my_corr.m_eff('cosh')
|
||||
my_corr.m_eff('sinh')
|
||||
my_corr.m_eff('arccosh')
|
||||
for padding in [0, 4]:
|
||||
my_corr = pe.correlators.Corr([pe.pseudo_Obs(10, 0.1, 't'), pe.pseudo_Obs(9, 0.05, 't'), pe.pseudo_Obs(9, 0.1, 't'), pe.pseudo_Obs(10, 0.05, 't')], padding=[padding, padding])
|
||||
my_corr.m_eff('log')
|
||||
my_corr.m_eff('cosh')
|
||||
my_corr.m_eff('arccosh')
|
||||
|
||||
with pytest.warns(RuntimeWarning):
|
||||
my_corr.m_eff('sinh')
|
||||
|
||||
|
||||
def test_reweighting():
|
||||
|
@ -99,6 +140,31 @@ def test_plateau():
|
|||
with pytest.raises(Exception):
|
||||
my_corr.plateau()
|
||||
|
||||
|
||||
def test_padded_correlator():
|
||||
my_list = [pe.Obs([np.random.normal(1.0, 0.1, 100)], ['ens1']) for o in range(8)]
|
||||
my_corr = pe.Corr(my_list, padding=[7, 3])
|
||||
my_corr.reweighted
|
||||
[o for o in my_corr]
|
||||
|
||||
|
||||
def test_corr_exceptions():
|
||||
obs_a = pe.Obs([np.random.normal(0.1, 0.1, 100)], ['test'])
|
||||
obs_b = pe.Obs([np.random.normal(0.1, 0.1, 99)], ['test'])
|
||||
with pytest.raises(Exception):
|
||||
pe.Corr([obs_a, obs_b])
|
||||
|
||||
obs_a = pe.Obs([np.random.normal(0.1, 0.1, 100)], ['test'])
|
||||
obs_b = pe.Obs([np.random.normal(0.1, 0.1, 100)], ['test'], idl=[range(1, 200, 2)])
|
||||
with pytest.raises(Exception):
|
||||
pe.Corr([obs_a, obs_b])
|
||||
|
||||
obs_a = pe.Obs([np.random.normal(0.1, 0.1, 100)], ['test'])
|
||||
obs_b = pe.Obs([np.random.normal(0.1, 0.1, 100)], ['test2'])
|
||||
with pytest.raises(Exception):
|
||||
pe.Corr([obs_a, obs_b])
|
||||
|
||||
|
||||
def test_utility():
|
||||
corr_content = []
|
||||
for t in range(8):
|
||||
|
@ -110,10 +176,68 @@ def test_utility():
|
|||
corr.print([2, 4])
|
||||
corr.show()
|
||||
|
||||
corr.dump('test_dump')
|
||||
corr.dump('test_dump', datatype="pickle", path='.')
|
||||
corr.dump('test_dump', datatype="pickle")
|
||||
new_corr = pe.load_object('test_dump.p')
|
||||
os.remove('test_dump.p')
|
||||
for o_a, o_b in zip(corr.content, new_corr.content):
|
||||
assert np.isclose(o_a[0].value, o_b[0].value)
|
||||
assert np.isclose(o_a[0].dvalue, o_b[0].dvalue)
|
||||
assert np.allclose(o_a[0].deltas['t'], o_b[0].deltas['t'])
|
||||
|
||||
corr.dump('test_dump', datatype="json.gz", path='.')
|
||||
corr.dump('test_dump', datatype="json.gz")
|
||||
new_corr = pe.input.json.load_json('test_dump')
|
||||
os.remove('test_dump.json.gz')
|
||||
for o_a, o_b in zip(corr.content, new_corr.content):
|
||||
assert np.isclose(o_a[0].value, o_b[0].value)
|
||||
assert np.isclose(o_a[0].dvalue, o_b[0].dvalue)
|
||||
assert np.allclose(o_a[0].deltas['t'], o_b[0].deltas['t'])
|
||||
|
||||
|
||||
def test_matrix_corr():
|
||||
def _gen_corr(val):
|
||||
corr_content = []
|
||||
for t in range(16):
|
||||
corr_content.append(pe.pseudo_Obs(val, 0.1, 't', 2000))
|
||||
|
||||
return pe.correlators.Corr(corr_content)
|
||||
|
||||
corr_aa = _gen_corr(1)
|
||||
corr_ab = _gen_corr(0.5)
|
||||
|
||||
corr_mat = pe.Corr(np.array([[corr_aa, corr_ab], [corr_ab, corr_aa]]))
|
||||
corr_mat.smearing(0, 0)
|
||||
|
||||
vec_0 = corr_mat.GEVP(0, 0)
|
||||
vec_1 = corr_mat.GEVP(0, 0, state=1)
|
||||
|
||||
corr_0 = corr_mat.projected(vec_0)
|
||||
corr_1 = corr_mat.projected(vec_1)
|
||||
|
||||
assert np.all([o == 0 for o in corr_0 - corr_aa])
|
||||
assert np.all([o == 0 for o in corr_1 - corr_aa])
|
||||
|
||||
corr_mat.GEVP(0, 0, sorted_list="Eigenvalue")
|
||||
corr_mat.GEVP(0, 0, sorted_list="Eigenvector")
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_mat.plottable()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_mat.show()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_mat.m_eff()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_mat.Hankel()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_mat.plateau()
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_mat.plateau([2, 4])
|
||||
|
||||
with pytest.raises(Exception):
|
||||
corr_0.smearing(0, 0)
|
||||
|
|
|
@ -32,3 +32,32 @@ def test_grid_dirac():
|
|||
pe.dirac.Grid_gamma(gamma)
|
||||
with pytest.raises(Exception):
|
||||
pe.dirac.Grid_gamma('Not a gamma matrix')
|
||||
|
||||
|
||||
def test_epsilon_tensor():
|
||||
check = {(1, 2, 3) : 1.0,
|
||||
(3, 1, 2) : 1.0,
|
||||
(2, 3, 1) : 1.0,
|
||||
(1, 1, 1) : 0.0,
|
||||
(3, 2, 1) : -1.0,
|
||||
(1, 3, 2) : -1.0,
|
||||
(1, 1, 3) : 0.0}
|
||||
for key, value in check.items():
|
||||
assert pe.dirac.epsilon_tensor(*key) == value
|
||||
with pytest.raises(Exception):
|
||||
pe.dirac.epsilon_tensor(0, 1, 3)
|
||||
|
||||
|
||||
def test_epsilon_tensor_rank4():
|
||||
check = {(1, 4, 3, 2) : -1.0,
|
||||
(1, 2, 3, 4) : 1.0,
|
||||
(2, 1, 3, 4) : -1.0,
|
||||
(4, 3, 2, 1) : 1.0,
|
||||
(3, 2, 4, 3) : 0.0,
|
||||
(0, 1, 2, 3) : 1.0,
|
||||
(1, 1, 1, 1) : 0.0,
|
||||
(1, 2, 3, 1) : 0.0}
|
||||
for key, value in check.items():
|
||||
assert pe.dirac.epsilon_tensor_rank4(*key) == value
|
||||
with pytest.raises(Exception):
|
||||
pe.dirac.epsilon_tensor_rank4(0, 1, 3, 4)
|
||||
|
|
|
@ -84,6 +84,33 @@ def test_least_squares():
|
|||
assert math.isclose(pe.covariance(betac[0], betac[1]), pcov[0, 1], abs_tol=1e-3)
|
||||
|
||||
|
||||
def test_alternative_solvers():
|
||||
dim = 192
|
||||
x = np.arange(dim)
|
||||
y = 2 * np.exp(-0.06 * x) + np.random.normal(0.0, 0.15, dim)
|
||||
yerr = 0.1 + 0.1 * np.random.rand(dim)
|
||||
|
||||
oy = []
|
||||
for i, item in enumerate(x):
|
||||
oy.append(pe.pseudo_Obs(y[i], yerr[i], 'test'))
|
||||
|
||||
def func(a, x):
|
||||
y = a[0] * np.exp(-a[1] * x)
|
||||
return y
|
||||
|
||||
chisquare_values = []
|
||||
out = pe.least_squares(x, oy, func, method='migrad')
|
||||
chisquare_values.append(out.chisquare)
|
||||
out = pe.least_squares(x, oy, func, method='Powell')
|
||||
chisquare_values.append(out.chisquare)
|
||||
out = pe.least_squares(x, oy, func, method='Nelder-Mead')
|
||||
chisquare_values.append(out.chisquare)
|
||||
out = pe.least_squares(x, oy, func, method='Levenberg-Marquardt')
|
||||
chisquare_values.append(out.chisquare)
|
||||
chisquare_values = np.array(chisquare_values)
|
||||
assert np.all(np.isclose(chisquare_values, chisquare_values[0]))
|
||||
|
||||
|
||||
def test_correlated_fit():
|
||||
num_samples = 400
|
||||
N = 10
|
||||
|
@ -93,10 +120,9 @@ def test_correlated_fit():
|
|||
r = np.zeros((N, N))
|
||||
for i in range(N):
|
||||
for j in range(N):
|
||||
r[i, j] = np.exp(-0.1 * np.fabs(i - j))
|
||||
r[i, j] = np.exp(-0.8 * np.fabs(i - j))
|
||||
|
||||
errl = np.sqrt([3.4, 2.5, 3.6, 2.8, 4.2, 4.7, 4.9, 5.1, 3.2, 4.2])
|
||||
errl *= 4
|
||||
for i in range(N):
|
||||
for j in range(N):
|
||||
r[i, j] *= errl[i] * errl[j]
|
||||
|
@ -127,7 +153,7 @@ def test_correlated_fit():
|
|||
for i in range(2):
|
||||
diff = fitp[i] - fitpc[i]
|
||||
diff.gamma_method()
|
||||
assert(diff.is_zero_within_error(sigma=1.5))
|
||||
assert(diff.is_zero_within_error(sigma=5))
|
||||
|
||||
|
||||
def test_total_least_squares():
|
||||
|
@ -310,6 +336,25 @@ def test_error_band():
|
|||
pe.fits.error_band(x, f, fitp.fit_parameters)
|
||||
|
||||
|
||||
def test_ks_test():
|
||||
def f(a, x):
|
||||
y = a[0] + a[1] * x
|
||||
return y
|
||||
|
||||
fit_res = []
|
||||
|
||||
for i in range(20):
|
||||
data = []
|
||||
for j in range(10):
|
||||
data.append(pe.pseudo_Obs(j + np.random.normal(0.0, 0.25), 0.25, 'test'))
|
||||
my_corr = pe.Corr(data)
|
||||
|
||||
fit_res.append(my_corr.fit(f, silent=True))
|
||||
|
||||
pe.fits.ks_test()
|
||||
pe.fits.ks_test(fit_res)
|
||||
|
||||
|
||||
def fit_general(x, y, func, silent=False, **kwargs):
|
||||
"""Performs a non-linear fit to y = func(x) and returns a list of Obs corresponding to the fit parameters.
|
||||
|
||||
|
|
|
@ -1,136 +0,0 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"This file is used for testing some of the input methods."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os,sys,inspect\n",
|
||||
"current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))\n",
|
||||
"parent_dir = os.path.dirname(current_dir)\n",
|
||||
"sys.path.insert(0, parent_dir) \n",
|
||||
"\n",
|
||||
"import pyerrors as pe\n",
|
||||
"import pyerrors.input.openQCD as qcdin\n",
|
||||
"import pyerrors.input.sfcf as sfin\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"First, we will have a look at the input method for the topological charge $Q_{top}$, which is measured by the program ms from the openQCD package. For now, this part still in the making and depends on an actual file. Later, this should be changed to a more efficient way of making a proper input file.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"['T29L20k0.13719r2.ms.dat', 'T29L20k0.13719r3.ms.dat', 'T29L20k0.13719r1.ms.dat', 'T29L20k0.13719r4.ms.dat']\n",
|
||||
"dn: 10\n",
|
||||
"nn: 60\n",
|
||||
"tmax: 30\n",
|
||||
"eps: 0.02\n",
|
||||
"max_t: 12.0\n",
|
||||
"t_aim: 6.125\n",
|
||||
"index_aim: 31\n",
|
||||
"T29L20k0.13719r2\n",
|
||||
"dn: 10\n",
|
||||
"nn: 60\n",
|
||||
"tmax: 30\n",
|
||||
"eps: 0.02\n",
|
||||
"max_t: 12.0\n",
|
||||
"t_aim: 6.125\n",
|
||||
"index_aim: 31\n",
|
||||
"T29L20k0.13719r3\n",
|
||||
"dn: 10\n",
|
||||
"nn: 60\n",
|
||||
"tmax: 30\n",
|
||||
"eps: 0.02\n",
|
||||
"max_t: 12.0\n",
|
||||
"t_aim: 6.125\n",
|
||||
"index_aim: 31\n",
|
||||
"T29L20k0.13719r1\n",
|
||||
"dn: 10\n",
|
||||
"nn: 60\n",
|
||||
"tmax: 30\n",
|
||||
"eps: 0.02\n",
|
||||
"max_t: 12.0\n",
|
||||
"t_aim: 6.125\n",
|
||||
"index_aim: 31\n",
|
||||
"T29L20k0.13719r4\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"r_qtop = qcdin.read_qtop(\"../../test_data\", prefix = \"T29L20k0.13719\",full = True, r_stop = [500,440,447,410])#, files = [\"T29L20k0.13719r1.ms.dat\"], )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{'T29L20k0.13719|r1': 500, 'T29L20k0.13719|r2': 440, 'T29L20k0.13719|r3': 447, 'T29L20k0.13719|r4': 410}\n",
|
||||
"0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -2 -2 -2 -2 -3 -3 -3 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 -1 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 -1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 "
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(r_qtop.shape)\n",
|
||||
"#print(r_qtop.deltas['T29L20k0.13719|r1'])\n",
|
||||
"for i in r_qtop.deltas['T29L20k0.13719|r2']:\n",
|
||||
" print(round(r_qtop.value + i), end =\" \")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"interpreter": {
|
||||
"hash": "916dbcbb3f70747c44a77c7bcd40155683ae19c65e1c03b4aa3499c5328201f1"
|
||||
},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3.9.7 64-bit",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.7"
|
||||
},
|
||||
"orig_nbformat": 4
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
153
tests/io_test.py
|
@ -3,6 +3,7 @@ import gzip
|
|||
import numpy as np
|
||||
import pyerrors as pe
|
||||
import pyerrors.input.json as jsonio
|
||||
import pytest
|
||||
|
||||
|
||||
def test_jsonio():
|
||||
|
@ -89,3 +90,155 @@ def test_json_string_reconstruction():
|
|||
|
||||
assert reconstructed_string == json_string
|
||||
assert my_obs == reconstructed_obs2
|
||||
|
||||
|
||||
def test_json_corr_io():
|
||||
my_list = [pe.Obs([np.random.normal(1.0, 0.1, 100)], ['ens1']) for o in range(8)]
|
||||
rw_list = pe.reweight(pe.Obs([np.random.normal(1.0, 0.1, 100)], ['ens1']), my_list)
|
||||
|
||||
for obs_list in [my_list, rw_list]:
|
||||
for tag in [None, "test"]:
|
||||
obs_list[3].tag = tag
|
||||
for pad in [0, 2]:
|
||||
for corr_tag in [None, 'my_Corr_tag']:
|
||||
for prange in [None, [3, 6]]:
|
||||
for gap in [False, True]:
|
||||
my_corr = pe.Corr(obs_list, padding=[pad, pad], prange=prange)
|
||||
my_corr.tag = corr_tag
|
||||
if gap:
|
||||
my_corr.content[4] = None
|
||||
pe.input.json.dump_to_json(my_corr, 'corr')
|
||||
recover = pe.input.json.load_json('corr')
|
||||
os.remove('corr.json.gz')
|
||||
assert np.all([o.is_zero() for o in [x for x in (my_corr - recover) if x is not None]])
|
||||
for index, entry in enumerate(my_corr):
|
||||
if entry is None:
|
||||
assert recover[index] is None
|
||||
assert my_corr.tag == recover.tag
|
||||
assert my_corr.prange == recover.prange
|
||||
assert my_corr.reweighted == recover.reweighted
|
||||
|
||||
|
||||
def test_json_corr_2d_io():
|
||||
obs_list = [np.array([[pe.pseudo_Obs(1.0 + i, 0.1 * i, 'test'), pe.pseudo_Obs(0.0, 0.1 * i, 'test')], [pe.pseudo_Obs(0.0, 0.1 * i, 'test'), pe.pseudo_Obs(1.0 + i, 0.1 * i, 'test')]]) for i in range(4)]
|
||||
|
||||
for tag in [None, "test"]:
|
||||
obs_list[3][0, 1].tag = tag
|
||||
for padding in [0, 1]:
|
||||
for prange in [None, [3, 6]]:
|
||||
my_corr = pe.Corr(obs_list, padding=[padding, padding], prange=prange)
|
||||
my_corr.tag = tag
|
||||
pe.input.json.dump_to_json(my_corr, 'corr')
|
||||
recover = pe.input.json.load_json('corr')
|
||||
os.remove('corr.json.gz')
|
||||
assert np.all([np.all([o.is_zero() for o in q]) for q in [x.ravel() for x in (my_corr - recover) if x is not None]])
|
||||
for index, entry in enumerate(my_corr):
|
||||
if entry is None:
|
||||
assert recover[index] is None
|
||||
assert my_corr.tag == recover.tag
|
||||
assert my_corr.prange == recover.prange
|
||||
|
||||
|
||||
def test_json_dict_io():
|
||||
def check_dict_equality(d1, d2):
|
||||
def dict_check_obs(d1, d2):
|
||||
for k, v in d1.items():
|
||||
if isinstance(v, dict):
|
||||
v = dict_check_obs(v, d2[k])
|
||||
elif isinstance(v, list) and all([isinstance(o, pe.Obs) for o in v]):
|
||||
for i in range(len(v)):
|
||||
assert((v[i] - d2[k][i]).is_zero())
|
||||
elif isinstance(v, list):
|
||||
v = list_check_obs(v, d2[k])
|
||||
elif isinstance(v, pe.Obs):
|
||||
assert((v - d2[k]).is_zero())
|
||||
elif isinstance(v, pe.Corr):
|
||||
for i in range(v.T):
|
||||
assert((v[i] - d2[k][i]).is_zero())
|
||||
elif isinstance(v, np.ndarray):
|
||||
a1 = np.ravel(v)
|
||||
a2 = np.ravel(d2[k])
|
||||
for i in range(len(a1)):
|
||||
assert((a1[i] - a2[i]).is_zero())
|
||||
|
||||
def list_check_obs(l1, l2):
|
||||
for ei in range(len(l1)):
|
||||
e = l1[ei]
|
||||
if isinstance(e, list):
|
||||
e = list_check_obs(e, l2[ei])
|
||||
elif isinstance(e, list) and all([isinstance(o, pe.Obs) for o in e]):
|
||||
for i in range(len(e)):
|
||||
assert((e[i] - l2[ei][i]).is_zero())
|
||||
elif isinstance(e, dict):
|
||||
e = dict_check_obs(e, l2[ei])
|
||||
elif isinstance(e, pe.Obs):
|
||||
assert((e - l2[ei]).is_zero())
|
||||
elif isinstance(e, pe.Corr):
|
||||
for i in range(e.T):
|
||||
assert((e[i] - l2[ei][i]).is_zero())
|
||||
elif isinstance(e, np.ndarray):
|
||||
a1 = np.ravel(e)
|
||||
a2 = np.ravel(l2[ei])
|
||||
for i in range(len(a1)):
|
||||
assert((a1[i] - a2[i]).is_zero())
|
||||
dict_check_obs(d1, d2)
|
||||
return True
|
||||
|
||||
od = {
|
||||
'l':
|
||||
{
|
||||
'a': pe.pseudo_Obs(1, .2, 'testa', samples=10),
|
||||
'b': [pe.pseudo_Obs(1.1, .1, 'test', samples=10), pe.pseudo_Obs(1.2, .1, 'test', samples=10), pe.pseudo_Obs(1.3, .1, 'test', samples=10)],
|
||||
'c': {
|
||||
'd': 1,
|
||||
'e': pe.pseudo_Obs(.2, .01, 'teste', samples=10),
|
||||
'f': pe.Corr([pe.pseudo_Obs(.1, .01, 'a', samples=10), pe.pseudo_Obs(.1, .01, 'a', samples=10), pe.pseudo_Obs(.1, .01, 'a', samples=10), pe.pseudo_Obs(.1, .01, 'a', samples=10)]),
|
||||
'g': np.reshape(np.asarray([pe.pseudo_Obs(.1, .01, 'a', samples=10), pe.pseudo_Obs(.1, .01, 'a', samples=10), pe.pseudo_Obs(.1, .01, 'a', samples=10), pe.pseudo_Obs(.1, .01, 'a', samples=10)]), (2, 2)),
|
||||
}
|
||||
},
|
||||
's':
|
||||
{
|
||||
'a': 'Infor123',
|
||||
'b': ['Some', 'list'],
|
||||
'd': pe.pseudo_Obs(.01, .001, 'testd', samples=10) * pe.cov_Obs(1, .01, 'cov1'),
|
||||
'se': None,
|
||||
'sf': 1.2,
|
||||
}
|
||||
}
|
||||
|
||||
fname = 'test_rw'
|
||||
|
||||
desc = 'This is a random description'
|
||||
|
||||
with pytest.raises(Exception):
|
||||
jsonio.dump_dict_to_json(od, fname, description=desc, reps='|Test')
|
||||
|
||||
jsonio.dump_dict_to_json(od, fname, description=desc, reps='TEST')
|
||||
nd = jsonio.load_json_dict(fname, full_output=True, reps='TEST')
|
||||
|
||||
with pytest.raises(Exception):
|
||||
nd = jsonio.load_json_dict(fname, full_output=True)
|
||||
|
||||
jsonio.dump_dict_to_json(od, fname, description=desc)
|
||||
nd = jsonio.load_json_dict(fname, full_output=True)
|
||||
assert (desc == nd['description'])
|
||||
|
||||
assert(check_dict_equality(od, nd['obsdata']))
|
||||
nd = jsonio.load_json_dict(fname, full_output=False)
|
||||
assert(check_dict_equality(od, nd))
|
||||
|
||||
nl = jsonio.load_json(fname, full_output=True)
|
||||
nl = jsonio.load_json(fname, full_output=False)
|
||||
|
||||
with pytest.raises(Exception):
|
||||
jsonio.dump_dict_to_json(nl, fname, description=desc)
|
||||
|
||||
od['k'] = 'DICTOBS2'
|
||||
with pytest.raises(Exception):
|
||||
jsonio.dump_dict_to_json(od, fname, description=desc)
|
||||
|
||||
od['k'] = ['DICTOBS2']
|
||||
with pytest.raises(Exception):
|
||||
jsonio.dump_dict_to_json(od, fname, description=desc)
|
||||
|
||||
os.remove(fname + '.json.gz')
|
||||
|
|
|
@ -314,6 +314,8 @@ def test_matrix_functions():
|
|||
# Check determinant
|
||||
assert pe.linalg.det(np.diag(np.diag(matrix))) == np.prod(np.diag(matrix))
|
||||
|
||||
pe.linalg.pinv(matrix[:,:3])
|
||||
|
||||
|
||||
def test_complex_matrix_operations():
|
||||
dimension = 4
|
||||
|
|
14
tests/mpm_test.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
import numpy as np
|
||||
import pyerrors as pe
|
||||
import pytest
|
||||
|
||||
np.random.seed(0)
|
||||
|
||||
|
||||
def test_mpm():
|
||||
corr_content = []
|
||||
for t in range(8):
|
||||
f = 0.8 * np.exp(-0.4 * t)
|
||||
corr_content.append(pe.pseudo_Obs(np.random.normal(f, 1e-2 * f), 1e-2 * f, 't'))
|
||||
|
||||
res = pe.mpm.matrix_pencil_method(corr_content)
|
|
@ -57,10 +57,16 @@ def test_dump():
|
|||
value = np.random.normal(5, 10)
|
||||
dvalue = np.abs(np.random.normal(0, 1))
|
||||
test_obs = pe.pseudo_Obs(value, dvalue, 't')
|
||||
test_obs.dump('test_dump')
|
||||
test_obs.dump('test_dump', datatype="pickle", path=".")
|
||||
test_obs.dump('test_dump', datatype="pickle")
|
||||
new_obs = pe.load_object('test_dump.p')
|
||||
os.remove('test_dump.p')
|
||||
assert test_obs.deltas['t'].all() == new_obs.deltas['t'].all()
|
||||
assert test_obs == new_obs
|
||||
test_obs.dump('test_dump', datatype="json.gz", path=".")
|
||||
test_obs.dump('test_dump', datatype="json.gz")
|
||||
new_obs = pe.input.json.load_json("test_dump")
|
||||
os.remove('test_dump.json.gz')
|
||||
assert test_obs == new_obs
|
||||
|
||||
|
||||
def test_comparison():
|
||||
|
@ -105,6 +111,12 @@ def test_function_overloading():
|
|||
assert np.sqrt(b ** 2) == b
|
||||
assert np.sqrt(b) ** 2 == b
|
||||
|
||||
np.arcsin(1 / b)
|
||||
np.arccos(1 / b)
|
||||
np.arctan(1 / b)
|
||||
np.arctanh(1 / b)
|
||||
np.sinc(1 / b)
|
||||
|
||||
|
||||
def test_overloading_vectorization():
|
||||
a = np.random.randint(1, 100, 10)
|
||||
|
@ -292,14 +304,14 @@ def test_derived_observables():
|
|||
d_Obs_fd = pe.derived_observable(lambda x, **kwargs: x[0] * x[1] * np.sin(x[0] * x[1]), [test_obs, test_obs], num_grad=True)
|
||||
d_Obs_fd.gamma_method()
|
||||
|
||||
assert d_Obs_ad.value == d_Obs_fd.value
|
||||
assert d_Obs_ad == d_Obs_fd
|
||||
assert np.abs(4.0 * np.sin(4.0) - d_Obs_ad.value) < 1000 * np.finfo(np.float64).eps * np.abs(d_Obs_ad.value)
|
||||
assert np.abs(d_Obs_ad.dvalue-d_Obs_fd.dvalue) < 1000 * np.finfo(np.float64).eps * d_Obs_ad.dvalue
|
||||
|
||||
i_am_one = pe.derived_observable(lambda x, **kwargs: x[0] / x[1], [d_Obs_ad, d_Obs_ad])
|
||||
i_am_one.gamma_method()
|
||||
|
||||
assert i_am_one.value == 1.0
|
||||
assert i_am_one == 1.0
|
||||
assert i_am_one.dvalue < 2 * np.finfo(np.float64).eps
|
||||
assert i_am_one.e_dvalue['t'] <= 2 * np.finfo(np.float64).eps
|
||||
assert i_am_one.e_ddvalue['t'] <= 2 * np.finfo(np.float64).eps
|
||||
|
@ -428,6 +440,14 @@ def test_reweighting():
|
|||
assert r_obs[0].reweighted
|
||||
r_obs2 = r_obs[0] * my_obs
|
||||
assert r_obs2.reweighted
|
||||
my_covobs = pe.cov_Obs(1.0, 0.003, 'cov')
|
||||
with pytest.raises(Exception):
|
||||
pe.reweight(my_obs, [my_covobs])
|
||||
my_obs2 = pe.Obs([np.random.rand(1000)], ['t2'])
|
||||
with pytest.raises(Exception):
|
||||
pe.reweight(my_obs, [my_obs + my_obs2])
|
||||
with pytest.raises(Exception):
|
||||
pe.reweight(my_irregular_obs, [my_obs])
|
||||
|
||||
|
||||
def test_merge_obs():
|
||||
|
@ -436,6 +456,12 @@ def test_merge_obs():
|
|||
merged = pe.merge_obs([my_obs1, my_obs2])
|
||||
diff = merged - my_obs2 - my_obs1
|
||||
assert diff == -(my_obs1.value + my_obs2.value) / 2
|
||||
with pytest.raises(Exception):
|
||||
pe.merge_obs([my_obs1, my_obs1])
|
||||
my_covobs = pe.cov_Obs(1.0, 0.003, 'cov')
|
||||
with pytest.raises(Exception):
|
||||
pe.merge_obs([my_obs1, my_covobs])
|
||||
|
||||
|
||||
|
||||
def test_merge_obs_r_values():
|
||||
|
@ -468,6 +494,17 @@ def test_correlate():
|
|||
corr3 = pe.correlate(my_obs5, my_obs6)
|
||||
assert my_obs5.idl == corr3.idl
|
||||
|
||||
my_new_obs = pe.Obs([np.random.rand(100)], ['q3'])
|
||||
with pytest.raises(Exception):
|
||||
pe.correlate(my_obs1, my_new_obs)
|
||||
my_covobs = pe.cov_Obs(1.0, 0.003, 'cov')
|
||||
with pytest.raises(Exception):
|
||||
pe.correlate(my_covobs, my_covobs)
|
||||
r_obs = pe.reweight(my_obs1, [my_obs1])[0]
|
||||
with pytest.warns(RuntimeWarning):
|
||||
pe.correlate(r_obs, r_obs)
|
||||
|
||||
|
||||
|
||||
def test_irregular_error_propagation():
|
||||
obs_list = [pe.Obs([np.random.rand(100)], ['t']),
|
||||
|
@ -583,7 +620,7 @@ def test_covariance_symmetry():
|
|||
cov_ab = pe.covariance(test_obs1, a)
|
||||
cov_ba = pe.covariance(a, test_obs1)
|
||||
assert np.abs(cov_ab - cov_ba) <= 10 * np.finfo(np.float64).eps
|
||||
assert np.abs(cov_ab) < test_obs1.dvalue * test_obs2.dvalue * (1 + 10 * np.finfo(np.float64).eps)
|
||||
assert np.abs(cov_ab) < test_obs1.dvalue * a.dvalue * (1 + 10 * np.finfo(np.float64).eps)
|
||||
|
||||
|
||||
def test_empty_obs():
|
||||
|
|