mirror of
https://github.com/fjosw/pyerrors.git
synced 2025-03-15 06:40:24 +01:00
feat!: dump methods now export to the json.gz format by default. The pickle
format can be chosen via a parameter.
This commit is contained in:
parent
59eb1ee546
commit
9b52a9a615
6 changed files with 57 additions and 21 deletions
|
@ -320,7 +320,7 @@
|
|||
"outputs": [],
|
||||
"source": [
|
||||
"pcac_plateau.tag = \"O(a) improved PCAC mass extracted on the test ensemble\"\n",
|
||||
"pe.input.json.dump_to_json(pcac_plateau, \"pcac_plateau_test_ensemble\")"
|
||||
"pcac_plateau.dump(\"pcac_plateau_test_ensemble\", datatype=\"json.gz\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
|
@ -280,8 +280,8 @@ class Corr:
|
|||
.................
|
||||
C(t+(n-1)) c(t+n) ... c(t+2(n-1))
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
Parameters
|
||||
----------
|
||||
N : int
|
||||
Dimension of the Hankel matrix
|
||||
periodic : bool, optional
|
||||
|
@ -666,17 +666,29 @@ class Corr:
|
|||
|
||||
return
|
||||
|
||||
def dump(self, filename, datatype="json.gz", **kwargs):
    """Dumps the Corr into a file of chosen type

    Parameters
    ----------
    filename : str
        Name of the file to be saved.
    datatype : str
        Format of the exported file. Supported formats include
        "json.gz" and "pickle"
    path : str
        specifies a custom path for the file (default '.')
    """
    if datatype == "json.gz":
        from .input.json import dump_to_json
        # Prefix the optional custom path; fall back to the bare filename.
        target = kwargs.get('path') + '/' + filename if 'path' in kwargs else filename
        dump_to_json(self, target)
    elif datatype == "pickle":
        # dump_object interprets the 'path' keyword itself, so forward kwargs.
        dump_object(self, filename, **kwargs)
    else:
        raise Exception("Unknown datatype " + str(datatype))
|
||||
|
||||
def print(self, range=[0, None]):
|
||||
print(self.__repr__(range))
|
||||
|
|
|
@ -590,22 +590,32 @@ class Obs:
|
|||
|
||||
return dict(zip(self.e_names, sizes))
|
||||
|
||||
def dump(self, filename, datatype="json.gz", **kwargs):
    """Dump the Obs to a file 'name' of chosen format.

    Parameters
    ----------
    filename : str
        name of the file to be saved.
    datatype : str
        Format of the exported file. Supported formats include
        "json.gz" and "pickle"
    path : str
        specifies a custom path for the file (default '.')
    """
    # Resolve the target location once, before branching on the format.
    target = kwargs.get('path') + '/' + filename if 'path' in kwargs else filename

    if datatype == "json.gz":
        from .input.json import dump_to_json
        dump_to_json([self], target)
    elif datatype == "pickle":
        # The pickle branch appends the '.p' suffix itself.
        with open(target + '.p', 'wb') as fb:
            pickle.dump(self, fb)
    else:
        raise Exception("Unknown datatype " + str(datatype))
|
||||
|
||||
def export_jackknife(self):
|
||||
"""Export jackknife samples from the Obs
|
||||
|
|
|
@ -131,11 +131,20 @@ def test_utility():
|
|||
corr.print([2, 4])
|
||||
corr.show()
|
||||
|
||||
corr.dump('test_dump', path='.')
|
||||
corr.dump('test_dump')
|
||||
corr.dump('test_dump', datatype="pickle", path='.')
|
||||
corr.dump('test_dump', datatype="pickle")
|
||||
new_corr = pe.load_object('test_dump.p')
|
||||
os.remove('test_dump.p')
|
||||
for o_a, o_b in zip(corr.content, new_corr.content):
|
||||
assert np.isclose(o_a[0].value, o_b[0].value)
|
||||
assert np.isclose(o_a[0].dvalue, o_b[0].dvalue)
|
||||
assert np.allclose(o_a[0].deltas['t'], o_b[0].deltas['t'])
|
||||
|
||||
corr.dump('test_dump', datatype="json.gz", path='.')
|
||||
corr.dump('test_dump', datatype="json.gz")
|
||||
new_corr = pe.input.json.load_json('test_dump')
|
||||
os.remove('test_dump.json.gz')
|
||||
for o_a, o_b in zip(corr.content, new_corr.content):
|
||||
assert np.isclose(o_a[0].value, o_b[0].value)
|
||||
assert np.isclose(o_a[0].dvalue, o_b[0].dvalue)
|
||||
assert np.allclose(o_a[0].deltas['t'], o_b[0].deltas['t'])
|
||||
|
|
|
@ -111,7 +111,7 @@ def test_json_corr_io():
|
|||
|
||||
|
||||
def test_json_corr_2d_io():
|
||||
obs_list = [np.array([[pe.pseudo_Obs(1.0 + i, 0.1 * i, 'test'), pe.pseudo_Obs(0.0, 0.1 * i, 'test')], [pe.pseudo_Obs(0.0, 0.1 * i, 'test'), pe.pseudo_Obs(1.0 + i, 0.1 * i, 'test')]]) for i in range(8)]
|
||||
obs_list = [np.array([[pe.pseudo_Obs(1.0 + i, 0.1 * i, 'test'), pe.pseudo_Obs(0.0, 0.1 * i, 'test')], [pe.pseudo_Obs(0.0, 0.1 * i, 'test'), pe.pseudo_Obs(1.0 + i, 0.1 * i, 'test')]]) for i in range(4)]
|
||||
|
||||
for tag in [None, "test"]:
|
||||
obs_list[3][0, 1].tag = tag
|
||||
|
|
|
def test_dump():
    # Round-trip an Obs through both supported dump formats and verify the
    # reloaded object is identical to the original.
    value = np.random.normal(5, 10)
    dvalue = np.abs(np.random.normal(0, 1))
    test_obs = pe.pseudo_Obs(value, dvalue, 't')

    # pickle round trip (dump appends the '.p' suffix itself).
    test_obs.dump('test_dump', datatype="pickle")
    new_obs = pe.load_object('test_dump.p')
    os.remove('test_dump.p')
    assert test_obs.deltas['t'].all() == new_obs.deltas['t'].all()
    assert test_obs == new_obs

    # json.gz round trip.
    # Fix: the keyword was misspelled 'dataype', so it was silently swallowed
    # by **kwargs and the json.gz branch only ran because it is the default.
    test_obs.dump('test_dump', datatype="json.gz")
    new_obs = pe.input.json.load_json("test_dump")
    os.remove('test_dump.json.gz')
    assert test_obs == new_obs
|
||||
|
||||
|
||||
def test_comparison():
|
||||
|
|
Loading…
Add table
Reference in a new issue