Merge branch 'develop' into develop

JanNeuendorf 2022-01-18 16:21:59 +01:00 committed by GitHub
commit 5244e34d9e
8 changed files with 125 additions and 45 deletions


@@ -206,7 +206,7 @@ print(my_corr)
```
In case the correlation functions are not defined on the outermost timeslices, for example because of fixed boundary conditions, a padding can be introduced.
```python
my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3], padding_front=1, padding_back=1)
my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3], padding=[1, 1])
print(my_corr)
> x0/a Corr(x0/a)
> ------------------
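For readers of this diff, a minimal sketch of the renamed keyword; `obs_0` … `obs_3` stand for previously defined Obs exactly as in the documentation example above, and the printed time extent is only illustrative:
```python
import pyerrors as pe

# Old interface, removed by this commit:
#   pe.Corr([obs_0, obs_1, obs_2, obs_3], padding_front=1, padding_back=1)

# New interface: a single list [front, back]
my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3], padding=[1, 1])
print(my_corr.T)  # 6: four timeslices plus one padded (None) entry on each side
```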


@@ -19,46 +19,55 @@ class Corr:
to iterate over all timeslices for every operation. This is especially true, when dealing with smearing matrices.
The correlator can have two types of content: An Obs at every timeslice OR a GEVP
smearing matrix at every timeslice. Other dependency (eg. spacial) are not supported.
smearing matrix at every timeslice. Other dependency (eg. spatial) are not supported.
"""
def __init__(self, data_input, padding_front=0, padding_back=0, prange=None):
# All data_input should be a list of things at different timeslices. This needs to be verified
def __init__(self, data_input, padding=[0, 0], prange=None):
""" Initialize a Corr object.
Parameters
----------
data_input : list
list of Obs or list of arrays of Obs.
padding : list, optional
List with two entries where the first labels the padding
at the front of the correlator and the second the padding
at the back.
prange : list, optional
List containing the first and last timeslice of the plateau
region identified for this correlator.
"""
if not isinstance(data_input, list):
raise TypeError('Corr__init__ expects a list of timeslices.')
# data_input can have multiple shapes. The simplest one is a list of Obs.
# We check, if this is the case
if all([(isinstance(item, Obs) or isinstance(item, CObs)) for item in data_input]):
self.content = [np.asarray([item]) for item in data_input]
# Wrapping the Obs in an array ensures that the data structure is consistent with smearing matrices.
self.N = 1 # number of smearings
# data_input in the form [np.array(Obs,NxN)]
self.content = [np.asarray([item]) for item in data_input]
self.N = 1
elif all([isinstance(item, np.ndarray) or item is None for item in data_input]) and any([isinstance(item, np.ndarray) for item in data_input]):
self.content = data_input
noNull = [a for a in self.content if not (a is None)] # To check if the matrices are correct for all undefined elements
self.N = noNull[0].shape[0]
# The checks are now identical to the case above
if self.N > 1 and noNull[0].shape[0] != noNull[0].shape[1]:
raise Exception("Smearing matrices are not NxN")
if (not all([item.shape == noNull[0].shape for item in noNull])):
raise Exception("Items in data_input are not of identical shape." + str(noNull))
else: # In case its a list of something else.
else:
raise Exception("data_input contains item of wrong type")
self.tag = None
# We now apply some padding to our list. In case that our list represents a correlator of length T but is not defined at every value.
# An undefined timeslice is represented by the None object
self.content = [None] * padding_front + self.content + [None] * padding_back
self.T = len(self.content) # for convenience: will be used a lot
self.content = [None] * padding[0] + self.content + [None] * padding[1]
self.T = len(self.content)
# The attribute "range" [start,end] marks a range of two timeslices.
# This is useful for keeping track of plateaus and fitranges.
# The range can be inherited from other Corrs, if the operation should not alter a chosen range eg. multiplication with a constant.
self.prange = prange
self.gamma_method()
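As a quick illustration of the constructor described in the new docstring, a hedged, self-contained sketch; `pe.pseudo_Obs` and the ensemble name 'ensemble_A' are used only to create toy data and are not part of this change:
```python
import numpy as np
import pyerrors as pe

# Toy data: one pseudo observable per timeslice.
obs_list = [pe.pseudo_Obs(np.exp(-0.3 * t), 0.01, 'ensemble_A') for t in range(8)]

# Plain correlator with a plateau range handed over via prange.
corr = pe.Corr(obs_list, prange=[2, 6])

# GEVP-style correlator: one NxN matrix of Obs per timeslice (here 2x2).
mat_list = [np.array([[o, o], [o, o]]) for o in obs_list]
corr_matrix = pe.Corr(mat_list)

# Two undefined timeslices at the front are stored as None entries.
padded = pe.Corr(obs_list, padding=[2, 0])
print(padded.T, padded.content[0])  # 10 None
```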
@@ -406,7 +415,7 @@ class Corr:
newcontent.append(self.content[t + 1] - self.content[t])
if(all([x is None for x in newcontent])):
raise Exception("Derivative is undefined at all timeslices")
return Corr(newcontent, padding_back=1)
return Corr(newcontent, padding=[0, 1])
if symmetric:
newcontent = []
for t in range(1, self.T - 1):
@@ -416,7 +425,7 @@ class Corr:
newcontent.append(0.5 * (self.content[t + 1] - self.content[t - 1]))
if(all([x is None for x in newcontent])):
raise Exception('Derivative is undefined at all timeslices')
return Corr(newcontent, padding_back=1, padding_front=1)
return Corr(newcontent, padding=[1, 1])
def second_deriv(self):
"""Return the second derivative of the correlator with respect to x0."""
@@ -428,7 +437,7 @@ class Corr:
newcontent.append((self.content[t + 1] - 2 * self.content[t] + self.content[t - 1]))
if(all([x is None for x in newcontent])):
raise Exception("Derivative is undefined at all timeslices")
return Corr(newcontent, padding_back=1, padding_front=1)
return Corr(newcontent, padding=[1, 1])
def m_eff(self, variant='log', guess=1.0):
"""Returns the effective mass of the correlator as correlator object
@@ -456,7 +465,7 @@ class Corr:
if(all([x is None for x in newcontent])):
raise Exception('m_eff is undefined at all timeslices')
return np.log(Corr(newcontent, padding_back=1))
return np.log(Corr(newcontent, padding=[0, 1]))
elif variant in ['periodic', 'cosh', 'sinh']:
if variant in ['periodic', 'cosh']:
@@ -479,7 +488,7 @@ class Corr:
if(all([x is None for x in newcontent])):
raise Exception('m_eff is undefined at all timeslices')
return Corr(newcontent, padding_back=1)
return Corr(newcontent, padding=[0, 1])
elif variant == 'arccosh':
newcontent = []
@@ -490,7 +499,7 @@ class Corr:
newcontent.append((self.content[t + 1] + self.content[t - 1]) / (2 * self.content[t]))
if(all([x is None for x in newcontent])):
raise Exception("m_eff is undefined at all timeslices")
return np.arccosh(Corr(newcontent, padding_back=1, padding_front=1))
return np.arccosh(Corr(newcontent, padding=[1, 1]))
else:
raise Exception('Unknown variant.')
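To make the effect of the new padding argument on these derived quantities concrete, another hedged sketch (toy data again via `pe.pseudo_Obs`; only the padding behaviour matters here):
```python
import numpy as np
import pyerrors as pe

corr = pe.Corr([pe.pseudo_Obs(np.exp(-0.3 * t), 0.01, 'ensemble_A') for t in range(8)])

meff = corr.m_eff()         # variant='log' is undefined on the last timeslice -> padding=[0, 1]
curv = corr.second_deriv()  # undefined on both ends -> padding=[1, 1]

# The None padding keeps the time extent of the original correlator.
assert meff.T == curv.T == corr.T
print(meff.content[-1], curv.content[0])  # None None
```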


@@ -8,6 +8,7 @@ import platform
import warnings
from ..obs import Obs
from ..covobs import Covobs
from ..correlators import Corr
from .. import version as pyerrorsversion
@@ -19,7 +20,7 @@ def create_json_string(ol, description='', indent=1):
----------
ol : list
List of objects that will be exported. At the moment, these objects can be
either of: Obs, list, numpy.ndarray.
either of: Obs, list, numpy.ndarray, Corr.
All Obs inside a structure have to be defined on the same set of configurations.
description : str
Optional string that describes the contents of the json file.
@@ -173,6 +174,18 @@ def create_json_string(ol, description='', indent=1):
d['cdata'] = cdata
return d
def write_Corr_to_dict(my_corr):
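# A Corr is written as an Array of its defined (non-None) timeslices; the number of
# padded slices at the front and back and the correlator tag are appended to the
# Array's 'tag' list as the string 'front|back|tag'.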
front_padding = next(i for i, j in enumerate(my_corr.content) if np.all(j))
back_padding_start = front_padding + next((i for i, j in enumerate(my_corr.content[front_padding:]) if not np.all(j)), my_corr.T)
dat = write_Array_to_dict(np.array(my_corr.content[front_padding:back_padding_start]))
dat['type'] = 'Corr'
corr_meta_data = str(front_padding) + '|' + str(my_corr.T - back_padding_start) + '|' + str(my_corr.tag)
if 'tag' in dat.keys():
dat['tag'].append(corr_meta_data)
else:
dat['tag'] = [corr_meta_data]
return dat
if not isinstance(ol, list):
ol = [ol]
@@ -193,6 +206,10 @@ def create_json_string(ol, description='', indent=1):
d['obsdata'].append(write_List_to_dict(io))
elif isinstance(io, np.ndarray):
d['obsdata'].append(write_Array_to_dict(io))
elif isinstance(io, Corr):
d['obsdata'].append(write_Corr_to_dict(io))
else:
raise Exception("Unkown datatype.")
jsonstring = json.dumps(d, indent=indent, cls=my_encoder, ensure_ascii=False)
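Putting the new Corr branch together with the existing Array machinery, a round trip could look like the following hedged sketch; the file name 'corr_test', the tag and the toy data are illustrative only:
```python
import numpy as np
import pyerrors as pe
from pyerrors.input import json as jsonio

corr = pe.Corr([pe.pseudo_Obs(np.exp(-0.2 * t), 0.01, 'ensemble_A') for t in range(6)],
               padding=[1, 1])
corr.tag = 'vector-vector correlator'

# The padding and the tag travel through the extra 'tag' entry written by write_Corr_to_dict.
jsonio.dump_to_json(corr, 'corr_test', description='round trip check')
reconstructed = jsonio.load_json('corr_test')
assert reconstructed.T == corr.T and reconstructed.tag == corr.tag
```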
@@ -222,7 +239,7 @@ def dump_to_json(ol, fname, description='', indent=1, gz=True):
----------
ol : list
List of objects that will be exported. At the moment, these objects can be
either of: Obs, list, numpy.ndarray.
either of: Obs, list, numpy.ndarray, Corr.
All Obs inside a structure have to be defined on the same set of configurations.
fname : str
Filename of the output file.
@@ -255,7 +272,7 @@ def dump_to_json(ol, fname, description='', indent=1, gz=True):
def import_json_string(json_string, verbose=True, full_output=False):
"""Reconstruct a list of Obs or structures containing Obs from a json string.
The following structures are supported: Obs, list, numpy.ndarray
The following structures are supported: Obs, list, numpy.ndarray, Corr
If the list contains only one element, it is unpacked from the list.
Parameters
@@ -374,6 +391,22 @@ def import_json_string(json_string, verbose=True, full_output=False):
ret[-1].tag = taglist[i]
return np.reshape(ret, layout)
def get_Corr_from_dict(o):
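# The last entry of the 'tag' list encodes 'front_padding|back_padding|corr_tag';
# it is split off before the remaining dict is handed to get_Array_from_dict.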
taglist = o.get('tag')
corr_meta_data = taglist[-1].split('|')
padding_front = int(corr_meta_data[0])
padding_back = int(corr_meta_data[1])
corr_tag = corr_meta_data[2]
tmp_o = o
tmp_o['tag'] = taglist[:-1]
if len(tmp_o['tag']) == 0:
del tmp_o['tag']
dat = get_Array_from_dict(tmp_o)
my_corr = Corr(list(dat), padding=[padding_front, padding_back])
if corr_tag != 'None':
my_corr.tag = corr_tag
return my_corr
json_dict = json.loads(json_string)
prog = json_dict.get('program', '')
@@ -400,6 +433,10 @@ def import_json_string(json_string, verbose=True, full_output=False):
ol.append(get_List_from_dict(io))
elif io['type'] == 'Array':
ol.append(get_Array_from_dict(io))
elif io['type'] == 'Corr':
ol.append(get_Corr_from_dict(io))
else:
raise Exception("Unkown datatype.")
if full_output:
retd = {}
@@ -422,7 +459,7 @@ def import_json_string(json_string, verbose=True, full_output=False):
def load_json(fname, verbose=True, gz=True, full_output=False):
"""Import a list of Obs or structures containing Obs from a .json.gz file.
The following structures are supported: Obs, list, numpy.ndarray
The following structures are supported: Obs, list, numpy.ndarray, Corr
If the list contains only one element, it is unpacked from the list.
Parameters