mirror of https://github.com/fjosw/pyerrors.git
feat: function which extracts npr fourquark vertices now constructs Lorentz scalars.
parent 58ccd11e48
commit aec90803ef
1 changed file with 64 additions and 14 deletions
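As a quick orientation, a minimal usage sketch of the updated reader. The data directory, file stem and ensemble id are placeholders, and the import path assumes the reader lives in pyerrors' hadrons input module:

    from pyerrors.input.hadrons import read_Fourquark_hd5

    # Read the FourquarkFullyConnected vertices and combine them into the
    # Lorentz-scalar 'VA' and 'AV' structures (the new default of 'vertices').
    result = read_Fourquark_hd5('./hdf5_data', 'fourquark_run', 'ensemble_A',
                                vertices=['VA', 'AV'])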
@@ -224,7 +224,7 @@ def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
     return result_dict


-def read_Fourquark_hd5(path, filestem, ens_id, idl=None):
+def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
     """Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs

     Parameters
@@ -237,6 +237,8 @@ def read_Fourquark_hd5(path, filestem, ens_id, idl=None):
         name of the ensemble, required for internal bookkeeping
     idl : range
         If specified only configurations in the given range are read in.
+    vertices : list
+        Vertex functions to be extracted.
     """

     files, idx = _get_files(path, filestem, idl)
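For reference, the kind of values the new vertices argument is meant to accept follows from the checks in _get_lorentz_names added at the bottom of this diff (illustrative, not exhaustive):

    # Each entry is a two-character combination of 'S', 'P', 'V', 'A' (or 'T').
    accepted = ['VA', 'AV', 'VV', 'AA', 'SS', 'PP', 'SP']
    rejected = ['SV', 'PA']    # mixing S/P with V/A raises "... is not a Lorentz scalar"
    not_implemented = ['TT']   # raises "Tensor operators not yet implemented."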
@@ -244,6 +246,11 @@ def read_Fourquark_hd5(path, filestem, ens_id, idl=None):
     mom_in = None
     mom_out = None

+    vertex_names = []
+    for vertex in vertices:
+        vertex_names += _get_lorentz_names(vertex)
+    print(vertex_names)
+
     corr_data = {}

     tree = 'FourQuarkFullyConnected/FourQuarkFullyConnected_'
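With the default vertices=["VA", "AV"] the print statement above should show the eight gamma-name pairs returned by _get_lorentz_names (read off the helper below, not taken from an actual run):

    [('GammaX', 'GammaXGamma5'), ('GammaY', 'GammaYGamma5'),
     ('GammaZ', 'GammaZGamma5'), ('GammaT', 'GammaTGamma5'),
     ('GammaXGamma5', 'GammaX'), ('GammaYGamma5', 'GammaY'),
     ('GammaZGamma5', 'GammaZ'), ('GammaTGamma5', 'GammaT')]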
@@ -251,25 +258,35 @@ def read_Fourquark_hd5(path, filestem, ens_id, idl=None):
     for hd5_file in files:
         file = h5py.File(path + '/' + hd5_file, "r")

-        for i in range(1):
-            name = file[tree + str(i) + '/info'].attrs['gammaA'][0].decode('UTF-8') + '_' + file[tree + str(i) + '/info'].attrs['gammaB'][0].decode('UTF-8')
-            if name not in corr_data:
-                corr_data[name] = []
-            raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
-            corr_data[name].append(raw_data)
-            if mom_in is None:
-                mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(' '), dtype=int)
-            if mom_out is None:
-                mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(' '), dtype=int)
+        for i in range(32):
+            name = (file[tree + str(i) + '/info'].attrs['gammaA'][0].decode('UTF-8'), file[tree + str(i) + '/info'].attrs['gammaB'][0].decode('UTF-8'))
+            if name in vertex_names:
+                if name not in corr_data:
+                    corr_data[name] = []
+                raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
+                corr_data[name].append(raw_data)
+                if mom_in is None:
+                    mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(' '), dtype=int)
+                if mom_out is None:
+                    mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(' '), dtype=int)

         file.close()

+    intermediate_dict = {}
+
+    for vertex in vertices:
+        lorentz_names = _get_lorentz_names(vertex)
+        for v_name in lorentz_names:
+            if vertex not in intermediate_dict:
+                intermediate_dict[vertex] = np.array(corr_data[v_name])
+            else:
+                intermediate_dict[vertex] += np.array(corr_data[v_name])
+
     result_dict = {}

-    for key, data in corr_data.items():
-        local_data = np.array(data)
+    for key, data in intermediate_dict.items():

-        rolled_array = np.moveaxis(local_data, 0, 8)
+        rolled_array = np.moveaxis(data, 0, 8)

         matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
         for index in np.ndindex(rolled_array.shape[:-1]):
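The intermediate_dict loop above sums the four Lorentz components of each requested vertex into a single Lorentz-scalar entry. A self-contained toy version of that step (shapes and values are made up; the real arrays come from the hdf5 'corr' datasets):

    import numpy as np

    # Stand-in for corr_data after the file loop: one array per gamma-name pair of 'VA'.
    corr_data = {name: np.full((2, 2), 1.0 + 0.5j)
                 for name in [('GammaX', 'GammaXGamma5'), ('GammaY', 'GammaYGamma5'),
                              ('GammaZ', 'GammaZGamma5'), ('GammaT', 'GammaTGamma5')]}

    # Accumulate the four components, mirroring the intermediate_dict loop in the diff.
    intermediate_dict = {}
    for v_name in corr_data:
        if 'VA' not in intermediate_dict:
            intermediate_dict['VA'] = np.array(corr_data[v_name])
        else:
            intermediate_dict['VA'] += np.array(corr_data[v_name])

    print(intermediate_dict['VA'])  # every element equals (4+2j)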
@@ -281,3 +298,36 @@ def read_Fourquark_hd5(path, filestem, ens_id, idl=None):
         # result_dict[key] = Npr_matrix(matrix.swapaxes(1, 2).reshape((12, 12), order='F'), mom_in=mom_in, mom_out=mom_out)

     return result_dict
+
+
+def _get_lorentz_names(name):
+    assert len(name) == 2
+
+    res = []
+
+    if not set(name) <= set(['S', 'P', 'V', 'A', 'T']):
+        raise Exception("Name can only contain 'S', 'P', 'V', 'A' or 'T'")
+
+    if 'S' in name or 'P' in name:
+        if not set(name) <= set(['S', 'P']):
+            raise Exception("'" + name + "' is not a Lorentz scalar")
+
+        g_names = {'S': 'Identity',
+                   'P': 'Gamma5'}
+
+        res.append((g_names[name[0]], g_names[name[1]]))
+
+    elif 'T' in name:
+        if not set(name) <= set(['T']):
+            raise Exception("'" + name + "' is not a Lorentz scalar")
+        raise Exception("Tensor operators not yet implemented.")
+    else:
+        if not set(name) <= set(['V', 'A']):
+            raise Exception("'" + name + "' is not a Lorentz scalar")
+        lorentz_index = ['X', 'Y', 'Z', 'T']
+
+        for ind in lorentz_index:
+            res.append(('Gamma' + ind + (name[0] == 'A') * 'Gamma5',
+                        'Gamma' + ind + (name[1] == 'A') * 'Gamma5'))
+
+    return res
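For inputs not covered by the default vertices, the helper above should behave as follows (read off the code, not verified against hadrons output):

    _get_lorentz_names('SP')   # [('Identity', 'Gamma5')]
    _get_lorentz_names('VV')   # [('GammaX', 'GammaX'), ('GammaY', 'GammaY'),
                               #  ('GammaZ', 'GammaZ'), ('GammaT', 'GammaT')]
    _get_lorentz_names('SV')   # raises Exception: 'SV' is not a Lorentz scalar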