pyerrors.input.hadrons
import os
from collections import Counter
import h5py
from pathlib import Path
import numpy as np
from ..obs import Obs, CObs
from ..correlators import Corr
from ..dirac import epsilon_tensor_rank4


def _get_files(path, filestem, idl):
    ls = os.listdir(path)

    # Clean up file list
    files = list(filter(lambda x: x.startswith(filestem + "."), ls))

    if not files:
        raise Exception('No files starting with', filestem, 'in folder', path)

    def get_cnfg_number(n):
        return int(n.replace(".h5", "")[len(filestem) + 1:])  # From python 3.9 onward the safer 'removesuffix' method can be used.

    # Sort according to configuration number
    files.sort(key=get_cnfg_number)

    cnfg_numbers = []
    filtered_files = []
    for line in files:
        no = get_cnfg_number(line)
        if idl:
            if no in list(idl):
                filtered_files.append(line)
                cnfg_numbers.append(no)
        else:
            filtered_files.append(line)
            cnfg_numbers.append(no)

    if idl:
        if Counter(list(idl)) != Counter(cnfg_numbers):
            raise Exception("Not all configurations specified in idl found, configurations " + str(list(Counter(list(idl)) - Counter(cnfg_numbers))) + " are missing.")

    # Check that configurations are evenly spaced
    dc = np.unique(np.diff(cnfg_numbers))
    if np.any(dc < 0):
        raise Exception("Unsorted files")
    if len(dc) == 1:
        idx = range(cnfg_numbers[0], cnfg_numbers[-1] + dc[0], dc[0])
    elif idl:
        idx = idl
    else:
        raise Exception("Configurations are not evenly spaced. Provide an idl if you want to proceed with this set of configurations.")

    return filtered_files, idx


def read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):
    r'''Read hadrons meson hdf5 file and extract the meson labeled 'meson'

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    meson : str
        label of the meson to be extracted, default value meson_0 which
        corresponds to the pseudoscalar-pseudoscalar two-point function.
    gammas : tuple of strings
        Instead of a meson label one can also provide a tuple of two strings
        indicating the gamma matrices at sink and source.
        ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar
        two-point function. The gammas argument takes precedence over meson.
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    corr : Corr
        Correlator of the source-sink combination in question.
    '''

    files, idx = _get_files(path, filestem, idl)

    tree = meson.rsplit('_')[0]
    if gammas is not None:
        h5file = h5py.File(path + '/' + files[0], "r")
        found_meson = None
        for key in h5file[tree].keys():
            if gammas[0] == h5file[tree][key].attrs["gamma_snk"][0].decode() and h5file[tree][key].attrs["gamma_src"][0].decode() == gammas[1]:
                found_meson = key
                break
        h5file.close()
        if found_meson:
            meson = found_meson
        else:
            raise Exception("Source-sink combination " + str(gammas) + " not found.")

    corr_data = []
    infos = []
    for hd5_file in files:
        h5file = h5py.File(path + '/' + hd5_file, "r")
        if tree + '/' + meson not in h5file:
            raise Exception("Entry '" + meson + "' not contained in the files.")
        raw_data = h5file[tree + '/' + meson + '/corr']
        real_data = raw_data[:]["re"].astype(np.double)
        corr_data.append(real_data)
        if not infos:
            for k, i in h5file[tree + '/' + meson].attrs.items():
                infos.append(k + ': ' + i[0].decode())
        h5file.close()
    corr_data = np.array(corr_data)

    l_obs = []
    for c in corr_data.T:
        l_obs.append(Obs([c], [ens_id], idl=[idx]))

    corr = Corr(l_obs)
    corr.tag = r", ".join(infos)
    return corr


def read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None):
    """Read hadrons DistillationContraction hdf5 files in given directory structure

    Parameters
    ----------
    path : str
        path to the directories to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    diagrams : list
        List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    result : dict
        extracted DistillationContraction data
    """

    res_dict = {}

    directories, idx = _get_files(path, "data", idl)

    explore_path = Path(path + "/" + directories[0])

    for explore_file in explore_path.iterdir():
        if explore_file.is_file():
            stem = explore_file.with_suffix("").with_suffix("").as_posix().split("/")[-1]
        else:
            continue

        file_list = []
        for dir in directories:
            tmp_path = Path(path + "/" + dir)
            file_list.append((tmp_path / stem).as_posix() + tmp_path.suffix + ".h5")

        corr_data = {}

        for diagram in diagrams:
            corr_data[diagram] = []

        try:
            for n_file, (hd5_file, n_traj) in enumerate(zip(file_list, list(idx))):
                h5file = h5py.File(hd5_file)

                if n_file == 0:
                    if h5file["DistillationContraction/Metadata"].attrs.get("TimeSources")[0].decode() != "0...":
                        raise Exception("Routine is only implemented for files containing inversions on all timeslices.")

                    Nt = h5file["DistillationContraction/Metadata"].attrs.get("Nt")[0]

                    identifier = []
                    for in_file in range(len(h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.keys()) - 1):
                        encoded_info = h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.get("DmfInputFiles_" + str(in_file))
                        full_info = encoded_info[0].decode().split("/")[-1].replace(".h5", "").split("_")
                        my_tuple = (full_info[0], full_info[1][1:], full_info[2], full_info[3])
                        identifier.append(my_tuple)
                    identifier = tuple(identifier)
                    # "DistillationContraction/Metadata/DmfSuffix" contains info about different quarks, irrelevant in the SU(3) case.

                for diagram in diagrams:

                    if diagram == "triangle" and "Identity" not in str(identifier):
                        part = "im"
                    else:
                        part = "re"

                    real_data = np.zeros(Nt)
                    for x0 in range(Nt):
                        raw_data = h5file["DistillationContraction/Correlators/" + diagram + "/" + str(x0)][:][part].astype(np.double)
                        real_data += np.roll(raw_data, -x0)
                    real_data /= Nt

                    corr_data[diagram].append(real_data)
                h5file.close()

            res_dict[str(identifier)] = {}

            for diagram in diagrams:

                tmp_data = np.array(corr_data[diagram])

                l_obs = []
                for c in tmp_data.T:
                    l_obs.append(Obs([c], [ens_id], idl=[idx]))

                corr = Corr(l_obs)
                corr.tag = str(identifier)

                res_dict[str(identifier)][diagram] = corr
        except FileNotFoundError:
            print("Skip", stem)

    return res_dict


class Npr_matrix(np.ndarray):

    def __new__(cls, input_array, mom_in=None, mom_out=None):
        obj = np.asarray(input_array).view(cls)
        obj.mom_in = mom_in
        obj.mom_out = mom_out
        return obj

    @property
    def g5H(self):
        """Gamma_5 Hermitian conjugate

        Uses the fact that the propagator is gamma_5 Hermitian, so just the
        in and out momenta of the propagator are exchanged.
        """
        return Npr_matrix(self,
                          mom_in=self.mom_out,
                          mom_out=self.mom_in)

    def _propagate_mom(self, other, name):
        s_mom = getattr(self, name, None)
        o_mom = getattr(other, name, None)
        if s_mom is not None and o_mom is not None:
            if not np.allclose(s_mom, o_mom):
                raise Exception(name + ' does not match.')
        return o_mom if o_mom is not None else s_mom

    def __matmul__(self, other):
        return self.__new__(Npr_matrix,
                            super().__matmul__(other),
                            self._propagate_mom(other, 'mom_in'),
                            self._propagate_mom(other, 'mom_out'))

    def __array_finalize__(self, obj):
        if obj is None:
            return
        self.mom_in = getattr(obj, 'mom_in', None)
        self.mom_out = getattr(obj, 'mom_out', None)


def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons ExternalLeg hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    result : Npr_matrix
        read CObs-matrix
    """

    files, idx = _get_files(path, filestem, idl)

    mom = None

    corr_data = []
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        raw_data = file['ExternalLeg/corr'][0][0].view('complex')
        corr_data.append(raw_data)
        if mom is None:
            mom = np.array(str(file['ExternalLeg/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
        file.close()
    corr_data = np.array(corr_data)

    rolled_array = np.rollaxis(corr_data, 0, 5)

    matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
    for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
        real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
        imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
        matrix[si, sj, ci, cj] = CObs(real, imag)

    return Npr_matrix(matrix, mom_in=mom)


def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons Bilinear hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    result_dict : dict[Npr_matrix]
        extracted Bilinears
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    corr_data = {}
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        for i in range(16):
            name = file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['gamma'][0].decode('UTF-8')
            if name not in corr_data:
                corr_data[name] = []
            raw_data = file['Bilinear/Bilinear_' + str(i) + '/corr'][0][0].view('complex')
            corr_data[name].append(raw_data)
            if mom_in is None:
                mom_in = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
            if mom_out is None:
                mom_out = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    result_dict = {}

    for key, data in corr_data.items():
        local_data = np.array(data)

        rolled_array = np.rollaxis(local_data, 0, 5)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
            matrix[si, sj, ci, cj] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
    """Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified, only configurations in the given range are read in.
    vertices : list
        Vertex functions to be extracted.

    Returns
    -------
    result_dict : dict
        extracted fourquark matrices
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    vertex_names = []
    for vertex in vertices:
        vertex_names += _get_lorentz_names(vertex)

    corr_data = {}

    tree = 'FourQuarkFullyConnected/FourQuarkFullyConnected_'

    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")

        for i in range(32):
            name = (file[tree + str(i) + '/info'].attrs['gammaA'][0].decode('UTF-8'), file[tree + str(i) + '/info'].attrs['gammaB'][0].decode('UTF-8'))
            if name in vertex_names:
                if name not in corr_data:
                    corr_data[name] = []
                raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
                corr_data[name].append(raw_data)
                if mom_in is None:
                    mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
                if mom_out is None:
                    mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    intermediate_dict = {}

    for vertex in vertices:
        lorentz_names = _get_lorentz_names(vertex)
        for v_name in lorentz_names:
            if v_name in [('SigmaXY', 'SigmaZT'),
                          ('SigmaXT', 'SigmaYZ'),
                          ('SigmaYZ', 'SigmaXT'),
                          ('SigmaZT', 'SigmaXY')]:
                sign = -1
            else:
                sign = 1
            if vertex not in intermediate_dict:
                intermediate_dict[vertex] = sign * np.array(corr_data[v_name])
            else:
                intermediate_dict[vertex] += sign * np.array(corr_data[v_name])

    result_dict = {}

    for key, data in intermediate_dict.items():

        rolled_array = np.moveaxis(data, 0, 8)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for index in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[index].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[index].imag], [ens_id], idl=[idx])
            matrix[index] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def _get_lorentz_names(name):
    lorentz_index = ['X', 'Y', 'Z', 'T']

    res = []

    if name == "TT":
        for i in range(4):
            for j in range(i + 1, 4):
                res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[i] + lorentz_index[j]))
        return res

    if name == "TTtilde":
        for i in range(4):
            for j in range(i + 1, 4):
                for k in range(4):
                    for o in range(k + 1, 4):
                        fac = epsilon_tensor_rank4(i, j, k, o)
                        if not np.isclose(fac, 0.0):
                            res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[k] + lorentz_index[o]))
        return res

    assert len(name) == 2

    if 'S' in name or 'P' in name:
        if not set(name) <= set(['S', 'P']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        g_names = {'S': 'Identity',
                   'P': 'Gamma5'}

        res.append((g_names[name[0]], g_names[name[1]]))

    else:
        if not set(name) <= set(['V', 'A']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        for ind in lorentz_index:
            res.append(('Gamma' + ind + (name[0] == 'A') * 'Gamma5',
                        'Gamma' + ind + (name[1] == 'A') * 'Gamma5'))

    return res
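The readers in this module discover their input via the helper _get_files above: they expect one HDF5 file per configuration, named <filestem>.<configuration number>.h5, with evenly spaced configuration numbers (otherwise an explicit idl has to be passed); read_DistillationContraction_hd5 instead looks for one sub-directory data.<configuration number> per configuration. A hypothetical layout, purely for illustration:

/some/path/meson_ll.100.h5
/some/path/meson_ll.104.h5
/some/path/meson_ll.108.h5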
def read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):
Read hadrons meson hdf5 file and extract the meson labeled 'meson'
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- meson (str): label of the meson to be extracted, default value meson_0, which corresponds to the pseudoscalar-pseudoscalar two-point function.
- gammas (tuple of strings): Instead of a meson label one can also provide a tuple of two strings indicating the gamma matrices at sink and source. ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar two-point function. The gammas argument takes precedence over meson.
- idl (range): If specified, only configurations in the given range are read in.
Returns
- corr (Corr): Correlator of the source-sink combination in question.
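A minimal usage sketch; the path, file stem and ensemble name below are placeholders:

from pyerrors.input import hadrons

# Read the pseudoscalar-pseudoscalar two-point function by specifying the
# gamma structures at sink and source explicitly (placeholder path and labels).
corr = hadrons.read_meson_hd5("/some/path", "meson_ll", "ens_A",
                              gammas=("Gamma5", "Gamma5"))
print(corr.tag)  # metadata (gamma structures etc.) read from the file attributes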
def read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None):
Read hadrons DistillationContraction hdf5 files in given directory structure
Parameters
- path (str): path to the directories to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- diagrams (list): List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
- idl (range): If specified, only configurations in the given range are read in.
Returns
- result (dict): extracted DistillationContraction data
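A minimal sketch, assuming a hypothetical directory that contains one sub-directory per configuration (data.100, data.110, ...):

from pyerrors.input import hadrons

res = hadrons.read_DistillationContraction_hd5("/some/path", "ens_A",
                                                diagrams=["direct", "box"])
# One entry per source/quark-line identifier, each holding a Corr per diagram.
for identifier, corrs in res.items():
    print(identifier, list(corrs.keys()))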
class Npr_matrix(np.ndarray):
g5H: Gamma_5 Hermitian conjugate.
Uses the fact that the propagator is gamma_5 Hermitian, so just the in and out momenta of the propagator are exchanged.
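A small illustration of the momentum bookkeeping the class adds on top of np.ndarray; the matrix entries and momenta below are made up:

import numpy as np
from pyerrors.input.hadrons import Npr_matrix

prop = Npr_matrix(np.random.rand(12, 12), mom_in=[0, 0, 2, 2], mom_out=[0, 0, 2, 2])
print(prop.g5H.mom_in, prop.g5H.mom_out)   # g5H exchanges the in and out momenta
product = prop @ prop.g5H                  # __matmul__ checks that momenta agree and propagates them
print(product.mom_in, product.mom_out)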
def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None):
Read hadrons ExternalLeg hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified, only configurations in the given range are read in.
Returns
- result (Npr_matrix): read CObs-matrix
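A minimal usage sketch with placeholder path, file stem and ensemble name:

from pyerrors.input import hadrons

prop = hadrons.read_ExternalLeg_hd5("/some/path", "ExternalLeg", "ens_A")
print(prop.shape)   # spin and colour indices of the propagator, one CObs per entry
print(prop.mom_in)  # momentum pIn taken from the file metadata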
def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
Read hadrons Bilinear hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified, only configurations in the given range are read in.
Returns
- result_dict (dict[Npr_matrix]): extracted Bilinears
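A minimal usage sketch with placeholder names; the dictionary keys are the gamma structures found in the file metadata:

from pyerrors.input import hadrons

bilinears = hadrons.read_Bilinear_hd5("/some/path", "Bilinear", "ens_A")
for gamma_name, vertex in bilinears.items():
    print(gamma_name, vertex.mom_in, vertex.mom_out)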
def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified, only configurations in the given range are read in.
- vertices (list): Vertex functions to be extracted.
Returns
- result_dict (dict): extracted fourquark matrices
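A minimal usage sketch with placeholder names; the dictionary keys are the requested vertex labels:

from pyerrors.input import hadrons

fourquark = hadrons.read_Fourquark_hd5("/some/path", "Fourquark", "ens_A",
                                        vertices=["VA", "AV"])
print(list(fourquark.keys()))   # one Npr_matrix of CObs per requested vertex
print(fourquark["VA"].mom_in, fourquark["VA"].mom_out)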