pyerrors.input.hadrons
```python
import os
from collections import Counter
import h5py
from pathlib import Path
import numpy as np
from ..obs import Obs, CObs
from ..correlators import Corr
from ..dirac import epsilon_tensor_rank4


def _get_files(path, filestem, idl):
    ls = os.listdir(path)

    # Clean up file list
    files = list(filter(lambda x: x.startswith(filestem + "."), ls))

    if not files:
        raise Exception('No files starting with', filestem, 'in folder', path)

    def get_cnfg_number(n):
        return int(n.replace(".h5", "")[len(filestem) + 1:])  # From python 3.9 onward the safer 'removesuffix' method can be used.

    # Sort according to configuration number
    files.sort(key=get_cnfg_number)

    cnfg_numbers = []
    filtered_files = []
    for line in files:
        no = get_cnfg_number(line)
        if idl:
            if no in list(idl):
                filtered_files.append(line)
                cnfg_numbers.append(no)
        else:
            filtered_files.append(line)
            cnfg_numbers.append(no)

    if idl:
        if Counter(list(idl)) != Counter(cnfg_numbers):
            raise Exception("Not all configurations specified in idl found, configurations " + str(list(Counter(list(idl)) - Counter(cnfg_numbers))) + " are missing.")

    # Check that configurations are evenly spaced
    dc = np.unique(np.diff(cnfg_numbers))
    if np.any(dc < 0):
        raise Exception("Unsorted files")
    if len(dc) == 1:
        idx = range(cnfg_numbers[0], cnfg_numbers[-1] + dc[0], dc[0])
    elif idl:
        idx = idl
    else:
        raise Exception("Configurations are not evenly spaced. Provide an idl if you want to proceed with this set of configurations.")

    return filtered_files, idx


def read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):
    r'''Read hadrons meson hdf5 file and extract the meson labeled 'meson'

    Parameters
    -----------------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    meson : str
        label of the meson to be extracted, default value meson_0 which
        corresponds to the pseudoscalar-pseudoscalar two-point function.
    gammas : tuple of strings
        Instead of a meson label one can also provide a tuple of two strings
        indicating the gamma matrices at source and sink.
        ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar
        two-point function. The gammas argument takes precedence over meson.
    idl : range
        If specified only configurations in the given range are read in.
    '''

    files, idx = _get_files(path, filestem, idl)

    tree = meson.rsplit('_')[0]
    if gammas is not None:
        h5file = h5py.File(path + '/' + files[0], "r")
        found_meson = None
        for key in h5file[tree].keys():
            if gammas[0] == h5file[tree][key].attrs["gamma_snk"][0].decode() and h5file[tree][key].attrs["gamma_src"][0].decode() == gammas[1]:
                found_meson = key
                break
        h5file.close()
        if found_meson:
            meson = found_meson
        else:
            raise Exception("Source Sink combination " + str(gammas) + " not found.")

    corr_data = []
    infos = []
    for hd5_file in files:
        h5file = h5py.File(path + '/' + hd5_file, "r")
        if not tree + '/' + meson in h5file:
            raise Exception("Entry '" + meson + "' not contained in the files.")
        raw_data = h5file[tree + '/' + meson + '/corr']
        real_data = raw_data[:]["re"].astype(np.double)
        corr_data.append(real_data)
        if not infos:
            for k, i in h5file[tree + '/' + meson].attrs.items():
                infos.append(k + ': ' + i[0].decode())
        h5file.close()
    corr_data = np.array(corr_data)

    l_obs = []
    for c in corr_data.T:
        l_obs.append(Obs([c], [ens_id], idl=[idx]))

    corr = Corr(l_obs)
    corr.tag = r", ".join(infos)
    return corr


def read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None):
    """Read hadrons DistillationContraction hdf5 files in given directory structure

    Parameters
    -----------------
    path : str
        path to the directories to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    diagrams : list
        List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
    idl : range
        If specified only configurations in the given range are read in.
    """

    res_dict = {}

    directories, idx = _get_files(path, "data", idl)

    explore_path = Path(path + "/" + directories[0])

    for explore_file in explore_path.iterdir():
        if explore_file.is_file():
            stem = explore_file.with_suffix("").with_suffix("").as_posix().split("/")[-1]
        else:
            continue

        file_list = []
        for dir in directories:
            tmp_path = Path(path + "/" + dir)
            file_list.append((tmp_path / stem).as_posix() + tmp_path.suffix + ".h5")

        corr_data = {}

        for diagram in diagrams:
            corr_data[diagram] = []

        for n_file, (hd5_file, n_traj) in enumerate(zip(file_list, list(idx))):
            h5file = h5py.File(hd5_file)

            if n_file == 0:
                if h5file["DistillationContraction/Metadata"].attrs.get("TimeSources")[0].decode() != "0...":
                    raise Exception("Routine is only implemented for files containing inversions on all timeslices.")

                Nt = h5file["DistillationContraction/Metadata"].attrs.get("Nt")[0]

                identifier = []
                for in_file in range(len(h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.keys()) - 1):
                    encoded_info = h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.get("DmfInputFiles_" + str(in_file))
                    full_info = encoded_info[0].decode().split("/")[-1].replace(".h5", "").split("_")
                    my_tuple = (full_info[0], full_info[1][1:], full_info[2], full_info[3])
                    identifier.append(my_tuple)
                identifier = tuple(identifier)
                # "DistillationContraction/Metadata/DmfSuffix" contains info about different quarks, irrelevant in the SU(3) case.

            for diagram in diagrams:
                real_data = np.zeros(Nt)
                for x0 in range(Nt):
                    raw_data = h5file["DistillationContraction/Correlators/" + diagram + "/" + str(x0)][:]["re"].astype(np.double)
                    real_data += np.roll(raw_data, -x0)
                real_data /= Nt

                corr_data[diagram].append(real_data)
            h5file.close()

        res_dict[str(identifier)] = {}

        for diagram in diagrams:

            tmp_data = np.array(corr_data[diagram])

            l_obs = []
            for c in tmp_data.T:
                l_obs.append(Obs([c], [ens_id], idl=[idx]))

            corr = Corr(l_obs)
            corr.tag = str(identifier)

            res_dict[str(identifier)][diagram] = corr

    return res_dict


class Npr_matrix(np.ndarray):

    def __new__(cls, input_array, mom_in=None, mom_out=None):
        obj = np.asarray(input_array).view(cls)
        obj.mom_in = mom_in
        obj.mom_out = mom_out
        return obj

    @property
    def g5H(self):
        """Gamma_5 hermitean conjugate

        Uses the fact that the propagator is gamma5 hermitean, so just the
        in and out momenta of the propagator are exchanged.
        """
        return Npr_matrix(self,
                          mom_in=self.mom_out,
                          mom_out=self.mom_in)

    def _propagate_mom(self, other, name):
        s_mom = getattr(self, name, None)
        o_mom = getattr(other, name, None)
        if s_mom is not None and o_mom is not None:
            if not np.allclose(s_mom, o_mom):
                raise Exception(name + ' does not match.')
        return o_mom if o_mom is not None else s_mom

    def __matmul__(self, other):
        return self.__new__(Npr_matrix,
                            super().__matmul__(other),
                            self._propagate_mom(other, 'mom_in'),
                            self._propagate_mom(other, 'mom_out'))

    def __array_finalize__(self, obj):
        if obj is None:
            return
        self.mom_in = getattr(obj, 'mom_in', None)
        self.mom_out = getattr(obj, 'mom_out', None)


def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons ExternalLeg hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified only configurations in the given range are read in.
    """

    files, idx = _get_files(path, filestem, idl)

    mom = None

    corr_data = []
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        raw_data = file['ExternalLeg/corr'][0][0].view('complex')
        corr_data.append(raw_data)
        if mom is None:
            mom = np.array(str(file['ExternalLeg/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
        file.close()
    corr_data = np.array(corr_data)

    rolled_array = np.rollaxis(corr_data, 0, 5)

    matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
    for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
        real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
        imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
        matrix[si, sj, ci, cj] = CObs(real, imag)

    return Npr_matrix(matrix, mom_in=mom)


def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons Bilinear hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified only configurations in the given range are read in.
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    corr_data = {}
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        for i in range(16):
            name = file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['gamma'][0].decode('UTF-8')
            if name not in corr_data:
                corr_data[name] = []
            raw_data = file['Bilinear/Bilinear_' + str(i) + '/corr'][0][0].view('complex')
            corr_data[name].append(raw_data)
            if mom_in is None:
                mom_in = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
            if mom_out is None:
                mom_out = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    result_dict = {}

    for key, data in corr_data.items():
        local_data = np.array(data)

        rolled_array = np.rollaxis(local_data, 0, 5)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
            matrix[si, sj, ci, cj] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
    """Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified only configurations in the given range are read in.
    vertices : list
        Vertex functions to be extracted.
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    vertex_names = []
    for vertex in vertices:
        vertex_names += _get_lorentz_names(vertex)

    corr_data = {}

    tree = 'FourQuarkFullyConnected/FourQuarkFullyConnected_'

    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")

        for i in range(32):
            name = (file[tree + str(i) + '/info'].attrs['gammaA'][0].decode('UTF-8'), file[tree + str(i) + '/info'].attrs['gammaB'][0].decode('UTF-8'))
            if name in vertex_names:
                if name not in corr_data:
                    corr_data[name] = []
                raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
                corr_data[name].append(raw_data)
                if mom_in is None:
                    mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
                if mom_out is None:
                    mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    intermediate_dict = {}

    for vertex in vertices:
        lorentz_names = _get_lorentz_names(vertex)
        for v_name in lorentz_names:
            if v_name in [('SigmaXY', 'SigmaZT'),
                          ('SigmaXT', 'SigmaYZ'),
                          ('SigmaYZ', 'SigmaXT'),
                          ('SigmaZT', 'SigmaXY')]:
                sign = -1
            else:
                sign = 1
            if vertex not in intermediate_dict:
                intermediate_dict[vertex] = sign * np.array(corr_data[v_name])
            else:
                intermediate_dict[vertex] += sign * np.array(corr_data[v_name])

    result_dict = {}

    for key, data in intermediate_dict.items():

        rolled_array = np.moveaxis(data, 0, 8)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for index in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[index].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[index].imag], [ens_id], idl=[idx])
            matrix[index] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def _get_lorentz_names(name):
    lorentz_index = ['X', 'Y', 'Z', 'T']

    res = []

    if name == "TT":
        for i in range(4):
            for j in range(i + 1, 4):
                res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[i] + lorentz_index[j]))
        return res

    if name == "TTtilde":
        for i in range(4):
            for j in range(i + 1, 4):
                for k in range(4):
                    for o in range(k + 1, 4):
                        fac = epsilon_tensor_rank4(i, j, k, o)
                        if not np.isclose(fac, 0.0):
                            res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[k] + lorentz_index[o]))
        return res

    assert len(name) == 2

    if 'S' in name or 'P' in name:
        if not set(name) <= set(['S', 'P']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        g_names = {'S': 'Identity',
                   'P': 'Gamma5'}

        res.append((g_names[name[0]], g_names[name[1]]))

    else:
        if not set(name) <= set(['V', 'A']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        for ind in lorentz_index:
            res.append(('Gamma' + ind + (name[0] == 'A') * 'Gamma5',
                        'Gamma' + ind + (name[1] == 'A') * 'Gamma5'))

    return res
```
```python
def read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None)
```
Read hadrons meson hdf5 file and extract the meson labeled 'meson'
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- meson (str): label of the meson to be extracted, default value meson_0 which corresponds to the pseudoscalar-pseudoscalar two-point function.
- gammas (tuple of strings): Instead of a meson label one can also provide a tuple of two strings indicating the gamma matrices at source and sink. ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar two-point function. The gammas argument takes precedence over meson.
- idl (range): If specified only configurations in the given range are read in.
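A usage sketch (path, file stem and ensemble name are placeholders), selecting the correlator by its gamma structure instead of the meson label:

```python
from pyerrors.input import hadrons

# Pseudoscalar-pseudoscalar two-point function, identified via the gamma
# matrices at source and sink rather than via a meson_X label.
corr_pp = hadrons.read_meson_hd5("./correlators", "meson_2pt", ens_id="A653",
                                 gammas=("Gamma5", "Gamma5"))
print(corr_pp.tag)  # HDF5 attributes of the dataset, joined into a single string
```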
```python
def read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None)
```
Read hadrons DistillationContraction hdf5 files in given directory structure
Parameters
- path (str): path to the directories to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- diagrams (list): List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
- idl (range): If specified only configurations in the given range are read in.
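The function returns a nested dictionary: the outer key identifies the contraction (built from the perambulator input files), the inner key is the diagram name, and each value is a pyerrors Corr. A sketch with a hypothetical directory layout and ensemble name:

```python
from pyerrors.input import hadrons

# Expects per-configuration directories "data.<cnfg_number>" inside ./distillation.
res = hadrons.read_DistillationContraction_hd5("./distillation", "A653",
                                                diagrams=["direct", "box", "cross"])
for identifier, diagram_dict in res.items():
    print(identifier, list(diagram_dict.keys()))  # each entry is a Corr
```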
```python
class Npr_matrix(np.ndarray)
```
g5H

Gamma_5 hermitean conjugate.

Uses the fact that the propagator is gamma5 hermitean, so only the in and out momenta of the propagator are exchanged.
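Npr_matrix only adds momentum bookkeeping on top of numpy.ndarray: g5H returns a view with mom_in and mom_out exchanged, and __matmul__ checks that the momenta of both factors are consistent before propagating them. A small sketch with made-up momenta:

```python
import numpy as np
from pyerrors.input.hadrons import Npr_matrix

a = Npr_matrix(np.identity(4, dtype=complex), mom_in=[1, 1, 0, 0], mom_out=[0, 0, 0, 0])
b = a.g5H                    # same data, mom_in and mom_out exchanged
print(b.mom_in, b.mom_out)   # [0, 0, 0, 0] [1, 1, 0, 0]
c = a @ np.identity(4)       # a plain array carries no momenta, so a's momenta are kept
print(c.mom_in, c.mom_out)   # [1, 1, 0, 0] [0, 0, 0, 0]
```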
```python
def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None)
```
Read hadrons ExternalLeg hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified only configurations in the given range are read in.
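The result is an Npr_matrix of CObs with open spin and colour indices (shape (4, 4, 3, 3), as built in the source above) and the incoming momentum attached. A sketch with placeholder names:

```python
from pyerrors.input import hadrons

prop = hadrons.read_ExternalLeg_hd5("./npr_data", "ExternalLeg", "A653")
print(prop.shape)    # (4, 4, 3, 3): spin-spin-colour-colour indices
print(prop.mom_in)   # incoming momentum parsed from the 'pIn' attribute
```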
```python
def read_Bilinear_hd5(path, filestem, ens_id, idl=None)
```
Read hadrons Bilinear hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified only configurations in the given range are read in.
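The return value is a dictionary mapping the gamma structure of each bilinear to an Npr_matrix of CObs. A sketch with placeholder names (the available keys depend on the measurement):

```python
from pyerrors.input import hadrons

bilinears = hadrons.read_Bilinear_hd5("./npr_data", "Bilinear", "A653")
print(sorted(bilinears.keys()))        # gamma names found in the files
vertex = bilinears["Gamma5"]           # hypothetical key
print(vertex.mom_in, vertex.mom_out)
```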
```python
def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"])
```
Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified only configurations in the given range are read in.
- vertices (list): Vertex functions to be extracted.
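Each requested vertex ("VA", "AV", ...) is assembled from the corresponding Lorentz components via _get_lorentz_names and returned as an Npr_matrix of CObs with eight open spin and colour indices. A sketch with placeholder names:

```python
from pyerrors.input import hadrons

fourquark = hadrons.read_Fourquark_hd5("./npr_data", "Fourquark", "A653",
                                       vertices=["VA", "AV"])
print(fourquark["VA"].shape)   # eight open spin and colour indices
```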