pyerrors.misc
import pickle
import numpy as np
from .obs import Obs


def dump_object(obj, name, **kwargs):
    """Dump object into pickle file.

    Parameters
    ----------
    obj : object
        object to be saved in the pickle file
    name : str
        name of the file
    path : str
        specifies a custom path for the file (default '.')
    """
    if 'path' in kwargs:
        file_name = kwargs.get('path') + '/' + name + '.p'
    else:
        file_name = name + '.p'
    with open(file_name, 'wb') as fb:
        pickle.dump(obj, fb)


def load_object(path):
    """Load object from pickle file.

    Parameters
    ----------
    path : str
        path to the file
    """
    with open(path, 'rb') as file:
        return pickle.load(file)


def pseudo_Obs(value, dvalue, name, samples=1000):
    """Generate an Obs object with given value, dvalue and name for test purposes

    Parameters
    ----------
    value : float
        central value of the Obs to be generated.
    dvalue : float
        error of the Obs to be generated.
    name : str
        name of the ensemble for which the Obs is to be generated.
    samples: int
        number of samples for the Obs (default 1000).
    """
    if dvalue <= 0.0:
        return Obs([np.zeros(samples) + value], [name])
    else:
        for _ in range(100):
            deltas = [np.random.normal(0.0, dvalue * np.sqrt(samples), samples)]
            deltas -= np.mean(deltas)
            deltas *= dvalue / np.sqrt((np.var(deltas) / samples)) / np.sqrt(1 + 3 / samples)
            deltas += value
            res = Obs(deltas, [name])
            res.gamma_method(S=2, tau_exp=0)
            if abs(res.dvalue - dvalue) < 1e-10 * dvalue:
                break

        res._value = float(value)

        return res


def gen_correlated_data(means, cov, name, tau=0.5, samples=1000):
    """ Generate observables with given covariance and autocorrelation times.

    Parameters
    ----------
    means : list
        list containing the mean value of each observable.
    cov : numpy.ndarray
        covariance matrix for the data to be generated.
    name : str
        ensemble name for the data to be generated.
    tau : float or list
        can either be a real number or a list with an entry for
        every dataset.
    samples : int
        number of samples to be generated for each observable.
    """

    assert len(means) == cov.shape[-1]
    tau = np.asarray(tau)
    if np.min(tau) < 0.5:
        raise Exception('All integrated autocorrelations have to be >= 0.5.')

    a = (2 * tau - 1) / (2 * tau + 1)
    rand = np.random.multivariate_normal(np.zeros_like(means), cov * samples, samples)

    # Normalize samples such that sample variance matches input
    norm = np.array([np.var(o, ddof=1) / samples for o in rand.T])
    rand = rand @ np.diag(np.sqrt(np.diag(cov))) @ np.diag(1 / np.sqrt(norm))

    data = [rand[0]]
    for i in range(1, samples):
        data.append(np.sqrt(1 - a ** 2) * rand[i] + a * data[-1])
    corr_data = np.array(data) - np.mean(data, axis=0) + means
    return [Obs([dat], [name]) for dat in corr_data.T]


def _assert_equal_properties(ol, otype=Obs):
    otype = type(ol[0])
    for o in ol[1:]:
        if not isinstance(o, otype):
            raise Exception("Wrong data type in list.")
        for attr in ["is_merged", "reweighted", "e_content", "idl"]:
            if hasattr(ol[0], attr):
                if not getattr(ol[0], attr) == getattr(o, attr):
                    raise Exception(f"All Obs in list have to have the same state '{attr}'.")
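The module source above also defines gen_correlated_data, which draws samples with a prescribed covariance matrix and integrated autocorrelation times. A minimal usage sketch; the means, covariance matrix, tau values and ensemble name below are illustrative:

import numpy as np
from pyerrors.misc import gen_correlated_data

# Two observables with an illustrative covariance matrix and
# integrated autocorrelation times tau = 2 and tau = 4.
means = [1.0, 0.5]
cov = np.array([[0.010, 0.002],
                [0.002, 0.020]])

obs_list = gen_correlated_data(means, cov, 'test_ensemble', tau=[2.0, 4.0], samples=1000)
for o in obs_list:
    o.gamma_method()
    print(o)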
def dump_object(obj, name, **kwargs):
def dump_object(obj, name, **kwargs):
    """Dump object into pickle file.

    Parameters
    ----------
    obj : object
        object to be saved in the pickle file
    name : str
        name of the file
    path : str
        specifies a custom path for the file (default '.')
    """
    if 'path' in kwargs:
        file_name = kwargs.get('path') + '/' + name + '.p'
    else:
        file_name = name + '.p'
    with open(file_name, 'wb') as fb:
        pickle.dump(obj, fb)
Dump object into pickle file.
Parameters
- obj (object): object to be saved in the pickle file
- name (str): name of the file
- path (str): specifies a custom path for the file (default '.')
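A minimal usage sketch; the object, file name and path are illustrative:

from pyerrors.misc import dump_object, pseudo_Obs

# Create a test observable to store.
my_obs = pseudo_Obs(0.5, 0.01, 'test_ensemble')

# Writes ./my_obs.p
dump_object(my_obs, 'my_obs')

# Writes /tmp/my_obs.p via the optional path keyword
dump_object(my_obs, 'my_obs', path='/tmp')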
def load_object(path):
def load_object(path):
    """Load object from pickle file.

    Parameters
    ----------
    path : str
        path to the file
    """
    with open(path, 'rb') as file:
        return pickle.load(file)
Load object from pickle file.
Parameters
- path (str): path to the file
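A minimal sketch reading back an object previously written with dump_object; the path is illustrative and has to include the '.p' suffix that dump_object appends:

from pyerrors.misc import load_object

my_obs = load_object('my_obs.p')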
def pseudo_Obs(value, dvalue, name, samples=1000):
def pseudo_Obs(value, dvalue, name, samples=1000):
    """Generate an Obs object with given value, dvalue and name for test purposes

    Parameters
    ----------
    value : float
        central value of the Obs to be generated.
    dvalue : float
        error of the Obs to be generated.
    name : str
        name of the ensemble for which the Obs is to be generated.
    samples: int
        number of samples for the Obs (default 1000).
    """
    if dvalue <= 0.0:
        return Obs([np.zeros(samples) + value], [name])
    else:
        for _ in range(100):
            deltas = [np.random.normal(0.0, dvalue * np.sqrt(samples), samples)]
            deltas -= np.mean(deltas)
            deltas *= dvalue / np.sqrt((np.var(deltas) / samples)) / np.sqrt(1 + 3 / samples)
            deltas += value
            res = Obs(deltas, [name])
            res.gamma_method(S=2, tau_exp=0)
            if abs(res.dvalue - dvalue) < 1e-10 * dvalue:
                break

        res._value = float(value)

        return res
Generate an Obs object with given value, dvalue and name for test purposes
Parameters
- value (float): central value of the Obs to be generated.
- dvalue (float): error of the Obs to be generated.
- name (str): name of the ensemble for which the Obs is to be generated.
- samples (int): number of samples for the Obs (default 1000).
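A minimal usage sketch; the value, error, ensemble name and sample count are illustrative:

from pyerrors.misc import pseudo_Obs

# Observable with central value 1.2 and error 0.03 on a fictitious ensemble.
test_obs = pseudo_Obs(1.2, 0.03, 'test_ensemble', samples=500)
test_obs.gamma_method()
print(test_obs)  # the error estimate reproduces the requested dvalue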