From 9b4c9de760fa0d52113e7df6dd70c811d674bb24 Mon Sep 17 00:00:00 2001 From: fjosw Date: Sat, 11 Mar 2023 21:32:24 +0000 Subject: [PATCH] Documentation updated --- docs/pyerrors/misc.html | 632 ++++++++++++++++++++-------------------- 1 file changed, 317 insertions(+), 315 deletions(-) diff --git a/docs/pyerrors/misc.html b/docs/pyerrors/misc.html index 59f2c9eb..8d3b3fee 100644 --- a/docs/pyerrors/misc.html +++ b/docs/pyerrors/misc.html @@ -104,177 +104,178 @@ 11 12def print_config(): 13 """Print information about version of python, pyerrors and dependencies.""" - 14 config = {"python": platform.python_version(), - 15 "pyerrors": __version__, - 16 "numpy": np.__version__, - 17 "scipy": scipy.__version__, - 18 "matplotlib": matplotlib.__version__, - 19 "pandas": pd.__version__} - 20 - 21 for key, value in config.items(): - 22 print(f"{key : <10}\t {value}") - 23 + 14 config = {"system": platform.system(), + 15 "python": platform.python_version(), + 16 "pyerrors": __version__, + 17 "numpy": np.__version__, + 18 "scipy": scipy.__version__, + 19 "matplotlib": matplotlib.__version__, + 20 "pandas": pd.__version__} + 21 + 22 for key, value in config.items(): + 23 print(f"{key : <10}\t {value}") 24 - 25def errorbar(x, y, axes=plt, **kwargs): - 26 """pyerrors wrapper for the errorbars method of matplotlib - 27 - 28 Parameters - 29 ---------- - 30 x : list - 31 A list of x-values which can be Obs. - 32 y : list - 33 A list of y-values which can be Obs. - 34 axes : (matplotlib.pyplot.axes) - 35 The axes to plot on. default is plt. - 36 """ - 37 val = {} - 38 err = {} - 39 for name, comp in zip(["x", "y"], [x, y]): - 40 if all(isinstance(o, Obs) for o in comp): - 41 if not all(hasattr(o, 'e_dvalue') for o in comp): - 42 [o.gamma_method() for o in comp] - 43 val[name] = [o.value for o in comp] - 44 err[name] = [o.dvalue for o in comp] - 45 else: - 46 val[name] = comp - 47 err[name] = None - 48 - 49 if f"{name}err" in kwargs: - 50 err[name] = kwargs.get(f"{name}err") - 51 kwargs.pop(f"{name}err", None) - 52 - 53 axes.errorbar(val["x"], val["y"], xerr=err["x"], yerr=err["y"], **kwargs) - 54 + 25 + 26def errorbar(x, y, axes=plt, **kwargs): + 27 """pyerrors wrapper for the errorbars method of matplotlib + 28 + 29 Parameters + 30 ---------- + 31 x : list + 32 A list of x-values which can be Obs. + 33 y : list + 34 A list of y-values which can be Obs. + 35 axes : (matplotlib.pyplot.axes) + 36 The axes to plot on. default is plt. + 37 """ + 38 val = {} + 39 err = {} + 40 for name, comp in zip(["x", "y"], [x, y]): + 41 if all(isinstance(o, Obs) for o in comp): + 42 if not all(hasattr(o, 'e_dvalue') for o in comp): + 43 [o.gamma_method() for o in comp] + 44 val[name] = [o.value for o in comp] + 45 err[name] = [o.dvalue for o in comp] + 46 else: + 47 val[name] = comp + 48 err[name] = None + 49 + 50 if f"{name}err" in kwargs: + 51 err[name] = kwargs.get(f"{name}err") + 52 kwargs.pop(f"{name}err", None) + 53 + 54 axes.errorbar(val["x"], val["y"], xerr=err["x"], yerr=err["y"], **kwargs) 55 - 56def dump_object(obj, name, **kwargs): - 57 """Dump object into pickle file. 
- 58 - 59 Parameters - 60 ---------- - 61 obj : object - 62 object to be saved in the pickle file - 63 name : str - 64 name of the file - 65 path : str - 66 specifies a custom path for the file (default '.') - 67 - 68 Returns - 69 ------- - 70 None - 71 """ - 72 if 'path' in kwargs: - 73 file_name = kwargs.get('path') + '/' + name + '.p' - 74 else: - 75 file_name = name + '.p' - 76 with open(file_name, 'wb') as fb: - 77 pickle.dump(obj, fb) - 78 + 56 + 57def dump_object(obj, name, **kwargs): + 58 """Dump object into pickle file. + 59 + 60 Parameters + 61 ---------- + 62 obj : object + 63 object to be saved in the pickle file + 64 name : str + 65 name of the file + 66 path : str + 67 specifies a custom path for the file (default '.') + 68 + 69 Returns + 70 ------- + 71 None + 72 """ + 73 if 'path' in kwargs: + 74 file_name = kwargs.get('path') + '/' + name + '.p' + 75 else: + 76 file_name = name + '.p' + 77 with open(file_name, 'wb') as fb: + 78 pickle.dump(obj, fb) 79 - 80def load_object(path): - 81 """Load object from pickle file. - 82 - 83 Parameters - 84 ---------- - 85 path : str - 86 path to the file - 87 - 88 Returns - 89 ------- - 90 object : Obs - 91 Loaded Object - 92 """ - 93 with open(path, 'rb') as file: - 94 return pickle.load(file) - 95 + 80 + 81def load_object(path): + 82 """Load object from pickle file. + 83 + 84 Parameters + 85 ---------- + 86 path : str + 87 path to the file + 88 + 89 Returns + 90 ------- + 91 object : Obs + 92 Loaded Object + 93 """ + 94 with open(path, 'rb') as file: + 95 return pickle.load(file) 96 - 97def pseudo_Obs(value, dvalue, name, samples=1000): - 98 """Generate an Obs object with given value, dvalue and name for test purposes - 99 -100 Parameters -101 ---------- -102 value : float -103 central value of the Obs to be generated. -104 dvalue : float -105 error of the Obs to be generated. -106 name : str -107 name of the ensemble for which the Obs is to be generated. -108 samples: int -109 number of samples for the Obs (default 1000). -110 -111 Returns -112 ------- -113 res : Obs -114 Generated Observable -115 """ -116 if dvalue <= 0.0: -117 return Obs([np.zeros(samples) + value], [name]) -118 else: -119 for _ in range(100): -120 deltas = [np.random.normal(0.0, dvalue * np.sqrt(samples), samples)] -121 deltas -= np.mean(deltas) -122 deltas *= dvalue / np.sqrt((np.var(deltas) / samples)) / np.sqrt(1 + 3 / samples) -123 deltas += value -124 res = Obs(deltas, [name]) -125 res.gamma_method(S=2, tau_exp=0) -126 if abs(res.dvalue - dvalue) < 1e-10 * dvalue: -127 break -128 -129 res._value = float(value) -130 -131 return res -132 + 97 + 98def pseudo_Obs(value, dvalue, name, samples=1000): + 99 """Generate an Obs object with given value, dvalue and name for test purposes +100 +101 Parameters +102 ---------- +103 value : float +104 central value of the Obs to be generated. +105 dvalue : float +106 error of the Obs to be generated. +107 name : str +108 name of the ensemble for which the Obs is to be generated. +109 samples: int +110 number of samples for the Obs (default 1000). 
+111 +112 Returns +113 ------- +114 res : Obs +115 Generated Observable +116 """ +117 if dvalue <= 0.0: +118 return Obs([np.zeros(samples) + value], [name]) +119 else: +120 for _ in range(100): +121 deltas = [np.random.normal(0.0, dvalue * np.sqrt(samples), samples)] +122 deltas -= np.mean(deltas) +123 deltas *= dvalue / np.sqrt((np.var(deltas) / samples)) / np.sqrt(1 + 3 / samples) +124 deltas += value +125 res = Obs(deltas, [name]) +126 res.gamma_method(S=2, tau_exp=0) +127 if abs(res.dvalue - dvalue) < 1e-10 * dvalue: +128 break +129 +130 res._value = float(value) +131 +132 return res 133 -134def gen_correlated_data(means, cov, name, tau=0.5, samples=1000): -135 """ Generate observables with given covariance and autocorrelation times. -136 -137 Parameters -138 ---------- -139 means : list -140 list containing the mean value of each observable. -141 cov : numpy.ndarray -142 covariance matrix for the data to be generated. -143 name : str -144 ensemble name for the data to be geneated. -145 tau : float or list -146 can either be a real number or a list with an entry for -147 every dataset. -148 samples : int -149 number of samples to be generated for each observable. -150 -151 Returns -152 ------- -153 corr_obs : list[Obs] -154 Generated observable list -155 """ -156 -157 assert len(means) == cov.shape[-1] -158 tau = np.asarray(tau) -159 if np.min(tau) < 0.5: -160 raise Exception('All integrated autocorrelations have to be >= 0.5.') -161 -162 a = (2 * tau - 1) / (2 * tau + 1) -163 rand = np.random.multivariate_normal(np.zeros_like(means), cov * samples, samples) -164 -165 # Normalize samples such that sample variance matches input -166 norm = np.array([np.var(o, ddof=1) / samples for o in rand.T]) -167 rand = rand @ np.diag(np.sqrt(np.diag(cov))) @ np.diag(1 / np.sqrt(norm)) -168 -169 data = [rand[0]] -170 for i in range(1, samples): -171 data.append(np.sqrt(1 - a ** 2) * rand[i] + a * data[-1]) -172 corr_data = np.array(data) - np.mean(data, axis=0) + means -173 return [Obs([dat], [name]) for dat in corr_data.T] -174 +134 +135def gen_correlated_data(means, cov, name, tau=0.5, samples=1000): +136 """ Generate observables with given covariance and autocorrelation times. +137 +138 Parameters +139 ---------- +140 means : list +141 list containing the mean value of each observable. +142 cov : numpy.ndarray +143 covariance matrix for the data to be generated. +144 name : str +145 ensemble name for the data to be geneated. +146 tau : float or list +147 can either be a real number or a list with an entry for +148 every dataset. +149 samples : int +150 number of samples to be generated for each observable. 
+151 +152 Returns +153 ------- +154 corr_obs : list[Obs] +155 Generated observable list +156 """ +157 +158 assert len(means) == cov.shape[-1] +159 tau = np.asarray(tau) +160 if np.min(tau) < 0.5: +161 raise Exception('All integrated autocorrelations have to be >= 0.5.') +162 +163 a = (2 * tau - 1) / (2 * tau + 1) +164 rand = np.random.multivariate_normal(np.zeros_like(means), cov * samples, samples) +165 +166 # Normalize samples such that sample variance matches input +167 norm = np.array([np.var(o, ddof=1) / samples for o in rand.T]) +168 rand = rand @ np.diag(np.sqrt(np.diag(cov))) @ np.diag(1 / np.sqrt(norm)) +169 +170 data = [rand[0]] +171 for i in range(1, samples): +172 data.append(np.sqrt(1 - a ** 2) * rand[i] + a * data[-1]) +173 corr_data = np.array(data) - np.mean(data, axis=0) + means +174 return [Obs([dat], [name]) for dat in corr_data.T] 175 -176def _assert_equal_properties(ol, otype=Obs): -177 otype = type(ol[0]) -178 for o in ol[1:]: -179 if not isinstance(o, otype): -180 raise Exception("Wrong data type in list.") -181 for attr in ["reweighted", "e_content", "idl"]: -182 if hasattr(ol[0], attr): -183 if not getattr(ol[0], attr) == getattr(o, attr): -184 raise Exception(f"All Obs in list have to have the same state '{attr}'.") +176 +177def _assert_equal_properties(ol, otype=Obs): +178 otype = type(ol[0]) +179 for o in ol[1:]: +180 if not isinstance(o, otype): +181 raise Exception("Wrong data type in list.") +182 for attr in ["reweighted", "e_content", "idl"]: +183 if hasattr(ol[0], attr): +184 if not getattr(ol[0], attr) == getattr(o, attr): +185 raise Exception(f"All Obs in list have to have the same state '{attr}'.") @@ -292,15 +293,16 @@
13def print_config():
 14    """Print information about version of python, pyerrors and dependencies."""
-15    config = {"python": platform.python_version(),
-16              "pyerrors": __version__,
-17              "numpy": np.__version__,
-18              "scipy": scipy.__version__,
-19              "matplotlib": matplotlib.__version__,
-20              "pandas": pd.__version__}
-21
-22    for key, value in config.items():
-23        print(f"{key : <10}\t {value}")
+15    config = {"system": platform.system(),
+16              "python": platform.python_version(),
+17              "pyerrors": __version__,
+18              "numpy": np.__version__,
+19              "scipy": scipy.__version__,
+20              "matplotlib": matplotlib.__version__,
+21              "pandas": pd.__version__}
+22
+23    for key, value in config.items():
+24        print(f"{key : <10}\t {value}")
 
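A minimal usage sketch for print_config, assuming pyerrors is installed and importable; it prints one name/value line per entry of the dictionary shown above:

    import pyerrors.misc

    # Prints the platform plus the versions of python, pyerrors and its
    # dependencies (numpy, scipy, matplotlib, pandas), one line per entry.
    pyerrors.misc.print_config()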
@@ -320,35 +322,35 @@ -
26def errorbar(x, y, axes=plt, **kwargs):
-27    """pyerrors wrapper for the errorbars method of matplotlib
-28
-29    Parameters
-30    ----------
-31    x : list
-32        A list of x-values which can be Obs.
-33    y : list
-34        A list of y-values which can be Obs.
-35    axes : (matplotlib.pyplot.axes)
-36        The axes to plot on. default is plt.
-37    """
-38    val = {}
-39    err = {}
-40    for name, comp in zip(["x", "y"], [x, y]):
-41        if all(isinstance(o, Obs) for o in comp):
-42            if not all(hasattr(o, 'e_dvalue') for o in comp):
-43                [o.gamma_method() for o in comp]
-44            val[name] = [o.value for o in comp]
-45            err[name] = [o.dvalue for o in comp]
-46        else:
-47            val[name] = comp
-48            err[name] = None
-49
-50        if f"{name}err" in kwargs:
-51            err[name] = kwargs.get(f"{name}err")
-52            kwargs.pop(f"{name}err", None)
-53
-54    axes.errorbar(val["x"], val["y"], xerr=err["x"], yerr=err["y"], **kwargs)
+            
27def errorbar(x, y, axes=plt, **kwargs):
+28    """pyerrors wrapper for the errorbars method of matplotlib
+29
+30    Parameters
+31    ----------
+32    x : list
+33        A list of x-values which can be Obs.
+34    y : list
+35        A list of y-values which can be Obs.
+36    axes : (matplotlib.pyplot.axes)
+37        The axes to plot on. default is plt.
+38    """
+39    val = {}
+40    err = {}
+41    for name, comp in zip(["x", "y"], [x, y]):
+42        if all(isinstance(o, Obs) for o in comp):
+43            if not all(hasattr(o, 'e_dvalue') for o in comp):
+44                [o.gamma_method() for o in comp]
+45            val[name] = [o.value for o in comp]
+46            err[name] = [o.dvalue for o in comp]
+47        else:
+48            val[name] = comp
+49            err[name] = None
+50
+51        if f"{name}err" in kwargs:
+52            err[name] = kwargs.get(f"{name}err")
+53            kwargs.pop(f"{name}err", None)
+54
+55    axes.errorbar(val["x"], val["y"], xerr=err["x"], yerr=err["y"], **kwargs)
 
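A minimal sketch of how errorbar can be used with Obs input; the data points and the ensemble name 'ens1' are made up for illustration:

    import matplotlib.pyplot as plt
    from pyerrors.misc import errorbar, pseudo_Obs

    x = [1, 2, 3]                                    # plain numbers are passed through unchanged
    y = [pseudo_Obs(v, 0.1, 'ens1') for v in x]      # Obs entries are drawn as value +/- dvalue
    errorbar(x, y, marker='o', ls='none')            # remaining kwargs are forwarded to matplotlib
    plt.show()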
@@ -379,28 +381,28 @@ The axes to plot on. default is plt.
-
57def dump_object(obj, name, **kwargs):
-58    """Dump object into pickle file.
-59
-60    Parameters
-61    ----------
-62    obj : object
-63        object to be saved in the pickle file
-64    name : str
-65        name of the file
-66    path : str
-67        specifies a custom path for the file (default '.')
-68
-69    Returns
-70    -------
-71    None
-72    """
-73    if 'path' in kwargs:
-74        file_name = kwargs.get('path') + '/' + name + '.p'
-75    else:
-76        file_name = name + '.p'
-77    with open(file_name, 'wb') as fb:
-78        pickle.dump(obj, fb)
+            
58def dump_object(obj, name, **kwargs):
+59    """Dump object into pickle file.
+60
+61    Parameters
+62    ----------
+63    obj : object
+64        object to be saved in the pickle file
+65    name : str
+66        name of the file
+67    path : str
+68        specifies a custom path for the file (default '.')
+69
+70    Returns
+71    -------
+72    None
+73    """
+74    if 'path' in kwargs:
+75        file_name = kwargs.get('path') + '/' + name + '.p'
+76    else:
+77        file_name = name + '.p'
+78    with open(file_name, 'wb') as fb:
+79        pickle.dump(obj, fb)
 
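A short sketch for dump_object; the observable, file name and path are placeholders:

    from pyerrors.misc import dump_object, pseudo_Obs

    obs = pseudo_Obs(0.5, 0.01, 'ens1')
    dump_object(obs, 'my_obs', path='.')   # writes ./my_obs.p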
@@ -437,21 +439,21 @@ specifies a custom path for the file (default '.')
-
81def load_object(path):
-82    """Load object from pickle file.
-83
-84    Parameters
-85    ----------
-86    path : str
-87        path to the file
-88
-89    Returns
-90    -------
-91    object : Obs
-92        Loaded Object
-93    """
-94    with open(path, 'rb') as file:
-95        return pickle.load(file)
+            
82def load_object(path):
+83    """Load object from pickle file.
+84
+85    Parameters
+86    ----------
+87    path : str
+88        path to the file
+89
+90    Returns
+91    -------
+92    object : Obs
+93        Loaded Object
+94    """
+95    with open(path, 'rb') as file:
+96        return pickle.load(file)
 
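The matching read-back, again with a placeholder file name; note that load_object expects the full path including the '.p' suffix that dump_object appends:

    from pyerrors.misc import load_object

    obs = load_object('./my_obs.p')
    obs.gamma_method()   # error analysis works as usual on the loaded Obs
    print(obs)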
@@ -485,41 +487,41 @@ Loaded Object
-
 98def pseudo_Obs(value, dvalue, name, samples=1000):
- 99    """Generate an Obs object with given value, dvalue and name for test purposes
-100
-101    Parameters
-102    ----------
-103    value : float
-104        central value of the Obs to be generated.
-105    dvalue : float
-106        error of the Obs to be generated.
-107    name : str
-108        name of the ensemble for which the Obs is to be generated.
-109    samples: int
-110        number of samples for the Obs (default 1000).
-111
-112    Returns
-113    -------
-114    res : Obs
-115        Generated Observable
-116    """
-117    if dvalue <= 0.0:
-118        return Obs([np.zeros(samples) + value], [name])
-119    else:
-120        for _ in range(100):
-121            deltas = [np.random.normal(0.0, dvalue * np.sqrt(samples), samples)]
-122            deltas -= np.mean(deltas)
-123            deltas *= dvalue / np.sqrt((np.var(deltas) / samples)) / np.sqrt(1 + 3 / samples)
-124            deltas += value
-125            res = Obs(deltas, [name])
-126            res.gamma_method(S=2, tau_exp=0)
-127            if abs(res.dvalue - dvalue) < 1e-10 * dvalue:
-128                break
-129
-130        res._value = float(value)
-131
-132        return res
+            
 99def pseudo_Obs(value, dvalue, name, samples=1000):
+100    """Generate an Obs object with given value, dvalue and name for test purposes
+101
+102    Parameters
+103    ----------
+104    value : float
+105        central value of the Obs to be generated.
+106    dvalue : float
+107        error of the Obs to be generated.
+108    name : str
+109        name of the ensemble for which the Obs is to be generated.
+110    samples: int
+111        number of samples for the Obs (default 1000).
+112
+113    Returns
+114    -------
+115    res : Obs
+116        Generated Observable
+117    """
+118    if dvalue <= 0.0:
+119        return Obs([np.zeros(samples) + value], [name])
+120    else:
+121        for _ in range(100):
+122            deltas = [np.random.normal(0.0, dvalue * np.sqrt(samples), samples)]
+123            deltas -= np.mean(deltas)
+124            deltas *= dvalue / np.sqrt((np.var(deltas) / samples)) / np.sqrt(1 + 3 / samples)
+125            deltas += value
+126            res = Obs(deltas, [name])
+127            res.gamma_method(S=2, tau_exp=0)
+128            if abs(res.dvalue - dvalue) < 1e-10 * dvalue:
+129                break
+130
+131        res._value = float(value)
+132
+133        return res
 
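A small sketch for pseudo_Obs; value, error and ensemble name are arbitrary test inputs:

    from pyerrors.misc import pseudo_Obs

    obs = pseudo_Obs(2.3, 0.5, 'test_ensemble', samples=1000)
    obs.gamma_method()
    print(obs.value, obs.dvalue)   # close to 2.3 and 0.5 by construction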
@@ -559,46 +561,46 @@ Generated Observable
-
135def gen_correlated_data(means, cov, name, tau=0.5, samples=1000):
-136    """ Generate observables with given covariance and autocorrelation times.
-137
-138    Parameters
-139    ----------
-140    means : list
-141        list containing the mean value of each observable.
-142    cov : numpy.ndarray
-143        covariance matrix for the data to be generated.
-144    name : str
-145        ensemble name for the data to be generated.
-146    tau : float or list
-147        can either be a real number or a list with an entry for
-148        every dataset.
-149    samples : int
-150        number of samples to be generated for each observable.
-151
-152    Returns
-153    -------
-154    corr_obs : list[Obs]
-155        Generated observable list
-156    """
-157
-158    assert len(means) == cov.shape[-1]
-159    tau = np.asarray(tau)
-160    if np.min(tau) < 0.5:
-161        raise Exception('All integrated autocorrelations have to be >= 0.5.')
-162
-163    a = (2 * tau - 1) / (2 * tau + 1)
-164    rand = np.random.multivariate_normal(np.zeros_like(means), cov * samples, samples)
-165
-166    # Normalize samples such that sample variance matches input
-167    norm = np.array([np.var(o, ddof=1) / samples for o in rand.T])
-168    rand = rand @ np.diag(np.sqrt(np.diag(cov))) @ np.diag(1 / np.sqrt(norm))
-169
-170    data = [rand[0]]
-171    for i in range(1, samples):
-172        data.append(np.sqrt(1 - a ** 2) * rand[i] + a * data[-1])
-173    corr_data = np.array(data) - np.mean(data, axis=0) + means
-174    return [Obs([dat], [name]) for dat in corr_data.T]
+            
136def gen_correlated_data(means, cov, name, tau=0.5, samples=1000):
+137    """ Generate observables with given covariance and autocorrelation times.
+138
+139    Parameters
+140    ----------
+141    means : list
+142        list containing the mean value of each observable.
+143    cov : numpy.ndarray
+144        covariance matrix for the data to be generated.
+145    name : str
+146        ensemble name for the data to be generated.
+147    tau : float or list
+148        can either be a real number or a list with an entry for
+149        every dataset.
+150    samples : int
+151        number of samples to be generated for each observable.
+152
+153    Returns
+154    -------
+155    corr_obs : list[Obs]
+156        Generated observable list
+157    """
+158
+159    assert len(means) == cov.shape[-1]
+160    tau = np.asarray(tau)
+161    if np.min(tau) < 0.5:
+162        raise Exception('All integrated autocorrelations have to be >= 0.5.')
+163
+164    a = (2 * tau - 1) / (2 * tau + 1)
+165    rand = np.random.multivariate_normal(np.zeros_like(means), cov * samples, samples)
+166
+167    # Normalize samples such that sample variance matches input
+168    norm = np.array([np.var(o, ddof=1) / samples for o in rand.T])
+169    rand = rand @ np.diag(np.sqrt(np.diag(cov))) @ np.diag(1 / np.sqrt(norm))
+170
+171    data = [rand[0]]
+172    for i in range(1, samples):
+173        data.append(np.sqrt(1 - a ** 2) * rand[i] + a * data[-1])
+174    corr_data = np.array(data) - np.mean(data, axis=0) + means
+175    return [Obs([dat], [name]) for dat in corr_data.T]
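
A sketch for gen_correlated_data with a hypothetical 2x2 covariance matrix and per-observable autocorrelation times:

    import numpy as np
    from pyerrors.misc import gen_correlated_data

    cov = np.array([[0.5, -0.2],
                    [-0.2, 0.3]])
    obs_list = gen_correlated_data([1.0, 2.0], cov, 'synthetic_ens',
                                   tau=[2.0, 4.0], samples=1000)
    for o in obs_list:
        o.gamma_method()   # errors and tau_int should be consistent with the input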