From ffa1193595665d959f255c36602aec434ee40b66 Mon Sep 17 00:00:00 2001
From: fjosw
Date: Fri, 14 Jul 2023 12:13:08 +0000
Subject: [PATCH] Documentation updated

---
 docs/pyerrors/obs.html | 7714 +++++++++++++++++++++-------------------
 docs/search.js         |    2 +-
 2 files changed, 3992 insertions(+), 3724 deletions(-)

diff --git a/docs/pyerrors/obs.html b/docs/pyerrors/obs.html
index ebbfa10a..313720b4 100644
--- a/docs/pyerrors/obs.html
+++ b/docs/pyerrors/obs.html
@@ -163,6 +163,9 @@
  • export_jackknife
+ • export_bootstrap
  • sqrt
@@ -289,6 +292,9 @@
  • import_jackknife
+ • import_bootstrap
  • merge_obs
  • @@ -321,1654 +327,1728 @@ 3import pickle 4import numpy as np 5import autograd.numpy as anp # Thinly-wrapped numpy - 6from autograd import jacobian - 7import matplotlib.pyplot as plt - 8from scipy.stats import skew, skewtest, kurtosis, kurtosistest - 9import numdifftools as nd - 10from itertools import groupby - 11from .covobs import Covobs - 12 - 13# Improve print output of numpy.ndarrays containing Obs objects. - 14np.set_printoptions(formatter={'object': lambda x: str(x)}) - 15 + 6import scipy + 7from autograd import jacobian + 8import matplotlib.pyplot as plt + 9from scipy.stats import skew, skewtest, kurtosis, kurtosistest + 10import numdifftools as nd + 11from itertools import groupby + 12from .covobs import Covobs + 13 + 14# Improve print output of numpy.ndarrays containing Obs objects. + 15np.set_printoptions(formatter={'object': lambda x: str(x)}) 16 - 17class Obs: - 18 """Class for a general observable. - 19 - 20 Instances of Obs are the basic objects of a pyerrors error analysis. - 21 They are initialized with a list which contains arrays of samples for - 22 different ensembles/replica and another list of same length which contains - 23 the names of the ensembles/replica. Mathematical operations can be - 24 performed on instances. The result is another instance of Obs. The error of - 25 an instance can be computed with the gamma_method. Also contains additional - 26 methods for output and visualization of the error calculation. - 27 - 28 Attributes - 29 ---------- - 30 S_global : float - 31 Standard value for S (default 2.0) - 32 S_dict : dict - 33 Dictionary for S values. If an entry for a given ensemble - 34 exists this overwrites the standard value for that ensemble. - 35 tau_exp_global : float - 36 Standard value for tau_exp (default 0.0) - 37 tau_exp_dict : dict - 38 Dictionary for tau_exp values. If an entry for a given ensemble exists - 39 this overwrites the standard value for that ensemble. - 40 N_sigma_global : float - 41 Standard value for N_sigma (default 1.0) - 42 N_sigma_dict : dict - 43 Dictionary for N_sigma values. If an entry for a given ensemble exists - 44 this overwrites the standard value for that ensemble. - 45 """ - 46 __slots__ = ['names', 'shape', 'r_values', 'deltas', 'N', '_value', '_dvalue', - 47 'ddvalue', 'reweighted', 'S', 'tau_exp', 'N_sigma', - 48 'e_dvalue', 'e_ddvalue', 'e_tauint', 'e_dtauint', - 49 'e_windowsize', 'e_rho', 'e_drho', 'e_n_tauint', 'e_n_dtauint', - 50 'idl', 'tag', '_covobs', '__dict__'] - 51 - 52 S_global = 2.0 - 53 S_dict = {} - 54 tau_exp_global = 0.0 - 55 tau_exp_dict = {} - 56 N_sigma_global = 1.0 - 57 N_sigma_dict = {} - 58 - 59 def __init__(self, samples, names, idl=None, **kwargs): - 60 """ Initialize Obs object. 
- 61 - 62 Parameters - 63 ---------- - 64 samples : list - 65 list of numpy arrays containing the Monte Carlo samples - 66 names : list - 67 list of strings labeling the individual samples - 68 idl : list, optional - 69 list of ranges or lists on which the samples are defined - 70 """ - 71 - 72 if kwargs.get("means") is None and len(samples): - 73 if len(samples) != len(names): - 74 raise ValueError('Length of samples and names incompatible.') - 75 if idl is not None: - 76 if len(idl) != len(names): - 77 raise ValueError('Length of idl incompatible with samples and names.') - 78 name_length = len(names) - 79 if name_length > 1: - 80 if name_length != len(set(names)): - 81 raise ValueError('Names are not unique.') - 82 if not all(isinstance(x, str) for x in names): - 83 raise TypeError('All names have to be strings.') - 84 else: - 85 if not isinstance(names[0], str): - 86 raise TypeError('All names have to be strings.') - 87 if min(len(x) for x in samples) <= 4: - 88 raise ValueError('Samples have to have at least 5 entries.') - 89 - 90 self.names = sorted(names) - 91 self.shape = {} - 92 self.r_values = {} - 93 self.deltas = {} - 94 self._covobs = {} - 95 - 96 self._value = 0 - 97 self.N = 0 - 98 self.idl = {} - 99 if idl is not None: - 100 for name, idx in sorted(zip(names, idl)): - 101 if isinstance(idx, range): - 102 self.idl[name] = idx - 103 elif isinstance(idx, (list, np.ndarray)): - 104 dc = np.unique(np.diff(idx)) - 105 if np.any(dc < 0): - 106 raise ValueError("Unsorted idx for idl[%s]" % (name)) - 107 if len(dc) == 1: - 108 self.idl[name] = range(idx[0], idx[-1] + dc[0], dc[0]) - 109 else: - 110 self.idl[name] = list(idx) - 111 else: - 112 raise TypeError('incompatible type for idl[%s].' % (name)) - 113 else: - 114 for name, sample in sorted(zip(names, samples)): - 115 self.idl[name] = range(1, len(sample) + 1) - 116 - 117 if kwargs.get("means") is not None: - 118 for name, sample, mean in sorted(zip(names, samples, kwargs.get("means"))): - 119 self.shape[name] = len(self.idl[name]) - 120 self.N += self.shape[name] - 121 self.r_values[name] = mean - 122 self.deltas[name] = sample - 123 else: - 124 for name, sample in sorted(zip(names, samples)): - 125 self.shape[name] = len(self.idl[name]) - 126 self.N += self.shape[name] - 127 if len(sample) != self.shape[name]: - 128 raise ValueError('Incompatible samples and idx for %s: %d vs. 
%d' % (name, len(sample), self.shape[name])) - 129 self.r_values[name] = np.mean(sample) - 130 self.deltas[name] = sample - self.r_values[name] - 131 self._value += self.shape[name] * self.r_values[name] - 132 self._value /= self.N - 133 - 134 self._dvalue = 0.0 - 135 self.ddvalue = 0.0 - 136 self.reweighted = False - 137 - 138 self.tag = None - 139 - 140 @property - 141 def value(self): - 142 return self._value - 143 - 144 @property - 145 def dvalue(self): - 146 return self._dvalue - 147 - 148 @property - 149 def e_names(self): - 150 return sorted(set([o.split('|')[0] for o in self.names])) - 151 - 152 @property - 153 def cov_names(self): - 154 return sorted(set([o for o in self.covobs.keys()])) - 155 - 156 @property - 157 def mc_names(self): - 158 return sorted(set([o.split('|')[0] for o in self.names if o not in self.cov_names])) - 159 - 160 @property - 161 def e_content(self): - 162 res = {} - 163 for e, e_name in enumerate(self.e_names): - 164 res[e_name] = sorted(filter(lambda x: x.startswith(e_name + '|'), self.names)) - 165 if e_name in self.names: - 166 res[e_name].append(e_name) - 167 return res - 168 - 169 @property - 170 def covobs(self): - 171 return self._covobs - 172 - 173 def gamma_method(self, **kwargs): - 174 """Estimate the error and related properties of the Obs. - 175 - 176 Parameters - 177 ---------- - 178 S : float - 179 specifies a custom value for the parameter S (default 2.0). - 180 If set to 0 it is assumed that the data exhibits no - 181 autocorrelation. In this case the error estimates coincides - 182 with the sample standard error. - 183 tau_exp : float - 184 positive value triggers the critical slowing down analysis - 185 (default 0.0). - 186 N_sigma : float - 187 number of standard deviations from zero until the tail is - 188 attached to the autocorrelation function (default 1). 
- 189 fft : bool - 190 determines whether the fft algorithm is used for the computation - 191 of the autocorrelation function (default True) - 192 """ - 193 - 194 e_content = self.e_content - 195 self.e_dvalue = {} - 196 self.e_ddvalue = {} - 197 self.e_tauint = {} - 198 self.e_dtauint = {} - 199 self.e_windowsize = {} - 200 self.e_n_tauint = {} - 201 self.e_n_dtauint = {} - 202 e_gamma = {} - 203 self.e_rho = {} - 204 self.e_drho = {} - 205 self._dvalue = 0 - 206 self.ddvalue = 0 - 207 - 208 self.S = {} - 209 self.tau_exp = {} - 210 self.N_sigma = {} - 211 - 212 if kwargs.get('fft') is False: - 213 fft = False - 214 else: - 215 fft = True - 216 - 217 def _parse_kwarg(kwarg_name): - 218 if kwarg_name in kwargs: - 219 tmp = kwargs.get(kwarg_name) - 220 if isinstance(tmp, (int, float)): - 221 if tmp < 0: - 222 raise Exception(kwarg_name + ' has to be larger or equal to 0.') - 223 for e, e_name in enumerate(self.e_names): - 224 getattr(self, kwarg_name)[e_name] = tmp - 225 else: - 226 raise TypeError(kwarg_name + ' is not in proper format.') - 227 else: - 228 for e, e_name in enumerate(self.e_names): - 229 if e_name in getattr(Obs, kwarg_name + '_dict'): - 230 getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name] - 231 else: - 232 getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global') - 233 - 234 _parse_kwarg('S') - 235 _parse_kwarg('tau_exp') - 236 _parse_kwarg('N_sigma') - 237 - 238 for e, e_name in enumerate(self.mc_names): - 239 gapsize = _determine_gap(self, e_content, e_name) - 240 - 241 r_length = [] - 242 for r_name in e_content[e_name]: - 243 if isinstance(self.idl[r_name], range): - 244 r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize) - 245 else: - 246 r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize) - 247 - 248 e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]]) - 249 w_max = max(r_length) // 2 - 250 e_gamma[e_name] = np.zeros(w_max) - 251 self.e_rho[e_name] = np.zeros(w_max) - 252 self.e_drho[e_name] = np.zeros(w_max) - 253 - 254 for r_name in e_content[e_name]: - 255 e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize) - 256 - 257 gamma_div = np.zeros(w_max) - 258 for r_name in e_content[e_name]: - 259 gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize) - 260 gamma_div[gamma_div < 1] = 1.0 - 261 e_gamma[e_name] /= gamma_div[:w_max] - 262 - 263 if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny: # Prevent division by zero - 264 self.e_tauint[e_name] = 0.5 - 265 self.e_dtauint[e_name] = 0.0 - 266 self.e_dvalue[e_name] = 0.0 - 267 self.e_ddvalue[e_name] = 0.0 - 268 self.e_windowsize[e_name] = 0 - 269 continue - 270 - 271 self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0] - 272 self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:]))) - 273 # Make sure no entry of tauint is smaller than 0.5 - 274 self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps - 275 # hep-lat/0306017 eq. 
(42) - 276 self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N) - 277 self.e_n_dtauint[e_name][0] = 0.0 - 278 - 279 def _compute_drho(i): - 280 tmp = (self.e_rho[e_name][i + 1:w_max] - 281 + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1], - 282 self.e_rho[e_name][1:max(1, w_max - 2 * i)]]) - 283 - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i]) - 284 self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N) - 285 - 286 if self.tau_exp[e_name] > 0: - 287 _compute_drho(1) - 288 texp = self.tau_exp[e_name] - 289 # Critical slowing down analysis - 290 if w_max // 2 <= 1: - 291 raise Exception("Need at least 8 samples for tau_exp error analysis") - 292 for n in range(1, w_max // 2): - 293 _compute_drho(n + 1) - 294 if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2: - 295 # Bias correction hep-lat/0306017 eq. (49) included - 296 self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1]) # The absolute makes sure, that the tail contribution is always positive - 297 self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2) - 298 # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2 - 299 self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N) - 300 self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N) - 301 self.e_windowsize[e_name] = n - 302 break - 303 else: - 304 if self.S[e_name] == 0.0: - 305 self.e_tauint[e_name] = 0.5 - 306 self.e_dtauint[e_name] = 0.0 - 307 self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1)) - 308 self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N) - 309 self.e_windowsize[e_name] = 0 - 310 else: - 311 # Standard automatic windowing procedure - 312 tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1)) - 313 g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N) - 314 for n in range(1, w_max): - 315 if g_w[n - 1] < 0 or n >= w_max - 1: - 316 _compute_drho(n) - 317 self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) # Bias correction hep-lat/0306017 eq. (49) - 318 self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n] - 319 self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N) - 320 self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N) - 321 self.e_windowsize[e_name] = n - 322 break - 323 - 324 self._dvalue += self.e_dvalue[e_name] ** 2 - 325 self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2 - 326 - 327 for e_name in self.cov_names: - 328 self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq()) - 329 self.e_ddvalue[e_name] = 0 - 330 self._dvalue += self.e_dvalue[e_name]**2 - 331 - 332 self._dvalue = np.sqrt(self._dvalue) - 333 if self._dvalue == 0.0: - 334 self.ddvalue = 0.0 - 335 else: - 336 self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue - 337 return - 338 - 339 gm = gamma_method - 340 - 341 def _calc_gamma(self, deltas, idx, shape, w_max, fft, gapsize): - 342 """Calculate Gamma_{AA} from the deltas, which are defined on idx. 
- 343 idx is assumed to be a contiguous range (possibly with a stepsize != 1) - 344 - 345 Parameters - 346 ---------- - 347 deltas : list - 348 List of fluctuations - 349 idx : list - 350 List or range of configurations on which the deltas are defined. - 351 shape : int - 352 Number of configurations in idx. - 353 w_max : int - 354 Upper bound for the summation window. - 355 fft : bool - 356 determines whether the fft algorithm is used for the computation - 357 of the autocorrelation function. - 358 gapsize : int - 359 The target distance between two configurations. If longer distances - 360 are found in idx, the data is expanded. - 361 """ - 362 gamma = np.zeros(w_max) - 363 deltas = _expand_deltas(deltas, idx, shape, gapsize) - 364 new_shape = len(deltas) - 365 if fft: - 366 max_gamma = min(new_shape, w_max) - 367 # The padding for the fft has to be even - 368 padding = new_shape + max_gamma + (new_shape + max_gamma) % 2 - 369 gamma[:max_gamma] += np.fft.irfft(np.abs(np.fft.rfft(deltas, padding)) ** 2)[:max_gamma] - 370 else: - 371 for n in range(w_max): - 372 if new_shape - n >= 0: - 373 gamma[n] += deltas[0:new_shape - n].dot(deltas[n:new_shape]) - 374 - 375 return gamma - 376 - 377 def details(self, ens_content=True): - 378 """Output detailed properties of the Obs. - 379 - 380 Parameters - 381 ---------- - 382 ens_content : bool - 383 print details about the ensembles and replica if true. - 384 """ - 385 if self.tag is not None: - 386 print("Description:", self.tag) - 387 if not hasattr(self, 'e_dvalue'): - 388 print('Result\t %3.8e' % (self.value)) - 389 else: - 390 if self.value == 0.0: - 391 percentage = np.nan - 392 else: - 393 percentage = np.abs(self._dvalue / self.value) * 100 - 394 print('Result\t %3.8e +/- %3.8e +/- %3.8e (%3.3f%%)' % (self.value, self._dvalue, self.ddvalue, percentage)) - 395 if len(self.e_names) > 1: - 396 print(' Ensemble errors:') - 397 e_content = self.e_content - 398 for e_name in self.mc_names: - 399 gap = _determine_gap(self, e_content, e_name) - 400 - 401 if len(self.e_names) > 1: - 402 print('', e_name, '\t %3.6e +/- %3.6e' % (self.e_dvalue[e_name], self.e_ddvalue[e_name])) - 403 tau_string = " \N{GREEK SMALL LETTER TAU}_int\t " + _format_uncertainty(self.e_tauint[e_name], self.e_dtauint[e_name]) - 404 tau_string += f" in units of {gap} config" - 405 if gap > 1: - 406 tau_string += "s" - 407 if self.tau_exp[e_name] > 0: - 408 tau_string = f"{tau_string: <45}" + '\t(\N{GREEK SMALL LETTER TAU}_exp=%3.2f, N_\N{GREEK SMALL LETTER SIGMA}=%1.0i)' % (self.tau_exp[e_name], self.N_sigma[e_name]) - 409 else: - 410 tau_string = f"{tau_string: <45}" + '\t(S=%3.2f)' % (self.S[e_name]) - 411 print(tau_string) - 412 for e_name in self.cov_names: - 413 print('', e_name, '\t %3.8e' % (self.e_dvalue[e_name])) - 414 if ens_content is True: - 415 if len(self.e_names) == 1: - 416 print(self.N, 'samples in', len(self.e_names), 'ensemble:') - 417 else: - 418 print(self.N, 'samples in', len(self.e_names), 'ensembles:') - 419 my_string_list = [] - 420 for key, value in sorted(self.e_content.items()): - 421 if key not in self.covobs: - 422 my_string = ' ' + "\u00B7 Ensemble '" + key + "' " - 423 if len(value) == 1: - 424 my_string += f': {self.shape[value[0]]} configurations' - 425 if isinstance(self.idl[value[0]], range): - 426 my_string += f' (from {self.idl[value[0]].start} to {self.idl[value[0]][-1]}' + int(self.idl[value[0]].step != 1) * f' in steps of {self.idl[value[0]].step}' + ')' - 427 else: - 428 my_string += f' (irregular range from {self.idl[value[0]][0]} to 
{self.idl[value[0]][-1]})' - 429 else: - 430 sublist = [] - 431 for v in value: - 432 my_substring = ' ' + "\u00B7 Replicum '" + v[len(key) + 1:] + "' " - 433 my_substring += f': {self.shape[v]} configurations' - 434 if isinstance(self.idl[v], range): - 435 my_substring += f' (from {self.idl[v].start} to {self.idl[v][-1]}' + int(self.idl[v].step != 1) * f' in steps of {self.idl[v].step}' + ')' - 436 else: - 437 my_substring += f' (irregular range from {self.idl[v][0]} to {self.idl[v][-1]})' - 438 sublist.append(my_substring) - 439 - 440 my_string += '\n' + '\n'.join(sublist) - 441 else: - 442 my_string = ' ' + "\u00B7 Covobs '" + key + "' " - 443 my_string_list.append(my_string) - 444 print('\n'.join(my_string_list)) - 445 - 446 def reweight(self, weight): - 447 """Reweight the obs with given rewighting factors. - 448 - 449 Parameters - 450 ---------- - 451 weight : Obs - 452 Reweighting factor. An Observable that has to be defined on a superset of the - 453 configurations in obs[i].idl for all i. - 454 all_configs : bool - 455 if True, the reweighted observables are normalized by the average of - 456 the reweighting factor on all configurations in weight.idl and not - 457 on the configurations in obs[i].idl. Default False. - 458 """ - 459 return reweight(weight, [self])[0] - 460 - 461 def is_zero_within_error(self, sigma=1): - 462 """Checks whether the observable is zero within 'sigma' standard errors. - 463 - 464 Parameters - 465 ---------- - 466 sigma : int - 467 Number of standard errors used for the check. - 468 - 469 Works only properly when the gamma method was run. - 470 """ - 471 return self.is_zero() or np.abs(self.value) <= sigma * self._dvalue - 472 - 473 def is_zero(self, atol=1e-10): - 474 """Checks whether the observable is zero within a given tolerance. - 475 - 476 Parameters - 477 ---------- - 478 atol : float - 479 Absolute tolerance (for details see numpy documentation). - 480 """ - 481 return np.isclose(0.0, self.value, 1e-14, atol) and all(np.allclose(0.0, delta, 1e-14, atol) for delta in self.deltas.values()) and all(np.allclose(0.0, delta.errsq(), 1e-14, atol) for delta in self.covobs.values()) - 482 - 483 def plot_tauint(self, save=None): - 484 """Plot integrated autocorrelation time for each ensemble. - 485 - 486 Parameters - 487 ---------- - 488 save : str - 489 saves the figure to a file named 'save' if. 
- 490 """ - 491 if not hasattr(self, 'e_dvalue'): - 492 raise Exception('Run the gamma method first.') - 493 - 494 for e, e_name in enumerate(self.mc_names): - 495 fig = plt.figure() - 496 plt.xlabel(r'$W$') - 497 plt.ylabel(r'$\tau_\mathrm{int}$') - 498 length = int(len(self.e_n_tauint[e_name])) - 499 if self.tau_exp[e_name] > 0: - 500 base = self.e_n_tauint[e_name][self.e_windowsize[e_name]] - 501 x_help = np.arange(2 * self.tau_exp[e_name]) - 502 y_help = (x_help + 1) * np.abs(self.e_rho[e_name][self.e_windowsize[e_name] + 1]) * (1 - x_help / (2 * (2 * self.tau_exp[e_name] - 1))) + base - 503 x_arr = np.arange(self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]) - 504 plt.plot(x_arr, y_help, 'C' + str(e), linewidth=1, ls='--', marker=',') - 505 plt.errorbar([self.e_windowsize[e_name] + 2 * self.tau_exp[e_name]], [self.e_tauint[e_name]], - 506 yerr=[self.e_dtauint[e_name]], fmt='C' + str(e), linewidth=1, capsize=2, marker='o', mfc=plt.rcParams['axes.facecolor']) - 507 xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5 - 508 label = e_name + r', $\tau_\mathrm{exp}$=' + str(np.around(self.tau_exp[e_name], decimals=2)) - 509 else: - 510 label = e_name + ', S=' + str(np.around(self.S[e_name], decimals=2)) - 511 xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5) - 512 - 513 plt.errorbar(np.arange(length)[:int(xmax) + 1], self.e_n_tauint[e_name][:int(xmax) + 1], yerr=self.e_n_dtauint[e_name][:int(xmax) + 1], linewidth=1, capsize=2, label=label) - 514 plt.axvline(x=self.e_windowsize[e_name], color='C' + str(e), alpha=0.5, marker=',', ls='--') - 515 plt.legend() - 516 plt.xlim(-0.5, xmax) - 517 ylim = plt.ylim() - 518 plt.ylim(bottom=0.0, top=max(1.0, ylim[1])) - 519 plt.draw() - 520 if save: - 521 fig.savefig(save + "_" + str(e)) - 522 - 523 def plot_rho(self, save=None): - 524 """Plot normalized autocorrelation function time for each ensemble. - 525 - 526 Parameters - 527 ---------- - 528 save : str - 529 saves the figure to a file named 'save' if. 
- 530 """ - 531 if not hasattr(self, 'e_dvalue'): - 532 raise Exception('Run the gamma method first.') - 533 for e, e_name in enumerate(self.mc_names): - 534 fig = plt.figure() - 535 plt.xlabel('W') - 536 plt.ylabel('rho') - 537 length = int(len(self.e_drho[e_name])) - 538 plt.errorbar(np.arange(length), self.e_rho[e_name][:length], yerr=self.e_drho[e_name][:], linewidth=1, capsize=2) - 539 plt.axvline(x=self.e_windowsize[e_name], color='r', alpha=0.25, ls='--', marker=',') - 540 if self.tau_exp[e_name] > 0: - 541 plt.plot([self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]], - 542 [self.e_rho[e_name][self.e_windowsize[e_name] + 1], 0], 'k-', lw=1) - 543 xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5 - 544 plt.title('Rho ' + e_name + r', tau\_exp=' + str(np.around(self.tau_exp[e_name], decimals=2))) - 545 else: - 546 xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5) - 547 plt.title('Rho ' + e_name + ', S=' + str(np.around(self.S[e_name], decimals=2))) - 548 plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1) - 549 plt.xlim(-0.5, xmax) - 550 plt.draw() - 551 if save: - 552 fig.savefig(save + "_" + str(e)) - 553 - 554 def plot_rep_dist(self): - 555 """Plot replica distribution for each ensemble with more than one replicum.""" - 556 if not hasattr(self, 'e_dvalue'): - 557 raise Exception('Run the gamma method first.') - 558 for e, e_name in enumerate(self.mc_names): - 559 if len(self.e_content[e_name]) == 1: - 560 print('No replica distribution for a single replicum (', e_name, ')') - 561 continue - 562 r_length = [] - 563 sub_r_mean = 0 - 564 for r, r_name in enumerate(self.e_content[e_name]): - 565 r_length.append(len(self.deltas[r_name])) - 566 sub_r_mean += self.shape[r_name] * self.r_values[r_name] - 567 e_N = np.sum(r_length) - 568 sub_r_mean /= e_N - 569 arr = np.zeros(len(self.e_content[e_name])) - 570 for r, r_name in enumerate(self.e_content[e_name]): - 571 arr[r] = (self.r_values[r_name] - sub_r_mean) / (self.e_dvalue[e_name] * np.sqrt(e_N / self.shape[r_name] - 1)) - 572 plt.hist(arr, rwidth=0.8, bins=len(self.e_content[e_name])) - 573 plt.title('Replica distribution' + e_name + ' (mean=0, var=1)') - 574 plt.draw() - 575 - 576 def plot_history(self, expand=True): - 577 """Plot derived Monte Carlo history for each ensemble - 578 - 579 Parameters - 580 ---------- - 581 expand : bool - 582 show expanded history for irregular Monte Carlo chains (default: True). 
- 583 """ - 584 for e, e_name in enumerate(self.mc_names): - 585 plt.figure() - 586 r_length = [] - 587 tmp = [] - 588 tmp_expanded = [] - 589 for r, r_name in enumerate(self.e_content[e_name]): - 590 tmp.append(self.deltas[r_name] + self.r_values[r_name]) - 591 if expand: - 592 tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name], 1) + self.r_values[r_name]) - 593 r_length.append(len(tmp_expanded[-1])) - 594 else: - 595 r_length.append(len(tmp[-1])) - 596 e_N = np.sum(r_length) - 597 x = np.arange(e_N) - 598 y_test = np.concatenate(tmp, axis=0) - 599 if expand: - 600 y = np.concatenate(tmp_expanded, axis=0) - 601 else: - 602 y = y_test - 603 plt.errorbar(x, y, fmt='.', markersize=3) - 604 plt.xlim(-0.5, e_N - 0.5) - 605 plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})') - 606 plt.draw() - 607 - 608 def plot_piechart(self, save=None): - 609 """Plot piechart which shows the fractional contribution of each - 610 ensemble to the error and returns a dictionary containing the fractions. - 611 - 612 Parameters - 613 ---------- - 614 save : str - 615 saves the figure to a file named 'save' if. - 616 """ - 617 if not hasattr(self, 'e_dvalue'): - 618 raise Exception('Run the gamma method first.') - 619 if np.isclose(0.0, self._dvalue, atol=1e-15): - 620 raise Exception('Error is 0.0') - 621 labels = self.e_names - 622 sizes = [self.e_dvalue[name] ** 2 for name in labels] / self._dvalue ** 2 - 623 fig1, ax1 = plt.subplots() - 624 ax1.pie(sizes, labels=labels, startangle=90, normalize=True) - 625 ax1.axis('equal') - 626 plt.draw() - 627 if save: - 628 fig1.savefig(save) - 629 - 630 return dict(zip(labels, sizes)) - 631 - 632 def dump(self, filename, datatype="json.gz", description="", **kwargs): - 633 """Dump the Obs to a file 'name' of chosen format. - 634 - 635 Parameters - 636 ---------- - 637 filename : str - 638 name of the file to be saved. - 639 datatype : str - 640 Format of the exported file. Supported formats include - 641 "json.gz" and "pickle" - 642 description : str - 643 Description for output file, only relevant for json.gz format. - 644 path : str - 645 specifies a custom path for the file (default '.') - 646 """ - 647 if 'path' in kwargs: - 648 file_name = kwargs.get('path') + '/' + filename - 649 else: - 650 file_name = filename - 651 - 652 if datatype == "json.gz": - 653 from .input.json import dump_to_json - 654 dump_to_json([self], file_name, description=description) - 655 elif datatype == "pickle": - 656 with open(file_name + '.p', 'wb') as fb: - 657 pickle.dump(self, fb) - 658 else: - 659 raise Exception("Unknown datatype " + str(datatype)) - 660 - 661 def export_jackknife(self): - 662 """Export jackknife samples from the Obs - 663 - 664 Returns - 665 ------- - 666 numpy.ndarray - 667 Returns a numpy array of length N + 1 where N is the number of samples - 668 for the given ensemble and replicum. The zeroth entry of the array contains - 669 the mean value of the Obs, entries 1 to N contain the N jackknife samples - 670 derived from the Obs. The current implementation only works for observables - 671 defined on exactly one ensemble and replicum. The derived jackknife samples - 672 should agree with samples from a full jackknife analysis up to O(1/N). 
- 673 """ - 674 - 675 if len(self.names) != 1: - 676 raise Exception("'export_jackknife' is only implemented for Obs defined on one ensemble and replicum.") - 677 - 678 name = self.names[0] - 679 full_data = self.deltas[name] + self.r_values[name] - 680 n = full_data.size - 681 mean = self.value - 682 tmp_jacks = np.zeros(n + 1) - 683 tmp_jacks[0] = mean - 684 tmp_jacks[1:] = (n * mean - full_data) / (n - 1) - 685 return tmp_jacks - 686 - 687 def __float__(self): - 688 return float(self.value) - 689 - 690 def __repr__(self): - 691 return 'Obs[' + str(self) + ']' - 692 - 693 def __str__(self): - 694 return _format_uncertainty(self.value, self._dvalue) - 695 - 696 def __format__(self, format_type): - 697 if format_type == "": - 698 significance = 2 - 699 else: - 700 significance = int(float(format_type.replace("+", "").replace("-", ""))) - 701 my_str = _format_uncertainty(self.value, self._dvalue, - 702 significance=significance) - 703 for char in ["+", " "]: - 704 if format_type.startswith(char): - 705 if my_str[0] != "-": - 706 my_str = char + my_str - 707 return my_str - 708 - 709 def __hash__(self): - 710 hash_tuple = (np.array([self.value]).astype(np.float32).data.tobytes(),) - 711 hash_tuple += tuple([o.astype(np.float32).data.tobytes() for o in self.deltas.values()]) - 712 hash_tuple += tuple([np.array([o.errsq()]).astype(np.float32).data.tobytes() for o in self.covobs.values()]) - 713 hash_tuple += tuple([o.encode() for o in self.names]) - 714 m = hashlib.md5() - 715 [m.update(o) for o in hash_tuple] - 716 return int(m.hexdigest(), 16) & 0xFFFFFFFF - 717 - 718 # Overload comparisons - 719 def __lt__(self, other): - 720 return self.value < other + 17 + 18class Obs: + 19 """Class for a general observable. + 20 + 21 Instances of Obs are the basic objects of a pyerrors error analysis. + 22 They are initialized with a list which contains arrays of samples for + 23 different ensembles/replica and another list of same length which contains + 24 the names of the ensembles/replica. Mathematical operations can be + 25 performed on instances. The result is another instance of Obs. The error of + 26 an instance can be computed with the gamma_method. Also contains additional + 27 methods for output and visualization of the error calculation. + 28 + 29 Attributes + 30 ---------- + 31 S_global : float + 32 Standard value for S (default 2.0) + 33 S_dict : dict + 34 Dictionary for S values. If an entry for a given ensemble + 35 exists this overwrites the standard value for that ensemble. + 36 tau_exp_global : float + 37 Standard value for tau_exp (default 0.0) + 38 tau_exp_dict : dict + 39 Dictionary for tau_exp values. If an entry for a given ensemble exists + 40 this overwrites the standard value for that ensemble. + 41 N_sigma_global : float + 42 Standard value for N_sigma (default 1.0) + 43 N_sigma_dict : dict + 44 Dictionary for N_sigma values. If an entry for a given ensemble exists + 45 this overwrites the standard value for that ensemble. + 46 """ + 47 __slots__ = ['names', 'shape', 'r_values', 'deltas', 'N', '_value', '_dvalue', + 48 'ddvalue', 'reweighted', 'S', 'tau_exp', 'N_sigma', + 49 'e_dvalue', 'e_ddvalue', 'e_tauint', 'e_dtauint', + 50 'e_windowsize', 'e_rho', 'e_drho', 'e_n_tauint', 'e_n_dtauint', + 51 'idl', 'tag', '_covobs', '__dict__'] + 52 + 53 S_global = 2.0 + 54 S_dict = {} + 55 tau_exp_global = 0.0 + 56 tau_exp_dict = {} + 57 N_sigma_global = 1.0 + 58 N_sigma_dict = {} + 59 + 60 def __init__(self, samples, names, idl=None, **kwargs): + 61 """ Initialize Obs object. 
+ 62 + 63 Parameters + 64 ---------- + 65 samples : list + 66 list of numpy arrays containing the Monte Carlo samples + 67 names : list + 68 list of strings labeling the individual samples + 69 idl : list, optional + 70 list of ranges or lists on which the samples are defined + 71 """ + 72 + 73 if kwargs.get("means") is None and len(samples): + 74 if len(samples) != len(names): + 75 raise ValueError('Length of samples and names incompatible.') + 76 if idl is not None: + 77 if len(idl) != len(names): + 78 raise ValueError('Length of idl incompatible with samples and names.') + 79 name_length = len(names) + 80 if name_length > 1: + 81 if name_length != len(set(names)): + 82 raise ValueError('Names are not unique.') + 83 if not all(isinstance(x, str) for x in names): + 84 raise TypeError('All names have to be strings.') + 85 else: + 86 if not isinstance(names[0], str): + 87 raise TypeError('All names have to be strings.') + 88 if min(len(x) for x in samples) <= 4: + 89 raise ValueError('Samples have to have at least 5 entries.') + 90 + 91 self.names = sorted(names) + 92 self.shape = {} + 93 self.r_values = {} + 94 self.deltas = {} + 95 self._covobs = {} + 96 + 97 self._value = 0 + 98 self.N = 0 + 99 self.idl = {} + 100 if idl is not None: + 101 for name, idx in sorted(zip(names, idl)): + 102 if isinstance(idx, range): + 103 self.idl[name] = idx + 104 elif isinstance(idx, (list, np.ndarray)): + 105 dc = np.unique(np.diff(idx)) + 106 if np.any(dc < 0): + 107 raise ValueError("Unsorted idx for idl[%s]" % (name)) + 108 if len(dc) == 1: + 109 self.idl[name] = range(idx[0], idx[-1] + dc[0], dc[0]) + 110 else: + 111 self.idl[name] = list(idx) + 112 else: + 113 raise TypeError('incompatible type for idl[%s].' % (name)) + 114 else: + 115 for name, sample in sorted(zip(names, samples)): + 116 self.idl[name] = range(1, len(sample) + 1) + 117 + 118 if kwargs.get("means") is not None: + 119 for name, sample, mean in sorted(zip(names, samples, kwargs.get("means"))): + 120 self.shape[name] = len(self.idl[name]) + 121 self.N += self.shape[name] + 122 self.r_values[name] = mean + 123 self.deltas[name] = sample + 124 else: + 125 for name, sample in sorted(zip(names, samples)): + 126 self.shape[name] = len(self.idl[name]) + 127 self.N += self.shape[name] + 128 if len(sample) != self.shape[name]: + 129 raise ValueError('Incompatible samples and idx for %s: %d vs. 
%d' % (name, len(sample), self.shape[name])) + 130 self.r_values[name] = np.mean(sample) + 131 self.deltas[name] = sample - self.r_values[name] + 132 self._value += self.shape[name] * self.r_values[name] + 133 self._value /= self.N + 134 + 135 self._dvalue = 0.0 + 136 self.ddvalue = 0.0 + 137 self.reweighted = False + 138 + 139 self.tag = None + 140 + 141 @property + 142 def value(self): + 143 return self._value + 144 + 145 @property + 146 def dvalue(self): + 147 return self._dvalue + 148 + 149 @property + 150 def e_names(self): + 151 return sorted(set([o.split('|')[0] for o in self.names])) + 152 + 153 @property + 154 def cov_names(self): + 155 return sorted(set([o for o in self.covobs.keys()])) + 156 + 157 @property + 158 def mc_names(self): + 159 return sorted(set([o.split('|')[0] for o in self.names if o not in self.cov_names])) + 160 + 161 @property + 162 def e_content(self): + 163 res = {} + 164 for e, e_name in enumerate(self.e_names): + 165 res[e_name] = sorted(filter(lambda x: x.startswith(e_name + '|'), self.names)) + 166 if e_name in self.names: + 167 res[e_name].append(e_name) + 168 return res + 169 + 170 @property + 171 def covobs(self): + 172 return self._covobs + 173 + 174 def gamma_method(self, **kwargs): + 175 """Estimate the error and related properties of the Obs. + 176 + 177 Parameters + 178 ---------- + 179 S : float + 180 specifies a custom value for the parameter S (default 2.0). + 181 If set to 0 it is assumed that the data exhibits no + 182 autocorrelation. In this case the error estimates coincides + 183 with the sample standard error. + 184 tau_exp : float + 185 positive value triggers the critical slowing down analysis + 186 (default 0.0). + 187 N_sigma : float + 188 number of standard deviations from zero until the tail is + 189 attached to the autocorrelation function (default 1). 
+ 190 fft : bool + 191 determines whether the fft algorithm is used for the computation + 192 of the autocorrelation function (default True) + 193 """ + 194 + 195 e_content = self.e_content + 196 self.e_dvalue = {} + 197 self.e_ddvalue = {} + 198 self.e_tauint = {} + 199 self.e_dtauint = {} + 200 self.e_windowsize = {} + 201 self.e_n_tauint = {} + 202 self.e_n_dtauint = {} + 203 e_gamma = {} + 204 self.e_rho = {} + 205 self.e_drho = {} + 206 self._dvalue = 0 + 207 self.ddvalue = 0 + 208 + 209 self.S = {} + 210 self.tau_exp = {} + 211 self.N_sigma = {} + 212 + 213 if kwargs.get('fft') is False: + 214 fft = False + 215 else: + 216 fft = True + 217 + 218 def _parse_kwarg(kwarg_name): + 219 if kwarg_name in kwargs: + 220 tmp = kwargs.get(kwarg_name) + 221 if isinstance(tmp, (int, float)): + 222 if tmp < 0: + 223 raise Exception(kwarg_name + ' has to be larger or equal to 0.') + 224 for e, e_name in enumerate(self.e_names): + 225 getattr(self, kwarg_name)[e_name] = tmp + 226 else: + 227 raise TypeError(kwarg_name + ' is not in proper format.') + 228 else: + 229 for e, e_name in enumerate(self.e_names): + 230 if e_name in getattr(Obs, kwarg_name + '_dict'): + 231 getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name] + 232 else: + 233 getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global') + 234 + 235 _parse_kwarg('S') + 236 _parse_kwarg('tau_exp') + 237 _parse_kwarg('N_sigma') + 238 + 239 for e, e_name in enumerate(self.mc_names): + 240 gapsize = _determine_gap(self, e_content, e_name) + 241 + 242 r_length = [] + 243 for r_name in e_content[e_name]: + 244 if isinstance(self.idl[r_name], range): + 245 r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize) + 246 else: + 247 r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize) + 248 + 249 e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]]) + 250 w_max = max(r_length) // 2 + 251 e_gamma[e_name] = np.zeros(w_max) + 252 self.e_rho[e_name] = np.zeros(w_max) + 253 self.e_drho[e_name] = np.zeros(w_max) + 254 + 255 for r_name in e_content[e_name]: + 256 e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize) + 257 + 258 gamma_div = np.zeros(w_max) + 259 for r_name in e_content[e_name]: + 260 gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize) + 261 gamma_div[gamma_div < 1] = 1.0 + 262 e_gamma[e_name] /= gamma_div[:w_max] + 263 + 264 if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny: # Prevent division by zero + 265 self.e_tauint[e_name] = 0.5 + 266 self.e_dtauint[e_name] = 0.0 + 267 self.e_dvalue[e_name] = 0.0 + 268 self.e_ddvalue[e_name] = 0.0 + 269 self.e_windowsize[e_name] = 0 + 270 continue + 271 + 272 self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0] + 273 self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:]))) + 274 # Make sure no entry of tauint is smaller than 0.5 + 275 self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps + 276 # hep-lat/0306017 eq. 
(42) + 277 self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N) + 278 self.e_n_dtauint[e_name][0] = 0.0 + 279 + 280 def _compute_drho(i): + 281 tmp = (self.e_rho[e_name][i + 1:w_max] + 282 + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1], + 283 self.e_rho[e_name][1:max(1, w_max - 2 * i)]]) + 284 - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i]) + 285 self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N) + 286 + 287 if self.tau_exp[e_name] > 0: + 288 _compute_drho(1) + 289 texp = self.tau_exp[e_name] + 290 # Critical slowing down analysis + 291 if w_max // 2 <= 1: + 292 raise Exception("Need at least 8 samples for tau_exp error analysis") + 293 for n in range(1, w_max // 2): + 294 _compute_drho(n + 1) + 295 if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2: + 296 # Bias correction hep-lat/0306017 eq. (49) included + 297 self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1]) # The absolute makes sure, that the tail contribution is always positive + 298 self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2) + 299 # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2 + 300 self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N) + 301 self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N) + 302 self.e_windowsize[e_name] = n + 303 break + 304 else: + 305 if self.S[e_name] == 0.0: + 306 self.e_tauint[e_name] = 0.5 + 307 self.e_dtauint[e_name] = 0.0 + 308 self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1)) + 309 self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N) + 310 self.e_windowsize[e_name] = 0 + 311 else: + 312 # Standard automatic windowing procedure + 313 tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1)) + 314 g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N) + 315 for n in range(1, w_max): + 316 if g_w[n - 1] < 0 or n >= w_max - 1: + 317 _compute_drho(n) + 318 self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) # Bias correction hep-lat/0306017 eq. (49) + 319 self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n] + 320 self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N) + 321 self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N) + 322 self.e_windowsize[e_name] = n + 323 break + 324 + 325 self._dvalue += self.e_dvalue[e_name] ** 2 + 326 self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2 + 327 + 328 for e_name in self.cov_names: + 329 self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq()) + 330 self.e_ddvalue[e_name] = 0 + 331 self._dvalue += self.e_dvalue[e_name]**2 + 332 + 333 self._dvalue = np.sqrt(self._dvalue) + 334 if self._dvalue == 0.0: + 335 self.ddvalue = 0.0 + 336 else: + 337 self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue + 338 return + 339 + 340 gm = gamma_method + 341 + 342 def _calc_gamma(self, deltas, idx, shape, w_max, fft, gapsize): + 343 """Calculate Gamma_{AA} from the deltas, which are defined on idx. 
+ 344 idx is assumed to be a contiguous range (possibly with a stepsize != 1) + 345 + 346 Parameters + 347 ---------- + 348 deltas : list + 349 List of fluctuations + 350 idx : list + 351 List or range of configurations on which the deltas are defined. + 352 shape : int + 353 Number of configurations in idx. + 354 w_max : int + 355 Upper bound for the summation window. + 356 fft : bool + 357 determines whether the fft algorithm is used for the computation + 358 of the autocorrelation function. + 359 gapsize : int + 360 The target distance between two configurations. If longer distances + 361 are found in idx, the data is expanded. + 362 """ + 363 gamma = np.zeros(w_max) + 364 deltas = _expand_deltas(deltas, idx, shape, gapsize) + 365 new_shape = len(deltas) + 366 if fft: + 367 max_gamma = min(new_shape, w_max) + 368 # The padding for the fft has to be even + 369 padding = new_shape + max_gamma + (new_shape + max_gamma) % 2 + 370 gamma[:max_gamma] += np.fft.irfft(np.abs(np.fft.rfft(deltas, padding)) ** 2)[:max_gamma] + 371 else: + 372 for n in range(w_max): + 373 if new_shape - n >= 0: + 374 gamma[n] += deltas[0:new_shape - n].dot(deltas[n:new_shape]) + 375 + 376 return gamma + 377 + 378 def details(self, ens_content=True): + 379 """Output detailed properties of the Obs. + 380 + 381 Parameters + 382 ---------- + 383 ens_content : bool + 384 print details about the ensembles and replica if true. + 385 """ + 386 if self.tag is not None: + 387 print("Description:", self.tag) + 388 if not hasattr(self, 'e_dvalue'): + 389 print('Result\t %3.8e' % (self.value)) + 390 else: + 391 if self.value == 0.0: + 392 percentage = np.nan + 393 else: + 394 percentage = np.abs(self._dvalue / self.value) * 100 + 395 print('Result\t %3.8e +/- %3.8e +/- %3.8e (%3.3f%%)' % (self.value, self._dvalue, self.ddvalue, percentage)) + 396 if len(self.e_names) > 1: + 397 print(' Ensemble errors:') + 398 e_content = self.e_content + 399 for e_name in self.mc_names: + 400 gap = _determine_gap(self, e_content, e_name) + 401 + 402 if len(self.e_names) > 1: + 403 print('', e_name, '\t %3.6e +/- %3.6e' % (self.e_dvalue[e_name], self.e_ddvalue[e_name])) + 404 tau_string = " \N{GREEK SMALL LETTER TAU}_int\t " + _format_uncertainty(self.e_tauint[e_name], self.e_dtauint[e_name]) + 405 tau_string += f" in units of {gap} config" + 406 if gap > 1: + 407 tau_string += "s" + 408 if self.tau_exp[e_name] > 0: + 409 tau_string = f"{tau_string: <45}" + '\t(\N{GREEK SMALL LETTER TAU}_exp=%3.2f, N_\N{GREEK SMALL LETTER SIGMA}=%1.0i)' % (self.tau_exp[e_name], self.N_sigma[e_name]) + 410 else: + 411 tau_string = f"{tau_string: <45}" + '\t(S=%3.2f)' % (self.S[e_name]) + 412 print(tau_string) + 413 for e_name in self.cov_names: + 414 print('', e_name, '\t %3.8e' % (self.e_dvalue[e_name])) + 415 if ens_content is True: + 416 if len(self.e_names) == 1: + 417 print(self.N, 'samples in', len(self.e_names), 'ensemble:') + 418 else: + 419 print(self.N, 'samples in', len(self.e_names), 'ensembles:') + 420 my_string_list = [] + 421 for key, value in sorted(self.e_content.items()): + 422 if key not in self.covobs: + 423 my_string = ' ' + "\u00B7 Ensemble '" + key + "' " + 424 if len(value) == 1: + 425 my_string += f': {self.shape[value[0]]} configurations' + 426 if isinstance(self.idl[value[0]], range): + 427 my_string += f' (from {self.idl[value[0]].start} to {self.idl[value[0]][-1]}' + int(self.idl[value[0]].step != 1) * f' in steps of {self.idl[value[0]].step}' + ')' + 428 else: + 429 my_string += f' (irregular range from {self.idl[value[0]][0]} to 
{self.idl[value[0]][-1]})' + 430 else: + 431 sublist = [] + 432 for v in value: + 433 my_substring = ' ' + "\u00B7 Replicum '" + v[len(key) + 1:] + "' " + 434 my_substring += f': {self.shape[v]} configurations' + 435 if isinstance(self.idl[v], range): + 436 my_substring += f' (from {self.idl[v].start} to {self.idl[v][-1]}' + int(self.idl[v].step != 1) * f' in steps of {self.idl[v].step}' + ')' + 437 else: + 438 my_substring += f' (irregular range from {self.idl[v][0]} to {self.idl[v][-1]})' + 439 sublist.append(my_substring) + 440 + 441 my_string += '\n' + '\n'.join(sublist) + 442 else: + 443 my_string = ' ' + "\u00B7 Covobs '" + key + "' " + 444 my_string_list.append(my_string) + 445 print('\n'.join(my_string_list)) + 446 + 447 def reweight(self, weight): + 448 """Reweight the obs with given rewighting factors. + 449 + 450 Parameters + 451 ---------- + 452 weight : Obs + 453 Reweighting factor. An Observable that has to be defined on a superset of the + 454 configurations in obs[i].idl for all i. + 455 all_configs : bool + 456 if True, the reweighted observables are normalized by the average of + 457 the reweighting factor on all configurations in weight.idl and not + 458 on the configurations in obs[i].idl. Default False. + 459 """ + 460 return reweight(weight, [self])[0] + 461 + 462 def is_zero_within_error(self, sigma=1): + 463 """Checks whether the observable is zero within 'sigma' standard errors. + 464 + 465 Parameters + 466 ---------- + 467 sigma : int + 468 Number of standard errors used for the check. + 469 + 470 Works only properly when the gamma method was run. + 471 """ + 472 return self.is_zero() or np.abs(self.value) <= sigma * self._dvalue + 473 + 474 def is_zero(self, atol=1e-10): + 475 """Checks whether the observable is zero within a given tolerance. + 476 + 477 Parameters + 478 ---------- + 479 atol : float + 480 Absolute tolerance (for details see numpy documentation). + 481 """ + 482 return np.isclose(0.0, self.value, 1e-14, atol) and all(np.allclose(0.0, delta, 1e-14, atol) for delta in self.deltas.values()) and all(np.allclose(0.0, delta.errsq(), 1e-14, atol) for delta in self.covobs.values()) + 483 + 484 def plot_tauint(self, save=None): + 485 """Plot integrated autocorrelation time for each ensemble. + 486 + 487 Parameters + 488 ---------- + 489 save : str + 490 saves the figure to a file named 'save' if. 
+ 491 """ + 492 if not hasattr(self, 'e_dvalue'): + 493 raise Exception('Run the gamma method first.') + 494 + 495 for e, e_name in enumerate(self.mc_names): + 496 fig = plt.figure() + 497 plt.xlabel(r'$W$') + 498 plt.ylabel(r'$\tau_\mathrm{int}$') + 499 length = int(len(self.e_n_tauint[e_name])) + 500 if self.tau_exp[e_name] > 0: + 501 base = self.e_n_tauint[e_name][self.e_windowsize[e_name]] + 502 x_help = np.arange(2 * self.tau_exp[e_name]) + 503 y_help = (x_help + 1) * np.abs(self.e_rho[e_name][self.e_windowsize[e_name] + 1]) * (1 - x_help / (2 * (2 * self.tau_exp[e_name] - 1))) + base + 504 x_arr = np.arange(self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]) + 505 plt.plot(x_arr, y_help, 'C' + str(e), linewidth=1, ls='--', marker=',') + 506 plt.errorbar([self.e_windowsize[e_name] + 2 * self.tau_exp[e_name]], [self.e_tauint[e_name]], + 507 yerr=[self.e_dtauint[e_name]], fmt='C' + str(e), linewidth=1, capsize=2, marker='o', mfc=plt.rcParams['axes.facecolor']) + 508 xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5 + 509 label = e_name + r', $\tau_\mathrm{exp}$=' + str(np.around(self.tau_exp[e_name], decimals=2)) + 510 else: + 511 label = e_name + ', S=' + str(np.around(self.S[e_name], decimals=2)) + 512 xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5) + 513 + 514 plt.errorbar(np.arange(length)[:int(xmax) + 1], self.e_n_tauint[e_name][:int(xmax) + 1], yerr=self.e_n_dtauint[e_name][:int(xmax) + 1], linewidth=1, capsize=2, label=label) + 515 plt.axvline(x=self.e_windowsize[e_name], color='C' + str(e), alpha=0.5, marker=',', ls='--') + 516 plt.legend() + 517 plt.xlim(-0.5, xmax) + 518 ylim = plt.ylim() + 519 plt.ylim(bottom=0.0, top=max(1.0, ylim[1])) + 520 plt.draw() + 521 if save: + 522 fig.savefig(save + "_" + str(e)) + 523 + 524 def plot_rho(self, save=None): + 525 """Plot normalized autocorrelation function time for each ensemble. + 526 + 527 Parameters + 528 ---------- + 529 save : str + 530 saves the figure to a file named 'save' if. 
+ 531 """ + 532 if not hasattr(self, 'e_dvalue'): + 533 raise Exception('Run the gamma method first.') + 534 for e, e_name in enumerate(self.mc_names): + 535 fig = plt.figure() + 536 plt.xlabel('W') + 537 plt.ylabel('rho') + 538 length = int(len(self.e_drho[e_name])) + 539 plt.errorbar(np.arange(length), self.e_rho[e_name][:length], yerr=self.e_drho[e_name][:], linewidth=1, capsize=2) + 540 plt.axvline(x=self.e_windowsize[e_name], color='r', alpha=0.25, ls='--', marker=',') + 541 if self.tau_exp[e_name] > 0: + 542 plt.plot([self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]], + 543 [self.e_rho[e_name][self.e_windowsize[e_name] + 1], 0], 'k-', lw=1) + 544 xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5 + 545 plt.title('Rho ' + e_name + r', tau\_exp=' + str(np.around(self.tau_exp[e_name], decimals=2))) + 546 else: + 547 xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5) + 548 plt.title('Rho ' + e_name + ', S=' + str(np.around(self.S[e_name], decimals=2))) + 549 plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1) + 550 plt.xlim(-0.5, xmax) + 551 plt.draw() + 552 if save: + 553 fig.savefig(save + "_" + str(e)) + 554 + 555 def plot_rep_dist(self): + 556 """Plot replica distribution for each ensemble with more than one replicum.""" + 557 if not hasattr(self, 'e_dvalue'): + 558 raise Exception('Run the gamma method first.') + 559 for e, e_name in enumerate(self.mc_names): + 560 if len(self.e_content[e_name]) == 1: + 561 print('No replica distribution for a single replicum (', e_name, ')') + 562 continue + 563 r_length = [] + 564 sub_r_mean = 0 + 565 for r, r_name in enumerate(self.e_content[e_name]): + 566 r_length.append(len(self.deltas[r_name])) + 567 sub_r_mean += self.shape[r_name] * self.r_values[r_name] + 568 e_N = np.sum(r_length) + 569 sub_r_mean /= e_N + 570 arr = np.zeros(len(self.e_content[e_name])) + 571 for r, r_name in enumerate(self.e_content[e_name]): + 572 arr[r] = (self.r_values[r_name] - sub_r_mean) / (self.e_dvalue[e_name] * np.sqrt(e_N / self.shape[r_name] - 1)) + 573 plt.hist(arr, rwidth=0.8, bins=len(self.e_content[e_name])) + 574 plt.title('Replica distribution' + e_name + ' (mean=0, var=1)') + 575 plt.draw() + 576 + 577 def plot_history(self, expand=True): + 578 """Plot derived Monte Carlo history for each ensemble + 579 + 580 Parameters + 581 ---------- + 582 expand : bool + 583 show expanded history for irregular Monte Carlo chains (default: True). 
+ 584 """ + 585 for e, e_name in enumerate(self.mc_names): + 586 plt.figure() + 587 r_length = [] + 588 tmp = [] + 589 tmp_expanded = [] + 590 for r, r_name in enumerate(self.e_content[e_name]): + 591 tmp.append(self.deltas[r_name] + self.r_values[r_name]) + 592 if expand: + 593 tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name], 1) + self.r_values[r_name]) + 594 r_length.append(len(tmp_expanded[-1])) + 595 else: + 596 r_length.append(len(tmp[-1])) + 597 e_N = np.sum(r_length) + 598 x = np.arange(e_N) + 599 y_test = np.concatenate(tmp, axis=0) + 600 if expand: + 601 y = np.concatenate(tmp_expanded, axis=0) + 602 else: + 603 y = y_test + 604 plt.errorbar(x, y, fmt='.', markersize=3) + 605 plt.xlim(-0.5, e_N - 0.5) + 606 plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})') + 607 plt.draw() + 608 + 609 def plot_piechart(self, save=None): + 610 """Plot piechart which shows the fractional contribution of each + 611 ensemble to the error and returns a dictionary containing the fractions. + 612 + 613 Parameters + 614 ---------- + 615 save : str + 616 saves the figure to a file named 'save' if. + 617 """ + 618 if not hasattr(self, 'e_dvalue'): + 619 raise Exception('Run the gamma method first.') + 620 if np.isclose(0.0, self._dvalue, atol=1e-15): + 621 raise Exception('Error is 0.0') + 622 labels = self.e_names + 623 sizes = [self.e_dvalue[name] ** 2 for name in labels] / self._dvalue ** 2 + 624 fig1, ax1 = plt.subplots() + 625 ax1.pie(sizes, labels=labels, startangle=90, normalize=True) + 626 ax1.axis('equal') + 627 plt.draw() + 628 if save: + 629 fig1.savefig(save) + 630 + 631 return dict(zip(labels, sizes)) + 632 + 633 def dump(self, filename, datatype="json.gz", description="", **kwargs): + 634 """Dump the Obs to a file 'name' of chosen format. + 635 + 636 Parameters + 637 ---------- + 638 filename : str + 639 name of the file to be saved. + 640 datatype : str + 641 Format of the exported file. Supported formats include + 642 "json.gz" and "pickle" + 643 description : str + 644 Description for output file, only relevant for json.gz format. + 645 path : str + 646 specifies a custom path for the file (default '.') + 647 """ + 648 if 'path' in kwargs: + 649 file_name = kwargs.get('path') + '/' + filename + 650 else: + 651 file_name = filename + 652 + 653 if datatype == "json.gz": + 654 from .input.json import dump_to_json + 655 dump_to_json([self], file_name, description=description) + 656 elif datatype == "pickle": + 657 with open(file_name + '.p', 'wb') as fb: + 658 pickle.dump(self, fb) + 659 else: + 660 raise Exception("Unknown datatype " + str(datatype)) + 661 + 662 def export_jackknife(self): + 663 """Export jackknife samples from the Obs + 664 + 665 Returns + 666 ------- + 667 numpy.ndarray + 668 Returns a numpy array of length N + 1 where N is the number of samples + 669 for the given ensemble and replicum. The zeroth entry of the array contains + 670 the mean value of the Obs, entries 1 to N contain the N jackknife samples + 671 derived from the Obs. The current implementation only works for observables + 672 defined on exactly one ensemble and replicum. The derived jackknife samples + 673 should agree with samples from a full jackknife analysis up to O(1/N). 
+ 674 """ + 675 + 676 if len(self.names) != 1: + 677 raise Exception("'export_jackknife' is only implemented for Obs defined on one ensemble and replicum.") + 678 + 679 name = self.names[0] + 680 full_data = self.deltas[name] + self.r_values[name] + 681 n = full_data.size + 682 mean = self.value + 683 tmp_jacks = np.zeros(n + 1) + 684 tmp_jacks[0] = mean + 685 tmp_jacks[1:] = (n * mean - full_data) / (n - 1) + 686 return tmp_jacks + 687 + 688 def export_bootstrap(self, samples=500, random_numbers=None, save_rng=None): + 689 """Export bootstrap samples from the Obs + 690 + 691 Parameters + 692 ---------- + 693 samples : int + 694 Number of bootstrap samples to generate. + 695 random_numbers : np.ndarray + 696 Array of shape (samples, length) containing the random numbers to generate the bootstrap samples. + 697 If not provided the bootstrap samples are generated bashed on the md5 hash of the enesmble name. + 698 save_rng : str + 699 Save the random numbers to a file if a path is specified. + 700 + 701 Returns + 702 ------- + 703 numpy.ndarray + 704 Returns a numpy array of length N + 1 where N is the number of samples + 705 for the given ensemble and replicum. The zeroth entry of the array contains + 706 the mean value of the Obs, entries 1 to N contain the N import_bootstrap samples + 707 derived from the Obs. The current implementation only works for observables + 708 defined on exactly one ensemble and replicum. The derived bootstrap samples + 709 should agree with samples from a full bootstrap analysis up to O(1/N). + 710 """ + 711 if len(self.names) != 1: + 712 raise Exception("'export_boostrap' is only implemented for Obs defined on one ensemble and replicum.") + 713 + 714 name = self.names[0] + 715 length = self.N + 716 + 717 if random_numbers is None: + 718 seed = int(hashlib.md5(name.encode()).hexdigest(), 16) & 0xFFFFFFFF + 719 rng = np.random.default_rng(seed) + 720 random_numbers = rng.integers(0, length, size=(samples, length)) 721 - 722 def __le__(self, other): - 723 return self.value <= other + 722 if save_rng is not None: + 723 np.savetxt(save_rng, random_numbers, fmt='%i') 724 - 725 def __gt__(self, other): - 726 return self.value > other - 727 - 728 def __ge__(self, other): - 729 return self.value >= other + 725 proj = np.vstack([np.bincount(o, minlength=length) for o in random_numbers]) / length + 726 ret = np.zeros(samples + 1) + 727 ret[0] = self.value + 728 ret[1:] = proj @ (self.deltas[name] + self.r_values[name]) + 729 return ret 730 - 731 def __eq__(self, other): - 732 return (self - other).is_zero() + 731 def __float__(self): + 732 return float(self.value) 733 - 734 def __ne__(self, other): - 735 return not (self - other).is_zero() + 734 def __repr__(self): + 735 return 'Obs[' + str(self) + ']' 736 - 737 # Overload math operations - 738 def __add__(self, y): - 739 if isinstance(y, Obs): - 740 return derived_observable(lambda x, **kwargs: x[0] + x[1], [self, y], man_grad=[1, 1]) - 741 else: - 742 if isinstance(y, np.ndarray): - 743 return np.array([self + o for o in y]) - 744 elif y.__class__.__name__ in ['Corr', 'CObs']: - 745 return NotImplemented - 746 else: - 747 return derived_observable(lambda x, **kwargs: x[0] + y, [self], man_grad=[1]) - 748 - 749 def __radd__(self, y): - 750 return self + y - 751 - 752 def __mul__(self, y): - 753 if isinstance(y, Obs): - 754 return derived_observable(lambda x, **kwargs: x[0] * x[1], [self, y], man_grad=[y.value, self.value]) - 755 else: - 756 if isinstance(y, np.ndarray): - 757 return np.array([self * o for o in y]) - 758 
elif isinstance(y, complex): - 759 return CObs(self * y.real, self * y.imag) - 760 elif y.__class__.__name__ in ['Corr', 'CObs']: - 761 return NotImplemented - 762 else: - 763 return derived_observable(lambda x, **kwargs: x[0] * y, [self], man_grad=[y]) - 764 - 765 def __rmul__(self, y): - 766 return self * y - 767 - 768 def __sub__(self, y): - 769 if isinstance(y, Obs): - 770 return derived_observable(lambda x, **kwargs: x[0] - x[1], [self, y], man_grad=[1, -1]) - 771 else: - 772 if isinstance(y, np.ndarray): - 773 return np.array([self - o for o in y]) - 774 elif y.__class__.__name__ in ['Corr', 'CObs']: - 775 return NotImplemented - 776 else: - 777 return derived_observable(lambda x, **kwargs: x[0] - y, [self], man_grad=[1]) - 778 - 779 def __rsub__(self, y): - 780 return -1 * (self - y) - 781 - 782 def __pos__(self): - 783 return self - 784 - 785 def __neg__(self): - 786 return -1 * self - 787 - 788 def __truediv__(self, y): - 789 if isinstance(y, Obs): - 790 return derived_observable(lambda x, **kwargs: x[0] / x[1], [self, y], man_grad=[1 / y.value, - self.value / y.value ** 2]) - 791 else: - 792 if isinstance(y, np.ndarray): - 793 return np.array([self / o for o in y]) - 794 elif y.__class__.__name__ in ['Corr', 'CObs']: - 795 return NotImplemented - 796 else: - 797 return derived_observable(lambda x, **kwargs: x[0] / y, [self], man_grad=[1 / y]) - 798 - 799 def __rtruediv__(self, y): - 800 if isinstance(y, Obs): - 801 return derived_observable(lambda x, **kwargs: x[0] / x[1], [y, self], man_grad=[1 / self.value, - y.value / self.value ** 2]) - 802 else: - 803 if isinstance(y, np.ndarray): - 804 return np.array([o / self for o in y]) - 805 elif y.__class__.__name__ in ['Corr', 'CObs']: - 806 return NotImplemented - 807 else: - 808 return derived_observable(lambda x, **kwargs: y / x[0], [self], man_grad=[-y / self.value ** 2]) - 809 - 810 def __pow__(self, y): - 811 if isinstance(y, Obs): - 812 return derived_observable(lambda x: x[0] ** x[1], [self, y]) - 813 else: - 814 return derived_observable(lambda x: x[0] ** y, [self]) - 815 - 816 def __rpow__(self, y): - 817 if isinstance(y, Obs): - 818 return derived_observable(lambda x: x[0] ** x[1], [y, self]) - 819 else: - 820 return derived_observable(lambda x: y ** x[0], [self]) - 821 - 822 def __abs__(self): - 823 return derived_observable(lambda x: anp.abs(x[0]), [self]) - 824 - 825 # Overload numpy functions - 826 def sqrt(self): - 827 return derived_observable(lambda x, **kwargs: np.sqrt(x[0]), [self], man_grad=[1 / 2 / np.sqrt(self.value)]) + 737 def __str__(self): + 738 return _format_uncertainty(self.value, self._dvalue) + 739 + 740 def __format__(self, format_type): + 741 if format_type == "": + 742 significance = 2 + 743 else: + 744 significance = int(float(format_type.replace("+", "").replace("-", ""))) + 745 my_str = _format_uncertainty(self.value, self._dvalue, + 746 significance=significance) + 747 for char in ["+", " "]: + 748 if format_type.startswith(char): + 749 if my_str[0] != "-": + 750 my_str = char + my_str + 751 return my_str + 752 + 753 def __hash__(self): + 754 hash_tuple = (np.array([self.value]).astype(np.float32).data.tobytes(),) + 755 hash_tuple += tuple([o.astype(np.float32).data.tobytes() for o in self.deltas.values()]) + 756 hash_tuple += tuple([np.array([o.errsq()]).astype(np.float32).data.tobytes() for o in self.covobs.values()]) + 757 hash_tuple += tuple([o.encode() for o in self.names]) + 758 m = hashlib.md5() + 759 [m.update(o) for o in hash_tuple] + 760 return int(m.hexdigest(), 16) & 0xFFFFFFFF + 761 
+ 762 # Overload comparisons + 763 def __lt__(self, other): + 764 return self.value < other + 765 + 766 def __le__(self, other): + 767 return self.value <= other + 768 + 769 def __gt__(self, other): + 770 return self.value > other + 771 + 772 def __ge__(self, other): + 773 return self.value >= other + 774 + 775 def __eq__(self, other): + 776 return (self - other).is_zero() + 777 + 778 def __ne__(self, other): + 779 return not (self - other).is_zero() + 780 + 781 # Overload math operations + 782 def __add__(self, y): + 783 if isinstance(y, Obs): + 784 return derived_observable(lambda x, **kwargs: x[0] + x[1], [self, y], man_grad=[1, 1]) + 785 else: + 786 if isinstance(y, np.ndarray): + 787 return np.array([self + o for o in y]) + 788 elif y.__class__.__name__ in ['Corr', 'CObs']: + 789 return NotImplemented + 790 else: + 791 return derived_observable(lambda x, **kwargs: x[0] + y, [self], man_grad=[1]) + 792 + 793 def __radd__(self, y): + 794 return self + y + 795 + 796 def __mul__(self, y): + 797 if isinstance(y, Obs): + 798 return derived_observable(lambda x, **kwargs: x[0] * x[1], [self, y], man_grad=[y.value, self.value]) + 799 else: + 800 if isinstance(y, np.ndarray): + 801 return np.array([self * o for o in y]) + 802 elif isinstance(y, complex): + 803 return CObs(self * y.real, self * y.imag) + 804 elif y.__class__.__name__ in ['Corr', 'CObs']: + 805 return NotImplemented + 806 else: + 807 return derived_observable(lambda x, **kwargs: x[0] * y, [self], man_grad=[y]) + 808 + 809 def __rmul__(self, y): + 810 return self * y + 811 + 812 def __sub__(self, y): + 813 if isinstance(y, Obs): + 814 return derived_observable(lambda x, **kwargs: x[0] - x[1], [self, y], man_grad=[1, -1]) + 815 else: + 816 if isinstance(y, np.ndarray): + 817 return np.array([self - o for o in y]) + 818 elif y.__class__.__name__ in ['Corr', 'CObs']: + 819 return NotImplemented + 820 else: + 821 return derived_observable(lambda x, **kwargs: x[0] - y, [self], man_grad=[1]) + 822 + 823 def __rsub__(self, y): + 824 return -1 * (self - y) + 825 + 826 def __pos__(self): + 827 return self 828 - 829 def log(self): - 830 return derived_observable(lambda x, **kwargs: np.log(x[0]), [self], man_grad=[1 / self.value]) + 829 def __neg__(self): + 830 return -1 * self 831 - 832 def exp(self): - 833 return derived_observable(lambda x, **kwargs: np.exp(x[0]), [self], man_grad=[np.exp(self.value)]) - 834 - 835 def sin(self): - 836 return derived_observable(lambda x, **kwargs: np.sin(x[0]), [self], man_grad=[np.cos(self.value)]) - 837 - 838 def cos(self): - 839 return derived_observable(lambda x, **kwargs: np.cos(x[0]), [self], man_grad=[-np.sin(self.value)]) - 840 - 841 def tan(self): - 842 return derived_observable(lambda x, **kwargs: np.tan(x[0]), [self], man_grad=[1 / np.cos(self.value) ** 2]) - 843 - 844 def arcsin(self): - 845 return derived_observable(lambda x: anp.arcsin(x[0]), [self]) - 846 - 847 def arccos(self): - 848 return derived_observable(lambda x: anp.arccos(x[0]), [self]) - 849 - 850 def arctan(self): - 851 return derived_observable(lambda x: anp.arctan(x[0]), [self]) - 852 - 853 def sinh(self): - 854 return derived_observable(lambda x, **kwargs: np.sinh(x[0]), [self], man_grad=[np.cosh(self.value)]) - 855 - 856 def cosh(self): - 857 return derived_observable(lambda x, **kwargs: np.cosh(x[0]), [self], man_grad=[np.sinh(self.value)]) - 858 - 859 def tanh(self): - 860 return derived_observable(lambda x, **kwargs: np.tanh(x[0]), [self], man_grad=[1 / np.cosh(self.value) ** 2]) - 861 - 862 def arcsinh(self): - 863 return 
derived_observable(lambda x: anp.arcsinh(x[0]), [self]) - 864 - 865 def arccosh(self): - 866 return derived_observable(lambda x: anp.arccosh(x[0]), [self]) - 867 - 868 def arctanh(self): - 869 return derived_observable(lambda x: anp.arctanh(x[0]), [self]) - 870 - 871 - 872class CObs: - 873 """Class for a complex valued observable.""" - 874 __slots__ = ['_real', '_imag', 'tag'] + 832 def __truediv__(self, y): + 833 if isinstance(y, Obs): + 834 return derived_observable(lambda x, **kwargs: x[0] / x[1], [self, y], man_grad=[1 / y.value, - self.value / y.value ** 2]) + 835 else: + 836 if isinstance(y, np.ndarray): + 837 return np.array([self / o for o in y]) + 838 elif y.__class__.__name__ in ['Corr', 'CObs']: + 839 return NotImplemented + 840 else: + 841 return derived_observable(lambda x, **kwargs: x[0] / y, [self], man_grad=[1 / y]) + 842 + 843 def __rtruediv__(self, y): + 844 if isinstance(y, Obs): + 845 return derived_observable(lambda x, **kwargs: x[0] / x[1], [y, self], man_grad=[1 / self.value, - y.value / self.value ** 2]) + 846 else: + 847 if isinstance(y, np.ndarray): + 848 return np.array([o / self for o in y]) + 849 elif y.__class__.__name__ in ['Corr', 'CObs']: + 850 return NotImplemented + 851 else: + 852 return derived_observable(lambda x, **kwargs: y / x[0], [self], man_grad=[-y / self.value ** 2]) + 853 + 854 def __pow__(self, y): + 855 if isinstance(y, Obs): + 856 return derived_observable(lambda x: x[0] ** x[1], [self, y]) + 857 else: + 858 return derived_observable(lambda x: x[0] ** y, [self]) + 859 + 860 def __rpow__(self, y): + 861 if isinstance(y, Obs): + 862 return derived_observable(lambda x: x[0] ** x[1], [y, self]) + 863 else: + 864 return derived_observable(lambda x: y ** x[0], [self]) + 865 + 866 def __abs__(self): + 867 return derived_observable(lambda x: anp.abs(x[0]), [self]) + 868 + 869 # Overload numpy functions + 870 def sqrt(self): + 871 return derived_observable(lambda x, **kwargs: np.sqrt(x[0]), [self], man_grad=[1 / 2 / np.sqrt(self.value)]) + 872 + 873 def log(self): + 874 return derived_observable(lambda x, **kwargs: np.log(x[0]), [self], man_grad=[1 / self.value]) 875 - 876 def __init__(self, real, imag=0.0): - 877 self._real = real - 878 self._imag = imag - 879 self.tag = None - 880 - 881 @property - 882 def real(self): - 883 return self._real + 876 def exp(self): + 877 return derived_observable(lambda x, **kwargs: np.exp(x[0]), [self], man_grad=[np.exp(self.value)]) + 878 + 879 def sin(self): + 880 return derived_observable(lambda x, **kwargs: np.sin(x[0]), [self], man_grad=[np.cos(self.value)]) + 881 + 882 def cos(self): + 883 return derived_observable(lambda x, **kwargs: np.cos(x[0]), [self], man_grad=[-np.sin(self.value)]) 884 - 885 @property - 886 def imag(self): - 887 return self._imag - 888 - 889 def gamma_method(self, **kwargs): - 890 """Executes the gamma_method for the real and the imaginary part.""" - 891 if isinstance(self.real, Obs): - 892 self.real.gamma_method(**kwargs) - 893 if isinstance(self.imag, Obs): - 894 self.imag.gamma_method(**kwargs) - 895 - 896 def is_zero(self): - 897 """Checks whether both real and imaginary part are zero within machine precision.""" - 898 return self.real == 0.0 and self.imag == 0.0 + 885 def tan(self): + 886 return derived_observable(lambda x, **kwargs: np.tan(x[0]), [self], man_grad=[1 / np.cos(self.value) ** 2]) + 887 + 888 def arcsin(self): + 889 return derived_observable(lambda x: anp.arcsin(x[0]), [self]) + 890 + 891 def arccos(self): + 892 return derived_observable(lambda x: anp.arccos(x[0]), 
[self]) + 893 + 894 def arctan(self): + 895 return derived_observable(lambda x: anp.arctan(x[0]), [self]) + 896 + 897 def sinh(self): + 898 return derived_observable(lambda x, **kwargs: np.sinh(x[0]), [self], man_grad=[np.cosh(self.value)]) 899 - 900 def conjugate(self): - 901 return CObs(self.real, -self.imag) + 900 def cosh(self): + 901 return derived_observable(lambda x, **kwargs: np.cosh(x[0]), [self], man_grad=[np.sinh(self.value)]) 902 - 903 def __add__(self, other): - 904 if isinstance(other, np.ndarray): - 905 return other + self - 906 elif hasattr(other, 'real') and hasattr(other, 'imag'): - 907 return CObs(self.real + other.real, - 908 self.imag + other.imag) - 909 else: - 910 return CObs(self.real + other, self.imag) + 903 def tanh(self): + 904 return derived_observable(lambda x, **kwargs: np.tanh(x[0]), [self], man_grad=[1 / np.cosh(self.value) ** 2]) + 905 + 906 def arcsinh(self): + 907 return derived_observable(lambda x: anp.arcsinh(x[0]), [self]) + 908 + 909 def arccosh(self): + 910 return derived_observable(lambda x: anp.arccosh(x[0]), [self]) 911 - 912 def __radd__(self, y): - 913 return self + y + 912 def arctanh(self): + 913 return derived_observable(lambda x: anp.arctanh(x[0]), [self]) 914 - 915 def __sub__(self, other): - 916 if isinstance(other, np.ndarray): - 917 return -1 * (other - self) - 918 elif hasattr(other, 'real') and hasattr(other, 'imag'): - 919 return CObs(self.real - other.real, self.imag - other.imag) - 920 else: - 921 return CObs(self.real - other, self.imag) - 922 - 923 def __rsub__(self, other): - 924 return -1 * (self - other) - 925 - 926 def __mul__(self, other): - 927 if isinstance(other, np.ndarray): - 928 return other * self - 929 elif hasattr(other, 'real') and hasattr(other, 'imag'): - 930 if all(isinstance(i, Obs) for i in [self.real, self.imag, other.real, other.imag]): - 931 return CObs(derived_observable(lambda x, **kwargs: x[0] * x[1] - x[2] * x[3], - 932 [self.real, other.real, self.imag, other.imag], - 933 man_grad=[other.real.value, self.real.value, -other.imag.value, -self.imag.value]), - 934 derived_observable(lambda x, **kwargs: x[2] * x[1] + x[0] * x[3], - 935 [self.real, other.real, self.imag, other.imag], - 936 man_grad=[other.imag.value, self.imag.value, other.real.value, self.real.value])) - 937 elif getattr(other, 'imag', 0) != 0: - 938 return CObs(self.real * other.real - self.imag * other.imag, - 939 self.imag * other.real + self.real * other.imag) - 940 else: - 941 return CObs(self.real * other.real, self.imag * other.real) - 942 else: - 943 return CObs(self.real * other, self.imag * other) - 944 - 945 def __rmul__(self, other): - 946 return self * other - 947 - 948 def __truediv__(self, other): - 949 if isinstance(other, np.ndarray): - 950 return 1 / (other / self) - 951 elif hasattr(other, 'real') and hasattr(other, 'imag'): - 952 r = other.real ** 2 + other.imag ** 2 - 953 return CObs((self.real * other.real + self.imag * other.imag) / r, (self.imag * other.real - self.real * other.imag) / r) - 954 else: - 955 return CObs(self.real / other, self.imag / other) - 956 - 957 def __rtruediv__(self, other): - 958 r = self.real ** 2 + self.imag ** 2 - 959 if hasattr(other, 'real') and hasattr(other, 'imag'): - 960 return CObs((self.real * other.real + self.imag * other.imag) / r, (self.real * other.imag - self.imag * other.real) / r) - 961 else: - 962 return CObs(self.real * other / r, -self.imag * other / r) - 963 - 964 def __abs__(self): - 965 return np.sqrt(self.real**2 + self.imag**2) + 915 + 916class CObs: + 917 """Class 
for a complex valued observable.""" + 918 __slots__ = ['_real', '_imag', 'tag'] + 919 + 920 def __init__(self, real, imag=0.0): + 921 self._real = real + 922 self._imag = imag + 923 self.tag = None + 924 + 925 @property + 926 def real(self): + 927 return self._real + 928 + 929 @property + 930 def imag(self): + 931 return self._imag + 932 + 933 def gamma_method(self, **kwargs): + 934 """Executes the gamma_method for the real and the imaginary part.""" + 935 if isinstance(self.real, Obs): + 936 self.real.gamma_method(**kwargs) + 937 if isinstance(self.imag, Obs): + 938 self.imag.gamma_method(**kwargs) + 939 + 940 def is_zero(self): + 941 """Checks whether both real and imaginary part are zero within machine precision.""" + 942 return self.real == 0.0 and self.imag == 0.0 + 943 + 944 def conjugate(self): + 945 return CObs(self.real, -self.imag) + 946 + 947 def __add__(self, other): + 948 if isinstance(other, np.ndarray): + 949 return other + self + 950 elif hasattr(other, 'real') and hasattr(other, 'imag'): + 951 return CObs(self.real + other.real, + 952 self.imag + other.imag) + 953 else: + 954 return CObs(self.real + other, self.imag) + 955 + 956 def __radd__(self, y): + 957 return self + y + 958 + 959 def __sub__(self, other): + 960 if isinstance(other, np.ndarray): + 961 return -1 * (other - self) + 962 elif hasattr(other, 'real') and hasattr(other, 'imag'): + 963 return CObs(self.real - other.real, self.imag - other.imag) + 964 else: + 965 return CObs(self.real - other, self.imag) 966 - 967 def __pos__(self): - 968 return self + 967 def __rsub__(self, other): + 968 return -1 * (self - other) 969 - 970 def __neg__(self): - 971 return -1 * self - 972 - 973 def __eq__(self, other): - 974 return self.real == other.real and self.imag == other.imag - 975 - 976 def __str__(self): - 977 return '(' + str(self.real) + int(self.imag >= 0.0) * '+' + str(self.imag) + 'j)' - 978 - 979 def __repr__(self): - 980 return 'CObs[' + str(self) + ']' - 981 - 982 - 983def _format_uncertainty(value, dvalue, significance=2): - 984 """Creates a string of a value and its error in paranthesis notation, e.g., 13.02(45)""" - 985 if dvalue == 0.0 or (not np.isfinite(dvalue)): - 986 return str(value) - 987 if not isinstance(significance, int): - 988 raise TypeError("significance needs to be an integer.") - 989 if significance < 1: - 990 raise ValueError("significance needs to be larger than zero.") - 991 fexp = np.floor(np.log10(dvalue)) - 992 if fexp < 0.0: - 993 return '{:{form}}({:1.0f})'.format(value, dvalue * 10 ** (-fexp + significance - 1), form='.' + str(-int(fexp) + significance - 1) + 'f') - 994 elif fexp == 0.0: - 995 return f"{value:.{significance - 1}f}({dvalue:1.{significance - 1}f})" - 996 else: - 997 return f"{value:.{max(0, int(significance - fexp - 1))}f}({dvalue:2.{max(0, int(significance - fexp - 1))}f})" - 998 - 999 -1000def _expand_deltas(deltas, idx, shape, gapsize): -1001 """Expand deltas defined on idx to a regular range with spacing gapsize between two -1002 configurations and where holes are filled by 0. -1003 If idx is of type range, the deltas are not changed if the idx.step == gapsize. -1004 -1005 Parameters -1006 ---------- -1007 deltas : list -1008 List of fluctuations -1009 idx : list -1010 List or range of configs on which the deltas are defined, has to be sorted in ascending order. -1011 shape : int -1012 Number of configs in idx. -1013 gapsize : int -1014 The target distance between two configurations. If longer distances -1015 are found in idx, the data is expanded. 
-1016 """ -1017 if isinstance(idx, range): -1018 if (idx.step == gapsize): -1019 return deltas -1020 ret = np.zeros((idx[-1] - idx[0] + gapsize) // gapsize) -1021 for i in range(shape): -1022 ret[(idx[i] - idx[0]) // gapsize] = deltas[i] -1023 return ret -1024 + 970 def __mul__(self, other): + 971 if isinstance(other, np.ndarray): + 972 return other * self + 973 elif hasattr(other, 'real') and hasattr(other, 'imag'): + 974 if all(isinstance(i, Obs) for i in [self.real, self.imag, other.real, other.imag]): + 975 return CObs(derived_observable(lambda x, **kwargs: x[0] * x[1] - x[2] * x[3], + 976 [self.real, other.real, self.imag, other.imag], + 977 man_grad=[other.real.value, self.real.value, -other.imag.value, -self.imag.value]), + 978 derived_observable(lambda x, **kwargs: x[2] * x[1] + x[0] * x[3], + 979 [self.real, other.real, self.imag, other.imag], + 980 man_grad=[other.imag.value, self.imag.value, other.real.value, self.real.value])) + 981 elif getattr(other, 'imag', 0) != 0: + 982 return CObs(self.real * other.real - self.imag * other.imag, + 983 self.imag * other.real + self.real * other.imag) + 984 else: + 985 return CObs(self.real * other.real, self.imag * other.real) + 986 else: + 987 return CObs(self.real * other, self.imag * other) + 988 + 989 def __rmul__(self, other): + 990 return self * other + 991 + 992 def __truediv__(self, other): + 993 if isinstance(other, np.ndarray): + 994 return 1 / (other / self) + 995 elif hasattr(other, 'real') and hasattr(other, 'imag'): + 996 r = other.real ** 2 + other.imag ** 2 + 997 return CObs((self.real * other.real + self.imag * other.imag) / r, (self.imag * other.real - self.real * other.imag) / r) + 998 else: + 999 return CObs(self.real / other, self.imag / other) +1000 +1001 def __rtruediv__(self, other): +1002 r = self.real ** 2 + self.imag ** 2 +1003 if hasattr(other, 'real') and hasattr(other, 'imag'): +1004 return CObs((self.real * other.real + self.imag * other.imag) / r, (self.real * other.imag - self.imag * other.real) / r) +1005 else: +1006 return CObs(self.real * other / r, -self.imag * other / r) +1007 +1008 def __abs__(self): +1009 return np.sqrt(self.real**2 + self.imag**2) +1010 +1011 def __pos__(self): +1012 return self +1013 +1014 def __neg__(self): +1015 return -1 * self +1016 +1017 def __eq__(self, other): +1018 return self.real == other.real and self.imag == other.imag +1019 +1020 def __str__(self): +1021 return '(' + str(self.real) + int(self.imag >= 0.0) * '+' + str(self.imag) + 'j)' +1022 +1023 def __repr__(self): +1024 return 'CObs[' + str(self) + ']' 1025 -1026def _merge_idx(idl): -1027 """Returns the union of all lists in idl as range or sorted list -1028 -1029 Parameters -1030 ---------- -1031 idl : list -1032 List of lists or ranges. 
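A short sketch of how the complex observable class is meant to be used; the toy data, the ensemble name 'toy_ens' and the alias 'pe' for pyerrors are assumptions for illustration only.

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(2)
re_part = pe.Obs([rng.normal(1.0, 0.1, 500)], ['toy_ens'])
im_part = pe.Obs([rng.normal(0.3, 0.1, 500)], ['toy_ens'])

z = pe.CObs(re_part, im_part)     # complex observable built from two Obs
w = z * z.conjugate()             # |z|^2; the imaginary part cancels
w.gamma_method()                  # runs the gamma method on real and imaginary part
print(w.real)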
-1033 """ -1034 -1035 if _check_lists_equal(idl): -1036 return idl[0] -1037 -1038 idunion = sorted(set().union(*idl)) -1039 -1040 # Check whether idunion can be expressed as range -1041 idrange = range(idunion[0], idunion[-1] + 1, idunion[1] - idunion[0]) -1042 idtest = [list(idrange), idunion] -1043 if _check_lists_equal(idtest): -1044 return idrange -1045 -1046 return idunion -1047 +1026 +1027def _format_uncertainty(value, dvalue, significance=2): +1028 """Creates a string of a value and its error in paranthesis notation, e.g., 13.02(45)""" +1029 if dvalue == 0.0 or (not np.isfinite(dvalue)): +1030 return str(value) +1031 if not isinstance(significance, int): +1032 raise TypeError("significance needs to be an integer.") +1033 if significance < 1: +1034 raise ValueError("significance needs to be larger than zero.") +1035 fexp = np.floor(np.log10(dvalue)) +1036 if fexp < 0.0: +1037 return '{:{form}}({:1.0f})'.format(value, dvalue * 10 ** (-fexp + significance - 1), form='.' + str(-int(fexp) + significance - 1) + 'f') +1038 elif fexp == 0.0: +1039 return f"{value:.{significance - 1}f}({dvalue:1.{significance - 1}f})" +1040 else: +1041 return f"{value:.{max(0, int(significance - fexp - 1))}f}({dvalue:2.{max(0, int(significance - fexp - 1))}f})" +1042 +1043 +1044def _expand_deltas(deltas, idx, shape, gapsize): +1045 """Expand deltas defined on idx to a regular range with spacing gapsize between two +1046 configurations and where holes are filled by 0. +1047 If idx is of type range, the deltas are not changed if the idx.step == gapsize. 1048 -1049def _intersection_idx(idl): -1050 """Returns the intersection of all lists in idl as range or sorted list -1051 -1052 Parameters -1053 ---------- -1054 idl : list -1055 List of lists or ranges. -1056 """ -1057 -1058 if _check_lists_equal(idl): -1059 return idl[0] -1060 -1061 idinter = sorted(set.intersection(*[set(o) for o in idl])) -1062 -1063 # Check whether idinter can be expressed as range -1064 try: -1065 idrange = range(idinter[0], idinter[-1] + 1, idinter[1] - idinter[0]) -1066 idtest = [list(idrange), idinter] -1067 if _check_lists_equal(idtest): -1068 return idrange -1069 except IndexError: -1070 pass -1071 -1072 return idinter -1073 -1074 -1075def _expand_deltas_for_merge(deltas, idx, shape, new_idx): -1076 """Expand deltas defined on idx to the list of configs that is defined by new_idx. -1077 New, empty entries are filled by 0. If idx and new_idx are of type range, the smallest -1078 common divisor of the step sizes is used as new step size. -1079 -1080 Parameters -1081 ---------- -1082 deltas : list -1083 List of fluctuations -1084 idx : list -1085 List or range of configs on which the deltas are defined. -1086 Has to be a subset of new_idx and has to be sorted in ascending order. -1087 shape : list -1088 Number of configs in idx. -1089 new_idx : list -1090 List of configs that defines the new range, has to be sorted in ascending order. -1091 """ +1049 Parameters +1050 ---------- +1051 deltas : list +1052 List of fluctuations +1053 idx : list +1054 List or range of configs on which the deltas are defined, has to be sorted in ascending order. +1055 shape : int +1056 Number of configs in idx. +1057 gapsize : int +1058 The target distance between two configurations. If longer distances +1059 are found in idx, the data is expanded. 
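The parenthesis notation produced by _format_uncertainty is what str() and the format protocol of Obs return; a small sketch, assuming toy data, the ensemble name 'toy_ens' and the alias 'pe' for pyerrors:

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(3)
obs = pe.Obs([rng.normal(0.1337, 0.01, 1000)], ['toy_ens'])
obs.gamma_method()      # the error estimate is needed for the string representation

print(obs)              # value(error) with two significant digits on the error
print(f"{obs:3}")       # three significant digits on the error
print(f"{obs:+2}")      # force an explicit sign for positive values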
+1060 """ +1061 if isinstance(idx, range): +1062 if (idx.step == gapsize): +1063 return deltas +1064 ret = np.zeros((idx[-1] - idx[0] + gapsize) // gapsize) +1065 for i in range(shape): +1066 ret[(idx[i] - idx[0]) // gapsize] = deltas[i] +1067 return ret +1068 +1069 +1070def _merge_idx(idl): +1071 """Returns the union of all lists in idl as range or sorted list +1072 +1073 Parameters +1074 ---------- +1075 idl : list +1076 List of lists or ranges. +1077 """ +1078 +1079 if _check_lists_equal(idl): +1080 return idl[0] +1081 +1082 idunion = sorted(set().union(*idl)) +1083 +1084 # Check whether idunion can be expressed as range +1085 idrange = range(idunion[0], idunion[-1] + 1, idunion[1] - idunion[0]) +1086 idtest = [list(idrange), idunion] +1087 if _check_lists_equal(idtest): +1088 return idrange +1089 +1090 return idunion +1091 1092 -1093 if type(idx) is range and type(new_idx) is range: -1094 if idx == new_idx: -1095 return deltas -1096 ret = np.zeros(new_idx[-1] - new_idx[0] + 1) -1097 for i in range(shape): -1098 ret[idx[i] - new_idx[0]] = deltas[i] -1099 return np.array([ret[new_idx[i] - new_idx[0]] for i in range(len(new_idx))]) * len(new_idx) / len(idx) -1100 +1093def _intersection_idx(idl): +1094 """Returns the intersection of all lists in idl as range or sorted list +1095 +1096 Parameters +1097 ---------- +1098 idl : list +1099 List of lists or ranges. +1100 """ 1101 -1102def derived_observable(func, data, array_mode=False, **kwargs): -1103 """Construct a derived Obs according to func(data, **kwargs) using automatic differentiation. +1102 if _check_lists_equal(idl): +1103 return idl[0] 1104 -1105 Parameters -1106 ---------- -1107 func : object -1108 arbitrary function of the form func(data, **kwargs). For the -1109 automatic differentiation to work, all numpy functions have to have -1110 the autograd wrapper (use 'import autograd.numpy as anp'). -1111 data : list -1112 list of Obs, e.g. [obs1, obs2, obs3]. -1113 num_grad : bool -1114 if True, numerical derivatives are used instead of autograd -1115 (default False). To control the numerical differentiation the -1116 kwargs of numdifftools.step_generators.MaxStepGenerator -1117 can be used. -1118 man_grad : list -1119 manually supply a list or an array which contains the jacobian -1120 of func. Use cautiously, supplying the wrong derivative will -1121 not be intercepted. -1122 -1123 Notes -1124 ----- -1125 For simple mathematical operations it can be practical to use anonymous -1126 functions. For the ratio of two observables one can e.g. use -1127 -1128 new_obs = derived_observable(lambda x: x[0] / x[1], [obs1, obs2]) -1129 """ -1130 -1131 data = np.asarray(data) -1132 raveled_data = data.ravel() -1133 -1134 # Workaround for matrix operations containing non Obs data -1135 if not all(isinstance(x, Obs) for x in raveled_data): -1136 for i in range(len(raveled_data)): -1137 if isinstance(raveled_data[i], (int, float)): -1138 raveled_data[i] = cov_Obs(raveled_data[i], 0.0, "###dummy_covobs###") -1139 -1140 allcov = {} -1141 for o in raveled_data: -1142 for name in o.cov_names: -1143 if name in allcov: -1144 if not np.allclose(allcov[name], o.covobs[name].cov): -1145 raise Exception('Inconsistent covariance matrices for %s!' 
% (name)) -1146 else: -1147 allcov[name] = o.covobs[name].cov +1105 idinter = sorted(set.intersection(*[set(o) for o in idl])) +1106 +1107 # Check whether idinter can be expressed as range +1108 try: +1109 idrange = range(idinter[0], idinter[-1] + 1, idinter[1] - idinter[0]) +1110 idtest = [list(idrange), idinter] +1111 if _check_lists_equal(idtest): +1112 return idrange +1113 except IndexError: +1114 pass +1115 +1116 return idinter +1117 +1118 +1119def _expand_deltas_for_merge(deltas, idx, shape, new_idx): +1120 """Expand deltas defined on idx to the list of configs that is defined by new_idx. +1121 New, empty entries are filled by 0. If idx and new_idx are of type range, the smallest +1122 common divisor of the step sizes is used as new step size. +1123 +1124 Parameters +1125 ---------- +1126 deltas : list +1127 List of fluctuations +1128 idx : list +1129 List or range of configs on which the deltas are defined. +1130 Has to be a subset of new_idx and has to be sorted in ascending order. +1131 shape : list +1132 Number of configs in idx. +1133 new_idx : list +1134 List of configs that defines the new range, has to be sorted in ascending order. +1135 """ +1136 +1137 if type(idx) is range and type(new_idx) is range: +1138 if idx == new_idx: +1139 return deltas +1140 ret = np.zeros(new_idx[-1] - new_idx[0] + 1) +1141 for i in range(shape): +1142 ret[idx[i] - new_idx[0]] = deltas[i] +1143 return np.array([ret[new_idx[i] - new_idx[0]] for i in range(len(new_idx))]) * len(new_idx) / len(idx) +1144 +1145 +1146def derived_observable(func, data, array_mode=False, **kwargs): +1147 """Construct a derived Obs according to func(data, **kwargs) using automatic differentiation. 1148 -1149 n_obs = len(raveled_data) -1150 new_names = sorted(set([y for x in [o.names for o in raveled_data] for y in x])) -1151 new_cov_names = sorted(set([y for x in [o.cov_names for o in raveled_data] for y in x])) -1152 new_sample_names = sorted(set(new_names) - set(new_cov_names)) -1153 -1154 reweighted = len(list(filter(lambda o: o.reweighted is True, raveled_data))) > 0 -1155 -1156 if data.ndim == 1: -1157 values = np.array([o.value for o in data]) -1158 else: -1159 values = np.vectorize(lambda x: x.value)(data) -1160 -1161 new_values = func(values, **kwargs) -1162 -1163 multi = int(isinstance(new_values, np.ndarray)) -1164 -1165 new_r_values = {} -1166 new_idl_d = {} -1167 for name in new_sample_names: -1168 idl = [] -1169 tmp_values = np.zeros(n_obs) -1170 for i, item in enumerate(raveled_data): -1171 tmp_values[i] = item.r_values.get(name, item.value) -1172 tmp_idl = item.idl.get(name) -1173 if tmp_idl is not None: -1174 idl.append(tmp_idl) -1175 if multi > 0: -1176 tmp_values = np.array(tmp_values).reshape(data.shape) -1177 new_r_values[name] = func(tmp_values, **kwargs) -1178 new_idl_d[name] = _merge_idx(idl) -1179 -1180 if 'man_grad' in kwargs: -1181 deriv = np.asarray(kwargs.get('man_grad')) -1182 if new_values.shape + data.shape != deriv.shape: -1183 raise Exception('Manual derivative does not have correct shape.') -1184 elif kwargs.get('num_grad') is True: -1185 if multi > 0: -1186 raise Exception('Multi mode currently not supported for numerical derivative') -1187 options = { -1188 'base_step': 0.1, -1189 'step_ratio': 2.5} -1190 for key in options.keys(): -1191 kwarg = kwargs.get(key) -1192 if kwarg is not None: -1193 options[key] = kwarg -1194 tmp_df = nd.Gradient(func, order=4, **{k: v for k, v in options.items() if v is not None})(values, **kwargs) -1195 if tmp_df.size == 1: -1196 deriv = 
np.array([tmp_df.real]) -1197 else: -1198 deriv = tmp_df.real -1199 else: -1200 deriv = jacobian(func)(values, **kwargs) -1201 -1202 final_result = np.zeros(new_values.shape, dtype=object) -1203 -1204 if array_mode is True: -1205 -1206 class _Zero_grad(): -1207 def __init__(self, N): -1208 self.grad = np.zeros((N, 1)) -1209 -1210 new_covobs_lengths = dict(set([y for x in [[(n, o.covobs[n].N) for n in o.cov_names] for o in raveled_data] for y in x])) -1211 d_extracted = {} -1212 g_extracted = {} -1213 for name in new_sample_names: -1214 d_extracted[name] = [] -1215 ens_length = len(new_idl_d[name]) -1216 for i_dat, dat in enumerate(data): -1217 d_extracted[name].append(np.array([_expand_deltas_for_merge(o.deltas.get(name, np.zeros(ens_length)), o.idl.get(name, new_idl_d[name]), o.shape.get(name, ens_length), new_idl_d[name]) for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (ens_length, ))) -1218 for name in new_cov_names: -1219 g_extracted[name] = [] -1220 zero_grad = _Zero_grad(new_covobs_lengths[name]) -1221 for i_dat, dat in enumerate(data): -1222 g_extracted[name].append(np.array([o.covobs.get(name, zero_grad).grad for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (new_covobs_lengths[name], 1))) +1149 Parameters +1150 ---------- +1151 func : object +1152 arbitrary function of the form func(data, **kwargs). For the +1153 automatic differentiation to work, all numpy functions have to have +1154 the autograd wrapper (use 'import autograd.numpy as anp'). +1155 data : list +1156 list of Obs, e.g. [obs1, obs2, obs3]. +1157 num_grad : bool +1158 if True, numerical derivatives are used instead of autograd +1159 (default False). To control the numerical differentiation the +1160 kwargs of numdifftools.step_generators.MaxStepGenerator +1161 can be used. +1162 man_grad : list +1163 manually supply a list or an array which contains the jacobian +1164 of func. Use cautiously, supplying the wrong derivative will +1165 not be intercepted. +1166 +1167 Notes +1168 ----- +1169 For simple mathematical operations it can be practical to use anonymous +1170 functions. For the ratio of two observables one can e.g. use +1171 +1172 new_obs = derived_observable(lambda x: x[0] / x[1], [obs1, obs2]) +1173 """ +1174 +1175 data = np.asarray(data) +1176 raveled_data = data.ravel() +1177 +1178 # Workaround for matrix operations containing non Obs data +1179 if not all(isinstance(x, Obs) for x in raveled_data): +1180 for i in range(len(raveled_data)): +1181 if isinstance(raveled_data[i], (int, float)): +1182 raveled_data[i] = cov_Obs(raveled_data[i], 0.0, "###dummy_covobs###") +1183 +1184 allcov = {} +1185 for o in raveled_data: +1186 for name in o.cov_names: +1187 if name in allcov: +1188 if not np.allclose(allcov[name], o.covobs[name].cov): +1189 raise Exception('Inconsistent covariance matrices for %s!' 
% (name)) +1190 else: +1191 allcov[name] = o.covobs[name].cov +1192 +1193 n_obs = len(raveled_data) +1194 new_names = sorted(set([y for x in [o.names for o in raveled_data] for y in x])) +1195 new_cov_names = sorted(set([y for x in [o.cov_names for o in raveled_data] for y in x])) +1196 new_sample_names = sorted(set(new_names) - set(new_cov_names)) +1197 +1198 reweighted = len(list(filter(lambda o: o.reweighted is True, raveled_data))) > 0 +1199 +1200 if data.ndim == 1: +1201 values = np.array([o.value for o in data]) +1202 else: +1203 values = np.vectorize(lambda x: x.value)(data) +1204 +1205 new_values = func(values, **kwargs) +1206 +1207 multi = int(isinstance(new_values, np.ndarray)) +1208 +1209 new_r_values = {} +1210 new_idl_d = {} +1211 for name in new_sample_names: +1212 idl = [] +1213 tmp_values = np.zeros(n_obs) +1214 for i, item in enumerate(raveled_data): +1215 tmp_values[i] = item.r_values.get(name, item.value) +1216 tmp_idl = item.idl.get(name) +1217 if tmp_idl is not None: +1218 idl.append(tmp_idl) +1219 if multi > 0: +1220 tmp_values = np.array(tmp_values).reshape(data.shape) +1221 new_r_values[name] = func(tmp_values, **kwargs) +1222 new_idl_d[name] = _merge_idx(idl) 1223 -1224 for i_val, new_val in np.ndenumerate(new_values): -1225 new_deltas = {} -1226 new_grad = {} -1227 if array_mode is True: -1228 for name in new_sample_names: -1229 ens_length = d_extracted[name][0].shape[-1] -1230 new_deltas[name] = np.zeros(ens_length) -1231 for i_dat, dat in enumerate(d_extracted[name]): -1232 new_deltas[name] += np.tensordot(deriv[i_val + (i_dat, )], dat) -1233 for name in new_cov_names: -1234 new_grad[name] = 0 -1235 for i_dat, dat in enumerate(g_extracted[name]): -1236 new_grad[name] += np.tensordot(deriv[i_val + (i_dat, )], dat) -1237 else: -1238 for j_obs, obs in np.ndenumerate(data): -1239 for name in obs.names: -1240 if name in obs.cov_names: -1241 new_grad[name] = new_grad.get(name, 0) + deriv[i_val + j_obs] * obs.covobs[name].grad -1242 else: -1243 new_deltas[name] = new_deltas.get(name, 0) + deriv[i_val + j_obs] * _expand_deltas_for_merge(obs.deltas[name], obs.idl[name], obs.shape[name], new_idl_d[name]) -1244 -1245 new_covobs = {name: Covobs(0, allcov[name], name, grad=new_grad[name]) for name in new_grad} -1246 -1247 if not set(new_covobs.keys()).isdisjoint(new_deltas.keys()): -1248 raise Exception('The same name has been used for deltas and covobs!') -1249 new_samples = [] -1250 new_means = [] -1251 new_idl = [] -1252 new_names_obs = [] -1253 for name in new_names: -1254 if name not in new_covobs: -1255 new_samples.append(new_deltas[name]) -1256 new_idl.append(new_idl_d[name]) -1257 new_means.append(new_r_values[name][i_val]) -1258 new_names_obs.append(name) -1259 final_result[i_val] = Obs(new_samples, new_names_obs, means=new_means, idl=new_idl) -1260 for name in new_covobs: -1261 final_result[i_val].names.append(name) -1262 final_result[i_val]._covobs = new_covobs -1263 final_result[i_val]._value = new_val -1264 final_result[i_val].reweighted = reweighted -1265 -1266 if multi == 0: -1267 final_result = final_result.item() -1268 -1269 return final_result -1270 -1271 -1272def _reduce_deltas(deltas, idx_old, idx_new): -1273 """Extract deltas defined on idx_old on all configs of idx_new. -1274 -1275 Assumes, that idx_old and idx_new are correctly defined idl, i.e., they -1276 are ordered in an ascending order. 
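A sketch of the three ways derived_observable can obtain derivatives (automatic differentiation, a manually supplied gradient, numerical differentiation); the toy data, the ensemble name 'toy_ens' and the alias 'pe' are illustrative assumptions, and the manual gradient mirrors the one used by __truediv__ above.

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(4)
obs1 = pe.Obs([rng.normal(1.7, 0.1, 1000)], ['toy_ens'])
obs2 = pe.Obs([rng.normal(0.8, 0.1, 1000)], ['toy_ens'])

# Ratio via autograd (equivalent to obs1 / obs2).
ratio = pe.derived_observable(lambda x, **kwargs: x[0] / x[1], [obs1, obs2])

# Same quantity with a manually supplied jacobian ...
ratio_man = pe.derived_observable(lambda x, **kwargs: x[0] / x[1], [obs1, obs2],
                                  man_grad=[1 / obs2.value, -obs1.value / obs2.value ** 2])

# ... or with numerical derivatives via numdifftools.
ratio_num = pe.derived_observable(lambda x, **kwargs: x[0] / x[1], [obs1, obs2], num_grad=True)

for r in (ratio, ratio_man, ratio_num):
    r.gamma_method()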
-1277 -1278 Parameters -1279 ---------- -1280 deltas : list -1281 List of fluctuations -1282 idx_old : list -1283 List or range of configs on which the deltas are defined -1284 idx_new : list -1285 List of configs for which we want to extract the deltas. -1286 Has to be a subset of idx_old. -1287 """ -1288 if not len(deltas) == len(idx_old): -1289 raise Exception('Length of deltas and idx_old have to be the same: %d != %d' % (len(deltas), len(idx_old))) -1290 if type(idx_old) is range and type(idx_new) is range: -1291 if idx_old == idx_new: -1292 return deltas -1293 if _check_lists_equal([idx_old, idx_new]): -1294 return deltas -1295 indices = np.intersect1d(idx_old, idx_new, assume_unique=True, return_indices=True)[1] -1296 if len(indices) < len(idx_new): -1297 raise Exception('Error in _reduce_deltas: Config of idx_new not in idx_old') -1298 return np.array(deltas)[indices] -1299 -1300 -1301def reweight(weight, obs, **kwargs): -1302 """Reweight a list of observables. -1303 -1304 Parameters -1305 ---------- -1306 weight : Obs -1307 Reweighting factor. An Observable that has to be defined on a superset of the -1308 configurations in obs[i].idl for all i. -1309 obs : list -1310 list of Obs, e.g. [obs1, obs2, obs3]. -1311 all_configs : bool -1312 if True, the reweighted observables are normalized by the average of -1313 the reweighting factor on all configurations in weight.idl and not -1314 on the configurations in obs[i].idl. Default False. -1315 """ -1316 result = [] -1317 for i in range(len(obs)): -1318 if len(obs[i].cov_names): -1319 raise Exception('Error: Not possible to reweight an Obs that contains covobs!') -1320 if not set(obs[i].names).issubset(weight.names): -1321 raise Exception('Error: Ensembles do not fit') -1322 for name in obs[i].names: -1323 if not set(obs[i].idl[name]).issubset(weight.idl[name]): -1324 raise Exception('obs[%d] has to be defined on a subset of the configs in weight.idl[%s]!' % (i, name)) -1325 new_samples = [] -1326 w_deltas = {} -1327 for name in sorted(obs[i].names): -1328 w_deltas[name] = _reduce_deltas(weight.deltas[name], weight.idl[name], obs[i].idl[name]) -1329 new_samples.append((w_deltas[name] + weight.r_values[name]) * (obs[i].deltas[name] + obs[i].r_values[name])) -1330 tmp_obs = Obs(new_samples, sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)]) -1331 -1332 if kwargs.get('all_configs'): -1333 new_weight = weight -1334 else: -1335 new_weight = Obs([w_deltas[name] + weight.r_values[name] for name in sorted(obs[i].names)], sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)]) -1336 -1337 result.append(tmp_obs / new_weight) -1338 result[-1].reweighted = True -1339 -1340 return result -1341 -1342 -1343def correlate(obs_a, obs_b): -1344 """Correlate two observables. -1345 -1346 Parameters -1347 ---------- -1348 obs_a : Obs -1349 First observable -1350 obs_b : Obs -1351 Second observable -1352 -1353 Notes -1354 ----- -1355 Keep in mind to only correlate primary observables which have not been reweighted -1356 yet. The reweighting has to be applied after correlating the observables. -1357 Currently only works if ensembles are identical (this is not strictly necessary). 
-1358 """ -1359 -1360 if sorted(obs_a.names) != sorted(obs_b.names): -1361 raise Exception(f"Ensembles do not fit {set(sorted(obs_a.names)) ^ set(sorted(obs_b.names))}") -1362 if len(obs_a.cov_names) or len(obs_b.cov_names): -1363 raise Exception('Error: Not possible to correlate Obs that contain covobs!') -1364 for name in obs_a.names: -1365 if obs_a.shape[name] != obs_b.shape[name]: -1366 raise Exception('Shapes of ensemble', name, 'do not fit') -1367 if obs_a.idl[name] != obs_b.idl[name]: -1368 raise Exception('idl of ensemble', name, 'do not fit') -1369 -1370 if obs_a.reweighted is True: -1371 warnings.warn("The first observable is already reweighted.", RuntimeWarning) -1372 if obs_b.reweighted is True: -1373 warnings.warn("The second observable is already reweighted.", RuntimeWarning) -1374 -1375 new_samples = [] -1376 new_idl = [] -1377 for name in sorted(obs_a.names): -1378 new_samples.append((obs_a.deltas[name] + obs_a.r_values[name]) * (obs_b.deltas[name] + obs_b.r_values[name])) -1379 new_idl.append(obs_a.idl[name]) +1224 if 'man_grad' in kwargs: +1225 deriv = np.asarray(kwargs.get('man_grad')) +1226 if new_values.shape + data.shape != deriv.shape: +1227 raise Exception('Manual derivative does not have correct shape.') +1228 elif kwargs.get('num_grad') is True: +1229 if multi > 0: +1230 raise Exception('Multi mode currently not supported for numerical derivative') +1231 options = { +1232 'base_step': 0.1, +1233 'step_ratio': 2.5} +1234 for key in options.keys(): +1235 kwarg = kwargs.get(key) +1236 if kwarg is not None: +1237 options[key] = kwarg +1238 tmp_df = nd.Gradient(func, order=4, **{k: v for k, v in options.items() if v is not None})(values, **kwargs) +1239 if tmp_df.size == 1: +1240 deriv = np.array([tmp_df.real]) +1241 else: +1242 deriv = tmp_df.real +1243 else: +1244 deriv = jacobian(func)(values, **kwargs) +1245 +1246 final_result = np.zeros(new_values.shape, dtype=object) +1247 +1248 if array_mode is True: +1249 +1250 class _Zero_grad(): +1251 def __init__(self, N): +1252 self.grad = np.zeros((N, 1)) +1253 +1254 new_covobs_lengths = dict(set([y for x in [[(n, o.covobs[n].N) for n in o.cov_names] for o in raveled_data] for y in x])) +1255 d_extracted = {} +1256 g_extracted = {} +1257 for name in new_sample_names: +1258 d_extracted[name] = [] +1259 ens_length = len(new_idl_d[name]) +1260 for i_dat, dat in enumerate(data): +1261 d_extracted[name].append(np.array([_expand_deltas_for_merge(o.deltas.get(name, np.zeros(ens_length)), o.idl.get(name, new_idl_d[name]), o.shape.get(name, ens_length), new_idl_d[name]) for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (ens_length, ))) +1262 for name in new_cov_names: +1263 g_extracted[name] = [] +1264 zero_grad = _Zero_grad(new_covobs_lengths[name]) +1265 for i_dat, dat in enumerate(data): +1266 g_extracted[name].append(np.array([o.covobs.get(name, zero_grad).grad for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (new_covobs_lengths[name], 1))) +1267 +1268 for i_val, new_val in np.ndenumerate(new_values): +1269 new_deltas = {} +1270 new_grad = {} +1271 if array_mode is True: +1272 for name in new_sample_names: +1273 ens_length = d_extracted[name][0].shape[-1] +1274 new_deltas[name] = np.zeros(ens_length) +1275 for i_dat, dat in enumerate(d_extracted[name]): +1276 new_deltas[name] += np.tensordot(deriv[i_val + (i_dat, )], dat) +1277 for name in new_cov_names: +1278 new_grad[name] = 0 +1279 for i_dat, dat in enumerate(g_extracted[name]): +1280 new_grad[name] += np.tensordot(deriv[i_val + (i_dat, )], dat) 
+1281 else: +1282 for j_obs, obs in np.ndenumerate(data): +1283 for name in obs.names: +1284 if name in obs.cov_names: +1285 new_grad[name] = new_grad.get(name, 0) + deriv[i_val + j_obs] * obs.covobs[name].grad +1286 else: +1287 new_deltas[name] = new_deltas.get(name, 0) + deriv[i_val + j_obs] * _expand_deltas_for_merge(obs.deltas[name], obs.idl[name], obs.shape[name], new_idl_d[name]) +1288 +1289 new_covobs = {name: Covobs(0, allcov[name], name, grad=new_grad[name]) for name in new_grad} +1290 +1291 if not set(new_covobs.keys()).isdisjoint(new_deltas.keys()): +1292 raise Exception('The same name has been used for deltas and covobs!') +1293 new_samples = [] +1294 new_means = [] +1295 new_idl = [] +1296 new_names_obs = [] +1297 for name in new_names: +1298 if name not in new_covobs: +1299 new_samples.append(new_deltas[name]) +1300 new_idl.append(new_idl_d[name]) +1301 new_means.append(new_r_values[name][i_val]) +1302 new_names_obs.append(name) +1303 final_result[i_val] = Obs(new_samples, new_names_obs, means=new_means, idl=new_idl) +1304 for name in new_covobs: +1305 final_result[i_val].names.append(name) +1306 final_result[i_val]._covobs = new_covobs +1307 final_result[i_val]._value = new_val +1308 final_result[i_val].reweighted = reweighted +1309 +1310 if multi == 0: +1311 final_result = final_result.item() +1312 +1313 return final_result +1314 +1315 +1316def _reduce_deltas(deltas, idx_old, idx_new): +1317 """Extract deltas defined on idx_old on all configs of idx_new. +1318 +1319 Assumes, that idx_old and idx_new are correctly defined idl, i.e., they +1320 are ordered in an ascending order. +1321 +1322 Parameters +1323 ---------- +1324 deltas : list +1325 List of fluctuations +1326 idx_old : list +1327 List or range of configs on which the deltas are defined +1328 idx_new : list +1329 List of configs for which we want to extract the deltas. +1330 Has to be a subset of idx_old. +1331 """ +1332 if not len(deltas) == len(idx_old): +1333 raise Exception('Length of deltas and idx_old have to be the same: %d != %d' % (len(deltas), len(idx_old))) +1334 if type(idx_old) is range and type(idx_new) is range: +1335 if idx_old == idx_new: +1336 return deltas +1337 if _check_lists_equal([idx_old, idx_new]): +1338 return deltas +1339 indices = np.intersect1d(idx_old, idx_new, assume_unique=True, return_indices=True)[1] +1340 if len(indices) < len(idx_new): +1341 raise Exception('Error in _reduce_deltas: Config of idx_new not in idx_old') +1342 return np.array(deltas)[indices] +1343 +1344 +1345def reweight(weight, obs, **kwargs): +1346 """Reweight a list of observables. +1347 +1348 Parameters +1349 ---------- +1350 weight : Obs +1351 Reweighting factor. An Observable that has to be defined on a superset of the +1352 configurations in obs[i].idl for all i. +1353 obs : list +1354 list of Obs, e.g. [obs1, obs2, obs3]. +1355 all_configs : bool +1356 if True, the reweighted observables are normalized by the average of +1357 the reweighting factor on all configurations in weight.idl and not +1358 on the configurations in obs[i].idl. Default False. 
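A usage sketch for reweight; the toy reweighting factor, the ensemble name 'toy_ens' and the alias 'pe' are assumptions for illustration. The weight has to be defined on (a superset of) the configurations of each observable.

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(5)
n_cfg = 1000
obs = pe.Obs([rng.normal(0.25, 0.03, n_cfg)], ['toy_ens'])
weight = pe.Obs([np.exp(rng.normal(0.0, 0.05, n_cfg))], ['toy_ens'])   # toy reweighting factor

reweighted_obs = pe.reweight(weight, [obs])[0]   # reweight returns a list, one entry per input Obs
reweighted_obs.gamma_method()
print(reweighted_obs.reweighted)                 # True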
+1359 """ +1360 result = [] +1361 for i in range(len(obs)): +1362 if len(obs[i].cov_names): +1363 raise Exception('Error: Not possible to reweight an Obs that contains covobs!') +1364 if not set(obs[i].names).issubset(weight.names): +1365 raise Exception('Error: Ensembles do not fit') +1366 for name in obs[i].names: +1367 if not set(obs[i].idl[name]).issubset(weight.idl[name]): +1368 raise Exception('obs[%d] has to be defined on a subset of the configs in weight.idl[%s]!' % (i, name)) +1369 new_samples = [] +1370 w_deltas = {} +1371 for name in sorted(obs[i].names): +1372 w_deltas[name] = _reduce_deltas(weight.deltas[name], weight.idl[name], obs[i].idl[name]) +1373 new_samples.append((w_deltas[name] + weight.r_values[name]) * (obs[i].deltas[name] + obs[i].r_values[name])) +1374 tmp_obs = Obs(new_samples, sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)]) +1375 +1376 if kwargs.get('all_configs'): +1377 new_weight = weight +1378 else: +1379 new_weight = Obs([w_deltas[name] + weight.r_values[name] for name in sorted(obs[i].names)], sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)]) 1380 -1381 o = Obs(new_samples, sorted(obs_a.names), idl=new_idl) -1382 o.reweighted = obs_a.reweighted or obs_b.reweighted -1383 return o -1384 +1381 result.append(tmp_obs / new_weight) +1382 result[-1].reweighted = True +1383 +1384 return result 1385 -1386def covariance(obs, visualize=False, correlation=False, smooth=None, **kwargs): -1387 r'''Calculates the error covariance matrix of a set of observables. -1388 -1389 WARNING: This function should be used with care, especially for observables with support on multiple -1390 ensembles with differing autocorrelations. See the notes below for details. -1391 -1392 The gamma method has to be applied first to all observables. -1393 -1394 Parameters -1395 ---------- -1396 obs : list or numpy.ndarray -1397 List or one dimensional array of Obs -1398 visualize : bool -1399 If True plots the corresponding normalized correlation matrix (default False). -1400 correlation : bool -1401 If True the correlation matrix instead of the error covariance matrix is returned (default False). -1402 smooth : None or int -1403 If smooth is an integer 'E' between 2 and the dimension of the matrix minus 1 the eigenvalue -1404 smoothing procedure of hep-lat/9412087 is applied to the correlation matrix which leaves the -1405 largest E eigenvalues essentially unchanged and smoothes the smaller eigenvalues to avoid extremely -1406 small ones. -1407 -1408 Notes -1409 ----- -1410 The error covariance is defined such that it agrees with the squared standard error for two identical observables -1411 $$\operatorname{cov}(a,a)=\sum_{s=1}^N\delta_a^s\delta_a^s/N^2=\Gamma_{aa}(0)/N=\operatorname{var}(a)/N=\sigma_a^2$$ -1412 in the absence of autocorrelation. -1413 The error covariance is estimated by calculating the correlation matrix assuming no autocorrelation and then rescaling the correlation matrix by the full errors including the previous gamma method estimate for the autocorrelation of the observables. The covariance at windowsize 0 is guaranteed to be positive semi-definite -1414 $$\sum_{i,j}v_i\Gamma_{ij}(0)v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i,j}v_i\delta_i^s\delta_j^s v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i}|v_i\delta_i^s|^2\geq 0\,,$$ for every $v\in\mathbb{R}^M$, while such an identity does not hold for larger windows/lags. 
-1415 For observables defined on a single ensemble our approximation is equivalent to assuming that the integrated autocorrelation time of an off-diagonal element is equal to the geometric mean of the integrated autocorrelation times of the corresponding diagonal elements. -1416 $$\tau_{\mathrm{int}, ij}=\sqrt{\tau_{\mathrm{int}, i}\times \tau_{\mathrm{int}, j}}$$ -1417 This construction ensures that the estimated covariance matrix is positive semi-definite (up to numerical rounding errors). -1418 ''' -1419 -1420 length = len(obs) -1421 -1422 max_samples = np.max([o.N for o in obs]) -1423 if max_samples <= length and not [item for sublist in [o.cov_names for o in obs] for item in sublist]: -1424 warnings.warn(f"The dimension of the covariance matrix ({length}) is larger or equal to the number of samples ({max_samples}). This will result in a rank deficient matrix.", RuntimeWarning) -1425 -1426 cov = np.zeros((length, length)) -1427 for i in range(length): -1428 for j in range(i, length): -1429 cov[i, j] = _covariance_element(obs[i], obs[j]) -1430 cov = cov + cov.T - np.diag(np.diag(cov)) -1431 -1432 corr = np.diag(1 / np.sqrt(np.diag(cov))) @ cov @ np.diag(1 / np.sqrt(np.diag(cov))) -1433 -1434 if isinstance(smooth, int): -1435 corr = _smooth_eigenvalues(corr, smooth) -1436 -1437 if visualize: -1438 plt.matshow(corr, vmin=-1, vmax=1) -1439 plt.set_cmap('RdBu') -1440 plt.colorbar() -1441 plt.draw() -1442 -1443 if correlation is True: -1444 return corr -1445 -1446 errors = [o.dvalue for o in obs] -1447 cov = np.diag(errors) @ corr @ np.diag(errors) -1448 -1449 eigenvalues = np.linalg.eigh(cov)[0] -1450 if not np.all(eigenvalues >= 0): -1451 warnings.warn("Covariance matrix is not positive semi-definite (Eigenvalues: " + str(eigenvalues) + ")", RuntimeWarning) -1452 -1453 return cov -1454 -1455 -1456def _smooth_eigenvalues(corr, E): -1457 """Eigenvalue smoothing as described in hep-lat/9412087 -1458 -1459 corr : np.ndarray -1460 correlation matrix -1461 E : integer -1462 Number of eigenvalues to be left substantially unchanged -1463 """ -1464 if not (2 < E < corr.shape[0] - 1): -1465 raise Exception(f"'E' has to be between 2 and the dimension of the correlation matrix minus 1 ({corr.shape[0] - 1}).") -1466 vals, vec = np.linalg.eigh(corr) -1467 lambda_min = np.mean(vals[:-E]) -1468 vals[vals < lambda_min] = lambda_min -1469 vals /= np.mean(vals) -1470 return vec @ np.diag(vals) @ vec.T -1471 -1472 -1473def _covariance_element(obs1, obs2): -1474 """Estimates the covariance of two Obs objects, neglecting autocorrelations.""" +1386 +1387def correlate(obs_a, obs_b): +1388 """Correlate two observables. +1389 +1390 Parameters +1391 ---------- +1392 obs_a : Obs +1393 First observable +1394 obs_b : Obs +1395 Second observable +1396 +1397 Notes +1398 ----- +1399 Keep in mind to only correlate primary observables which have not been reweighted +1400 yet. The reweighting has to be applied after correlating the observables. +1401 Currently only works if ensembles are identical (this is not strictly necessary). 
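A sketch of correlate applied to two primary observables defined on the same ensemble and the same configurations; the toy data, the ensemble name 'toy_ens' and the alias 'pe' are assumptions:

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(6)
n_cfg = 1000
raw_a = rng.normal(1.0, 0.1, n_cfg)
raw_b = 0.5 * raw_a + rng.normal(0.0, 0.05, n_cfg)   # toy data correlated with raw_a

obs_a = pe.Obs([raw_a], ['toy_ens'])
obs_b = pe.Obs([raw_b], ['toy_ens'])

ab = pe.correlate(obs_a, obs_b)   # configuration-wise product, i.e. the observable a*b
ab.gamma_method()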
+1402 """ +1403 +1404 if sorted(obs_a.names) != sorted(obs_b.names): +1405 raise Exception(f"Ensembles do not fit {set(sorted(obs_a.names)) ^ set(sorted(obs_b.names))}") +1406 if len(obs_a.cov_names) or len(obs_b.cov_names): +1407 raise Exception('Error: Not possible to correlate Obs that contain covobs!') +1408 for name in obs_a.names: +1409 if obs_a.shape[name] != obs_b.shape[name]: +1410 raise Exception('Shapes of ensemble', name, 'do not fit') +1411 if obs_a.idl[name] != obs_b.idl[name]: +1412 raise Exception('idl of ensemble', name, 'do not fit') +1413 +1414 if obs_a.reweighted is True: +1415 warnings.warn("The first observable is already reweighted.", RuntimeWarning) +1416 if obs_b.reweighted is True: +1417 warnings.warn("The second observable is already reweighted.", RuntimeWarning) +1418 +1419 new_samples = [] +1420 new_idl = [] +1421 for name in sorted(obs_a.names): +1422 new_samples.append((obs_a.deltas[name] + obs_a.r_values[name]) * (obs_b.deltas[name] + obs_b.r_values[name])) +1423 new_idl.append(obs_a.idl[name]) +1424 +1425 o = Obs(new_samples, sorted(obs_a.names), idl=new_idl) +1426 o.reweighted = obs_a.reweighted or obs_b.reweighted +1427 return o +1428 +1429 +1430def covariance(obs, visualize=False, correlation=False, smooth=None, **kwargs): +1431 r'''Calculates the error covariance matrix of a set of observables. +1432 +1433 WARNING: This function should be used with care, especially for observables with support on multiple +1434 ensembles with differing autocorrelations. See the notes below for details. +1435 +1436 The gamma method has to be applied first to all observables. +1437 +1438 Parameters +1439 ---------- +1440 obs : list or numpy.ndarray +1441 List or one dimensional array of Obs +1442 visualize : bool +1443 If True plots the corresponding normalized correlation matrix (default False). +1444 correlation : bool +1445 If True the correlation matrix instead of the error covariance matrix is returned (default False). +1446 smooth : None or int +1447 If smooth is an integer 'E' between 2 and the dimension of the matrix minus 1 the eigenvalue +1448 smoothing procedure of hep-lat/9412087 is applied to the correlation matrix which leaves the +1449 largest E eigenvalues essentially unchanged and smoothes the smaller eigenvalues to avoid extremely +1450 small ones. +1451 +1452 Notes +1453 ----- +1454 The error covariance is defined such that it agrees with the squared standard error for two identical observables +1455 $$\operatorname{cov}(a,a)=\sum_{s=1}^N\delta_a^s\delta_a^s/N^2=\Gamma_{aa}(0)/N=\operatorname{var}(a)/N=\sigma_a^2$$ +1456 in the absence of autocorrelation. +1457 The error covariance is estimated by calculating the correlation matrix assuming no autocorrelation and then rescaling the correlation matrix by the full errors including the previous gamma method estimate for the autocorrelation of the observables. The covariance at windowsize 0 is guaranteed to be positive semi-definite +1458 $$\sum_{i,j}v_i\Gamma_{ij}(0)v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i,j}v_i\delta_i^s\delta_j^s v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i}|v_i\delta_i^s|^2\geq 0\,,$$ for every $v\in\mathbb{R}^M$, while such an identity does not hold for larger windows/lags. +1459 For observables defined on a single ensemble our approximation is equivalent to assuming that the integrated autocorrelation time of an off-diagonal element is equal to the geometric mean of the integrated autocorrelation times of the corresponding diagonal elements. 
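A sketch of how covariance is typically called; the correlated toy data, the ensemble name 'toy_ens' and the alias 'pe' are assumptions. The gamma method has to be run on all observables beforehand.

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(8)
n_cfg = 1000
common = rng.normal(0.0, 1.0, n_cfg)   # shared noise makes the two observables correlated
obs1 = pe.Obs([1.0 + 0.1 * common + rng.normal(0.0, 0.05, n_cfg)], ['toy_ens'])
obs2 = pe.Obs([2.0 - 0.1 * common + rng.normal(0.0, 0.05, n_cfg)], ['toy_ens'])

for o in (obs1, obs2):
    o.gamma_method()

cov = pe.covariance([obs1, obs2])                      # 2x2 error covariance matrix
corr = pe.covariance([obs1, obs2], correlation=True)   # normalized correlation matrix instead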
+1460 $$\tau_{\mathrm{int}, ij}=\sqrt{\tau_{\mathrm{int}, i}\times \tau_{\mathrm{int}, j}}$$ +1461 This construction ensures that the estimated covariance matrix is positive semi-definite (up to numerical rounding errors). +1462 ''' +1463 +1464 length = len(obs) +1465 +1466 max_samples = np.max([o.N for o in obs]) +1467 if max_samples <= length and not [item for sublist in [o.cov_names for o in obs] for item in sublist]: +1468 warnings.warn(f"The dimension of the covariance matrix ({length}) is larger or equal to the number of samples ({max_samples}). This will result in a rank deficient matrix.", RuntimeWarning) +1469 +1470 cov = np.zeros((length, length)) +1471 for i in range(length): +1472 for j in range(i, length): +1473 cov[i, j] = _covariance_element(obs[i], obs[j]) +1474 cov = cov + cov.T - np.diag(np.diag(cov)) 1475 -1476 def calc_gamma(deltas1, deltas2, idx1, idx2, new_idx): -1477 deltas1 = _reduce_deltas(deltas1, idx1, new_idx) -1478 deltas2 = _reduce_deltas(deltas2, idx2, new_idx) -1479 return np.sum(deltas1 * deltas2) +1476 corr = np.diag(1 / np.sqrt(np.diag(cov))) @ cov @ np.diag(1 / np.sqrt(np.diag(cov))) +1477 +1478 if isinstance(smooth, int): +1479 corr = _smooth_eigenvalues(corr, smooth) 1480 -1481 if set(obs1.names).isdisjoint(set(obs2.names)): -1482 return 0.0 -1483 -1484 if not hasattr(obs1, 'e_dvalue') or not hasattr(obs2, 'e_dvalue'): -1485 raise Exception('The gamma method has to be applied to both Obs first.') +1481 if visualize: +1482 plt.matshow(corr, vmin=-1, vmax=1) +1483 plt.set_cmap('RdBu') +1484 plt.colorbar() +1485 plt.draw() 1486 -1487 dvalue = 0.0 -1488 -1489 for e_name in obs1.mc_names: -1490 -1491 if e_name not in obs2.mc_names: -1492 continue -1493 -1494 idl_d = {} -1495 for r_name in obs1.e_content[e_name]: -1496 if r_name not in obs2.e_content[e_name]: -1497 continue -1498 idl_d[r_name] = _intersection_idx([obs1.idl[r_name], obs2.idl[r_name]]) +1487 if correlation is True: +1488 return corr +1489 +1490 errors = [o.dvalue for o in obs] +1491 cov = np.diag(errors) @ corr @ np.diag(errors) +1492 +1493 eigenvalues = np.linalg.eigh(cov)[0] +1494 if not np.all(eigenvalues >= 0): +1495 warnings.warn("Covariance matrix is not positive semi-definite (Eigenvalues: " + str(eigenvalues) + ")", RuntimeWarning) +1496 +1497 return cov +1498 1499 -1500 gamma = 0.0 -1501 -1502 for r_name in obs1.e_content[e_name]: -1503 if r_name not in obs2.e_content[e_name]: -1504 continue -1505 if len(idl_d[r_name]) == 0: -1506 continue -1507 gamma += calc_gamma(obs1.deltas[r_name], obs2.deltas[r_name], obs1.idl[r_name], obs2.idl[r_name], idl_d[r_name]) -1508 -1509 if gamma == 0.0: -1510 continue -1511 -1512 gamma_div = 0.0 -1513 for r_name in obs1.e_content[e_name]: -1514 if r_name not in obs2.e_content[e_name]: -1515 continue -1516 if len(idl_d[r_name]) == 0: -1517 continue -1518 gamma_div += np.sqrt(calc_gamma(obs1.deltas[r_name], obs1.deltas[r_name], obs1.idl[r_name], obs1.idl[r_name], idl_d[r_name]) * calc_gamma(obs2.deltas[r_name], obs2.deltas[r_name], obs2.idl[r_name], obs2.idl[r_name], idl_d[r_name])) -1519 gamma /= gamma_div -1520 -1521 dvalue += gamma -1522 -1523 for e_name in obs1.cov_names: +1500def _smooth_eigenvalues(corr, E): +1501 """Eigenvalue smoothing as described in hep-lat/9412087 +1502 +1503 corr : np.ndarray +1504 correlation matrix +1505 E : integer +1506 Number of eigenvalues to be left substantially unchanged +1507 """ +1508 if not (2 < E < corr.shape[0] - 1): +1509 raise Exception(f"'E' has to be between 2 and the dimension of the correlation matrix minus 
1 ({corr.shape[0] - 1}).") +1510 vals, vec = np.linalg.eigh(corr) +1511 lambda_min = np.mean(vals[:-E]) +1512 vals[vals < lambda_min] = lambda_min +1513 vals /= np.mean(vals) +1514 return vec @ np.diag(vals) @ vec.T +1515 +1516 +1517def _covariance_element(obs1, obs2): +1518 """Estimates the covariance of two Obs objects, neglecting autocorrelations.""" +1519 +1520 def calc_gamma(deltas1, deltas2, idx1, idx2, new_idx): +1521 deltas1 = _reduce_deltas(deltas1, idx1, new_idx) +1522 deltas2 = _reduce_deltas(deltas2, idx2, new_idx) +1523 return np.sum(deltas1 * deltas2) 1524 -1525 if e_name not in obs2.cov_names: -1526 continue +1525 if set(obs1.names).isdisjoint(set(obs2.names)): +1526 return 0.0 1527 -1528 dvalue += np.dot(np.transpose(obs1.covobs[e_name].grad), np.dot(obs1.covobs[e_name].cov, obs2.covobs[e_name].grad)).item() -1529 -1530 return dvalue -1531 +1528 if not hasattr(obs1, 'e_dvalue') or not hasattr(obs2, 'e_dvalue'): +1529 raise Exception('The gamma method has to be applied to both Obs first.') +1530 +1531 dvalue = 0.0 1532 -1533def import_jackknife(jacks, name, idl=None): -1534 """Imports jackknife samples and returns an Obs -1535 -1536 Parameters -1537 ---------- -1538 jacks : numpy.ndarray -1539 numpy array containing the mean value as zeroth entry and -1540 the N jackknife samples as first to Nth entry. -1541 name : str -1542 name of the ensemble the samples are defined on. -1543 """ -1544 length = len(jacks) - 1 -1545 prj = (np.ones((length, length)) - (length - 1) * np.identity(length)) -1546 samples = jacks[1:] @ prj -1547 mean = np.mean(samples) -1548 new_obs = Obs([samples - mean], [name], idl=idl, means=[mean]) -1549 new_obs._value = jacks[0] -1550 return new_obs -1551 +1533 for e_name in obs1.mc_names: +1534 +1535 if e_name not in obs2.mc_names: +1536 continue +1537 +1538 idl_d = {} +1539 for r_name in obs1.e_content[e_name]: +1540 if r_name not in obs2.e_content[e_name]: +1541 continue +1542 idl_d[r_name] = _intersection_idx([obs1.idl[r_name], obs2.idl[r_name]]) +1543 +1544 gamma = 0.0 +1545 +1546 for r_name in obs1.e_content[e_name]: +1547 if r_name not in obs2.e_content[e_name]: +1548 continue +1549 if len(idl_d[r_name]) == 0: +1550 continue +1551 gamma += calc_gamma(obs1.deltas[r_name], obs2.deltas[r_name], obs1.idl[r_name], obs2.idl[r_name], idl_d[r_name]) 1552 -1553def merge_obs(list_of_obs): -1554 """Combine all observables in list_of_obs into one new observable +1553 if gamma == 0.0: +1554 continue 1555 -1556 Parameters -1557 ---------- -1558 list_of_obs : list -1559 list of the Obs object to be combined -1560 -1561 Notes -1562 ----- -1563 It is not possible to combine obs which are based on the same replicum -1564 """ -1565 replist = [item for obs in list_of_obs for item in obs.names] -1566 if (len(replist) == len(set(replist))) is False: -1567 raise Exception('list_of_obs contains duplicate replica: %s' % (str(replist))) -1568 if any([len(o.cov_names) for o in list_of_obs]): -1569 raise Exception('Not possible to merge data that contains covobs!') -1570 new_dict = {} -1571 idl_dict = {} -1572 for o in list_of_obs: -1573 new_dict.update({key: o.deltas.get(key, 0) + o.r_values.get(key, 0) -1574 for key in set(o.deltas) | set(o.r_values)}) -1575 idl_dict.update({key: o.idl.get(key, 0) for key in set(o.deltas)}) +1556 gamma_div = 0.0 +1557 for r_name in obs1.e_content[e_name]: +1558 if r_name not in obs2.e_content[e_name]: +1559 continue +1560 if len(idl_d[r_name]) == 0: +1561 continue +1562 gamma_div += np.sqrt(calc_gamma(obs1.deltas[r_name], 
obs1.deltas[r_name], obs1.idl[r_name], obs1.idl[r_name], idl_d[r_name]) * calc_gamma(obs2.deltas[r_name], obs2.deltas[r_name], obs2.idl[r_name], obs2.idl[r_name], idl_d[r_name])) +1563 gamma /= gamma_div +1564 +1565 dvalue += gamma +1566 +1567 for e_name in obs1.cov_names: +1568 +1569 if e_name not in obs2.cov_names: +1570 continue +1571 +1572 dvalue += np.dot(np.transpose(obs1.covobs[e_name].grad), np.dot(obs1.covobs[e_name].cov, obs2.covobs[e_name].grad)).item() +1573 +1574 return dvalue +1575 1576 -1577 names = sorted(new_dict.keys()) -1578 o = Obs([new_dict[name] for name in names], names, idl=[idl_dict[name] for name in names]) -1579 o.reweighted = np.max([oi.reweighted for oi in list_of_obs]) -1580 return o -1581 -1582 -1583def cov_Obs(means, cov, name, grad=None): -1584 """Create an Obs based on mean(s) and a covariance matrix -1585 -1586 Parameters -1587 ---------- -1588 mean : list of floats or float -1589 N mean value(s) of the new Obs -1590 cov : list or array -1591 2d (NxN) Covariance matrix, 1d diagonal entries or 0d covariance -1592 name : str -1593 identifier for the covariance matrix -1594 grad : list or array -1595 Gradient of the Covobs wrt. the means belonging to cov. -1596 """ -1597 -1598 def covobs_to_obs(co): -1599 """Make an Obs out of a Covobs -1600 -1601 Parameters -1602 ---------- -1603 co : Covobs -1604 Covobs to be embedded into the Obs -1605 """ -1606 o = Obs([], [], means=[]) -1607 o._value = co.value -1608 o.names.append(co.name) -1609 o._covobs[co.name] = co -1610 o._dvalue = np.sqrt(co.errsq()) -1611 return o -1612 -1613 ol = [] -1614 if isinstance(means, (float, int)): -1615 means = [means] -1616 -1617 for i in range(len(means)): -1618 ol.append(covobs_to_obs(Covobs(means[i], cov, name, pos=i, grad=grad))) -1619 if ol[0].covobs[name].N != len(means): -1620 raise Exception('You have to provide %d mean values!' % (ol[0].N)) -1621 if len(ol) == 1: -1622 return ol[0] -1623 return ol -1624 +1577def import_jackknife(jacks, name, idl=None): +1578 """Imports jackknife samples and returns an Obs +1579 +1580 Parameters +1581 ---------- +1582 jacks : numpy.ndarray +1583 numpy array containing the mean value as zeroth entry and +1584 the N jackknife samples as first to Nth entry. +1585 name : str +1586 name of the ensemble the samples are defined on. +1587 """ +1588 length = len(jacks) - 1 +1589 prj = (np.ones((length, length)) - (length - 1) * np.identity(length)) +1590 samples = jacks[1:] @ prj +1591 mean = np.mean(samples) +1592 new_obs = Obs([samples - mean], [name], idl=idl, means=[mean]) +1593 new_obs._value = jacks[0] +1594 return new_obs +1595 +1596 +1597def import_bootstrap(boots, name, random_numbers): +1598 """Imports bootstrap samples and returns an Obs +1599 +1600 Parameters +1601 ---------- +1602 boots : numpy.ndarray +1603 numpy array containing the mean value as zeroth entry and +1604 the N bootstrap samples as first to Nth entry. +1605 name : str +1606 name of the ensemble the samples are defined on. +1607 random_numbers : np.ndarray +1608 Array of shape (samples, length) containing the random numbers to generate the bootstrap samples, +1609 where samples is the number of bootstrap samples and length is the length of the original Monte Carlo +1610 chain to be reconstructed. 
+1611 """ +1612 samples, length = random_numbers.shape +1613 if samples != len(boots) - 1: +1614 raise ValueError("Random numbers do not have the correct shape.") +1615 +1616 if samples < length: +1617 raise ValueError("Obs can't be reconstructed if there are fewer bootstrap samples than Monte Carlo data points.") +1618 +1619 proj = np.vstack([np.bincount(o, minlength=length) for o in random_numbers]) / length +1620 +1621 samples = scipy.linalg.lstsq(proj, boots[1:])[0] +1622 ret = Obs([samples], [name]) +1623 ret._value = boots[0] +1624 return ret 1625 -1626def _determine_gap(o, e_content, e_name): -1627 gaps = [] -1628 for r_name in e_content[e_name]: -1629 if isinstance(o.idl[r_name], range): -1630 gaps.append(o.idl[r_name].step) -1631 else: -1632 gaps.append(np.min(np.diff(o.idl[r_name]))) -1633 -1634 gap = min(gaps) -1635 if not np.all([gi % gap == 0 for gi in gaps]): -1636 raise Exception(f"Replica for ensemble {e_name} do not have a common spacing.", gaps) -1637 -1638 return gap -1639 -1640 -1641def _check_lists_equal(idl): -1642 ''' -1643 Use groupby to efficiently check whether all elements of idl are identical. -1644 Returns True if all elements are equal, otherwise False. -1645 -1646 Parameters -1647 ---------- -1648 idl : list of lists, ranges or np.ndarrays -1649 ''' -1650 g = groupby([np.nditer(el) if isinstance(el, np.ndarray) else el for el in idl]) -1651 if next(g, True) and not next(g, False): -1652 return True -1653 return False +1626 +1627def merge_obs(list_of_obs): +1628 """Combine all observables in list_of_obs into one new observable +1629 +1630 Parameters +1631 ---------- +1632 list_of_obs : list +1633 list of the Obs object to be combined +1634 +1635 Notes +1636 ----- +1637 It is not possible to combine obs which are based on the same replicum +1638 """ +1639 replist = [item for obs in list_of_obs for item in obs.names] +1640 if (len(replist) == len(set(replist))) is False: +1641 raise Exception('list_of_obs contains duplicate replica: %s' % (str(replist))) +1642 if any([len(o.cov_names) for o in list_of_obs]): +1643 raise Exception('Not possible to merge data that contains covobs!') +1644 new_dict = {} +1645 idl_dict = {} +1646 for o in list_of_obs: +1647 new_dict.update({key: o.deltas.get(key, 0) + o.r_values.get(key, 0) +1648 for key in set(o.deltas) | set(o.r_values)}) +1649 idl_dict.update({key: o.idl.get(key, 0) for key in set(o.deltas)}) +1650 +1651 names = sorted(new_dict.keys()) +1652 o = Obs([new_dict[name] for name in names], names, idl=[idl_dict[name] for name in names]) +1653 o.reweighted = np.max([oi.reweighted for oi in list_of_obs]) +1654 return o +1655 +1656 +1657def cov_Obs(means, cov, name, grad=None): +1658 """Create an Obs based on mean(s) and a covariance matrix +1659 +1660 Parameters +1661 ---------- +1662 mean : list of floats or float +1663 N mean value(s) of the new Obs +1664 cov : list or array +1665 2d (NxN) Covariance matrix, 1d diagonal entries or 0d covariance +1666 name : str +1667 identifier for the covariance matrix +1668 grad : list or array +1669 Gradient of the Covobs wrt. the means belonging to cov. 
+1670 """ +1671 +1672 def covobs_to_obs(co): +1673 """Make an Obs out of a Covobs +1674 +1675 Parameters +1676 ---------- +1677 co : Covobs +1678 Covobs to be embedded into the Obs +1679 """ +1680 o = Obs([], [], means=[]) +1681 o._value = co.value +1682 o.names.append(co.name) +1683 o._covobs[co.name] = co +1684 o._dvalue = np.sqrt(co.errsq()) +1685 return o +1686 +1687 ol = [] +1688 if isinstance(means, (float, int)): +1689 means = [means] +1690 +1691 for i in range(len(means)): +1692 ol.append(covobs_to_obs(Covobs(means[i], cov, name, pos=i, grad=grad))) +1693 if ol[0].covobs[name].N != len(means): +1694 raise Exception('You have to provide %d mean values!' % (ol[0].N)) +1695 if len(ol) == 1: +1696 return ol[0] +1697 return ol +1698 +1699 +1700def _determine_gap(o, e_content, e_name): +1701 gaps = [] +1702 for r_name in e_content[e_name]: +1703 if isinstance(o.idl[r_name], range): +1704 gaps.append(o.idl[r_name].step) +1705 else: +1706 gaps.append(np.min(np.diff(o.idl[r_name]))) +1707 +1708 gap = min(gaps) +1709 if not np.all([gi % gap == 0 for gi in gaps]): +1710 raise Exception(f"Replica for ensemble {e_name} do not have a common spacing.", gaps) +1711 +1712 return gap +1713 +1714 +1715def _check_lists_equal(idl): +1716 ''' +1717 Use groupby to efficiently check whether all elements of idl are identical. +1718 Returns True if all elements are equal, otherwise False. +1719 +1720 Parameters +1721 ---------- +1722 idl : list of lists, ranges or np.ndarrays +1723 ''' +1724 g = groupby([np.nditer(el) if isinstance(el, np.ndarray) else el for el in idl]) +1725 if next(g, True) and not next(g, False): +1726 return True +1727 return False @@ -1984,859 +2064,902 @@ -
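The newly added import_bootstrap reconstructs the underlying Monte Carlo chain from bootstrap samples via a least-squares inversion of the resampling matrix, which is why the random numbers used to draw the samples have to be passed along. A minimal round-trip sketch with toy data (ensemble name, sample sizes and the pyerrors.obs module path are chosen for illustration only):

    import numpy as np
    import pyerrors as pe

    rng = np.random.default_rng(1)
    chain = rng.normal(0.5, 0.1, 500)               # toy Monte Carlo history
    obs = pe.Obs([chain], ["ensemble_A"])

    n_boot = 1000                                   # at least as many samples as configurations
    random_numbers = rng.integers(0, len(chain), size=(n_boot, len(chain)))
    boots = np.empty(n_boot + 1)
    boots[0] = obs.value                            # zeroth entry: mean value
    boots[1:] = [chain[idx].mean() for idx in random_numbers]

    reconstructed = pe.obs.import_bootstrap(boots, "ensemble_A", random_numbers)
    reconstructed.gamma_method()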
     18class Obs:
    - 19    """Class for a general observable.
    - 20
    - 21    Instances of Obs are the basic objects of a pyerrors error analysis.
    - 22    They are initialized with a list which contains arrays of samples for
    - 23    different ensembles/replica and another list of same length which contains
    - 24    the names of the ensembles/replica. Mathematical operations can be
    - 25    performed on instances. The result is another instance of Obs. The error of
    - 26    an instance can be computed with the gamma_method. Also contains additional
    - 27    methods for output and visualization of the error calculation.
    - 28
    - 29    Attributes
    - 30    ----------
    - 31    S_global : float
    - 32        Standard value for S (default 2.0)
    - 33    S_dict : dict
    - 34        Dictionary for S values. If an entry for a given ensemble
    - 35        exists this overwrites the standard value for that ensemble.
    - 36    tau_exp_global : float
    - 37        Standard value for tau_exp (default 0.0)
    - 38    tau_exp_dict : dict
    - 39        Dictionary for tau_exp values. If an entry for a given ensemble exists
    - 40        this overwrites the standard value for that ensemble.
    - 41    N_sigma_global : float
    - 42        Standard value for N_sigma (default 1.0)
    - 43    N_sigma_dict : dict
    - 44        Dictionary for N_sigma values. If an entry for a given ensemble exists
    - 45        this overwrites the standard value for that ensemble.
    - 46    """
    - 47    __slots__ = ['names', 'shape', 'r_values', 'deltas', 'N', '_value', '_dvalue',
    - 48                 'ddvalue', 'reweighted', 'S', 'tau_exp', 'N_sigma',
    - 49                 'e_dvalue', 'e_ddvalue', 'e_tauint', 'e_dtauint',
    - 50                 'e_windowsize', 'e_rho', 'e_drho', 'e_n_tauint', 'e_n_dtauint',
    - 51                 'idl', 'tag', '_covobs', '__dict__']
    - 52
    - 53    S_global = 2.0
    - 54    S_dict = {}
    - 55    tau_exp_global = 0.0
    - 56    tau_exp_dict = {}
    - 57    N_sigma_global = 1.0
    - 58    N_sigma_dict = {}
    - 59
    - 60    def __init__(self, samples, names, idl=None, **kwargs):
    - 61        """ Initialize Obs object.
    - 62
    - 63        Parameters
    - 64        ----------
    - 65        samples : list
    - 66            list of numpy arrays containing the Monte Carlo samples
    - 67        names : list
    - 68            list of strings labeling the individual samples
    - 69        idl : list, optional
    - 70            list of ranges or lists on which the samples are defined
    - 71        """
    - 72
    - 73        if kwargs.get("means") is None and len(samples):
    - 74            if len(samples) != len(names):
    - 75                raise ValueError('Length of samples and names incompatible.')
    - 76            if idl is not None:
    - 77                if len(idl) != len(names):
    - 78                    raise ValueError('Length of idl incompatible with samples and names.')
    - 79            name_length = len(names)
    - 80            if name_length > 1:
    - 81                if name_length != len(set(names)):
    - 82                    raise ValueError('Names are not unique.')
    - 83                if not all(isinstance(x, str) for x in names):
    - 84                    raise TypeError('All names have to be strings.')
    - 85            else:
    - 86                if not isinstance(names[0], str):
    - 87                    raise TypeError('All names have to be strings.')
    - 88            if min(len(x) for x in samples) <= 4:
    - 89                raise ValueError('Samples have to have at least 5 entries.')
    - 90
    - 91        self.names = sorted(names)
    - 92        self.shape = {}
    - 93        self.r_values = {}
    - 94        self.deltas = {}
    - 95        self._covobs = {}
    - 96
    - 97        self._value = 0
    - 98        self.N = 0
    - 99        self.idl = {}
    -100        if idl is not None:
    -101            for name, idx in sorted(zip(names, idl)):
    -102                if isinstance(idx, range):
    -103                    self.idl[name] = idx
    -104                elif isinstance(idx, (list, np.ndarray)):
    -105                    dc = np.unique(np.diff(idx))
    -106                    if np.any(dc < 0):
    -107                        raise ValueError("Unsorted idx for idl[%s]" % (name))
    -108                    if len(dc) == 1:
    -109                        self.idl[name] = range(idx[0], idx[-1] + dc[0], dc[0])
    -110                    else:
    -111                        self.idl[name] = list(idx)
    -112                else:
    -113                    raise TypeError('incompatible type for idl[%s].' % (name))
    -114        else:
    -115            for name, sample in sorted(zip(names, samples)):
    -116                self.idl[name] = range(1, len(sample) + 1)
    -117
    -118        if kwargs.get("means") is not None:
    -119            for name, sample, mean in sorted(zip(names, samples, kwargs.get("means"))):
    -120                self.shape[name] = len(self.idl[name])
    -121                self.N += self.shape[name]
    -122                self.r_values[name] = mean
    -123                self.deltas[name] = sample
    -124        else:
    -125            for name, sample in sorted(zip(names, samples)):
    -126                self.shape[name] = len(self.idl[name])
    -127                self.N += self.shape[name]
    -128                if len(sample) != self.shape[name]:
    -129                    raise ValueError('Incompatible samples and idx for %s: %d vs. %d' % (name, len(sample), self.shape[name]))
    -130                self.r_values[name] = np.mean(sample)
    -131                self.deltas[name] = sample - self.r_values[name]
    -132                self._value += self.shape[name] * self.r_values[name]
    -133            self._value /= self.N
    -134
    -135        self._dvalue = 0.0
    -136        self.ddvalue = 0.0
    -137        self.reweighted = False
    -138
    -139        self.tag = None
    -140
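For orientation, a minimal sketch of the constructor in use; the replica labels follow the 'ensemble|replicum' convention used throughout and all numbers are purely illustrative:

    import numpy as np
    import pyerrors as pe

    rng = np.random.default_rng(0)
    # Two replica of the same ensemble "A", each with its own Monte Carlo chain.
    my_obs = pe.Obs([rng.normal(1.0, 0.2, 1000), rng.normal(1.0, 0.2, 800)],
                    ["A|r0", "A|r1"])
    my_obs.gamma_method()                  # error estimation, see gamma_method below
    print(my_obs.value, my_obs.dvalue)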
    -141    @property
    -142    def value(self):
    -143        return self._value
    -144
    -145    @property
    -146    def dvalue(self):
    -147        return self._dvalue
    -148
    -149    @property
    -150    def e_names(self):
    -151        return sorted(set([o.split('|')[0] for o in self.names]))
    -152
    -153    @property
    -154    def cov_names(self):
    -155        return sorted(set([o for o in self.covobs.keys()]))
    -156
    -157    @property
    -158    def mc_names(self):
    -159        return sorted(set([o.split('|')[0] for o in self.names if o not in self.cov_names]))
    -160
    -161    @property
    -162    def e_content(self):
    -163        res = {}
    -164        for e, e_name in enumerate(self.e_names):
    -165            res[e_name] = sorted(filter(lambda x: x.startswith(e_name + '|'), self.names))
    -166            if e_name in self.names:
    -167                res[e_name].append(e_name)
    -168        return res
    -169
    -170    @property
    -171    def covobs(self):
    -172        return self._covobs
    -173
    -174    def gamma_method(self, **kwargs):
    -175        """Estimate the error and related properties of the Obs.
    -176
    -177        Parameters
    -178        ----------
    -179        S : float
    -180            specifies a custom value for the parameter S (default 2.0).
    -181            If set to 0 it is assumed that the data exhibits no
     -182            autocorrelation. In this case the error estimate coincides
    -183            with the sample standard error.
    -184        tau_exp : float
    -185            positive value triggers the critical slowing down analysis
    -186            (default 0.0).
    -187        N_sigma : float
    -188            number of standard deviations from zero until the tail is
    -189            attached to the autocorrelation function (default 1).
    -190        fft : bool
    -191            determines whether the fft algorithm is used for the computation
    -192            of the autocorrelation function (default True)
    -193        """
    -194
    -195        e_content = self.e_content
    -196        self.e_dvalue = {}
    -197        self.e_ddvalue = {}
    -198        self.e_tauint = {}
    -199        self.e_dtauint = {}
    -200        self.e_windowsize = {}
    -201        self.e_n_tauint = {}
    -202        self.e_n_dtauint = {}
    -203        e_gamma = {}
    -204        self.e_rho = {}
    -205        self.e_drho = {}
    -206        self._dvalue = 0
    -207        self.ddvalue = 0
    -208
    -209        self.S = {}
    -210        self.tau_exp = {}
    -211        self.N_sigma = {}
    -212
    -213        if kwargs.get('fft') is False:
    -214            fft = False
    -215        else:
    -216            fft = True
    -217
    -218        def _parse_kwarg(kwarg_name):
    -219            if kwarg_name in kwargs:
    -220                tmp = kwargs.get(kwarg_name)
    -221                if isinstance(tmp, (int, float)):
    -222                    if tmp < 0:
    -223                        raise Exception(kwarg_name + ' has to be larger or equal to 0.')
    -224                    for e, e_name in enumerate(self.e_names):
    -225                        getattr(self, kwarg_name)[e_name] = tmp
    -226                else:
    -227                    raise TypeError(kwarg_name + ' is not in proper format.')
    -228            else:
    -229                for e, e_name in enumerate(self.e_names):
    -230                    if e_name in getattr(Obs, kwarg_name + '_dict'):
    -231                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name]
    -232                    else:
    -233                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global')
    -234
    -235        _parse_kwarg('S')
    -236        _parse_kwarg('tau_exp')
    -237        _parse_kwarg('N_sigma')
    -238
    -239        for e, e_name in enumerate(self.mc_names):
    -240            gapsize = _determine_gap(self, e_content, e_name)
    -241
    -242            r_length = []
    -243            for r_name in e_content[e_name]:
    -244                if isinstance(self.idl[r_name], range):
    -245                    r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize)
    -246                else:
    -247                    r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize)
    -248
    -249            e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]])
    -250            w_max = max(r_length) // 2
    -251            e_gamma[e_name] = np.zeros(w_max)
    -252            self.e_rho[e_name] = np.zeros(w_max)
    -253            self.e_drho[e_name] = np.zeros(w_max)
    -254
    -255            for r_name in e_content[e_name]:
    -256                e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    -257
    -258            gamma_div = np.zeros(w_max)
    -259            for r_name in e_content[e_name]:
    -260                gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    -261            gamma_div[gamma_div < 1] = 1.0
    -262            e_gamma[e_name] /= gamma_div[:w_max]
    -263
    -264            if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny:  # Prevent division by zero
    -265                self.e_tauint[e_name] = 0.5
    -266                self.e_dtauint[e_name] = 0.0
    -267                self.e_dvalue[e_name] = 0.0
    -268                self.e_ddvalue[e_name] = 0.0
    -269                self.e_windowsize[e_name] = 0
    -270                continue
    -271
    -272            self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0]
    -273            self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:])))
    -274            # Make sure no entry of tauint is smaller than 0.5
    -275            self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps
    -276            # hep-lat/0306017 eq. (42)
    -277            self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N)
    -278            self.e_n_dtauint[e_name][0] = 0.0
    -279
    -280            def _compute_drho(i):
    -281                tmp = (self.e_rho[e_name][i + 1:w_max]
    -282                       + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1],
    -283                                         self.e_rho[e_name][1:max(1, w_max - 2 * i)]])
    -284                       - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i])
    -285                self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N)
    -286
    -287            if self.tau_exp[e_name] > 0:
    -288                _compute_drho(1)
    -289                texp = self.tau_exp[e_name]
    -290                # Critical slowing down analysis
    -291                if w_max // 2 <= 1:
    -292                    raise Exception("Need at least 8 samples for tau_exp error analysis")
    -293                for n in range(1, w_max // 2):
    -294                    _compute_drho(n + 1)
    -295                    if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2:
    -296                        # Bias correction hep-lat/0306017 eq. (49) included
    -297                        self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1])  # The absolute makes sure, that the tail contribution is always positive
    -298                        self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2)
    -299                        # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2
    -300                        self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    -301                        self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    -302                        self.e_windowsize[e_name] = n
    -303                        break
    -304            else:
    -305                if self.S[e_name] == 0.0:
    -306                    self.e_tauint[e_name] = 0.5
    -307                    self.e_dtauint[e_name] = 0.0
    -308                    self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1))
    -309                    self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N)
    -310                    self.e_windowsize[e_name] = 0
    -311                else:
    -312                    # Standard automatic windowing procedure
    -313                    tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1))
    -314                    g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N)
    -315                    for n in range(1, w_max):
    -316                        if g_w[n - 1] < 0 or n >= w_max - 1:
    -317                            _compute_drho(n)
    -318                            self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N)  # Bias correction hep-lat/0306017 eq. (49)
    -319                            self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n]
    -320                            self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    -321                            self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    -322                            self.e_windowsize[e_name] = n
    -323                            break
    -324
    -325            self._dvalue += self.e_dvalue[e_name] ** 2
    -326            self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2
    -327
    -328        for e_name in self.cov_names:
    -329            self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq())
    -330            self.e_ddvalue[e_name] = 0
    -331            self._dvalue += self.e_dvalue[e_name]**2
    -332
    -333        self._dvalue = np.sqrt(self._dvalue)
    -334        if self._dvalue == 0.0:
    -335            self.ddvalue = 0.0
    -336        else:
    -337            self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue
    -338        return
    -339
    -340    gm = gamma_method
    -341
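The keyword arguments documented above can be varied per call; a short sketch, assuming my_obs is an Obs as constructed earlier:

    my_obs.gamma_method(S=3.0)                  # wider automatic summation window
    my_obs.gamma_method(tau_exp=10, N_sigma=2)  # critical slowing down analysis with attached tail
    my_obs.gm(S=0)                              # no autocorrelation assumed: plain standard error
    print(my_obs.e_tauint, my_obs.e_windowsize)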
    -342    def _calc_gamma(self, deltas, idx, shape, w_max, fft, gapsize):
    -343        """Calculate Gamma_{AA} from the deltas, which are defined on idx.
    -344           idx is assumed to be a contiguous range (possibly with a stepsize != 1)
    -345
    -346        Parameters
    -347        ----------
    -348        deltas : list
    -349            List of fluctuations
    -350        idx : list
    -351            List or range of configurations on which the deltas are defined.
    -352        shape : int
    -353            Number of configurations in idx.
    -354        w_max : int
    -355            Upper bound for the summation window.
    -356        fft : bool
    -357            determines whether the fft algorithm is used for the computation
    -358            of the autocorrelation function.
    -359        gapsize : int
    -360            The target distance between two configurations. If longer distances
    -361            are found in idx, the data is expanded.
    -362        """
    -363        gamma = np.zeros(w_max)
    -364        deltas = _expand_deltas(deltas, idx, shape, gapsize)
    -365        new_shape = len(deltas)
    -366        if fft:
    -367            max_gamma = min(new_shape, w_max)
    -368            # The padding for the fft has to be even
    -369            padding = new_shape + max_gamma + (new_shape + max_gamma) % 2
    -370            gamma[:max_gamma] += np.fft.irfft(np.abs(np.fft.rfft(deltas, padding)) ** 2)[:max_gamma]
    -371        else:
    -372            for n in range(w_max):
    -373                if new_shape - n >= 0:
    -374                    gamma[n] += deltas[0:new_shape - n].dot(deltas[n:new_shape])
    -375
    -376        return gamma
    -377
    -378    def details(self, ens_content=True):
    -379        """Output detailed properties of the Obs.
    -380
    -381        Parameters
    -382        ----------
    -383        ens_content : bool
    -384            print details about the ensembles and replica if true.
    -385        """
    -386        if self.tag is not None:
    -387            print("Description:", self.tag)
    -388        if not hasattr(self, 'e_dvalue'):
    -389            print('Result\t %3.8e' % (self.value))
    -390        else:
    -391            if self.value == 0.0:
    -392                percentage = np.nan
    -393            else:
    -394                percentage = np.abs(self._dvalue / self.value) * 100
    -395            print('Result\t %3.8e +/- %3.8e +/- %3.8e (%3.3f%%)' % (self.value, self._dvalue, self.ddvalue, percentage))
    -396            if len(self.e_names) > 1:
    -397                print(' Ensemble errors:')
    -398            e_content = self.e_content
    -399            for e_name in self.mc_names:
    -400                gap = _determine_gap(self, e_content, e_name)
    -401
    -402                if len(self.e_names) > 1:
    -403                    print('', e_name, '\t %3.6e +/- %3.6e' % (self.e_dvalue[e_name], self.e_ddvalue[e_name]))
    -404                tau_string = " \N{GREEK SMALL LETTER TAU}_int\t " + _format_uncertainty(self.e_tauint[e_name], self.e_dtauint[e_name])
    -405                tau_string += f" in units of {gap} config"
    -406                if gap > 1:
    -407                    tau_string += "s"
    -408                if self.tau_exp[e_name] > 0:
    -409                    tau_string = f"{tau_string: <45}" + '\t(\N{GREEK SMALL LETTER TAU}_exp=%3.2f, N_\N{GREEK SMALL LETTER SIGMA}=%1.0i)' % (self.tau_exp[e_name], self.N_sigma[e_name])
    -410                else:
    -411                    tau_string = f"{tau_string: <45}" + '\t(S=%3.2f)' % (self.S[e_name])
    -412                print(tau_string)
    -413            for e_name in self.cov_names:
    -414                print('', e_name, '\t %3.8e' % (self.e_dvalue[e_name]))
    -415        if ens_content is True:
    -416            if len(self.e_names) == 1:
    -417                print(self.N, 'samples in', len(self.e_names), 'ensemble:')
    -418            else:
    -419                print(self.N, 'samples in', len(self.e_names), 'ensembles:')
    -420            my_string_list = []
    -421            for key, value in sorted(self.e_content.items()):
    -422                if key not in self.covobs:
    -423                    my_string = '  ' + "\u00B7 Ensemble '" + key + "' "
    -424                    if len(value) == 1:
    -425                        my_string += f': {self.shape[value[0]]} configurations'
    -426                        if isinstance(self.idl[value[0]], range):
    -427                            my_string += f' (from {self.idl[value[0]].start} to {self.idl[value[0]][-1]}' + int(self.idl[value[0]].step != 1) * f' in steps of {self.idl[value[0]].step}' + ')'
    -428                        else:
    -429                            my_string += f' (irregular range from {self.idl[value[0]][0]} to {self.idl[value[0]][-1]})'
    -430                    else:
    -431                        sublist = []
    -432                        for v in value:
    -433                            my_substring = '    ' + "\u00B7 Replicum '" + v[len(key) + 1:] + "' "
    -434                            my_substring += f': {self.shape[v]} configurations'
    -435                            if isinstance(self.idl[v], range):
    -436                                my_substring += f' (from {self.idl[v].start} to {self.idl[v][-1]}' + int(self.idl[v].step != 1) * f' in steps of {self.idl[v].step}' + ')'
    -437                            else:
    -438                                my_substring += f' (irregular range from {self.idl[v][0]} to {self.idl[v][-1]})'
    -439                            sublist.append(my_substring)
    -440
    -441                        my_string += '\n' + '\n'.join(sublist)
    -442                else:
    -443                    my_string = '  ' + "\u00B7 Covobs   '" + key + "' "
    -444                my_string_list.append(my_string)
    -445            print('\n'.join(my_string_list))
    -446
    -447    def reweight(self, weight):
     -448        """Reweight the obs with given reweighting factors.
    -449
    -450        Parameters
    -451        ----------
    -452        weight : Obs
    -453            Reweighting factor. An Observable that has to be defined on a superset of the
    -454            configurations in obs[i].idl for all i.
    -455        all_configs : bool
    -456            if True, the reweighted observables are normalized by the average of
    -457            the reweighting factor on all configurations in weight.idl and not
    -458            on the configurations in obs[i].idl. Default False.
    -459        """
    -460        return reweight(weight, [self])[0]
    -461
    -462    def is_zero_within_error(self, sigma=1):
    -463        """Checks whether the observable is zero within 'sigma' standard errors.
    -464
    -465        Parameters
    -466        ----------
    -467        sigma : int
    -468            Number of standard errors used for the check.
    -469
    -470        Works only properly when the gamma method was run.
     -471        Only works properly when the gamma method was run.
    -472        return self.is_zero() or np.abs(self.value) <= sigma * self._dvalue
    -473
    -474    def is_zero(self, atol=1e-10):
    -475        """Checks whether the observable is zero within a given tolerance.
    -476
    -477        Parameters
    -478        ----------
    -479        atol : float
    -480            Absolute tolerance (for details see numpy documentation).
    -481        """
    -482        return np.isclose(0.0, self.value, 1e-14, atol) and all(np.allclose(0.0, delta, 1e-14, atol) for delta in self.deltas.values()) and all(np.allclose(0.0, delta.errsq(), 1e-14, atol) for delta in self.covobs.values())
    -483
    -484    def plot_tauint(self, save=None):
    -485        """Plot integrated autocorrelation time for each ensemble.
    -486
    -487        Parameters
    -488        ----------
    -489        save : str
     -490            saves the figure to a file named 'save' if specified.
    -491        """
    -492        if not hasattr(self, 'e_dvalue'):
    -493            raise Exception('Run the gamma method first.')
    -494
    -495        for e, e_name in enumerate(self.mc_names):
    -496            fig = plt.figure()
    -497            plt.xlabel(r'$W$')
    -498            plt.ylabel(r'$\tau_\mathrm{int}$')
    -499            length = int(len(self.e_n_tauint[e_name]))
    -500            if self.tau_exp[e_name] > 0:
    -501                base = self.e_n_tauint[e_name][self.e_windowsize[e_name]]
    -502                x_help = np.arange(2 * self.tau_exp[e_name])
    -503                y_help = (x_help + 1) * np.abs(self.e_rho[e_name][self.e_windowsize[e_name] + 1]) * (1 - x_help / (2 * (2 * self.tau_exp[e_name] - 1))) + base
    -504                x_arr = np.arange(self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name])
    -505                plt.plot(x_arr, y_help, 'C' + str(e), linewidth=1, ls='--', marker=',')
    -506                plt.errorbar([self.e_windowsize[e_name] + 2 * self.tau_exp[e_name]], [self.e_tauint[e_name]],
    -507                             yerr=[self.e_dtauint[e_name]], fmt='C' + str(e), linewidth=1, capsize=2, marker='o', mfc=plt.rcParams['axes.facecolor'])
    -508                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    -509                label = e_name + r', $\tau_\mathrm{exp}$=' + str(np.around(self.tau_exp[e_name], decimals=2))
    -510            else:
    -511                label = e_name + ', S=' + str(np.around(self.S[e_name], decimals=2))
    -512                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    -513
    -514            plt.errorbar(np.arange(length)[:int(xmax) + 1], self.e_n_tauint[e_name][:int(xmax) + 1], yerr=self.e_n_dtauint[e_name][:int(xmax) + 1], linewidth=1, capsize=2, label=label)
    -515            plt.axvline(x=self.e_windowsize[e_name], color='C' + str(e), alpha=0.5, marker=',', ls='--')
    -516            plt.legend()
    -517            plt.xlim(-0.5, xmax)
    -518            ylim = plt.ylim()
    -519            plt.ylim(bottom=0.0, top=max(1.0, ylim[1]))
    -520            plt.draw()
    -521            if save:
    -522                fig.savefig(save + "_" + str(e))
    -523
    -524    def plot_rho(self, save=None):
     -525        """Plot normalized autocorrelation function for each ensemble.
    -526
    -527        Parameters
    -528        ----------
    -529        save : str
     -530            saves the figure to a file named 'save' if specified.
    -531        """
    -532        if not hasattr(self, 'e_dvalue'):
    -533            raise Exception('Run the gamma method first.')
    -534        for e, e_name in enumerate(self.mc_names):
    -535            fig = plt.figure()
    -536            plt.xlabel('W')
    -537            plt.ylabel('rho')
    -538            length = int(len(self.e_drho[e_name]))
    -539            plt.errorbar(np.arange(length), self.e_rho[e_name][:length], yerr=self.e_drho[e_name][:], linewidth=1, capsize=2)
    -540            plt.axvline(x=self.e_windowsize[e_name], color='r', alpha=0.25, ls='--', marker=',')
    -541            if self.tau_exp[e_name] > 0:
    -542                plt.plot([self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]],
    -543                         [self.e_rho[e_name][self.e_windowsize[e_name] + 1], 0], 'k-', lw=1)
    -544                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    -545                plt.title('Rho ' + e_name + r', tau\_exp=' + str(np.around(self.tau_exp[e_name], decimals=2)))
    -546            else:
    -547                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    -548                plt.title('Rho ' + e_name + ', S=' + str(np.around(self.S[e_name], decimals=2)))
    -549            plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1)
    -550            plt.xlim(-0.5, xmax)
    -551            plt.draw()
    -552            if save:
    -553                fig.savefig(save + "_" + str(e))
    -554
    -555    def plot_rep_dist(self):
    -556        """Plot replica distribution for each ensemble with more than one replicum."""
    -557        if not hasattr(self, 'e_dvalue'):
    -558            raise Exception('Run the gamma method first.')
    -559        for e, e_name in enumerate(self.mc_names):
    -560            if len(self.e_content[e_name]) == 1:
    -561                print('No replica distribution for a single replicum (', e_name, ')')
    -562                continue
    -563            r_length = []
    -564            sub_r_mean = 0
    -565            for r, r_name in enumerate(self.e_content[e_name]):
    -566                r_length.append(len(self.deltas[r_name]))
    -567                sub_r_mean += self.shape[r_name] * self.r_values[r_name]
    -568            e_N = np.sum(r_length)
    -569            sub_r_mean /= e_N
    -570            arr = np.zeros(len(self.e_content[e_name]))
    -571            for r, r_name in enumerate(self.e_content[e_name]):
    -572                arr[r] = (self.r_values[r_name] - sub_r_mean) / (self.e_dvalue[e_name] * np.sqrt(e_N / self.shape[r_name] - 1))
    -573            plt.hist(arr, rwidth=0.8, bins=len(self.e_content[e_name]))
    -574            plt.title('Replica distribution' + e_name + ' (mean=0, var=1)')
    -575            plt.draw()
    -576
    -577    def plot_history(self, expand=True):
    -578        """Plot derived Monte Carlo history for each ensemble
    -579
    -580        Parameters
    -581        ----------
    -582        expand : bool
    -583            show expanded history for irregular Monte Carlo chains (default: True).
    -584        """
    -585        for e, e_name in enumerate(self.mc_names):
    -586            plt.figure()
    -587            r_length = []
    -588            tmp = []
    -589            tmp_expanded = []
    -590            for r, r_name in enumerate(self.e_content[e_name]):
    -591                tmp.append(self.deltas[r_name] + self.r_values[r_name])
    -592                if expand:
    -593                    tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name], 1) + self.r_values[r_name])
    -594                    r_length.append(len(tmp_expanded[-1]))
    -595                else:
    -596                    r_length.append(len(tmp[-1]))
    -597            e_N = np.sum(r_length)
    -598            x = np.arange(e_N)
    -599            y_test = np.concatenate(tmp, axis=0)
    -600            if expand:
    -601                y = np.concatenate(tmp_expanded, axis=0)
    -602            else:
    -603                y = y_test
    -604            plt.errorbar(x, y, fmt='.', markersize=3)
    -605            plt.xlim(-0.5, e_N - 0.5)
    -606            plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})')
    -607            plt.draw()
    -608
    -609    def plot_piechart(self, save=None):
    -610        """Plot piechart which shows the fractional contribution of each
    -611        ensemble to the error and returns a dictionary containing the fractions.
    -612
    -613        Parameters
    -614        ----------
    -615        save : str
     -616            saves the figure to a file named 'save' if specified.
    -617        """
    -618        if not hasattr(self, 'e_dvalue'):
    -619            raise Exception('Run the gamma method first.')
    -620        if np.isclose(0.0, self._dvalue, atol=1e-15):
    -621            raise Exception('Error is 0.0')
    -622        labels = self.e_names
    -623        sizes = [self.e_dvalue[name] ** 2 for name in labels] / self._dvalue ** 2
    -624        fig1, ax1 = plt.subplots()
    -625        ax1.pie(sizes, labels=labels, startangle=90, normalize=True)
    -626        ax1.axis('equal')
    -627        plt.draw()
    -628        if save:
    -629            fig1.savefig(save)
    -630
    -631        return dict(zip(labels, sizes))
    -632
    -633    def dump(self, filename, datatype="json.gz", description="", **kwargs):
    -634        """Dump the Obs to a file 'name' of chosen format.
    -635
    -636        Parameters
    -637        ----------
    -638        filename : str
    -639            name of the file to be saved.
    -640        datatype : str
    -641            Format of the exported file. Supported formats include
    -642            "json.gz" and "pickle"
    -643        description : str
    -644            Description for output file, only relevant for json.gz format.
    -645        path : str
    -646            specifies a custom path for the file (default '.')
    -647        """
    -648        if 'path' in kwargs:
    -649            file_name = kwargs.get('path') + '/' + filename
    -650        else:
    -651            file_name = filename
    -652
    -653        if datatype == "json.gz":
    -654            from .input.json import dump_to_json
    -655            dump_to_json([self], file_name, description=description)
    -656        elif datatype == "pickle":
    -657            with open(file_name + '.p', 'wb') as fb:
    -658                pickle.dump(self, fb)
    -659        else:
    -660            raise Exception("Unknown datatype " + str(datatype))
    -661
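A brief usage sketch (file names and path are illustrative):

    my_obs.dump("my_observable")                                 # default json.gz format
    my_obs.dump("my_observable", datatype="pickle", path=".")    # pickled to ./my_observable.p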
    -662    def export_jackknife(self):
    -663        """Export jackknife samples from the Obs
    -664
    -665        Returns
    -666        -------
    -667        numpy.ndarray
    -668            Returns a numpy array of length N + 1 where N is the number of samples
    -669            for the given ensemble and replicum. The zeroth entry of the array contains
    -670            the mean value of the Obs, entries 1 to N contain the N jackknife samples
    -671            derived from the Obs. The current implementation only works for observables
    -672            defined on exactly one ensemble and replicum. The derived jackknife samples
    -673            should agree with samples from a full jackknife analysis up to O(1/N).
    -674        """
    -675
    -676        if len(self.names) != 1:
    -677            raise Exception("'export_jackknife' is only implemented for Obs defined on one ensemble and replicum.")
    -678
    -679        name = self.names[0]
    -680        full_data = self.deltas[name] + self.r_values[name]
    -681        n = full_data.size
    -682        mean = self.value
    -683        tmp_jacks = np.zeros(n + 1)
    -684        tmp_jacks[0] = mean
    -685        tmp_jacks[1:] = (n * mean - full_data) / (n - 1)
    -686        return tmp_jacks
    -687
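Together with the module-level import_jackknife shown above, this allows a jackknife round trip; a minimal sketch for an Obs defined on a single ensemble and replicum (name and numbers illustrative):

    import numpy as np
    import pyerrors as pe

    single = pe.Obs([np.random.default_rng(2).normal(1.0, 0.2, 1000)], ["A|r0"])
    jacks = single.export_jackknife()        # [mean, N jackknife samples]
    recovered = pe.obs.import_jackknife(jacks, "A|r0")
    recovered.gamma_method()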
    -688    def __float__(self):
    -689        return float(self.value)
    -690
    -691    def __repr__(self):
    -692        return 'Obs[' + str(self) + ']'
    -693
    -694    def __str__(self):
    -695        return _format_uncertainty(self.value, self._dvalue)
    -696
    -697    def __format__(self, format_type):
    -698        if format_type == "":
    -699            significance = 2
    -700        else:
    -701            significance = int(float(format_type.replace("+", "").replace("-", "")))
    -702        my_str = _format_uncertainty(self.value, self._dvalue,
    -703                                     significance=significance)
    -704        for char in ["+", " "]:
    -705            if format_type.startswith(char):
    -706                if my_str[0] != "-":
    -707                    my_str = char + my_str
    -708        return my_str
    -709
    -710    def __hash__(self):
    -711        hash_tuple = (np.array([self.value]).astype(np.float32).data.tobytes(),)
    -712        hash_tuple += tuple([o.astype(np.float32).data.tobytes() for o in self.deltas.values()])
    -713        hash_tuple += tuple([np.array([o.errsq()]).astype(np.float32).data.tobytes() for o in self.covobs.values()])
    -714        hash_tuple += tuple([o.encode() for o in self.names])
    -715        m = hashlib.md5()
    -716        [m.update(o) for o in hash_tuple]
    -717        return int(m.hexdigest(), 16) & 0xFFFFFFFF
    -718
    -719    # Overload comparisons
    -720    def __lt__(self, other):
    -721        return self.value < other
    +            
     19class Obs:
    + 20    """Class for a general observable.
    + 21
    + 22    Instances of Obs are the basic objects of a pyerrors error analysis.
    + 23    They are initialized with a list which contains arrays of samples for
    + 24    different ensembles/replica and another list of same length which contains
    + 25    the names of the ensembles/replica. Mathematical operations can be
    + 26    performed on instances. The result is another instance of Obs. The error of
    + 27    an instance can be computed with the gamma_method. Also contains additional
    + 28    methods for output and visualization of the error calculation.
    + 29
    + 30    Attributes
    + 31    ----------
    + 32    S_global : float
    + 33        Standard value for S (default 2.0)
    + 34    S_dict : dict
    + 35        Dictionary for S values. If an entry for a given ensemble
    + 36        exists this overwrites the standard value for that ensemble.
    + 37    tau_exp_global : float
    + 38        Standard value for tau_exp (default 0.0)
    + 39    tau_exp_dict : dict
    + 40        Dictionary for tau_exp values. If an entry for a given ensemble exists
    + 41        this overwrites the standard value for that ensemble.
    + 42    N_sigma_global : float
    + 43        Standard value for N_sigma (default 1.0)
    + 44    N_sigma_dict : dict
    + 45        Dictionary for N_sigma values. If an entry for a given ensemble exists
    + 46        this overwrites the standard value for that ensemble.
    + 47    """
    + 48    __slots__ = ['names', 'shape', 'r_values', 'deltas', 'N', '_value', '_dvalue',
    + 49                 'ddvalue', 'reweighted', 'S', 'tau_exp', 'N_sigma',
    + 50                 'e_dvalue', 'e_ddvalue', 'e_tauint', 'e_dtauint',
    + 51                 'e_windowsize', 'e_rho', 'e_drho', 'e_n_tauint', 'e_n_dtauint',
    + 52                 'idl', 'tag', '_covobs', '__dict__']
    + 53
    + 54    S_global = 2.0
    + 55    S_dict = {}
    + 56    tau_exp_global = 0.0
    + 57    tau_exp_dict = {}
    + 58    N_sigma_global = 1.0
    + 59    N_sigma_dict = {}
    + 60
    + 61    def __init__(self, samples, names, idl=None, **kwargs):
    + 62        """ Initialize Obs object.
    + 63
    + 64        Parameters
    + 65        ----------
    + 66        samples : list
    + 67            list of numpy arrays containing the Monte Carlo samples
    + 68        names : list
    + 69            list of strings labeling the individual samples
    + 70        idl : list, optional
    + 71            list of ranges or lists on which the samples are defined
    + 72        """
    + 73
    + 74        if kwargs.get("means") is None and len(samples):
    + 75            if len(samples) != len(names):
    + 76                raise ValueError('Length of samples and names incompatible.')
    + 77            if idl is not None:
    + 78                if len(idl) != len(names):
    + 79                    raise ValueError('Length of idl incompatible with samples and names.')
    + 80            name_length = len(names)
    + 81            if name_length > 1:
    + 82                if name_length != len(set(names)):
    + 83                    raise ValueError('Names are not unique.')
    + 84                if not all(isinstance(x, str) for x in names):
    + 85                    raise TypeError('All names have to be strings.')
    + 86            else:
    + 87                if not isinstance(names[0], str):
    + 88                    raise TypeError('All names have to be strings.')
    + 89            if min(len(x) for x in samples) <= 4:
    + 90                raise ValueError('Samples have to have at least 5 entries.')
    + 91
    + 92        self.names = sorted(names)
    + 93        self.shape = {}
    + 94        self.r_values = {}
    + 95        self.deltas = {}
    + 96        self._covobs = {}
    + 97
    + 98        self._value = 0
    + 99        self.N = 0
    +100        self.idl = {}
    +101        if idl is not None:
    +102            for name, idx in sorted(zip(names, idl)):
    +103                if isinstance(idx, range):
    +104                    self.idl[name] = idx
    +105                elif isinstance(idx, (list, np.ndarray)):
    +106                    dc = np.unique(np.diff(idx))
    +107                    if np.any(dc < 0):
    +108                        raise ValueError("Unsorted idx for idl[%s]" % (name))
    +109                    if len(dc) == 1:
    +110                        self.idl[name] = range(idx[0], idx[-1] + dc[0], dc[0])
    +111                    else:
    +112                        self.idl[name] = list(idx)
    +113                else:
    +114                    raise TypeError('incompatible type for idl[%s].' % (name))
    +115        else:
    +116            for name, sample in sorted(zip(names, samples)):
    +117                self.idl[name] = range(1, len(sample) + 1)
    +118
    +119        if kwargs.get("means") is not None:
    +120            for name, sample, mean in sorted(zip(names, samples, kwargs.get("means"))):
    +121                self.shape[name] = len(self.idl[name])
    +122                self.N += self.shape[name]
    +123                self.r_values[name] = mean
    +124                self.deltas[name] = sample
    +125        else:
    +126            for name, sample in sorted(zip(names, samples)):
    +127                self.shape[name] = len(self.idl[name])
    +128                self.N += self.shape[name]
    +129                if len(sample) != self.shape[name]:
    +130                    raise ValueError('Incompatible samples and idx for %s: %d vs. %d' % (name, len(sample), self.shape[name]))
    +131                self.r_values[name] = np.mean(sample)
    +132                self.deltas[name] = sample - self.r_values[name]
    +133                self._value += self.shape[name] * self.r_values[name]
    +134            self._value /= self.N
    +135
    +136        self._dvalue = 0.0
    +137        self.ddvalue = 0.0
    +138        self.reweighted = False
    +139
    +140        self.tag = None
    +141
    +142    @property
    +143    def value(self):
    +144        return self._value
    +145
    +146    @property
    +147    def dvalue(self):
    +148        return self._dvalue
    +149
    +150    @property
    +151    def e_names(self):
    +152        return sorted(set([o.split('|')[0] for o in self.names]))
    +153
    +154    @property
    +155    def cov_names(self):
    +156        return sorted(set([o for o in self.covobs.keys()]))
    +157
    +158    @property
    +159    def mc_names(self):
    +160        return sorted(set([o.split('|')[0] for o in self.names if o not in self.cov_names]))
    +161
    +162    @property
    +163    def e_content(self):
    +164        res = {}
    +165        for e, e_name in enumerate(self.e_names):
    +166            res[e_name] = sorted(filter(lambda x: x.startswith(e_name + '|'), self.names))
    +167            if e_name in self.names:
    +168                res[e_name].append(e_name)
    +169        return res
    +170
    +171    @property
    +172    def covobs(self):
    +173        return self._covobs
    +174
    +175    def gamma_method(self, **kwargs):
    +176        """Estimate the error and related properties of the Obs.
    +177
    +178        Parameters
    +179        ----------
    +180        S : float
    +181            specifies a custom value for the parameter S (default 2.0).
    +182            If set to 0 it is assumed that the data exhibits no
     +183            autocorrelation. In this case the error estimate coincides
    +184            with the sample standard error.
    +185        tau_exp : float
    +186            positive value triggers the critical slowing down analysis
    +187            (default 0.0).
    +188        N_sigma : float
    +189            number of standard deviations from zero until the tail is
    +190            attached to the autocorrelation function (default 1).
    +191        fft : bool
    +192            determines whether the fft algorithm is used for the computation
    +193            of the autocorrelation function (default True)
    +194        """
    +195
    +196        e_content = self.e_content
    +197        self.e_dvalue = {}
    +198        self.e_ddvalue = {}
    +199        self.e_tauint = {}
    +200        self.e_dtauint = {}
    +201        self.e_windowsize = {}
    +202        self.e_n_tauint = {}
    +203        self.e_n_dtauint = {}
    +204        e_gamma = {}
    +205        self.e_rho = {}
    +206        self.e_drho = {}
    +207        self._dvalue = 0
    +208        self.ddvalue = 0
    +209
    +210        self.S = {}
    +211        self.tau_exp = {}
    +212        self.N_sigma = {}
    +213
    +214        if kwargs.get('fft') is False:
    +215            fft = False
    +216        else:
    +217            fft = True
    +218
    +219        def _parse_kwarg(kwarg_name):
    +220            if kwarg_name in kwargs:
    +221                tmp = kwargs.get(kwarg_name)
    +222                if isinstance(tmp, (int, float)):
    +223                    if tmp < 0:
    +224                        raise Exception(kwarg_name + ' has to be larger or equal to 0.')
    +225                    for e, e_name in enumerate(self.e_names):
    +226                        getattr(self, kwarg_name)[e_name] = tmp
    +227                else:
    +228                    raise TypeError(kwarg_name + ' is not in proper format.')
    +229            else:
    +230                for e, e_name in enumerate(self.e_names):
    +231                    if e_name in getattr(Obs, kwarg_name + '_dict'):
    +232                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name]
    +233                    else:
    +234                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global')
    +235
    +236        _parse_kwarg('S')
    +237        _parse_kwarg('tau_exp')
    +238        _parse_kwarg('N_sigma')
    +239
    +240        for e, e_name in enumerate(self.mc_names):
    +241            gapsize = _determine_gap(self, e_content, e_name)
    +242
    +243            r_length = []
    +244            for r_name in e_content[e_name]:
    +245                if isinstance(self.idl[r_name], range):
    +246                    r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize)
    +247                else:
    +248                    r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize)
    +249
    +250            e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]])
    +251            w_max = max(r_length) // 2
    +252            e_gamma[e_name] = np.zeros(w_max)
    +253            self.e_rho[e_name] = np.zeros(w_max)
    +254            self.e_drho[e_name] = np.zeros(w_max)
    +255
    +256            for r_name in e_content[e_name]:
    +257                e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    +258
    +259            gamma_div = np.zeros(w_max)
    +260            for r_name in e_content[e_name]:
    +261                gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    +262            gamma_div[gamma_div < 1] = 1.0
    +263            e_gamma[e_name] /= gamma_div[:w_max]
    +264
    +265            if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny:  # Prevent division by zero
    +266                self.e_tauint[e_name] = 0.5
    +267                self.e_dtauint[e_name] = 0.0
    +268                self.e_dvalue[e_name] = 0.0
    +269                self.e_ddvalue[e_name] = 0.0
    +270                self.e_windowsize[e_name] = 0
    +271                continue
    +272
    +273            self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0]
    +274            self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:])))
    +275            # Make sure no entry of tauint is smaller than 0.5
    +276            self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps
    +277            # hep-lat/0306017 eq. (42)
    +278            self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N)
    +279            self.e_n_dtauint[e_name][0] = 0.0
    +280
    +281            def _compute_drho(i):
    +282                tmp = (self.e_rho[e_name][i + 1:w_max]
    +283                       + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1],
    +284                                         self.e_rho[e_name][1:max(1, w_max - 2 * i)]])
    +285                       - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i])
    +286                self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N)
    +287
    +288            if self.tau_exp[e_name] > 0:
    +289                _compute_drho(1)
    +290                texp = self.tau_exp[e_name]
    +291                # Critical slowing down analysis
    +292                if w_max // 2 <= 1:
    +293                    raise Exception("Need at least 8 samples for tau_exp error analysis")
    +294                for n in range(1, w_max // 2):
    +295                    _compute_drho(n + 1)
    +296                    if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2:
    +297                        # Bias correction hep-lat/0306017 eq. (49) included
    +298                        self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1])  # The absolute makes sure, that the tail contribution is always positive
    +299                        self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2)
    +300                        # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2
    +301                        self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    +302                        self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    +303                        self.e_windowsize[e_name] = n
    +304                        break
    +305            else:
    +306                if self.S[e_name] == 0.0:
    +307                    self.e_tauint[e_name] = 0.5
    +308                    self.e_dtauint[e_name] = 0.0
    +309                    self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1))
    +310                    self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N)
    +311                    self.e_windowsize[e_name] = 0
    +312                else:
    +313                    # Standard automatic windowing procedure
    +314                    tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1))
    +315                    g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N)
    +316                    for n in range(1, w_max):
    +317                        if g_w[n - 1] < 0 or n >= w_max - 1:
    +318                            _compute_drho(n)
    +319                            self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N)  # Bias correction hep-lat/0306017 eq. (49)
    +320                            self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n]
    +321                            self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    +322                            self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    +323                            self.e_windowsize[e_name] = n
    +324                            break
    +325
    +326            self._dvalue += self.e_dvalue[e_name] ** 2
    +327            self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2
    +328
    +329        for e_name in self.cov_names:
    +330            self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq())
    +331            self.e_ddvalue[e_name] = 0
    +332            self._dvalue += self.e_dvalue[e_name]**2
    +333
    +334        self._dvalue = np.sqrt(self._dvalue)
    +335        if self._dvalue == 0.0:
    +336            self.ddvalue = 0.0
    +337        else:
    +338            self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue
    +339        return
    +340
    +341    gm = gamma_method
    +342
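# --- Editor's usage sketch (not part of obs.py): running the error analysis above.
# Assumes pyerrors is importable as `pe`; the ensemble name 'ens_A' and all
# parameter values are illustrative placeholders.
import numpy as np
import pyerrors as pe

samples = np.random.normal(1.0, 0.1, 1000)
my_obs = pe.Obs([samples], ['ens_A'])
my_obs.gamma_method(S=2.0)         # automatic windowing; my_obs.gm(S=2.0) is the short alias
print(my_obs.value, my_obs.dvalue)
my_obs.gamma_method(tau_exp=4)     # critical slowing down analysis with an attached tail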
    +343    def _calc_gamma(self, deltas, idx, shape, w_max, fft, gapsize):
    +344        """Calculate Gamma_{AA} from the deltas, which are defined on idx.
    +345           idx is assumed to be a contiguous range (possibly with a stepsize != 1)
    +346
    +347        Parameters
    +348        ----------
    +349        deltas : list
    +350            List of fluctuations
    +351        idx : list
    +352            List or range of configurations on which the deltas are defined.
    +353        shape : int
    +354            Number of configurations in idx.
    +355        w_max : int
    +356            Upper bound for the summation window.
    +357        fft : bool
    +358            determines whether the fft algorithm is used for the computation
    +359            of the autocorrelation function.
    +360        gapsize : int
    +361            The target distance between two configurations. If longer distances
    +362            are found in idx, the data is expanded.
    +363        """
    +364        gamma = np.zeros(w_max)
    +365        deltas = _expand_deltas(deltas, idx, shape, gapsize)
    +366        new_shape = len(deltas)
    +367        if fft:
    +368            max_gamma = min(new_shape, w_max)
    +369            # The padding for the fft has to be even
    +370            padding = new_shape + max_gamma + (new_shape + max_gamma) % 2
    +371            gamma[:max_gamma] += np.fft.irfft(np.abs(np.fft.rfft(deltas, padding)) ** 2)[:max_gamma]
    +372        else:
    +373            for n in range(w_max):
    +374                if new_shape - n >= 0:
    +375                    gamma[n] += deltas[0:new_shape - n].dot(deltas[n:new_shape])
    +376
    +377        return gamma
    +378
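# --- Editor's sketch of the FFT trick used in the fft branch of _calc_gamma above.
# Gamma[n] = sum_i delta[i] * delta[i+n] follows from the power spectrum of the
# zero-padded series; the names below are illustrative only.
import numpy as np

deltas = np.random.normal(0.0, 1.0, 500)
w_max = 50

# direct summation, as in the non-fft branch
direct = np.array([deltas[:len(deltas) - n] @ deltas[n:] for n in range(w_max)])

# even zero-padding, as in the fft branch
padding = len(deltas) + w_max + (len(deltas) + w_max) % 2
via_fft = np.fft.irfft(np.abs(np.fft.rfft(deltas, padding)) ** 2)[:w_max]

assert np.allclose(direct, via_fft)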
    +379    def details(self, ens_content=True):
    +380        """Output detailed properties of the Obs.
    +381
    +382        Parameters
    +383        ----------
    +384        ens_content : bool
    +385            print details about the ensembles and replica if true.
    +386        """
    +387        if self.tag is not None:
    +388            print("Description:", self.tag)
    +389        if not hasattr(self, 'e_dvalue'):
    +390            print('Result\t %3.8e' % (self.value))
    +391        else:
    +392            if self.value == 0.0:
    +393                percentage = np.nan
    +394            else:
    +395                percentage = np.abs(self._dvalue / self.value) * 100
    +396            print('Result\t %3.8e +/- %3.8e +/- %3.8e (%3.3f%%)' % (self.value, self._dvalue, self.ddvalue, percentage))
    +397            if len(self.e_names) > 1:
    +398                print(' Ensemble errors:')
    +399            e_content = self.e_content
    +400            for e_name in self.mc_names:
    +401                gap = _determine_gap(self, e_content, e_name)
    +402
    +403                if len(self.e_names) > 1:
    +404                    print('', e_name, '\t %3.6e +/- %3.6e' % (self.e_dvalue[e_name], self.e_ddvalue[e_name]))
    +405                tau_string = " \N{GREEK SMALL LETTER TAU}_int\t " + _format_uncertainty(self.e_tauint[e_name], self.e_dtauint[e_name])
    +406                tau_string += f" in units of {gap} config"
    +407                if gap > 1:
    +408                    tau_string += "s"
    +409                if self.tau_exp[e_name] > 0:
    +410                    tau_string = f"{tau_string: <45}" + '\t(\N{GREEK SMALL LETTER TAU}_exp=%3.2f, N_\N{GREEK SMALL LETTER SIGMA}=%1.0i)' % (self.tau_exp[e_name], self.N_sigma[e_name])
    +411                else:
    +412                    tau_string = f"{tau_string: <45}" + '\t(S=%3.2f)' % (self.S[e_name])
    +413                print(tau_string)
    +414            for e_name in self.cov_names:
    +415                print('', e_name, '\t %3.8e' % (self.e_dvalue[e_name]))
    +416        if ens_content is True:
    +417            if len(self.e_names) == 1:
    +418                print(self.N, 'samples in', len(self.e_names), 'ensemble:')
    +419            else:
    +420                print(self.N, 'samples in', len(self.e_names), 'ensembles:')
    +421            my_string_list = []
    +422            for key, value in sorted(self.e_content.items()):
    +423                if key not in self.covobs:
    +424                    my_string = '  ' + "\u00B7 Ensemble '" + key + "' "
    +425                    if len(value) == 1:
    +426                        my_string += f': {self.shape[value[0]]} configurations'
    +427                        if isinstance(self.idl[value[0]], range):
    +428                            my_string += f' (from {self.idl[value[0]].start} to {self.idl[value[0]][-1]}' + int(self.idl[value[0]].step != 1) * f' in steps of {self.idl[value[0]].step}' + ')'
    +429                        else:
    +430                            my_string += f' (irregular range from {self.idl[value[0]][0]} to {self.idl[value[0]][-1]})'
    +431                    else:
    +432                        sublist = []
    +433                        for v in value:
    +434                            my_substring = '    ' + "\u00B7 Replicum '" + v[len(key) + 1:] + "' "
    +435                            my_substring += f': {self.shape[v]} configurations'
    +436                            if isinstance(self.idl[v], range):
    +437                                my_substring += f' (from {self.idl[v].start} to {self.idl[v][-1]}' + int(self.idl[v].step != 1) * f' in steps of {self.idl[v].step}' + ')'
    +438                            else:
    +439                                my_substring += f' (irregular range from {self.idl[v][0]} to {self.idl[v][-1]})'
    +440                            sublist.append(my_substring)
    +441
    +442                        my_string += '\n' + '\n'.join(sublist)
    +443                else:
    +444                    my_string = '  ' + "\u00B7 Covobs   '" + key + "' "
    +445                my_string_list.append(my_string)
    +446            print('\n'.join(my_string_list))
    +447
    +448    def reweight(self, weight):
    +449        """Reweight the obs with a given reweighting factor.
    +450
    +451        Parameters
    +452        ----------
    +453        weight : Obs
    +454            Reweighting factor. An Observable that has to be defined on a superset of the
    +455            configurations in obs[i].idl for all i.
    +456        all_configs : bool
    +457            if True, the reweighted observables are normalized by the average of
    +458            the reweighting factor on all configurations in weight.idl and not
    +459            on the configurations in obs[i].idl. Default False.
    +460        """
    +461        return reweight(weight, [self])[0]
    +462
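# --- Editor's sketch of reweighting (hypothetical data and names).
# The reweighting factor has to be an Obs defined on the same (or a superset
# of the) configurations as the observable.
import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(1.0, 0.1, 500)], ['ens_A'])
weight = pe.Obs([np.random.normal(1.0, 0.01, 500)], ['ens_A'])
reweighted = obs.reweight(weight)
reweighted.gamma_method()
print(reweighted)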
    +463    def is_zero_within_error(self, sigma=1):
    +464        """Checks whether the observable is zero within 'sigma' standard errors.
    +465
    +466        Parameters
    +467        ----------
    +468        sigma : int
    +469            Number of standard errors used for the check.
    +470
    +471        Only works properly if the gamma method was run beforehand.
    +472        """
    +473        return self.is_zero() or np.abs(self.value) <= sigma * self._dvalue
    +474
    +475    def is_zero(self, atol=1e-10):
    +476        """Checks whether the observable is zero within a given tolerance.
    +477
    +478        Parameters
    +479        ----------
    +480        atol : float
    +481            Absolute tolerance (for details see numpy documentation).
    +482        """
    +483        return np.isclose(0.0, self.value, 1e-14, atol) and all(np.allclose(0.0, delta, 1e-14, atol) for delta in self.deltas.values()) and all(np.allclose(0.0, delta.errsq(), 1e-14, atol) for delta in self.covobs.values())
    +484
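# --- Editor's sketch of the zero checks (illustrative values).
import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(0.01, 1.0, 1000)], ['ens_A'])
obs.gamma_method()                        # required before is_zero_within_error
print(obs.is_zero_within_error(sigma=2))  # compares |value| against 2 * dvalue
print(obs.is_zero())                      # strict zero check within atol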
    +485    def plot_tauint(self, save=None):
    +486        """Plot integrated autocorrelation time for each ensemble.
    +487
    +488        Parameters
    +489        ----------
    +490        save : str
    +491            saves the figure to a file named 'save' if a file name is given.
    +492        """
    +493        if not hasattr(self, 'e_dvalue'):
    +494            raise Exception('Run the gamma method first.')
    +495
    +496        for e, e_name in enumerate(self.mc_names):
    +497            fig = plt.figure()
    +498            plt.xlabel(r'$W$')
    +499            plt.ylabel(r'$\tau_\mathrm{int}$')
    +500            length = int(len(self.e_n_tauint[e_name]))
    +501            if self.tau_exp[e_name] > 0:
    +502                base = self.e_n_tauint[e_name][self.e_windowsize[e_name]]
    +503                x_help = np.arange(2 * self.tau_exp[e_name])
    +504                y_help = (x_help + 1) * np.abs(self.e_rho[e_name][self.e_windowsize[e_name] + 1]) * (1 - x_help / (2 * (2 * self.tau_exp[e_name] - 1))) + base
    +505                x_arr = np.arange(self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name])
    +506                plt.plot(x_arr, y_help, 'C' + str(e), linewidth=1, ls='--', marker=',')
    +507                plt.errorbar([self.e_windowsize[e_name] + 2 * self.tau_exp[e_name]], [self.e_tauint[e_name]],
    +508                             yerr=[self.e_dtauint[e_name]], fmt='C' + str(e), linewidth=1, capsize=2, marker='o', mfc=plt.rcParams['axes.facecolor'])
    +509                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    +510                label = e_name + r', $\tau_\mathrm{exp}$=' + str(np.around(self.tau_exp[e_name], decimals=2))
    +511            else:
    +512                label = e_name + ', S=' + str(np.around(self.S[e_name], decimals=2))
    +513                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    +514
    +515            plt.errorbar(np.arange(length)[:int(xmax) + 1], self.e_n_tauint[e_name][:int(xmax) + 1], yerr=self.e_n_dtauint[e_name][:int(xmax) + 1], linewidth=1, capsize=2, label=label)
    +516            plt.axvline(x=self.e_windowsize[e_name], color='C' + str(e), alpha=0.5, marker=',', ls='--')
    +517            plt.legend()
    +518            plt.xlim(-0.5, xmax)
    +519            ylim = plt.ylim()
    +520            plt.ylim(bottom=0.0, top=max(1.0, ylim[1]))
    +521            plt.draw()
    +522            if save:
    +523                fig.savefig(save + "_" + str(e))
    +524
    +525    def plot_rho(self, save=None):
    +526        """Plot the normalized autocorrelation function for each ensemble.
    +527
    +528        Parameters
    +529        ----------
    +530        save : str
    +531            saves the figure to a file named 'save' if a file name is given.
    +532        """
    +533        if not hasattr(self, 'e_dvalue'):
    +534            raise Exception('Run the gamma method first.')
    +535        for e, e_name in enumerate(self.mc_names):
    +536            fig = plt.figure()
    +537            plt.xlabel('W')
    +538            plt.ylabel('rho')
    +539            length = int(len(self.e_drho[e_name]))
    +540            plt.errorbar(np.arange(length), self.e_rho[e_name][:length], yerr=self.e_drho[e_name][:], linewidth=1, capsize=2)
    +541            plt.axvline(x=self.e_windowsize[e_name], color='r', alpha=0.25, ls='--', marker=',')
    +542            if self.tau_exp[e_name] > 0:
    +543                plt.plot([self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]],
    +544                         [self.e_rho[e_name][self.e_windowsize[e_name] + 1], 0], 'k-', lw=1)
    +545                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    +546                plt.title('Rho ' + e_name + r', tau\_exp=' + str(np.around(self.tau_exp[e_name], decimals=2)))
    +547            else:
    +548                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    +549                plt.title('Rho ' + e_name + ', S=' + str(np.around(self.S[e_name], decimals=2)))
    +550            plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1)
    +551            plt.xlim(-0.5, xmax)
    +552            plt.draw()
    +553            if save:
    +554                fig.savefig(save + "_" + str(e))
    +555
    +556    def plot_rep_dist(self):
    +557        """Plot replica distribution for each ensemble with more than one replicum."""
    +558        if not hasattr(self, 'e_dvalue'):
    +559            raise Exception('Run the gamma method first.')
    +560        for e, e_name in enumerate(self.mc_names):
    +561            if len(self.e_content[e_name]) == 1:
    +562                print('No replica distribution for a single replicum (', e_name, ')')
    +563                continue
    +564            r_length = []
    +565            sub_r_mean = 0
    +566            for r, r_name in enumerate(self.e_content[e_name]):
    +567                r_length.append(len(self.deltas[r_name]))
    +568                sub_r_mean += self.shape[r_name] * self.r_values[r_name]
    +569            e_N = np.sum(r_length)
    +570            sub_r_mean /= e_N
    +571            arr = np.zeros(len(self.e_content[e_name]))
    +572            for r, r_name in enumerate(self.e_content[e_name]):
    +573                arr[r] = (self.r_values[r_name] - sub_r_mean) / (self.e_dvalue[e_name] * np.sqrt(e_N / self.shape[r_name] - 1))
    +574            plt.hist(arr, rwidth=0.8, bins=len(self.e_content[e_name]))
    +575            plt.title('Replica distribution' + e_name + ' (mean=0, var=1)')
    +576            plt.draw()
    +577
    +578    def plot_history(self, expand=True):
    +579        """Plot derived Monte Carlo history for each ensemble
    +580
    +581        Parameters
    +582        ----------
    +583        expand : bool
    +584            show expanded history for irregular Monte Carlo chains (default: True).
    +585        """
    +586        for e, e_name in enumerate(self.mc_names):
    +587            plt.figure()
    +588            r_length = []
    +589            tmp = []
    +590            tmp_expanded = []
    +591            for r, r_name in enumerate(self.e_content[e_name]):
    +592                tmp.append(self.deltas[r_name] + self.r_values[r_name])
    +593                if expand:
    +594                    tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name], 1) + self.r_values[r_name])
    +595                    r_length.append(len(tmp_expanded[-1]))
    +596                else:
    +597                    r_length.append(len(tmp[-1]))
    +598            e_N = np.sum(r_length)
    +599            x = np.arange(e_N)
    +600            y_test = np.concatenate(tmp, axis=0)
    +601            if expand:
    +602                y = np.concatenate(tmp_expanded, axis=0)
    +603            else:
    +604                y = y_test
    +605            plt.errorbar(x, y, fmt='.', markersize=3)
    +606            plt.xlim(-0.5, e_N - 0.5)
    +607            plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})')
    +608            plt.draw()
    +609
    +610    def plot_piechart(self, save=None):
    +611        """Plot piechart which shows the fractional contribution of each
    +612        ensemble to the error and returns a dictionary containing the fractions.
    +613
    +614        Parameters
    +615        ----------
    +616        save : str
    +617            saves the figure to a file named 'save' if a file name is given.
    +618        """
    +619        if not hasattr(self, 'e_dvalue'):
    +620            raise Exception('Run the gamma method first.')
    +621        if np.isclose(0.0, self._dvalue, atol=1e-15):
    +622            raise Exception('Error is 0.0')
    +623        labels = self.e_names
    +624        sizes = [self.e_dvalue[name] ** 2 for name in labels] / self._dvalue ** 2
    +625        fig1, ax1 = plt.subplots()
    +626        ax1.pie(sizes, labels=labels, startangle=90, normalize=True)
    +627        ax1.axis('equal')
    +628        plt.draw()
    +629        if save:
    +630            fig1.savefig(save)
    +631
    +632        return dict(zip(labels, sizes))
    +633
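# --- Editor's sketch: diagnostic plots after the gamma method (illustrative data).
import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(1.0, 0.1, 1000)], ['ens_A'])
obs.gamma_method()
obs.plot_tauint()    # integrated autocorrelation time vs. window size
obs.plot_rho()       # normalized autocorrelation function
obs.plot_history()   # Monte Carlo history with skewness/kurtosis tests
obs.plot_piechart()  # fractional error contribution per ensemble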
    +634    def dump(self, filename, datatype="json.gz", description="", **kwargs):
    +635        """Dump the Obs to a file 'filename' of chosen format.
    +636
    +637        Parameters
    +638        ----------
    +639        filename : str
    +640            name of the file to be saved.
    +641        datatype : str
    +642            Format of the exported file. Supported formats include
    +643            "json.gz" and "pickle"
    +644        description : str
    +645            Description for output file, only relevant for json.gz format.
    +646        path : str
    +647            specifies a custom path for the file (default '.')
    +648        """
    +649        if 'path' in kwargs:
    +650            file_name = kwargs.get('path') + '/' + filename
    +651        else:
    +652            file_name = filename
    +653
    +654        if datatype == "json.gz":
    +655            from .input.json import dump_to_json
    +656            dump_to_json([self], file_name, description=description)
    +657        elif datatype == "pickle":
    +658            with open(file_name + '.p', 'wb') as fb:
    +659                pickle.dump(self, fb)
    +660        else:
    +661            raise Exception("Unknown datatype " + str(datatype))
    +662
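# --- Editor's sketch of dumping an Obs to disk (file names are placeholders).
import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(1.0, 0.1, 500)], ['ens_A'])
obs.dump("my_obs")                               # json.gz format by default
obs.dump("my_obs", datatype="pickle", path=".")  # ./my_obs.p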
    +663    def export_jackknife(self):
    +664        """Export jackknife samples from the Obs
    +665
    +666        Returns
    +667        -------
    +668        numpy.ndarray
    +669            Returns a numpy array of length N + 1 where N is the number of samples
    +670            for the given ensemble and replicum. The zeroth entry of the array contains
    +671            the mean value of the Obs, entries 1 to N contain the N jackknife samples
    +672            derived from the Obs. The current implementation only works for observables
    +673            defined on exactly one ensemble and replicum. The derived jackknife samples
    +674            should agree with samples from a full jackknife analysis up to O(1/N).
    +675        """
    +676
    +677        if len(self.names) != 1:
    +678            raise Exception("'export_jackknife' is only implemented for Obs defined on one ensemble and replicum.")
    +679
    +680        name = self.names[0]
    +681        full_data = self.deltas[name] + self.r_values[name]
    +682        n = full_data.size
    +683        mean = self.value
    +684        tmp_jacks = np.zeros(n + 1)
    +685        tmp_jacks[0] = mean
    +686        tmp_jacks[1:] = (n * mean - full_data) / (n - 1)
    +687        return tmp_jacks
    +688
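# --- Editor's check of the jackknife relation above (synthetic, uncorrelated data).
# The pseudo-jackknife samples J_i = (N * mean - x_i) / (N - 1) reproduce the
# naive standard error through the usual jackknife variance formula.
import numpy as np
import pyerrors as pe

data = np.random.normal(1.0, 0.1, 1000)
obs = pe.Obs([data], ['ens_A'])
jacks = obs.export_jackknife()

n = len(data)
jack_err = np.sqrt((n - 1) / n * np.sum((jacks[1:] - jacks[0]) ** 2))
naive_err = np.std(data, ddof=1) / np.sqrt(n)
assert np.isclose(jack_err, naive_err)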
    +689    def export_bootstrap(self, samples=500, random_numbers=None, save_rng=None):
    +690        """Export bootstrap samples from the Obs
    +691
    +692        Parameters
    +693        ----------
    +694        samples : int
    +695            Number of bootstrap samples to generate.
    +696        random_numbers : np.ndarray
    +697            Array of shape (samples, length) containing the random numbers to generate the bootstrap samples.
    +698            If not provided, the bootstrap samples are generated based on the md5 hash of the ensemble name.
    +699        save_rng : str
    +700            Save the random numbers to a file if a path is specified.
    +701
    +702        Returns
    +703        -------
    +704        numpy.ndarray
    +705            Returns a numpy array of length samples + 1 where samples is the number of
    +706            requested bootstrap samples. The zeroth entry of the array contains
    +707            the mean value of the Obs, entries 1 to samples contain the bootstrap samples
    +708            derived from the Obs. The current implementation only works for observables
    +709            defined on exactly one ensemble and replicum. The derived bootstrap samples
    +710            should agree with samples from a full bootstrap analysis up to O(1/N).
    +711        """
    +712        if len(self.names) != 1:
    +713            raise Exception("'export_bootstrap' is only implemented for Obs defined on one ensemble and replicum.")
    +714
    +715        name = self.names[0]
    +716        length = self.N
    +717
    +718        if random_numbers is None:
    +719            seed = int(hashlib.md5(name.encode()).hexdigest(), 16) & 0xFFFFFFFF
    +720            rng = np.random.default_rng(seed)
    +721            random_numbers = rng.integers(0, length, size=(samples, length))
     722
    -723    def __le__(self, other):
    -724        return self.value <= other
    +723        if save_rng is not None:
    +724            np.savetxt(save_rng, random_numbers, fmt='%i')
     725
    -726    def __gt__(self, other):
    -727        return self.value > other
    -728
    -729    def __ge__(self, other):
    -730        return self.value >= other
    +726        proj = np.vstack([np.bincount(o, minlength=length) for o in random_numbers]) / length
    +727        ret = np.zeros(samples + 1)
    +728        ret[0] = self.value
    +729        ret[1:] = proj @ (self.deltas[name] + self.r_values[name])
    +730        return ret
     731
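# --- Editor's sketch of a reproducible bootstrap export (illustrative settings).
# Passing an explicit random_numbers array fixes the resampling; for
# uncorrelated data the spread of the bootstrap samples should be comparable
# to the gamma-method error.
import numpy as np
import pyerrors as pe

data = np.random.normal(1.0, 0.1, 1000)
obs = pe.Obs([data], ['ens_A'])
obs.gamma_method(S=0)   # no autocorrelation assumed

rng = np.random.default_rng(1234)
draws = rng.integers(0, obs.N, size=(500, obs.N))
boots = obs.export_bootstrap(samples=500, random_numbers=draws)
print(boots[0], np.std(boots[1:]), obs.dvalue)  # mean, bootstrap spread, gamma error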
    -732    def __eq__(self, other):
    -733        return (self - other).is_zero()
    +732    def __float__(self):
    +733        return float(self.value)
     734
    -735    def __ne__(self, other):
    -736        return not (self - other).is_zero()
    +735    def __repr__(self):
    +736        return 'Obs[' + str(self) + ']'
     737
    -738    # Overload math operations
    -739    def __add__(self, y):
    -740        if isinstance(y, Obs):
    -741            return derived_observable(lambda x, **kwargs: x[0] + x[1], [self, y], man_grad=[1, 1])
    -742        else:
    -743            if isinstance(y, np.ndarray):
    -744                return np.array([self + o for o in y])
    -745            elif y.__class__.__name__ in ['Corr', 'CObs']:
    -746                return NotImplemented
    -747            else:
    -748                return derived_observable(lambda x, **kwargs: x[0] + y, [self], man_grad=[1])
    -749
    -750    def __radd__(self, y):
    -751        return self + y
    -752
    -753    def __mul__(self, y):
    -754        if isinstance(y, Obs):
    -755            return derived_observable(lambda x, **kwargs: x[0] * x[1], [self, y], man_grad=[y.value, self.value])
    -756        else:
    -757            if isinstance(y, np.ndarray):
    -758                return np.array([self * o for o in y])
    -759            elif isinstance(y, complex):
    -760                return CObs(self * y.real, self * y.imag)
    -761            elif y.__class__.__name__ in ['Corr', 'CObs']:
    -762                return NotImplemented
    -763            else:
    -764                return derived_observable(lambda x, **kwargs: x[0] * y, [self], man_grad=[y])
    -765
    -766    def __rmul__(self, y):
    -767        return self * y
    -768
    -769    def __sub__(self, y):
    -770        if isinstance(y, Obs):
    -771            return derived_observable(lambda x, **kwargs: x[0] - x[1], [self, y], man_grad=[1, -1])
    -772        else:
    -773            if isinstance(y, np.ndarray):
    -774                return np.array([self - o for o in y])
    -775            elif y.__class__.__name__ in ['Corr', 'CObs']:
    -776                return NotImplemented
    -777            else:
    -778                return derived_observable(lambda x, **kwargs: x[0] - y, [self], man_grad=[1])
    -779
    -780    def __rsub__(self, y):
    -781        return -1 * (self - y)
    -782
    -783    def __pos__(self):
    -784        return self
    -785
    -786    def __neg__(self):
    -787        return -1 * self
    -788
    -789    def __truediv__(self, y):
    -790        if isinstance(y, Obs):
    -791            return derived_observable(lambda x, **kwargs: x[0] / x[1], [self, y], man_grad=[1 / y.value, - self.value / y.value ** 2])
    -792        else:
    -793            if isinstance(y, np.ndarray):
    -794                return np.array([self / o for o in y])
    -795            elif y.__class__.__name__ in ['Corr', 'CObs']:
    -796                return NotImplemented
    -797            else:
    -798                return derived_observable(lambda x, **kwargs: x[0] / y, [self], man_grad=[1 / y])
    -799
    -800    def __rtruediv__(self, y):
    -801        if isinstance(y, Obs):
    -802            return derived_observable(lambda x, **kwargs: x[0] / x[1], [y, self], man_grad=[1 / self.value, - y.value / self.value ** 2])
    -803        else:
    -804            if isinstance(y, np.ndarray):
    -805                return np.array([o / self for o in y])
    -806            elif y.__class__.__name__ in ['Corr', 'CObs']:
    -807                return NotImplemented
    -808            else:
    -809                return derived_observable(lambda x, **kwargs: y / x[0], [self], man_grad=[-y / self.value ** 2])
    -810
    -811    def __pow__(self, y):
    -812        if isinstance(y, Obs):
    -813            return derived_observable(lambda x: x[0] ** x[1], [self, y])
    -814        else:
    -815            return derived_observable(lambda x: x[0] ** y, [self])
    -816
    -817    def __rpow__(self, y):
    -818        if isinstance(y, Obs):
    -819            return derived_observable(lambda x: x[0] ** x[1], [y, self])
    -820        else:
    -821            return derived_observable(lambda x: y ** x[0], [self])
    -822
    -823    def __abs__(self):
    -824        return derived_observable(lambda x: anp.abs(x[0]), [self])
    -825
    -826    # Overload numpy functions
    -827    def sqrt(self):
    -828        return derived_observable(lambda x, **kwargs: np.sqrt(x[0]), [self], man_grad=[1 / 2 / np.sqrt(self.value)])
    +738    def __str__(self):
    +739        return _format_uncertainty(self.value, self._dvalue)
    +740
    +741    def __format__(self, format_type):
    +742        if format_type == "":
    +743            significance = 2
    +744        else:
    +745            significance = int(float(format_type.replace("+", "").replace("-", "")))
    +746        my_str = _format_uncertainty(self.value, self._dvalue,
    +747                                     significance=significance)
    +748        for char in ["+", " "]:
    +749            if format_type.startswith(char):
    +750                if my_str[0] != "-":
    +751                    my_str = char + my_str
    +752        return my_str
    +753
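# --- Editor's sketch of string formatting for an Obs (illustrative values).
import numpy as np
import pyerrors as pe

obs = pe.Obs([np.random.normal(1.0, 0.1, 1000)], ['ens_A'])
obs.gamma_method()
print(obs)          # value(error) at the default significance
print(f"{obs:3}")   # three significant digits for the error
print(f"{obs:+2}")  # force a leading sign for non-negative values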
    +754    def __hash__(self):
    +755        hash_tuple = (np.array([self.value]).astype(np.float32).data.tobytes(),)
    +756        hash_tuple += tuple([o.astype(np.float32).data.tobytes() for o in self.deltas.values()])
    +757        hash_tuple += tuple([np.array([o.errsq()]).astype(np.float32).data.tobytes() for o in self.covobs.values()])
    +758        hash_tuple += tuple([o.encode() for o in self.names])
    +759        m = hashlib.md5()
    +760        [m.update(o) for o in hash_tuple]
    +761        return int(m.hexdigest(), 16) & 0xFFFFFFFF
    +762
    +763    # Overload comparisons
    +764    def __lt__(self, other):
    +765        return self.value < other
    +766
    +767    def __le__(self, other):
    +768        return self.value <= other
    +769
    +770    def __gt__(self, other):
    +771        return self.value > other
    +772
    +773    def __ge__(self, other):
    +774        return self.value >= other
    +775
    +776    def __eq__(self, other):
    +777        return (self - other).is_zero()
    +778
    +779    def __ne__(self, other):
    +780        return not (self - other).is_zero()
    +781
    +782    # Overload math operations
    +783    def __add__(self, y):
    +784        if isinstance(y, Obs):
    +785            return derived_observable(lambda x, **kwargs: x[0] + x[1], [self, y], man_grad=[1, 1])
    +786        else:
    +787            if isinstance(y, np.ndarray):
    +788                return np.array([self + o for o in y])
    +789            elif y.__class__.__name__ in ['Corr', 'CObs']:
    +790                return NotImplemented
    +791            else:
    +792                return derived_observable(lambda x, **kwargs: x[0] + y, [self], man_grad=[1])
    +793
    +794    def __radd__(self, y):
    +795        return self + y
    +796
    +797    def __mul__(self, y):
    +798        if isinstance(y, Obs):
    +799            return derived_observable(lambda x, **kwargs: x[0] * x[1], [self, y], man_grad=[y.value, self.value])
    +800        else:
    +801            if isinstance(y, np.ndarray):
    +802                return np.array([self * o for o in y])
    +803            elif isinstance(y, complex):
    +804                return CObs(self * y.real, self * y.imag)
    +805            elif y.__class__.__name__ in ['Corr', 'CObs']:
    +806                return NotImplemented
    +807            else:
    +808                return derived_observable(lambda x, **kwargs: x[0] * y, [self], man_grad=[y])
    +809
    +810    def __rmul__(self, y):
    +811        return self * y
    +812
    +813    def __sub__(self, y):
    +814        if isinstance(y, Obs):
    +815            return derived_observable(lambda x, **kwargs: x[0] - x[1], [self, y], man_grad=[1, -1])
    +816        else:
    +817            if isinstance(y, np.ndarray):
    +818                return np.array([self - o for o in y])
    +819            elif y.__class__.__name__ in ['Corr', 'CObs']:
    +820                return NotImplemented
    +821            else:
    +822                return derived_observable(lambda x, **kwargs: x[0] - y, [self], man_grad=[1])
    +823
    +824    def __rsub__(self, y):
    +825        return -1 * (self - y)
    +826
    +827    def __pos__(self):
    +828        return self
     829
    -830    def log(self):
    -831        return derived_observable(lambda x, **kwargs: np.log(x[0]), [self], man_grad=[1 / self.value])
    +830    def __neg__(self):
    +831        return -1 * self
     832
    -833    def exp(self):
    -834        return derived_observable(lambda x, **kwargs: np.exp(x[0]), [self], man_grad=[np.exp(self.value)])
    -835
    -836    def sin(self):
    -837        return derived_observable(lambda x, **kwargs: np.sin(x[0]), [self], man_grad=[np.cos(self.value)])
    -838
    -839    def cos(self):
    -840        return derived_observable(lambda x, **kwargs: np.cos(x[0]), [self], man_grad=[-np.sin(self.value)])
    -841
    -842    def tan(self):
    -843        return derived_observable(lambda x, **kwargs: np.tan(x[0]), [self], man_grad=[1 / np.cos(self.value) ** 2])
    -844
    -845    def arcsin(self):
    -846        return derived_observable(lambda x: anp.arcsin(x[0]), [self])
    -847
    -848    def arccos(self):
    -849        return derived_observable(lambda x: anp.arccos(x[0]), [self])
    -850
    -851    def arctan(self):
    -852        return derived_observable(lambda x: anp.arctan(x[0]), [self])
    -853
    -854    def sinh(self):
    -855        return derived_observable(lambda x, **kwargs: np.sinh(x[0]), [self], man_grad=[np.cosh(self.value)])
    -856
    -857    def cosh(self):
    -858        return derived_observable(lambda x, **kwargs: np.cosh(x[0]), [self], man_grad=[np.sinh(self.value)])
    -859
    -860    def tanh(self):
    -861        return derived_observable(lambda x, **kwargs: np.tanh(x[0]), [self], man_grad=[1 / np.cosh(self.value) ** 2])
    -862
    -863    def arcsinh(self):
    -864        return derived_observable(lambda x: anp.arcsinh(x[0]), [self])
    -865
    -866    def arccosh(self):
    -867        return derived_observable(lambda x: anp.arccosh(x[0]), [self])
    -868
    -869    def arctanh(self):
    -870        return derived_observable(lambda x: anp.arctanh(x[0]), [self])
    +833    def __truediv__(self, y):
    +834        if isinstance(y, Obs):
    +835            return derived_observable(lambda x, **kwargs: x[0] / x[1], [self, y], man_grad=[1 / y.value, - self.value / y.value ** 2])
    +836        else:
    +837            if isinstance(y, np.ndarray):
    +838                return np.array([self / o for o in y])
    +839            elif y.__class__.__name__ in ['Corr', 'CObs']:
    +840                return NotImplemented
    +841            else:
    +842                return derived_observable(lambda x, **kwargs: x[0] / y, [self], man_grad=[1 / y])
    +843
    +844    def __rtruediv__(self, y):
    +845        if isinstance(y, Obs):
    +846            return derived_observable(lambda x, **kwargs: x[0] / x[1], [y, self], man_grad=[1 / self.value, - y.value / self.value ** 2])
    +847        else:
    +848            if isinstance(y, np.ndarray):
    +849                return np.array([o / self for o in y])
    +850            elif y.__class__.__name__ in ['Corr', 'CObs']:
    +851                return NotImplemented
    +852            else:
    +853                return derived_observable(lambda x, **kwargs: y / x[0], [self], man_grad=[-y / self.value ** 2])
    +854
    +855    def __pow__(self, y):
    +856        if isinstance(y, Obs):
    +857            return derived_observable(lambda x: x[0] ** x[1], [self, y])
    +858        else:
    +859            return derived_observable(lambda x: x[0] ** y, [self])
    +860
    +861    def __rpow__(self, y):
    +862        if isinstance(y, Obs):
    +863            return derived_observable(lambda x: x[0] ** x[1], [y, self])
    +864        else:
    +865            return derived_observable(lambda x: y ** x[0], [self])
    +866
    +867    def __abs__(self):
    +868        return derived_observable(lambda x: anp.abs(x[0]), [self])
    +869
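# --- Editor's sketch: automatic error propagation through the overloads above.
# Arithmetic between Obs objects (and with plain numbers or numpy arrays)
# returns new Obs via derived_observable; names below are illustrative.
import numpy as np
import pyerrors as pe

o1 = pe.Obs([np.random.normal(1.0, 0.1, 1000)], ['ens_A'])
o2 = pe.Obs([np.random.normal(2.0, 0.2, 1000)], ['ens_A'])
result = (o1 + 1) * o2 / o1.sqrt()
result.gamma_method()
print(result)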
    +870    # Overload numpy functions
    +871    def sqrt(self):
    +872        return derived_observable(lambda x, **kwargs: np.sqrt(x[0]), [self], man_grad=[1 / 2 / np.sqrt(self.value)])
    +873
    +874    def log(self):
    +875        return derived_observable(lambda x, **kwargs: np.log(x[0]), [self], man_grad=[1 / self.value])
    +876
    +877    def exp(self):
    +878        return derived_observable(lambda x, **kwargs: np.exp(x[0]), [self], man_grad=[np.exp(self.value)])
    +879
    +880    def sin(self):
    +881        return derived_observable(lambda x, **kwargs: np.sin(x[0]), [self], man_grad=[np.cos(self.value)])
    +882
    +883    def cos(self):
    +884        return derived_observable(lambda x, **kwargs: np.cos(x[0]), [self], man_grad=[-np.sin(self.value)])
    +885
    +886    def tan(self):
    +887        return derived_observable(lambda x, **kwargs: np.tan(x[0]), [self], man_grad=[1 / np.cos(self.value) ** 2])
    +888
    +889    def arcsin(self):
    +890        return derived_observable(lambda x: anp.arcsin(x[0]), [self])
    +891
    +892    def arccos(self):
    +893        return derived_observable(lambda x: anp.arccos(x[0]), [self])
    +894
    +895    def arctan(self):
    +896        return derived_observable(lambda x: anp.arctan(x[0]), [self])
    +897
    +898    def sinh(self):
    +899        return derived_observable(lambda x, **kwargs: np.sinh(x[0]), [self], man_grad=[np.cosh(self.value)])
    +900
    +901    def cosh(self):
    +902        return derived_observable(lambda x, **kwargs: np.cosh(x[0]), [self], man_grad=[np.sinh(self.value)])
    +903
    +904    def tanh(self):
    +905        return derived_observable(lambda x, **kwargs: np.tanh(x[0]), [self], man_grad=[1 / np.cosh(self.value) ** 2])
    +906
    +907    def arcsinh(self):
    +908        return derived_observable(lambda x: anp.arcsinh(x[0]), [self])
    +909
    +910    def arccosh(self):
    +911        return derived_observable(lambda x: anp.arccosh(x[0]), [self])
    +912
    +913    def arctanh(self):
    +914        return derived_observable(lambda x: anp.arctanh(x[0]), [self])
     
    @@ -2882,86 +3005,86 @@ this overwrites the standard value for that ensemble.
    -
     60    def __init__(self, samples, names, idl=None, **kwargs):
    - 61        """ Initialize Obs object.
    - 62
    - 63        Parameters
    - 64        ----------
    - 65        samples : list
    - 66            list of numpy arrays containing the Monte Carlo samples
    - 67        names : list
    - 68            list of strings labeling the individual samples
    - 69        idl : list, optional
    - 70            list of ranges or lists on which the samples are defined
    - 71        """
    - 72
    - 73        if kwargs.get("means") is None and len(samples):
    - 74            if len(samples) != len(names):
    - 75                raise ValueError('Length of samples and names incompatible.')
    - 76            if idl is not None:
    - 77                if len(idl) != len(names):
    - 78                    raise ValueError('Length of idl incompatible with samples and names.')
    - 79            name_length = len(names)
    - 80            if name_length > 1:
    - 81                if name_length != len(set(names)):
    - 82                    raise ValueError('Names are not unique.')
    - 83                if not all(isinstance(x, str) for x in names):
    - 84                    raise TypeError('All names have to be strings.')
    - 85            else:
    - 86                if not isinstance(names[0], str):
    - 87                    raise TypeError('All names have to be strings.')
    - 88            if min(len(x) for x in samples) <= 4:
    - 89                raise ValueError('Samples have to have at least 5 entries.')
    - 90
    - 91        self.names = sorted(names)
    - 92        self.shape = {}
    - 93        self.r_values = {}
    - 94        self.deltas = {}
    - 95        self._covobs = {}
    - 96
    - 97        self._value = 0
    - 98        self.N = 0
    - 99        self.idl = {}
    -100        if idl is not None:
    -101            for name, idx in sorted(zip(names, idl)):
    -102                if isinstance(idx, range):
    -103                    self.idl[name] = idx
    -104                elif isinstance(idx, (list, np.ndarray)):
    -105                    dc = np.unique(np.diff(idx))
    -106                    if np.any(dc < 0):
    -107                        raise ValueError("Unsorted idx for idl[%s]" % (name))
    -108                    if len(dc) == 1:
    -109                        self.idl[name] = range(idx[0], idx[-1] + dc[0], dc[0])
    -110                    else:
    -111                        self.idl[name] = list(idx)
    -112                else:
    -113                    raise TypeError('incompatible type for idl[%s].' % (name))
    -114        else:
    -115            for name, sample in sorted(zip(names, samples)):
    -116                self.idl[name] = range(1, len(sample) + 1)
    -117
    -118        if kwargs.get("means") is not None:
    -119            for name, sample, mean in sorted(zip(names, samples, kwargs.get("means"))):
    -120                self.shape[name] = len(self.idl[name])
    -121                self.N += self.shape[name]
    -122                self.r_values[name] = mean
    -123                self.deltas[name] = sample
    -124        else:
    -125            for name, sample in sorted(zip(names, samples)):
    -126                self.shape[name] = len(self.idl[name])
    -127                self.N += self.shape[name]
    -128                if len(sample) != self.shape[name]:
    -129                    raise ValueError('Incompatible samples and idx for %s: %d vs. %d' % (name, len(sample), self.shape[name]))
    -130                self.r_values[name] = np.mean(sample)
    -131                self.deltas[name] = sample - self.r_values[name]
    -132                self._value += self.shape[name] * self.r_values[name]
    -133            self._value /= self.N
    -134
    -135        self._dvalue = 0.0
    -136        self.ddvalue = 0.0
    -137        self.reweighted = False
    -138
    -139        self.tag = None
    +            
     61    def __init__(self, samples, names, idl=None, **kwargs):
    + 62        """ Initialize Obs object.
    + 63
    + 64        Parameters
    + 65        ----------
    + 66        samples : list
    + 67            list of numpy arrays containing the Monte Carlo samples
    + 68        names : list
    + 69            list of strings labeling the individual samples
    + 70        idl : list, optional
    + 71            list of ranges or lists on which the samples are defined
    + 72        """
    + 73
    + 74        if kwargs.get("means") is None and len(samples):
    + 75            if len(samples) != len(names):
    + 76                raise ValueError('Length of samples and names incompatible.')
    + 77            if idl is not None:
    + 78                if len(idl) != len(names):
    + 79                    raise ValueError('Length of idl incompatible with samples and names.')
    + 80            name_length = len(names)
    + 81            if name_length > 1:
    + 82                if name_length != len(set(names)):
    + 83                    raise ValueError('Names are not unique.')
    + 84                if not all(isinstance(x, str) for x in names):
    + 85                    raise TypeError('All names have to be strings.')
    + 86            else:
    + 87                if not isinstance(names[0], str):
    + 88                    raise TypeError('All names have to be strings.')
    + 89            if min(len(x) for x in samples) <= 4:
    + 90                raise ValueError('Samples have to have at least 5 entries.')
    + 91
    + 92        self.names = sorted(names)
    + 93        self.shape = {}
    + 94        self.r_values = {}
    + 95        self.deltas = {}
    + 96        self._covobs = {}
    + 97
    + 98        self._value = 0
    + 99        self.N = 0
    +100        self.idl = {}
    +101        if idl is not None:
    +102            for name, idx in sorted(zip(names, idl)):
    +103                if isinstance(idx, range):
    +104                    self.idl[name] = idx
    +105                elif isinstance(idx, (list, np.ndarray)):
    +106                    dc = np.unique(np.diff(idx))
    +107                    if np.any(dc < 0):
    +108                        raise ValueError("Unsorted idx for idl[%s]" % (name))
    +109                    if len(dc) == 1:
    +110                        self.idl[name] = range(idx[0], idx[-1] + dc[0], dc[0])
    +111                    else:
    +112                        self.idl[name] = list(idx)
    +113                else:
    +114                    raise TypeError('incompatible type for idl[%s].' % (name))
    +115        else:
    +116            for name, sample in sorted(zip(names, samples)):
    +117                self.idl[name] = range(1, len(sample) + 1)
    +118
    +119        if kwargs.get("means") is not None:
    +120            for name, sample, mean in sorted(zip(names, samples, kwargs.get("means"))):
    +121                self.shape[name] = len(self.idl[name])
    +122                self.N += self.shape[name]
    +123                self.r_values[name] = mean
    +124                self.deltas[name] = sample
    +125        else:
    +126            for name, sample in sorted(zip(names, samples)):
    +127                self.shape[name] = len(self.idl[name])
    +128                self.N += self.shape[name]
    +129                if len(sample) != self.shape[name]:
    +130                    raise ValueError('Incompatible samples and idx for %s: %d vs. %d' % (name, len(sample), self.shape[name]))
    +131                self.r_values[name] = np.mean(sample)
    +132                self.deltas[name] = sample - self.r_values[name]
    +133                self._value += self.shape[name] * self.r_values[name]
    +134            self._value /= self.N
    +135
    +136        self._dvalue = 0.0
    +137        self.ddvalue = 0.0
    +138        self.reweighted = False
    +139
    +140        self.tag = None
     
    @@ -3240,171 +3363,171 @@ list of ranges or lists on which the samples are defined
    -
    174    def gamma_method(self, **kwargs):
    -175        """Estimate the error and related properties of the Obs.
    -176
    -177        Parameters
    -178        ----------
    -179        S : float
    -180            specifies a custom value for the parameter S (default 2.0).
    -181            If set to 0 it is assumed that the data exhibits no
    -182            autocorrelation. In this case the error estimates coincides
    -183            with the sample standard error.
    -184        tau_exp : float
    -185            positive value triggers the critical slowing down analysis
    -186            (default 0.0).
    -187        N_sigma : float
    -188            number of standard deviations from zero until the tail is
    -189            attached to the autocorrelation function (default 1).
    -190        fft : bool
    -191            determines whether the fft algorithm is used for the computation
    -192            of the autocorrelation function (default True)
    -193        """
    -194
    -195        e_content = self.e_content
    -196        self.e_dvalue = {}
    -197        self.e_ddvalue = {}
    -198        self.e_tauint = {}
    -199        self.e_dtauint = {}
    -200        self.e_windowsize = {}
    -201        self.e_n_tauint = {}
    -202        self.e_n_dtauint = {}
    -203        e_gamma = {}
    -204        self.e_rho = {}
    -205        self.e_drho = {}
    -206        self._dvalue = 0
    -207        self.ddvalue = 0
    -208
    -209        self.S = {}
    -210        self.tau_exp = {}
    -211        self.N_sigma = {}
    -212
    -213        if kwargs.get('fft') is False:
    -214            fft = False
    -215        else:
    -216            fft = True
    -217
    -218        def _parse_kwarg(kwarg_name):
    -219            if kwarg_name in kwargs:
    -220                tmp = kwargs.get(kwarg_name)
    -221                if isinstance(tmp, (int, float)):
    -222                    if tmp < 0:
    -223                        raise Exception(kwarg_name + ' has to be larger or equal to 0.')
    -224                    for e, e_name in enumerate(self.e_names):
    -225                        getattr(self, kwarg_name)[e_name] = tmp
    -226                else:
    -227                    raise TypeError(kwarg_name + ' is not in proper format.')
    -228            else:
    -229                for e, e_name in enumerate(self.e_names):
    -230                    if e_name in getattr(Obs, kwarg_name + '_dict'):
    -231                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name]
    -232                    else:
    -233                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global')
    -234
    -235        _parse_kwarg('S')
    -236        _parse_kwarg('tau_exp')
    -237        _parse_kwarg('N_sigma')
    -238
    -239        for e, e_name in enumerate(self.mc_names):
    -240            gapsize = _determine_gap(self, e_content, e_name)
    -241
    -242            r_length = []
    -243            for r_name in e_content[e_name]:
    -244                if isinstance(self.idl[r_name], range):
    -245                    r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize)
    -246                else:
    -247                    r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize)
    -248
    -249            e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]])
    -250            w_max = max(r_length) // 2
    -251            e_gamma[e_name] = np.zeros(w_max)
    -252            self.e_rho[e_name] = np.zeros(w_max)
    -253            self.e_drho[e_name] = np.zeros(w_max)
    -254
    -255            for r_name in e_content[e_name]:
    -256                e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    -257
    -258            gamma_div = np.zeros(w_max)
    -259            for r_name in e_content[e_name]:
    -260                gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    -261            gamma_div[gamma_div < 1] = 1.0
    -262            e_gamma[e_name] /= gamma_div[:w_max]
    -263
    -264            if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny:  # Prevent division by zero
    -265                self.e_tauint[e_name] = 0.5
    -266                self.e_dtauint[e_name] = 0.0
    -267                self.e_dvalue[e_name] = 0.0
    -268                self.e_ddvalue[e_name] = 0.0
    -269                self.e_windowsize[e_name] = 0
    -270                continue
    -271
    -272            self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0]
    -273            self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:])))
    -274            # Make sure no entry of tauint is smaller than 0.5
    -275            self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps
    -276            # hep-lat/0306017 eq. (42)
    -277            self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N)
    -278            self.e_n_dtauint[e_name][0] = 0.0
    -279
    -280            def _compute_drho(i):
    -281                tmp = (self.e_rho[e_name][i + 1:w_max]
    -282                       + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1],
    -283                                         self.e_rho[e_name][1:max(1, w_max - 2 * i)]])
    -284                       - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i])
    -285                self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N)
    -286
    -287            if self.tau_exp[e_name] > 0:
    -288                _compute_drho(1)
    -289                texp = self.tau_exp[e_name]
    -290                # Critical slowing down analysis
    -291                if w_max // 2 <= 1:
    -292                    raise Exception("Need at least 8 samples for tau_exp error analysis")
    -293                for n in range(1, w_max // 2):
    -294                    _compute_drho(n + 1)
    -295                    if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2:
    -296                        # Bias correction hep-lat/0306017 eq. (49) included
    -297                        self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1])  # The absolute value makes sure that the tail contribution is always positive
    -298                        self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2)
    -299                        # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2
    -300                        self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    -301                        self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    -302                        self.e_windowsize[e_name] = n
    -303                        break
    -304            else:
    -305                if self.S[e_name] == 0.0:
    -306                    self.e_tauint[e_name] = 0.5
    -307                    self.e_dtauint[e_name] = 0.0
    -308                    self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1))
    -309                    self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N)
    -310                    self.e_windowsize[e_name] = 0
    -311                else:
    -312                    # Standard automatic windowing procedure
    -313                    tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1))
    -314                    g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N)
    -315                    for n in range(1, w_max):
    -316                        if g_w[n - 1] < 0 or n >= w_max - 1:
    -317                            _compute_drho(n)
    -318                            self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N)  # Bias correction hep-lat/0306017 eq. (49)
    -319                            self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n]
    -320                            self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    -321                            self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    -322                            self.e_windowsize[e_name] = n
    -323                            break
    -324
    -325            self._dvalue += self.e_dvalue[e_name] ** 2
    -326            self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2
    -327
    -328        for e_name in self.cov_names:
    -329            self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq())
    -330            self.e_ddvalue[e_name] = 0
    -331            self._dvalue += self.e_dvalue[e_name]**2
    -332
    -333        self._dvalue = np.sqrt(self._dvalue)
    -334        if self._dvalue == 0.0:
    -335            self.ddvalue = 0.0
    -336        else:
    -337            self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue
    -338        return
    +            
    175    def gamma_method(self, **kwargs):
    +176        """Estimate the error and related properties of the Obs.
    +177
    +178        Parameters
    +179        ----------
    +180        S : float
    +181            specifies a custom value for the parameter S (default 2.0).
    +182            If set to 0 it is assumed that the data exhibits no
    +183            autocorrelation. In this case the error estimate coincides
    +184            with the sample standard error.
    +185        tau_exp : float
    +186            positive value triggers the critical slowing down analysis
    +187            (default 0.0).
    +188        N_sigma : float
    +189            number of standard deviations from zero until the tail is
    +190            attached to the autocorrelation function (default 1).
    +191        fft : bool
    +192            determines whether the fft algorithm is used for the computation
    +193            of the autocorrelation function (default True)
    +194        """
    +195
    +196        e_content = self.e_content
    +197        self.e_dvalue = {}
    +198        self.e_ddvalue = {}
    +199        self.e_tauint = {}
    +200        self.e_dtauint = {}
    +201        self.e_windowsize = {}
    +202        self.e_n_tauint = {}
    +203        self.e_n_dtauint = {}
    +204        e_gamma = {}
    +205        self.e_rho = {}
    +206        self.e_drho = {}
    +207        self._dvalue = 0
    +208        self.ddvalue = 0
    +209
    +210        self.S = {}
    +211        self.tau_exp = {}
    +212        self.N_sigma = {}
    +213
    +214        if kwargs.get('fft') is False:
    +215            fft = False
    +216        else:
    +217            fft = True
    +218
    +219        def _parse_kwarg(kwarg_name):
    +220            if kwarg_name in kwargs:
    +221                tmp = kwargs.get(kwarg_name)
    +222                if isinstance(tmp, (int, float)):
    +223                    if tmp < 0:
    +224                        raise Exception(kwarg_name + ' has to be larger or equal to 0.')
    +225                    for e, e_name in enumerate(self.e_names):
    +226                        getattr(self, kwarg_name)[e_name] = tmp
    +227                else:
    +228                    raise TypeError(kwarg_name + ' is not in proper format.')
    +229            else:
    +230                for e, e_name in enumerate(self.e_names):
    +231                    if e_name in getattr(Obs, kwarg_name + '_dict'):
    +232                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name]
    +233                    else:
    +234                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global')
    +235
    +236        _parse_kwarg('S')
    +237        _parse_kwarg('tau_exp')
    +238        _parse_kwarg('N_sigma')
    +239
    +240        for e, e_name in enumerate(self.mc_names):
    +241            gapsize = _determine_gap(self, e_content, e_name)
    +242
    +243            r_length = []
    +244            for r_name in e_content[e_name]:
    +245                if isinstance(self.idl[r_name], range):
    +246                    r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize)
    +247                else:
    +248                    r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize)
    +249
    +250            e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]])
    +251            w_max = max(r_length) // 2
    +252            e_gamma[e_name] = np.zeros(w_max)
    +253            self.e_rho[e_name] = np.zeros(w_max)
    +254            self.e_drho[e_name] = np.zeros(w_max)
    +255
    +256            for r_name in e_content[e_name]:
    +257                e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    +258
    +259            gamma_div = np.zeros(w_max)
    +260            for r_name in e_content[e_name]:
    +261                gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    +262            gamma_div[gamma_div < 1] = 1.0
    +263            e_gamma[e_name] /= gamma_div[:w_max]
    +264
    +265            if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny:  # Prevent division by zero
    +266                self.e_tauint[e_name] = 0.5
    +267                self.e_dtauint[e_name] = 0.0
    +268                self.e_dvalue[e_name] = 0.0
    +269                self.e_ddvalue[e_name] = 0.0
    +270                self.e_windowsize[e_name] = 0
    +271                continue
    +272
    +273            self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0]
    +274            self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:])))
    +275            # Make sure no entry of tauint is smaller than 0.5
    +276            self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps
    +277            # hep-lat/0306017 eq. (42)
    +278            self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N)
    +279            self.e_n_dtauint[e_name][0] = 0.0
    +280
    +281            def _compute_drho(i):
    +282                tmp = (self.e_rho[e_name][i + 1:w_max]
    +283                       + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1],
    +284                                         self.e_rho[e_name][1:max(1, w_max - 2 * i)]])
    +285                       - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i])
    +286                self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N)
    +287
    +288            if self.tau_exp[e_name] > 0:
    +289                _compute_drho(1)
    +290                texp = self.tau_exp[e_name]
    +291                # Critical slowing down analysis
    +292                if w_max // 2 <= 1:
    +293                    raise Exception("Need at least 8 samples for tau_exp error analysis")
    +294                for n in range(1, w_max // 2):
    +295                    _compute_drho(n + 1)
    +296                    if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2:
    +297                        # Bias correction hep-lat/0306017 eq. (49) included
    +298                        self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1])  # The absolute value makes sure that the tail contribution is always positive
    +299                        self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2)
    +300                        # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2
    +301                        self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    +302                        self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    +303                        self.e_windowsize[e_name] = n
    +304                        break
    +305            else:
    +306                if self.S[e_name] == 0.0:
    +307                    self.e_tauint[e_name] = 0.5
    +308                    self.e_dtauint[e_name] = 0.0
    +309                    self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1))
    +310                    self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N)
    +311                    self.e_windowsize[e_name] = 0
    +312                else:
    +313                    # Standard automatic windowing procedure
    +314                    tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1))
    +315                    g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N)
    +316                    for n in range(1, w_max):
    +317                        if g_w[n - 1] < 0 or n >= w_max - 1:
    +318                            _compute_drho(n)
    +319                            self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N)  # Bias correction hep-lat/0306017 eq. (49)
    +320                            self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n]
    +321                            self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    +322                            self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    +323                            self.e_windowsize[e_name] = n
    +324                            break
    +325
    +326            self._dvalue += self.e_dvalue[e_name] ** 2
    +327            self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2
    +328
    +329        for e_name in self.cov_names:
    +330            self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq())
    +331            self.e_ddvalue[e_name] = 0
    +332            self._dvalue += self.e_dvalue[e_name]**2
    +333
    +334        self._dvalue = np.sqrt(self._dvalue)
    +335        if self._dvalue == 0.0:
    +336            self.ddvalue = 0.0
    +337        else:
    +338            self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue
    +339        return
     
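As a usage sketch of the windowing options documented in the docstring above (assuming pyerrors exposes Obs, value and dvalue at the package level as usual; the sample data is synthetic):

    import numpy as np
    import pyerrors as pe

    # synthetic samples for a single, hypothetical ensemble
    samples = [np.random.default_rng(0).normal(0.0, 1.0, 1000)]
    obs = pe.Obs(samples, ['ensemble_A'])

    obs.gamma_method()                       # automatic windowing with the default S=2.0
    obs.gamma_method(S=0)                    # no autocorrelation assumed -> plain standard error
    obs.gamma_method(tau_exp=5, N_sigma=2)   # attach an exponential tail (critical slowing down)
    print(obs.value, obs.dvalue)
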
    @@ -3443,171 +3566,171 @@ of the autocorrelation function (default True)
    -
    174    def gamma_method(self, **kwargs):
    -175        """Estimate the error and related properties of the Obs.
    -176
    -177        Parameters
    -178        ----------
    -179        S : float
    -180            specifies a custom value for the parameter S (default 2.0).
    -181            If set to 0 it is assumed that the data exhibits no
    -182            autocorrelation. In this case the error estimate coincides
    -183            with the sample standard error.
    -184        tau_exp : float
    -185            positive value triggers the critical slowing down analysis
    -186            (default 0.0).
    -187        N_sigma : float
    -188            number of standard deviations from zero until the tail is
    -189            attached to the autocorrelation function (default 1).
    -190        fft : bool
    -191            determines whether the fft algorithm is used for the computation
    -192            of the autocorrelation function (default True)
    -193        """
    -194
    -195        e_content = self.e_content
    -196        self.e_dvalue = {}
    -197        self.e_ddvalue = {}
    -198        self.e_tauint = {}
    -199        self.e_dtauint = {}
    -200        self.e_windowsize = {}
    -201        self.e_n_tauint = {}
    -202        self.e_n_dtauint = {}
    -203        e_gamma = {}
    -204        self.e_rho = {}
    -205        self.e_drho = {}
    -206        self._dvalue = 0
    -207        self.ddvalue = 0
    -208
    -209        self.S = {}
    -210        self.tau_exp = {}
    -211        self.N_sigma = {}
    -212
    -213        if kwargs.get('fft') is False:
    -214            fft = False
    -215        else:
    -216            fft = True
    -217
    -218        def _parse_kwarg(kwarg_name):
    -219            if kwarg_name in kwargs:
    -220                tmp = kwargs.get(kwarg_name)
    -221                if isinstance(tmp, (int, float)):
    -222                    if tmp < 0:
    -223                        raise Exception(kwarg_name + ' has to be larger or equal to 0.')
    -224                    for e, e_name in enumerate(self.e_names):
    -225                        getattr(self, kwarg_name)[e_name] = tmp
    -226                else:
    -227                    raise TypeError(kwarg_name + ' is not in proper format.')
    -228            else:
    -229                for e, e_name in enumerate(self.e_names):
    -230                    if e_name in getattr(Obs, kwarg_name + '_dict'):
    -231                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name]
    -232                    else:
    -233                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global')
    -234
    -235        _parse_kwarg('S')
    -236        _parse_kwarg('tau_exp')
    -237        _parse_kwarg('N_sigma')
    -238
    -239        for e, e_name in enumerate(self.mc_names):
    -240            gapsize = _determine_gap(self, e_content, e_name)
    -241
    -242            r_length = []
    -243            for r_name in e_content[e_name]:
    -244                if isinstance(self.idl[r_name], range):
    -245                    r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize)
    -246                else:
    -247                    r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize)
    -248
    -249            e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]])
    -250            w_max = max(r_length) // 2
    -251            e_gamma[e_name] = np.zeros(w_max)
    -252            self.e_rho[e_name] = np.zeros(w_max)
    -253            self.e_drho[e_name] = np.zeros(w_max)
    -254
    -255            for r_name in e_content[e_name]:
    -256                e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    -257
    -258            gamma_div = np.zeros(w_max)
    -259            for r_name in e_content[e_name]:
    -260                gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    -261            gamma_div[gamma_div < 1] = 1.0
    -262            e_gamma[e_name] /= gamma_div[:w_max]
    -263
    -264            if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny:  # Prevent division by zero
    -265                self.e_tauint[e_name] = 0.5
    -266                self.e_dtauint[e_name] = 0.0
    -267                self.e_dvalue[e_name] = 0.0
    -268                self.e_ddvalue[e_name] = 0.0
    -269                self.e_windowsize[e_name] = 0
    -270                continue
    -271
    -272            self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0]
    -273            self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:])))
    -274            # Make sure no entry of tauint is smaller than 0.5
    -275            self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps
    -276            # hep-lat/0306017 eq. (42)
    -277            self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N)
    -278            self.e_n_dtauint[e_name][0] = 0.0
    -279
    -280            def _compute_drho(i):
    -281                tmp = (self.e_rho[e_name][i + 1:w_max]
    -282                       + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1],
    -283                                         self.e_rho[e_name][1:max(1, w_max - 2 * i)]])
    -284                       - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i])
    -285                self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N)
    -286
    -287            if self.tau_exp[e_name] > 0:
    -288                _compute_drho(1)
    -289                texp = self.tau_exp[e_name]
    -290                # Critical slowing down analysis
    -291                if w_max // 2 <= 1:
    -292                    raise Exception("Need at least 8 samples for tau_exp error analysis")
    -293                for n in range(1, w_max // 2):
    -294                    _compute_drho(n + 1)
    -295                    if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2:
    -296                        # Bias correction hep-lat/0306017 eq. (49) included
    -297                        self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1])  # The absolute value makes sure that the tail contribution is always positive
    -298                        self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2)
    -299                        # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2
    -300                        self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    -301                        self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    -302                        self.e_windowsize[e_name] = n
    -303                        break
    -304            else:
    -305                if self.S[e_name] == 0.0:
    -306                    self.e_tauint[e_name] = 0.5
    -307                    self.e_dtauint[e_name] = 0.0
    -308                    self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1))
    -309                    self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N)
    -310                    self.e_windowsize[e_name] = 0
    -311                else:
    -312                    # Standard automatic windowing procedure
    -313                    tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1))
    -314                    g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N)
    -315                    for n in range(1, w_max):
    -316                        if g_w[n - 1] < 0 or n >= w_max - 1:
    -317                            _compute_drho(n)
    -318                            self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N)  # Bias correction hep-lat/0306017 eq. (49)
    -319                            self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n]
    -320                            self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    -321                            self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    -322                            self.e_windowsize[e_name] = n
    -323                            break
    -324
    -325            self._dvalue += self.e_dvalue[e_name] ** 2
    -326            self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2
    -327
    -328        for e_name in self.cov_names:
    -329            self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq())
    -330            self.e_ddvalue[e_name] = 0
    -331            self._dvalue += self.e_dvalue[e_name]**2
    -332
    -333        self._dvalue = np.sqrt(self._dvalue)
    -334        if self._dvalue == 0.0:
    -335            self.ddvalue = 0.0
    -336        else:
    -337            self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue
    -338        return
    +            
    175    def gamma_method(self, **kwargs):
    +176        """Estimate the error and related properties of the Obs.
    +177
    +178        Parameters
    +179        ----------
    +180        S : float
    +181            specifies a custom value for the parameter S (default 2.0).
    +182            If set to 0 it is assumed that the data exhibits no
    +183            autocorrelation. In this case the error estimate coincides
    +184            with the sample standard error.
    +185        tau_exp : float
    +186            positive value triggers the critical slowing down analysis
    +187            (default 0.0).
    +188        N_sigma : float
    +189            number of standard deviations from zero until the tail is
    +190            attached to the autocorrelation function (default 1).
    +191        fft : bool
    +192            determines whether the fft algorithm is used for the computation
    +193            of the autocorrelation function (default True)
    +194        """
    +195
    +196        e_content = self.e_content
    +197        self.e_dvalue = {}
    +198        self.e_ddvalue = {}
    +199        self.e_tauint = {}
    +200        self.e_dtauint = {}
    +201        self.e_windowsize = {}
    +202        self.e_n_tauint = {}
    +203        self.e_n_dtauint = {}
    +204        e_gamma = {}
    +205        self.e_rho = {}
    +206        self.e_drho = {}
    +207        self._dvalue = 0
    +208        self.ddvalue = 0
    +209
    +210        self.S = {}
    +211        self.tau_exp = {}
    +212        self.N_sigma = {}
    +213
    +214        if kwargs.get('fft') is False:
    +215            fft = False
    +216        else:
    +217            fft = True
    +218
    +219        def _parse_kwarg(kwarg_name):
    +220            if kwarg_name in kwargs:
    +221                tmp = kwargs.get(kwarg_name)
    +222                if isinstance(tmp, (int, float)):
    +223                    if tmp < 0:
    +224                        raise Exception(kwarg_name + ' has to be larger or equal to 0.')
    +225                    for e, e_name in enumerate(self.e_names):
    +226                        getattr(self, kwarg_name)[e_name] = tmp
    +227                else:
    +228                    raise TypeError(kwarg_name + ' is not in proper format.')
    +229            else:
    +230                for e, e_name in enumerate(self.e_names):
    +231                    if e_name in getattr(Obs, kwarg_name + '_dict'):
    +232                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_dict')[e_name]
    +233                    else:
    +234                        getattr(self, kwarg_name)[e_name] = getattr(Obs, kwarg_name + '_global')
    +235
    +236        _parse_kwarg('S')
    +237        _parse_kwarg('tau_exp')
    +238        _parse_kwarg('N_sigma')
    +239
    +240        for e, e_name in enumerate(self.mc_names):
    +241            gapsize = _determine_gap(self, e_content, e_name)
    +242
    +243            r_length = []
    +244            for r_name in e_content[e_name]:
    +245                if isinstance(self.idl[r_name], range):
    +246                    r_length.append(len(self.idl[r_name]) * self.idl[r_name].step // gapsize)
    +247                else:
    +248                    r_length.append((self.idl[r_name][-1] - self.idl[r_name][0] + 1) // gapsize)
    +249
    +250            e_N = np.sum([self.shape[r_name] for r_name in e_content[e_name]])
    +251            w_max = max(r_length) // 2
    +252            e_gamma[e_name] = np.zeros(w_max)
    +253            self.e_rho[e_name] = np.zeros(w_max)
    +254            self.e_drho[e_name] = np.zeros(w_max)
    +255
    +256            for r_name in e_content[e_name]:
    +257                e_gamma[e_name] += self._calc_gamma(self.deltas[r_name], self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    +258
    +259            gamma_div = np.zeros(w_max)
    +260            for r_name in e_content[e_name]:
    +261                gamma_div += self._calc_gamma(np.ones((self.shape[r_name])), self.idl[r_name], self.shape[r_name], w_max, fft, gapsize)
    +262            gamma_div[gamma_div < 1] = 1.0
    +263            e_gamma[e_name] /= gamma_div[:w_max]
    +264
    +265            if np.abs(e_gamma[e_name][0]) < 10 * np.finfo(float).tiny:  # Prevent division by zero
    +266                self.e_tauint[e_name] = 0.5
    +267                self.e_dtauint[e_name] = 0.0
    +268                self.e_dvalue[e_name] = 0.0
    +269                self.e_ddvalue[e_name] = 0.0
    +270                self.e_windowsize[e_name] = 0
    +271                continue
    +272
    +273            self.e_rho[e_name] = e_gamma[e_name][:w_max] / e_gamma[e_name][0]
    +274            self.e_n_tauint[e_name] = np.cumsum(np.concatenate(([0.5], self.e_rho[e_name][1:])))
    +275            # Make sure no entry of tauint is smaller than 0.5
    +276            self.e_n_tauint[e_name][self.e_n_tauint[e_name] <= 0.5] = 0.5 + np.finfo(np.float64).eps
    +277            # hep-lat/0306017 eq. (42)
    +278            self.e_n_dtauint[e_name] = self.e_n_tauint[e_name] * 2 * np.sqrt(np.abs(np.arange(w_max) + 0.5 - self.e_n_tauint[e_name]) / e_N)
    +279            self.e_n_dtauint[e_name][0] = 0.0
    +280
    +281            def _compute_drho(i):
    +282                tmp = (self.e_rho[e_name][i + 1:w_max]
    +283                       + np.concatenate([self.e_rho[e_name][i - 1:None if i - (w_max - 1) // 2 <= 0 else (2 * i - (2 * w_max) // 2):-1],
    +284                                         self.e_rho[e_name][1:max(1, w_max - 2 * i)]])
    +285                       - 2 * self.e_rho[e_name][i] * self.e_rho[e_name][1:w_max - i])
    +286                self.e_drho[e_name][i] = np.sqrt(np.sum(tmp ** 2) / e_N)
    +287
    +288            if self.tau_exp[e_name] > 0:
    +289                _compute_drho(1)
    +290                texp = self.tau_exp[e_name]
    +291                # Critical slowing down analysis
    +292                if w_max // 2 <= 1:
    +293                    raise Exception("Need at least 8 samples for tau_exp error analysis")
    +294                for n in range(1, w_max // 2):
    +295                    _compute_drho(n + 1)
    +296                    if (self.e_rho[e_name][n] - self.N_sigma[e_name] * self.e_drho[e_name][n]) < 0 or n >= w_max // 2 - 2:
    +297                        # Bias correction hep-lat/0306017 eq. (49) included
    +298                        self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N) + texp * np.abs(self.e_rho[e_name][n + 1])  # The absolute value makes sure that the tail contribution is always positive
    +299                        self.e_dtauint[e_name] = np.sqrt(self.e_n_dtauint[e_name][n] ** 2 + texp ** 2 * self.e_drho[e_name][n + 1] ** 2)
    +300                        # Error of tau_exp neglected so far, missing term: self.e_rho[e_name][n + 1] ** 2 * d_tau_exp ** 2
    +301                        self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    +302                        self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    +303                        self.e_windowsize[e_name] = n
    +304                        break
    +305            else:
    +306                if self.S[e_name] == 0.0:
    +307                    self.e_tauint[e_name] = 0.5
    +308                    self.e_dtauint[e_name] = 0.0
    +309                    self.e_dvalue[e_name] = np.sqrt(e_gamma[e_name][0] / (e_N - 1))
    +310                    self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt(0.5 / e_N)
    +311                    self.e_windowsize[e_name] = 0
    +312                else:
    +313                    # Standard automatic windowing procedure
    +314                    tau = self.S[e_name] / np.log((2 * self.e_n_tauint[e_name][1:] + 1) / (2 * self.e_n_tauint[e_name][1:] - 1))
    +315                    g_w = np.exp(- np.arange(1, len(tau) + 1) / tau) - tau / np.sqrt(np.arange(1, len(tau) + 1) * e_N)
    +316                    for n in range(1, w_max):
    +317                        if g_w[n - 1] < 0 or n >= w_max - 1:
    +318                            _compute_drho(n)
    +319                            self.e_tauint[e_name] = self.e_n_tauint[e_name][n] * (1 + (2 * n + 1) / e_N) / (1 + 1 / e_N)  # Bias correction hep-lat/0306017 eq. (49)
    +320                            self.e_dtauint[e_name] = self.e_n_dtauint[e_name][n]
    +321                            self.e_dvalue[e_name] = np.sqrt(2 * self.e_tauint[e_name] * e_gamma[e_name][0] * (1 + 1 / e_N) / e_N)
    +322                            self.e_ddvalue[e_name] = self.e_dvalue[e_name] * np.sqrt((n + 0.5) / e_N)
    +323                            self.e_windowsize[e_name] = n
    +324                            break
    +325
    +326            self._dvalue += self.e_dvalue[e_name] ** 2
    +327            self.ddvalue += (self.e_dvalue[e_name] * self.e_ddvalue[e_name]) ** 2
    +328
    +329        for e_name in self.cov_names:
    +330            self.e_dvalue[e_name] = np.sqrt(self.covobs[e_name].errsq())
    +331            self.e_ddvalue[e_name] = 0
    +332            self._dvalue += self.e_dvalue[e_name]**2
    +333
    +334        self._dvalue = np.sqrt(self._dvalue)
    +335        if self._dvalue == 0.0:
    +336            self.ddvalue = 0.0
    +337        else:
    +338            self.ddvalue = np.sqrt(self.ddvalue) / self._dvalue
    +339        return
     
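The _parse_kwarg helper shown above falls back to the class-level dictionaries when no keyword argument is given, so per-ensemble defaults can be set once; a minimal sketch under the same assumptions as before:

    import numpy as np
    import pyerrors as pe

    # per-ensemble defaults consulted by _parse_kwarg when no keyword is passed
    pe.Obs.S_dict['ensemble_A'] = 1.5        # overrides S_global for this ensemble only
    pe.Obs.tau_exp_dict['ensemble_A'] = 10   # switches on the tail analysis for this ensemble

    obs = pe.Obs([np.random.default_rng(1).normal(0.0, 1.0, 1000)], ['ensemble_A'])
    obs.gamma_method()   # picks up S=1.5 and tau_exp=10 from the dictionaries above
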
    @@ -3646,74 +3769,74 @@ of the autocorrelation function (default True)
    -
    378    def details(self, ens_content=True):
    -379        """Output detailed properties of the Obs.
    -380
    -381        Parameters
    -382        ----------
    -383        ens_content : bool
    -384            print details about the ensembles and replica if True.
    -385        """
    -386        if self.tag is not None:
    -387            print("Description:", self.tag)
    -388        if not hasattr(self, 'e_dvalue'):
    -389            print('Result\t %3.8e' % (self.value))
    -390        else:
    -391            if self.value == 0.0:
    -392                percentage = np.nan
    -393            else:
    -394                percentage = np.abs(self._dvalue / self.value) * 100
    -395            print('Result\t %3.8e +/- %3.8e +/- %3.8e (%3.3f%%)' % (self.value, self._dvalue, self.ddvalue, percentage))
    -396            if len(self.e_names) > 1:
    -397                print(' Ensemble errors:')
    -398            e_content = self.e_content
    -399            for e_name in self.mc_names:
    -400                gap = _determine_gap(self, e_content, e_name)
    -401
    -402                if len(self.e_names) > 1:
    -403                    print('', e_name, '\t %3.6e +/- %3.6e' % (self.e_dvalue[e_name], self.e_ddvalue[e_name]))
    -404                tau_string = " \N{GREEK SMALL LETTER TAU}_int\t " + _format_uncertainty(self.e_tauint[e_name], self.e_dtauint[e_name])
    -405                tau_string += f" in units of {gap} config"
    -406                if gap > 1:
    -407                    tau_string += "s"
    -408                if self.tau_exp[e_name] > 0:
    -409                    tau_string = f"{tau_string: <45}" + '\t(\N{GREEK SMALL LETTER TAU}_exp=%3.2f, N_\N{GREEK SMALL LETTER SIGMA}=%1.0i)' % (self.tau_exp[e_name], self.N_sigma[e_name])
    -410                else:
    -411                    tau_string = f"{tau_string: <45}" + '\t(S=%3.2f)' % (self.S[e_name])
    -412                print(tau_string)
    -413            for e_name in self.cov_names:
    -414                print('', e_name, '\t %3.8e' % (self.e_dvalue[e_name]))
    -415        if ens_content is True:
    -416            if len(self.e_names) == 1:
    -417                print(self.N, 'samples in', len(self.e_names), 'ensemble:')
    -418            else:
    -419                print(self.N, 'samples in', len(self.e_names), 'ensembles:')
    -420            my_string_list = []
    -421            for key, value in sorted(self.e_content.items()):
    -422                if key not in self.covobs:
    -423                    my_string = '  ' + "\u00B7 Ensemble '" + key + "' "
    -424                    if len(value) == 1:
    -425                        my_string += f': {self.shape[value[0]]} configurations'
    -426                        if isinstance(self.idl[value[0]], range):
    -427                            my_string += f' (from {self.idl[value[0]].start} to {self.idl[value[0]][-1]}' + int(self.idl[value[0]].step != 1) * f' in steps of {self.idl[value[0]].step}' + ')'
    -428                        else:
    -429                            my_string += f' (irregular range from {self.idl[value[0]][0]} to {self.idl[value[0]][-1]})'
    -430                    else:
    -431                        sublist = []
    -432                        for v in value:
    -433                            my_substring = '    ' + "\u00B7 Replicum '" + v[len(key) + 1:] + "' "
    -434                            my_substring += f': {self.shape[v]} configurations'
    -435                            if isinstance(self.idl[v], range):
    -436                                my_substring += f' (from {self.idl[v].start} to {self.idl[v][-1]}' + int(self.idl[v].step != 1) * f' in steps of {self.idl[v].step}' + ')'
    -437                            else:
    -438                                my_substring += f' (irregular range from {self.idl[v][0]} to {self.idl[v][-1]})'
    -439                            sublist.append(my_substring)
    -440
    -441                        my_string += '\n' + '\n'.join(sublist)
    -442                else:
    -443                    my_string = '  ' + "\u00B7 Covobs   '" + key + "' "
    -444                my_string_list.append(my_string)
    -445            print('\n'.join(my_string_list))
    +            
    379    def details(self, ens_content=True):
    +380        """Output detailed properties of the Obs.
    +381
    +382        Parameters
    +383        ----------
    +384        ens_content : bool
    +385            print details about the ensembles and replica if True.
    +386        """
    +387        if self.tag is not None:
    +388            print("Description:", self.tag)
    +389        if not hasattr(self, 'e_dvalue'):
    +390            print('Result\t %3.8e' % (self.value))
    +391        else:
    +392            if self.value == 0.0:
    +393                percentage = np.nan
    +394            else:
    +395                percentage = np.abs(self._dvalue / self.value) * 100
    +396            print('Result\t %3.8e +/- %3.8e +/- %3.8e (%3.3f%%)' % (self.value, self._dvalue, self.ddvalue, percentage))
    +397            if len(self.e_names) > 1:
    +398                print(' Ensemble errors:')
    +399            e_content = self.e_content
    +400            for e_name in self.mc_names:
    +401                gap = _determine_gap(self, e_content, e_name)
    +402
    +403                if len(self.e_names) > 1:
    +404                    print('', e_name, '\t %3.6e +/- %3.6e' % (self.e_dvalue[e_name], self.e_ddvalue[e_name]))
    +405                tau_string = " \N{GREEK SMALL LETTER TAU}_int\t " + _format_uncertainty(self.e_tauint[e_name], self.e_dtauint[e_name])
    +406                tau_string += f" in units of {gap} config"
    +407                if gap > 1:
    +408                    tau_string += "s"
    +409                if self.tau_exp[e_name] > 0:
    +410                    tau_string = f"{tau_string: <45}" + '\t(\N{GREEK SMALL LETTER TAU}_exp=%3.2f, N_\N{GREEK SMALL LETTER SIGMA}=%1.0i)' % (self.tau_exp[e_name], self.N_sigma[e_name])
    +411                else:
    +412                    tau_string = f"{tau_string: <45}" + '\t(S=%3.2f)' % (self.S[e_name])
    +413                print(tau_string)
    +414            for e_name in self.cov_names:
    +415                print('', e_name, '\t %3.8e' % (self.e_dvalue[e_name]))
    +416        if ens_content is True:
    +417            if len(self.e_names) == 1:
    +418                print(self.N, 'samples in', len(self.e_names), 'ensemble:')
    +419            else:
    +420                print(self.N, 'samples in', len(self.e_names), 'ensembles:')
    +421            my_string_list = []
    +422            for key, value in sorted(self.e_content.items()):
    +423                if key not in self.covobs:
    +424                    my_string = '  ' + "\u00B7 Ensemble '" + key + "' "
    +425                    if len(value) == 1:
    +426                        my_string += f': {self.shape[value[0]]} configurations'
    +427                        if isinstance(self.idl[value[0]], range):
    +428                            my_string += f' (from {self.idl[value[0]].start} to {self.idl[value[0]][-1]}' + int(self.idl[value[0]].step != 1) * f' in steps of {self.idl[value[0]].step}' + ')'
    +429                        else:
    +430                            my_string += f' (irregular range from {self.idl[value[0]][0]} to {self.idl[value[0]][-1]})'
    +431                    else:
    +432                        sublist = []
    +433                        for v in value:
    +434                            my_substring = '    ' + "\u00B7 Replicum '" + v[len(key) + 1:] + "' "
    +435                            my_substring += f': {self.shape[v]} configurations'
    +436                            if isinstance(self.idl[v], range):
    +437                                my_substring += f' (from {self.idl[v].start} to {self.idl[v][-1]}' + int(self.idl[v].step != 1) * f' in steps of {self.idl[v].step}' + ')'
    +438                            else:
    +439                                my_substring += f' (irregular range from {self.idl[v][0]} to {self.idl[v][-1]})'
    +440                            sublist.append(my_substring)
    +441
    +442                        my_string += '\n' + '\n'.join(sublist)
    +443                else:
    +444                    my_string = '  ' + "\u00B7 Covobs   '" + key + "' "
    +445                my_string_list.append(my_string)
    +446            print('\n'.join(my_string_list))
     
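A short sketch of how the report above might be produced (hypothetical, synthetic data; assumes the top-level pyerrors import used before):

    import numpy as np
    import pyerrors as pe

    obs = pe.Obs([np.random.default_rng(2).normal(1.0, 0.1, 500)], ['ensemble_A'])
    obs.gamma_method()
    obs.details()                    # result, error, tau_int and per-ensemble breakdown
    obs.details(ens_content=False)   # suppress the per-replicum configuration listing
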
    @@ -3740,20 +3863,20 @@ print details about the ensembles and replica if true.
    -
    447    def reweight(self, weight):
    -448        """Reweight the obs with given rewighting factors.
    -449
    -450        Parameters
    -451        ----------
    -452        weight : Obs
    -453            Reweighting factor. An Observable that has to be defined on a superset of the
    -454            configurations in obs[i].idl for all i.
    -455        all_configs : bool
    -456            if True, the reweighted observables are normalized by the average of
    -457            the reweighting factor on all configurations in weight.idl and not
    -458            on the configurations in obs[i].idl. Default False.
    -459        """
    -460        return reweight(weight, [self])[0]
    +            
    448    def reweight(self, weight):
    +449        """Reweight the obs with given rewighting factors.
    +450
    +451        Parameters
    +452        ----------
    +453        weight : Obs
    +454            Reweighting factor. An Observable that has to be defined on a superset of the
    +455            configurations in obs[i].idl for all i.
    +456        all_configs : bool
    +457            if True, the reweighted observables are normalized by the average of
    +458            the reweighting factor on all configurations in weight.idl and not
    +459            on the configurations in obs[i].idl. Default False.
    +460        """
    +461        return reweight(weight, [self])[0]
     
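A hedged sketch of the reweighting call; the weight is a hypothetical Obs defined on the same configurations as obs, as required by the docstring:

    import numpy as np
    import pyerrors as pe

    rng = np.random.default_rng(3)
    obs = pe.Obs([rng.normal(0.0, 1.0, 500)], ['ensemble_A'])
    weight = pe.Obs([np.abs(rng.normal(1.0, 0.05, 500))], ['ensemble_A'])  # hypothetical reweighting factor

    reweighted = obs.reweight(weight)   # thin wrapper around the module-level reweight()
    reweighted.gamma_method()
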
    @@ -3785,17 +3908,17 @@ on the configurations in obs[i].idl. Default False.
    -
    462    def is_zero_within_error(self, sigma=1):
    -463        """Checks whether the observable is zero within 'sigma' standard errors.
    -464
    -465        Parameters
    -466        ----------
    -467        sigma : int
    -468            Number of standard errors used for the check.
    -469
    -470        Works properly only when the gamma method was run.
    -471        """
    -472        return self.is_zero() or np.abs(self.value) <= sigma * self._dvalue
    +            
    463    def is_zero_within_error(self, sigma=1):
    +464        """Checks whether the observable is zero within 'sigma' standard errors.
    +465
    +466        Parameters
    +467        ----------
    +468        sigma : int
    +469            Number of standard errors used for the check.
    +470
    +471        Works properly only when the gamma method was run.
    +472        """
    +473        return self.is_zero() or np.abs(self.value) <= sigma * self._dvalue
     
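A minimal sketch of the significance check (synthetic data; gamma_method has to be run first so that the error is available):

    import numpy as np
    import pyerrors as pe

    obs = pe.Obs([np.random.default_rng(4).normal(0.02, 1.0, 1000)], ['ensemble_A'])
    obs.gamma_method()                         # required: the check compares against the estimated error
    print(obs.is_zero_within_error())          # 1 sigma criterion
    print(obs.is_zero_within_error(sigma=2))   # looser 2 sigma criterion
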
    @@ -3823,15 +3946,15 @@ Number of standard errors used for the check.
    -
    474    def is_zero(self, atol=1e-10):
    -475        """Checks whether the observable is zero within a given tolerance.
    -476
    -477        Parameters
    -478        ----------
    -479        atol : float
    -480            Absolute tolerance (for details see numpy documentation).
    -481        """
    -482        return np.isclose(0.0, self.value, 1e-14, atol) and all(np.allclose(0.0, delta, 1e-14, atol) for delta in self.deltas.values()) and all(np.allclose(0.0, delta.errsq(), 1e-14, atol) for delta in self.covobs.values())
    +            
    475    def is_zero(self, atol=1e-10):
    +476        """Checks whether the observable is zero within a given tolerance.
    +477
    +478        Parameters
    +479        ----------
    +480        atol : float
    +481            Absolute tolerance (for details see numpy documentation).
    +482        """
    +483        return np.isclose(0.0, self.value, 1e-14, atol) and all(np.allclose(0.0, delta, 1e-14, atol) for delta in self.deltas.values()) and all(np.allclose(0.0, delta.errsq(), 1e-14, atol) for delta in self.covobs.values())
     
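In contrast to the statistical check above, is_zero tests for exact cancellation; a sketch under the same assumptions:

    import numpy as np
    import pyerrors as pe

    obs = pe.Obs([np.random.default_rng(5).normal(1.0, 0.1, 500)], ['ensemble_A'])
    diff = obs - obs                  # value and deltas cancel exactly
    print(diff.is_zero())             # True within the default atol=1e-10
    print(diff.is_zero(atol=1e-14))   # tighter absolute tolerance
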
    @@ -3858,45 +3981,45 @@ Absolute tolerance (for details see numpy documentation).
    -
    484    def plot_tauint(self, save=None):
    -485        """Plot integrated autocorrelation time for each ensemble.
    -486
    -487        Parameters
    -488        ----------
    -489        save : str
    -490            saves the figure to a file named 'save' if set.
    -491        """
    -492        if not hasattr(self, 'e_dvalue'):
    -493            raise Exception('Run the gamma method first.')
    -494
    -495        for e, e_name in enumerate(self.mc_names):
    -496            fig = plt.figure()
    -497            plt.xlabel(r'$W$')
    -498            plt.ylabel(r'$\tau_\mathrm{int}$')
    -499            length = int(len(self.e_n_tauint[e_name]))
    -500            if self.tau_exp[e_name] > 0:
    -501                base = self.e_n_tauint[e_name][self.e_windowsize[e_name]]
    -502                x_help = np.arange(2 * self.tau_exp[e_name])
    -503                y_help = (x_help + 1) * np.abs(self.e_rho[e_name][self.e_windowsize[e_name] + 1]) * (1 - x_help / (2 * (2 * self.tau_exp[e_name] - 1))) + base
    -504                x_arr = np.arange(self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name])
    -505                plt.plot(x_arr, y_help, 'C' + str(e), linewidth=1, ls='--', marker=',')
    -506                plt.errorbar([self.e_windowsize[e_name] + 2 * self.tau_exp[e_name]], [self.e_tauint[e_name]],
    -507                             yerr=[self.e_dtauint[e_name]], fmt='C' + str(e), linewidth=1, capsize=2, marker='o', mfc=plt.rcParams['axes.facecolor'])
    -508                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    -509                label = e_name + r', $\tau_\mathrm{exp}$=' + str(np.around(self.tau_exp[e_name], decimals=2))
    -510            else:
    -511                label = e_name + ', S=' + str(np.around(self.S[e_name], decimals=2))
    -512                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    -513
    -514            plt.errorbar(np.arange(length)[:int(xmax) + 1], self.e_n_tauint[e_name][:int(xmax) + 1], yerr=self.e_n_dtauint[e_name][:int(xmax) + 1], linewidth=1, capsize=2, label=label)
    -515            plt.axvline(x=self.e_windowsize[e_name], color='C' + str(e), alpha=0.5, marker=',', ls='--')
    -516            plt.legend()
    -517            plt.xlim(-0.5, xmax)
    -518            ylim = plt.ylim()
    -519            plt.ylim(bottom=0.0, top=max(1.0, ylim[1]))
    -520            plt.draw()
    -521            if save:
    -522                fig.savefig(save + "_" + str(e))
    +            
    485    def plot_tauint(self, save=None):
    +486        """Plot integrated autocorrelation time for each ensemble.
    +487
    +488        Parameters
    +489        ----------
    +490        save : str
    +491            saves the figure to a file named 'save' if set.
    +492        """
    +493        if not hasattr(self, 'e_dvalue'):
    +494            raise Exception('Run the gamma method first.')
    +495
    +496        for e, e_name in enumerate(self.mc_names):
    +497            fig = plt.figure()
    +498            plt.xlabel(r'$W$')
    +499            plt.ylabel(r'$\tau_\mathrm{int}$')
    +500            length = int(len(self.e_n_tauint[e_name]))
    +501            if self.tau_exp[e_name] > 0:
    +502                base = self.e_n_tauint[e_name][self.e_windowsize[e_name]]
    +503                x_help = np.arange(2 * self.tau_exp[e_name])
    +504                y_help = (x_help + 1) * np.abs(self.e_rho[e_name][self.e_windowsize[e_name] + 1]) * (1 - x_help / (2 * (2 * self.tau_exp[e_name] - 1))) + base
    +505                x_arr = np.arange(self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name])
    +506                plt.plot(x_arr, y_help, 'C' + str(e), linewidth=1, ls='--', marker=',')
    +507                plt.errorbar([self.e_windowsize[e_name] + 2 * self.tau_exp[e_name]], [self.e_tauint[e_name]],
    +508                             yerr=[self.e_dtauint[e_name]], fmt='C' + str(e), linewidth=1, capsize=2, marker='o', mfc=plt.rcParams['axes.facecolor'])
    +509                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    +510                label = e_name + r', $\tau_\mathrm{exp}$=' + str(np.around(self.tau_exp[e_name], decimals=2))
    +511            else:
    +512                label = e_name + ', S=' + str(np.around(self.S[e_name], decimals=2))
    +513                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    +514
    +515            plt.errorbar(np.arange(length)[:int(xmax) + 1], self.e_n_tauint[e_name][:int(xmax) + 1], yerr=self.e_n_dtauint[e_name][:int(xmax) + 1], linewidth=1, capsize=2, label=label)
    +516            plt.axvline(x=self.e_windowsize[e_name], color='C' + str(e), alpha=0.5, marker=',', ls='--')
    +517            plt.legend()
    +518            plt.xlim(-0.5, xmax)
    +519            ylim = plt.ylim()
    +520            plt.ylim(bottom=0.0, top=max(1.0, ylim[1]))
    +521            plt.draw()
    +522            if save:
    +523                fig.savefig(save + "_" + str(e))
     
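A usage sketch for the tau_int plot (synthetic data; the file names follow from the savefig call above):

    import numpy as np
    import pyerrors as pe

    obs = pe.Obs([np.random.default_rng(6).normal(0.0, 1.0, 1000)], ['ensemble_A'])
    obs.gamma_method(tau_exp=5)      # the tail contribution is drawn as a dashed extrapolation
    obs.plot_tauint()                # one figure per Monte Carlo ensemble
    obs.plot_tauint(save='tauint')   # additionally writes tauint_0, tauint_1, ...
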
    @@ -3923,36 +4046,36 @@ saves the figure to a file named 'save' if.
    -
    524    def plot_rho(self, save=None):
    -525        """Plot normalized autocorrelation function time for each ensemble.
    -526
    -527        Parameters
    -528        ----------
    -529        save : str
    -530            saves the figure to a file named 'save' if set.
    -531        """
    -532        if not hasattr(self, 'e_dvalue'):
    -533            raise Exception('Run the gamma method first.')
    -534        for e, e_name in enumerate(self.mc_names):
    -535            fig = plt.figure()
    -536            plt.xlabel('W')
    -537            plt.ylabel('rho')
    -538            length = int(len(self.e_drho[e_name]))
    -539            plt.errorbar(np.arange(length), self.e_rho[e_name][:length], yerr=self.e_drho[e_name][:], linewidth=1, capsize=2)
    -540            plt.axvline(x=self.e_windowsize[e_name], color='r', alpha=0.25, ls='--', marker=',')
    -541            if self.tau_exp[e_name] > 0:
    -542                plt.plot([self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]],
    -543                         [self.e_rho[e_name][self.e_windowsize[e_name] + 1], 0], 'k-', lw=1)
    -544                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    -545                plt.title('Rho ' + e_name + r', tau\_exp=' + str(np.around(self.tau_exp[e_name], decimals=2)))
    -546            else:
    -547                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    -548                plt.title('Rho ' + e_name + ', S=' + str(np.around(self.S[e_name], decimals=2)))
    -549            plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1)
    -550            plt.xlim(-0.5, xmax)
    -551            plt.draw()
    -552            if save:
    -553                fig.savefig(save + "_" + str(e))
    +            
    525    def plot_rho(self, save=None):
    +526        """Plot normalized autocorrelation function time for each ensemble.
    +527
    +528        Parameters
    +529        ----------
    +530        save : str
    +531            saves the figure to a file named 'save' if.
    +532        """
    +533        if not hasattr(self, 'e_dvalue'):
    +534            raise Exception('Run the gamma method first.')
    +535        for e, e_name in enumerate(self.mc_names):
    +536            fig = plt.figure()
    +537            plt.xlabel('W')
    +538            plt.ylabel('rho')
    +539            length = int(len(self.e_drho[e_name]))
    +540            plt.errorbar(np.arange(length), self.e_rho[e_name][:length], yerr=self.e_drho[e_name][:], linewidth=1, capsize=2)
    +541            plt.axvline(x=self.e_windowsize[e_name], color='r', alpha=0.25, ls='--', marker=',')
    +542            if self.tau_exp[e_name] > 0:
    +543                plt.plot([self.e_windowsize[e_name] + 1, self.e_windowsize[e_name] + 1 + 2 * self.tau_exp[e_name]],
    +544                         [self.e_rho[e_name][self.e_windowsize[e_name] + 1], 0], 'k-', lw=1)
    +545                xmax = self.e_windowsize[e_name] + 2 * self.tau_exp[e_name] + 1.5
    +546                plt.title('Rho ' + e_name + r', tau\_exp=' + str(np.around(self.tau_exp[e_name], decimals=2)))
    +547            else:
    +548                xmax = max(10.5, 2 * self.e_windowsize[e_name] - 0.5)
    +549                plt.title('Rho ' + e_name + ', S=' + str(np.around(self.S[e_name], decimals=2)))
    +550            plt.plot([-0.5, xmax], [0, 0], 'k--', lw=1)
    +551            plt.xlim(-0.5, xmax)
    +552            plt.draw()
    +553            if save:
    +554                fig.savefig(save + "_" + str(e))
     
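The corresponding sketch for the normalized autocorrelation function, under the same assumptions:

    import numpy as np
    import pyerrors as pe

    obs = pe.Obs([np.random.default_rng(7).normal(0.0, 1.0, 1000)], ['ensemble_A'])
    obs.gamma_method()
    obs.plot_rho()             # rho(W) with the chosen window marked by the dashed line
    obs.plot_rho(save='rho')   # saved as rho_0, rho_1, ... one file per ensemble
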
    @@ -3979,27 +4102,27 @@ saves the figure to a file named 'save' if.
    -
    555    def plot_rep_dist(self):
    -556        """Plot replica distribution for each ensemble with more than one replicum."""
    -557        if not hasattr(self, 'e_dvalue'):
    -558            raise Exception('Run the gamma method first.')
    -559        for e, e_name in enumerate(self.mc_names):
    -560            if len(self.e_content[e_name]) == 1:
    -561                print('No replica distribution for a single replicum (', e_name, ')')
    -562                continue
    -563            r_length = []
    -564            sub_r_mean = 0
    -565            for r, r_name in enumerate(self.e_content[e_name]):
    -566                r_length.append(len(self.deltas[r_name]))
    -567                sub_r_mean += self.shape[r_name] * self.r_values[r_name]
    -568            e_N = np.sum(r_length)
    -569            sub_r_mean /= e_N
    -570            arr = np.zeros(len(self.e_content[e_name]))
    -571            for r, r_name in enumerate(self.e_content[e_name]):
    -572                arr[r] = (self.r_values[r_name] - sub_r_mean) / (self.e_dvalue[e_name] * np.sqrt(e_N / self.shape[r_name] - 1))
    -573            plt.hist(arr, rwidth=0.8, bins=len(self.e_content[e_name]))
    -574            plt.title('Replica distribution' + e_name + ' (mean=0, var=1)')
    -575            plt.draw()
    +            
    556    def plot_rep_dist(self):
    +557        """Plot replica distribution for each ensemble with more than one replicum."""
    +558        if not hasattr(self, 'e_dvalue'):
    +559            raise Exception('Run the gamma method first.')
    +560        for e, e_name in enumerate(self.mc_names):
    +561            if len(self.e_content[e_name]) == 1:
    +562                print('No replica distribution for a single replicum (', e_name, ')')
    +563                continue
    +564            r_length = []
    +565            sub_r_mean = 0
    +566            for r, r_name in enumerate(self.e_content[e_name]):
    +567                r_length.append(len(self.deltas[r_name]))
    +568                sub_r_mean += self.shape[r_name] * self.r_values[r_name]
    +569            e_N = np.sum(r_length)
    +570            sub_r_mean /= e_N
    +571            arr = np.zeros(len(self.e_content[e_name]))
    +572            for r, r_name in enumerate(self.e_content[e_name]):
    +573                arr[r] = (self.r_values[r_name] - sub_r_mean) / (self.e_dvalue[e_name] * np.sqrt(e_N / self.shape[r_name] - 1))
    +574            plt.hist(arr, rwidth=0.8, bins=len(self.e_content[e_name]))
+575            plt.title('Replica distribution ' + e_name + ' (mean=0, var=1)')
    +576            plt.draw()
     
    @@ -4019,37 +4142,37 @@ saves the figure to a file named 'save' if.
    -
    577    def plot_history(self, expand=True):
    -578        """Plot derived Monte Carlo history for each ensemble
    -579
    -580        Parameters
    -581        ----------
    -582        expand : bool
    -583            show expanded history for irregular Monte Carlo chains (default: True).
    -584        """
    -585        for e, e_name in enumerate(self.mc_names):
    -586            plt.figure()
    -587            r_length = []
    -588            tmp = []
    -589            tmp_expanded = []
    -590            for r, r_name in enumerate(self.e_content[e_name]):
    -591                tmp.append(self.deltas[r_name] + self.r_values[r_name])
    -592                if expand:
    -593                    tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name], 1) + self.r_values[r_name])
    -594                    r_length.append(len(tmp_expanded[-1]))
    -595                else:
    -596                    r_length.append(len(tmp[-1]))
    -597            e_N = np.sum(r_length)
    -598            x = np.arange(e_N)
    -599            y_test = np.concatenate(tmp, axis=0)
    -600            if expand:
    -601                y = np.concatenate(tmp_expanded, axis=0)
    -602            else:
    -603                y = y_test
    -604            plt.errorbar(x, y, fmt='.', markersize=3)
    -605            plt.xlim(-0.5, e_N - 0.5)
    -606            plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})')
    -607            plt.draw()
    +            
    578    def plot_history(self, expand=True):
    +579        """Plot derived Monte Carlo history for each ensemble
    +580
    +581        Parameters
    +582        ----------
    +583        expand : bool
    +584            show expanded history for irregular Monte Carlo chains (default: True).
    +585        """
    +586        for e, e_name in enumerate(self.mc_names):
    +587            plt.figure()
    +588            r_length = []
    +589            tmp = []
    +590            tmp_expanded = []
    +591            for r, r_name in enumerate(self.e_content[e_name]):
    +592                tmp.append(self.deltas[r_name] + self.r_values[r_name])
    +593                if expand:
    +594                    tmp_expanded.append(_expand_deltas(self.deltas[r_name], list(self.idl[r_name]), self.shape[r_name], 1) + self.r_values[r_name])
    +595                    r_length.append(len(tmp_expanded[-1]))
    +596                else:
    +597                    r_length.append(len(tmp[-1]))
    +598            e_N = np.sum(r_length)
    +599            x = np.arange(e_N)
    +600            y_test = np.concatenate(tmp, axis=0)
    +601            if expand:
    +602                y = np.concatenate(tmp_expanded, axis=0)
    +603            else:
    +604                y = y_test
    +605            plt.errorbar(x, y, fmt='.', markersize=3)
    +606            plt.xlim(-0.5, e_N - 0.5)
    +607            plt.title(e_name + f'\nskew: {skew(y_test):.3f} (p={skewtest(y_test).pvalue:.3f}), kurtosis: {kurtosis(y_test):.3f} (p={kurtosistest(y_test).pvalue:.3f})')
    +608            plt.draw()
     
    @@ -4076,29 +4199,29 @@ show expanded history for irregular Monte Carlo chains (default: True).
    -
    609    def plot_piechart(self, save=None):
    -610        """Plot piechart which shows the fractional contribution of each
    -611        ensemble to the error and returns a dictionary containing the fractions.
    -612
    -613        Parameters
    -614        ----------
    -615        save : str
    -616            saves the figure to a file named 'save' if.
    -617        """
    -618        if not hasattr(self, 'e_dvalue'):
    -619            raise Exception('Run the gamma method first.')
    -620        if np.isclose(0.0, self._dvalue, atol=1e-15):
    -621            raise Exception('Error is 0.0')
    -622        labels = self.e_names
    -623        sizes = [self.e_dvalue[name] ** 2 for name in labels] / self._dvalue ** 2
    -624        fig1, ax1 = plt.subplots()
    -625        ax1.pie(sizes, labels=labels, startangle=90, normalize=True)
    -626        ax1.axis('equal')
    -627        plt.draw()
    -628        if save:
    -629            fig1.savefig(save)
    -630
    -631        return dict(zip(labels, sizes))
    +            
    610    def plot_piechart(self, save=None):
+611        """Plot a pie chart which shows the fractional contribution of each
    +612        ensemble to the error and returns a dictionary containing the fractions.
    +613
    +614        Parameters
    +615        ----------
    +616        save : str
+617            saves the figure to a file named 'save' if a path is specified.
    +618        """
    +619        if not hasattr(self, 'e_dvalue'):
    +620            raise Exception('Run the gamma method first.')
    +621        if np.isclose(0.0, self._dvalue, atol=1e-15):
    +622            raise Exception('Error is 0.0')
    +623        labels = self.e_names
    +624        sizes = [self.e_dvalue[name] ** 2 for name in labels] / self._dvalue ** 2
    +625        fig1, ax1 = plt.subplots()
    +626        ax1.pie(sizes, labels=labels, startangle=90, normalize=True)
    +627        ax1.axis('equal')
    +628        plt.draw()
    +629        if save:
    +630            fig1.savefig(save)
    +631
    +632        return dict(zip(labels, sizes))
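A hedged usage sketch (obs is a placeholder observable, ideally with contributions from more than one ensemble): the returned dictionary can be inspected independently of the figure.

    obs.gamma_method()
    fractions = obs.plot_piechart(save="error_budget")
    for name, frac in fractions.items():
        print(name, frac)   # fraction of the squared error carried by each ensemble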
     
    @@ -4126,34 +4249,34 @@ saves the figure to a file named 'save' if.
    -
    633    def dump(self, filename, datatype="json.gz", description="", **kwargs):
    -634        """Dump the Obs to a file 'name' of chosen format.
    -635
    -636        Parameters
    -637        ----------
    -638        filename : str
    -639            name of the file to be saved.
    -640        datatype : str
    -641            Format of the exported file. Supported formats include
    -642            "json.gz" and "pickle"
    -643        description : str
    -644            Description for output file, only relevant for json.gz format.
    -645        path : str
    -646            specifies a custom path for the file (default '.')
    -647        """
    -648        if 'path' in kwargs:
    -649            file_name = kwargs.get('path') + '/' + filename
    -650        else:
    -651            file_name = filename
    -652
    -653        if datatype == "json.gz":
    -654            from .input.json import dump_to_json
    -655            dump_to_json([self], file_name, description=description)
    -656        elif datatype == "pickle":
    -657            with open(file_name + '.p', 'wb') as fb:
    -658                pickle.dump(self, fb)
    -659        else:
    -660            raise Exception("Unknown datatype " + str(datatype))
    +            
    634    def dump(self, filename, datatype="json.gz", description="", **kwargs):
+635        """Dump the Obs to a file 'filename' of chosen format.
    +636
    +637        Parameters
    +638        ----------
    +639        filename : str
    +640            name of the file to be saved.
    +641        datatype : str
    +642            Format of the exported file. Supported formats include
    +643            "json.gz" and "pickle"
    +644        description : str
    +645            Description for output file, only relevant for json.gz format.
    +646        path : str
    +647            specifies a custom path for the file (default '.')
    +648        """
    +649        if 'path' in kwargs:
    +650            file_name = kwargs.get('path') + '/' + filename
    +651        else:
    +652            file_name = filename
    +653
    +654        if datatype == "json.gz":
    +655            from .input.json import dump_to_json
    +656            dump_to_json([self], file_name, description=description)
    +657        elif datatype == "pickle":
    +658            with open(file_name + '.p', 'wb') as fb:
    +659                pickle.dump(self, fb)
    +660        else:
    +661            raise Exception("Unknown datatype " + str(datatype))
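A short sketch of the two supported formats (file names and the path keyword are placeholders; reading the json.gz file back via pe.input.json.load_json is an assumption about the counterpart reader, pickle files go through the standard library):

    obs.dump("my_obs", datatype="json.gz", description="test observable", path="results")
    obs.dump("my_obs", datatype="pickle", path="results")

    # reading the files back (sketch)
    reloaded = pe.input.json.load_json("results/my_obs")
    import pickle
    with open("results/my_obs.p", "rb") as fb:
        reloaded_pickle = pickle.load(fb)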
     
    @@ -4187,31 +4310,31 @@ specifies a custom path for the file (default '.')
    -
    662    def export_jackknife(self):
    -663        """Export jackknife samples from the Obs
    -664
    -665        Returns
    -666        -------
    -667        numpy.ndarray
    -668            Returns a numpy array of length N + 1 where N is the number of samples
    -669            for the given ensemble and replicum. The zeroth entry of the array contains
    -670            the mean value of the Obs, entries 1 to N contain the N jackknife samples
    -671            derived from the Obs. The current implementation only works for observables
    -672            defined on exactly one ensemble and replicum. The derived jackknife samples
    -673            should agree with samples from a full jackknife analysis up to O(1/N).
    -674        """
    -675
    -676        if len(self.names) != 1:
    -677            raise Exception("'export_jackknife' is only implemented for Obs defined on one ensemble and replicum.")
    -678
    -679        name = self.names[0]
    -680        full_data = self.deltas[name] + self.r_values[name]
    -681        n = full_data.size
    -682        mean = self.value
    -683        tmp_jacks = np.zeros(n + 1)
    -684        tmp_jacks[0] = mean
    -685        tmp_jacks[1:] = (n * mean - full_data) / (n - 1)
    -686        return tmp_jacks
    +            
    663    def export_jackknife(self):
    +664        """Export jackknife samples from the Obs
    +665
    +666        Returns
    +667        -------
    +668        numpy.ndarray
    +669            Returns a numpy array of length N + 1 where N is the number of samples
    +670            for the given ensemble and replicum. The zeroth entry of the array contains
    +671            the mean value of the Obs, entries 1 to N contain the N jackknife samples
    +672            derived from the Obs. The current implementation only works for observables
    +673            defined on exactly one ensemble and replicum. The derived jackknife samples
    +674            should agree with samples from a full jackknife analysis up to O(1/N).
    +675        """
    +676
    +677        if len(self.names) != 1:
    +678            raise Exception("'export_jackknife' is only implemented for Obs defined on one ensemble and replicum.")
    +679
    +680        name = self.names[0]
    +681        full_data = self.deltas[name] + self.r_values[name]
    +682        n = full_data.size
    +683        mean = self.value
    +684        tmp_jacks = np.zeros(n + 1)
    +685        tmp_jacks[0] = mean
    +686        tmp_jacks[1:] = (n * mean - full_data) / (n - 1)
    +687        return tmp_jacks
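A sketch of how the exported pseudo-jackknife samples can be used for a rough cross-check of the error (obs is a placeholder observable defined on a single ensemble; the plain jackknife estimate neglects autocorrelations, so it will in general differ from the gamma-method error):

    jacks = obs.export_jackknife()
    n = len(jacks) - 1
    mean = jacks[0]
    # standard jackknife variance from the leave-one-out means
    jack_var = (n - 1) / n * np.sum((jacks[1:] - mean) ** 2)
    print("naive jackknife error:", np.sqrt(jack_var))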
     
    @@ -4230,6 +4353,90 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    689    def export_bootstrap(self, samples=500, random_numbers=None, save_rng=None):
    +690        """Export bootstrap samples from the Obs
    +691
    +692        Parameters
    +693        ----------
    +694        samples : int
    +695            Number of bootstrap samples to generate.
    +696        random_numbers : np.ndarray
    +697            Array of shape (samples, length) containing the random numbers to generate the bootstrap samples.
+698            If not provided, the bootstrap samples are generated based on the md5 hash of the ensemble name.
    +699        save_rng : str
    +700            Save the random numbers to a file if a path is specified.
    +701
    +702        Returns
    +703        -------
    +704        numpy.ndarray
+705            Returns a numpy array of length samples + 1 where samples is the number of
+706            bootstrap samples. The zeroth entry of the array contains
+707            the mean value of the Obs, entries 1 to samples contain the bootstrap samples
+708            derived from the Obs. The current implementation only works for observables
+709            defined on exactly one ensemble and replicum. The derived bootstrap samples
+710            should agree with samples from a full bootstrap analysis up to O(1/N).
    +711        """
    +712        if len(self.names) != 1:
+713            raise Exception("'export_bootstrap' is only implemented for Obs defined on one ensemble and replicum.")
    +714
    +715        name = self.names[0]
    +716        length = self.N
    +717
    +718        if random_numbers is None:
    +719            seed = int(hashlib.md5(name.encode()).hexdigest(), 16) & 0xFFFFFFFF
    +720            rng = np.random.default_rng(seed)
    +721            random_numbers = rng.integers(0, length, size=(samples, length))
    +722
    +723        if save_rng is not None:
    +724            np.savetxt(save_rng, random_numbers, fmt='%i')
    +725
    +726        proj = np.vstack([np.bincount(o, minlength=length) for o in random_numbers]) / length
    +727        ret = np.zeros(samples + 1)
    +728        ret[0] = self.value
    +729        ret[1:] = proj @ (self.deltas[name] + self.r_values[name])
    +730        return ret
    +
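An analogous sketch for the bootstrap samples (obs and obs2 are placeholder observables on a single ensemble; the naive estimate again ignores autocorrelations). Saving the random numbers makes the resampling reproducible and reusable for other observables from the same ensemble.

    boots = obs.export_bootstrap(samples=1000, save_rng="bootstrap_rng.txt")
    print("mean:", boots[0])
    print("naive bootstrap error:", np.std(boots[1:], ddof=1))

    # reuse the identical resampling for a second, hypothetical observable obs2
    rng = np.loadtxt("bootstrap_rng.txt", dtype=int)
    boots2 = obs2.export_bootstrap(samples=1000, random_numbers=rng)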
    @@ -4242,8 +4449,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    827    def sqrt(self):
    -828        return derived_observable(lambda x, **kwargs: np.sqrt(x[0]), [self], man_grad=[1 / 2 / np.sqrt(self.value)])
    +            
    871    def sqrt(self):
    +872        return derived_observable(lambda x, **kwargs: np.sqrt(x[0]), [self], man_grad=[1 / 2 / np.sqrt(self.value)])
     
    @@ -4261,8 +4468,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    830    def log(self):
    -831        return derived_observable(lambda x, **kwargs: np.log(x[0]), [self], man_grad=[1 / self.value])
    +            
    874    def log(self):
    +875        return derived_observable(lambda x, **kwargs: np.log(x[0]), [self], man_grad=[1 / self.value])
     
    @@ -4280,8 +4487,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    833    def exp(self):
    -834        return derived_observable(lambda x, **kwargs: np.exp(x[0]), [self], man_grad=[np.exp(self.value)])
    +            
    877    def exp(self):
    +878        return derived_observable(lambda x, **kwargs: np.exp(x[0]), [self], man_grad=[np.exp(self.value)])
     
    @@ -4299,8 +4506,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    836    def sin(self):
    -837        return derived_observable(lambda x, **kwargs: np.sin(x[0]), [self], man_grad=[np.cos(self.value)])
    +            
    880    def sin(self):
    +881        return derived_observable(lambda x, **kwargs: np.sin(x[0]), [self], man_grad=[np.cos(self.value)])
     
    @@ -4318,8 +4525,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    839    def cos(self):
    -840        return derived_observable(lambda x, **kwargs: np.cos(x[0]), [self], man_grad=[-np.sin(self.value)])
    +            
    883    def cos(self):
    +884        return derived_observable(lambda x, **kwargs: np.cos(x[0]), [self], man_grad=[-np.sin(self.value)])
     
    @@ -4337,8 +4544,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    842    def tan(self):
    -843        return derived_observable(lambda x, **kwargs: np.tan(x[0]), [self], man_grad=[1 / np.cos(self.value) ** 2])
    +            
    886    def tan(self):
    +887        return derived_observable(lambda x, **kwargs: np.tan(x[0]), [self], man_grad=[1 / np.cos(self.value) ** 2])
     
    @@ -4356,8 +4563,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    845    def arcsin(self):
    -846        return derived_observable(lambda x: anp.arcsin(x[0]), [self])
    +            
    889    def arcsin(self):
    +890        return derived_observable(lambda x: anp.arcsin(x[0]), [self])
     
    @@ -4375,8 +4582,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    848    def arccos(self):
    -849        return derived_observable(lambda x: anp.arccos(x[0]), [self])
    +            
    892    def arccos(self):
    +893        return derived_observable(lambda x: anp.arccos(x[0]), [self])
     
    @@ -4394,8 +4601,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    851    def arctan(self):
    -852        return derived_observable(lambda x: anp.arctan(x[0]), [self])
    +            
    895    def arctan(self):
    +896        return derived_observable(lambda x: anp.arctan(x[0]), [self])
     
    @@ -4413,8 +4620,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    854    def sinh(self):
    -855        return derived_observable(lambda x, **kwargs: np.sinh(x[0]), [self], man_grad=[np.cosh(self.value)])
    +            
    898    def sinh(self):
    +899        return derived_observable(lambda x, **kwargs: np.sinh(x[0]), [self], man_grad=[np.cosh(self.value)])
     
    @@ -4432,8 +4639,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    857    def cosh(self):
    -858        return derived_observable(lambda x, **kwargs: np.cosh(x[0]), [self], man_grad=[np.sinh(self.value)])
    +            
    901    def cosh(self):
    +902        return derived_observable(lambda x, **kwargs: np.cosh(x[0]), [self], man_grad=[np.sinh(self.value)])
     
    @@ -4451,8 +4658,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    860    def tanh(self):
    -861        return derived_observable(lambda x, **kwargs: np.tanh(x[0]), [self], man_grad=[1 / np.cosh(self.value) ** 2])
    +            
    904    def tanh(self):
    +905        return derived_observable(lambda x, **kwargs: np.tanh(x[0]), [self], man_grad=[1 / np.cosh(self.value) ** 2])
     
    @@ -4470,8 +4677,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    863    def arcsinh(self):
    -864        return derived_observable(lambda x: anp.arcsinh(x[0]), [self])
    +            
    907    def arcsinh(self):
    +908        return derived_observable(lambda x: anp.arcsinh(x[0]), [self])
     
    @@ -4489,8 +4696,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    866    def arccosh(self):
    -867        return derived_observable(lambda x: anp.arccosh(x[0]), [self])
    +            
    910    def arccosh(self):
    +911        return derived_observable(lambda x: anp.arccosh(x[0]), [self])
     
    @@ -4508,8 +4715,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    869    def arctanh(self):
    -870        return derived_observable(lambda x: anp.arctanh(x[0]), [self])
    +            
    913    def arctanh(self):
    +914        return derived_observable(lambda x: anp.arctanh(x[0]), [self])
     
    @@ -4660,115 +4867,115 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    873class CObs:
    -874    """Class for a complex valued observable."""
    -875    __slots__ = ['_real', '_imag', 'tag']
    -876
    -877    def __init__(self, real, imag=0.0):
    -878        self._real = real
    -879        self._imag = imag
    -880        self.tag = None
    -881
    -882    @property
    -883    def real(self):
    -884        return self._real
    -885
    -886    @property
    -887    def imag(self):
    -888        return self._imag
    -889
    -890    def gamma_method(self, **kwargs):
    -891        """Executes the gamma_method for the real and the imaginary part."""
    -892        if isinstance(self.real, Obs):
    -893            self.real.gamma_method(**kwargs)
    -894        if isinstance(self.imag, Obs):
    -895            self.imag.gamma_method(**kwargs)
    -896
    -897    def is_zero(self):
    -898        """Checks whether both real and imaginary part are zero within machine precision."""
    -899        return self.real == 0.0 and self.imag == 0.0
    -900
    -901    def conjugate(self):
    -902        return CObs(self.real, -self.imag)
    -903
    -904    def __add__(self, other):
    -905        if isinstance(other, np.ndarray):
    -906            return other + self
    -907        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    -908            return CObs(self.real + other.real,
    -909                        self.imag + other.imag)
    -910        else:
    -911            return CObs(self.real + other, self.imag)
    -912
    -913    def __radd__(self, y):
    -914        return self + y
    -915
    -916    def __sub__(self, other):
    -917        if isinstance(other, np.ndarray):
    -918            return -1 * (other - self)
    -919        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    -920            return CObs(self.real - other.real, self.imag - other.imag)
    -921        else:
    -922            return CObs(self.real - other, self.imag)
    -923
    -924    def __rsub__(self, other):
    -925        return -1 * (self - other)
    -926
    -927    def __mul__(self, other):
    -928        if isinstance(other, np.ndarray):
    -929            return other * self
    -930        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    -931            if all(isinstance(i, Obs) for i in [self.real, self.imag, other.real, other.imag]):
    -932                return CObs(derived_observable(lambda x, **kwargs: x[0] * x[1] - x[2] * x[3],
    -933                                               [self.real, other.real, self.imag, other.imag],
    -934                                               man_grad=[other.real.value, self.real.value, -other.imag.value, -self.imag.value]),
    -935                            derived_observable(lambda x, **kwargs: x[2] * x[1] + x[0] * x[3],
    -936                                               [self.real, other.real, self.imag, other.imag],
    -937                                               man_grad=[other.imag.value, self.imag.value, other.real.value, self.real.value]))
    -938            elif getattr(other, 'imag', 0) != 0:
    -939                return CObs(self.real * other.real - self.imag * other.imag,
    -940                            self.imag * other.real + self.real * other.imag)
    -941            else:
    -942                return CObs(self.real * other.real, self.imag * other.real)
    -943        else:
    -944            return CObs(self.real * other, self.imag * other)
    -945
    -946    def __rmul__(self, other):
    -947        return self * other
    -948
    -949    def __truediv__(self, other):
    -950        if isinstance(other, np.ndarray):
    -951            return 1 / (other / self)
    -952        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    -953            r = other.real ** 2 + other.imag ** 2
    -954            return CObs((self.real * other.real + self.imag * other.imag) / r, (self.imag * other.real - self.real * other.imag) / r)
    -955        else:
    -956            return CObs(self.real / other, self.imag / other)
    -957
    -958    def __rtruediv__(self, other):
    -959        r = self.real ** 2 + self.imag ** 2
    -960        if hasattr(other, 'real') and hasattr(other, 'imag'):
    -961            return CObs((self.real * other.real + self.imag * other.imag) / r, (self.real * other.imag - self.imag * other.real) / r)
    -962        else:
    -963            return CObs(self.real * other / r, -self.imag * other / r)
    -964
    -965    def __abs__(self):
    -966        return np.sqrt(self.real**2 + self.imag**2)
    -967
    -968    def __pos__(self):
    -969        return self
    -970
    -971    def __neg__(self):
    -972        return -1 * self
    -973
    -974    def __eq__(self, other):
    -975        return self.real == other.real and self.imag == other.imag
    -976
    -977    def __str__(self):
    -978        return '(' + str(self.real) + int(self.imag >= 0.0) * '+' + str(self.imag) + 'j)'
    -979
    -980    def __repr__(self):
    -981        return 'CObs[' + str(self) + ']'
    +            
     917class CObs:
    + 918    """Class for a complex valued observable."""
    + 919    __slots__ = ['_real', '_imag', 'tag']
    + 920
    + 921    def __init__(self, real, imag=0.0):
    + 922        self._real = real
    + 923        self._imag = imag
    + 924        self.tag = None
    + 925
    + 926    @property
    + 927    def real(self):
    + 928        return self._real
    + 929
    + 930    @property
    + 931    def imag(self):
    + 932        return self._imag
    + 933
    + 934    def gamma_method(self, **kwargs):
    + 935        """Executes the gamma_method for the real and the imaginary part."""
    + 936        if isinstance(self.real, Obs):
    + 937            self.real.gamma_method(**kwargs)
    + 938        if isinstance(self.imag, Obs):
    + 939            self.imag.gamma_method(**kwargs)
    + 940
    + 941    def is_zero(self):
    + 942        """Checks whether both real and imaginary part are zero within machine precision."""
    + 943        return self.real == 0.0 and self.imag == 0.0
    + 944
    + 945    def conjugate(self):
    + 946        return CObs(self.real, -self.imag)
    + 947
    + 948    def __add__(self, other):
    + 949        if isinstance(other, np.ndarray):
    + 950            return other + self
    + 951        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    + 952            return CObs(self.real + other.real,
    + 953                        self.imag + other.imag)
    + 954        else:
    + 955            return CObs(self.real + other, self.imag)
    + 956
    + 957    def __radd__(self, y):
    + 958        return self + y
    + 959
    + 960    def __sub__(self, other):
    + 961        if isinstance(other, np.ndarray):
    + 962            return -1 * (other - self)
    + 963        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    + 964            return CObs(self.real - other.real, self.imag - other.imag)
    + 965        else:
    + 966            return CObs(self.real - other, self.imag)
    + 967
    + 968    def __rsub__(self, other):
    + 969        return -1 * (self - other)
    + 970
    + 971    def __mul__(self, other):
    + 972        if isinstance(other, np.ndarray):
    + 973            return other * self
    + 974        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    + 975            if all(isinstance(i, Obs) for i in [self.real, self.imag, other.real, other.imag]):
    + 976                return CObs(derived_observable(lambda x, **kwargs: x[0] * x[1] - x[2] * x[3],
    + 977                                               [self.real, other.real, self.imag, other.imag],
    + 978                                               man_grad=[other.real.value, self.real.value, -other.imag.value, -self.imag.value]),
    + 979                            derived_observable(lambda x, **kwargs: x[2] * x[1] + x[0] * x[3],
    + 980                                               [self.real, other.real, self.imag, other.imag],
    + 981                                               man_grad=[other.imag.value, self.imag.value, other.real.value, self.real.value]))
    + 982            elif getattr(other, 'imag', 0) != 0:
    + 983                return CObs(self.real * other.real - self.imag * other.imag,
    + 984                            self.imag * other.real + self.real * other.imag)
    + 985            else:
    + 986                return CObs(self.real * other.real, self.imag * other.real)
    + 987        else:
    + 988            return CObs(self.real * other, self.imag * other)
    + 989
    + 990    def __rmul__(self, other):
    + 991        return self * other
    + 992
    + 993    def __truediv__(self, other):
    + 994        if isinstance(other, np.ndarray):
    + 995            return 1 / (other / self)
    + 996        elif hasattr(other, 'real') and hasattr(other, 'imag'):
    + 997            r = other.real ** 2 + other.imag ** 2
    + 998            return CObs((self.real * other.real + self.imag * other.imag) / r, (self.imag * other.real - self.real * other.imag) / r)
    + 999        else:
    +1000            return CObs(self.real / other, self.imag / other)
    +1001
    +1002    def __rtruediv__(self, other):
    +1003        r = self.real ** 2 + self.imag ** 2
    +1004        if hasattr(other, 'real') and hasattr(other, 'imag'):
    +1005            return CObs((self.real * other.real + self.imag * other.imag) / r, (self.real * other.imag - self.imag * other.real) / r)
    +1006        else:
    +1007            return CObs(self.real * other / r, -self.imag * other / r)
    +1008
    +1009    def __abs__(self):
    +1010        return np.sqrt(self.real**2 + self.imag**2)
    +1011
    +1012    def __pos__(self):
    +1013        return self
    +1014
    +1015    def __neg__(self):
    +1016        return -1 * self
    +1017
    +1018    def __eq__(self, other):
    +1019        return self.real == other.real and self.imag == other.imag
    +1020
    +1021    def __str__(self):
    +1022        return '(' + str(self.real) + int(self.imag >= 0.0) * '+' + str(self.imag) + 'j)'
    +1023
    +1024    def __repr__(self):
    +1025        return 'CObs[' + str(self) + ']'
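A brief sketch of how CObs composes with Obs arithmetic (obs_re and obs_im are placeholder Obs for the real and imaginary parts): products of two CObs go through derived_observable, so calling gamma_method on the result propagates the errors of both parts as usual.

    c = pe.CObs(obs_re, obs_im)
    d = c * c.conjugate()      # |c|^2 as a CObs, imaginary part compatible with zero
    d.gamma_method()
    print(d.real)              # Obs with propagated error
    mod = abs(c)               # sqrt(re^2 + im^2) as an Obs
    mod.gamma_method()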
     
    @@ -4786,10 +4993,10 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    877    def __init__(self, real, imag=0.0):
    -878        self._real = real
    -879        self._imag = imag
    -880        self.tag = None
    +            
    921    def __init__(self, real, imag=0.0):
    +922        self._real = real
    +923        self._imag = imag
    +924        self.tag = None
     
    @@ -4840,12 +5047,12 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    890    def gamma_method(self, **kwargs):
    -891        """Executes the gamma_method for the real and the imaginary part."""
    -892        if isinstance(self.real, Obs):
    -893            self.real.gamma_method(**kwargs)
    -894        if isinstance(self.imag, Obs):
    -895            self.imag.gamma_method(**kwargs)
    +            
    934    def gamma_method(self, **kwargs):
    +935        """Executes the gamma_method for the real and the imaginary part."""
    +936        if isinstance(self.real, Obs):
    +937            self.real.gamma_method(**kwargs)
    +938        if isinstance(self.imag, Obs):
    +939            self.imag.gamma_method(**kwargs)
     
    @@ -4865,9 +5072,9 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    897    def is_zero(self):
    -898        """Checks whether both real and imaginary part are zero within machine precision."""
    -899        return self.real == 0.0 and self.imag == 0.0
    +            
    941    def is_zero(self):
    +942        """Checks whether both real and imaginary part are zero within machine precision."""
    +943        return self.real == 0.0 and self.imag == 0.0
     
    @@ -4887,8 +5094,8 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    901    def conjugate(self):
    -902        return CObs(self.real, -self.imag)
    +            
    945    def conjugate(self):
    +946        return CObs(self.real, -self.imag)
     
    @@ -4907,174 +5114,174 @@ should agree with samples from a full jackknife analysis up to O(1/N).
    -
    1103def derived_observable(func, data, array_mode=False, **kwargs):
    -1104    """Construct a derived Obs according to func(data, **kwargs) using automatic differentiation.
    -1105
    -1106    Parameters
    -1107    ----------
    -1108    func : object
    -1109        arbitrary function of the form func(data, **kwargs). For the
    -1110        automatic differentiation to work, all numpy functions have to have
    -1111        the autograd wrapper (use 'import autograd.numpy as anp').
    -1112    data : list
    -1113        list of Obs, e.g. [obs1, obs2, obs3].
    -1114    num_grad : bool
    -1115        if True, numerical derivatives are used instead of autograd
    -1116        (default False). To control the numerical differentiation the
    -1117        kwargs of numdifftools.step_generators.MaxStepGenerator
    -1118        can be used.
    -1119    man_grad : list
    -1120        manually supply a list or an array which contains the jacobian
    -1121        of func. Use cautiously, supplying the wrong derivative will
    -1122        not be intercepted.
    -1123
    -1124    Notes
    -1125    -----
    -1126    For simple mathematical operations it can be practical to use anonymous
    -1127    functions. For the ratio of two observables one can e.g. use
    -1128
    -1129    new_obs = derived_observable(lambda x: x[0] / x[1], [obs1, obs2])
    -1130    """
    -1131
    -1132    data = np.asarray(data)
    -1133    raveled_data = data.ravel()
    -1134
    -1135    # Workaround for matrix operations containing non Obs data
    -1136    if not all(isinstance(x, Obs) for x in raveled_data):
    -1137        for i in range(len(raveled_data)):
    -1138            if isinstance(raveled_data[i], (int, float)):
    -1139                raveled_data[i] = cov_Obs(raveled_data[i], 0.0, "###dummy_covobs###")
    -1140
    -1141    allcov = {}
    -1142    for o in raveled_data:
    -1143        for name in o.cov_names:
    -1144            if name in allcov:
    -1145                if not np.allclose(allcov[name], o.covobs[name].cov):
    -1146                    raise Exception('Inconsistent covariance matrices for %s!' % (name))
    -1147            else:
    -1148                allcov[name] = o.covobs[name].cov
    +            
    1147def derived_observable(func, data, array_mode=False, **kwargs):
    +1148    """Construct a derived Obs according to func(data, **kwargs) using automatic differentiation.
     1149
    -1150    n_obs = len(raveled_data)
    -1151    new_names = sorted(set([y for x in [o.names for o in raveled_data] for y in x]))
    -1152    new_cov_names = sorted(set([y for x in [o.cov_names for o in raveled_data] for y in x]))
    -1153    new_sample_names = sorted(set(new_names) - set(new_cov_names))
    -1154
    -1155    reweighted = len(list(filter(lambda o: o.reweighted is True, raveled_data))) > 0
    -1156
    -1157    if data.ndim == 1:
    -1158        values = np.array([o.value for o in data])
    -1159    else:
    -1160        values = np.vectorize(lambda x: x.value)(data)
    -1161
    -1162    new_values = func(values, **kwargs)
    -1163
    -1164    multi = int(isinstance(new_values, np.ndarray))
    -1165
    -1166    new_r_values = {}
    -1167    new_idl_d = {}
    -1168    for name in new_sample_names:
    -1169        idl = []
    -1170        tmp_values = np.zeros(n_obs)
    -1171        for i, item in enumerate(raveled_data):
    -1172            tmp_values[i] = item.r_values.get(name, item.value)
    -1173            tmp_idl = item.idl.get(name)
    -1174            if tmp_idl is not None:
    -1175                idl.append(tmp_idl)
    -1176        if multi > 0:
    -1177            tmp_values = np.array(tmp_values).reshape(data.shape)
    -1178        new_r_values[name] = func(tmp_values, **kwargs)
    -1179        new_idl_d[name] = _merge_idx(idl)
    -1180
    -1181    if 'man_grad' in kwargs:
    -1182        deriv = np.asarray(kwargs.get('man_grad'))
    -1183        if new_values.shape + data.shape != deriv.shape:
    -1184            raise Exception('Manual derivative does not have correct shape.')
    -1185    elif kwargs.get('num_grad') is True:
    -1186        if multi > 0:
    -1187            raise Exception('Multi mode currently not supported for numerical derivative')
    -1188        options = {
    -1189            'base_step': 0.1,
    -1190            'step_ratio': 2.5}
    -1191        for key in options.keys():
    -1192            kwarg = kwargs.get(key)
    -1193            if kwarg is not None:
    -1194                options[key] = kwarg
    -1195        tmp_df = nd.Gradient(func, order=4, **{k: v for k, v in options.items() if v is not None})(values, **kwargs)
    -1196        if tmp_df.size == 1:
    -1197            deriv = np.array([tmp_df.real])
    -1198        else:
    -1199            deriv = tmp_df.real
    -1200    else:
    -1201        deriv = jacobian(func)(values, **kwargs)
    -1202
    -1203    final_result = np.zeros(new_values.shape, dtype=object)
    -1204
    -1205    if array_mode is True:
    -1206
    -1207        class _Zero_grad():
    -1208            def __init__(self, N):
    -1209                self.grad = np.zeros((N, 1))
    -1210
    -1211        new_covobs_lengths = dict(set([y for x in [[(n, o.covobs[n].N) for n in o.cov_names] for o in raveled_data] for y in x]))
    -1212        d_extracted = {}
    -1213        g_extracted = {}
    -1214        for name in new_sample_names:
    -1215            d_extracted[name] = []
    -1216            ens_length = len(new_idl_d[name])
    -1217            for i_dat, dat in enumerate(data):
    -1218                d_extracted[name].append(np.array([_expand_deltas_for_merge(o.deltas.get(name, np.zeros(ens_length)), o.idl.get(name, new_idl_d[name]), o.shape.get(name, ens_length), new_idl_d[name]) for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (ens_length, )))
    -1219        for name in new_cov_names:
    -1220            g_extracted[name] = []
    -1221            zero_grad = _Zero_grad(new_covobs_lengths[name])
    -1222            for i_dat, dat in enumerate(data):
    -1223                g_extracted[name].append(np.array([o.covobs.get(name, zero_grad).grad for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (new_covobs_lengths[name], 1)))
    +1150    Parameters
    +1151    ----------
    +1152    func : object
    +1153        arbitrary function of the form func(data, **kwargs). For the
    +1154        automatic differentiation to work, all numpy functions have to have
    +1155        the autograd wrapper (use 'import autograd.numpy as anp').
    +1156    data : list
    +1157        list of Obs, e.g. [obs1, obs2, obs3].
    +1158    num_grad : bool
    +1159        if True, numerical derivatives are used instead of autograd
    +1160        (default False). To control the numerical differentiation the
    +1161        kwargs of numdifftools.step_generators.MaxStepGenerator
    +1162        can be used.
    +1163    man_grad : list
    +1164        manually supply a list or an array which contains the jacobian
    +1165        of func. Use cautiously, supplying the wrong derivative will
    +1166        not be intercepted.
    +1167
    +1168    Notes
    +1169    -----
    +1170    For simple mathematical operations it can be practical to use anonymous
    +1171    functions. For the ratio of two observables one can e.g. use
    +1172
    +1173    new_obs = derived_observable(lambda x: x[0] / x[1], [obs1, obs2])
    +1174    """
    +1175
    +1176    data = np.asarray(data)
    +1177    raveled_data = data.ravel()
    +1178
    +1179    # Workaround for matrix operations containing non Obs data
    +1180    if not all(isinstance(x, Obs) for x in raveled_data):
    +1181        for i in range(len(raveled_data)):
    +1182            if isinstance(raveled_data[i], (int, float)):
    +1183                raveled_data[i] = cov_Obs(raveled_data[i], 0.0, "###dummy_covobs###")
    +1184
    +1185    allcov = {}
    +1186    for o in raveled_data:
    +1187        for name in o.cov_names:
    +1188            if name in allcov:
    +1189                if not np.allclose(allcov[name], o.covobs[name].cov):
    +1190                    raise Exception('Inconsistent covariance matrices for %s!' % (name))
    +1191            else:
    +1192                allcov[name] = o.covobs[name].cov
    +1193
    +1194    n_obs = len(raveled_data)
    +1195    new_names = sorted(set([y for x in [o.names for o in raveled_data] for y in x]))
    +1196    new_cov_names = sorted(set([y for x in [o.cov_names for o in raveled_data] for y in x]))
    +1197    new_sample_names = sorted(set(new_names) - set(new_cov_names))
    +1198
    +1199    reweighted = len(list(filter(lambda o: o.reweighted is True, raveled_data))) > 0
    +1200
    +1201    if data.ndim == 1:
    +1202        values = np.array([o.value for o in data])
    +1203    else:
    +1204        values = np.vectorize(lambda x: x.value)(data)
    +1205
    +1206    new_values = func(values, **kwargs)
    +1207
    +1208    multi = int(isinstance(new_values, np.ndarray))
    +1209
    +1210    new_r_values = {}
    +1211    new_idl_d = {}
    +1212    for name in new_sample_names:
    +1213        idl = []
    +1214        tmp_values = np.zeros(n_obs)
    +1215        for i, item in enumerate(raveled_data):
    +1216            tmp_values[i] = item.r_values.get(name, item.value)
    +1217            tmp_idl = item.idl.get(name)
    +1218            if tmp_idl is not None:
    +1219                idl.append(tmp_idl)
    +1220        if multi > 0:
    +1221            tmp_values = np.array(tmp_values).reshape(data.shape)
    +1222        new_r_values[name] = func(tmp_values, **kwargs)
    +1223        new_idl_d[name] = _merge_idx(idl)
     1224
    -1225    for i_val, new_val in np.ndenumerate(new_values):
    -1226        new_deltas = {}
    -1227        new_grad = {}
    -1228        if array_mode is True:
    -1229            for name in new_sample_names:
    -1230                ens_length = d_extracted[name][0].shape[-1]
    -1231                new_deltas[name] = np.zeros(ens_length)
    -1232                for i_dat, dat in enumerate(d_extracted[name]):
    -1233                    new_deltas[name] += np.tensordot(deriv[i_val + (i_dat, )], dat)
    -1234            for name in new_cov_names:
    -1235                new_grad[name] = 0
    -1236                for i_dat, dat in enumerate(g_extracted[name]):
    -1237                    new_grad[name] += np.tensordot(deriv[i_val + (i_dat, )], dat)
    -1238        else:
    -1239            for j_obs, obs in np.ndenumerate(data):
    -1240                for name in obs.names:
    -1241                    if name in obs.cov_names:
    -1242                        new_grad[name] = new_grad.get(name, 0) + deriv[i_val + j_obs] * obs.covobs[name].grad
    -1243                    else:
    -1244                        new_deltas[name] = new_deltas.get(name, 0) + deriv[i_val + j_obs] * _expand_deltas_for_merge(obs.deltas[name], obs.idl[name], obs.shape[name], new_idl_d[name])
    -1245
    -1246        new_covobs = {name: Covobs(0, allcov[name], name, grad=new_grad[name]) for name in new_grad}
    -1247
    -1248        if not set(new_covobs.keys()).isdisjoint(new_deltas.keys()):
    -1249            raise Exception('The same name has been used for deltas and covobs!')
    -1250        new_samples = []
    -1251        new_means = []
    -1252        new_idl = []
    -1253        new_names_obs = []
    -1254        for name in new_names:
    -1255            if name not in new_covobs:
    -1256                new_samples.append(new_deltas[name])
    -1257                new_idl.append(new_idl_d[name])
    -1258                new_means.append(new_r_values[name][i_val])
    -1259                new_names_obs.append(name)
    -1260        final_result[i_val] = Obs(new_samples, new_names_obs, means=new_means, idl=new_idl)
    -1261        for name in new_covobs:
    -1262            final_result[i_val].names.append(name)
    -1263        final_result[i_val]._covobs = new_covobs
    -1264        final_result[i_val]._value = new_val
    -1265        final_result[i_val].reweighted = reweighted
    -1266
    -1267    if multi == 0:
    -1268        final_result = final_result.item()
    -1269
    -1270    return final_result
    +1225    if 'man_grad' in kwargs:
    +1226        deriv = np.asarray(kwargs.get('man_grad'))
    +1227        if new_values.shape + data.shape != deriv.shape:
    +1228            raise Exception('Manual derivative does not have correct shape.')
    +1229    elif kwargs.get('num_grad') is True:
    +1230        if multi > 0:
    +1231            raise Exception('Multi mode currently not supported for numerical derivative')
    +1232        options = {
    +1233            'base_step': 0.1,
    +1234            'step_ratio': 2.5}
    +1235        for key in options.keys():
    +1236            kwarg = kwargs.get(key)
    +1237            if kwarg is not None:
    +1238                options[key] = kwarg
    +1239        tmp_df = nd.Gradient(func, order=4, **{k: v for k, v in options.items() if v is not None})(values, **kwargs)
    +1240        if tmp_df.size == 1:
    +1241            deriv = np.array([tmp_df.real])
    +1242        else:
    +1243            deriv = tmp_df.real
    +1244    else:
    +1245        deriv = jacobian(func)(values, **kwargs)
    +1246
    +1247    final_result = np.zeros(new_values.shape, dtype=object)
    +1248
    +1249    if array_mode is True:
    +1250
    +1251        class _Zero_grad():
    +1252            def __init__(self, N):
    +1253                self.grad = np.zeros((N, 1))
    +1254
    +1255        new_covobs_lengths = dict(set([y for x in [[(n, o.covobs[n].N) for n in o.cov_names] for o in raveled_data] for y in x]))
    +1256        d_extracted = {}
    +1257        g_extracted = {}
    +1258        for name in new_sample_names:
    +1259            d_extracted[name] = []
    +1260            ens_length = len(new_idl_d[name])
    +1261            for i_dat, dat in enumerate(data):
    +1262                d_extracted[name].append(np.array([_expand_deltas_for_merge(o.deltas.get(name, np.zeros(ens_length)), o.idl.get(name, new_idl_d[name]), o.shape.get(name, ens_length), new_idl_d[name]) for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (ens_length, )))
    +1263        for name in new_cov_names:
    +1264            g_extracted[name] = []
    +1265            zero_grad = _Zero_grad(new_covobs_lengths[name])
    +1266            for i_dat, dat in enumerate(data):
    +1267                g_extracted[name].append(np.array([o.covobs.get(name, zero_grad).grad for o in dat.reshape(np.prod(dat.shape))]).reshape(dat.shape + (new_covobs_lengths[name], 1)))
    +1268
    +1269    for i_val, new_val in np.ndenumerate(new_values):
    +1270        new_deltas = {}
    +1271        new_grad = {}
    +1272        if array_mode is True:
    +1273            for name in new_sample_names:
    +1274                ens_length = d_extracted[name][0].shape[-1]
    +1275                new_deltas[name] = np.zeros(ens_length)
    +1276                for i_dat, dat in enumerate(d_extracted[name]):
    +1277                    new_deltas[name] += np.tensordot(deriv[i_val + (i_dat, )], dat)
    +1278            for name in new_cov_names:
    +1279                new_grad[name] = 0
    +1280                for i_dat, dat in enumerate(g_extracted[name]):
    +1281                    new_grad[name] += np.tensordot(deriv[i_val + (i_dat, )], dat)
    +1282        else:
    +1283            for j_obs, obs in np.ndenumerate(data):
    +1284                for name in obs.names:
    +1285                    if name in obs.cov_names:
    +1286                        new_grad[name] = new_grad.get(name, 0) + deriv[i_val + j_obs] * obs.covobs[name].grad
    +1287                    else:
    +1288                        new_deltas[name] = new_deltas.get(name, 0) + deriv[i_val + j_obs] * _expand_deltas_for_merge(obs.deltas[name], obs.idl[name], obs.shape[name], new_idl_d[name])
    +1289
    +1290        new_covobs = {name: Covobs(0, allcov[name], name, grad=new_grad[name]) for name in new_grad}
    +1291
    +1292        if not set(new_covobs.keys()).isdisjoint(new_deltas.keys()):
    +1293            raise Exception('The same name has been used for deltas and covobs!')
    +1294        new_samples = []
    +1295        new_means = []
    +1296        new_idl = []
    +1297        new_names_obs = []
    +1298        for name in new_names:
    +1299            if name not in new_covobs:
    +1300                new_samples.append(new_deltas[name])
    +1301                new_idl.append(new_idl_d[name])
    +1302                new_means.append(new_r_values[name][i_val])
    +1303                new_names_obs.append(name)
    +1304        final_result[i_val] = Obs(new_samples, new_names_obs, means=new_means, idl=new_idl)
    +1305        for name in new_covobs:
    +1306            final_result[i_val].names.append(name)
    +1307        final_result[i_val]._covobs = new_covobs
    +1308        final_result[i_val]._value = new_val
    +1309        final_result[i_val].reweighted = reweighted
    +1310
    +1311    if multi == 0:
    +1312        final_result = final_result.item()
    +1313
    +1314    return final_result
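Two hedged examples of the interface (obs_a and obs_b are placeholder Obs): the ratio from the docstring via autograd, and the same quantity with a manually supplied gradient, which bypasses the automatic differentiation.

    ratio = pe.derived_observable(lambda x: x[0] / x[1], [obs_a, obs_b])
    ratio.gamma_method()

    # equivalent with a manual jacobian; man_grad has to match new_values.shape + data.shape
    ratio_manual = pe.derived_observable(
        lambda x, **kwargs: x[0] / x[1],
        [obs_a, obs_b],
        man_grad=[1 / obs_b.value, -obs_a.value / obs_b.value ** 2])
    ratio_manual.gamma_method()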
     
    @@ -5121,46 +5328,46 @@ functions. For the ratio of two observables one can e.g. use

    -
    1302def reweight(weight, obs, **kwargs):
    -1303    """Reweight a list of observables.
    -1304
    -1305    Parameters
    -1306    ----------
    -1307    weight : Obs
    -1308        Reweighting factor. An Observable that has to be defined on a superset of the
    -1309        configurations in obs[i].idl for all i.
    -1310    obs : list
    -1311        list of Obs, e.g. [obs1, obs2, obs3].
    -1312    all_configs : bool
    -1313        if True, the reweighted observables are normalized by the average of
    -1314        the reweighting factor on all configurations in weight.idl and not
    -1315        on the configurations in obs[i].idl. Default False.
    -1316    """
    -1317    result = []
    -1318    for i in range(len(obs)):
    -1319        if len(obs[i].cov_names):
    -1320            raise Exception('Error: Not possible to reweight an Obs that contains covobs!')
    -1321        if not set(obs[i].names).issubset(weight.names):
    -1322            raise Exception('Error: Ensembles do not fit')
    -1323        for name in obs[i].names:
    -1324            if not set(obs[i].idl[name]).issubset(weight.idl[name]):
    -1325                raise Exception('obs[%d] has to be defined on a subset of the configs in weight.idl[%s]!' % (i, name))
    -1326        new_samples = []
    -1327        w_deltas = {}
    -1328        for name in sorted(obs[i].names):
    -1329            w_deltas[name] = _reduce_deltas(weight.deltas[name], weight.idl[name], obs[i].idl[name])
    -1330            new_samples.append((w_deltas[name] + weight.r_values[name]) * (obs[i].deltas[name] + obs[i].r_values[name]))
    -1331        tmp_obs = Obs(new_samples, sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)])
    -1332
    -1333        if kwargs.get('all_configs'):
    -1334            new_weight = weight
    -1335        else:
    -1336            new_weight = Obs([w_deltas[name] + weight.r_values[name] for name in sorted(obs[i].names)], sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)])
    -1337
    -1338        result.append(tmp_obs / new_weight)
    -1339        result[-1].reweighted = True
    -1340
    -1341    return result
    +            
    1346def reweight(weight, obs, **kwargs):
    +1347    """Reweight a list of observables.
    +1348
    +1349    Parameters
    +1350    ----------
    +1351    weight : Obs
    +1352        Reweighting factor. An Observable that has to be defined on a superset of the
    +1353        configurations in obs[i].idl for all i.
    +1354    obs : list
    +1355        list of Obs, e.g. [obs1, obs2, obs3].
    +1356    all_configs : bool
    +1357        if True, the reweighted observables are normalized by the average of
    +1358        the reweighting factor on all configurations in weight.idl and not
    +1359        on the configurations in obs[i].idl. Default False.
    +1360    """
    +1361    result = []
    +1362    for i in range(len(obs)):
    +1363        if len(obs[i].cov_names):
    +1364            raise Exception('Error: Not possible to reweight an Obs that contains covobs!')
    +1365        if not set(obs[i].names).issubset(weight.names):
    +1366            raise Exception('Error: Ensembles do not fit')
    +1367        for name in obs[i].names:
    +1368            if not set(obs[i].idl[name]).issubset(weight.idl[name]):
    +1369                raise Exception('obs[%d] has to be defined on a subset of the configs in weight.idl[%s]!' % (i, name))
    +1370        new_samples = []
    +1371        w_deltas = {}
    +1372        for name in sorted(obs[i].names):
    +1373            w_deltas[name] = _reduce_deltas(weight.deltas[name], weight.idl[name], obs[i].idl[name])
    +1374            new_samples.append((w_deltas[name] + weight.r_values[name]) * (obs[i].deltas[name] + obs[i].r_values[name]))
    +1375        tmp_obs = Obs(new_samples, sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)])
    +1376
    +1377        if kwargs.get('all_configs'):
    +1378            new_weight = weight
    +1379        else:
    +1380            new_weight = Obs([w_deltas[name] + weight.r_values[name] for name in sorted(obs[i].names)], sorted(obs[i].names), idl=[obs[i].idl[name] for name in sorted(obs[i].names)])
    +1381
    +1382        result.append(tmp_obs / new_weight)
    +1383        result[-1].reweighted = True
    +1384
    +1385    return result
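
A minimal usage sketch of reweight (the sample arrays and ensemble name are hypothetical placeholders):

import pyerrors as pe

w = pe.Obs([w_samples], ['ensemble1'])   # reweighting factor
o = pe.Obs([o_samples], ['ensemble1'])   # observable on (a subset of) the same configurations
o_rw = pe.reweight(w, [o])[0]            # returns one reweighted Obs per input Obs
o_rw.gamma_method()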
     
    @@ -5194,47 +5401,47 @@ on the configurations in obs[i].idl. Default False.
-1344def correlate(obs_a, obs_b):
    -1345    """Correlate two observables.
    -1346
    -1347    Parameters
    -1348    ----------
    -1349    obs_a : Obs
    -1350        First observable
    -1351    obs_b : Obs
    -1352        Second observable
    -1353
    -1354    Notes
    -1355    -----
    -1356    Keep in mind to only correlate primary observables which have not been reweighted
    -1357    yet. The reweighting has to be applied after correlating the observables.
    -1358    Currently only works if ensembles are identical (this is not strictly necessary).
    -1359    """
    -1360
    -1361    if sorted(obs_a.names) != sorted(obs_b.names):
    -1362        raise Exception(f"Ensembles do not fit {set(sorted(obs_a.names)) ^ set(sorted(obs_b.names))}")
    -1363    if len(obs_a.cov_names) or len(obs_b.cov_names):
    -1364        raise Exception('Error: Not possible to correlate Obs that contain covobs!')
    -1365    for name in obs_a.names:
    -1366        if obs_a.shape[name] != obs_b.shape[name]:
    -1367            raise Exception('Shapes of ensemble', name, 'do not fit')
    -1368        if obs_a.idl[name] != obs_b.idl[name]:
    -1369            raise Exception('idl of ensemble', name, 'do not fit')
    -1370
    -1371    if obs_a.reweighted is True:
    -1372        warnings.warn("The first observable is already reweighted.", RuntimeWarning)
    -1373    if obs_b.reweighted is True:
    -1374        warnings.warn("The second observable is already reweighted.", RuntimeWarning)
    -1375
    -1376    new_samples = []
    -1377    new_idl = []
    -1378    for name in sorted(obs_a.names):
    -1379        new_samples.append((obs_a.deltas[name] + obs_a.r_values[name]) * (obs_b.deltas[name] + obs_b.r_values[name]))
    -1380        new_idl.append(obs_a.idl[name])
    -1381
    -1382    o = Obs(new_samples, sorted(obs_a.names), idl=new_idl)
    -1383    o.reweighted = obs_a.reweighted or obs_b.reweighted
    -1384    return o
+1388def correlate(obs_a, obs_b):
    +1389    """Correlate two observables.
    +1390
    +1391    Parameters
    +1392    ----------
    +1393    obs_a : Obs
    +1394        First observable
    +1395    obs_b : Obs
    +1396        Second observable
    +1397
    +1398    Notes
    +1399    -----
    +1400    Keep in mind to only correlate primary observables which have not been reweighted
    +1401    yet. The reweighting has to be applied after correlating the observables.
    +1402    Currently only works if ensembles are identical (this is not strictly necessary).
    +1403    """
    +1404
    +1405    if sorted(obs_a.names) != sorted(obs_b.names):
    +1406        raise Exception(f"Ensembles do not fit {set(sorted(obs_a.names)) ^ set(sorted(obs_b.names))}")
    +1407    if len(obs_a.cov_names) or len(obs_b.cov_names):
    +1408        raise Exception('Error: Not possible to correlate Obs that contain covobs!')
    +1409    for name in obs_a.names:
    +1410        if obs_a.shape[name] != obs_b.shape[name]:
    +1411            raise Exception('Shapes of ensemble', name, 'do not fit')
    +1412        if obs_a.idl[name] != obs_b.idl[name]:
    +1413            raise Exception('idl of ensemble', name, 'do not fit')
    +1414
    +1415    if obs_a.reweighted is True:
    +1416        warnings.warn("The first observable is already reweighted.", RuntimeWarning)
    +1417    if obs_b.reweighted is True:
    +1418        warnings.warn("The second observable is already reweighted.", RuntimeWarning)
    +1419
    +1420    new_samples = []
    +1421    new_idl = []
    +1422    for name in sorted(obs_a.names):
    +1423        new_samples.append((obs_a.deltas[name] + obs_a.r_values[name]) * (obs_b.deltas[name] + obs_b.r_values[name]))
    +1424        new_idl.append(obs_a.idl[name])
    +1425
    +1426    o = Obs(new_samples, sorted(obs_a.names), idl=new_idl)
    +1427    o.reweighted = obs_a.reweighted or obs_b.reweighted
    +1428    return o
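
A short sketch of correlate (obs_a and obs_b stand for hypothetical primary observables defined on identical ensembles and idl):

import pyerrors as pe

ab = pe.correlate(obs_a, obs_b)   # Obs built from the product of the two sets of samples
ab.gamma_method()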
     
    @@ -5269,74 +5476,74 @@ Currently only works if ensembles are identical (this is not strictly necessary)
-1387def covariance(obs, visualize=False, correlation=False, smooth=None, **kwargs):
    -1388    r'''Calculates the error covariance matrix of a set of observables.
    -1389
    -1390    WARNING: This function should be used with care, especially for observables with support on multiple
    -1391             ensembles with differing autocorrelations. See the notes below for details.
    -1392
    -1393    The gamma method has to be applied first to all observables.
    -1394
    -1395    Parameters
    -1396    ----------
    -1397    obs : list or numpy.ndarray
    -1398        List or one dimensional array of Obs
    -1399    visualize : bool
    -1400        If True plots the corresponding normalized correlation matrix (default False).
    -1401    correlation : bool
    -1402        If True the correlation matrix instead of the error covariance matrix is returned (default False).
    -1403    smooth : None or int
    -1404        If smooth is an integer 'E' between 2 and the dimension of the matrix minus 1 the eigenvalue
    -1405        smoothing procedure of hep-lat/9412087 is applied to the correlation matrix which leaves the
    -1406        largest E eigenvalues essentially unchanged and smoothes the smaller eigenvalues to avoid extremely
    -1407        small ones.
    -1408
    -1409    Notes
    -1410    -----
    -1411    The error covariance is defined such that it agrees with the squared standard error for two identical observables
    -1412    $$\operatorname{cov}(a,a)=\sum_{s=1}^N\delta_a^s\delta_a^s/N^2=\Gamma_{aa}(0)/N=\operatorname{var}(a)/N=\sigma_a^2$$
    -1413    in the absence of autocorrelation.
    -1414    The error covariance is estimated by calculating the correlation matrix assuming no autocorrelation and then rescaling the correlation matrix by the full errors including the previous gamma method estimate for the autocorrelation of the observables. The covariance at windowsize 0 is guaranteed to be positive semi-definite
    -1415    $$\sum_{i,j}v_i\Gamma_{ij}(0)v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i,j}v_i\delta_i^s\delta_j^s v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i}|v_i\delta_i^s|^2\geq 0\,,$$ for every $v\in\mathbb{R}^M$, while such an identity does not hold for larger windows/lags.
    -1416    For observables defined on a single ensemble our approximation is equivalent to assuming that the integrated autocorrelation time of an off-diagonal element is equal to the geometric mean of the integrated autocorrelation times of the corresponding diagonal elements.
    -1417    $$\tau_{\mathrm{int}, ij}=\sqrt{\tau_{\mathrm{int}, i}\times \tau_{\mathrm{int}, j}}$$
    -1418    This construction ensures that the estimated covariance matrix is positive semi-definite (up to numerical rounding errors).
    -1419    '''
    -1420
    -1421    length = len(obs)
    -1422
    -1423    max_samples = np.max([o.N for o in obs])
    -1424    if max_samples <= length and not [item for sublist in [o.cov_names for o in obs] for item in sublist]:
    -1425        warnings.warn(f"The dimension of the covariance matrix ({length}) is larger or equal to the number of samples ({max_samples}). This will result in a rank deficient matrix.", RuntimeWarning)
    -1426
    -1427    cov = np.zeros((length, length))
    -1428    for i in range(length):
    -1429        for j in range(i, length):
    -1430            cov[i, j] = _covariance_element(obs[i], obs[j])
    -1431    cov = cov + cov.T - np.diag(np.diag(cov))
    -1432
    -1433    corr = np.diag(1 / np.sqrt(np.diag(cov))) @ cov @ np.diag(1 / np.sqrt(np.diag(cov)))
    -1434
    -1435    if isinstance(smooth, int):
    -1436        corr = _smooth_eigenvalues(corr, smooth)
    -1437
    -1438    if visualize:
    -1439        plt.matshow(corr, vmin=-1, vmax=1)
    -1440        plt.set_cmap('RdBu')
    -1441        plt.colorbar()
    -1442        plt.draw()
    -1443
    -1444    if correlation is True:
    -1445        return corr
    -1446
    -1447    errors = [o.dvalue for o in obs]
    -1448    cov = np.diag(errors) @ corr @ np.diag(errors)
    -1449
    -1450    eigenvalues = np.linalg.eigh(cov)[0]
    -1451    if not np.all(eigenvalues >= 0):
    -1452        warnings.warn("Covariance matrix is not positive semi-definite (Eigenvalues: " + str(eigenvalues) + ")", RuntimeWarning)
    -1453
    -1454    return cov
+1431def covariance(obs, visualize=False, correlation=False, smooth=None, **kwargs):
    +1432    r'''Calculates the error covariance matrix of a set of observables.
    +1433
    +1434    WARNING: This function should be used with care, especially for observables with support on multiple
    +1435             ensembles with differing autocorrelations. See the notes below for details.
    +1436
    +1437    The gamma method has to be applied first to all observables.
    +1438
    +1439    Parameters
    +1440    ----------
    +1441    obs : list or numpy.ndarray
    +1442        List or one dimensional array of Obs
    +1443    visualize : bool
    +1444        If True plots the corresponding normalized correlation matrix (default False).
    +1445    correlation : bool
    +1446        If True the correlation matrix instead of the error covariance matrix is returned (default False).
    +1447    smooth : None or int
    +1448        If smooth is an integer 'E' between 2 and the dimension of the matrix minus 1 the eigenvalue
    +1449        smoothing procedure of hep-lat/9412087 is applied to the correlation matrix which leaves the
    +1450        largest E eigenvalues essentially unchanged and smoothes the smaller eigenvalues to avoid extremely
    +1451        small ones.
    +1452
    +1453    Notes
    +1454    -----
    +1455    The error covariance is defined such that it agrees with the squared standard error for two identical observables
    +1456    $$\operatorname{cov}(a,a)=\sum_{s=1}^N\delta_a^s\delta_a^s/N^2=\Gamma_{aa}(0)/N=\operatorname{var}(a)/N=\sigma_a^2$$
    +1457    in the absence of autocorrelation.
    +1458    The error covariance is estimated by calculating the correlation matrix assuming no autocorrelation and then rescaling the correlation matrix by the full errors including the previous gamma method estimate for the autocorrelation of the observables. The covariance at windowsize 0 is guaranteed to be positive semi-definite
    +1459    $$\sum_{i,j}v_i\Gamma_{ij}(0)v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i,j}v_i\delta_i^s\delta_j^s v_j=\frac{1}{N}\sum_{s=1}^N\sum_{i}|v_i\delta_i^s|^2\geq 0\,,$$ for every $v\in\mathbb{R}^M$, while such an identity does not hold for larger windows/lags.
    +1460    For observables defined on a single ensemble our approximation is equivalent to assuming that the integrated autocorrelation time of an off-diagonal element is equal to the geometric mean of the integrated autocorrelation times of the corresponding diagonal elements.
    +1461    $$\tau_{\mathrm{int}, ij}=\sqrt{\tau_{\mathrm{int}, i}\times \tau_{\mathrm{int}, j}}$$
    +1462    This construction ensures that the estimated covariance matrix is positive semi-definite (up to numerical rounding errors).
    +1463    '''
    +1464
    +1465    length = len(obs)
    +1466
    +1467    max_samples = np.max([o.N for o in obs])
    +1468    if max_samples <= length and not [item for sublist in [o.cov_names for o in obs] for item in sublist]:
    +1469        warnings.warn(f"The dimension of the covariance matrix ({length}) is larger or equal to the number of samples ({max_samples}). This will result in a rank deficient matrix.", RuntimeWarning)
    +1470
    +1471    cov = np.zeros((length, length))
    +1472    for i in range(length):
    +1473        for j in range(i, length):
    +1474            cov[i, j] = _covariance_element(obs[i], obs[j])
    +1475    cov = cov + cov.T - np.diag(np.diag(cov))
    +1476
    +1477    corr = np.diag(1 / np.sqrt(np.diag(cov))) @ cov @ np.diag(1 / np.sqrt(np.diag(cov)))
    +1478
    +1479    if isinstance(smooth, int):
    +1480        corr = _smooth_eigenvalues(corr, smooth)
    +1481
    +1482    if visualize:
    +1483        plt.matshow(corr, vmin=-1, vmax=1)
    +1484        plt.set_cmap('RdBu')
    +1485        plt.colorbar()
    +1486        plt.draw()
    +1487
    +1488    if correlation is True:
    +1489        return corr
    +1490
    +1491    errors = [o.dvalue for o in obs]
    +1492    cov = np.diag(errors) @ corr @ np.diag(errors)
    +1493
    +1494    eigenvalues = np.linalg.eigh(cov)[0]
    +1495    if not np.all(eigenvalues >= 0):
    +1496        warnings.warn("Covariance matrix is not positive semi-definite (Eigenvalues: " + str(eigenvalues) + ")", RuntimeWarning)
    +1497
    +1498    return cov
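
A minimal usage sketch of covariance (obs1 and obs2 are hypothetical Obs; the gamma method has to be applied first):

import pyerrors as pe

obs1.gamma_method()
obs2.gamma_method()
cov = pe.covariance([obs1, obs2])                     # 2x2 error covariance matrix
corr = pe.covariance([obs1, obs2], correlation=True)  # normalized correlation matrix instead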
     
    @@ -5388,24 +5595,24 @@ This construction ensures that the estimated covariance matrix is positive semi-
-1534def import_jackknife(jacks, name, idl=None):
    -1535    """Imports jackknife samples and returns an Obs
    -1536
    -1537    Parameters
    -1538    ----------
    -1539    jacks : numpy.ndarray
    -1540        numpy array containing the mean value as zeroth entry and
    -1541        the N jackknife samples as first to Nth entry.
    -1542    name : str
    -1543        name of the ensemble the samples are defined on.
    -1544    """
    -1545    length = len(jacks) - 1
    -1546    prj = (np.ones((length, length)) - (length - 1) * np.identity(length))
    -1547    samples = jacks[1:] @ prj
    -1548    mean = np.mean(samples)
    -1549    new_obs = Obs([samples - mean], [name], idl=idl, means=[mean])
    -1550    new_obs._value = jacks[0]
    -1551    return new_obs
+1578def import_jackknife(jacks, name, idl=None):
    +1579    """Imports jackknife samples and returns an Obs
    +1580
    +1581    Parameters
    +1582    ----------
    +1583    jacks : numpy.ndarray
    +1584        numpy array containing the mean value as zeroth entry and
    +1585        the N jackknife samples as first to Nth entry.
    +1586    name : str
    +1587        name of the ensemble the samples are defined on.
    +1588    """
    +1589    length = len(jacks) - 1
    +1590    prj = (np.ones((length, length)) - (length - 1) * np.identity(length))
    +1591    samples = jacks[1:] @ prj
    +1592    mean = np.mean(samples)
    +1593    new_obs = Obs([samples - mean], [name], idl=idl, means=[mean])
    +1594    new_obs._value = jacks[0]
    +1595    return new_obs
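
A short round-trip sketch pairing import_jackknife with Obs.export_jackknife (my_obs is a hypothetical Obs defined on a single regular Monte Carlo chain):

import pyerrors as pe

jacks = my_obs.export_jackknife()                     # mean value plus N jackknife samples
reconstructed = pe.import_jackknife(jacks, 'ensemble_name')
reconstructed.gamma_method()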
     
@@ -5423,6 +5630,67 @@ name of the ensemble the samples are defined on.
+1598def import_bootstrap(boots, name, random_numbers):
    +1599    """Imports bootstrap samples and returns an Obs
    +1600
    +1601    Parameters
    +1602    ----------
    +1603    boots : numpy.ndarray
    +1604        numpy array containing the mean value as zeroth entry and
    +1605        the N bootstrap samples as first to Nth entry.
    +1606    name : str
    +1607        name of the ensemble the samples are defined on.
    +1608    random_numbers : np.ndarray
    +1609        Array of shape (samples, length) containing the random numbers to generate the bootstrap samples,
    +1610        where samples is the number of bootstrap samples and length is the length of the original Monte Carlo
    +1611        chain to be reconstructed.
    +1612    """
    +1613    samples, length = random_numbers.shape
    +1614    if samples != len(boots) - 1:
    +1615        raise ValueError("Random numbers do not have the correct shape.")
    +1616
    +1617    if samples < length:
    +1618        raise ValueError("Obs can't be reconstructed if there are fewer bootstrap samples than Monte Carlo data points.")
    +1619
    +1620    proj = np.vstack([np.bincount(o, minlength=length) for o in random_numbers]) / length
    +1621
    +1622    samples = scipy.linalg.lstsq(proj, boots[1:])[0]
    +1623    ret = Obs([samples], [name])
    +1624    ret._value = boots[0]
    +1625    return ret
Imports bootstrap samples and returns an Obs

Parameters
• boots (numpy.ndarray): numpy array containing the mean value as zeroth entry and the N bootstrap samples as first to Nth entry.
• name (str): name of the ensemble the samples are defined on.
• random_numbers (np.ndarray): Array of shape (samples, length) containing the random numbers to generate the bootstrap samples, where samples is the number of bootstrap samples and length is the length of the original Monte Carlo chain to be reconstructed.
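
A hedged round-trip sketch pairing import_bootstrap with the export_bootstrap method added alongside it (my_obs is a hypothetical Obs; the exact export_bootstrap signature is assumed here):

import numpy as np
import pyerrors as pe

rng = np.random.default_rng(5)
length = my_obs.N                                              # length of the original Monte Carlo chain
random_numbers = rng.integers(0, length, size=(1000, length))  # one row of indices per bootstrap sample
boots = my_obs.export_bootstrap(1000, random_numbers=random_numbers)  # mean plus 1000 bootstrap samples
reconstructed = pe.import_bootstrap(boots, 'ensemble_name', random_numbers)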
@@ -5435,34 +5703,34 @@ name of the ensemble the samples are defined on.
-1554def merge_obs(list_of_obs):
    -1555    """Combine all observables in list_of_obs into one new observable
    -1556
    -1557    Parameters
    -1558    ----------
    -1559    list_of_obs : list
    -1560        list of the Obs object to be combined
    -1561
    -1562    Notes
    -1563    -----
    -1564    It is not possible to combine obs which are based on the same replicum
    -1565    """
    -1566    replist = [item for obs in list_of_obs for item in obs.names]
    -1567    if (len(replist) == len(set(replist))) is False:
    -1568        raise Exception('list_of_obs contains duplicate replica: %s' % (str(replist)))
    -1569    if any([len(o.cov_names) for o in list_of_obs]):
    -1570        raise Exception('Not possible to merge data that contains covobs!')
    -1571    new_dict = {}
    -1572    idl_dict = {}
    -1573    for o in list_of_obs:
    -1574        new_dict.update({key: o.deltas.get(key, 0) + o.r_values.get(key, 0)
    -1575                        for key in set(o.deltas) | set(o.r_values)})
    -1576        idl_dict.update({key: o.idl.get(key, 0) for key in set(o.deltas)})
    -1577
    -1578    names = sorted(new_dict.keys())
    -1579    o = Obs([new_dict[name] for name in names], names, idl=[idl_dict[name] for name in names])
    -1580    o.reweighted = np.max([oi.reweighted for oi in list_of_obs])
    -1581    return o
+1628def merge_obs(list_of_obs):
    +1629    """Combine all observables in list_of_obs into one new observable
    +1630
    +1631    Parameters
    +1632    ----------
    +1633    list_of_obs : list
    +1634        list of the Obs object to be combined
    +1635
    +1636    Notes
    +1637    -----
    +1638    It is not possible to combine obs which are based on the same replicum
    +1639    """
    +1640    replist = [item for obs in list_of_obs for item in obs.names]
    +1641    if (len(replist) == len(set(replist))) is False:
    +1642        raise Exception('list_of_obs contains duplicate replica: %s' % (str(replist)))
    +1643    if any([len(o.cov_names) for o in list_of_obs]):
    +1644        raise Exception('Not possible to merge data that contains covobs!')
    +1645    new_dict = {}
    +1646    idl_dict = {}
    +1647    for o in list_of_obs:
    +1648        new_dict.update({key: o.deltas.get(key, 0) + o.r_values.get(key, 0)
    +1649                        for key in set(o.deltas) | set(o.r_values)})
    +1650        idl_dict.update({key: o.idl.get(key, 0) for key in set(o.deltas)})
    +1651
    +1652    names = sorted(new_dict.keys())
    +1653    o = Obs([new_dict[name] for name in names], names, idl=[idl_dict[name] for name in names])
    +1654    o.reweighted = np.max([oi.reweighted for oi in list_of_obs])
    +1655    return o
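
A minimal usage sketch of merge_obs (the sample arrays are hypothetical; the two Obs live on different replica of the same ensemble):

import pyerrors as pe

part1 = pe.Obs([samples_r01], ['ensemble1|r01'])
part2 = pe.Obs([samples_r02], ['ensemble1|r02'])
merged = pe.merge_obs([part1, part2])
merged.gamma_method()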
     
    @@ -5493,47 +5761,47 @@ list of the Obs object to be combined
-1584def cov_Obs(means, cov, name, grad=None):
    -1585    """Create an Obs based on mean(s) and a covariance matrix
    -1586
    -1587    Parameters
    -1588    ----------
    -1589    mean : list of floats or float
    -1590        N mean value(s) of the new Obs
    -1591    cov : list or array
    -1592        2d (NxN) Covariance matrix, 1d diagonal entries or 0d covariance
    -1593    name : str
    -1594        identifier for the covariance matrix
    -1595    grad : list or array
    -1596        Gradient of the Covobs wrt. the means belonging to cov.
    -1597    """
    -1598
    -1599    def covobs_to_obs(co):
    -1600        """Make an Obs out of a Covobs
    -1601
    -1602        Parameters
    -1603        ----------
    -1604        co : Covobs
    -1605            Covobs to be embedded into the Obs
    -1606        """
    -1607        o = Obs([], [], means=[])
    -1608        o._value = co.value
    -1609        o.names.append(co.name)
    -1610        o._covobs[co.name] = co
    -1611        o._dvalue = np.sqrt(co.errsq())
    -1612        return o
    -1613
    -1614    ol = []
    -1615    if isinstance(means, (float, int)):
    -1616        means = [means]
    -1617
    -1618    for i in range(len(means)):
    -1619        ol.append(covobs_to_obs(Covobs(means[i], cov, name, pos=i, grad=grad)))
    -1620    if ol[0].covobs[name].N != len(means):
    -1621        raise Exception('You have to provide %d mean values!' % (ol[0].N))
    -1622    if len(ol) == 1:
    -1623        return ol[0]
    -1624    return ol
+1658def cov_Obs(means, cov, name, grad=None):
    +1659    """Create an Obs based on mean(s) and a covariance matrix
    +1660
    +1661    Parameters
    +1662    ----------
    +1663    mean : list of floats or float
    +1664        N mean value(s) of the new Obs
    +1665    cov : list or array
    +1666        2d (NxN) Covariance matrix, 1d diagonal entries or 0d covariance
    +1667    name : str
    +1668        identifier for the covariance matrix
    +1669    grad : list or array
    +1670        Gradient of the Covobs wrt. the means belonging to cov.
    +1671    """
    +1672
    +1673    def covobs_to_obs(co):
    +1674        """Make an Obs out of a Covobs
    +1675
    +1676        Parameters
    +1677        ----------
    +1678        co : Covobs
    +1679            Covobs to be embedded into the Obs
    +1680        """
    +1681        o = Obs([], [], means=[])
    +1682        o._value = co.value
    +1683        o.names.append(co.name)
    +1684        o._covobs[co.name] = co
    +1685        o._dvalue = np.sqrt(co.errsq())
    +1686        return o
    +1687
    +1688    ol = []
    +1689    if isinstance(means, (float, int)):
    +1690        means = [means]
    +1691
    +1692    for i in range(len(means)):
    +1693        ol.append(covobs_to_obs(Covobs(means[i], cov, name, pos=i, grad=grad)))
    +1694    if ol[0].covobs[name].N != len(means):
    +1695        raise Exception('You have to provide %d mean values!' % (ol[0].N))
    +1696    if len(ol) == 1:
    +1697        return ol[0]
    +1698    return ol
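
Two short usage sketches of cov_Obs, following the examples from the package documentation:

import pyerrors as pe

# single external quantity: mean value and squared error as a 1x1 covariance
mpi = pe.cov_Obs(134.9768, 0.0005 ** 2, 'pi^0 mass')

# two correlated parameters with an explicit 2x2 covariance matrix (illustrative numbers)
pars = pe.cov_Obs([16.7457, -19.0475], [[3.49591, -6.07560], [-6.07560, 10.5834]], 'R_AP parameters')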
     
diff --git a/docs/search.js b/docs/search.js index e93aa23b..f24f2e35 100644 --- a/docs/search.js +++ b/docs/search.js @@ -1,6 +1,6 @@ window.pdocSearch = (function(){ /** elasticlunr - http://weixsong.github.io * Copyright (C) 2017 Oliver Nightingale * Copyright (C) 2017 Wei Song * MIT Licensed */ [minified elasticlunr library code, unchanged; only the serialized documentation index that follows was regenerated]

What is pyerrors?

pyerrors is a python package for error computation and propagation of Markov chain Monte Carlo data.
It is based on the gamma method arXiv:hep-lat/0306017. Some of its features are:

• automatic differentiation for exact linear error propagation as suggested in arXiv:1809.01289 (partly based on the autograd package).
• treatment of slow modes in the simulation as suggested in arXiv:1009.5228.
• coherent error propagation for data from different Markov chains.
• non-linear fits with x- and y-errors and exact linear error propagation based on automatic differentiation as introduced in arXiv:1809.01289.
• real and complex matrix operations and their error propagation based on automatic differentiation (Matrix inverse, Cholesky decomposition, calculation of eigenvalues and eigenvectors, singular value decomposition...).

More detailed examples can be found in the GitHub repository.

If you use pyerrors for research that leads to a publication please consider citing:

• Fabian Joswig, Simon Kuberski, Justus T. Kuhlmann, Jan Neuendorf, pyerrors: a python framework for error analysis of Monte Carlo data. Comput.Phys.Commun. 288 (2023) 108750.
• Ulli Wolff, Monte Carlo errors with less errors. Comput.Phys.Commun. 156 (2004) 143-153, Comput.Phys.Commun. 176 (2007) 383 (erratum).
• Alberto Ramos, Automatic differentiation for error analysis of Monte Carlo data. Comput.Phys.Commun. 238 (2019) 19-35.

and

• Stefan Schaefer, Rainer Sommer, Francesco Virotta, Critical slowing down and error analysis in lattice QCD simulations. Nucl.Phys.B 845 (2011) 93-119.

where applicable.

There exist similar publicly available implementations of gamma method error analysis suites in Fortran, Julia and Python.

Installation

Install the most recent release using pip and pypi:

python -m pip install pyerrors     # Fresh install
python -m pip install -U pyerrors  # Update

Install the most recent release using conda and conda-forge:

conda install -c conda-forge pyerrors  # Fresh install
conda update -c conda-forge pyerrors   # Update

Install the current develop version:

python -m pip install git+https://github.com/fjosw/pyerrors.git@develop

Basic example

import numpy as np
import pyerrors as pe

my_obs = pe.Obs([samples], ['ensemble_name']) # Initialize an Obs object
my_new_obs = 2 * np.log(my_obs) / my_obs ** 2 # Construct derived Obs object
my_new_obs.gamma_method()                     # Estimate the statistical error
print(my_new_obs)                             # Print the result to stdout
> 0.31498(72)

The Obs class

pyerrors introduces a new datatype, Obs, which simplifies error propagation and estimation for auto- and cross-correlated data.
An Obs object can be initialized with two arguments, the first is a list containing the samples for an observable from a Monte Carlo chain.
The samples can either be provided as python list or as numpy array.
The second argument is a list containing the names of the respective Monte Carlo chains as strings. These strings uniquely identify a Monte Carlo chain/ensemble. It is crucial for the correct error propagation that observations from the same Monte Carlo history are labeled with the same name. See Multiple ensembles/replica for details.

import pyerrors as pe

my_obs = pe.Obs([samples], ['ensemble_name'])

Error propagation

When performing mathematical operations on Obs objects the correct error propagation is intrinsically taken care of using a first order Taylor expansion
$$\delta_f^i=\sum_\alpha \bar{f}_\alpha \delta_\alpha^i\,,\quad \delta_\alpha^i=a_\alpha^i-\bar{a}_\alpha\,,$$
as introduced in arXiv:hep-lat/0306017.
The required derivatives $\bar{f}_\alpha$ are evaluated up to machine precision via automatic differentiation as suggested in arXiv:1809.01289.

The Obs class is designed such that mathematical numpy functions can be used on Obs just as for regular floats.

import numpy as np
import pyerrors as pe

my_obs1 = pe.Obs([samples1], ['ensemble_name'])
my_obs2 = pe.Obs([samples2], ['ensemble_name'])

my_sum = my_obs1 + my_obs2

my_m_eff = np.log(my_obs1 / my_obs2)

iamzero = my_m_eff - my_m_eff
# Check that value and fluctuations are zero within machine precision
print(iamzero == 0.0)
> True

Error estimation

The error estimation within pyerrors is based on the gamma method introduced in arXiv:hep-lat/0306017.
After having arrived at the derived quantity of interest the gamma_method can be called as detailed in the following example.

my_sum.gamma_method()
print(my_sum)
> 1.70(57)
my_sum.details()
> Result         1.70000000e+00 +/- 5.72046658e-01 +/- 7.56746598e-02 (33.650%)
>  t_int         2.71422900e+00 +/- 6.40320983e-01 S = 2.00
> 1000 samples in 1 ensemble:
>   · Ensemble 'ensemble_name' : 1000 configurations (from 1 to 1000)

The gamma_method is not automatically called after every intermediate step in order to prevent computational overhead.

We use the following definition of the integrated autocorrelation time established in Madras & Sokal 1988
$$\tau_\mathrm{int}=\frac{1}{2}+\sum_{t=1}^{W}\rho(t)\geq \frac{1}{2}\,.$$
The window $W$ is determined via the automatic windowing procedure described in arXiv:hep-lat/0306017.
The standard value for the parameter $S$ of this automatic windowing procedure is $S=2$. Other values for $S$ can be passed to the gamma_method as parameter.

my_sum.gamma_method(S=3.0)
my_sum.details()
> Result         1.70000000e+00 +/- 6.30675201e-01 +/- 1.04585650e-01 (37.099%)
>  t_int         3.29909703e+00 +/- 9.77310102e-01 S = 3.00
> 1000 samples in 1 ensemble:
>   · Ensemble 'ensemble_name' : 1000 configurations (from 1 to 1000)

The integrated autocorrelation time $\tau_\mathrm{int}$ and the autocorrelation function $\rho(W)$ can be monitored via the methods pyerrors.obs.Obs.plot_tauint and pyerrors.obs.Obs.plot_rho.
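A minimal sketch of how these monitoring methods can be called (my_sum is the Obs from the examples above; both methods open matplotlib figures):

my_sum.gamma_method()
my_sum.plot_rho()     # normalized autocorrelation function and chosen window
my_sum.plot_tauint()  # integrated autocorrelation time as a function of the window size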

If the parameter $S$ is set to zero it is assumed that the dataset does not exhibit any autocorrelation and the window size is chosen to be zero.
In this case the error estimate is identical to the sample standard error.

Exponential tails

Slow modes in the Monte Carlo history can be accounted for by attaching an exponential tail to the autocorrelation function $\rho$ as suggested in arXiv:1009.5228. The longest autocorrelation time in the history, $\tau_\mathrm{exp}$, can be passed to the gamma_method as parameter. In this case the automatic windowing procedure is vacated and the parameter $S$ does not affect the error estimate.

my_sum.gamma_method(tau_exp=7.2)
my_sum.details()
> Result         1.70000000e+00 +/- 6.28097762e-01 +/- 5.79077524e-02 (36.947%)
>  t_int         3.27218667e+00 +/- 7.99583654e-01 tau_exp = 7.20,  N_sigma = 1
> 1000 samples in 1 ensemble:
>   · Ensemble 'ensemble_name' : 1000 configurations (from 1 to 1000)

For the full API see pyerrors.obs.Obs.gamma_method.

Multiple ensembles/replica

Error propagation for multiple ensembles (Markov chains with different simulation parameters) is handled automatically. Ensembles are uniquely identified by their name.

obs1 = pe.Obs([samples1], ['ensemble1'])
obs2 = pe.Obs([samples2], ['ensemble2'])

my_sum = obs1 + obs2
my_sum.details()
> Result   2.00697958e+00
> 1500 samples in 2 ensembles:
>   · Ensemble 'ensemble1' : 1000 configurations (from 1 to 1000)
>   · Ensemble 'ensemble2' : 500 configurations (from 1 to 500)

Observables from the same Monte Carlo chain have to be initialized with the same name for correct error propagation. If different names were used in this case the data would be treated as statistically independent resulting in loss of relevant information and a potential over- or underestimate of the statistical error.

pyerrors identifies multiple replica (independent Markov chains with identical simulation parameters) by the vertical bar | in the name of the data set.

obs1 = pe.Obs([samples1], ['ensemble1|r01'])
obs2 = pe.Obs([samples2], ['ensemble1|r02'])

> my_sum = obs1 + obs2
> my_sum.details()
> Result   2.00697958e+00
> 1500 samples in 1 ensemble:
>   · Ensemble 'ensemble1'
>     · Replicum 'r01' : 1000 configurations (from 1 to 1000)
>     · Replicum 'r02' : 500 configurations (from 1 to 500)

Error estimation for multiple ensembles

In order to keep track of different error analysis parameters for different ensembles one can make use of global dictionaries as detailed in the following example.

pe.Obs.S_dict['ensemble1'] = 2.5
pe.Obs.tau_exp_dict['ensemble2'] = 8.0
pe.Obs.tau_exp_dict['ensemble3'] = 2.0

In case the gamma_method is called without any parameters it will use the values specified in the dictionaries for the respective ensembles.
Passing arguments to the gamma_method still dominates over the dictionaries.

Irregular Monte Carlo chains

Obs objects defined on irregular Monte Carlo chains can be initialized with the parameter idl.

# Observable defined on configurations 20 to 519
obs1 = pe.Obs([samples1], ['ensemble1'], idl=[range(20, 520)])
obs1.details()
> Result         9.98319881e-01
> 500 samples in 1 ensemble:
>   · Ensemble 'ensemble1' : 500 configurations (from 20 to 519)

# Observable defined on every second configuration between 5 and 1003
obs2 = pe.Obs([samples2], ['ensemble1'], idl=[range(5, 1005, 2)])
obs2.details()
> Result         9.99100712e-01
> 500 samples in 1 ensemble:
>   · Ensemble 'ensemble1' : 500 configurations (from 5 to 1003 in steps of 2)

# Observable defined on configurations 2, 9, 28, 29 and 501
obs3 = pe.Obs([samples3], ['ensemble1'], idl=[[2, 9, 28, 29, 501]])
obs3.details()
> Result         1.01718064e+00
> 5 samples in 1 ensemble:
>   · Ensemble 'ensemble1' : 5 configurations (irregular range)

Obs objects defined on regular and irregular histories of the same ensemble can be combined with each other and the correct error propagation and estimation is automatically taken care of.
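As a small illustration (using the hypothetical obs1 and obs3 defined above on a regular and an irregular range of the same ensemble), the overlap of configurations is handled automatically:

combined = obs1 + obs3
combined.gamma_method()
combined.details()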

Warning: Irregular Monte Carlo chains can result in odd patterns in the autocorrelation functions.
Make sure to check the autocorrelation time with e.g. pyerrors.obs.Obs.plot_rho or pyerrors.obs.Obs.plot_tauint.

For the full API see pyerrors.obs.Obs.

Correlators

When one is not interested in single observables but correlation functions, pyerrors offers the Corr class which simplifies the corresponding error propagation and provides the user with a set of standard methods. In order to initialize a Corr object one needs to arrange the data as a list of Obs

my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3])
print(my_corr)
> x0/a  Corr(x0/a)
> ------------------
> 0      0.7957(80)
> 1      0.5156(51)
> 2      0.3227(33)
> 3      0.2041(21)

In case the correlation functions are not defined on the outermost timeslices, for example because of fixed boundary conditions, a padding can be introduced.

my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3], padding=[1, 1])
print(my_corr)
> x0/a  Corr(x0/a)
> ------------------
> 0
> 1      0.7957(80)
> 2      0.5156(51)
> 3      0.3227(33)
> 4      0.2041(21)
> 5

The individual entries of a correlator can be accessed via slicing

print(my_corr[3])
> 0.3227(33)

Error propagation with the Corr class works very similar to Obs objects. Mathematical operations are overloaded and Corr objects can be computed together with other Corr objects, Obs objects or real numbers and integers.

my_new_corr = 0.3 * my_corr[2] * my_corr * my_corr + 12 / my_corr

pyerrors provides the user with a set of regularly used methods for the manipulation of correlator objects (a short usage sketch follows this list):

• Corr.gamma_method applies the gamma method to all entries of the correlator.
• Corr.m_eff to construct effective masses. Various variants for periodic and fixed temporal boundary conditions are available.
• Corr.deriv returns the first derivative of the correlator as Corr. Different discretizations of the numerical derivative are available.
• Corr.second_deriv returns the second derivative of the correlator as Corr. Different discretizations of the numerical derivative are available.
• Corr.symmetric symmetrizes parity even correlation functions, assuming periodic boundary conditions.
• Corr.anti_symmetric anti-symmetrizes parity odd correlation functions, assuming periodic boundary conditions.
• Corr.T_symmetry averages a correlator with its time symmetry partner, assuming fixed boundary conditions.
• Corr.plateau extracts a plateau value from the correlator in a given range.
• Corr.roll periodically shifts the correlator.
• Corr.reverse reverses the time ordering of the correlator.
• Corr.correlate constructs a disconnected correlation function from the correlator and another Corr or Obs object.
• Corr.reweight reweights the correlator.
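A brief sketch of a few of these methods (my_corr as above; the chosen plateau range is a made-up example):

my_corr.gamma_method()              # error estimation for every timeslice
m_eff = my_corr.m_eff()             # effective mass as a new Corr
first_deriv = my_corr.deriv()       # discretized first derivative
plateau = my_corr.plateau([1, 3])   # plateau value from the given range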

pyerrors can also handle matrices of correlation functions and extract energy states from these matrices via a generalized eigenvalue problem (see pyerrors.correlators.Corr.GEVP).

For the full API see pyerrors.correlators.Corr.

Complex valued observables

pyerrors can handle complex valued observables via the class pyerrors.obs.CObs.
CObs are initialized with a real and an imaginary part which both can be Obs valued.

my_real_part = pe.Obs([samples1], ['ensemble1'])
my_imag_part = pe.Obs([samples2], ['ensemble1'])

my_cobs = pe.CObs(my_real_part, my_imag_part)
my_cobs.gamma_method()
print(my_cobs)
> (0.9959(91)+0.659(28)j)

Elementary mathematical operations are overloaded and samples are properly propagated as for the Obs class.

my_derived_cobs = (my_cobs + my_cobs.conjugate()) / np.abs(my_cobs)
my_derived_cobs.gamma_method()
print(my_derived_cobs)
> (1.668(23)+0.0j)

The Covobs class

In many projects, auxiliary data that is not based on Monte Carlo chains enters. Examples are experimentally determined meson masses which are used to set the scale or renormalization constants. These numbers come with an error that has to be propagated through the analysis. The Covobs class allows to define such quantities in pyerrors. Furthermore, external input might consist of correlated quantities. An example are the parameters of an interpolation formula, which are defined via mean values and a covariance matrix between all parameters. The contribution of the interpolation formula to the error of a derived quantity therefore might depend on the complete covariance matrix.

This concept is built into the definition of Covobs. In pyerrors, external input is defined by $M$ mean values, a $M\times M$ covariance matrix, where $M=1$ is permissible, and a name that uniquely identifies the covariance matrix. Below, we define the pion mass, based on its mean value and error, 134.9768(5). Note that the square of the error enters cov_Obs, since the second argument of this function is the covariance matrix of the Covobs.

import pyerrors.obs as pe

mpi = pe.cov_Obs(134.9768, 0.0005**2, 'pi^0 mass')
mpi.gamma_method()
mpi.details()
> Result         1.34976800e+02 +/- 5.00000000e-04 +/- 0.00000000e+00 (0.000%)
>  pi^0 mass     5.00000000e-04
> 0 samples in 1 ensemble:
>   · Covobs   'pi^0 mass'

The resulting object mpi is an Obs that contains a Covobs. In the following, it may be handled as any other Obs. The contribution of the covariance matrix to the error of an Obs is determined from the $M \times M$ covariance matrix $\Sigma$ and the gradient of the Obs with respect to the external quantities, which is the $1\times M$ Jacobian matrix $J$, via
$$s = \sqrt{J^T \Sigma J}\,,$$
where the Jacobian is computed for each derived quantity via automatic differentiation.

Correlated auxiliary data is defined similarly to above, e.g., via

RAP = pe.cov_Obs([16.7457, -19.0475], [[3.49591, -6.07560], [-6.07560, 10.5834]], 'R_AP, 1906.03445, (5.3a)')
print(RAP)
> [Obs[16.7(1.9)], Obs[-19.0(3.3)]]

where RAP now is a list of two Obs that contains the two correlated parameters.

Since the gradient of a derived observable with respect to an external covariance matrix is propagated through the entire analysis, the Covobs class allows to quote the derivative of a result with respect to the external quantities. If these derivatives are published together with the result, small shifts in the definition of external quantities, e.g., the definition of the physical point, can be performed a posteriori based on the published information. This may help to compare results of different groups. The gradient of an Obs o with respect to a covariance matrix with the identifying string k may be accessed via

o.covobs[k].grad

Error propagation in iterative algorithms

pyerrors supports exact linear error propagation for iterative algorithms like various variants of non-linear least squares fits or root finding. The derivatives required for the error propagation are calculated as described in arXiv:1809.01289.

Least squares fits

Standard non-linear least square fits with errors on the dependent but not the independent variables can be performed with pyerrors.fits.least_squares. As default solver the Levenberg-Marquardt algorithm implemented in scipy is used.

Fit functions have to be of the following form

import autograd.numpy as anp

def func(a, x):
    return a[1] * anp.exp(-a[0] * x)

It is important that numerical functions refer to autograd.numpy instead of numpy for the automatic differentiation in iterative algorithms to work properly.

Fits can then be performed via

fit_result = pe.fits.least_squares(x, y, func)
print("\n", fit_result)
> Fit with 2 parameters
> Method: Levenberg-Marquardt
> `ftol` termination condition is satisfied.
> chisquare/d.o.f.: 0.9593035785160936

>  Goodness of fit:
> χ²/d.o.f. = 0.959304
> p-value   = 0.5673
> Fit parameters:
> 0      0.0548(28)
> 1      1.933(64)

where x is a list or numpy.array of floats and y is a list or numpy.array of Obs.

Data stored in Corr objects can be fitted directly using the Corr.fit method.

my_corr = pe.Corr(y)
fit_result = my_corr.fit(func, fitrange=[12, 25])

this can simplify working with absolute fit ranges and takes care of gaps in the data automatically.

For fit functions with multiple independent variables the fit function can be of the form

def func(a, x):
    (x1, x2) = x
    return a[0] * x1 ** 2 + a[1] * x2

pyerrors also supports correlated fits which can be triggered via the parameter correlated_fit=True.
Details about how the required covariance matrix is estimated can be found in pyerrors.obs.covariance.
Direct visualizations of the performed fits can be triggered via resplot=True or qqplot=True.
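A minimal sketch of how these keyword arguments can be combined (x, y and func as in the examples above):

fit_result = pe.fits.least_squares(x, y, func, correlated_fit=True)         # correlated chi^2
fit_result = pe.fits.least_squares(x, y, func, resplot=True, qqplot=True)   # with diagnostic plots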

For all available options including combined fits to multiple datasets see pyerrors.fits.least_squares.

Total least squares fits

pyerrors can also fit data with errors on both the dependent and independent variables using the total least squares method also referred to as orthogonal distance regression as implemented in scipy, see pyerrors.fits.least_squares. The syntax is identical to the standard least squares case, the only difference being that x also has to be a list or numpy.array of Obs.
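For orientation, a sketch of such a fit, assuming the total_least_squares entry point in pyerrors.fits and hypothetical Obs-valued x data:

ox = [pe.Obs([x_samples_i], ['ensemble_name']) for x_samples_i in x_sample_list]  # x values as Obs
fit_result = pe.fits.total_least_squares(ox, y, func)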

For the full API see pyerrors.fits for fits and pyerrors.roots for finding roots of functions.

Matrix operations

pyerrors provides wrappers for Obs- and CObs-valued matrix operations based on numpy.linalg. The supported functions include (a short usage sketch follows this list):

• inv for the matrix inverse.
• cholesky for the Cholesky decomposition.
• det for the matrix determinant.
• eigh for eigenvalues and eigenvectors of hermitean matrices.
• eig for eigenvalues of general matrices.
• pinv for the Moore-Penrose pseudoinverse.
• svd for the singular-value-decomposition.
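A minimal sketch using two of these wrappers (obs11, obs12 and obs22 are hypothetical Obs entries of a symmetric 2x2 matrix):

import numpy as np
import pyerrors as pe

matrix = np.asarray([[obs11, obs12], [obs12, obs22]])
inverse = pe.linalg.inv(matrix)                     # Obs-valued matrix inverse
eigenvalues, eigenvectors = pe.linalg.eigh(matrix)  # eigenvalues and eigenvectors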

For the full API see pyerrors.linalg.

Export data

The preferred exported file format within pyerrors is json.gz. Files written to this format are valid JSON files that have been compressed using gzip. The structure of the content is inspired by the dobs format of the ALPHA collaboration. The aim of the format is to facilitate the storage of data in a self-contained way such that, even years after the creation of the file, it is possible to extract all necessary information:

• What observables are stored? Possibly: How exactly are they defined.
• How does each single ensemble or external quantity contribute to the error of the observable?
• Who wrote the file, when and on which machine?

This can be achieved by storing all information in one single file. The export routines of pyerrors are written such that as much information as possible is written automatically as described in the following example

my_obs = pe.Obs([samples], ["test_ensemble"])
my_obs.tag = "My observable"

pe.input.json.dump_to_json(my_obs, "test_output_file", description="This file contains a test observable")
# For a single observable one can equivalently use the class method dump
my_obs.dump("test_output_file", description="This file contains a test observable")

check = pe.input.json.load_json("test_output_file")

print(my_obs == check)
> True

The format also allows to directly write out the content of Corr objects or lists and arrays of Obs objects by passing the desired data to pyerrors.input.json.dump_to_json.

json.gz format specification

    The first entries of the file provide optional auxiliary information:

    \n\n
      \n
    • program is a string that indicates which program was used to write the file.
    • \n
    • version is a string that specifies the version of the format.
    • \n
    • who is a string that specifies the user name of the creator of the file.
    • \n
    • date is a string and contains the creation date of the file.
    • \n
    • host is a string and contains the hostname of the machine where the file has been written.
    • \n
    • description contains information on the content of the file. This field is not filled automatically in pyerrors. The user is advised to provide as detailed information as possible in this field. Examples are: Input files of measurements or simulations, LaTeX formulae or references to publications to specify how the observables have been computed, details on the analysis strategy, ... This field may be any valid JSON type. Strings, arrays or objects (equivalent to dicts in python) are well suited to provide information.
    • \n
    \n\n

    The only necessary entry of the file is the field obsdata, an array that contains the actual data.

    \n\n

    Each entry of the array belongs to a single structure of observables. Currently, these structures can be one of Obs, list, numpy.ndarray or Corr. All Obs inside a structure (with dimension > 0) have to be defined on the same set of configurations. Different structures, represented by entries of the array obsdata, are treated independently. Each entry of the array obsdata has the following required entries:

    \n\n
      \n
    • type is a string that specifies the type of the structure. This allows the content to be parsed into the correct form after reading the file. It is always possible to interpret the content as a list of Obs.
    • \n
    • value is an array that contains the mean values of the Obs inside the structure.\nThe following entries are optional:
    • \n
    • layout is a string that specifies the layout of multi-dimensional structures. Examples are \"2, 2\" for a 2x2 dimensional matrix or \"64, 4, 4\" for a Corr with $T=64$ and 4x4 matrices on each time slice. \"1\" denotes a single Obs. Multi-dimensional structures are stored in row-major format (see below).
    • \n
    • tag is any JSON type. It contains additional information concerning the structure. The tag of an Obs in pyerrors is written here.
    • \n
    • reweighted is a Bool that may be used to specify whether the Obs in the structure have been reweighted.
    • \n
    • data is an array that contains the data from MC chains. We will define it below.
    • \n
    • cdata is an array that contains the data from external quantities with an error (Covobs in pyerrors). We will define it below.
    • \n
    \n\n

    The array data contains the data from MC chains. Each entry of the array corresponds to one ensemble and contains:

    \n\n
      \n
    • id, a string that contains the name of the ensemble
    • \n
    • replica, an array that contains an entry per replica of the ensemble.
    • \n
    \n\n

    Each entry of replica contains name, a string that contains the name of the replica, and deltas, an array that contains the actual data.

    \n\n

    Each entry in deltas corresponds to one configuration of the replica and has $1+N$ entries. The first entry is an integer that specifies the configuration number which, together with ensemble and replica name, may be used to uniquely identify the configuration on which the data has been obtained. The following N entries specify the deltas, i.e., the deviations of each Obs inside the structure from its mean value on this configuration. Multi-dimensional structures are stored in row-major format. For primary observables, such as correlation functions, $value + delta_i$ matches the primary data obtained on the configuration.
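    For illustration, a hypothetical data entry for a structure containing a single Obs (N = 1) measured on one replica could look like the following Python dictionary (all numbers are made up):

    # Hypothetical entry of the "data" array for a single Obs (N = 1)
    data_entry = {
        "id": "test_ensemble",
        "replica": [
            {
                "name": "test_ensemble|r01",
                "deltas": [
                    [1, -0.0012],  # configuration number followed by the N = 1 deltas
                    [2, 0.0034],
                    [3, -0.0022],
                ],
            }
        ],
    }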

    \n\n

    The array cdata contains information about the contribution of auxiliary observables, represented by Covobs in pyerrors, to the total error of the observables. Each entry of the array belongs to one auxiliary covariance matrix and contains:

    \n\n
      \n
    • id, a string that identifies the covariance matrix
    • \n
    • layout, a string that defines the dimensions of the $M\\times M$ covariance matrix (has to be \"M, M\" or \"1\").
    • \n
    • cov, an array that contains the $M\times M$ entries of the covariance matrix, stored in row-major format.
    • \n
    • grad, an array that contains N entries, one for each Obs inside the structure. Each entry is itself an array of M gradients of that observable with respect to the quantities that correspond to the diagonal entries of the covariance matrix.
    • \n
    \n\n

    A JSON schema that may be used to verify the correctness of a file with respect to the format definition is stored in ./examples/json_schema.json. The schema is a self-descriptive format definition and contains an exemplary file.

    \n\n

    Julia I/O routines for the json.gz format, compatible with ADerrors.jl, can be found here.

    \n"}, "pyerrors.correlators": {"fullname": "pyerrors.correlators", "modulename": "pyerrors.correlators", "kind": "module", "doc": "

    \n"}, "pyerrors.correlators.Corr": {"fullname": "pyerrors.correlators.Corr", "modulename": "pyerrors.correlators", "qualname": "Corr", "kind": "class", "doc": "

    The class for a correlator (time dependent sequence of pe.Obs).

    \n\n

    Everything this class does can be achieved using lists or arrays of Obs.\nBut it is simply more convenient to have a dedicated object for correlators.\nOne often wants to add or multiply correlators of the same length at every timeslice, and it is inconvenient\nto iterate over all timeslices for every operation. This is especially true when dealing with matrices.

    \n\n

    The correlator can have two types of content: an Obs at every timeslice OR a GEVP\nmatrix at every timeslice. Other dependencies (e.g. spatial) are not supported.

    \n"}, "pyerrors.correlators.Corr.__init__": {"fullname": "pyerrors.correlators.Corr.__init__", "modulename": "pyerrors.correlators", "qualname": "Corr.__init__", "kind": "function", "doc": "

    Initialize a Corr object.

    \n\n
    Parameters
    \n\n
      \n
    • data_input (list or array):\nlist of Obs or list of arrays of Obs or array of Corrs
    • \n
    • padding (list, optional):\nList with two entries where the first labels the padding\nat the front of the correlator and the second the padding\nat the back.
    • \n
    • prange (list, optional):\nList containing the first and last timeslice of the plateau\nregion identified for this correlator.
    • \n
    \n", "signature": "(data_input, padding=[0, 0], prange=None)"}, "pyerrors.correlators.Corr.tag": {"fullname": "pyerrors.correlators.Corr.tag", "modulename": "pyerrors.correlators", "qualname": "Corr.tag", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.content": {"fullname": "pyerrors.correlators.Corr.content", "modulename": "pyerrors.correlators", "qualname": "Corr.content", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.T": {"fullname": "pyerrors.correlators.Corr.T", "modulename": "pyerrors.correlators", "qualname": "Corr.T", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.prange": {"fullname": "pyerrors.correlators.Corr.prange", "modulename": "pyerrors.correlators", "qualname": "Corr.prange", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.reweighted": {"fullname": "pyerrors.correlators.Corr.reweighted", "modulename": "pyerrors.correlators", "qualname": "Corr.reweighted", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.gamma_method": {"fullname": "pyerrors.correlators.Corr.gamma_method", "modulename": "pyerrors.correlators", "qualname": "Corr.gamma_method", "kind": "function", "doc": "

    Apply the gamma method to the content of the Corr.

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.gm": {"fullname": "pyerrors.correlators.Corr.gm", "modulename": "pyerrors.correlators", "qualname": "Corr.gm", "kind": "function", "doc": "

    Apply the gamma method to the content of the Corr.

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.projected": {"fullname": "pyerrors.correlators.Corr.projected", "modulename": "pyerrors.correlators", "qualname": "Corr.projected", "kind": "function", "doc": "

    We need to project the Correlator with a Vector to get a single value at each timeslice.

    \n\n

    The method can use one or two vectors.\nIf two are specified it returns v1@G@v2 (the order can be important).\nBy default it will return the lowest source, which usually means unsmeared-unsmeared (0,0), but it does not have to.
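    A brief sketch, assuming an existing matrix correlator matrix_corr (the name is a placeholder):

    import numpy as np

    v1 = np.asarray([1.0, 0.0])
    v2 = np.asarray([0.0, 1.0])

    ground = matrix_corr.projected(v1)     # projection with a single vector
    mixed = matrix_corr.projected(v1, v2)  # returns v1 @ G(t) @ v2 at every timeslice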

    \n", "signature": "(self, vector_l=None, vector_r=None, normalize=False):", "funcdef": "def"}, "pyerrors.correlators.Corr.item": {"fullname": "pyerrors.correlators.Corr.item", "modulename": "pyerrors.correlators", "qualname": "Corr.item", "kind": "function", "doc": "

    Picks the element [i,j] from every matrix and returns a correlator containing one Obs per timeslice.

    \n\n
    Parameters
    \n\n
      \n
    • i (int):\nFirst index to be picked.
    • \n
    • j (int):\nSecond index to be picked.
    • \n
    \n", "signature": "(self, i, j):", "funcdef": "def"}, "pyerrors.correlators.Corr.plottable": {"fullname": "pyerrors.correlators.Corr.plottable", "modulename": "pyerrors.correlators", "qualname": "Corr.plottable", "kind": "function", "doc": "

    Outputs the correlator in a plottable format.

    \n\n

    Outputs three lists containing the timeslice index, the value on each\ntimeslice and the error on each timeslice.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.symmetric": {"fullname": "pyerrors.correlators.Corr.symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.symmetric", "kind": "function", "doc": "

    Symmetrize the correlator around x0=0.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.anti_symmetric": {"fullname": "pyerrors.correlators.Corr.anti_symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.anti_symmetric", "kind": "function", "doc": "

    Anti-symmetrize the correlator around x0=0.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"fullname": "pyerrors.correlators.Corr.is_matrix_symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.is_matrix_symmetric", "kind": "function", "doc": "

    Checks whether the correlator matrices are symmetric on every timeslice.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.matrix_symmetric": {"fullname": "pyerrors.correlators.Corr.matrix_symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.matrix_symmetric", "kind": "function", "doc": "

    Symmetrizes the correlator matrices on every timeslice.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.GEVP": {"fullname": "pyerrors.correlators.Corr.GEVP", "modulename": "pyerrors.correlators", "qualname": "Corr.GEVP", "kind": "function", "doc": "

    Solves the generalized eigenvalue problem on the correlator matrix and returns the corresponding eigenvectors.

    \n\n

    The eigenvectors are sorted according to the descending eigenvalues, the zeroth eigenvector(s) correspond to the\nlargest eigenvalue(s). The eigenvector(s) for the individual states can be accessed via slicing

    \n\n
    \n
    C.GEVP(t0=2)[0]  # Ground state vector(s)\nC.GEVP(t0=2)[:3]  # Vectors for the lowest three states\n
    \n
    \n\n
    Parameters
    \n\n
      \n
    • t0 (int):\nThe time t0 for the right hand side of the GEVP according to $G(t)v_i=\\lambda_i G(t_0)v_i$
    • \n
    • ts (int):\nfixed time $G(t_s)v_i=\\lambda_i G(t_0)v_i$ if sort=None.\nIf sort=\"Eigenvector\" it gives a reference point for the sorting method.
    • \n
    • sort (string):\nIf this argument is set, a list of self.T vectors per state is returned. If it is set to None, only one vector is returned.\n
        \n
      • \"Eigenvalue\": The eigenvector is chosen according to which eigenvalue it belongs individually on every timeslice.
      • \n
      • \"Eigenvector\": Use the method described in arXiv:2004.10472 to find the set of v(t) belonging to the state.\nThe reference state is identified by its eigenvalue at $t=t_s$.
      • \n
    • \n
    \n\n
    Other Parameters
    \n\n
      \n
    • state (int):\nReturns only the vector(s) for a specified state. The lowest state is zero.
    • \n
    \n", "signature": "(self, t0, ts=None, sort='Eigenvalue', **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.Eigenvalue": {"fullname": "pyerrors.correlators.Corr.Eigenvalue", "modulename": "pyerrors.correlators", "qualname": "Corr.Eigenvalue", "kind": "function", "doc": "

    Determines the eigenvalue of the GEVP by solving and projecting the correlator

    \n\n
    Parameters
    \n\n
      \n
    • state (int):\nThe state one is interested in ordered by energy. The lowest state is zero.
    • \n
    • All other parameters are identical to the ones of Corr.GEVP.
    • \n
    \n", "signature": "(self, t0, ts=None, state=0, sort='Eigenvalue'):", "funcdef": "def"}, "pyerrors.correlators.Corr.Hankel": {"fullname": "pyerrors.correlators.Corr.Hankel", "modulename": "pyerrors.correlators", "qualname": "Corr.Hankel", "kind": "function", "doc": "

    Constructs an NxN Hankel matrix

    \n\n

    C(t) C(t+1) ... C(t+n-1)\nC(t+1) C(t+2) ... C(t+n)\n...\nC(t+(n-1)) C(t+n) ... C(t+2(n-1))

    \n\n
    Parameters
    \n\n
      \n
    • N (int):\nDimension of the Hankel matrix
    • \n
    • periodic (bool, optional):\ndetermines whether the matrix is extended periodically
    • \n
    \n", "signature": "(self, N, periodic=False):", "funcdef": "def"}, "pyerrors.correlators.Corr.roll": {"fullname": "pyerrors.correlators.Corr.roll", "modulename": "pyerrors.correlators", "qualname": "Corr.roll", "kind": "function", "doc": "

    Periodically shift the correlator by dt timeslices

    \n\n
    Parameters
    \n\n
      \n
    • dt (int):\nnumber of timeslices
    • \n
    \n", "signature": "(self, dt):", "funcdef": "def"}, "pyerrors.correlators.Corr.reverse": {"fullname": "pyerrors.correlators.Corr.reverse", "modulename": "pyerrors.correlators", "qualname": "Corr.reverse", "kind": "function", "doc": "

    Reverse the time ordering of the Corr

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.thin": {"fullname": "pyerrors.correlators.Corr.thin", "modulename": "pyerrors.correlators", "qualname": "Corr.thin", "kind": "function", "doc": "

    Thin out a correlator to suppress correlations

    \n\n
    Parameters
    \n\n
      \n
    • spacing (int):\nKeep only every 'spacing'th entry of the correlator
    • \n
    • offset (int):\nOffset the equal spacing
    • \n
    \n", "signature": "(self, spacing=2, offset=0):", "funcdef": "def"}, "pyerrors.correlators.Corr.correlate": {"fullname": "pyerrors.correlators.Corr.correlate", "modulename": "pyerrors.correlators", "qualname": "Corr.correlate", "kind": "function", "doc": "

    Correlate the correlator with another correlator or Obs

    \n\n
    Parameters
    \n\n
      \n
    • partner (Obs or Corr):\npartner to correlate the correlator with.\nCan either be an Obs which is correlated with all entries of the\ncorrelator or a Corr of same length.
    • \n
    \n", "signature": "(self, partner):", "funcdef": "def"}, "pyerrors.correlators.Corr.reweight": {"fullname": "pyerrors.correlators.Corr.reweight", "modulename": "pyerrors.correlators", "qualname": "Corr.reweight", "kind": "function", "doc": "

    Reweight the correlator.

    \n\n
    Parameters
    \n\n
      \n
    • weight (Obs):\nReweighting factor. An Observable that has to be defined on a superset of the\nconfigurations in obs[i].idl for all i.
    • \n
    • all_configs (bool):\nif True, the reweighted observables are normalized by the average of\nthe reweighting factor on all configurations in weight.idl and not\non the configurations in obs[i].idl.
    • \n
    \n", "signature": "(self, weight, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.T_symmetry": {"fullname": "pyerrors.correlators.Corr.T_symmetry", "modulename": "pyerrors.correlators", "qualname": "Corr.T_symmetry", "kind": "function", "doc": "

    Return the time symmetry average of the correlator and its partner

    \n\n
    Parameters
    \n\n
      \n
    • partner (Corr):\nTime symmetry partner of the Corr
    • \n
    • parity (int):\nParity quantum number of the correlator, can be +1 or -1
    • \n
    \n", "signature": "(self, partner, parity=1):", "funcdef": "def"}, "pyerrors.correlators.Corr.deriv": {"fullname": "pyerrors.correlators.Corr.deriv", "modulename": "pyerrors.correlators", "qualname": "Corr.deriv", "kind": "function", "doc": "

    Return the first derivative of the correlator with respect to x0.

    \n\n
    Parameters
    \n\n
      \n
    • variant (str):\ndecides which definition of the finite differences derivative is used.\nAvailable choices: symmetric, forward, backward, improved, log. Default: symmetric.
    • \n
    \n", "signature": "(self, variant='symmetric'):", "funcdef": "def"}, "pyerrors.correlators.Corr.second_deriv": {"fullname": "pyerrors.correlators.Corr.second_deriv", "modulename": "pyerrors.correlators", "qualname": "Corr.second_deriv", "kind": "function", "doc": "

    Return the second derivative of the correlator with respect to x0.

    \n\n
    Parameters
    \n\n
      \n
    • variant (str):\ndecides which definition of the finite differences derivative is used.\nAvailable choices:\n - symmetric (default)\n $$\tilde{\partial}^2_0 f(x_0) = f(x_0+1)-2f(x_0)+f(x_0-1)$$\n - big_symmetric\n $$\partial^2_0 f(x_0) = \frac{f(x_0+2)-2f(x_0)+f(x_0-2)}{4}$$\n - improved\n $$\partial^2_0 f(x_0) = \frac{-f(x_0+2) + 16 * f(x_0+1) - 30 * f(x_0) + 16 * f(x_0-1) - f(x_0-2)}{12}$$\n - log\n $$f(x) = \tilde{\partial}^2_0 log(f(x_0))+(\tilde{\partial}_0 log(f(x_0)))^2$$
    • \n
    \n", "signature": "(self, variant='symmetric'):", "funcdef": "def"}, "pyerrors.correlators.Corr.m_eff": {"fullname": "pyerrors.correlators.Corr.m_eff", "modulename": "pyerrors.correlators", "qualname": "Corr.m_eff", "kind": "function", "doc": "

    Returns the effective mass of the correlator as a correlator object

    \n\n
    Parameters
    \n\n
      \n
    • variant (str):\nlog : uses the standard effective mass log(C(t) / C(t+1))\ncosh, periodic : Use periodicity of the correlator by solving C(t) / C(t+1) = cosh(m * (t - T/2)) / cosh(m * (t + 1 - T/2)) for m.\nsinh : Use anti-periodicity of the correlator by solving C(t) / C(t+1) = sinh(m * (t - T/2)) / sinh(m * (t + 1 - T/2)) for m.\nSee, e.g., arXiv:1205.5380\narccosh : Uses the explicit form of the symmetrized correlator (not recommended)\nlogsym: uses the symmetric effective mass log(C(t-1) / C(t+1))/2
    • \n
    • guess (float):\nguess for the root finder, only relevant for the root variant
    • \n
    \n", "signature": "(self, variant='log', guess=1.0):", "funcdef": "def"}, "pyerrors.correlators.Corr.fit": {"fullname": "pyerrors.correlators.Corr.fit", "modulename": "pyerrors.correlators", "qualname": "Corr.fit", "kind": "function", "doc": "

    Fits a function to the data

    \n\n
    Parameters
    \n\n
      \n
    • function (obj):\nfunction to fit to the data. See fits.least_squares for details.
    • \n
    • fitrange (list):\nTwo element list containing the timeslices on which the fit is supposed to start and stop.\nCaution: This range is inclusive as opposed to standard python indexing.\nfitrange=[4, 6] corresponds to the three entries 4, 5 and 6.\nIf not specified, self.prange or all timeslices are used.
    • \n
    • silent (bool):\nDecides whether output is printed to the standard output.
    • \n
    \n", "signature": "(self, function, fitrange=None, silent=False, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.plateau": {"fullname": "pyerrors.correlators.Corr.plateau", "modulename": "pyerrors.correlators", "qualname": "Corr.plateau", "kind": "function", "doc": "

    Extract a plateau value from a Corr object

    \n\n
    Parameters
    \n\n
      \n
    • plateau_range (list):\nlist with two entries, indicating the first and the last timeslice\nof the plateau region.
    • \n
    • method (str):\nmethod to extract the plateau.\n 'fit' fits a constant to the plateau region\n 'avg', 'average' or 'mean' just average over the given timeslices.
    • \n
    • auto_gamma (bool):\napply gamma_method with default parameters to the Corr. Defaults to False.
    • \n
    \n", "signature": "(self, plateau_range=None, method='fit', auto_gamma=False):", "funcdef": "def"}, "pyerrors.correlators.Corr.set_prange": {"fullname": "pyerrors.correlators.Corr.set_prange", "modulename": "pyerrors.correlators", "qualname": "Corr.set_prange", "kind": "function", "doc": "

    Sets the attribute prange of the Corr object.

    \n", "signature": "(self, prange):", "funcdef": "def"}, "pyerrors.correlators.Corr.show": {"fullname": "pyerrors.correlators.Corr.show", "modulename": "pyerrors.correlators", "qualname": "Corr.show", "kind": "function", "doc": "

    Plots the correlator using the tag of the correlator as label if available.

    \n\n
    Parameters
    \n\n
      \n
    • x_range (list):\nlist of two values, determining the range of the x-axis e.g. [4, 8].
    • \n
    • comp (Corr or list of Corr):\nCorrelator or list of correlators which are plotted for comparison.\nThe tags of these correlators are used as labels if available.
    • \n
    • logscale (bool):\nSets y-axis to logscale.
    • \n
    • plateau (Obs):\nPlateau value to be visualized in the figure.
    • \n
    • fit_res (Fit_result):\nFit_result object to be visualized.
    • \n
    • fit_key (str):\nKey for the fit function in Fit_result.fit_function (for combined fits).
    • \n
    • ylabel (str):\nLabel for the y-axis.
    • \n
    • save (str):\npath to file in which the figure should be saved.
    • \n
    • auto_gamma (bool):\nApply the gamma method with standard parameters to all correlators and plateau values before plotting.
    • \n
    • hide_sigma (float):\nHides data points, starting from the first value that is consistent with zero within 'hide_sigma' standard errors.
    • \n
    • references (list):\nList of floating point values that are displayed as horizontal lines for reference.
    • \n
    • title (string):\nOptional title of the figure.
    • \n
    \n", "signature": "(\tself,\tx_range=None,\tcomp=None,\ty_range=None,\tlogscale=False,\tplateau=None,\tfit_res=None,\tfit_key=None,\tylabel=None,\tsave=None,\tauto_gamma=False,\thide_sigma=None,\treferences=None,\ttitle=None):", "funcdef": "def"}, "pyerrors.correlators.Corr.spaghetti_plot": {"fullname": "pyerrors.correlators.Corr.spaghetti_plot", "modulename": "pyerrors.correlators", "qualname": "Corr.spaghetti_plot", "kind": "function", "doc": "

    Produces a spaghetti plot of the correlator suited to monitor exceptional configurations.

    \n\n
    Parameters
    \n\n
      \n
    • logscale (bool):\nDetermines whether the scale of the y-axis is logarithmic or standard.
    • \n
    \n", "signature": "(self, logscale=True):", "funcdef": "def"}, "pyerrors.correlators.Corr.dump": {"fullname": "pyerrors.correlators.Corr.dump", "modulename": "pyerrors.correlators", "qualname": "Corr.dump", "kind": "function", "doc": "

    Dumps the Corr into a file of chosen type

    \n\n
    Parameters
    \n\n
      \n
    • filename (str):\nName of the file to be saved.
    • \n
    • datatype (str):\nFormat of the exported file. Supported formats include\n\"json.gz\" and \"pickle\"
    • \n
    • path (str):\nspecifies a custom path for the file (default '.')
    • \n
    \n", "signature": "(self, filename, datatype='json.gz', **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.print": {"fullname": "pyerrors.correlators.Corr.print", "modulename": "pyerrors.correlators", "qualname": "Corr.print", "kind": "function", "doc": "

    \n", "signature": "(self, print_range=None):", "funcdef": "def"}, "pyerrors.correlators.Corr.sqrt": {"fullname": "pyerrors.correlators.Corr.sqrt", "modulename": "pyerrors.correlators", "qualname": "Corr.sqrt", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.log": {"fullname": "pyerrors.correlators.Corr.log", "modulename": "pyerrors.correlators", "qualname": "Corr.log", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.exp": {"fullname": "pyerrors.correlators.Corr.exp", "modulename": "pyerrors.correlators", "qualname": "Corr.exp", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.sin": {"fullname": "pyerrors.correlators.Corr.sin", "modulename": "pyerrors.correlators", "qualname": "Corr.sin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.cos": {"fullname": "pyerrors.correlators.Corr.cos", "modulename": "pyerrors.correlators", "qualname": "Corr.cos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.tan": {"fullname": "pyerrors.correlators.Corr.tan", "modulename": "pyerrors.correlators", "qualname": "Corr.tan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.sinh": {"fullname": "pyerrors.correlators.Corr.sinh", "modulename": "pyerrors.correlators", "qualname": "Corr.sinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.cosh": {"fullname": "pyerrors.correlators.Corr.cosh", "modulename": "pyerrors.correlators", "qualname": "Corr.cosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.tanh": {"fullname": "pyerrors.correlators.Corr.tanh", "modulename": "pyerrors.correlators", "qualname": "Corr.tanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arcsin": {"fullname": "pyerrors.correlators.Corr.arcsin", "modulename": "pyerrors.correlators", "qualname": "Corr.arcsin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arccos": {"fullname": "pyerrors.correlators.Corr.arccos", "modulename": "pyerrors.correlators", "qualname": "Corr.arccos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arctan": {"fullname": "pyerrors.correlators.Corr.arctan", "modulename": "pyerrors.correlators", "qualname": "Corr.arctan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arcsinh": {"fullname": "pyerrors.correlators.Corr.arcsinh", "modulename": "pyerrors.correlators", "qualname": "Corr.arcsinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arccosh": {"fullname": "pyerrors.correlators.Corr.arccosh", "modulename": "pyerrors.correlators", "qualname": "Corr.arccosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arctanh": {"fullname": "pyerrors.correlators.Corr.arctanh", "modulename": "pyerrors.correlators", "qualname": "Corr.arctanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.real": {"fullname": "pyerrors.correlators.Corr.real", "modulename": "pyerrors.correlators", "qualname": "Corr.real", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.imag": {"fullname": "pyerrors.correlators.Corr.imag", "modulename": "pyerrors.correlators", "qualname": "Corr.imag", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.prune": {"fullname": "pyerrors.correlators.Corr.prune", "modulename": "pyerrors.correlators", "qualname": "Corr.prune", "kind": "function", "doc": "

    Project large correlation matrix to lowest states

    \n\n

    This method can be used to reduce the size of an (N x N) correlation matrix\nto (Ntrunc x Ntrunc) by solving a GEVP at very early times where the noise\nis still small.

    \n\n
    Parameters
    \n\n
      \n
    • Ntrunc (int):\nRank of the target matrix.
    • \n
    • tproj (int):\nTime where the eigenvectors are evaluated, corresponds to ts in the GEVP method.\nThe default value is 3.
    • \n
    • t0proj (int):\nTime where the correlation matrix is inverted. Choosing t0proj=1 is strongly\ndiscouraged for O(a) improved theories, since the correctness of the procedure\ncannot be granted in this case. The default value is 2.
    • \n
    • basematrix (Corr):\nCorrelation matrix that is used to determine the eigenvectors of the\nlowest states based on a GEVP. basematrix is taken to be the Corr itself if\nit is not specified.
    • \n
    \n\n
    Notes
    \n\n

    We have the basematrix $C(t)$ and the target matrix $G(t)$. We start by solving\nthe GEVP $$C(t) v_n(t, t_0) = \lambda_n(t, t_0) C(t_0) v_n(t, t_0)$$ where $t \equiv t_\mathrm{proj}$\nand $t_0 \equiv t_{0, \mathrm{proj}}$. The target matrix is projected onto the subspace of the\nresulting eigenvectors $v_n, n=1,\dots,N_\mathrm{trunc}$ via\n$$G^\prime_{i, j}(t) = (v_i, G(t) v_j)$$. This allows one to reduce the size of a large\ncorrelation matrix and to remove some of the noise that is added by irrelevant operators.\nThis may make it possible to use the GEVP on $G(t)$ at late times such that the theoretically motivated\nbound $t_0 \leq t/2$ holds, since the condition number of $G(t)$ is decreased compared to $C(t)$.
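    A brief usage sketch, assuming an existing (N x N) matrix correlator large_matrix_corr (the name is a placeholder):

    truncated = large_matrix_corr.prune(Ntrunc=2, tproj=3, t0proj=2)
    truncated.GEVP(t0=4)  # GEVP on the smaller, better conditioned matrix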

    \n", "signature": "(self, Ntrunc, tproj=3, t0proj=2, basematrix=None):", "funcdef": "def"}, "pyerrors.correlators.Corr.N": {"fullname": "pyerrors.correlators.Corr.N", "modulename": "pyerrors.correlators", "qualname": "Corr.N", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs": {"fullname": "pyerrors.covobs", "modulename": "pyerrors.covobs", "kind": "module", "doc": "

    \n"}, "pyerrors.covobs.Covobs": {"fullname": "pyerrors.covobs.Covobs", "modulename": "pyerrors.covobs", "qualname": "Covobs", "kind": "class", "doc": "

    \n"}, "pyerrors.covobs.Covobs.__init__": {"fullname": "pyerrors.covobs.Covobs.__init__", "modulename": "pyerrors.covobs", "qualname": "Covobs.__init__", "kind": "function", "doc": "

    Initialize Covobs object.

    \n\n
    Parameters
    \n\n
      \n
    • mean (float):\nMean value of the new Obs
    • \n
    • cov (list or array):\n2d Covariance matrix or 1d diagonal entries
    • \n
    • name (str):\nidentifier for the covariance matrix
    • \n
    • pos (int):\nPosition of the variance belonging to mean in cov.\nIs taken to be 1 if cov is 0-dimensional
    • \n
    • grad (list or array):\nGradient of the Covobs wrt. the means belonging to cov.
    • \n
    \n", "signature": "(mean, cov, name, pos=None, grad=None)"}, "pyerrors.covobs.Covobs.name": {"fullname": "pyerrors.covobs.Covobs.name", "modulename": "pyerrors.covobs", "qualname": "Covobs.name", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs.Covobs.value": {"fullname": "pyerrors.covobs.Covobs.value", "modulename": "pyerrors.covobs", "qualname": "Covobs.value", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs.Covobs.errsq": {"fullname": "pyerrors.covobs.Covobs.errsq", "modulename": "pyerrors.covobs", "qualname": "Covobs.errsq", "kind": "function", "doc": "

    Return the variance (= square of the error) of the Covobs

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.covobs.Covobs.cov": {"fullname": "pyerrors.covobs.Covobs.cov", "modulename": "pyerrors.covobs", "qualname": "Covobs.cov", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs.Covobs.grad": {"fullname": "pyerrors.covobs.Covobs.grad", "modulename": "pyerrors.covobs", "qualname": "Covobs.grad", "kind": "variable", "doc": "

    \n"}, "pyerrors.dirac": {"fullname": "pyerrors.dirac", "modulename": "pyerrors.dirac", "kind": "module", "doc": "

    \n"}, "pyerrors.dirac.gammaX": {"fullname": "pyerrors.dirac.gammaX", "modulename": "pyerrors.dirac", "qualname": "gammaX", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+1.j],\n [ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, -0.-1.j, 0.+0.j, 0.+0.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gammaY": {"fullname": "pyerrors.dirac.gammaY", "modulename": "pyerrors.dirac", "qualname": "gammaY", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 0.+0.j, 0.+0.j, 0.+0.j, -1.+0.j],\n [ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [-1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gammaZ": {"fullname": "pyerrors.dirac.gammaZ", "modulename": "pyerrors.dirac", "qualname": "gammaZ", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, -0.-1.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+1.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gammaT": {"fullname": "pyerrors.dirac.gammaT", "modulename": "pyerrors.dirac", "qualname": "gammaT", "kind": "variable", "doc": "

    \n", "default_value": "array([[0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n [1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gamma": {"fullname": "pyerrors.dirac.gamma", "modulename": "pyerrors.dirac", "qualname": "gamma", "kind": "variable", "doc": "

    \n", "default_value": "array([[[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+1.j],\n [ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, -0.-1.j, 0.+0.j, 0.+0.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j]],\n\n [[ 0.+0.j, 0.+0.j, 0.+0.j, -1.+0.j],\n [ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [-1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]],\n\n [[ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, -0.-1.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+1.j, 0.+0.j, 0.+0.j]],\n\n [[ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n [ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]]])"}, "pyerrors.dirac.gamma5": {"fullname": "pyerrors.dirac.gamma5", "modulename": "pyerrors.dirac", "qualname": "gamma5", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, -1.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, -1.+0.j]])"}, "pyerrors.dirac.identity": {"fullname": "pyerrors.dirac.identity", "modulename": "pyerrors.dirac", "qualname": "identity", "kind": "variable", "doc": "

    \n", "default_value": "array([[1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j]])"}, "pyerrors.dirac.epsilon_tensor": {"fullname": "pyerrors.dirac.epsilon_tensor", "modulename": "pyerrors.dirac", "qualname": "epsilon_tensor", "kind": "function", "doc": "

    Rank-3 epsilon tensor

    \n\n

    Based on https://codegolf.stackexchange.com/a/160375

    \n\n
    Returns
    \n\n
      \n
    • elem (int):\nElement (i,j,k) of the epsilon tensor of rank 3
    • \n
    \n", "signature": "(i, j, k):", "funcdef": "def"}, "pyerrors.dirac.epsilon_tensor_rank4": {"fullname": "pyerrors.dirac.epsilon_tensor_rank4", "modulename": "pyerrors.dirac", "qualname": "epsilon_tensor_rank4", "kind": "function", "doc": "

    Rank-4 epsilon tensor

    \n\n

    Extension of https://codegolf.stackexchange.com/a/160375

    \n\n
    Returns
    \n\n
      \n
    • elem (int):\nElement (i,j,k,o) of the epsilon tensor of rank 4
    • \n
    \n", "signature": "(i, j, k, o):", "funcdef": "def"}, "pyerrors.dirac.Grid_gamma": {"fullname": "pyerrors.dirac.Grid_gamma", "modulename": "pyerrors.dirac", "qualname": "Grid_gamma", "kind": "function", "doc": "

    Returns gamma matrix in Grid labeling.

    \n", "signature": "(gamma_tag):", "funcdef": "def"}, "pyerrors.fits": {"fullname": "pyerrors.fits", "modulename": "pyerrors.fits", "kind": "module", "doc": "

    \n"}, "pyerrors.fits.Fit_result": {"fullname": "pyerrors.fits.Fit_result", "modulename": "pyerrors.fits", "qualname": "Fit_result", "kind": "class", "doc": "

    Represents fit results.

    \n\n
    Attributes
    \n\n
      \n
    • fit_parameters (list):\nresults for the individual fit parameters,\nalso accessible via indices.
    • \n
    • chisquare_by_dof (float):\nreduced chisquare.
    • \n
    • p_value (float):\np-value of the fit
    • \n
    • t2_p_value (float):\nHotelling t-squared p-value for correlated fits.
    • \n
    \n", "bases": "collections.abc.Sequence"}, "pyerrors.fits.Fit_result.fit_parameters": {"fullname": "pyerrors.fits.Fit_result.fit_parameters", "modulename": "pyerrors.fits", "qualname": "Fit_result.fit_parameters", "kind": "variable", "doc": "

    \n"}, "pyerrors.fits.Fit_result.gamma_method": {"fullname": "pyerrors.fits.Fit_result.gamma_method", "modulename": "pyerrors.fits", "qualname": "Fit_result.gamma_method", "kind": "function", "doc": "

    Apply the gamma method to all fit parameters

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.fits.Fit_result.gm": {"fullname": "pyerrors.fits.Fit_result.gm", "modulename": "pyerrors.fits", "qualname": "Fit_result.gm", "kind": "function", "doc": "

    Apply the gamma method to all fit parameters

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.fits.least_squares": {"fullname": "pyerrors.fits.least_squares", "modulename": "pyerrors.fits", "qualname": "least_squares", "kind": "function", "doc": "

    Performs a non-linear fit to y = func(x).

    \n\n
    Parameters
    \n\n
      \n
    • For an uncombined fit:
    • \n
    • x (list):\nlist of floats.
    • \n
    • y (list):\nlist of Obs.
    • \n
    • func (object):\nfit function, has to be of the form

      \n\n
      \n
      import autograd.numpy as anp\n\ndef func(a, x):\n   return a[0] + a[1] * x + a[2] * anp.sinh(x)\n
      \n
      \n\n

      For multiple x values func can be of the form

      \n\n
      \n
      def func(a, x):\n   (x1, x2) = x\n   return a[0] * x1 ** 2 + a[1] * x2\n
      \n
      \n\n

      It is important that all numpy functions refer to autograd.numpy, otherwise the differentiation\nwill not work.

    • \n
    • OR For a combined fit:
    • \n
    • x (dict):\ndict of lists.
    • \n
    • y (dict):\ndict of lists of Obs.
    • \n
    • funcs (dict):\ndict of objects.\nFit functions have to be of the form (here a[0] is the common fit parameter):\nimport autograd.numpy as anp\nfuncs = {\"a\": func_a,\n \"b\": func_b}

      \n\n

      def func_a(a, x):\n return a[1] * anp.exp(-a[0] * x)

      \n\n

      def func_b(a, x):\n return a[2] * anp.exp(-a[0] * x)

      \n\n

      It is important that all numpy functions refer to autograd.numpy, otherwise the differentiation\nwill not work.

    • \n
    • priors (dict or list, optional):\npriors can either be a dictionary with integer keys and the corresponding priors as values or\na list with an entry for every parameter in the fit. The entries can either be\nObs (e.g. results from a previous fit) or strings containing a value and an error formatted like\n0.548(23), 500(40) or 0.5(0.4)
    • \n
    • silent (bool, optional):\nIf true all output to the console is omitted (default False).
    • \n
    • initial_guess (list):\ncan provide an initial guess for the input parameters. Relevant for\nnon-linear fits with many parameters. In case of correlated fits the guess is used to perform\nan uncorrelated fit which then serves as guess for the correlated fit.
    • \n
    • method (str, optional):\ncan be used to choose an alternative method for the minimization of chisquare.\nThe possible methods are the ones which can be used for scipy.optimize.minimize and\nmigrad of iminuit. If no method is specified, Levenberg-Marquardt is used.\nReliable alternatives are migrad, Powell and Nelder-Mead.
    • \n
    • tol (float, optional):\ncan be used (only for combined fits and methods other than Levenberg-Marquardt) to set the tolerance for convergence\nto a different value. Larger values speed up convergence at the cost of a larger error on the fitted parameters (and possibly\ninvalid estimates for parameter uncertainties), smaller values give more accurate parameter values.\nThe stopping criterion depends on the method, e.g. migrad: edm_max = 0.002 * tol * errordef (EDM criterion: edm < edm_max)
    • \n
    • correlated_fit (bool):\nIf True, use the full inverse covariance matrix in the definition of the chisquare cost function.\nFor details about how the covariance matrix is estimated see pyerrors.obs.covariance.\nIn practice the correlation matrix is Cholesky decomposed and inverted (instead of the covariance matrix).\nThis procedure should be numerically more stable as the correlation matrix is typically better conditioned (Jacobi preconditioning).
    • \n
    • expected_chisquare (bool):\nIf True estimates the expected chisquare which is\ncorrected by effects caused by correlated input data (default False).
    • \n
    • resplot (bool):\nIf True, a plot which displays fit, data and residuals is generated (default False).
    • \n
    • qqplot (bool):\nIf True, a quantile-quantile plot of the fit result is generated (default False).
    • \n
    • num_grad (bool):\nUse numerical differentiation instead of automatic differentiation to perform the error propagation (default False).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • output (Fit_result):\nParameters and information on the fitted result.
    • \n
    \n", "signature": "(x, y, func, priors=None, silent=False, **kwargs):", "funcdef": "def"}, "pyerrors.fits.total_least_squares": {"fullname": "pyerrors.fits.total_least_squares", "modulename": "pyerrors.fits", "qualname": "total_least_squares", "kind": "function", "doc": "

    Performs a non-linear fit to y = func(x) and returns a list of Obs corresponding to the fit parameters.

    \n\n
    Parameters
    \n\n
      \n
    • x (list):\nlist of Obs, or a tuple of lists of Obs
    • \n
    • y (list):\nlist of Obs. The dvalues of the Obs are used as x- and yerror for the fit.
    • \n
    • func (object):\nfunc has to be of the form

      \n\n
      \n
      import autograd.numpy as anp\n\ndef func(a, x):\n   return a[0] + a[1] * x + a[2] * anp.sinh(x)\n
      \n
      \n\n

      For multiple x values func can be of the form

      \n\n
      \n
      def func(a, x):\n   (x1, x2) = x\n   return a[0] * x1 ** 2 + a[1] * x2\n
      \n
      \n\n

      It is important that all numpy functions refer to autograd.numpy, otherwise the differentiation\nwill not work.

    • \n
    • silent (bool, optional):\nIf true all output to the console is omitted (default False).
    • \n
    • initial_guess (list):\ncan provide an initial guess for the input parameters. Relevant for non-linear\nfits with many parameters.
    • \n
    • expected_chisquare (bool):\nIf true prints the expected chisquare which is\ncorrected by effects caused by correlated input data.\nThis can take a while as the full correlation matrix\nhas to be calculated (default False).
    • \n
    • num_grad (bool):\nUse numerical differentiation instead of automatic differentiation to perform the error propagation (default False).
    • \n
    \n\n
    Notes
    \n\n

    Based on the orthogonal distance regression module of scipy.

    \n\n
    Returns
    \n\n
      \n
    • output (Fit_result):\nParameters and information on the fitted result.
    • \n
    \n", "signature": "(x, y, func, silent=False, **kwargs):", "funcdef": "def"}, "pyerrors.fits.fit_lin": {"fullname": "pyerrors.fits.fit_lin", "modulename": "pyerrors.fits", "qualname": "fit_lin", "kind": "function", "doc": "

    Performs a linear fit to y = n + m * x and returns two Obs n, m.

    \n\n
    Parameters
    \n\n
      \n
    • x (list):\nCan either be a list of floats in which case no xerror is assumed, or\na list of Obs, where the dvalues of the Obs are used as xerror for the fit.
    • \n
    • y (list):\nList of Obs, the dvalues of the Obs are used as yerror for the fit.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • fit_parameters (list[Obs]):\nList of fitted observables.
    • \n
    \n", "signature": "(x, y, **kwargs):", "funcdef": "def"}, "pyerrors.fits.qqplot": {"fullname": "pyerrors.fits.qqplot", "modulename": "pyerrors.fits", "qualname": "qqplot", "kind": "function", "doc": "

    Generates a quantile-quantile plot of the fit result which can be used to\n check if the residuals of the fit are gaussian distributed.

    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(x, o_y, func, p, title=''):", "funcdef": "def"}, "pyerrors.fits.residual_plot": {"fullname": "pyerrors.fits.residual_plot", "modulename": "pyerrors.fits", "qualname": "residual_plot", "kind": "function", "doc": "

    Generates a plot which compares the fit to the data and displays the corresponding residuals

    \n\n

    For uncorrelated data the residuals are expected to be distributed ~N(0,1).

    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(x, y, func, fit_res, title=''):", "funcdef": "def"}, "pyerrors.fits.error_band": {"fullname": "pyerrors.fits.error_band", "modulename": "pyerrors.fits", "qualname": "error_band", "kind": "function", "doc": "

    Calculate the error band for an array of sample values x, for given fit function func with optimized parameters beta.

    \n\n
    Returns
    \n\n
      \n
    • err (np.array(Obs)):\nError band for an array of sample values x
    • \n
    \n", "signature": "(x, func, beta):", "funcdef": "def"}, "pyerrors.fits.ks_test": {"fullname": "pyerrors.fits.ks_test", "modulename": "pyerrors.fits", "qualname": "ks_test", "kind": "function", "doc": "

    Performs a Kolmogorov\u2013Smirnov test for the p-values of all fit objects.

    \n\n
    Parameters
    \n\n
      \n
    • objects (list):\nList of fit results to include in the analysis (optional).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(objects=None):", "funcdef": "def"}, "pyerrors.input": {"fullname": "pyerrors.input", "modulename": "pyerrors.input", "kind": "module", "doc": "

    pyerrors includes an input submodule in which input routines and parsers for the output of various numerical programs are contained.

    \n\n

    Jackknife samples

    \n\n

    For comparison with other analysis workflows pyerrors can also generate jackknife samples from an Obs object or import jackknife samples into an Obs object.\nSee pyerrors.obs.Obs.export_jackknife and pyerrors.obs.import_jackknife for details.
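    A minimal round-trip sketch (the observable and ensemble name are placeholders):

    import pyerrors as pe

    my_obs = pe.pseudo_Obs(1.0, 0.1, "test_ensemble")
    jacks = my_obs.export_jackknife()                 # zeroth entry is the mean value
    reconstructed = pe.obs.import_jackknife(jacks, "test_ensemble")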

    \n"}, "pyerrors.input.bdio": {"fullname": "pyerrors.input.bdio", "modulename": "pyerrors.input.bdio", "kind": "module", "doc": "

    \n"}, "pyerrors.input.bdio.read_ADerrors": {"fullname": "pyerrors.input.bdio.read_ADerrors", "modulename": "pyerrors.input.bdio", "qualname": "read_ADerrors", "kind": "function", "doc": "

    Extract generic MCMC data from a bdio file

    \n\n

    read_ADerrors requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path -- path to the bdio file
    • \n
    • bdio_path -- path to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (List[Obs]):\nExtracted data
    • \n
    \n", "signature": "(file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.bdio.write_ADerrors": {"fullname": "pyerrors.input.bdio.write_ADerrors", "modulename": "pyerrors.input.bdio", "qualname": "write_ADerrors", "kind": "function", "doc": "

    Write Obs to a bdio file according to ADerrors conventions

    \n\n

    write_ADerrors requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path -- path to the bdio file
    • \n
    • bdio_path -- path to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    \n\n
    Returns
    \n\n
      \n
    • success (int):\nreturns 0 if successful
    • \n
    \n", "signature": "(obs_list, file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.bdio.read_mesons": {"fullname": "pyerrors.input.bdio.read_mesons", "modulename": "pyerrors.input.bdio", "qualname": "read_mesons", "kind": "function", "doc": "

    Extract mesons data from a bdio file and return it as a dictionary

    \n\n

    The dictionary can be accessed with a tuple consisting of (type, source_position, kappa1, kappa2)

    \n\n

    read_mesons requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path (str):\npath to the bdio file
    • \n
    • bdio_path (str):\npath to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    • start (int):\nThe first configuration to be read (default 1)
    • \n
    • stop (int):\nThe last configuration to be read (default None)
    • \n
    • step (int):\nFixed step size between two measurements (default 1)
    • \n
    • alternative_ensemble_name (str):\nManually overwrite ensemble name
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (dict):\nExtracted meson data
    • \n
    \n", "signature": "(file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.bdio.read_dSdm": {"fullname": "pyerrors.input.bdio.read_dSdm", "modulename": "pyerrors.input.bdio", "qualname": "read_dSdm", "kind": "function", "doc": "

    Extract dSdm data from a bdio file and return it as a dictionary

    \n\n

    The dictionary can be accessed with a tuple consisting of (type, kappa)

    \n\n

    read_dSdm requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path (str):\npath to the bdio file
    • \n
    • bdio_path (str):\npath to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    • start (int):\nThe first configuration to be read (default 1)
    • \n
    • stop (int):\nThe last configuration to be read (default None)
    • \n
    • step (int):\nFixed step size between two measurements (default 1)
    • \n
    • alternative_ensemble_name (str):\nManually overwrite ensemble name
    • \n
    \n", "signature": "(file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.dobs": {"fullname": "pyerrors.input.dobs", "modulename": "pyerrors.input.dobs", "kind": "module", "doc": "

    \n"}, "pyerrors.input.dobs.create_pobs_string": {"fullname": "pyerrors.input.dobs.create_pobs_string", "modulename": "pyerrors.input.dobs", "qualname": "create_pobs_string", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to an xml string\naccording to the Zeuthen pobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure have to be defined on the same ensemble.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • enstag (str):\nEnstag that is written to pobs. If None, the ensemble name is used.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • xml_str (str):\nXML formatted string of the input data
    • \n
    \n", "signature": "(obsl, name, spec='', origin='', symbol=[], enstag=None):", "funcdef": "def"}, "pyerrors.input.dobs.write_pobs": {"fullname": "pyerrors.input.dobs.write_pobs", "modulename": "pyerrors.input.dobs", "qualname": "write_pobs", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to a .xml.gz file\naccording to the Zeuthen pobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure have to be defined on the same ensemble.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • enstag (str):\nEnstag that is written to pobs. If None, the ensemble name is used.
    • \n
    • gz (bool):\nIf True, the output is a gzipped xml. If False, the output is an xml file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(\tobsl,\tfname,\tname,\tspec='',\torigin='',\tsymbol=[],\tenstag=None,\tgz=True):", "funcdef": "def"}, "pyerrors.input.dobs.read_pobs": {"fullname": "pyerrors.input.dobs.read_pobs", "modulename": "pyerrors.input.dobs", "qualname": "read_pobs", "kind": "function", "doc": "

    Import a list of Obs from an xml.gz file in the Zeuthen pobs format.

    \n\n

    Tags are not written or recovered automatically.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned as list.
    • \n
    • separator_insertion (str or int):\nstr: replace all occurrences of \"separator_insertion\" within the replica names\nby \"|%s\" % (separator_insertion) when constructing the names of the replica.\nint: Insert the separator \"|\" at the position given by separator_insertion.\nNone (default): Replica names remain unchanged.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (list[Obs]):\nImported data
    • \n
    • or
    • \n
    • res (dict):\nImported data and meta-data
    • \n
    \n", "signature": "(fname, full_output=False, gz=True, separator_insertion=None):", "funcdef": "def"}, "pyerrors.input.dobs.import_dobs_string": {"fullname": "pyerrors.input.dobs.import_dobs_string", "modulename": "pyerrors.input.dobs", "qualname": "import_dobs_string", "kind": "function", "doc": "

    Import a list of Obs from a string in the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically.

    \n\n
    Parameters
    \n\n
      \n
    • content (str):\nXML string containing the data
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned as list.
    • \n
    • separator_insertion (str, int or bool):\nstr: replace all occurrences of \"separator_insertion\" within the replica names\nby \"|%s\" % (separator_insertion) when constructing the names of the replica.\nint: Insert the separator \"|\" at the position given by separator_insertion.\nTrue (default): separator \"|\" is inserted after len(ensname), assuming that the\nensemble name is a prefix to the replica name.\nNone or False: No separator is inserted.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (list[Obs]):\nImported data
    • \n
    • or
    • \n
    • res (dict):\nImported data and meta-data
    • \n
    \n", "signature": "(content, full_output=False, separator_insertion=True):", "funcdef": "def"}, "pyerrors.input.dobs.read_dobs": {"fullname": "pyerrors.input.dobs.read_dobs", "modulename": "pyerrors.input.dobs", "qualname": "read_dobs", "kind": "function", "doc": "

    Import a list of Obs from an xml.gz file in the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned as list.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes XML file.
    • \n
    • separator_insertion (str, int or bool):\nstr: replace all occurrences of \"separator_insertion\" within the replica names\nby \"|%s\" % (separator_insertion) when constructing the names of the replica.\nint: Insert the separator \"|\" at the position given by separator_insertion.\nTrue (default): separator \"|\" is inserted after len(ensname), assuming that the\nensemble name is a prefix to the replica name.\nNone or False: No separator is inserted.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (list[Obs]):\nImported data
    • \n
    • or
    • \n
    • res (dict):\nImported data and meta-data
    • \n
    \n", "signature": "(fname, full_output=False, gz=True, separator_insertion=True):", "funcdef": "def"}, "pyerrors.input.dobs.create_dobs_string": {"fullname": "pyerrors.input.dobs.create_dobs_string", "modulename": "pyerrors.input.dobs", "qualname": "create_dobs_string", "kind": "function", "doc": "

    Generate the string for the export of a list of Obs or structures containing Obs\nto a .xml.gz file according to the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure do not have to be defined on the same set of configurations,\nbut the storage requirement is increased, if this is not the case.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • who (str):\nProvide the name of the person that exports the data.
    • \n
    • enstags (dict):\nProvide alternative enstag for ensembles in the form enstags = {ename: enstag}\nOtherwise, the ensemble name is used.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • xml_str (str):\nXML string generated from the data
    • \n
    \n", "signature": "(\tobsl,\tname,\tspec='dobs v1.0',\torigin='',\tsymbol=[],\twho=None,\tenstags=None):", "funcdef": "def"}, "pyerrors.input.dobs.write_dobs": {"fullname": "pyerrors.input.dobs.write_dobs", "modulename": "pyerrors.input.dobs", "qualname": "write_dobs", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to a .xml.gz file\naccording to the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure do not have to be defined on the same set of configurations,\nbut the storage requirement is increased, if this is not the case.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • who (str):\nProvide the name of the person that exports the data.
    • \n
    • enstags (dict):\nProvide alternative enstag for ensembles in the form enstags = {ename: enstag}\nOtherwise, the ensemble name is used.
    • \n
    • gz (bool):\nIf True, the output is a gzipped XML. If False, the output is a XML file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
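    A minimal round-trip sketch; the filename, the observable name and the Obs obs1, obs2 are placeholders, and pe stands for an imported pyerrors:

    >>> import pyerrors as pe
    >>> pe.input.dobs.write_dobs([obs1, obs2], "correlator.xml.gz", "f_A", spec="example export")
    >>> reread = pe.input.dobs.read_dobs("correlator.xml.gz")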
    \n", "signature": "(\tobsl,\tfname,\tname,\tspec='dobs v1.0',\torigin='',\tsymbol=[],\twho=None,\tenstags=None,\tgz=True):", "funcdef": "def"}, "pyerrors.input.hadrons": {"fullname": "pyerrors.input.hadrons", "modulename": "pyerrors.input.hadrons", "kind": "module", "doc": "

    \n"}, "pyerrors.input.hadrons.read_meson_hd5": {"fullname": "pyerrors.input.hadrons.read_meson_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_meson_hd5", "kind": "function", "doc": "

    Read hadrons meson hdf5 file and extract the meson labeled 'meson'

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • meson (str):\nlabel of the meson to be extracted, standard value meson_0 which\ncorresponds to the pseudoscalar pseudoscalar two-point function.
    • \n
    • gammas (tuple of strings):\nInstead of a meson label one can also provide a tuple of two strings\nindicating the gamma matrices at source and sink.\n(\"Gamma5\", \"Gamma5\") corresponds to the pseudoscalar pseudoscalar\ntwo-point function. The gammas argument dominates over meson.
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • corr (Corr):\nCorrelator of the source sink combination in question.
    • \n
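    A usage sketch with placeholder path, file stem and ensemble id; the exact values depend on the Hadrons run in question:

    >>> import pyerrors as pe
    >>> corr = pe.input.hadrons.read_meson_hd5("./data", "meson_2pt", ens_id="A653", meson="meson_0")
    >>> corr.gamma_method()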
    \n", "signature": "(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):", "funcdef": "def"}, "pyerrors.input.hadrons.extract_t0_hd5": {"fullname": "pyerrors.input.hadrons.extract_t0_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "extract_t0_hd5", "kind": "function", "doc": "

    Read hadrons FlowObservables hdf5 file and extract t0

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • obs (str):\nlabel of the observable from which t0 should be extracted.\nOptions: 'Clover energy density' and 'Plaquette energy density'
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit. (Default: 5)
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of t0 is shown together with the data.
    • \n
    \n", "signature": "(\tpath,\tfilestem,\tens_id,\tobs='Clover energy density',\tfit_range=5,\tidl=None,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"fullname": "pyerrors.input.hadrons.read_DistillationContraction_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_DistillationContraction_hd5", "kind": "function", "doc": "

    Read hadrons DistillationContraction hdf5 files in given directory structure

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the directories to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • diagrams (list):\nList of strings of the diagrams to extract, e.g. [\"direct\", \"box\", \"cross\"].
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (dict):\nextracted DistillationContration data
    • \n
    \n", "signature": "(path, ens_id, diagrams=['direct'], idl=None):", "funcdef": "def"}, "pyerrors.input.hadrons.Npr_matrix": {"fullname": "pyerrors.input.hadrons.Npr_matrix", "modulename": "pyerrors.input.hadrons", "qualname": "Npr_matrix", "kind": "class", "doc": "

    ndarray(shape, dtype=float, buffer=None, offset=0,\n strides=None, order=None)

    \n\n

    An array object represents a multidimensional, homogeneous array\nof fixed-size items. An associated data-type object describes the\nformat of each element in the array (its byte-order, how many bytes it\noccupies in memory, whether it is an integer, a floating point number,\nor something else, etc.)

    \n\n

    Arrays should be constructed using array, zeros or empty (refer\nto the See Also section below). The parameters given here refer to\na low-level method (ndarray(...)) for instantiating an array.

    \n\n

    For more information, refer to the numpy module and examine the\nmethods and attributes of an array.

    \n\n
    Parameters
    \n\n
      \n
    • (for the __new__ method; see Notes below)
    • \n
    • shape (tuple of ints):\nShape of created array.
    • \n
    • dtype (data-type, optional):\nAny object that can be interpreted as a numpy data type.
    • \n
    • buffer (object exposing buffer interface, optional):\nUsed to fill the array with data.
    • \n
    • offset (int, optional):\nOffset of array data in buffer.
    • \n
    • strides (tuple of ints, optional):\nStrides of data in memory.
    • \n
    • order ({'C', 'F'}, optional):\nRow-major (C-style) or column-major (Fortran-style) order.
    • \n
    \n\n
    Attributes
    \n\n
      \n
    • T (ndarray):\nTranspose of the array.
    • \n
    • data (buffer):\nThe array's elements, in memory.
    • \n
    • dtype (dtype object):\nDescribes the format of the elements in the array.
    • \n
    • flags (dict):\nDictionary containing information related to memory use, e.g.,\n'C_CONTIGUOUS', 'OWNDATA', 'WRITEABLE', etc.
    • \n
    • flat (numpy.flatiter object):\nFlattened version of the array as an iterator. The iterator\nallows assignments, e.g., x.flat = 3 (See ndarray.flat for\nassignment examples; TODO).
    • \n
    • imag (ndarray):\nImaginary part of the array.
    • \n
    • real (ndarray):\nReal part of the array.
    • \n
    • size (int):\nNumber of elements in the array.
    • \n
    • itemsize (int):\nThe memory use of each array element in bytes.
    • \n
    • nbytes (int):\nThe total number of bytes required to store the array data,\ni.e., itemsize * size.
    • \n
    • ndim (int):\nThe array's number of dimensions.
    • \n
    • shape (tuple of ints):\nShape of the array.
    • \n
    • strides (tuple of ints):\nThe step-size required to move from one element to the next in\nmemory. For example, a contiguous (3, 4) array of type\nint16 in C-order has strides (8, 2). This implies that\nto move from element to element in memory requires jumps of 2 bytes.\nTo move from row-to-row, one needs to jump 8 bytes at a time\n(2 * 4).
    • \n
    • ctypes (ctypes object):\nClass containing properties of the array needed for interaction\nwith ctypes.
    • \n
    • base (ndarray):\nIf the array is a view into another array, that array is its base\n(unless that array is also a view). The base array is where the\narray data is actually stored.
    • \n
    \n\n
    See Also
    \n\n

    array: Construct an array.
    \nzeros: Create an array, each element of which is zero.
    \nempty: Create an array, but leave its allocated memory unchanged (i.e.,\nit contains \"garbage\").
    \ndtype: Create a data-type.
    \nnumpy.typing.NDArray: An ndarray alias :term:generic <generic type>\nw.r.t. its dtype.type <numpy.dtype.type>.

    \n\n
    Notes
    \n\n

    There are two modes of creating an array using __new__:

    \n\n
      \n
    1. If buffer is None, then only shape, dtype, and order\nare used.
    2. \n
    3. If buffer is an object exposing the buffer interface, then\nall keywords are interpreted.
    4. \n
    \n\n

    No __init__ method is needed because the array is fully initialized\nafter the __new__ method.

    \n\n
    Examples
    \n\n

    These examples illustrate the low-level ndarray constructor. Refer\nto the See Also section above for easier ways of constructing an\nndarray.

    \n\n

    First mode, buffer is None:

    \n\n
    \n
    >>> np.ndarray(shape=(2,2), dtype=float, order='F')\narray([[0.0e+000, 0.0e+000], # random\n       [     nan, 2.5e-323]])\n
    \n
    \n\n

    Second mode:

    \n\n
    \n
    >>> np.ndarray((2,), buffer=np.array([1,2,3]),\n...            offset=np.int_().itemsize,\n...            dtype=int) # offset = 1*itemsize, i.e. skip first element\narray([2, 3])\n
    \n
    \n", "bases": "numpy.ndarray"}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"fullname": "pyerrors.input.hadrons.Npr_matrix.g5H", "modulename": "pyerrors.input.hadrons", "qualname": "Npr_matrix.g5H", "kind": "variable", "doc": "

    Gamma_5 hermitian conjugate

    \n\n

    Uses the fact that the propagator is gamma5 hermitian, so just the\nin and out momenta of the propagator are exchanged.

    \n"}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"fullname": "pyerrors.input.hadrons.read_ExternalLeg_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_ExternalLeg_hd5", "kind": "function", "doc": "

    Read hadrons ExternalLeg hdf5 file and output an array of CObs

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (Npr_matrix):\nread Cobs-matrix
    • \n
    \n", "signature": "(path, filestem, ens_id, idl=None):", "funcdef": "def"}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"fullname": "pyerrors.input.hadrons.read_Bilinear_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_Bilinear_hd5", "kind": "function", "doc": "

    Read hadrons Bilinear hdf5 file and output an array of CObs

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result_dict (dict[Npr_matrix]):\nextracted Bilinears
    • \n
    \n", "signature": "(path, filestem, ens_id, idl=None):", "funcdef": "def"}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"fullname": "pyerrors.input.hadrons.read_Fourquark_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_Fourquark_hd5", "kind": "function", "doc": "

    Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    • vertices (list):\nVertex functions to be extracted.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result_dict (dict):\nextracted fourquark matrices
    • \n
    \n", "signature": "(path, filestem, ens_id, idl=None, vertices=['VA', 'AV']):", "funcdef": "def"}, "pyerrors.input.json": {"fullname": "pyerrors.input.json", "modulename": "pyerrors.input.json", "kind": "module", "doc": "

    \n"}, "pyerrors.input.json.create_json_string": {"fullname": "pyerrors.input.json.create_json_string", "modulename": "pyerrors.input.json", "qualname": "create_json_string", "kind": "function", "doc": "

    Generate the string for the export of a list of Obs or structures containing Obs\nto a .json(.gz) file

    \n\n
    Parameters
    \n\n
      \n
    • ol (list):\nList of objects that will be exported. At the moment, these objects can be\neither of: Obs, list, numpy.ndarray, Corr.\nAll Obs inside a structure have to be defined on the same set of configurations.
    • \n
    • description (str):\nOptional string that describes the contents of the json file.
    • \n
    • indent (int):\nSpecify the indentation level of the json file. None or 0 is permissible and\nsaves disk space.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • json_string (str):\nString for export to .json(.gz) file
    • \n
    \n", "signature": "(ol, description='', indent=1):", "funcdef": "def"}, "pyerrors.input.json.dump_to_json": {"fullname": "pyerrors.input.json.dump_to_json", "modulename": "pyerrors.input.json", "qualname": "dump_to_json", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to a .json(.gz) file.\nDict keys that are not JSON-serializable such as floats are converted to strings.

    \n\n
    Parameters
    \n\n
      \n
    • ol (list):\nList of objects that will be exported. At the moment, these objects can be\neither of: Obs, list, numpy.ndarray, Corr.\nAll Obs inside a structure have to be defined on the same set of configurations.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • description (str):\nOptional string that describes the contents of the json file.
    • \n
    • indent (int):\nSpecify the indentation level of the json file. None or 0 is permissible and\nsaves disk space.
    • \n
    • gz (bool):\nIf True, the output is a gzipped json. If False, the output is a json file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
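    A minimal sketch of a write/read round trip; obs1, obs2 and the filename are placeholders:

    >>> import pyerrors as pe
    >>> pe.input.json.dump_to_json([obs1, obs2], "results.json.gz", description="example export")
    >>> reread = pe.input.json.load_json("results.json.gz")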
    \n", "signature": "(ol, fname, description='', indent=1, gz=True):", "funcdef": "def"}, "pyerrors.input.json.import_json_string": {"fullname": "pyerrors.input.json.import_json_string", "modulename": "pyerrors.input.json", "qualname": "import_json_string", "kind": "function", "doc": "

    Reconstruct a list of Obs or structures containing Obs from a json string.

    \n\n

    The following structures are supported: Obs, list, numpy.ndarray, Corr\nIf the list contains only one element, it is unpacked from the list.

    \n\n
    Parameters
    \n\n
      \n
    • json_string (str):\njson string containing the data.
    • \n
    • verbose (bool):\nPrint additional information that was written to the file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nreconstructed list of observables from the json string
    • \n
    • or
    • \n
    • result (Obs):\nonly one observable if the list only has one entry
    • \n
    • or
    • \n
    • result (dict):\nif full_output=True
    • \n
    \n", "signature": "(json_string, verbose=True, full_output=False):", "funcdef": "def"}, "pyerrors.input.json.load_json": {"fullname": "pyerrors.input.json.load_json", "modulename": "pyerrors.input.json", "qualname": "load_json", "kind": "function", "doc": "

    Import a list of Obs or structures containing Obs from a .json(.gz) file.

    \n\n

    The following structures are supported: Obs, list, numpy.ndarray, Corr\nIf the list contains only one element, it is unpacked from the list.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • verbose (bool):\nPrint additional information that was written to the file.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes JSON file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nreconstructed list of observables from the json string
    • \n
    • or
    • \n
    • result (Obs):\nonly one observable if the list only has one entry
    • \n
    • or
    • \n
    • result (dict):\nif full_output=True
    • \n
    \n", "signature": "(fname, verbose=True, gz=True, full_output=False):", "funcdef": "def"}, "pyerrors.input.json.dump_dict_to_json": {"fullname": "pyerrors.input.json.dump_dict_to_json", "modulename": "pyerrors.input.json", "qualname": "dump_dict_to_json", "kind": "function", "doc": "

    Export a dict of Obs or structures containing Obs to a .json(.gz) file

    \n\n
    Parameters
    \n\n
      \n
    • od (dict):\nDict of JSON valid structures and objects that will be exported.\nAt the moment, these objects can be either of: Obs, list, numpy.ndarray, Corr.\nAll Obs inside a structure have to be defined on the same set of configurations.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • description (str):\nOptional string that describes the contents of the json file.
    • \n
    • indent (int):\nSpecify the indentation level of the json file. None or 0 is permissible and\nsaves disk space.
    • \n
    • reps (str):\nSpecify the structure of the placeholder in exported dict to be reps[0-9]+.
    • \n
    • gz (bool):\nIf True, the output is a gzipped json. If False, the output is a json file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
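    A sketch of the dict round trip with load_json_dict; the dict keys, the Obs and the filename are placeholders:

    >>> import pyerrors as pe
    >>> pe.input.json.dump_dict_to_json({"m_pi": obs1, "f_pi": obs2}, "dict_output.json.gz")
    >>> data = pe.input.json.load_json_dict("dict_output.json.gz")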
    \n", "signature": "(od, fname, description='', indent=1, reps='DICTOBS', gz=True):", "funcdef": "def"}, "pyerrors.input.json.load_json_dict": {"fullname": "pyerrors.input.json.load_json_dict", "modulename": "pyerrors.input.json", "qualname": "load_json_dict", "kind": "function", "doc": "

    Import a dict of Obs or structures containing Obs from a .json(.gz) file.

    \n\n

    The following structures are supported: Obs, list, numpy.ndarray, Corr

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • verbose (bool):\nPrint additional information that was written to the file.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes JSON file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned.
    • \n
    • reps (str):\nSpecify the structure of the placeholder in imported dict to be reps[0-9]+.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (Obs / list / Corr):\nRead data
    • \n
    • or
    • \n
    • data (dict):\nRead data and meta-data
    • \n
    \n", "signature": "(fname, verbose=True, gz=True, full_output=False, reps='DICTOBS'):", "funcdef": "def"}, "pyerrors.input.misc": {"fullname": "pyerrors.input.misc", "modulename": "pyerrors.input.misc", "kind": "module", "doc": "

    \n"}, "pyerrors.input.misc.fit_t0": {"fullname": "pyerrors.input.misc.fit_t0", "modulename": "pyerrors.input.misc", "qualname": "fit_t0", "kind": "function", "doc": "

    Compute the root of (flow-based) data based on a dictionary that contains\nthe necessary information in key-value pairs a la (flow time: observable at flow time).

    \n\n

    It is assumed that the data is monotonically increasing and passes zero from below.\nNo exception is thrown if this is not the case (several roots, no monotonic increase).\nAn exception is thrown if no root can be found in the data.

    \n\n

    A linear fit in the vicinity of the root is performed to extract the root from the\ntwo fit parameters.

    \n\n
    Parameters
    \n\n
      \n
    • t2E_dict (dict):\nDictionary with pairs of (flow time: observable at flow time) where the flow times\nare of type float and the observables of type Obs.
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of t0 is shown together with the data. (Default: False)
    • \n
    • observable (str):\nKeyword to identify the observable to print the correct ylabel (if plot_fit is True)\nfor the observables 't0' and 'w0'. No y label is printed otherwise. (Default: 't0')
    • \n
    \n\n
    Returns
    \n\n
      \n
    • root (Obs):\nThe root of the data series.
    • \n
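    A minimal sketch, assuming flow_times (floats) and flow_obs (Obs at those flow times) already exist:

    >>> import pyerrors as pe
    >>> t2E_dict = dict(zip(flow_times, flow_obs))
    >>> t0 = pe.input.misc.fit_t0(t2E_dict, fit_range=5)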
    \n", "signature": "(t2E_dict, fit_range, plot_fit=False, observable='t0'):", "funcdef": "def"}, "pyerrors.input.misc.read_pbp": {"fullname": "pyerrors.input.misc.read_pbp", "modulename": "pyerrors.input.misc", "qualname": "read_pbp", "kind": "function", "doc": "

    Read pbp format from given folder structure.

    \n\n
    Parameters
    \n\n
      \n
    • r_start (list):\nlist which contains the first config to be read for each replicum
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nlist of observables read
    • \n
    \n", "signature": "(path, prefix, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD": {"fullname": "pyerrors.input.openQCD", "modulename": "pyerrors.input.openQCD", "kind": "module", "doc": "

    \n"}, "pyerrors.input.openQCD.read_rwms": {"fullname": "pyerrors.input.openQCD.read_rwms", "modulename": "pyerrors.input.openQCD", "qualname": "read_rwms", "kind": "function", "doc": "

    Read rwms format from given folder structure. Returns a list of length nrw

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath that contains the data files
    • \n
    • prefix (str):\nall files in path that start with prefix are considered as input files.\nMay be used together with postfix to consider only special file endings.\nPrefix is ignored if the keyword 'files' is used.
    • \n
    • version (str):\nversion of openQCD, default 2.0
    • \n
    • names (list):\nlist of names that is assigned to the data according\nto the order in the file list. Use with care if you do not provide file names!
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum
    • \n
    • r_step (int):\ninteger that defines a fixed step size between two measurements (in units of configs)\nIf not given, r_step=1 is assumed.
    • \n
    • postfix (str):\npostfix of the file to read, e.g. '.ms1' for openQCD-files
    • \n
    • files (list):\nlist which contains the filenames to be read. No automatic detection of\nfiles performed if given.
    • \n
    • print_err (bool):\nPrint additional information that is useful for debugging.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • rwms (Obs):\nReweighting factors read
    • \n
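    A usage sketch with placeholder path and prefix; the postfix keyword selects the file ending as described above:

    >>> import pyerrors as pe
    >>> rwms = pe.input.openQCD.read_rwms("./ms1_files", "ensembleA", version="2.0", postfix=".ms1")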
    \n", "signature": "(path, prefix, version='2.0', names=None, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.extract_t0": {"fullname": "pyerrors.input.openQCD.extract_t0", "modulename": "pyerrors.input.openQCD", "qualname": "extract_t0", "kind": "function", "doc": "

    Extract t0/a^2 from given .ms.dat files. Returns t0 as Obs.

    \n\n

    It is assumed that all boundary effects have\nsufficiently decayed at x0=xmin.\nThe data around the zero crossing of t^2 <E> - c (where c=0.3 by default)\nis fitted with a linear function\nfrom which the exact root is extracted.

    \n\n

    It is assumed that one measurement is performed for each config.\nIf this is not the case, the resulting idl, as well as the handling\nof r_start, r_stop and r_step is wrong and the user has to correct\nthis in the resulting observable.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nPath to .ms.dat files
    • \n
    • prefix (str):\nEnsemble prefix
    • \n
    • dtr_read (int):\nDetermines how many trajectories should be skipped\nwhen reading the ms.dat files.\nCorresponds to dtr_cnfg / dtr_ms in the openQCD input file.
    • \n
    • xmin (int):\nFirst timeslice where the boundary\neffects have sufficiently decayed.
    • \n
    • spatial_extent (int):\nspatial extent of the lattice, required for normalization.
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit. (Default: 5)
    • \n
    • postfix (str):\nPostfix of measurement file (Default: ms)
    • \n
    • c (float):\nConstant that defines the flow scale. Default 0.3 for t_0, choose 2./3 for t_1.
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • r_step (int):\ninteger that defines a fixed step size between two measurements (in units of configs)\nIf not given, r_step=1 is assumed.
    • \n
    • plaquette (bool):\nIf true extract the plaquette estimate of t0 instead.
    • \n
    • names (list):\nlist of names that is assigned to the data according\nto the order in the file list. Use with care if you do not provide file names!
    • \n
    • files (list):\nlist which contains the filenames to be read. No automatic detection of\nfiles performed if given.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of t0 is shown together with the data.
    • \n
    • assume_thermalization (bool):\nIf True: If the first record divided by the distance between two measurements is larger than\n1, it is assumed that this is due to thermalization and the first measurement belongs\nto the first config (default).\nIf False: The config numbers are assumed to be traj_number // difference
    • \n
    \n\n
    Returns
    \n\n
      \n
    • t0 (Obs):\nExtracted t0
    • \n
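    A usage sketch; path, prefix and the lattice parameters are placeholders and have to be adapted to the actual run:

    >>> import pyerrors as pe
    >>> t0 = pe.input.openQCD.extract_t0("./ms_files", "ensembleA", dtr_read=4, xmin=6, spatial_extent=32, fit_range=5)
    >>> t0.gamma_method()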
    \n", "signature": "(\tpath,\tprefix,\tdtr_read,\txmin,\tspatial_extent,\tfit_range=5,\tpostfix='ms',\tc=0.3,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.extract_w0": {"fullname": "pyerrors.input.openQCD.extract_w0", "modulename": "pyerrors.input.openQCD", "qualname": "extract_w0", "kind": "function", "doc": "

    Extract w0/a from given .ms.dat files. Returns w0 as Obs.

    \n\n

    It is assumed that all boundary effects have\nsufficiently decayed at x0=xmin.\nThe data around the zero crossing of t d(t^2 <E>)/dt - c (where c=0.3 by default)\nis fitted with a linear function\nfrom which the exact root is extracted.

    \n\n

    It is assumed that one measurement is performed for each config.\nIf this is not the case, the resulting idl, as well as the handling\nof r_start, r_stop and r_step is wrong and the user has to correct\nthis in the resulting observable.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nPath to .ms.dat files
    • \n
    • prefix (str):\nEnsemble prefix
    • \n
    • dtr_read (int):\nDetermines how many trajectories should be skipped\nwhen reading the ms.dat files.\nCorresponds to dtr_cnfg / dtr_ms in the openQCD input file.
    • \n
    • xmin (int):\nFirst timeslice where the boundary\neffects have sufficiently decayed.
    • \n
    • spatial_extent (int):\nspatial extent of the lattice, required for normalization.
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit. (Default: 5)
    • \n
    • postfix (str):\nPostfix of measurement file (Default: ms)
    • \n
    • c (float):\nConstant that defines the flow scale. Default 0.3 for w_0, choose 2./3 for w_1.
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • r_step (int):\ninteger that defines a fixed step size between two measurements (in units of configs)\nIf not given, r_step=1 is assumed.
    • \n
    • plaquette (bool):\nIf true extract the plaquette estimate of w0 instead.
    • \n
    • names (list):\nlist of names that is assigned to the data according\nto the order in the file list. Use with care if you do not provide file names!
    • \n
    • files (list):\nlist which contains the filenames to be read. No automatic detection of\nfiles performed if given.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of w0 is shown together with the data.
    • \n
    • assume_thermalization (bool):\nIf True: If the first record divided by the distance between two measurements is larger than\n1, it is assumed that this is due to thermalization and the first measurement belongs\nto the first config (default).\nIf False: The config numbers are assumed to be traj_number // difference
    • \n
    \n\n
    Returns
    \n\n
      \n
    • w0 (Obs):\nExtracted w0
    • \n
    \n", "signature": "(\tpath,\tprefix,\tdtr_read,\txmin,\tspatial_extent,\tfit_range=5,\tpostfix='ms',\tc=0.3,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.read_qtop": {"fullname": "pyerrors.input.openQCD.read_qtop", "modulename": "pyerrors.input.openQCD", "qualname": "read_qtop", "kind": "function", "doc": "

    Read the topological charge based on openQCD gradient flow measurements.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath of the measurement files
    • \n
    • prefix (str):\nprefix of the measurement files, e.g. _id0_r0.ms.dat.\nIgnored if file names are passed explicitly via keyword files.
    • \n
    • c (double):\nSmearing radius in units of the lattice extent, c = sqrt(8 t0) / L.
    • \n
    • dtr_cnfg (int):\n(optional) parameter that specifies the number of measurements\nbetween two configs.\nIf it is not set, the distance between two measurements\nin the file is assumed to be the distance between two configurations.
    • \n
    • steps (int):\n(optional) Distance between two configurations in units of trajectories /\n cycles. Assumed to be the distance between two measurements * dtr_cnfg if not given
    • \n
    • version (str):\nEither openQCD or sfqcd, depending on the data.
    • \n
    • L (int):\nspatial length of the lattice in L/a.\nHAS to be set if version != sfqcd, since openQCD does not provide\nthis in the header
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • files (list):\nspecify the exact files that need to be read\nfrom path, practical if e.g. only one replicum is needed
    • \n
    • postfix (str):\npostfix of the file to read, e.g. '.gfms.dat' for openQCD-files
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length.
    • \n
    • Zeuthen_flow (bool):\n(optional) If True, the Zeuthen flow is used for Qtop. Only possible\nfor version=='sfqcd' If False, the Wilson flow is used.
    • \n
    • integer_charge (bool):\nIf True, the charge is rounded towards the nearest integer on each config.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (Obs):\nRead topological charge
    • \n
    \n", "signature": "(path, prefix, c, dtr_cnfg=1, version='openQCD', **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.read_gf_coupling": {"fullname": "pyerrors.input.openQCD.read_gf_coupling", "modulename": "pyerrors.input.openQCD", "qualname": "read_gf_coupling", "kind": "function", "doc": "

    Read the gradient flow coupling based on sfqcd gradient flow measurements. See 1607.06423 for details.

    \n\n

    Note: The current implementation only works for c=0.3 and T=L. The definition of the coupling in 1607.06423 requires projection to topological charge zero which is not done within this function but has to be performed in a separate step.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath of the measurement files
    • \n
    • prefix (str):\nprefix of the measurement files, e.g. _id0_r0.ms.dat.\nIgnored if file names are passed explicitly via keyword files.
    • \n
    • c (double):\nSmearing radius in units of the lattice extent, c = sqrt(8 t0) / L.
    • \n
    • dtr_cnfg (int):\n(optional) parameter that specifies the number of measurements\nbetween two configs.\nIf it is not set, the distance between two measurements\nin the file is assumed to be the distance between two configurations.
    • \n
    • steps (int):\n(optional) Distance between two configurations in units of trajectories /\n cycles. Assumed to be the distance between two measurements * dtr_cnfg if not given
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • files (list):\nspecify the exact files that need to be read\nfrom path, practical if e.g. only one replicum is needed
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length.
    • \n
    • postfix (str):\npostfix of the file to read, e.g. '.gfms.dat' for openQCD-files
    • \n
    • Zeuthen_flow (bool):\n(optional) If True, the Zeuthen flow is used for the coupling. If False, the Wilson flow is used.
    • \n
    \n", "signature": "(path, prefix, c, dtr_cnfg=1, Zeuthen_flow=True, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.qtop_projection": {"fullname": "pyerrors.input.openQCD.qtop_projection", "modulename": "pyerrors.input.openQCD", "qualname": "qtop_projection", "kind": "function", "doc": "

    Returns the projection to the topological charge sector defined by target.

    \n\n
    Parameters
    \n\n
      \n
    • qtop (Obs):\nTopological charge.
    • \n
    • target (int):\nSpecifies the topological sector to be reweighted to (default 0)
    • \n
    \n\n
    Returns
    \n\n
      \n
    • reto (Obs):\nprojection to the topological charge sector defined by target
    • \n
    \n", "signature": "(qtop, target=0):", "funcdef": "def"}, "pyerrors.input.openQCD.read_qtop_sector": {"fullname": "pyerrors.input.openQCD.read_qtop_sector", "modulename": "pyerrors.input.openQCD", "qualname": "read_qtop_sector", "kind": "function", "doc": "

    Constructs reweighting factors to a specified topological sector.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath of the measurement files
    • \n
    • prefix (str):\nprefix of the measurement files, e.g. _id0_r0.ms.dat
    • \n
    • c (double):\nSmearing radius in units of the lattice extent, c = sqrt(8 t0) / L
    • \n
    • target (int):\nSpecifies the topological sector to be reweighted to (default 0)
    • \n
    • dtr_cnfg (int):\n(optional) parameter that specifies the number of trajectories\nbetween two configs.\nif it is not set, the distance between two measurements\nin the file is assumed to be the distance between two configurations.
    • \n
    • steps (int):\n(optional) Distance between two configurations in units of trajectories /\n cycles. Assumed to be the distance between two measurements * dtr_cnfg if not given
    • \n
    • version (str):\nversion string of the openQCD (sfqcd) version used to create\nthe ensemble. Default is 2.0. May also be set to sfqcd.
    • \n
    • L (int):\nspatial length of the lattice in L/a.\nHAS to be set if version != sfqcd, since openQCD does not provide\nthis in the header
    • \n
    • r_start (list):\noffset of the first ensemble, making it easier to match\nlater on with other Obs
    • \n
    • r_stop (list):\nlast configurations that need to be read (per replicum)
    • \n
    • files (list):\nspecify the exact files that need to be read\nfrom path, practical if e.g. only one replicum is needed
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length
    • \n
    • Zeuthen_flow (bool):\n(optional) If True, the Zeuthen flow is used for Qtop. Only possible\nfor version=='sfqcd' If False, the Wilson flow is used.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • reto (Obs):\nprojection to the topological charge sector defined by target
    • \n
    \n", "signature": "(path, prefix, c, target=0, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.read_ms5_xsf": {"fullname": "pyerrors.input.openQCD.read_ms5_xsf", "modulename": "pyerrors.input.openQCD", "qualname": "read_ms5_xsf", "kind": "function", "doc": "

    Read data from files in the specified directory with the specified prefix and quark combination extension, and return a Corr object containing the data.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nThe directory to search for the files in.
    • \n
    • prefix (str):\nThe prefix to match the files against.
    • \n
    • qc (str):\nThe quark combination extension to match the files against.
    • \n
    • corr (str):\nThe correlator to extract data for.
    • \n
    • sep (str, optional):\nThe separator to use when parsing the replica names.
    • \n
    • **kwargs: Additional keyword arguments. The following keyword arguments are recognized:

      \n\n
        \n
      • names (List[str]): A list of names to use for the replicas.
      • \n
      • files (List[str]): A list of files to read data from.
      • \n
      • idl (List[List[int]]): A list of idls per replicum, restricting data to the idls given.
      • \n
    • \n
    \n\n
    Returns
    \n\n
      \n
    • Corr: A complex valued Corr object containing the data read from the files. In the case of boundary-to-bulk correlators.
    • \n
    • or
    • \n
    • CObs: A complex valued CObs object containing the data read from the files. In the case of boundary-to-boundary correlators.
    • \n
    \n\n
    Raises
    \n\n
      \n
    • FileNotFoundError: If no files matching the specified prefix and quark combination extension are found in the specified directory.
    • \n
    • IOError: If there is an error reading a file.
    • \n
    • struct.error: If there is an error unpacking binary data.
    • \n
    \n", "signature": "(path, prefix, qc, corr, sep='r', **kwargs):", "funcdef": "def"}, "pyerrors.input.pandas": {"fullname": "pyerrors.input.pandas", "modulename": "pyerrors.input.pandas", "kind": "module", "doc": "

    \n"}, "pyerrors.input.pandas.to_sql": {"fullname": "pyerrors.input.pandas.to_sql", "modulename": "pyerrors.input.pandas", "qualname": "to_sql", "kind": "function", "doc": "

    Write DataFrame including Obs or Corr valued columns to sqlite database.

    \n\n
    Parameters
    \n\n
      \n
    • df (pandas.DataFrame):\nDataframe to be written to the database.
    • \n
    • table_name (str):\nName of the table in the database.
    • \n
    • db (str):\nPath to the sqlite database.
    • \n
    • if_exists (str):\nHow to behave if the table already exists. Options: 'fail', 'replace', 'append'.
    • \n
    • gz (bool):\nIf True the json strings are gzipped.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
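    A minimal sketch writing and reading back an Obs valued column; the DataFrame contents and the database path are placeholders:

    >>> import pandas as pd
    >>> import pyerrors as pe
    >>> df = pd.DataFrame({"label": ["m_pi"], "value": [obs1]})
    >>> pe.input.pandas.to_sql(df, "results", "analysis.sqlite", if_exists="replace")
    >>> df_read = pe.input.pandas.read_sql("SELECT * FROM results", "analysis.sqlite", auto_gamma=True)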
    \n", "signature": "(df, table_name, db, if_exists='fail', gz=True, **kwargs):", "funcdef": "def"}, "pyerrors.input.pandas.read_sql": {"fullname": "pyerrors.input.pandas.read_sql", "modulename": "pyerrors.input.pandas", "qualname": "read_sql", "kind": "function", "doc": "

    Execute SQL query on sqlite database and obtain DataFrame including Obs or Corr valued columns.

    \n\n
    Parameters
    \n\n
      \n
    • sql (str):\nSQL query to be executed.
    • \n
    • db (str):\nPath to the sqlite database.
    • \n
    • auto_gamma (bool):\nIf True applies the gamma_method to all imported Obs objects with the default parameters for\nthe error analysis. Default False.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (pandas.DataFrame):\nDataframe with the content of the sqlite database.
    • \n
    \n", "signature": "(sql, db, auto_gamma=False, **kwargs):", "funcdef": "def"}, "pyerrors.input.pandas.dump_df": {"fullname": "pyerrors.input.pandas.dump_df", "modulename": "pyerrors.input.pandas", "qualname": "dump_df", "kind": "function", "doc": "

    Exports a pandas DataFrame containing Obs valued columns to a (gzipped) csv file.

    \n\n

    Before making use of pandas' to_csv functionality, Obs objects are serialized via the standardized\njson format of pyerrors.

    \n\n
    Parameters
    \n\n
      \n
    • df (pandas.DataFrame):\nDataframe to be dumped to a file.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • gz (bool):\nIf True, the output is a gzipped csv file. If False, the output is a csv file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
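    A sketch of the csv round trip; the filename is a placeholder and the appropriate csv(.gz) extension is assumed to be handled by the functions:

    >>> import pyerrors as pe
    >>> pe.input.pandas.dump_df(df, "df_output")
    >>> df_read = pe.input.pandas.load_df("df_output", auto_gamma=True)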
    \n", "signature": "(df, fname, gz=True):", "funcdef": "def"}, "pyerrors.input.pandas.load_df": {"fullname": "pyerrors.input.pandas.load_df", "modulename": "pyerrors.input.pandas", "qualname": "load_df", "kind": "function", "doc": "

    Imports a pandas DataFrame from a .csv(.gz) file in which Obs objects are serialized as json strings.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • auto_gamma (bool):\nIf True applies the gamma_method to all imported Obs objects with the default parameters for\nthe error analysis. Default False.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes a plain csv file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (pandas.DataFrame):\nDataframe with the content of the sqlite database.
    • \n
    \n", "signature": "(fname, auto_gamma=False, gz=True):", "funcdef": "def"}, "pyerrors.input.sfcf": {"fullname": "pyerrors.input.sfcf", "modulename": "pyerrors.input.sfcf", "kind": "module", "doc": "

    \n"}, "pyerrors.input.sfcf.read_sfcf": {"fullname": "pyerrors.input.sfcf.read_sfcf", "modulename": "pyerrors.input.sfcf", "qualname": "read_sfcf", "kind": "function", "doc": "

    Read sfcf files from given folder structure.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nPath to the sfcf files.
    • \n
    • prefix (str):\nPrefix of the sfcf files.
    • \n
    • name (str):\nName of the correlation function to read.
    • \n
    • quarks (str):\nLabel of the quarks used in the sfcf input file. e.g. \"quark quark\"\nfor version 0.0 this does NOT need to be given with the typical \" - \"\nthat is present in the output file,\nthis is done automatically for this version
    • \n
    • corr_type (str):\nType of correlation function to read. Can be\n
        \n
      • 'bi' for boundary-inner
      • \n
      • 'bb' for boundary-boundary
      • \n
      • 'bib' for boundary-inner-boundary
      • \n
    • \n
    • noffset (int):\nOffset of the source (only relevant when wavefunctions are used)
    • \n
    • wf (int):\nID of wave function
    • \n
    • wf2 (int):\nID of the second wavefunction\n(only relevant for boundary-to-boundary correlation functions)
    • \n
    • im (bool):\nif True, read imaginary instead of real part\nof the correlation function.
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length
    • \n
    • ens_name (str):\nreplaces the name of the ensemble
    • \n
    • version (str):\nversion of SFCF, with which the measurement was done.\nif the compact output option (-c) was specified,\nappend a \"c\" to the version (e.g. \"1.0c\")\nif the append output option (-a) was specified,\nappend an \"a\" to the version
    • \n
    • cfg_separator (str):\nString that separates the ensemble identifier from the configuration number (default 'n').
    • \n
    • replica (list):\nlist of replica to be read, default is all
    • \n
    • files (list):\nlist of files to be read per replica, default is all.\nfor non-compact output format, hand the folders to be read here.
    • \n
    • check_configs (list[list[int]]):\nlist of list of supposed configs, eg. [range(1,1000)]\nfor one replicum with 1000 configs
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nlist of Observables with length T, observable per timeslice.\nbb-type correlators have length 1.
    • \n
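    A usage sketch; path, prefix, correlator name and quark labels are placeholders that depend on the SFCF run in question:

    >>> import pyerrors as pe
    >>> f_A = pe.input.sfcf.read_sfcf("./sfcf_output", "run", "f_A", quarks="lquark lquark", version="2.0c")
    >>> len(f_A)  # one Obs per timeslice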
    \n", "signature": "(\tpath,\tprefix,\tname,\tquarks='.*',\tcorr_type='bi',\tnoffset=0,\twf=0,\twf2=0,\tversion='1.0c',\tcfg_separator='n',\tsilent=False,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.utils": {"fullname": "pyerrors.input.utils", "modulename": "pyerrors.input.utils", "kind": "module", "doc": "

    \n"}, "pyerrors.input.utils.sort_names": {"fullname": "pyerrors.input.utils.sort_names", "modulename": "pyerrors.input.utils", "qualname": "sort_names", "kind": "function", "doc": "

    Sorts a list of replica names by searching for 'r' and 'id' in the replicum strings.\nIf this search fails, a fallback method is used,\nwhere the strings are simply compared and the first differing numeral is used for differentiation.

    \n\n
    Parameters
    \n\n
      \n
    • ll (list):\nlist to sort
    • \n
    \n\n
    Returns
    \n\n
      \n
    • ll (list):\nsorted list
    • \n
    \n", "signature": "(ll):", "funcdef": "def"}, "pyerrors.input.utils.check_idl": {"fullname": "pyerrors.input.utils.check_idl", "modulename": "pyerrors.input.utils", "qualname": "check_idl", "kind": "function", "doc": "

    Checks if a list of configurations is contained in an idl.

    \n\n
    Parameters
    \n\n
      \n
    • idl (range or list):\nidl of the current replicum
    • \n
    • che (list):\nlist of configurations to be checked against
    • \n
    \n\n
    Returns
    \n\n
      \n
    • miss_str (str):\nstring with integers of which idls are missing
    • \n
    \n", "signature": "(idl, che):", "funcdef": "def"}, "pyerrors.linalg": {"fullname": "pyerrors.linalg", "modulename": "pyerrors.linalg", "kind": "module", "doc": "

    \n"}, "pyerrors.linalg.matmul": {"fullname": "pyerrors.linalg.matmul", "modulename": "pyerrors.linalg", "qualname": "matmul", "kind": "function", "doc": "

    Matrix multiply all operands.

    \n\n
    Parameters
    \n\n
      \n
    • operands (numpy.ndarray):\nArbitrary number of 2d-numpy arrays which can be real or complex\nObs valued.
    • \n
    • This implementation is faster compared to standard multiplication via the @ operator.
    • \n
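    A short sketch, assuming mat_a and mat_b are existing 2d numpy arrays with Obs entries:

    >>> import pyerrors as pe
    >>> prod = pe.linalg.matmul(mat_a, mat_b)
    >>> prod_jack = pe.linalg.jack_matmul(mat_a, mat_b)  # jackknife approximation, faster for large matrices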
    \n", "signature": "(*operands):", "funcdef": "def"}, "pyerrors.linalg.jack_matmul": {"fullname": "pyerrors.linalg.jack_matmul", "modulename": "pyerrors.linalg", "qualname": "jack_matmul", "kind": "function", "doc": "

    Matrix multiply both operands making use of the jackknife approximation.

    \n\n
    Parameters
    \n\n
      \n
    • operands (numpy.ndarray):\nArbitrary number of 2d-numpy arrays which can be real or complex\nObs valued.
    • \n
    • For large matrices this is considerably faster compared to matmul.
    • \n
    \n", "signature": "(*operands):", "funcdef": "def"}, "pyerrors.linalg.einsum": {"fullname": "pyerrors.linalg.einsum", "modulename": "pyerrors.linalg", "qualname": "einsum", "kind": "function", "doc": "

    Wrapper for numpy.einsum

    \n\n
    Parameters
    \n\n
      \n
    • subscripts (str):\nSubscripts for summation (see numpy documentation for details)
    • \n
    • operands (numpy.ndarray):\nArbitrary number of 2d-numpy arrays which can be real or complex\nObs valued.
    • \n
    \n", "signature": "(subscripts, *operands):", "funcdef": "def"}, "pyerrors.linalg.inv": {"fullname": "pyerrors.linalg.inv", "modulename": "pyerrors.linalg", "qualname": "inv", "kind": "function", "doc": "

    Inverse of Obs or CObs valued matrices.

    \n", "signature": "(x):", "funcdef": "def"}, "pyerrors.linalg.cholesky": {"fullname": "pyerrors.linalg.cholesky", "modulename": "pyerrors.linalg", "qualname": "cholesky", "kind": "function", "doc": "

    Cholesky decomposition of Obs valued matrices.

    \n", "signature": "(x):", "funcdef": "def"}, "pyerrors.linalg.det": {"fullname": "pyerrors.linalg.det", "modulename": "pyerrors.linalg", "qualname": "det", "kind": "function", "doc": "

    Determinant of Obs valued matrices.

    \n", "signature": "(x):", "funcdef": "def"}, "pyerrors.linalg.eigh": {"fullname": "pyerrors.linalg.eigh", "modulename": "pyerrors.linalg", "qualname": "eigh", "kind": "function", "doc": "

    Computes the eigenvalues and eigenvectors of a given hermitian matrix of Obs according to np.linalg.eigh.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.linalg.eig": {"fullname": "pyerrors.linalg.eig", "modulename": "pyerrors.linalg", "qualname": "eig", "kind": "function", "doc": "

    Computes the eigenvalues of a given matrix of Obs according to np.linalg.eig.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.linalg.pinv": {"fullname": "pyerrors.linalg.pinv", "modulename": "pyerrors.linalg", "qualname": "pinv", "kind": "function", "doc": "

    Computes the Moore-Penrose pseudoinverse of a matrix of Obs.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.linalg.svd": {"fullname": "pyerrors.linalg.svd", "modulename": "pyerrors.linalg", "qualname": "svd", "kind": "function", "doc": "

    Computes the singular value decomposition of a matrix of Obs.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.misc": {"fullname": "pyerrors.misc", "modulename": "pyerrors.misc", "kind": "module", "doc": "

    \n"}, "pyerrors.misc.print_config": {"fullname": "pyerrors.misc.print_config", "modulename": "pyerrors.misc", "qualname": "print_config", "kind": "function", "doc": "

    Print information about version of python, pyerrors and dependencies.

    \n", "signature": "():", "funcdef": "def"}, "pyerrors.misc.errorbar": {"fullname": "pyerrors.misc.errorbar", "modulename": "pyerrors.misc", "qualname": "errorbar", "kind": "function", "doc": "

    pyerrors wrapper for the errorbar method of matplotlib

    \n\n
    Parameters
    \n\n
      \n
    • x (list):\nA list of x-values which can be Obs.
    • \n
    • y (list):\nA list of y-values which can be Obs.
    • \n
    • axes ((matplotlib.pyplot.axes)):\nThe axes to plot on. default is plt.
    • \n
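    A sketch assuming x_vals is a list of floats and y_obs a list of Obs whose errors have been computed with the gamma_method; further keyword arguments are presumably passed on to matplotlib:

    >>> import matplotlib.pyplot as plt
    >>> import pyerrors as pe
    >>> pe.misc.errorbar(x_vals, y_obs, marker="o", ls="none")
    >>> plt.show()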
    \n", "signature": "(\tx,\ty,\taxes=<module 'matplotlib.pyplot' from '/opt/hostedtoolcache/Python/3.10.12/x64/lib/python3.10/site-packages/matplotlib/pyplot.py'>,\t**kwargs):", "funcdef": "def"}, "pyerrors.misc.dump_object": {"fullname": "pyerrors.misc.dump_object", "modulename": "pyerrors.misc", "qualname": "dump_object", "kind": "function", "doc": "

    Dump object into pickle file.

    \n\n
    Parameters
    \n\n
      \n
    • obj (object):\nobject to be saved in the pickle file
    • \n
    • name (str):\nname of the file
    • \n
    • path (str):\nspecifies a custom path for the file (default '.')
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
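    A minimal sketch; the object name and path are placeholders, and the pickle file is assumed to be written with a .p extension:

    >>> import pyerrors as pe
    >>> pe.misc.dump_object(my_obs, "my_obs_backup", path="./pickles")
    >>> reloaded = pe.misc.load_object("./pickles/my_obs_backup.p")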
    \n", "signature": "(obj, name, **kwargs):", "funcdef": "def"}, "pyerrors.misc.load_object": {"fullname": "pyerrors.misc.load_object", "modulename": "pyerrors.misc", "qualname": "load_object", "kind": "function", "doc": "

    Load object from pickle file.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the file
    • \n
    \n\n
    Returns
    \n\n
      \n
    • object (Obs):\nLoaded Object
    • \n
    \n", "signature": "(path):", "funcdef": "def"}, "pyerrors.misc.pseudo_Obs": {"fullname": "pyerrors.misc.pseudo_Obs", "modulename": "pyerrors.misc", "qualname": "pseudo_Obs", "kind": "function", "doc": "

    Generate an Obs object with given value, dvalue and name for test purposes

    \n\n
    Parameters
    \n\n
      \n
    • value (float):\ncentral value of the Obs to be generated.
    • \n
    • dvalue (float):\nerror of the Obs to be generated.
    • \n
    • name (str):\nname of the ensemble for which the Obs is to be generated.
    • \n
    • samples (int):\nnumber of samples for the Obs (default 1000).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (Obs):\nGenerated Observable
    • \n
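    A short example generating a test observable with given central value and error:

    >>> import pyerrors as pe
    >>> test_obs = pe.pseudo_Obs(2.3, 0.1, "test_ensemble", samples=1000)
    >>> test_obs.gamma_method()
    >>> print(test_obs)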
    \n", "signature": "(value, dvalue, name, samples=1000):", "funcdef": "def"}, "pyerrors.misc.gen_correlated_data": {"fullname": "pyerrors.misc.gen_correlated_data", "modulename": "pyerrors.misc", "qualname": "gen_correlated_data", "kind": "function", "doc": "

    Generate observables with given covariance and autocorrelation times.

    \n\n
    Parameters
    \n\n
      \n
    • means (list):\nlist containing the mean value of each observable.
    • \n
    • cov (numpy.ndarray):\ncovariance matrix for the data to be generated.
    • \n
    • name (str):\nensemble name for the data to be generated.
    • \n
    • tau (float or list):\ncan either be a real number or a list with an entry for\nevery dataset.
    • \n
    • samples (int):\nnumber of samples to be generated for each observable.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • corr_obs (list[Obs]):\nGenerated observable list
    • \n
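    A sketch generating two correlated observables with a given covariance matrix and autocorrelation times; the numbers are purely illustrative:

    >>> import numpy as np
    >>> import pyerrors as pe
    >>> cov = np.array([[0.5, 0.2], [0.2, 0.8]])
    >>> obs_list = pe.misc.gen_correlated_data([1.0, 2.0], cov, "synthetic_ensemble", tau=[2.0, 3.0], samples=1000)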
    \n", "signature": "(means, cov, name, tau=0.5, samples=1000):", "funcdef": "def"}, "pyerrors.mpm": {"fullname": "pyerrors.mpm", "modulename": "pyerrors.mpm", "kind": "module", "doc": "

    \n"}, "pyerrors.mpm.matrix_pencil_method": {"fullname": "pyerrors.mpm.matrix_pencil_method", "modulename": "pyerrors.mpm", "qualname": "matrix_pencil_method", "kind": "function", "doc": "

    Matrix pencil method to extract k energy levels from data

    \n\n

    Implementation of the matrix pencil method based on\neq. (2.17) of Y. Hua, T. K. Sarkar, IEEE Trans. Acoust. 38, 814-824 (1990)

    \n\n
    Parameters
    \n\n
      \n
    • data (list):\ncan be a list of Obs for the analysis of a single correlator, or a list of lists\nof Obs if several correlators are to be analyzed at once.
    • \n
    • k (int):\nNumber of states to extract (default 1).
    • \n
    • p (int):\nmatrix pencil parameter which filters noise. The optimal value is expected between\nlen(data)/3 and 2*len(data)/3. The computation is more expensive the closer p is\nto len(data)/2 but could possibly suppress more noise (default len(data)//2).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • energy_levels (list[Obs]):\nExtracted energy levels
    • \n
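    A minimal sketch, assuming corr_data is an existing list of Obs representing a single correlator:

    >>> import pyerrors as pe
    >>> energy_levels = pe.mpm.matrix_pencil_method(corr_data, k=1)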
    \n", "signature": "(corrs, k=1, p=None, **kwargs):", "funcdef": "def"}, "pyerrors.obs": {"fullname": "pyerrors.obs", "modulename": "pyerrors.obs", "kind": "module", "doc": "

    \n"}, "pyerrors.obs.Obs": {"fullname": "pyerrors.obs.Obs", "modulename": "pyerrors.obs", "qualname": "Obs", "kind": "class", "doc": "

    Class for a general observable.

    \n\n

    Instances of Obs are the basic objects of a pyerrors error analysis.\nThey are initialized with a list which contains arrays of samples for\ndifferent ensembles/replica and another list of same length which contains\nthe names of the ensembles/replica. Mathematical operations can be\nperformed on instances. The result is another instance of Obs. The error of\nan instance can be computed with the gamma_method. Also contains additional\nmethods for output and visualization of the error calculation.

    \n\n
    Attributes
    \n\n
      \n
    • S_global (float):\nStandard value for S (default 2.0)
    • \n
    • S_dict (dict):\nDictionary for S values. If an entry for a given ensemble\nexists this overwrites the standard value for that ensemble.
    • \n
    • tau_exp_global (float):\nStandard value for tau_exp (default 0.0)
    • \n
    • tau_exp_dict (dict):\nDictionary for tau_exp values. If an entry for a given ensemble exists\nthis overwrites the standard value for that ensemble.
    • \n
    • N_sigma_global (float):\nStandard value for N_sigma (default 1.0)
    • \n
    • N_sigma_dict (dict):\nDictionary for N_sigma values. If an entry for a given ensemble exists\nthis overwrites the standard value for that ensemble.
    • \n
    \n"}, "pyerrors.obs.Obs.__init__": {"fullname": "pyerrors.obs.Obs.__init__", "modulename": "pyerrors.obs", "qualname": "Obs.__init__", "kind": "function", "doc": "

    Initialize Obs object.

    \n\n
    Parameters
    \n\n
      \n
    • samples (list):\nlist of numpy arrays containing the Monte Carlo samples
    • \n
    • names (list):\nlist of strings labeling the individual samples
    • \n
    • idl (list, optional):\nlist of ranges or lists on which the samples are defined
    • \n
    \n", "signature": "(samples, names, idl=None, **kwargs)"}, "pyerrors.obs.Obs.S_global": {"fullname": "pyerrors.obs.Obs.S_global", "modulename": "pyerrors.obs", "qualname": "Obs.S_global", "kind": "variable", "doc": "

    \n", "default_value": "2.0"}, "pyerrors.obs.Obs.S_dict": {"fullname": "pyerrors.obs.Obs.S_dict", "modulename": "pyerrors.obs", "qualname": "Obs.S_dict", "kind": "variable", "doc": "

    \n", "default_value": "{}"}, "pyerrors.obs.Obs.tau_exp_global": {"fullname": "pyerrors.obs.Obs.tau_exp_global", "modulename": "pyerrors.obs", "qualname": "Obs.tau_exp_global", "kind": "variable", "doc": "

    \n", "default_value": "0.0"}, "pyerrors.obs.Obs.tau_exp_dict": {"fullname": "pyerrors.obs.Obs.tau_exp_dict", "modulename": "pyerrors.obs", "qualname": "Obs.tau_exp_dict", "kind": "variable", "doc": "

    \n", "default_value": "{}"}, "pyerrors.obs.Obs.N_sigma_global": {"fullname": "pyerrors.obs.Obs.N_sigma_global", "modulename": "pyerrors.obs", "qualname": "Obs.N_sigma_global", "kind": "variable", "doc": "

    \n", "default_value": "1.0"}, "pyerrors.obs.Obs.N_sigma_dict": {"fullname": "pyerrors.obs.Obs.N_sigma_dict", "modulename": "pyerrors.obs", "qualname": "Obs.N_sigma_dict", "kind": "variable", "doc": "

    \n", "default_value": "{}"}, "pyerrors.obs.Obs.names": {"fullname": "pyerrors.obs.Obs.names", "modulename": "pyerrors.obs", "qualname": "Obs.names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.shape": {"fullname": "pyerrors.obs.Obs.shape", "modulename": "pyerrors.obs", "qualname": "Obs.shape", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.r_values": {"fullname": "pyerrors.obs.Obs.r_values", "modulename": "pyerrors.obs", "qualname": "Obs.r_values", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.deltas": {"fullname": "pyerrors.obs.Obs.deltas", "modulename": "pyerrors.obs", "qualname": "Obs.deltas", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.N": {"fullname": "pyerrors.obs.Obs.N", "modulename": "pyerrors.obs", "qualname": "Obs.N", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.idl": {"fullname": "pyerrors.obs.Obs.idl", "modulename": "pyerrors.obs", "qualname": "Obs.idl", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.ddvalue": {"fullname": "pyerrors.obs.Obs.ddvalue", "modulename": "pyerrors.obs", "qualname": "Obs.ddvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.reweighted": {"fullname": "pyerrors.obs.Obs.reweighted", "modulename": "pyerrors.obs", "qualname": "Obs.reweighted", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.tag": {"fullname": "pyerrors.obs.Obs.tag", "modulename": "pyerrors.obs", "qualname": "Obs.tag", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.value": {"fullname": "pyerrors.obs.Obs.value", "modulename": "pyerrors.obs", "qualname": "Obs.value", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.dvalue": {"fullname": "pyerrors.obs.Obs.dvalue", "modulename": "pyerrors.obs", "qualname": "Obs.dvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_names": {"fullname": "pyerrors.obs.Obs.e_names", "modulename": "pyerrors.obs", "qualname": "Obs.e_names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.cov_names": {"fullname": "pyerrors.obs.Obs.cov_names", "modulename": "pyerrors.obs", "qualname": "Obs.cov_names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.mc_names": {"fullname": "pyerrors.obs.Obs.mc_names", "modulename": "pyerrors.obs", "qualname": "Obs.mc_names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_content": {"fullname": "pyerrors.obs.Obs.e_content", "modulename": "pyerrors.obs", "qualname": "Obs.e_content", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.covobs": {"fullname": "pyerrors.obs.Obs.covobs", "modulename": "pyerrors.obs", "qualname": "Obs.covobs", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.gamma_method": {"fullname": "pyerrors.obs.Obs.gamma_method", "modulename": "pyerrors.obs", "qualname": "Obs.gamma_method", "kind": "function", "doc": "

    Estimate the error and related properties of the Obs.

    \n\n
    Parameters
    \n\n
      \n
    • S (float):\nspecifies a custom value for the parameter S (default 2.0).\nIf set to 0 it is assumed that the data exhibits no\nautocorrelation. In this case the error estimate coincides\nwith the sample standard error.
    • \n
    • tau_exp (float):\npositive value triggers the critical slowing down analysis\n(default 0.0).
    • \n
    • N_sigma (float):\nnumber of standard deviations from zero until the tail is\nattached to the autocorrelation function (default 1).
    • \n
    • fft (bool):\ndetermines whether the fft algorithm is used for the computation\nof the autocorrelation function (default True)
    • \n
    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.obs.Obs.gm": {"fullname": "pyerrors.obs.Obs.gm", "modulename": "pyerrors.obs", "qualname": "Obs.gm", "kind": "function", "doc": "

    Estimate the error and related properties of the Obs.

    \n\n
    Parameters
    \n\n
      \n
    • S (float):\nspecifies a custom value for the parameter S (default 2.0).\nIf set to 0 it is assumed that the data exhibits no\nautocorrelation. In this case the error estimate coincides\nwith the sample standard error.
    • \n
    • tau_exp (float):\npositive value triggers the critical slowing down analysis\n(default 0.0).
    • \n
    • N_sigma (float):\nnumber of standard deviations from zero until the tail is\nattached to the autocorrelation function (default 1).
    • \n
    • fft (bool):\ndetermines whether the fft algorithm is used for the computation\nof the autocorrelation function (default True)
    • \n
    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.obs.Obs.details": {"fullname": "pyerrors.obs.Obs.details", "modulename": "pyerrors.obs", "qualname": "Obs.details", "kind": "function", "doc": "

    Output detailed properties of the Obs.

    \n\n
    Parameters
    \n\n
      \n
    • ens_content (bool):\nprint details about the ensembles and replica if true.
    • \n
    \n", "signature": "(self, ens_content=True):", "funcdef": "def"}, "pyerrors.obs.Obs.reweight": {"fullname": "pyerrors.obs.Obs.reweight", "modulename": "pyerrors.obs", "qualname": "Obs.reweight", "kind": "function", "doc": "

    Reweight the obs with given reweighting factors.

    \n\n
    Parameters
    \n\n
      \n
    • weight (Obs):\nReweighting factor. An Observable that has to be defined on a superset of the\nconfigurations in obs[i].idl for all i.
    • \n
    • all_configs (bool):\nif True, the reweighted observables are normalized by the average of\nthe reweighting factor on all configurations in weight.idl and not\non the configurations in obs[i].idl. Default False.
    • \n
    \n", "signature": "(self, weight):", "funcdef": "def"}, "pyerrors.obs.Obs.is_zero_within_error": {"fullname": "pyerrors.obs.Obs.is_zero_within_error", "modulename": "pyerrors.obs", "qualname": "Obs.is_zero_within_error", "kind": "function", "doc": "

    Checks whether the observable is zero within 'sigma' standard errors.

    \n\n
    Parameters
    \n\n
      \n
    • sigma (int):\nNumber of standard errors used for the check.
    • \n
    • Only works properly when the gamma method has been run.
    • \n
    \n", "signature": "(self, sigma=1):", "funcdef": "def"}, "pyerrors.obs.Obs.is_zero": {"fullname": "pyerrors.obs.Obs.is_zero", "modulename": "pyerrors.obs", "qualname": "Obs.is_zero", "kind": "function", "doc": "

    Checks whether the observable is zero within a given tolerance.

    \n\n
    Parameters
    \n\n
      \n
    • atol (float):\nAbsolute tolerance (for details see numpy documentation).
    • \n
    \n", "signature": "(self, atol=1e-10):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_tauint": {"fullname": "pyerrors.obs.Obs.plot_tauint", "modulename": "pyerrors.obs", "qualname": "Obs.plot_tauint", "kind": "function", "doc": "

    Plot integrated autocorrelation time for each ensemble.

    \n\n
    Parameters
    \n\n
      \n
    • save (str):\nsaves the figure to a file named 'save' if given.
    • \n
    \n", "signature": "(self, save=None):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_rho": {"fullname": "pyerrors.obs.Obs.plot_rho", "modulename": "pyerrors.obs", "qualname": "Obs.plot_rho", "kind": "function", "doc": "

    Plot normalized autocorrelation function time for each ensemble.

    \n\n
    Parameters
    \n\n
      \n
    • save (str):\nsaves the figure to a file named 'save' if given.
    • \n
    \n", "signature": "(self, save=None):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_rep_dist": {"fullname": "pyerrors.obs.Obs.plot_rep_dist", "modulename": "pyerrors.obs", "qualname": "Obs.plot_rep_dist", "kind": "function", "doc": "

    Plot replica distribution for each ensemble with more than one replicum.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_history": {"fullname": "pyerrors.obs.Obs.plot_history", "modulename": "pyerrors.obs", "qualname": "Obs.plot_history", "kind": "function", "doc": "

    Plot derived Monte Carlo history for each ensemble

    \n\n
    Parameters
    \n\n
      \n
    • expand (bool):\nshow expanded history for irregular Monte Carlo chains (default: True).
    • \n
    \n", "signature": "(self, expand=True):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_piechart": {"fullname": "pyerrors.obs.Obs.plot_piechart", "modulename": "pyerrors.obs", "qualname": "Obs.plot_piechart", "kind": "function", "doc": "

    Plot a pie chart which shows the fractional contribution of each\nensemble to the error and returns a dictionary containing the fractions.

    \n\n
    Parameters
    \n\n
      \n
    • save (str):\nsaves the figure to a file named 'save' if given.
    • \n
    \n", "signature": "(self, save=None):", "funcdef": "def"}, "pyerrors.obs.Obs.dump": {"fullname": "pyerrors.obs.Obs.dump", "modulename": "pyerrors.obs", "qualname": "Obs.dump", "kind": "function", "doc": "

    Dump the Obs to a file 'name' of chosen format.

    \n\n
    Parameters
    \n\n
      \n
    • filename (str):\nname of the file to be saved.
    • \n
    • datatype (str):\nFormat of the exported file. Supported formats include\n\"json.gz\" and \"pickle\"
    • \n
    • description (str):\nDescription for output file, only relevant for json.gz format.
    • \n
    • path (str):\nspecifies a custom path for the file (default '.')
    • \n
    \n", "signature": "(self, filename, datatype='json.gz', description='', **kwargs):", "funcdef": "def"}, "pyerrors.obs.Obs.export_jackknife": {"fullname": "pyerrors.obs.Obs.export_jackknife", "modulename": "pyerrors.obs", "qualname": "Obs.export_jackknife", "kind": "function", "doc": "

    Export jackknife samples from the Obs

    \n\n
    Returns
    \n\n
      \n
    • numpy.ndarray: Returns a numpy array of length N + 1 where N is the number of samples\nfor the given ensemble and replicum. The zeroth entry of the array contains\nthe mean value of the Obs, entries 1 to N contain the N jackknife samples\nderived from the Obs. The current implementation only works for observables\ndefined on exactly one ensemble and replicum. The derived jackknife samples\nshould agree with samples from a full jackknife analysis up to O(1/N).
    • \n
    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.sqrt": {"fullname": "pyerrors.obs.Obs.sqrt", "modulename": "pyerrors.obs", "qualname": "Obs.sqrt", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.log": {"fullname": "pyerrors.obs.Obs.log", "modulename": "pyerrors.obs", "qualname": "Obs.log", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.exp": {"fullname": "pyerrors.obs.Obs.exp", "modulename": "pyerrors.obs", "qualname": "Obs.exp", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.sin": {"fullname": "pyerrors.obs.Obs.sin", "modulename": "pyerrors.obs", "qualname": "Obs.sin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.cos": {"fullname": "pyerrors.obs.Obs.cos", "modulename": "pyerrors.obs", "qualname": "Obs.cos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.tan": {"fullname": "pyerrors.obs.Obs.tan", "modulename": "pyerrors.obs", "qualname": "Obs.tan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arcsin": {"fullname": "pyerrors.obs.Obs.arcsin", "modulename": "pyerrors.obs", "qualname": "Obs.arcsin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arccos": {"fullname": "pyerrors.obs.Obs.arccos", "modulename": "pyerrors.obs", "qualname": "Obs.arccos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arctan": {"fullname": "pyerrors.obs.Obs.arctan", "modulename": "pyerrors.obs", "qualname": "Obs.arctan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.sinh": {"fullname": "pyerrors.obs.Obs.sinh", "modulename": "pyerrors.obs", "qualname": "Obs.sinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.cosh": {"fullname": "pyerrors.obs.Obs.cosh", "modulename": "pyerrors.obs", "qualname": "Obs.cosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.tanh": {"fullname": "pyerrors.obs.Obs.tanh", "modulename": "pyerrors.obs", "qualname": "Obs.tanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arcsinh": {"fullname": "pyerrors.obs.Obs.arcsinh", "modulename": "pyerrors.obs", "qualname": "Obs.arcsinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arccosh": {"fullname": "pyerrors.obs.Obs.arccosh", "modulename": "pyerrors.obs", "qualname": "Obs.arccosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arctanh": {"fullname": "pyerrors.obs.Obs.arctanh", "modulename": "pyerrors.obs", "qualname": "Obs.arctanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.N_sigma": {"fullname": "pyerrors.obs.Obs.N_sigma", "modulename": "pyerrors.obs", "qualname": "Obs.N_sigma", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.S": {"fullname": "pyerrors.obs.Obs.S", "modulename": "pyerrors.obs", "qualname": "Obs.S", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_ddvalue": {"fullname": "pyerrors.obs.Obs.e_ddvalue", "modulename": "pyerrors.obs", "qualname": "Obs.e_ddvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_drho": {"fullname": "pyerrors.obs.Obs.e_drho", "modulename": "pyerrors.obs", "qualname": "Obs.e_drho", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_dtauint": {"fullname": "pyerrors.obs.Obs.e_dtauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_dtauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_dvalue": {"fullname": "pyerrors.obs.Obs.e_dvalue", "modulename": "pyerrors.obs", "qualname": "Obs.e_dvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_n_dtauint": {"fullname": "pyerrors.obs.Obs.e_n_dtauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_n_dtauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_n_tauint": {"fullname": "pyerrors.obs.Obs.e_n_tauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_n_tauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_rho": {"fullname": "pyerrors.obs.Obs.e_rho", "modulename": "pyerrors.obs", "qualname": "Obs.e_rho", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_tauint": {"fullname": "pyerrors.obs.Obs.e_tauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_tauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_windowsize": {"fullname": "pyerrors.obs.Obs.e_windowsize", "modulename": "pyerrors.obs", "qualname": "Obs.e_windowsize", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.tau_exp": {"fullname": "pyerrors.obs.Obs.tau_exp", "modulename": "pyerrors.obs", "qualname": "Obs.tau_exp", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs": {"fullname": "pyerrors.obs.CObs", "modulename": "pyerrors.obs", "qualname": "CObs", "kind": "class", "doc": "

    Class for a complex valued observable.

    \n"}, "pyerrors.obs.CObs.__init__": {"fullname": "pyerrors.obs.CObs.__init__", "modulename": "pyerrors.obs", "qualname": "CObs.__init__", "kind": "function", "doc": "

    \n", "signature": "(real, imag=0.0)"}, "pyerrors.obs.CObs.tag": {"fullname": "pyerrors.obs.CObs.tag", "modulename": "pyerrors.obs", "qualname": "CObs.tag", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs.real": {"fullname": "pyerrors.obs.CObs.real", "modulename": "pyerrors.obs", "qualname": "CObs.real", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs.imag": {"fullname": "pyerrors.obs.CObs.imag", "modulename": "pyerrors.obs", "qualname": "CObs.imag", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs.gamma_method": {"fullname": "pyerrors.obs.CObs.gamma_method", "modulename": "pyerrors.obs", "qualname": "CObs.gamma_method", "kind": "function", "doc": "

    Executes the gamma_method for the real and the imaginary part.

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.obs.CObs.is_zero": {"fullname": "pyerrors.obs.CObs.is_zero", "modulename": "pyerrors.obs", "qualname": "CObs.is_zero", "kind": "function", "doc": "

    Checks whether both real and imaginary part are zero within machine precision.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.CObs.conjugate": {"fullname": "pyerrors.obs.CObs.conjugate", "modulename": "pyerrors.obs", "qualname": "CObs.conjugate", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.derived_observable": {"fullname": "pyerrors.obs.derived_observable", "modulename": "pyerrors.obs", "qualname": "derived_observable", "kind": "function", "doc": "

    Construct a derived Obs according to func(data, **kwargs) using automatic differentiation.

    \n\n
    Parameters
    \n\n
      \n
    • func (object):\narbitrary function of the form func(data, **kwargs). For the\nautomatic differentiation to work, all numpy functions have to have\nthe autograd wrapper (use 'import autograd.numpy as anp').
    • \n
    • data (list):\nlist of Obs, e.g. [obs1, obs2, obs3].
    • \n
    • num_grad (bool):\nif True, numerical derivatives are used instead of autograd\n(default False). To control the numerical differentiation the\nkwargs of numdifftools.step_generators.MaxStepGenerator\ncan be used.
    • \n
    • man_grad (list):\nmanually supply a list or an array which contains the jacobian\nof func. Use cautiously, supplying the wrong derivative will\nnot be intercepted.
    • \n
    \n\n
    Notes
    \n\n

    For simple mathematical operations it can be practical to use anonymous\nfunctions. For the ratio of two observables one can e.g. use

    \n\n

    new_obs = derived_observable(lambda x: x[0] / x[1], [obs1, obs2])

    \n", "signature": "(func, data, array_mode=False, **kwargs):", "funcdef": "def"}, "pyerrors.obs.reweight": {"fullname": "pyerrors.obs.reweight", "modulename": "pyerrors.obs", "qualname": "reweight", "kind": "function", "doc": "

    Reweight a list of observables.

    \n\n
    Parameters
    \n\n
      \n
    • weight (Obs):\nReweighting factor. An Observable that has to be defined on a superset of the\nconfigurations in obs[i].idl for all i.
    • \n
    • obs (list):\nlist of Obs, e.g. [obs1, obs2, obs3].
    • \n
    • all_configs (bool):\nif True, the reweighted observables are normalized by the average of\nthe reweighting factor on all configurations in weight.idl and not\non the configurations in obs[i].idl. Default False.
    • \n
    \n", "signature": "(weight, obs, **kwargs):", "funcdef": "def"}, "pyerrors.obs.correlate": {"fullname": "pyerrors.obs.correlate", "modulename": "pyerrors.obs", "qualname": "correlate", "kind": "function", "doc": "

    Correlate two observables.

    \n\n
    Parameters
    \n\n
      \n
    • obs_a (Obs):\nFirst observable
    • \n
    • obs_b (Obs):\nSecond observable
    • \n
    \n\n
    Notes
    \n\n

    Keep in mind to only correlate primary observables which have not been reweighted\nyet. The reweighting has to be applied after correlating the observables.\nCurrently only works if ensembles are identical (this is not strictly necessary).

    \n", "signature": "(obs_a, obs_b):", "funcdef": "def"}, "pyerrors.obs.covariance": {"fullname": "pyerrors.obs.covariance", "modulename": "pyerrors.obs", "qualname": "covariance", "kind": "function", "doc": "

    Calculates the error covariance matrix of a set of observables.

    \n\n

    WARNING: This function should be used with care, especially for observables with support on multiple\n ensembles with differing autocorrelations. See the notes below for details.

    \n\n

    The gamma method has to be applied first to all observables.

    \n\n
    Parameters
    \n\n
      \n
    • obs (list or numpy.ndarray):\nList or one dimensional array of Obs
    • \n
    • visualize (bool):\nIf True plots the corresponding normalized correlation matrix (default False).
    • \n
    • correlation (bool):\nIf True the correlation matrix instead of the error covariance matrix is returned (default False).
    • \n
    • smooth (None or int):\nIf smooth is an integer 'E' between 2 and the dimension of the matrix minus 1 the eigenvalue\nsmoothing procedure of hep-lat/9412087 is applied to the correlation matrix which leaves the\nlargest E eigenvalues essentially unchanged and smoothes the smaller eigenvalues to avoid extremely\nsmall ones.
    • \n
    \n\n
    Notes
    \n\n

    The error covariance is defined such that it agrees with the squared standard error for two identical observables\n$$\\operatorname{cov}(a,a)=\\sum_{s=1}^N\\delta_a^s\\delta_a^s/N^2=\\Gamma_{aa}(0)/N=\\operatorname{var}(a)/N=\\sigma_a^2$$\nin the absence of autocorrelation.\nThe error covariance is estimated by calculating the correlation matrix assuming no autocorrelation and then rescaling the correlation matrix by the full errors including the previous gamma method estimate for the autocorrelation of the observables. The covariance at windowsize 0 is guaranteed to be positive semi-definite\n$$\\sum_{i,j}v_i\\Gamma_{ij}(0)v_j=\\frac{1}{N}\\sum_{s=1}^N\\sum_{i,j}v_i\\delta_i^s\\delta_j^s v_j=\\frac{1}{N}\\sum_{s=1}^N\\sum_{i}|v_i\\delta_i^s|^2\\geq 0\\,,$$ for every $v\\in\\mathbb{R}^M$, while such an identity does not hold for larger windows/lags.\nFor observables defined on a single ensemble our approximation is equivalent to assuming that the integrated autocorrelation time of an off-diagonal element is equal to the geometric mean of the integrated autocorrelation times of the corresponding diagonal elements.\n$$\\tau_{\\mathrm{int}, ij}=\\sqrt{\\tau_{\\mathrm{int}, i}\\times \\tau_{\\mathrm{int}, j}}$$\nThis construction ensures that the estimated covariance matrix is positive semi-definite (up to numerical rounding errors).

    \n", "signature": "(obs, visualize=False, correlation=False, smooth=None, **kwargs):", "funcdef": "def"}, "pyerrors.obs.import_jackknife": {"fullname": "pyerrors.obs.import_jackknife", "modulename": "pyerrors.obs", "qualname": "import_jackknife", "kind": "function", "doc": "

    Imports jackknife samples and returns an Obs

    \n\n
    Parameters
    \n\n
      \n
    • jacks (numpy.ndarray):\nnumpy array containing the mean value as zeroth entry and\nthe N jackknife samples as first to Nth entry.
    • \n
    • name (str):\nname of the ensemble the samples are defined on.
    • \n
    \n", "signature": "(jacks, name, idl=None):", "funcdef": "def"}, "pyerrors.obs.merge_obs": {"fullname": "pyerrors.obs.merge_obs", "modulename": "pyerrors.obs", "qualname": "merge_obs", "kind": "function", "doc": "

    Combine all observables in list_of_obs into one new observable

    \n\n
    Parameters
    \n\n
      \n
    • list_of_obs (list):\nlist of the Obs objects to be combined
    • \n
    \n\n
    Notes
    \n\n

    It is not possible to combine obs which are based on the same replicum

    \n", "signature": "(list_of_obs):", "funcdef": "def"}, "pyerrors.obs.cov_Obs": {"fullname": "pyerrors.obs.cov_Obs", "modulename": "pyerrors.obs", "qualname": "cov_Obs", "kind": "function", "doc": "

    Create an Obs based on mean(s) and a covariance matrix

    \n\n
    Parameters
    \n\n
      \n
    • mean (list of floats or float):\nN mean value(s) of the new Obs
    • \n
    • cov (list or array):\n2d (NxN) Covariance matrix, 1d diagonal entries or 0d covariance
    • \n
    • name (str):\nidentifier for the covariance matrix
    • \n
    • grad (list or array):\nGradient of the Covobs wrt. the means belonging to cov.
    • \n
    \n", "signature": "(means, cov, name, grad=None):", "funcdef": "def"}, "pyerrors.roots": {"fullname": "pyerrors.roots", "modulename": "pyerrors.roots", "kind": "module", "doc": "

    \n"}, "pyerrors.roots.find_root": {"fullname": "pyerrors.roots.find_root", "modulename": "pyerrors.roots", "qualname": "find_root", "kind": "function", "doc": "

    Finds the root of the function func(x, d) where d is an Obs.

    \n\n
    Parameters
    \n\n
      \n
    • d (Obs):\nObs passed to the function.
    • \n
    • func (object):\nFunction to be minimized. Any numpy functions have to use the autograd.numpy wrapper.\nExample:

      \n\n
      \n
      import autograd.numpy as anp\ndef root_func(x, d):\n   return anp.exp(-x ** 2) - d\n
      \n
    • \n
    • guess (float):\nInitial guess for the minimization.

    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (Obs):\nObs valued root of the function.
    • \n
    \n", "signature": "(d, func, guess=1.0, **kwargs):", "funcdef": "def"}, "pyerrors.version": {"fullname": "pyerrors.version", "modulename": "pyerrors.version", "kind": "module", "doc": "

    \n"}}, "docInfo": {"pyerrors": {"qualname": 0, "fullname": 1, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 8312}, "pyerrors.correlators": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 108}, "pyerrors.correlators.Corr.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 40, "bases": 0, "doc": 94}, "pyerrors.correlators.Corr.tag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.content": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.T": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.prange": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.reweighted": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.gamma_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 13}, "pyerrors.correlators.Corr.gm": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 13}, "pyerrors.correlators.Corr.projected": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 64}, "pyerrors.correlators.Corr.item": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 53}, "pyerrors.correlators.Corr.plottable": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 31}, "pyerrors.correlators.Corr.symmetric": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 9}, "pyerrors.correlators.Corr.anti_symmetric": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 10}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 13}, "pyerrors.correlators.Corr.matrix_symmetric": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 10}, "pyerrors.correlators.Corr.GEVP": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 47, "bases": 0, "doc": 326}, "pyerrors.correlators.Corr.Eigenvalue": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 50, "bases": 0, "doc": 59}, "pyerrors.correlators.Corr.Hankel": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 67}, "pyerrors.correlators.Corr.roll": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 26}, "pyerrors.correlators.Corr.reverse": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 9}, "pyerrors.correlators.Corr.thin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 43}, "pyerrors.correlators.Corr.correlate": {"qualname": 2, "fullname": 
4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 53}, "pyerrors.correlators.Corr.reweight": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 79}, "pyerrors.correlators.Corr.T_symmetry": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 51}, "pyerrors.correlators.Corr.deriv": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 25, "bases": 0, "doc": 47}, "pyerrors.correlators.Corr.second_deriv": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 25, "bases": 0, "doc": 126}, "pyerrors.correlators.Corr.m_eff": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 36, "bases": 0, "doc": 148}, "pyerrors.correlators.Corr.fit": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 110}, "pyerrors.correlators.Corr.plateau": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 47, "bases": 0, "doc": 92}, "pyerrors.correlators.Corr.set_prange": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 11}, "pyerrors.correlators.Corr.show": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 161, "bases": 0, "doc": 263}, "pyerrors.correlators.Corr.spaghetti_plot": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 42}, "pyerrors.correlators.Corr.dump": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 69}, "pyerrors.correlators.Corr.print": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.sqrt": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.log": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.exp": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.sin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.cos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.tan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.sinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.cosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.tanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arcsin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arccos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arctan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, 
"pyerrors.correlators.Corr.arcsinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arccosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arctanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.real": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.imag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.prune": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 46, "bases": 0, "doc": 325}, "pyerrors.correlators.Corr.N": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 39, "bases": 0, "doc": 100}, "pyerrors.covobs.Covobs.name": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.value": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.errsq": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 12}, "pyerrors.covobs.Covobs.cov": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.grad": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaX": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaY": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaZ": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaT": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 50, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gamma": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 210, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gamma5": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.identity": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 50, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.epsilon_tensor": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 40}, "pyerrors.dirac.epsilon_tensor_rank4": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 41}, "pyerrors.dirac.Grid_gamma": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 
12, "bases": 0, "doc": 9}, "pyerrors.fits": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.fits.Fit_result": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 3, "doc": 75}, "pyerrors.fits.Fit_result.fit_parameters": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.fits.Fit_result.gamma_method": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 10}, "pyerrors.fits.Fit_result.gm": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 10}, "pyerrors.fits.least_squares": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 48, "bases": 0, "doc": 902}, "pyerrors.fits.total_least_squares": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 468}, "pyerrors.fits.fit_lin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 110}, "pyerrors.fits.qqplot": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 40, "bases": 0, "doc": 39}, "pyerrors.fits.residual_plot": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 40, "bases": 0, "doc": 45}, "pyerrors.fits.error_band": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 48}, "pyerrors.fits.ks_test": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 52}, "pyerrors.input": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 81}, "pyerrors.input.bdio": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.bdio.read_ADerrors": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 35, "bases": 0, "doc": 122}, "pyerrors.input.bdio.write_ADerrors": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 41, "bases": 0, "doc": 126}, "pyerrors.input.bdio.read_mesons": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 35, "bases": 0, "doc": 211}, "pyerrors.input.bdio.read_dSdm": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 35, "bases": 0, "doc": 191}, "pyerrors.input.dobs": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.dobs.create_pobs_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 62, "bases": 0, "doc": 186}, "pyerrors.input.dobs.write_pobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 85, "bases": 0, "doc": 214}, "pyerrors.input.dobs.read_pobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 164}, "pyerrors.input.dobs.import_dobs_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 33, "bases": 0, "doc": 184}, "pyerrors.input.dobs.read_dobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 207}, "pyerrors.input.dobs.create_dobs_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 82, "bases": 0, "doc": 229}, 
"pyerrors.input.dobs.write_dobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 99, "bases": 0, "doc": 252}, "pyerrors.input.hadrons": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.hadrons.read_meson_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 57, "bases": 0, "doc": 181}, "pyerrors.input.hadrons.extract_t0_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 73, "bases": 0, "doc": 157}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 45, "bases": 0, "doc": 106}, "pyerrors.input.hadrons.Npr_matrix": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 2, "doc": 1069}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 30}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 32, "bases": 0, "doc": 99}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 32, "bases": 0, "doc": 99}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 60, "bases": 0, "doc": 112}, "pyerrors.input.json": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.json.create_json_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 138}, "pyerrors.input.json.dump_to_json": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 49, "bases": 0, "doc": 174}, "pyerrors.input.json.import_json_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 33, "bases": 0, "doc": 168}, "pyerrors.input.json.load_json": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 42, "bases": 0, "doc": 188}, "pyerrors.input.json.dump_dict_to_json": {"qualname": 4, "fullname": 7, "annotation": 0, "default_value": 0, "signature": 63, "bases": 0, "doc": 184}, "pyerrors.input.json.load_json_dict": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 56, "bases": 0, "doc": 172}, "pyerrors.input.misc": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.misc.fit_t0": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 250}, "pyerrors.input.misc.read_pbp": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 75}, "pyerrors.input.openQCD": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.openQCD.read_rwms": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 48, "bases": 0, "doc": 271}, "pyerrors.input.openQCD.extract_t0": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 85, "bases": 0, "doc": 518}, "pyerrors.input.openQCD.extract_w0": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 85, "bases": 0, "doc": 520}, "pyerrors.input.openQCD.read_qtop": 
{"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 53, "bases": 0, "doc": 383}, "pyerrors.input.openQCD.read_gf_coupling": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 50, "bases": 0, "doc": 345}, "pyerrors.input.openQCD.qtop_projection": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 72}, "pyerrors.input.openQCD.read_qtop_sector": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 363}, "pyerrors.input.openQCD.read_ms5_xsf": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 47, "bases": 0, "doc": 308}, "pyerrors.input.pandas": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.pandas.to_sql": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 54, "bases": 0, "doc": 113}, "pyerrors.input.pandas.read_sql": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 105}, "pyerrors.input.pandas.dump_df": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 111}, "pyerrors.input.pandas.load_df": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 32, "bases": 0, "doc": 115}, "pyerrors.input.sfcf": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.sfcf.read_sfcf": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 139, "bases": 0, "doc": 421}, "pyerrors.input.utils": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.utils.sort_names": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 81}, "pyerrors.input.utils.check_idl": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 70}, "pyerrors.linalg": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.linalg.matmul": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 13, "bases": 0, "doc": 54}, "pyerrors.linalg.jack_matmul": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 13, "bases": 0, "doc": 58}, "pyerrors.linalg.einsum": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 52}, "pyerrors.linalg.inv": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 10}, "pyerrors.linalg.cholesky": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 9}, "pyerrors.linalg.det": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 8}, "pyerrors.linalg.eigh": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 20}, "pyerrors.linalg.eig": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 17}, "pyerrors.linalg.pinv": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 13}, "pyerrors.linalg.svd": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, 
"bases": 0, "doc": 13}, "pyerrors.misc": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.misc.print_config": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 7, "bases": 0, "doc": 12}, "pyerrors.misc.errorbar": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 65, "bases": 0, "doc": 69}, "pyerrors.misc.dump_object": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 69}, "pyerrors.misc.load_object": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 42}, "pyerrors.misc.pseudo_Obs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 105}, "pyerrors.misc.gen_correlated_data": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 42, "bases": 0, "doc": 127}, "pyerrors.mpm": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.mpm.matrix_pencil_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 165}, "pyerrors.obs": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 238}, "pyerrors.obs.Obs.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 62}, "pyerrors.obs.Obs.S_global": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 2, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.S_dict": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 1, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tau_exp_global": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 2, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tau_exp_dict": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 1, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N_sigma_global": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 2, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N_sigma_dict": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 1, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.names": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.shape": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.r_values": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.deltas": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.idl": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.ddvalue": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.reweighted": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, 
"pyerrors.obs.Obs.tag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.value": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.dvalue": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_names": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.cov_names": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.mc_names": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_content": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.covobs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.gamma_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 133}, "pyerrors.obs.Obs.gm": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 133}, "pyerrors.obs.Obs.details": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 34}, "pyerrors.obs.Obs.reweight": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 85}, "pyerrors.obs.Obs.is_zero_within_error": {"qualname": 5, "fullname": 7, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 50}, "pyerrors.obs.Obs.is_zero": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 35}, "pyerrors.obs.Obs.plot_tauint": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 34}, "pyerrors.obs.Obs.plot_rho": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 35}, "pyerrors.obs.Obs.plot_rep_dist": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 14}, "pyerrors.obs.Obs.plot_history": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 35}, "pyerrors.obs.Obs.plot_piechart": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 47}, "pyerrors.obs.Obs.dump": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 51, "bases": 0, "doc": 89}, "pyerrors.obs.Obs.export_jackknife": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 101}, "pyerrors.obs.Obs.sqrt": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.log": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.exp": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.sin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.cos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, 
"doc": 3}, "pyerrors.obs.Obs.tan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arcsin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arccos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arctan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.sinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.cosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arcsinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arccosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arctanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N_sigma": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.S": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_ddvalue": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_drho": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_dtauint": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_dvalue": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_n_dtauint": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_n_tauint": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_rho": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_tauint": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_windowsize": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tau_exp": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 9}, "pyerrors.obs.CObs.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 20, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.tag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.real": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.imag": {"qualname": 
2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.gamma_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 14}, "pyerrors.obs.CObs.is_zero": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 15}, "pyerrors.obs.CObs.conjugate": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.derived_observable": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 184}, "pyerrors.obs.reweight": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 99}, "pyerrors.obs.correlate": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 75}, "pyerrors.obs.covariance": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 48, "bases": 0, "doc": 374}, "pyerrors.obs.import_jackknife": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 61}, "pyerrors.obs.merge_obs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 13, "bases": 0, "doc": 56}, "pyerrors.obs.cov_Obs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 90}, "pyerrors.roots": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.roots.find_root": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 181}, "pyerrors.version": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}}, "length": 242, "save": true}, "index": {"qualname": {"root": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, 
"pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}}, "df": 54, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2, "d": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}}, "df": 2}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.misc.print_config": {"tf": 1}}, "df": 1}}}, "j": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}}, "df": 2}}, "v": {"docs": {"pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.covobs.Covobs": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.name": {"tf": 1}, "pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.covobs.Covobs.grad": {"tf": 1}, "pyerrors.obs.Obs.covobs": {"tf": 1}}, "df": 8}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 
1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 8}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.cholesky": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4}}, "v": {"docs": {"pyerrors.linalg.inv": {"tf": 1}}, "df": 1}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 4}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}}, "df": 2}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 3}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.dirac.identity": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}}, "df": 2}}}, "t": {"0": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}}, "df": 3}, "docs": {"pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}}, "df": 3}, "n": {"docs": {"pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}}, "df": 2}}, "u": {"docs": {"pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": 
{"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}, "o": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 2}}}}, "o": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}}, "df": 1}}}}}}}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 3}}}, "b": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.linalg.pinv": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": 
{"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}}, "df": 2}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 18}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.residual_plot": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"4": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}, "w": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}}, "df": 2}}}, "g": {"5": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"5": {"docs": {"pyerrors.dirac.gamma5": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 6, "x": {"docs": 
{"pyerrors.dirac.gammaX": {"tf": 1}}, "df": 1}, "y": {"docs": {"pyerrors.dirac.gammaY": {"tf": 1}}, "df": 1}, "z": {"docs": {"pyerrors.dirac.gammaZ": {"tf": 1}}, "df": 1}, "t": {"docs": {"pyerrors.dirac.gammaT": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}, "n": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.covobs.Covobs.grad": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}}}, "f": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 3}}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 5}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 5}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2}}}}}, "s": {"5": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "c": {"docs": {"pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}}, "df": 3, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}}, "df": 4}}, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, 
"df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.shape": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "q": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}}, "df": 2}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}}, "df": 3}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 5}}}}}, "f": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.linalg.svd": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}}, "df": 2}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, 
"df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 2}}}}}}}}, "e": {"docs": {"pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 11, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.linalg.eig": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}}}}}, "h": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "x": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 5, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}}}, "d": {"5": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}, "docs": {}, "df": 0}, 
"i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {"pyerrors.linalg.det": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.details": {"tf": 1}}, "df": 1}}}}}, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.deltas": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 6}}}, "s": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}}, "df": 5}}}, "f": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}}, "df": 2}}}}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.e_drho": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": 
{}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}}, "df": 2}}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 7}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}}, "df": 1}}}, "n": {"docs": {"pyerrors.correlators.Corr.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}}, "df": 7, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.name": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 5}}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1}}}}}}, "q": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": 
{"tf": 1}}, "df": 1}}}}}}}}, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}, "w": {"0": {"docs": {"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 1}}}}}}}}}}, "j": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 6}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1, "k": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 2}}}}}}}}}, "x": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.shape": {"tf": 1}, "pyerrors.obs.Obs.r_values": {"tf": 1}, "pyerrors.obs.Obs.deltas": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}, "pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}, "pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.covobs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 
1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 67, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 3}}}}}}, "fullname": {"root": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, 
"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}, "pyerrors.covobs": {"tf": 1}, "pyerrors.covobs.Covobs": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.name": {"tf": 1}, "pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.covobs.Covobs.grad": {"tf": 1}, "pyerrors.dirac": {"tf": 1}, "pyerrors.dirac.gammaX": {"tf": 1}, "pyerrors.dirac.gammaY": {"tf": 1}, "pyerrors.dirac.gammaZ": {"tf": 1}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.gamma5": {"tf": 1}, "pyerrors.dirac.identity": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json": {"tf": 1}, 
"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.shape": {"tf": 1}, "pyerrors.obs.Obs.r_values": {"tf": 1}, "pyerrors.obs.Obs.deltas": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}, "pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}, "pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.covobs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.sqrt": 
{"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}, "pyerrors.version": {"tf": 1}}, "df": 242}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 2}}}}, "o": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": 
{"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.pandas": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 5}}}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 3}}}, "b": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.linalg.pinv": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, 
"pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}}, "df": 54, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}}, "df": 55}}}, "e": {"docs": {"pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2, "d": {"docs": 
{"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}}, "df": 2}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.misc.print_config": {"tf": 1}}, "df": 1}}}, "j": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}}, "df": 2}}, "v": {"docs": {"pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.covobs": {"tf": 1}, "pyerrors.covobs.Covobs": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.name": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.value": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.cov": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.grad": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.covobs": {"tf": 1}}, "df": 9}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 8}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.cholesky": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4}}, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input": {"tf": 1}, "pyerrors.input.bdio": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, 
"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 52}}}, "v": {"docs": {"pyerrors.linalg.inv": {"tf": 1}}, "df": 1}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 4}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}}, "df": 2}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 3}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.dirac.identity": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}}, "df": 2}}}, "t": {"0": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, 
"pyerrors.input.openQCD.extract_t0": {"tf": 1}}, "df": 3}, "docs": {"pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}}, "df": 3}, "n": {"docs": {"pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}}, "df": 2}}, "u": {"docs": {"pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}, "o": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}}, "df": 2}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 18}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": 
{"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.residual_plot": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.roots": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 2}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"4": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}, "w": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}}, "df": 2}}}, "g": {"5": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"5": {"docs": {"pyerrors.dirac.gamma5": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 6, "x": {"docs": {"pyerrors.dirac.gammaX": {"tf": 1}}, "df": 1}, "y": {"docs": {"pyerrors.dirac.gammaY": {"tf": 1}}, "df": 1}, "z": {"docs": {"pyerrors.dirac.gammaZ": {"tf": 1}}, "df": 1}, "t": {"docs": {"pyerrors.dirac.gammaT": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}, "n": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.covobs.Covobs.grad": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}}}, "f": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 3}}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 5}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1, "s": {"docs": 
{"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 5}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.misc": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.misc": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 10}}}, "s": {"5": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "p": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.mpm": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2}}, "c": {"docs": {"pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}}, "df": 3, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}}, "df": 4}}, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.shape": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "q": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, 
"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}}, "df": 2}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}}, "df": 3}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 5}}}}}, "f": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.sfcf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 2}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.linalg.svd": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}}, "df": 2}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 2}}}}}}}}, "e": {"docs": {"pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 11, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.linalg.eig": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}}}}}, "h": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "s": 
{"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "x": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 5, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 9}}}}}}, "d": {"5": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}, "docs": {}, "df": 0}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": 
{"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {"pyerrors.linalg.det": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.details": {"tf": 1}}, "df": 1}}}}}, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.deltas": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 6}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.dirac": {"tf": 1}, "pyerrors.dirac.gammaX": {"tf": 1}, "pyerrors.dirac.gammaY": {"tf": 1}, "pyerrors.dirac.gammaZ": {"tf": 1}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.gamma5": {"tf": 1}, "pyerrors.dirac.identity": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 11}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}}, "df": 5}}}, "s": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 8}}}, "f": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}}, "df": 2}}}}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": 
{"pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.e_drho": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}}, "df": 2}}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 7, "s": {"docs": {"pyerrors.fits": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}}, "df": 12}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.linalg": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 11}}}}}}, "n": {"docs": {"pyerrors.correlators.Corr.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}}, "df": 7, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.name": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, 
"pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 5}}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.version": {"tf": 1}}, "df": 1}}}}}}}, "q": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 5}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}}}, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}, "w": {"0": {"docs": {"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 1}}}}}}}}}}, "j": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}}, "df": 7}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1, "k": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_jackknife": 
{"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 2}}}}}}}}}, "o": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 9}}}}}}, "b": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.S_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.S_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.shape": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.r_values": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.deltas": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.idl": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.ddvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweighted": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tag": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.value": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.cov_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.mc_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_content": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.covobs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.details": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.sqrt": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.log": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.exp": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.sin": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.cos": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tan": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arcsin": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arccos": {"tf": 1.4142135623730951}, 
"pyerrors.obs.Obs.arctan": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.sinh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.cosh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tanh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arcsinh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arccosh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arctanh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.S": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_drho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_rho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tau_exp": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 81, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}, "x": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.utils": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 3}}}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 3}}}}}}, "annotation": {"root": {"docs": {}, "df": 0}}, "default_value": {"root": {"0": {"docs": {"pyerrors.dirac.gammaX": {"tf": 5.291502622129181}, "pyerrors.dirac.gammaY": {"tf": 5.291502622129181}, "pyerrors.dirac.gammaZ": {"tf": 5.291502622129181}, "pyerrors.dirac.gammaT": {"tf": 5.291502622129181}, "pyerrors.dirac.gamma": {"tf": 10.583005244258363}, "pyerrors.dirac.gamma5": {"tf": 5.291502622129181}, "pyerrors.dirac.identity": {"tf": 5.291502622129181}, "pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 10}, "1": {"docs": {"pyerrors.dirac.gammaX": {"tf": 2}, "pyerrors.dirac.gammaY": {"tf": 2}, "pyerrors.dirac.gammaZ": {"tf": 2}, "pyerrors.dirac.gammaT": {"tf": 2}, "pyerrors.dirac.gamma": {"tf": 4}, "pyerrors.dirac.gamma5": {"tf": 2}, "pyerrors.dirac.identity": {"tf": 2}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 8}, "2": {"docs": 
{"pyerrors.obs.Obs.S_global": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.dirac.gammaX": {"tf": 2.23606797749979}, "pyerrors.dirac.gammaY": {"tf": 2.23606797749979}, "pyerrors.dirac.gammaZ": {"tf": 2.23606797749979}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 4.123105625617661}, "pyerrors.dirac.gamma5": {"tf": 2.23606797749979}, "pyerrors.dirac.identity": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}}, "df": 10, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.dirac.gammaX": {"tf": 1}, "pyerrors.dirac.gammaY": {"tf": 1}, "pyerrors.dirac.gammaZ": {"tf": 1}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.gamma5": {"tf": 1}, "pyerrors.dirac.identity": {"tf": 1}}, "df": 7}}}}}, "j": {"docs": {"pyerrors.dirac.gammaX": {"tf": 4}, "pyerrors.dirac.gammaY": {"tf": 4}, "pyerrors.dirac.gammaZ": {"tf": 4}, "pyerrors.dirac.gammaT": {"tf": 4}, "pyerrors.dirac.gamma": {"tf": 8}, "pyerrors.dirac.gamma5": {"tf": 4}, "pyerrors.dirac.identity": {"tf": 4}}, "df": 7}}}, "signature": {"root": {"0": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 16, "c": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "1": {"0": {"0": {"0": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}, "docs": {"pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 2, "/": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}, "2": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "x": {"6": {"4": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"3": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}}}}}}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, 
"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 11, "e": {"docs": {"pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 1}}, "2": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 3}, "3": {"9": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.dump": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.8284271247461903}, "pyerrors.misc.errorbar": {"tf": 2}, "pyerrors.obs.Obs.dump": {"tf": 2}}, "df": 35}, "docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}, "5": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 4}, "docs": {"pyerrors.correlators.Corr.__init__": {"tf": 5.744562646538029}, "pyerrors.correlators.Corr.gamma_method": {"tf": 4}, "pyerrors.correlators.Corr.gm": {"tf": 4}, "pyerrors.correlators.Corr.projected": {"tf": 5.830951894845301}, "pyerrors.correlators.Corr.item": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.plottable": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 3.1622776601683795}, 
"pyerrors.correlators.Corr.GEVP": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 6.324555320336759}, "pyerrors.correlators.Corr.Hankel": {"tf": 4.69041575982343}, "pyerrors.correlators.Corr.roll": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.reverse": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.thin": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr.correlate": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.reweight": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 4.69041575982343}, "pyerrors.correlators.Corr.deriv": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.m_eff": {"tf": 5.291502622129181}, "pyerrors.correlators.Corr.fit": {"tf": 6}, "pyerrors.correlators.Corr.plateau": {"tf": 6}, "pyerrors.correlators.Corr.set_prange": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.show": {"tf": 11.313708498984761}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.dump": {"tf": 5.477225575051661}, "pyerrors.correlators.Corr.print": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.sqrt": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.log": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.exp": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.sin": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.cos": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.tan": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.sinh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.cosh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.tanh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arcsin": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arccos": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arctan": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arcsinh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arccosh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arctanh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.prune": {"tf": 6.164414002968976}, "pyerrors.covobs.Covobs.__init__": {"tf": 5.656854249492381}, "pyerrors.covobs.Covobs.errsq": {"tf": 3.1622776601683795}, "pyerrors.dirac.epsilon_tensor": {"tf": 4.242640687119285}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 4.69041575982343}, "pyerrors.dirac.Grid_gamma": {"tf": 3.1622776601683795}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 4}, "pyerrors.fits.Fit_result.gm": {"tf": 4}, "pyerrors.fits.least_squares": {"tf": 6.324555320336759}, "pyerrors.fits.total_least_squares": {"tf": 5.656854249492381}, "pyerrors.fits.fit_lin": {"tf": 4.47213595499958}, "pyerrors.fits.qqplot": {"tf": 5.656854249492381}, "pyerrors.fits.residual_plot": {"tf": 5.656854249492381}, "pyerrors.fits.error_band": {"tf": 4.242640687119285}, "pyerrors.fits.ks_test": {"tf": 3.7416573867739413}, "pyerrors.input.bdio.read_ADerrors": {"tf": 5.0990195135927845}, "pyerrors.input.bdio.write_ADerrors": {"tf": 5.477225575051661}, "pyerrors.input.bdio.read_mesons": {"tf": 5.0990195135927845}, "pyerrors.input.bdio.read_dSdm": {"tf": 5.0990195135927845}, "pyerrors.input.dobs.create_pobs_string": {"tf": 7.14142842854285}, "pyerrors.input.dobs.write_pobs": {"tf": 8.426149773176359}, "pyerrors.input.dobs.read_pobs": {"tf": 5.830951894845301}, "pyerrors.input.dobs.import_dobs_string": {"tf": 5.0990195135927845}, "pyerrors.input.dobs.read_dobs": {"tf": 5.830951894845301}, 
"pyerrors.input.dobs.create_dobs_string": {"tf": 8.12403840463596}, "pyerrors.input.dobs.write_dobs": {"tf": 8.94427190999916}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 6.6332495807108}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 7.54983443527075}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 6}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 5.0990195135927845}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 5.0990195135927845}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 6.855654600401044}, "pyerrors.input.json.create_json_string": {"tf": 5.291502622129181}, "pyerrors.input.json.dump_to_json": {"tf": 6.324555320336759}, "pyerrors.input.json.import_json_string": {"tf": 5.0990195135927845}, "pyerrors.input.json.load_json": {"tf": 5.830951894845301}, "pyerrors.input.json.dump_dict_to_json": {"tf": 7.0710678118654755}, "pyerrors.input.json.load_json_dict": {"tf": 6.6332495807108}, "pyerrors.input.misc.fit_t0": {"tf": 5.656854249492381}, "pyerrors.input.misc.read_pbp": {"tf": 4.47213595499958}, "pyerrors.input.openQCD.read_rwms": {"tf": 6.164414002968976}, "pyerrors.input.openQCD.extract_t0": {"tf": 8.18535277187245}, "pyerrors.input.openQCD.extract_w0": {"tf": 8.18535277187245}, "pyerrors.input.openQCD.read_qtop": {"tf": 6.48074069840786}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 6.324555320336759}, "pyerrors.input.openQCD.qtop_projection": {"tf": 4.242640687119285}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 5.656854249492381}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 6.164414002968976}, "pyerrors.input.pandas.to_sql": {"tf": 6.48074069840786}, "pyerrors.input.pandas.read_sql": {"tf": 5.291502622129181}, "pyerrors.input.pandas.dump_df": {"tf": 4.69041575982343}, "pyerrors.input.pandas.load_df": {"tf": 5.0990195135927845}, "pyerrors.input.sfcf.read_sfcf": {"tf": 10.44030650891055}, "pyerrors.input.utils.sort_names": {"tf": 3.1622776601683795}, "pyerrors.input.utils.check_idl": {"tf": 3.7416573867739413}, "pyerrors.linalg.matmul": {"tf": 3.4641016151377544}, "pyerrors.linalg.jack_matmul": {"tf": 3.4641016151377544}, "pyerrors.linalg.einsum": {"tf": 4}, "pyerrors.linalg.inv": {"tf": 3.1622776601683795}, "pyerrors.linalg.cholesky": {"tf": 3.1622776601683795}, "pyerrors.linalg.det": {"tf": 3.1622776601683795}, "pyerrors.linalg.eigh": {"tf": 4}, "pyerrors.linalg.eig": {"tf": 4}, "pyerrors.linalg.pinv": {"tf": 4}, "pyerrors.linalg.svd": {"tf": 4}, "pyerrors.misc.print_config": {"tf": 2.6457513110645907}, "pyerrors.misc.errorbar": {"tf": 6.708203932499369}, "pyerrors.misc.dump_object": {"tf": 4.47213595499958}, "pyerrors.misc.load_object": {"tf": 3.1622776601683795}, "pyerrors.misc.pseudo_Obs": {"tf": 5.0990195135927845}, "pyerrors.misc.gen_correlated_data": {"tf": 5.830951894845301}, "pyerrors.mpm.matrix_pencil_method": {"tf": 5.656854249492381}, "pyerrors.obs.Obs.__init__": {"tf": 5.0990195135927845}, "pyerrors.obs.Obs.gamma_method": {"tf": 4}, "pyerrors.obs.Obs.gm": {"tf": 4}, "pyerrors.obs.Obs.details": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.reweight": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.is_zero": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_tauint": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_rho": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.plot_history": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_piechart": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.dump": 
{"tf": 6.324555320336759}, "pyerrors.obs.Obs.export_jackknife": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.sqrt": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.log": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.exp": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.sin": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.cos": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.tan": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arcsin": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arccos": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arctan": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.sinh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.cosh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.tanh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arcsinh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arccosh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arctanh": {"tf": 3.1622776601683795}, "pyerrors.obs.CObs.__init__": {"tf": 4}, "pyerrors.obs.CObs.gamma_method": {"tf": 4}, "pyerrors.obs.CObs.is_zero": {"tf": 3.1622776601683795}, "pyerrors.obs.CObs.conjugate": {"tf": 3.1622776601683795}, "pyerrors.obs.derived_observable": {"tf": 5.291502622129181}, "pyerrors.obs.reweight": {"tf": 4.47213595499958}, "pyerrors.obs.correlate": {"tf": 3.7416573867739413}, "pyerrors.obs.covariance": {"tf": 6.324555320336759}, "pyerrors.obs.import_jackknife": {"tf": 4.69041575982343}, "pyerrors.obs.merge_obs": {"tf": 3.1622776601683795}, "pyerrors.obs.cov_Obs": {"tf": 5.0990195135927845}, "pyerrors.roots.find_root": {"tf": 5.291502622129181}}, "df": 157, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 2, "t": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 2}}}}}}}, "t": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1, "r": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 4}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}}}}}}}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 
1}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}}}}}}, "f": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 2}, "b": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 3, "n": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}}}}, "d": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6, "l": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 9}}, "f": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, 
"pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 20}}, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.print": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}, "y": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 32}}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 2}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": 
{"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 78}}, "p": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 
1, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}}}}}, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2}}}}, "a": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 4}}, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 3}}}}}}, "q": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 
0, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "k": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 3, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 43}}}}}, "e": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}, "v": {"1": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": 
{"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 3}}}}}}, "a": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 3}}}}}, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}, "l": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}}}}}}, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 2}}}, "l": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}, "r": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}, "l": {"docs": {"pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 1}}}}, "f": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, 
"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 19}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 6}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 5}}}}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": 
{"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 10}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}, "j": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 3, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 3}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 1}}}}}, "t": {"0": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 3, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}, "2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 17}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}, "u": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, 
"i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}}}}}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}, "x": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}}, "df": 2, "f": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.misc.dump_object": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 10, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": 
{"tf": 1}}, "df": 4}}}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 6}}}}}, "l": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 1}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}}, "df": 3}}}}}}, "t": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "/": {"3": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}}}}}}}}}}}}}}}}}}}}}}}, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}, "f": {"2": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 2}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}}, "z": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, 
"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 13}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}}}, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}, "s": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}}}}, "a": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 4}}}, "v": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}, "x": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "x": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}}, "df": 11, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "v": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, 
"pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2}}}, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}, "y": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}}, "df": 7, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}, "b": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}, "i": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}}}}}, "q": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}}, "df": 1}}}, "c": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}}}, "bases": {"root": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 
0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}, "doc": {"root": {"0": {"0": {"0": {"0": {"0": {"0": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "6": {"9": {"7": {"9": {"5": {"8": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "1": {"2": {"8": {"9": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"1": {"8": {"0": {"6": {"4": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 3}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "3": {"4": {"4": {"5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "4": {"5": {"8": {"5": {"6": {"5": {"0": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "5": {"4": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "6": {"4": {"2": {"3": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"5": {"6": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 
6.164414002968976}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr.prune": {"tf": 2.6457513110645907}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 2}, "pyerrors.obs.Obs.gm": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 2}}, "df": 28, "+": {"1": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "2": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"0": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}, "c": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "d": {"docs": {"pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 1}}, "1": {"0": {"0": {"0": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 3}, "3": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "4": {"7": {"2": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"7": {"5": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "1": {"9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "2": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}, "3": {"4": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "4": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "5": {"0": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "6": {"0": {"7": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 2}, "7": {"6": {"docs": {"pyerrors": {"tf": 
1}}, "df": 1}, "docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "9": {"0": {"6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "8": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "9": {"0": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 23, "}": {"docs": {}, "df": 0, "^": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "+": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "d": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}, "*": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}, "/": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 1}}}, "2": {"0": {"0": {"4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "1": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "2": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "4": {"1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": 
{"tf": 2}}, "df": 1}, "1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "3": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "7": {"2": {"1": {"8": {"6": {"6": {"7": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"0": {"9": {"7": {"7": {"6": {"2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 2}}, "df": 1}, "9": {"9": {"0": {"9": {"7": {"0": {"3": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {"pyerrors": {"tf": 5}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 18, "x": {"2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "f": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "d": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 5}, "*": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "3": {"0": {"6": {"7": {"5": {"2": {"0": {"1": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}, "1": {"4": {"9": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"2": {"7": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "docs": {}, "df": 0}, "3": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "3": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "4": {"9": {"7": {"6": {"8": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"2": {"docs": {"pyerrors": {"tf": 1}}, 
"df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "8": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "9": {"docs": {"pyerrors": {"tf": 7.745966692414834}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}, "docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 9, "a": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "4": {"0": {"3": {"2": {"0": {"9": {"8": {"3": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "9": {"5": {"9": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 7, "x": {"4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}, "5": {"0": {"0": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}, "1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "1": {"5": {"6": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "9": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "2": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"8": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "3": {"8": {"0": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "4": {"8": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "6": {"7": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "4": {"6": {"5": {"9": {"8": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "8": {"3": {"4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, 
"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "6": {"4": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "5": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "6": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}}, "df": 2}, "7": {"0": {"0": {"0": {"0": {"0": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "1": {"4": {"2": {"2": {"9": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"0": {"4": {"6": {"6": {"5": {"8": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "4": {"5": {"7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"3": {"1": {"0": {"1": {"0": {"2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"0": {"7": {"7": {"5": {"2": {"4": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"7": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "8": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "1": {"4": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "2": {"4": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "4": {"5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 6}, "9": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "3": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "4": {"7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "5": {"9": {"3": {"0": {"3": {"5": {"7": {"8": {"5": {"1": {"6": {"0": {"9": {"3": {"6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "4": {"docs": 
{"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"6": {"8": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"3": {"1": {"9": {"8": {"8": {"1": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"1": {"0": {"0": {"7": {"1": {"2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"8": {"3": {"6": {"5": {"4": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 64.02343321003646}, "pyerrors.correlators": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 3}, "pyerrors.correlators.Corr.__init__": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr.tag": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.content": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.T": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prange": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.reweighted": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gm": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.item": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.plottable": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 10.535653752852738}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 4.358898943540674}, "pyerrors.correlators.Corr.Hankel": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.roll": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.reverse": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.correlate": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.reweight": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.deriv": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.m_eff": {"tf": 5.830951894845301}, "pyerrors.correlators.Corr.fit": {"tf": 5.291502622129181}, "pyerrors.correlators.Corr.plateau": {"tf": 5}, "pyerrors.correlators.Corr.set_prange": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 9}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 3.872983346207417}, "pyerrors.correlators.Corr.dump": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr.print": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.sqrt": {"tf": 1.7320508075688772}, 
"pyerrors.correlators.Corr.log": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.exp": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.sin": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.cos": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.tan": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.sinh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.cosh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.tanh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arcsin": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arccos": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arctan": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arccosh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arctanh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.real": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.imag": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 6.855654600401044}, "pyerrors.correlators.Corr.N": {"tf": 1.7320508075688772}, "pyerrors.covobs": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 5.916079783099616}, "pyerrors.covobs.Covobs.name": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.value": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.cov": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.grad": {"tf": 1.7320508075688772}, "pyerrors.dirac": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaX": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaY": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaZ": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaT": {"tf": 1.7320508075688772}, "pyerrors.dirac.gamma": {"tf": 1.7320508075688772}, "pyerrors.dirac.gamma5": {"tf": 1.7320508075688772}, "pyerrors.dirac.identity": {"tf": 1.7320508075688772}, "pyerrors.dirac.epsilon_tensor": {"tf": 4.123105625617661}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 4.123105625617661}, "pyerrors.dirac.Grid_gamma": {"tf": 1.7320508075688772}, "pyerrors.fits": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result": {"tf": 5.656854249492381}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gm": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 17.86057109949175}, "pyerrors.fits.total_least_squares": {"tf": 15.427248620541512}, "pyerrors.fits.fit_lin": {"tf": 5.916079783099616}, "pyerrors.fits.qqplot": {"tf": 3.605551275463989}, "pyerrors.fits.residual_plot": {"tf": 3.872983346207417}, "pyerrors.fits.error_band": {"tf": 3.7416573867739413}, "pyerrors.fits.ks_test": {"tf": 5}, "pyerrors.input": {"tf": 4.69041575982343}, "pyerrors.input.bdio": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_ADerrors": {"tf": 6.164414002968976}, "pyerrors.input.bdio.write_ADerrors": {"tf": 6.164414002968976}, "pyerrors.input.bdio.read_mesons": {"tf": 8.12403840463596}, "pyerrors.input.bdio.read_dSdm": {"tf": 7.416198487095663}, "pyerrors.input.dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 7.745966692414834}, "pyerrors.input.dobs.write_pobs": {"tf": 8.426149773176359}, "pyerrors.input.dobs.read_pobs": {"tf": 7.280109889280518}, "pyerrors.input.dobs.import_dobs_string": {"tf": 7.280109889280518}, "pyerrors.input.dobs.read_dobs": {"tf": 
7.745966692414834}, "pyerrors.input.dobs.create_dobs_string": {"tf": 8.06225774829855}, "pyerrors.input.dobs.write_dobs": {"tf": 8.774964387392123}, "pyerrors.input.hadrons": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 7.3484692283495345}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 6.855654600401044}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 6.557438524302}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 20.904544960366874}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 6.324555320336759}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 6.324555320336759}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 6.782329983125268}, "pyerrors.input.json": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 6.082762530298219}, "pyerrors.input.json.dump_to_json": {"tf": 7}, "pyerrors.input.json.import_json_string": {"tf": 7.681145747868608}, "pyerrors.input.json.load_json": {"tf": 8.06225774829855}, "pyerrors.input.json.dump_dict_to_json": {"tf": 7.3484692283495345}, "pyerrors.input.json.load_json_dict": {"tf": 7.937253933193772}, "pyerrors.input.misc": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 7.14142842854285}, "pyerrors.input.misc.read_pbp": {"tf": 5.477225575051661}, "pyerrors.input.openQCD": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_rwms": {"tf": 8.54400374531753}, "pyerrors.input.openQCD.extract_t0": {"tf": 11}, "pyerrors.input.openQCD.extract_w0": {"tf": 11}, "pyerrors.input.openQCD.read_qtop": {"tf": 10.246950765959598}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 8.888194417315589}, "pyerrors.input.openQCD.qtop_projection": {"tf": 5.656854249492381}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 9.797958971132712}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 10.392304845413264}, "pyerrors.input.pandas": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.to_sql": {"tf": 7}, "pyerrors.input.pandas.read_sql": {"tf": 6.244997998398398}, "pyerrors.input.pandas.dump_df": {"tf": 6.324555320336759}, "pyerrors.input.pandas.load_df": {"tf": 6.244997998398398}, "pyerrors.input.sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 11.090536506409418}, "pyerrors.input.utils": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 5.385164807134504}, "pyerrors.input.utils.check_idl": {"tf": 5.385164807134504}, "pyerrors.linalg": {"tf": 1.7320508075688772}, "pyerrors.linalg.matmul": {"tf": 4.58257569495584}, "pyerrors.linalg.jack_matmul": {"tf": 4.47213595499958}, "pyerrors.linalg.einsum": {"tf": 4.47213595499958}, "pyerrors.linalg.inv": {"tf": 1.7320508075688772}, "pyerrors.linalg.cholesky": {"tf": 1.7320508075688772}, "pyerrors.linalg.det": {"tf": 1.7320508075688772}, "pyerrors.linalg.eigh": {"tf": 1.7320508075688772}, "pyerrors.linalg.eig": {"tf": 1.7320508075688772}, "pyerrors.linalg.pinv": {"tf": 1.7320508075688772}, "pyerrors.linalg.svd": {"tf": 1.7320508075688772}, "pyerrors.misc": {"tf": 1.7320508075688772}, "pyerrors.misc.print_config": {"tf": 1.7320508075688772}, "pyerrors.misc.errorbar": {"tf": 5.0990195135927845}, "pyerrors.misc.dump_object": {"tf": 5.916079783099616}, "pyerrors.misc.load_object": {"tf": 5}, "pyerrors.misc.pseudo_Obs": {"tf": 6.557438524302}, "pyerrors.misc.gen_correlated_data": {"tf": 7.0710678118654755}, "pyerrors.mpm": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 6.324555320336759}, "pyerrors.obs": 
{"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 6.928203230275509}, "pyerrors.obs.Obs.__init__": {"tf": 4.898979485566356}, "pyerrors.obs.Obs.S_global": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.S_dict": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.shape": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.r_values": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.deltas": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.idl": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.ddvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweighted": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tag": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.value": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.dvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.cov_names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.mc_names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_content": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.covobs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 5.744562646538029}, "pyerrors.obs.Obs.gm": {"tf": 5.744562646538029}, "pyerrors.obs.Obs.details": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.reweight": {"tf": 4.58257569495584}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 4.47213595499958}, "pyerrors.obs.Obs.is_zero": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.plot_tauint": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.plot_rho": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_history": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.plot_piechart": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.dump": {"tf": 5.744562646538029}, "pyerrors.obs.Obs.export_jackknife": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.sqrt": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.log": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.exp": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.sin": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.cos": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tan": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arcsin": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arccos": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arctan": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.sinh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.cosh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tanh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arcsinh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arccosh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arctanh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N_sigma": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.S": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_drho": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_rho": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_tauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_windowsize": {"tf": 
1.7320508075688772}, "pyerrors.obs.Obs.tau_exp": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.__init__": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.tag": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.real": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.imag": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.is_zero": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.conjugate": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 6.4031242374328485}, "pyerrors.obs.reweight": {"tf": 5.196152422706632}, "pyerrors.obs.correlate": {"tf": 4.898979485566356}, "pyerrors.obs.covariance": {"tf": 6.6332495807108}, "pyerrors.obs.import_jackknife": {"tf": 4.47213595499958}, "pyerrors.obs.merge_obs": {"tf": 4.123105625617661}, "pyerrors.obs.cov_Obs": {"tf": 5.385164807134504}, "pyerrors.roots": {"tf": 1.7320508075688772}, "pyerrors.roots.find_root": {"tf": 10.488088481701515}, "pyerrors.version": {"tf": 1.7320508075688772}}, "df": 242, "w": {"0": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}}, "df": 2, "/": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 1}}}, "docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 3, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 15}}, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 10}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 11}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": 
{"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 39}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}, "o": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 6}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, 
"pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2}}, "df": 37, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 9}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "/": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 12}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 5}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "f": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}}}}}}, "e": {"docs": 
{"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 3, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}}, "df": 3}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 6}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 12}}}, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "t": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": 
{"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}, "l": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}, "f": {"2": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "i": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 11, "s": {"docs": {"pyerrors": {"tf": 8.12403840463596}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 3.4641016151377544}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_dobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.4641016151377544}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 3}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_t0": {"tf": 3.605551275463989}, "pyerrors.input.openQCD.extract_w0": {"tf": 3.605551275463989}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 
1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 3}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 62}, "t": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 22, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 8, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}, "e": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}, "n": {"docs": {"pyerrors": {"tf": 8.366600265340756}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, 
"pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.3166247903554}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.23606797749979}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 49, "t": {"1": {"6": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": 
{"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 37, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}}}}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}}}, "f": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}, 
"g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 2}}}}}, "o": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 10}, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 1}}, "v": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}}, "df": 3}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 3}}, "df": 1, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.dobs.create_pobs_string": 
{"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 8}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2}, "pyerrors.input.dobs.read_dobs": {"tf": 2}}, "df": 3}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 4, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 3}}}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}}}}}}, "x": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 4, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}}}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": 
{"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 13}}}}}}}}}, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 15}}}, "c": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}, "d": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}, 
"i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}}}, "f": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 2.23606797749979}, "pyerrors.input.json.load_json": {"tf": 2.6457513110645907}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 3}, "pyerrors.input.openQCD.extract_w0": {"tf": 3}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 58}, "m": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 
0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 11, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 6}}, "s": {"docs": {"pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 2}}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 3}}}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 5}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "d": {"0": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, 
"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 9, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "r": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3}}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}}, "df": 15, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 2}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}, "/": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 3}}}}}}, "o": 
{"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "\\": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "j": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}, "^": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "|": {"docs": {}, "df": 0, "^": {"2": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "docs": {}, "df": 0}}}}, "}": {"docs": {}, "df": 0, "|": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 2}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 4, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 6.928203230275509}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 2}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 4}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 5, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.7320508075688772}}, 
"df": 3}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 8, "s": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, 
"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 99}}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "h": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 2}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2}, "pyerrors.input.bdio.read_mesons": {"tf": 2}, "pyerrors.input.bdio.read_dSdm": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, 
"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 26}}, "d": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1.7320508075688772}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}}, "df": 1}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 4.123105625617661}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 4}}}}}}}, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": 
{"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 11, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}}}}, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 8, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2}}, "s": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1}}}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2}}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.7320508075688772}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 2}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": 
{}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 10}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 3}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4}}}}}}}}, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": 
{"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 15, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}}, "df": 4}}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 3}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}}}}}}}}}, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "p": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "^": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "n": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}}, "df": 1}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 5.477225575051661}, "pyerrors.correlators.Corr": {"tf": 1}}, "df": 2, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 6, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, 
"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}}, "df": 4}}}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 3}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 4}}}}}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}}, "n": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 5}}}}, "s": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 
1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 5}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 5}, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 3}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 5}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3}}}}}}, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 3}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 2}}, "df": 1}}}}}}}}}}}, "b": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}}, "df": 1}}}, "a": {"docs": {"pyerrors": {"tf": 8.426149773176359}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 4.69041575982343}, "pyerrors.fits.total_least_squares": {"tf": 3.3166247903554}, "pyerrors.fits.fit_lin": {"tf": 1.7320508075688772}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, 
"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 2}, "pyerrors.input.bdio.read_dSdm": {"tf": 2}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3}, "pyerrors.input.json.create_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.6457513110645907}, "pyerrors.input.pandas.dump_df": {"tf": 2.23606797749979}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.23606797749979}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 75, "n": {"docs": {"pyerrors": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, 
"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.605551275463989}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 32, "d": {"docs": {"pyerrors": {"tf": 7.211102550927978}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 3}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, 
"pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 65}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 10}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 3}, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 3}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 4}}}}, "n": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 5}}, "r": {"docs": {}, "df": 0, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, ":": {"1": {"0": {"0": {"9": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"0": {"5": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"0": {"9": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"0": {"0": {"4": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 5.5677643628300215}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, 
"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 57}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 6.082762530298219}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, 
"pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 13, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 9}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}}, "c": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4}}}}}}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 5, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}}, "df": 7, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9}}}}}}}}}, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 5}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": 
{}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.23606797749979}}, "df": 7, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 7}}}}}}}}, "s": {"docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 21, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 11}, "s": {"docs": {"pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}}, "df": 4}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "o": {"docs": {}, "df": 0, "c": 
{"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}}}}, "l": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1, "^": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "l": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 35, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 3}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, 
"pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 3}, "d": {"docs": {"pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 2}}}, "y": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}}, "df": 6}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}}}, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}}, "df": 2}}}}, "i": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 
1}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 4}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "g": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}, "o": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}, "f": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 5}}}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 20, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 3}}}}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 1}}}, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, 
"df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 12}}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 6}}}}}}, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 4}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": 
{"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 3}}}}}}}, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.errorbar": {"tf": 1.7320508075688772}}, "df": 1}}}, "[": {"0": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.7320508075688772}}, "df": 1}, "1": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "^": {"2": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "/": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 3.7416573867739413}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 3, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 7.0710678118654755}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 2}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 
1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.7320508075688772}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 3}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1.7320508075688772}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 2}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.6457513110645907}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 68, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}, "m": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 7, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 
1.4142135623730951}}, "df": 1, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1.7320508075688772}}, "df": 16, "s": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 2}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}}, "df": 2}}}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 3}}, "r": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 5}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, 
"pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.7320508075688772}}, "df": 37}, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1, "{": {"1": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "{": {"2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "}": {"docs": {}, "df": 0, "+": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}}, "docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 2.6457513110645907}, "pyerrors.fits.Fit_result": {"tf": 2}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 
1}, "pyerrors.fits.least_squares": {"tf": 3.7416573867739413}, "pyerrors.fits.total_least_squares": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 2}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}}, "df": 18, "s": {"docs": {"pyerrors": {"tf": 3.872983346207417}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 7}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 20}}}, "x": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 8}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 
2}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 4.358898943540674}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 2}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 2.23606797749979}, "pyerrors.input.pandas.load_df": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 2}, "pyerrors.misc.load_object": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 2.23606797749979}}, "df": 40, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 3}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.23606797749979}}, "df": 15, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 5}}}}, 
"n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "l": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 4}}}}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}}, "df": 3, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}}, "df": 3}}}}}, "l": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, 
"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 25}}, "l": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2}}}}}, "^": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 3}, "pyerrors.fits.total_least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 2}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 6, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.fit": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 2}}, "df": 16, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 7}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 1}}}}}}}}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, 
"pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 11, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 17, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 5}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}, "w": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}}, "df": 6, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": 
{"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 10}}}}, "f": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}}, "df": 2}}}, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 14, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 2, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.23606797749979}}, "df": 15, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 4}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": 
{"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "x": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 6, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 2}}}}, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 2, "/": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 2}}}}, "p": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 6, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 10, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 10}}, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, 
"df": 3}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 4}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2}}}}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 12, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 10}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 
1}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2.8284271247461903}}, "df": 1, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}}, "df": 2}}}}, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1}, "s": {"docs": {"pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 5}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.linalg.eigh": {"tf": 1}}, "df": 4}}}}}}}}}, "h": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}}, "df": 2}}, "t": {"docs": {}, "df": 0, "h": 
{"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 9}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 8, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"1": {"docs": {"pyerrors": {"tf": 3.4641016151377544}}, "df": 1, "|": {"docs": {}, "df": 0, "r": {"0": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "2": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 5.5677643628300215}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 30, "s": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 6, "/": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": 
{"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}}}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 4, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 2}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 2}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 9}}}, "y": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}}, "df": 9}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 4}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 6, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, 
"df": 3}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 2}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 10, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 2}}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, 
"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 16}}, "r": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "q": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}, "g": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2}, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.fits.least_squares": {"tf": 2}}, "df": 1}}, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 5}}}}, "t": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}, "c": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 3}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 
1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 12, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 4, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 4}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}}, "df": 6}, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.fits.residual_plot": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input": {"tf": 1}}, "df": 2}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "/": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": 
{}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "/": {"1": {"6": {"0": {"3": {"7": {"5": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 4}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}}, "df": 2}}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 2}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 4}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": null}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 
2}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2.449489742783178}}, "df": 1}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 26}}}, "s": {"docs": {"pyerrors": {"tf": 5}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 14}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 3}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 2}}}, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, 
"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 8, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 7}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 6, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.utils.check_idl": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}}, "df": 20}}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 10}}}}, "j": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": 
{}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 6.6332495807108}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 25, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}}, "df": 3, "d": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 5}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, 
"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.correlate": {"tf": 2}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 25, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 6}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.prune": {"tf": 2.23606797749979}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 2.449489742783178}}, "df": 6, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 2}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 6}}}, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, 
"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.linalg.inv": {"tf": 1}}, "df": 6}}, "v": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.covobs.Covobs.__init__": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 4, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 4}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 4}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 2.449489742783178}, "pyerrors.obs.cov_Obs": {"tf": 2}}, "df": 6}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 3}}}}}, "s": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 1}, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 2}, "/": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": 
{"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 5}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}}, "df": 4}}}}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}}, "df": 3}}, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}}, "l": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "l": 
{"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "n": {"docs": {"pyerrors": {"tf": 5.744562646538029}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.6457513110645907}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 31, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 12}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "o": {"docs": {}, "df": 
0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 4, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 5}}}}}, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}}, "df": 5}}}, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}, "p": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}, "t": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 5}}}, "y": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}}, "s": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}}, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}}}, "o": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 8, "f": {"docs": {"pyerrors": {"tf": 10.44030650891055}, "pyerrors.correlators.Corr": {"tf": 2}, "pyerrors.correlators.Corr.__init__": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 2}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.dump": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 2.6457513110645907}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 4.123105625617661}, "pyerrors.fits.total_least_squares": {"tf": 3.1622776601683795}, "pyerrors.fits.fit_lin": {"tf": 2.449489742783178}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 
2.449489742783178}, "pyerrors.input.dobs.write_pobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.read_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_dobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2.8284271247461903}, "pyerrors.input.dobs.write_dobs": {"tf": 2.8284271247461903}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2.6457513110645907}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 5.0990195135927845}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 2.6457513110645907}, "pyerrors.input.json.dump_to_json": {"tf": 2.6457513110645907}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.8284271247461903}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 3}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 3.3166247903554}, "pyerrors.input.openQCD.extract_w0": {"tf": 3.3166247903554}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.23606797749979}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 4}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 2}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1.4142135623730951}, "pyerrors.linalg.eig": {"tf": 1.4142135623730951}, "pyerrors.linalg.pinv": {"tf": 1.4142135623730951}, "pyerrors.linalg.svd": {"tf": 1.4142135623730951}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.7320508075688772}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2.6457513110645907}, "pyerrors.obs.Obs": {"tf": 2.8284271247461903}, "pyerrors.obs.Obs.__init__": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2}, "pyerrors.obs.derived_observable": {"tf": 
2.449489742783178}, "pyerrors.obs.reweight": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 3.3166247903554}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 107, "f": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {"pyerrors": {"tf": 5.291502622129181}, "pyerrors.correlators.Corr.plottable": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 35, "e": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, 
"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 21, "s": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3}}, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}}, "df": 27}}, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 1}}, "df": 3}}}}}, "n": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, 
"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 6}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 20}}}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "b": {"docs": {}, "df": 0, "s": {"1": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}, "2": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}, "3": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 9.591663046625438}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.fit_lin": {"tf": 2.23606797749979}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, 
"pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 2}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2.449489742783178}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.correlate": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 2}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 2.23606797749979}}, "df": 70, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}, 
"pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 24, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 2.449489742783178}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 20}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "[": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}}, "df": 3}}, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}, "j": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.misc.dump_object": {"tf": 1.7320508075688772}, "pyerrors.misc.load_object": {"tf": 1.7320508075688772}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 21, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": 
{"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 10}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {"pyerrors": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2}, "pyerrors.input.dobs.read_dobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 2}}, "df": 44, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, 
"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 4, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "w": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 7, "w": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 5}}}}}}}}, "d": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, 
"pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 23, "s": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "r": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}, "c": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3}}}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "w": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 2}}, "m": {"docs": {"pyerrors": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.449489742783178}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}}, "df": 3, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors": {"tf": 4.795831523312719}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 2.8284271247461903}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 3}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}}, "df": 22}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 
1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}}, "df": 9}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}}}}}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "x": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 2, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors.misc.errorbar": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 2}}}}}, "d": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "k": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 3}}}}, "n": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "y": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 2, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "y": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 8}, "j": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}}}, "x": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}}, "df": 3}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 6}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}}, "df": 2}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": 
{}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3, "a": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 4.795831523312719}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 21, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 4}}}}, "a": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 4}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2.8284271247461903}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}}, "df": 3}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}}, "df": 8, "s": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 4}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, 
"pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 6, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 9}}}}}}}}}, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}}, "df": 1}}}}}, "y": {"docs": {"pyerrors": {"tf": 7.681145747868608}}, "df": 1}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 3}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}}, "df": 1}}}}}}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 2}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.7320508075688772}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}}}}, "d": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}}}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": 
{"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}, "c": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "m": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "s": {"1": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5}}, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 2.23606797749979}}, "df": 3, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5, "a": {"docs": {"pyerrors": {"tf": 5}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.read_dobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.1622776601683795}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 2.8284271247461903}, "pyerrors.input.misc.fit_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.8284271247461903}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 36, "t": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, 
"e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "f": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1.7320508075688772}}, "df": 4}}}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 2}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 3}}}}}, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 5}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, 
"pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}}, "df": 19, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}, "y": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 7}}}}}}, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "[": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}}, "df": 7}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}, "p": {"docs": {}, "df": 
0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}}}}, "k": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 2, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}}, "df": 2}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3}}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 4}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 3}}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, 
"e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 2}}, "s": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 9}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "s": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 7}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}}, "df": 2}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}}, "df": 4}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 2}}}}}}}}}, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, 
"df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 8}}}, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 5}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}}}}}, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 4, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 5}}}, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 17}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}}, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 2}, "pyerrors.input.bdio.read_dSdm": {"tf": 2}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 2}, "pyerrors.obs.Obs.gm": {"tf": 2}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 39, "s": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}}, "df": 2}, "c": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.print_config": {"tf": 1}}, "df": 1}}}}}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, 
"df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}}}}}, "o": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 5, "w": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 7}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 5}}, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}, "f": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 2}}, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 2}}}}}}}}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}, "t": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 3.1622776601683795}}, "df": 1}}}, "r": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 5}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "s": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}}, "df": 1}}}, "f": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 2}, "b": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}}, "b": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 10}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}}}, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "{": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "a": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {"pyerrors": {"tf": 6.244997998398398}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 3}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, 
"pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.23606797749979}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.6457513110645907}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.23606797749979}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 2}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 64, "t": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 11}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "a": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": 
{}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2}}, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 2}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}}}, "y": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 25, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}}, "df": 1}}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 8}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "d": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 
1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}}, "df": 4}}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 3.1622776601683795}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.6457513110645907}}, "df": 5}}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 3}}, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 40}, "k": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": 
{}, "df": 0, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}}}}}}}}}, "x": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}, "i": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "g": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}, "b": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_mesons": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.6457513110645907}}, "df": 4}}}, "b": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}, "t": {"0": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 7, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1}}}}, "/": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}, "2": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}, "docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.prune": {"tf": 4.47213595499958}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 12, "h": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1, "e": {"docs": {"pyerrors": {"tf": 16.492422502470642}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 3}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1.7320508075688772}, 
"pyerrors.correlators.Corr.gm": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 2}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 2}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.reweight": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 2}, "pyerrors.correlators.Corr.deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.fit": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.plateau": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.set_prange": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 4.795831523312719}, "pyerrors.covobs.Covobs.__init__": {"tf": 2.23606797749979}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.7320508075688772}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 5.656854249492381}, "pyerrors.fits.total_least_squares": {"tf": 3.7416573867739413}, "pyerrors.fits.fit_lin": {"tf": 2.449489742783178}, "pyerrors.fits.qqplot": {"tf": 1.7320508075688772}, "pyerrors.fits.residual_plot": {"tf": 2}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 2}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2}, "pyerrors.input.bdio.read_mesons": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.create_pobs_string": {"tf": 3.605551275463989}, "pyerrors.input.dobs.write_pobs": {"tf": 3.872983346207417}, "pyerrors.input.dobs.read_pobs": {"tf": 3}, "pyerrors.input.dobs.import_dobs_string": {"tf": 3.3166247903554}, "pyerrors.input.dobs.read_dobs": {"tf": 3.3166247903554}, "pyerrors.input.dobs.create_dobs_string": {"tf": 4.58257569495584}, "pyerrors.input.dobs.write_dobs": {"tf": 4.58257569495584}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 3.3166247903554}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 3.1622776601683795}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 5.830951894845301}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 2}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.create_json_string": {"tf": 2.8284271247461903}, "pyerrors.input.json.dump_to_json": {"tf": 3}, 
"pyerrors.input.json.import_json_string": {"tf": 3}, "pyerrors.input.json.load_json": {"tf": 3}, "pyerrors.input.json.dump_dict_to_json": {"tf": 3.3166247903554}, "pyerrors.input.json.load_json_dict": {"tf": 2.6457513110645907}, "pyerrors.input.misc.fit_t0": {"tf": 4.58257569495584}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 3}, "pyerrors.input.openQCD.extract_t0": {"tf": 5.477225575051661}, "pyerrors.input.openQCD.extract_w0": {"tf": 5.477225575051661}, "pyerrors.input.openQCD.read_qtop": {"tf": 4.58257569495584}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 4.47213595499958}, "pyerrors.input.openQCD.qtop_projection": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 4.358898943540674}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 4.58257569495584}, "pyerrors.input.pandas.to_sql": {"tf": 2.23606797749979}, "pyerrors.input.pandas.read_sql": {"tf": 2.449489742783178}, "pyerrors.input.pandas.dump_df": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 2.449489742783178}, "pyerrors.input.sfcf.read_sfcf": {"tf": 4.58257569495584}, "pyerrors.input.utils.sort_names": {"tf": 1.7320508075688772}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1.7320508075688772}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2.23606797749979}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2.23606797749979}, "pyerrors.obs.Obs": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.__init__": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 3.4641016151377544}, "pyerrors.obs.Obs.gm": {"tf": 3.4641016151377544}, "pyerrors.obs.Obs.details": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 2}, "pyerrors.obs.Obs.dump": {"tf": 2}, "pyerrors.obs.Obs.export_jackknife": {"tf": 3.3166247903554}, "pyerrors.obs.CObs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 2.8284271247461903}, "pyerrors.obs.reweight": {"tf": 2.23606797749979}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 5.291502622129181}, "pyerrors.obs.import_jackknife": {"tf": 2}, "pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 2}, "pyerrors.roots.find_root": {"tf": 2.449489742783178}}, "df": 121, "i": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 3, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, 
"df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 6}}, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 6.324555320336759}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 35}, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 4}}, "i": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr": 
{"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 30}, "n": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "w": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, 
"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 35}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "j": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 5}}}}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}}, "o": {"docs": {"pyerrors": {"tf": 8.831760866327848}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.plateau": {"tf": 2}, "pyerrors.correlators.Corr.show": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 3.1622776601683795}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 2.6457513110645907}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_mesons": {"tf": 
2.6457513110645907}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.7416573867739413}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.create_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_t0": {"tf": 3.4641016151377544}, "pyerrors.input.openQCD.extract_w0": {"tf": 3.4641016151377544}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 3}, "pyerrors.input.openQCD.qtop_projection": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 3.605551275463989}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 3.1622776601683795}, "pyerrors.input.pandas.to_sql": {"tf": 2.23606797749979}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.dump_df": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 3.3166247903554}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 2.23606797749979}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 2.8284271247461903}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 94, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": 
{"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}, "l": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}, "d": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}}, "df": 4}}}}}}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}}}, "w": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 22}}, "a": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 3}, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "y": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "u": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 
1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 6, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 8}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 7}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 11, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 5, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 5}}}}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "\\": 
{"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1, "}": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}}}}}}}}}}}}}}, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 3}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}}, "df": 7, "s": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}, "+": {"1": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 2}}, "df": 2}, "2": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}}, "df": 1}}, "/": {"2": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 5}}}}, "^": {"2": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}}, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 2}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 15, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"5": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}, "docs": {"pyerrors": {"tf": 4.358898943540674}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 16, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "+": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, ":": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}, "@": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, 
"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 30}, "s": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {"pyerrors": {"tf": 8.306623862918075}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}}, "df": 2}, "e": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 5, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}}, "df": 4}, "s": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}}}}, "v": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 2.23606797749979}}, "df": 5}}, "t": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, 
"pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 6, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "z": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 16, "i": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 10}}}}}}, "u": {"docs": {}, "df": 0, "e": {"docs": {}, 
"df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 4}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}, "^": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}, "f": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 3, "a": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "/": {"0": {"3": {"0": {"6": {"0": {"1": {"7": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"4": {"1": {"2": {"0": {"8": {"7": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 6}}}}, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "x": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "r": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 5, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 2}}, "s": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, 
"pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 6}}}}}}, "y": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 11}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}}, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 8}}, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.__init__": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 2.449489742783178}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.6457513110645907}, "pyerrors.fits.total_least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.fit_lin": {"tf": 2.23606797749979}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.write_dobs": 
{"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 2}, "pyerrors.input.json.import_json_string": {"tf": 2.449489742783178}, "pyerrors.input.json.load_json": {"tf": 2.449489742783178}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_rwms": {"tf": 3.1622776601683795}, "pyerrors.input.openQCD.extract_t0": {"tf": 3}, "pyerrors.input.openQCD.extract_w0": {"tf": 3}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.8284271247461903}, "pyerrors.input.utils.sort_names": {"tf": 2.23606797749979}, "pyerrors.input.utils.check_idl": {"tf": 2}, "pyerrors.misc.errorbar": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 2.23606797749979}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 2.449489742783178}, "pyerrors.obs.derived_observable": {"tf": 2}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 2}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}}, "df": 48, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 7}, "[": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 11}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 1}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "[": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2}}}}}}}}}}}, "k": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, 
"pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}}, "df": 4}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}}, "df": 4}}}, "b": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_mesons": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.23606797749979}}, "df": 4}}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 2}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "l": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 1}}}}, "n": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 3, "g": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 9}}}}, "q": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}, "f": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 4, "s": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": 
{"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.misc.load_object": {"tf": 1}}, "df": 1}}}}, "w": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}, "/": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}, "l": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}}, "df": 1}}, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 11, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_mesons": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 5, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "l": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 3}}}}}, "u": {"docs": {}, 
"df": 0, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}}}, "m": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 2, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 4}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 1}}}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "p": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 7}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.derived_observable": 
{"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.einsum": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2}, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.projected": {"tf": 1}, 
"pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}, "h": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}, "k": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}}, "df": 1}}, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 8}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 7}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 9, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.7320508075688772}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 4}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2.23606797749979}, 
"pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 12, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 1}}}}}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 11}}, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "x": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.write_dobs": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, 
"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.23606797749979}, "pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.8284271247461903}, "pyerrors.input.utils.check_idl": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 2}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 54, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 17, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 9}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}}, "df": 1}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, 
"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 13, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}}, "df": 11}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 10, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "y": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, 
"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 4, "s": {"1": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 3.872983346207417}, "pyerrors.input": {"tf": 1.7320508075688772}, "pyerrors.misc.pseudo_Obs": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 2}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2.23606797749979}, "pyerrors.obs.import_jackknife": {"tf": 1.7320508075688772}}, "df": 8}}}}, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 12}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}}, "df": 4, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 6}}}, "r": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 7}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 
1.4142135623730951}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}}, "df": 2}}}}, "e": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 10}, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 14, "s": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}}, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 3}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.read_dobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3}}}, "e": {"docs": 
{"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}}}, "m": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 15}, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 10}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "y": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 12}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": 
{"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5}}}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1.7320508075688772}}, "df": 1}}}, "e": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 5}, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}}}, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8}}}, "w": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1, "n": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 
1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}}, "df": 4}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}}, "df": 1}}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3, "{": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "^": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}}, "df": 1, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 3}}}}}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}}}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "k": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}, "f": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}}, "df": 3}}}, "c": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 
2.23606797749979}}, "df": 1}}}}, "n": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 2.8284271247461903}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 13, "o": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 13, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 21}}, "t": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 31, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 7}}}, "w": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 5}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 2}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "w": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 6}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": 
{"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 3}}}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "x": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "m": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3, "p": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 21}}, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}}, "df": 23, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, 
"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 6, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 1}}}, "p": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}}, "df": 5, "r": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 25, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, 
"pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 19, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 5}}}}, "d": {"docs": {"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 3}}}, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.605551275463989}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 15}}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 2}, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "x": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}}, "b": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}, "x": {"0": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7}, "1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}}, "df": 3}, "2": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}}, "df": 3}, "docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 3}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, 
"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 12, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}}, "df": 7}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}, "[": {"0": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}, "1": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}, "y": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 9, "o": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2}}}}}}, "r": {"0": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 10, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2.23606797749979}, 
"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 9}, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.6457513110645907}}, "df": 20, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 4}}}}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 11, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "/": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}}}}}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 
1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 14}}}, "k": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2}, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2, "[": {"0": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}}}, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 6, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 4}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 15, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}, "s": {"docs": {"pyerrors": {"tf": 1}, 
"pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}}, "df": 4}}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}}, "df": 3}}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 7}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}}, "df": 2}}}}}}}}}, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 
1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 6}}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 10}, "s": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 6}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}}}}}}}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, 
"pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 61}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8}}}}}, "o": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "w": {"docs": {}, "df": 0, 
"e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 7}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 6}}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.Obs.reweight": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}}}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 4, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}}}}}}}}, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1, "d": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 
0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 15, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 2}}}, "k": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}}, "df": 3}, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 6, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}}, "df": 2}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "w": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 2}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 5}}}}, "w": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}}, "df": 
1}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 1}}}, "v": {"1": {"docs": {}, "df": 0, "@": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "@": {"docs": {}, "df": 0, "v": {"2": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}, "docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.prune": {"tf": 2.23606797749979}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 21, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 8}, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 13}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 2}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}}, "df": 2}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": 
{"tf": 1}}, "df": 2}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}}, "df": 8}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}, "e": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.449489742783178}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 7}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 3}, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 3}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}}, "df": 2, "s": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "\\": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": 
{"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "^": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}, "u": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 2}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 16, "d": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 26}, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}, "s": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 5}}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "l": 
{"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 6}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}, "j": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, 
"pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 6, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 3, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 2}}}}}, "k": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input": {"tf": 2.23606797749979}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "s": {"docs": {"pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2.23606797749979}, "pyerrors.input.json.dump_to_json": {"tf": 2.449489742783178}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.449489742783178}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 12}}}, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "}": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}, "^": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "k": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 4, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "h": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 3}}, "y": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2, "s": 
{"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 2}, "w": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 5, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "\u2013": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"1": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 1}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 2}}}}}}, "q": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.449489742783178}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 2, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 4}}, "df": 1, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}}, "df": 1}}}}, "q": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 
2}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}}, "df": 2}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 21}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}}, "df": 1}}}}, "s": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 21}, "d": {"docs": {}, 
"df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}}}}}}, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1.4142135623730951}}, "df": 2}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}, "p": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 6}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}}, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, ":": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}}}}}}}}}, "d": {"docs": {}, "df": 0, "f": {"5": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, 
"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}, "docs": {}, "df": 0}}, "u": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 15, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 3}}, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 10}}}}}}}}}}, "pipeline": ["trimmer"], "_isPrebuiltIndex": true}; + /** pdoc search index */const docs = {"version": "0.9.5", "fields": ["qualname", "fullname", "annotation", "default_value", "signature", "bases", "doc"], "ref": "fullname", "documentStore": {"docs": {"pyerrors": {"fullname": "pyerrors", "modulename": "pyerrors", "kind": "module", "doc": "

    What is pyerrors?

    \n\n

    pyerrors is a python package for error computation and propagation of Markov chain Monte Carlo data.\nIt is based on the gamma method arXiv:hep-lat/0306017. Some of its features are:

    \n\n
      \n
    • automatic differentiation for exact linear error propagation as suggested in arXiv:1809.01289 (partly based on the autograd package).
    • \n
    • treatment of slow modes in the simulation as suggested in arXiv:1009.5228.
    • \n
    • coherent error propagation for data from different Markov chains.
    • \n
    • non-linear fits with x- and y-errors and exact linear error propagation based on automatic differentiation as introduced in arXiv:1809.01289.
    • \n
    • real and complex matrix operations and their error propagation based on automatic differentiation (Matrix inverse, Cholesky decomposition, calculation of eigenvalues and eigenvectors, singular value decomposition...).
    • \n
    \n\n

    More detailed examples can be found in the GitHub repository.

    \n\n

    If you use pyerrors for research that leads to a publication please consider citing:

    \n\n
      \n
    • Fabian Joswig, Simon Kuberski, Justus T. Kuhlmann, Jan Neuendorf, pyerrors: a python framework for error analysis of Monte Carlo data. Comput.Phys.Commun. 288 (2023) 108750.
    • \n
    • Ulli Wolff, Monte Carlo errors with less errors. Comput.Phys.Commun. 156 (2004) 143-153, Comput.Phys.Commun. 176 (2007) 383 (erratum).
    • \n
    • Alberto Ramos, Automatic differentiation for error analysis of Monte Carlo data. Comput.Phys.Commun. 238 (2019) 19-35.
    • \n
    \n\n

    and

    \n\n
      \n
    • Stefan Schaefer, Rainer Sommer, Francesco Virotta, Critical slowing down and error analysis in lattice QCD simulations. Nucl.Phys.B 845 (2011) 93-119.
    • \n
    \n\n

    where applicable.

    \n\n

    There exist similar publicly available implementations of gamma method error analysis suites in Fortran, Julia and Python.

    \n\n

    Installation

    \n\n

    Install the most recent release using pip and pypi:

    \n\n
    \n
    python -m pip install pyerrors     # Fresh install\npython -m pip install -U pyerrors  # Update\n
    \n
    \n\n

    Install the most recent release using conda and conda-forge:

    \n\n
    \n
    conda install -c conda-forge pyerrors  # Fresh install\nconda update -c conda-forge pyerrors   # Update\n
    \n
    \n\n

    Install the current develop version:

    \n\n
    \n
    python -m pip install git+https://github.com/fjosw/pyerrors.git@develop\n
    \n
    \n\n

    Basic example

    \n\n
    \n
    import numpy as np\nimport pyerrors as pe\n\nmy_obs = pe.Obs([samples], ['ensemble_name']) # Initialize an Obs object\nmy_new_obs = 2 * np.log(my_obs) / my_obs ** 2 # Construct derived Obs object\nmy_new_obs.gamma_method()                     # Estimate the statistical error\nprint(my_new_obs)                             # Print the result to stdout\n> 0.31498(72)\n
    \n
    \n\n

    The Obs class

    \n\n

    pyerrors introduces a new datatype, Obs, which simplifies error propagation and estimation for auto- and cross-correlated data.\nAn Obs object can be initialized with two arguments; the first is a list containing the samples for an observable from a Monte Carlo chain.\nThe samples can either be provided as a python list or as a numpy array.\nThe second argument is a list containing the names of the respective Monte Carlo chains as strings. These strings uniquely identify a Monte Carlo chain/ensemble. It is crucial for the correct error propagation that observations from the same Monte Carlo history are labeled with the same name. See Multiple ensembles/replica for details.

    \n\n
    \n
    import pyerrors as pe\n\nmy_obs = pe.Obs([samples], ['ensemble_name'])\n
    \n
    \n\n

    Error propagation

    \n\n

    When performing mathematical operations on Obs objects the correct error propagation is intrinsically taken care of using a first order Taylor expansion\n$$\\delta_f^i=\\sum_\\alpha \\bar{f}_\\alpha \\delta_\\alpha^i\\,,\\quad \\delta_\\alpha^i=a_\\alpha^i-\\bar{a}_\\alpha\\,,$$\nas introduced in arXiv:hep-lat/0306017.\nThe required derivatives $\\bar{f}_\\alpha$ are evaluated up to machine precision via automatic differentiation as suggested in arXiv:1809.01289.

    \n\n

    The Obs class is designed such that mathematical numpy functions can be used on Obs just as for regular floats.

    \n\n
    \n
    import numpy as np\nimport pyerrors as pe\n\nmy_obs1 = pe.Obs([samples1], ['ensemble_name'])\nmy_obs2 = pe.Obs([samples2], ['ensemble_name'])\n\nmy_sum = my_obs1 + my_obs2\n\nmy_m_eff = np.log(my_obs1 / my_obs2)\n\niamzero = my_m_eff - my_m_eff\n# Check that value and fluctuations are zero within machine precision\nprint(iamzero == 0.0)\n> True\n
    \n
    \n\n

    Error estimation

    \n\n

    The error estimation within pyerrors is based on the gamma method introduced in arXiv:hep-lat/0306017.\nAfter having arrived at the derived quantity of interest the gamma_method can be called as detailed in the following example.

    \n\n
    \n
    my_sum.gamma_method()\nprint(my_sum)\n> 1.70(57)\nmy_sum.details()\n> Result         1.70000000e+00 +/- 5.72046658e-01 +/- 7.56746598e-02 (33.650%)\n>  t_int         2.71422900e+00 +/- 6.40320983e-01 S = 2.00\n> 1000 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble_name' : 1000 configurations (from 1 to 1000)\n
    \n
    \n\n

    The gamma_method is not automatically called after every intermediate step in order to prevent computational overhead.

    \n\n

    We use the following definition of the integrated autocorrelation time established in Madras & Sokal 1988\n$$\\tau_\\mathrm{int}=\\frac{1}{2}+\\sum_{t=1}^{W}\\rho(t)\\geq \\frac{1}{2}\\,.$$\nThe window $W$ is determined via the automatic windowing procedure described in arXiv:hep-lat/0306017.\nThe standard value for the parameter $S$ of this automatic windowing procedure is $S=2$. Other values for $S$ can be passed to the gamma_method as parameter.

    \n\n
    \n
    my_sum.gamma_method(S=3.0)\nmy_sum.details()\n> Result         1.70000000e+00 +/- 6.30675201e-01 +/- 1.04585650e-01 (37.099%)\n>  t_int         3.29909703e+00 +/- 9.77310102e-01 S = 3.00\n> 1000 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble_name' : 1000 configurations (from 1 to 1000)\n
    \n
    \n\n

    The integrated autocorrelation time $\\tau_\\mathrm{int}$ and the autocorrelation function $\\rho(W)$ can be monitored via the methods pyerrors.obs.Obs.plot_tauint and pyerrors.obs.Obs.plot_rho.

    \n\n
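
    A minimal sketch of how this could look, reusing my_sum from the examples above (the gamma_method has to be called beforehand):

    \n\n
    \n
    my_sum.plot_rho()     # plot of the normalized autocorrelation function\nmy_sum.plot_tauint()  # plot of the integrated autocorrelation time\n
    \n
    \n\n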

    If the parameter $S$ is set to zero it is assumed that the dataset does not exhibit any autocorrelation and the window size is chosen to be zero.\nIn this case the error estimate is identical to the sample standard error.

    \n\n
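
    For instance, the purely uncorrelated (standard-error) estimate could be requested like this (a sketch, again reusing my_sum from above):

    \n\n
    \n
    my_sum.gamma_method(S=0)\nmy_sum.details()\n
    \n
    \n\n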

    Exponential tails

    \n\n

    Slow modes in the Monte Carlo history can be accounted for by attaching an exponential tail to the autocorrelation function $\rho$ as suggested in arXiv:1009.5228. The longest autocorrelation time in the history, $\tau_\mathrm{exp}$, can be passed to the gamma_method as a parameter. In this case the automatic windowing procedure is bypassed and the parameter $S$ does not affect the error estimate.

    \n\n
    \n
    my_sum.gamma_method(tau_exp=7.2)\nmy_sum.details()\n> Result         1.70000000e+00 +/- 6.28097762e-01 +/- 5.79077524e-02 (36.947%)\n>  t_int         3.27218667e+00 +/- 7.99583654e-01 tau_exp = 7.20,  N_sigma = 1\n> 1000 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble_name' : 1000 configurations (from 1 to 1000)\n
    \n
    \n\n

    For the full API see pyerrors.obs.Obs.gamma_method.

    \n\n

    Multiple ensembles/replica

    \n\n

    Error propagation for multiple ensembles (Markov chains with different simulation parameters) is handled automatically. Ensembles are uniquely identified by their name.

    \n\n
    \n
    obs1 = pe.Obs([samples1], ['ensemble1'])\nobs2 = pe.Obs([samples2], ['ensemble2'])\n\nmy_sum = obs1 + obs2\nmy_sum.details()\n> Result   2.00697958e+00\n> 1500 samples in 2 ensembles:\n>   \u00b7 Ensemble 'ensemble1' : 1000 configurations (from 1 to 1000)\n>   \u00b7 Ensemble 'ensemble2' : 500 configurations (from 1 to 500)\n
    \n
    \n\n

    Observables from the same Monte Carlo chain have to be initialized with the same name for correct error propagation. If different names were used in this case, the data would be treated as statistically independent, resulting in a loss of relevant information and a potential over- or underestimate of the statistical error.

    \n\n

    pyerrors identifies multiple replica (independent Markov chains with identical simulation parameters) by the vertical bar | in the name of the data set.

    \n\n
    \n
    obs1 = pe.Obs([samples1], ['ensemble1|r01'])\nobs2 = pe.Obs([samples2], ['ensemble1|r02'])\n\nmy_sum = obs1 + obs2\nmy_sum.details()\n> Result   2.00697958e+00\n> 1500 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble1'\n>     \u00b7 Replicum 'r01' : 1000 configurations (from 1 to 1000)\n>     \u00b7 Replicum 'r02' : 500 configurations (from 1 to 500)\n
    \n
    \n\n

    Error estimation for multiple ensembles

    \n\n

    In order to keep track of different error analysis parameters for different ensembles one can make use of global dictionaries as detailed in the following example.

    \n\n
    \n
    pe.Obs.S_dict['ensemble1'] = 2.5\npe.Obs.tau_exp_dict['ensemble2'] = 8.0\npe.Obs.tau_exp_dict['ensemble3'] = 2.0\n
    \n
    \n\n

    In case the gamma_method is called without any parameters it will use the values specified in the dictionaries for the respective ensembles.\nArguments passed explicitly to the gamma_method still take precedence over the dictionaries.

    \n\n
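
    A short sketch of this behaviour for an observable my_sum that depends on 'ensemble1' from the dictionary example above:

    \n\n
    \n
    my_sum.gamma_method()       # uses S=2.5 for 'ensemble1' as set in S_dict\nmy_sum.gamma_method(S=3.0)  # an explicit argument takes precedence over the dictionary\n
    \n
    \n\n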

    Irregular Monte Carlo chains

    \n\n

    Obs objects defined on irregular Monte Carlo chains can be initialized with the parameter idl.

    \n\n
    \n
    # Observable defined on configurations 20 to 519\nobs1 = pe.Obs([samples1], ['ensemble1'], idl=[range(20, 520)])\nobs1.details()\n> Result         9.98319881e-01\n> 500 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble1' : 500 configurations (from 20 to 519)\n\n# Observable defined on every second configuration between 5 and 1003\nobs2 = pe.Obs([samples2], ['ensemble1'], idl=[range(5, 1005, 2)])\nobs2.details()\n> Result         9.99100712e-01\n> 500 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble1' : 500 configurations (from 5 to 1003 in steps of 2)\n\n# Observable defined on configurations 2, 9, 28, 29 and 501\nobs3 = pe.Obs([samples3], ['ensemble1'], idl=[[2, 9, 28, 29, 501]])\nobs3.details()\n> Result         1.01718064e+00\n> 5 samples in 1 ensemble:\n>   \u00b7 Ensemble 'ensemble1' : 5 configurations (irregular range)\n
    \n
    \n\n

    Obs objects defined on regular and irregular histories of the same ensemble can be combined with each other and the correct error propagation and estimation is automatically taken care of.

    \n\n

    Warning: Irregular Monte Carlo chains can result in odd patterns in the autocorrelation functions.\nMake sure to check the autocorrelation time with e.g. pyerrors.obs.Obs.plot_rho or pyerrors.obs.Obs.plot_tauint.

    \n\n

    For the full API see pyerrors.obs.Obs.

    \n\n

    Correlators

    \n\n

    When one is not interested in single observables but correlation functions, pyerrors offers the Corr class which simplifies the corresponding error propagation and provides the user with a set of standard methods. In order to initialize a Corr object one needs to arrange the data as a list of Obs

    \n\n
    \n
    my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3])\nprint(my_corr)\n> x0/a  Corr(x0/a)\n> ------------------\n> 0      0.7957(80)\n> 1      0.5156(51)\n> 2      0.3227(33)\n> 3      0.2041(21)\n
    \n
    \n\n

    In case the correlation functions are not defined on the outermost timeslices, for example because of fixed boundary conditions, a padding can be introduced.

    \n\n
    \n
    my_corr = pe.Corr([obs_0, obs_1, obs_2, obs_3], padding=[1, 1])\nprint(my_corr)\n> x0/a  Corr(x0/a)\n> ------------------\n> 0\n> 1      0.7957(80)\n> 2      0.5156(51)\n> 3      0.3227(33)\n> 4      0.2041(21)\n> 5\n
    \n
    \n\n

    The individual entries of a correlator can be accessed via slicing

    \n\n
    \n
    print(my_corr[3])\n> 0.3227(33)\n
    \n
    \n\n

    Error propagation with the Corr class works very similarly to Obs objects. Mathematical operations are overloaded and Corr objects can be combined with other Corr objects, Obs objects or real numbers and integers.

    \n\n
    \n
    my_new_corr = 0.3 * my_corr[2] * my_corr * my_corr + 12 / my_corr\n
    \n
    \n\n

    pyerrors provides the user with a set of regularly used methods for the manipulation of correlator objects:

    \n\n
      \n
    • Corr.gamma_method applies the gamma method to all entries of the correlator.
    • \n
    • Corr.m_eff to construct effective masses. Various variants for periodic and fixed temporal boundary conditions are available.
    • \n
    • Corr.deriv returns the first derivative of the correlator as Corr. Different discretizations of the numerical derivative are available.
    • \n
    • Corr.second_deriv returns the second derivative of the correlator as Corr. Different discretizations of the numerical derivative are available.
    • \n
    • Corr.symmetric symmetrizes parity even correlation functions, assuming periodic boundary conditions.
    • \n
    • Corr.anti_symmetric anti-symmetrizes parity odd correlation functions, assuming periodic boundary conditions.
    • \n
    • Corr.T_symmetry averages a correlator with its time symmetry partner, assuming fixed boundary conditions.
    • \n
    • Corr.plateau extracts a plateau value from the correlator in a given range.
    • \n
    • Corr.roll periodically shifts the correlator.
    • \n
    • Corr.reverse reverses the time ordering of the correlator.
    • \n
    • Corr.correlate constructs a disconnected correlation function from the correlator and another Corr or Obs object.
    • \n
    • Corr.reweight reweights the correlator.
    • \n
    \n\n
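
    A brief sketch of how a few of these methods might be combined, reusing my_corr from the examples above:

    \n\n
    \n
    my_corr.gamma_method()         # apply the gamma method to all entries\nm_eff = my_corr.m_eff()        # effective mass\nfirst_deriv = my_corr.deriv()  # first derivative of the correlator\n
    \n
    \n\n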

    pyerrors can also handle matrices of correlation functions and extract energy states from these matrices via a generalized eigenvalue problem (see pyerrors.correlators.Corr.GEVP).

    \n\n
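
    A minimal sketch, assuming matrix_corr is such a matrix-valued Corr object (see pyerrors.correlators.Corr.GEVP for the available options):

    \n\n
    \n
    ground_state_vectors = matrix_corr.GEVP(t0=2)[0]\n
    \n
    \n\n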

    For the full API see pyerrors.correlators.Corr.

    \n\n

    Complex valued observables

    \n\n

    pyerrors can handle complex valued observables via the class pyerrors.obs.CObs.\nCObs are initialized with a real and an imaginary part which both can be Obs valued.

    \n\n
    \n
    my_real_part = pe.Obs([samples1], ['ensemble1'])\nmy_imag_part = pe.Obs([samples2], ['ensemble1'])\n\nmy_cobs = pe.CObs(my_real_part, my_imag_part)\nmy_cobs.gamma_method()\nprint(my_cobs)\n> (0.9959(91)+0.659(28)j)\n
    \n
    \n\n

    Elementary mathematical operations are overloaded and samples are properly propagated as for the Obs class.

    \n\n
    \n
    my_derived_cobs = (my_cobs + my_cobs.conjugate()) / np.abs(my_cobs)\nmy_derived_cobs.gamma_method()\nprint(my_derived_cobs)\n> (1.668(23)+0.0j)\n
    \n
    \n\n

    The Covobs class

    \n\n

    In many projects, auxiliary data that is not based on Monte Carlo chains enters. Examples are experimentally determined meson masses which are used to set the scale or renormalization constants. These numbers come with an error that has to be propagated through the analysis. The Covobs class allows one to define such quantities in pyerrors. Furthermore, external input might consist of correlated quantities. An example is the set of parameters of an interpolation formula, which are defined via mean values and a covariance matrix between all parameters. The contribution of the interpolation formula to the error of a derived quantity therefore might depend on the complete covariance matrix.

    \n\n

    This concept is built into the definition of Covobs. In pyerrors, external input is defined by $M$ mean values, an $M\times M$ covariance matrix, where $M=1$ is permissible, and a name that uniquely identifies the covariance matrix. Below, we define the pion mass, based on its mean value and error, 134.9768(5). Note that the square of the error enters cov_Obs, since the second argument of this function is the covariance matrix of the Covobs.

    \n\n
    \n
    import pyerrors.obs as pe\n\nmpi = pe.cov_Obs(134.9768, 0.0005**2, 'pi^0 mass')\nmpi.gamma_method()\nmpi.details()\n> Result         1.34976800e+02 +/- 5.00000000e-04 +/- 0.00000000e+00 (0.000%)\n>  pi^0 mass     5.00000000e-04\n> 0 samples in 1 ensemble:\n>   \u00b7 Covobs   'pi^0 mass'\n
    \n
    \n\n

    The resulting object mpi is an Obs that contains a Covobs. In the following, it may be handled as any other Obs. The contribution of the covariance matrix to the error of an Obs is determined from the $M \\times M$ covariance matrix $\\Sigma$ and the gradient of the Obs with respect to the external quantities, which is the $1\\times M$ Jacobian matrix $J$, via\n$$s = \\sqrt{J^T \\Sigma J}\\,,$$\nwhere the Jacobian is computed for each derived quantity via automatic differentiation.

    \n\n

    Correlated auxiliary data is defined similarly to above, e.g., via

    \n\n
    \n
    RAP = pe.cov_Obs([16.7457, -19.0475], [[3.49591, -6.07560], [-6.07560, 10.5834]], 'R_AP, 1906.03445, (5.3a)')\nprint(RAP)\n> [Obs[16.7(1.9)], Obs[-19.0(3.3)]]\n
    \n
    \n\n

    where RAP now is a list of two Obs that contains the two correlated parameters.

    \n\n

    Since the gradient of a derived observable with respect to an external covariance matrix is propagated through the entire analysis, the Covobs class allows one to quote the derivative of a result with respect to the external quantities. If these derivatives are published together with the result, small shifts in the definition of external quantities, e.g., the definition of the physical point, can be performed a posteriori based on the published information. This may help to compare results of different groups. The gradient of an Obs o with respect to a covariance matrix with the identifying string k may be accessed via

    \n\n
    \n
    o.covobs[k].grad\n
    \n
    \n\n

    Error propagation in iterative algorithms

    \n\n

    pyerrors supports exact linear error propagation for iterative algorithms like various variants of non-linear least squares fits or root finding. The derivatives required for the error propagation are calculated as described in arXiv:1809.01289.

    \n\n

    Least squares fits

    \n\n

    Standard non-linear least-squares fits with errors on the dependent but not the independent variables can be performed with pyerrors.fits.least_squares. By default, the Levenberg-Marquardt algorithm implemented in scipy is used as the solver.

    \n\n

    Fit functions have to be of the following form

    \n\n
    \n
    import autograd.numpy as anp\n\ndef func(a, x):\n    return a[1] * anp.exp(-a[0] * x)\n
    \n
    \n\n

    It is important that numerical functions refer to autograd.numpy instead of numpy for the automatic differentiation in iterative algorithms to work properly.

    \n\n

    Fits can then be performed via

    \n\n
    \n
    fit_result = pe.fits.least_squares(x, y, func)\nprint("\\n", fit_result)\n> Fit with 2 parameters\n> Method: Levenberg-Marquardt\n> `ftol` termination condition is satisfied.\n> chisquare/d.o.f.: 0.9593035785160936\n\n>  Goodness of fit:\n> \u03c7\u00b2/d.o.f. = 0.959304\n> p-value   = 0.5673\n> Fit parameters:\n> 0      0.0548(28)\n> 1      1.933(64)\n
    \n
    \n\n

    where x is a list or numpy.array of floats and y is a list or numpy.array of Obs.

    \n\n

    Data stored in Corr objects can be fitted directly using the Corr.fit method.

    \n\n
    \n
    my_corr = pe.Corr(y)\nfit_result = my_corr.fit(func, fitrange=[12, 25])\n
    \n
    \n\n

    this can simplify working with absolute fit ranges and takes care of gaps in the data automatically.

    \n\n

    For fits with multiple independent variables the fit function can be of the form

    \n\n
    \n
    def func(a, x):\n    (x1, x2) = x\n    return a[0] * x1 ** 2 + a[1] * x2\n
    \n
    \n\n

    pyerrors also supports correlated fits which can be triggered via the parameter correlated_fit=True.\nDetails about how the required covariance matrix is estimated can be found in pyerrors.obs.covariance.\nDirect visualizations of the performed fits can be triggered via resplot=True or qqplot=True.

    \n\n
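
    For example, such a fit could be triggered like this (a sketch based on the keyword arguments named above):

    \n\n
    \n
    fit_result = pe.fits.least_squares(x, y, func, correlated_fit=True, resplot=True, qqplot=True)\n
    \n
    \n\n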

    For all available options including combined fits to multiple datasets see pyerrors.fits.least_squares.

    \n\n

    Total least squares fits

    \n\n

    pyerrors can also fit data with errors on both the dependent and independent variables using the total least squares method, also referred to as orthogonal distance regression, as implemented in scipy; see pyerrors.fits.least_squares. The syntax is identical to the standard least squares case, the only difference being that x also has to be a list or numpy.array of Obs.

    \n\n
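
    A sketch of such a call via pyerrors.fits.total_least_squares, where ox is assumed to be a list or numpy.array of Obs-valued x values:

    \n\n
    \n
    fit_result = pe.fits.total_least_squares(ox, y, func)\n
    \n
    \n\n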

    For the full API see pyerrors.fits for fits and pyerrors.roots for finding roots of functions.

    \n\n

    Matrix operations

    \n\n

    pyerrors provides wrappers for Obs- and CObs-valued matrix operations based on numpy.linalg. The supported functions include:

    \n\n
      \n
    • inv for the matrix inverse.
    • \n
    • cholesky for the Cholesky decomposition.
    • \n
    • det for the matrix determinant.
    • \n
    • eigh for eigenvalues and eigenvectors of hermitian matrices.
    • \n
    • eig for eigenvalues of general matrices.
    • \n
    • pinv for the Moore-Penrose pseudoinverse.
    • \n
    • svd for the singular-value-decomposition.
    • \n
    \n\n
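
    A brief sketch of how these wrappers might be used, assuming obs_matrix is a square numpy array filled with Obs:

    \n\n
    \n
    matrix_inverse = pe.linalg.inv(obs_matrix)      # Obs-valued matrix inverse\nmatrix_determinant = pe.linalg.det(obs_matrix)  # Obs-valued determinant\n
    \n
    \n\n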

    For the full API see pyerrors.linalg.

    \n\n

    Export data

    \n\n

    \n\n

    The preferred exported file format within pyerrors is json.gz. Files written to this format are valid JSON files that have been compressed using gzip. The structure of the content is inspired by the dobs format of the ALPHA collaboration. The aim of the format is to facilitate the storage of data in a self-contained way such that, even years after the creation of the file, it is possible to extract all necessary information:

    \n\n
      \n
    • What observables are stored? Possibly: How exactly are they defined.
    • \n
    • How does each single ensemble or external quantity contribute to the error of the observable?
    • \n
    • Who wrote the file, when, and on which machine?
    • \n
    \n\n

    This can be achieved by storing all information in one single file. The export routines of pyerrors are written such that as much information as possible is written automatically as described in the following example

    \n\n
    \n
    my_obs = pe.Obs([samples], ["test_ensemble"])\nmy_obs.tag = "My observable"\n\npe.input.json.dump_to_json(my_obs, "test_output_file", description="This file contains a test observable")\n# For a single observable one can equivalently use the class method dump\nmy_obs.dump("test_output_file", description="This file contains a test observable")\n\ncheck = pe.input.json.load_json("test_output_file")\n\nprint(my_obs == check)\n> True\n
    \n
    \n\n

    The format also allows one to directly write out the content of Corr objects or lists and arrays of Obs objects by passing the desired data to pyerrors.input.json.dump_to_json.

    \n\n

    json.gz format specification

    \n\n

    The first entries of the file provide optional auxiliary information:

    \n\n
      \n
    • program is a string that indicates which program was used to write the file.
    • \n
    • version is a string that specifies the version of the format.
    • \n
    • who is a string that specifies the user name of the creator of the file.
    • \n
    • date is a string and contains the creation date of the file.
    • \n
    • host is a string and contains the hostname of the machine where the file has been written.
    • \n
    • description contains information on the content of the file. This field is not filled automatically in pyerrors. The user is advised to provide as detailed information as possible in this field. Examples are: Input files of measurements or simulations, LaTeX formulae or references to publications to specify how the observables have been computed, details on the analysis strategy, ... This field may be any valid JSON type. Strings, arrays or objects (equivalent to dicts in python) are well suited to provide information.
    • \n
    \n\n

    The only necessary entry of the file is the field obsdata, an array that contains the actual data.

    \n\n

    Each entry of the array belongs to a single structure of observables. Currently, these structures can be either Obs, list, numpy.ndarray, or Corr. All Obs inside a structure (with dimension > 0) have to be defined on the same set of configurations. Different structures, which are represented by entries of the array obsdata, are treated independently. Each entry of the array obsdata has the following required entries:

    \n\n
      \n
    • type is a string that specifies the type of the structure. This allows one to parse the content to the correct form after reading the file. It is always possible to interpret the content as a list of Obs.
    • \n
    • value is an array that contains the mean values of the Obs inside the structure.\nThe following entries are optional:
    • \n
    • layout is a string that specifies the layout of multi-dimensional structures. Examples are \"2, 2\" for a 2x2 dimensional matrix or \"64, 4, 4\" for a Corr with $T=64$ and 4x4 matrices on each time slice. \"1\" denotes a single Obs. Multi-dimensional structures are stored in row-major format (see below).
    • \n
    • tag is any JSON type. It contains additional information concerning the structure. The tag of an Obs in pyerrors is written here.
    • \n
    • reweighted is a Bool that may be used to specify whether the Obs in the structure have been reweighted.
    • \n
    • data is an array that contains the data from MC chains. We will define it below.
    • \n
    • cdata is an array that contains the data from external quantities with an error (Covobs in pyerrors). We will define it below.
    • \n
    \n\n

    The array data contains the data from MC chains. Each entry of the array corresponds to one ensemble and contains:

    \n\n
      \n
    • id, a string that contains the name of the ensemble
    • \n
    • replica, an array that contains an entry per replica of the ensemble.
    • \n
    \n\n

    Each entry of replica contains\nname, a string that contains the name of the replica, and\ndeltas, an array that contains the actual data.

    \n\n

    Each entry in deltas corresponds to one configuration of the replica and has $1+N$ entries. The first entry is an integer that specifies the configuration number that, together with ensemble and replica name, may be used to uniquely identify the configuration on which the data has been obtained. The following N entries specify the deltas, i.e., the deviation of the observable from the mean value on this configuration, of each Obs inside the structure. Multi-dimensional structures are stored in a row-major format. For primary observables, such as correlation functions, $value + delta_i$ matches the primary data obtained on the configuration.

    \n\n

    The array cdata contains information about the contribution of auxiliary observables, represented by Covobs in pyerrors, to the total error of the observables. Each entry of the array belongs to one auxiliary covariance matrix and contains:

    \n\n
      \n
    • id, a string that identifies the covariance matrix
    • \n
    • layout, a string that defines the dimensions of the $M\\times M$ covariance matrix (has to be \"M, M\" or \"1\").
    • \n
    • cov, an array that contains the $M\\times M$ many entries of the covariance matrix, stored in row-major format.
    • \n
    • grad, an array that contains N entries, one for each Obs inside the structure. Each entry itself is an array that contains the M gradients of the Nth observable with respect to the quantity that corresponds to the Mth diagonal entry of the covariance matrix.
    • \n
    \n\n
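
    Schematically, the decompressed content of such a file for a single Obs, read into a python dictionary, might look as follows (an illustrative sketch only; all values are placeholders rather than verbatim pyerrors output):

    \n\n
    \n
    file_content = {\n    "program": "...",\n    "version": "...",\n    "who": "...", "date": "...", "host": "...",\n    "description": "...",\n    "obsdata": [{\n        "type": "Obs",\n        "layout": "1",\n        "value": [1.7],\n        "data": [{"id": "ensemble1",\n                  "replica": [{"name": "r01", "deltas": [[1, 0.001], [2, -0.002]]}]}],\n        "cdata": [{"id": "pi^0 mass", "layout": "1", "cov": [2.5e-07], "grad": [[1.0]]}],\n    }],\n}\n
    \n
    \n\n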

    A JSON schema that may be used to verify the correctness of a file with respect to the format definition is stored in ./examples/json_schema.json. The schema is a self-descriptive format definition and contains an exemplary file.

    \n\n

    Julia I/O routines for the json.gz format, compatible with ADerrors.jl, can be found here.

    \n"}, "pyerrors.correlators": {"fullname": "pyerrors.correlators", "modulename": "pyerrors.correlators", "kind": "module", "doc": "

    \n"}, "pyerrors.correlators.Corr": {"fullname": "pyerrors.correlators.Corr", "modulename": "pyerrors.correlators", "qualname": "Corr", "kind": "class", "doc": "

    The class for a correlator (a time-dependent sequence of pe.Obs).

    \n\n

    Everything this class does can be achieved using lists or arrays of Obs.\nBut it is simply more convenient to have a dedicated object for correlators.\nOne often wants to add or multiply correlators of the same length at every timeslice and it is inconvenient\nto iterate over all timeslices for every operation. This is especially true when dealing with matrices.

    \n\n

    The correlator can have two types of content: an Obs at every timeslice or a GEVP\nmatrix at every timeslice. Other dependencies (e.g. spatial) are not supported.

    \n"}, "pyerrors.correlators.Corr.__init__": {"fullname": "pyerrors.correlators.Corr.__init__", "modulename": "pyerrors.correlators", "qualname": "Corr.__init__", "kind": "function", "doc": "

    Initialize a Corr object.

    \n\n
    Parameters
    \n\n
      \n
    • data_input (list or array):\nlist of Obs or list of arrays of Obs or array of Corrs
    • \n
    • padding (list, optional):\nList with two entries where the first labels the padding\nat the front of the correlator and the second the padding\nat the back.
    • \n
    • prange (list, optional):\nList containing the first and last timeslice of the plateau\nregion identified for this correlator.
    • \n
    \n", "signature": "(data_input, padding=[0, 0], prange=None)"}, "pyerrors.correlators.Corr.tag": {"fullname": "pyerrors.correlators.Corr.tag", "modulename": "pyerrors.correlators", "qualname": "Corr.tag", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.content": {"fullname": "pyerrors.correlators.Corr.content", "modulename": "pyerrors.correlators", "qualname": "Corr.content", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.T": {"fullname": "pyerrors.correlators.Corr.T", "modulename": "pyerrors.correlators", "qualname": "Corr.T", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.prange": {"fullname": "pyerrors.correlators.Corr.prange", "modulename": "pyerrors.correlators", "qualname": "Corr.prange", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.reweighted": {"fullname": "pyerrors.correlators.Corr.reweighted", "modulename": "pyerrors.correlators", "qualname": "Corr.reweighted", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.gamma_method": {"fullname": "pyerrors.correlators.Corr.gamma_method", "modulename": "pyerrors.correlators", "qualname": "Corr.gamma_method", "kind": "function", "doc": "

    Apply the gamma method to the content of the Corr.

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.gm": {"fullname": "pyerrors.correlators.Corr.gm", "modulename": "pyerrors.correlators", "qualname": "Corr.gm", "kind": "function", "doc": "

    Apply the gamma method to the content of the Corr.

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.projected": {"fullname": "pyerrors.correlators.Corr.projected", "modulename": "pyerrors.correlators", "qualname": "Corr.projected", "kind": "function", "doc": "

    We need to project the Correlator with a Vector to get a single value at each timeslice.

    \n\n

    The method can use one or two vectors.\nIf two are specified it returns v1@G@v2 (note that the order matters).\nBy default it will return the lowest source, which usually means unsmeared-unsmeared (0,0), but it does not have to.

    \n", "signature": "(self, vector_l=None, vector_r=None, normalize=False):", "funcdef": "def"}, "pyerrors.correlators.Corr.item": {"fullname": "pyerrors.correlators.Corr.item", "modulename": "pyerrors.correlators", "qualname": "Corr.item", "kind": "function", "doc": "

    Picks the element [i,j] from every matrix and returns a correlator containing one Obs per timeslice.

    \n\n
    Parameters
    \n\n
      \n
    • i (int):\nFirst index to be picked.
    • \n
    • j (int):\nSecond index to be picked.
    • \n
    \n", "signature": "(self, i, j):", "funcdef": "def"}, "pyerrors.correlators.Corr.plottable": {"fullname": "pyerrors.correlators.Corr.plottable", "modulename": "pyerrors.correlators", "qualname": "Corr.plottable", "kind": "function", "doc": "

    Outputs the correlator in a plottable format.

    \n\n

    Outputs three lists containing the timeslice index, the value on each\ntimeslice and the error on each timeslice.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.symmetric": {"fullname": "pyerrors.correlators.Corr.symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.symmetric", "kind": "function", "doc": "

    Symmetrize the correlator around x0=0.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.anti_symmetric": {"fullname": "pyerrors.correlators.Corr.anti_symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.anti_symmetric", "kind": "function", "doc": "

    Anti-symmetrize the correlator around x0=0.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"fullname": "pyerrors.correlators.Corr.is_matrix_symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.is_matrix_symmetric", "kind": "function", "doc": "

    Checks whether the correlator matrices are symmetric on every timeslice.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.matrix_symmetric": {"fullname": "pyerrors.correlators.Corr.matrix_symmetric", "modulename": "pyerrors.correlators", "qualname": "Corr.matrix_symmetric", "kind": "function", "doc": "

    Symmetrizes the correlator matrices on every timeslice.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.GEVP": {"fullname": "pyerrors.correlators.Corr.GEVP", "modulename": "pyerrors.correlators", "qualname": "Corr.GEVP", "kind": "function", "doc": "

    Solves the generalized eigenvalue problem on the correlator matrix and returns the corresponding eigenvectors.

    \n\n

    The eigenvectors are sorted by descending eigenvalue; the zeroth eigenvector(s) correspond to the\nlargest eigenvalue(s). The eigenvector(s) for the individual states can be accessed via slicing.

    \n\n
    \n
    C.GEVP(t0=2)[0]  # Ground state vector(s)\nC.GEVP(t0=2)[:3]  # Vectors for the lowest three states\n
    \n
    \n\n
    Parameters
    \n\n
      \n
    • t0 (int):\nThe time t0 for the right hand side of the GEVP according to $G(t)v_i=\\lambda_i G(t_0)v_i$
    • \n
    • ts (int):\nfixed time $G(t_s)v_i=\\lambda_i G(t_0)v_i$ if sort=None.\nIf sort=\"Eigenvector\" it gives a reference point for the sorting method.
    • \n
    • sort (string):\nIf this argument is set, a list of self.T vectors per state is returned. If it is set to None, only one vector is returned.\n
        \n
      • \"Eigenvalue\": The eigenvector is chosen according to which eigenvalue it belongs individually on every timeslice.
      • \n
      • \"Eigenvector\": Use the method described in arXiv:2004.10472 to find the set of v(t) belonging to the state.\nThe reference state is identified by its eigenvalue at $t=t_s$.
      • \n
    • \n
    \n\n
    Other Parameters
    \n\n
      \n
    • state (int):\nReturns only the vector(s) for a specified state. The lowest state is zero.
    • \n
    \n", "signature": "(self, t0, ts=None, sort='Eigenvalue', **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.Eigenvalue": {"fullname": "pyerrors.correlators.Corr.Eigenvalue", "modulename": "pyerrors.correlators", "qualname": "Corr.Eigenvalue", "kind": "function", "doc": "

    Determines the eigenvalue of the GEVP by solving and projecting the correlator

    \n\n
    Parameters
    \n\n
      \n
    • state (int):\nThe state one is interested in ordered by energy. The lowest state is zero.
    • \n
    • All other parameters are identical to the ones of Corr.GEVP.
    • \n
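    \n\n

    A usage sketch (matrix_corr is an assumed, pre-existing correlator matrix):

    \n
    lambda_1 = matrix_corr.Eigenvalue(t0=2, state=1)  # projected eigenvalue of the first excited state\nE1_eff = lambda_1.m_eff()  # effective energy from the projected eigenvalue\n
    \n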
    \n", "signature": "(self, t0, ts=None, state=0, sort='Eigenvalue'):", "funcdef": "def"}, "pyerrors.correlators.Corr.Hankel": {"fullname": "pyerrors.correlators.Corr.Hankel", "modulename": "pyerrors.correlators", "qualname": "Corr.Hankel", "kind": "function", "doc": "

    Constructs an NxN Hankel matrix

    \n\n

    C(t)       C(t+1) ... C(t+n-1)\nC(t+1)     C(t+2) ... C(t+n)\n.................\nC(t+(n-1)) C(t+n) ... C(t+2(n-1))

    \n\n
    Parameters
    \n\n
      \n
    • N (int):\nDimension of the Hankel matrix
    • \n
    • periodic (bool, optional):\ndetermines whether the matrix is extended periodically
    • \n
    \n", "signature": "(self, N, periodic=False):", "funcdef": "def"}, "pyerrors.correlators.Corr.roll": {"fullname": "pyerrors.correlators.Corr.roll", "modulename": "pyerrors.correlators", "qualname": "Corr.roll", "kind": "function", "doc": "

    Periodically shift the correlator by dt timeslices

    \n\n
    Parameters
    \n\n
      \n
    • dt (int):\nnumber of timeslices
    • \n
    \n", "signature": "(self, dt):", "funcdef": "def"}, "pyerrors.correlators.Corr.reverse": {"fullname": "pyerrors.correlators.Corr.reverse", "modulename": "pyerrors.correlators", "qualname": "Corr.reverse", "kind": "function", "doc": "

    Reverse the time ordering of the Corr

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.thin": {"fullname": "pyerrors.correlators.Corr.thin", "modulename": "pyerrors.correlators", "qualname": "Corr.thin", "kind": "function", "doc": "

    Thin out a correlator to suppress correlations

    \n\n
    Parameters
    \n\n
      \n
    • spacing (int):\nKeep only every 'spacing'th entry of the correlator
    • \n
    • offset (int):\nOffset the equal spacing
    • \n
    \n", "signature": "(self, spacing=2, offset=0):", "funcdef": "def"}, "pyerrors.correlators.Corr.correlate": {"fullname": "pyerrors.correlators.Corr.correlate", "modulename": "pyerrors.correlators", "qualname": "Corr.correlate", "kind": "function", "doc": "

    Correlate the correlator with another correlator or Obs

    \n\n
    Parameters
    \n\n
      \n
    • partner (Obs or Corr):\npartner to correlate the correlator with.\nCan either be an Obs which is correlated with all entries of the\ncorrelator or a Corr of same length.
    • \n
    \n", "signature": "(self, partner):", "funcdef": "def"}, "pyerrors.correlators.Corr.reweight": {"fullname": "pyerrors.correlators.Corr.reweight", "modulename": "pyerrors.correlators", "qualname": "Corr.reweight", "kind": "function", "doc": "

    Reweight the correlator.

    \n\n
    Parameters
    \n\n
      \n
    • weight (Obs):\nReweighting factor. An Observable that has to be defined on a superset of the\nconfigurations in obs[i].idl for all i.
    • \n
    • all_configs (bool):\nif True, the reweighted observables are normalized by the average of\nthe reweighting factor on all configurations in weight.idl and not\non the configurations in obs[i].idl.
    • \n
    \n", "signature": "(self, weight, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.T_symmetry": {"fullname": "pyerrors.correlators.Corr.T_symmetry", "modulename": "pyerrors.correlators", "qualname": "Corr.T_symmetry", "kind": "function", "doc": "

    Return the time symmetry average of the correlator and its partner

    \n\n
    Parameters
    \n\n
      \n
    • partner (Corr):\nTime symmetry partner of the Corr
    • \n
    • parity (int):\nParity quantum number of the correlator, can be +1 or -1
    • \n
    \n", "signature": "(self, partner, parity=1):", "funcdef": "def"}, "pyerrors.correlators.Corr.deriv": {"fullname": "pyerrors.correlators.Corr.deriv", "modulename": "pyerrors.correlators", "qualname": "Corr.deriv", "kind": "function", "doc": "

    Return the first derivative of the correlator with respect to x0.

    \n\n
    Parameters
    \n\n
      \n
    • variant (str):\ndecides which definition of the finite differences derivative is used.\nAvailable choices: symmetric, forward, backward, improved, log; default: symmetric.
    • \n
    \n", "signature": "(self, variant='symmetric'):", "funcdef": "def"}, "pyerrors.correlators.Corr.second_deriv": {"fullname": "pyerrors.correlators.Corr.second_deriv", "modulename": "pyerrors.correlators", "qualname": "Corr.second_deriv", "kind": "function", "doc": "

    Return the second derivative of the correlator with respect to x0.

    \n\n
    Parameters
    \n\n
      \n
    • variant (str):\ndecides which definition of the finite differences derivative is used.\nAvailable choices:\n - symmetric (default)\n $$\tilde{\partial}^2_0 f(x_0) = f(x_0+1)-2f(x_0)+f(x_0-1)$$\n - big_symmetric\n $$\partial^2_0 f(x_0) = \frac{f(x_0+2)-2f(x_0)+f(x_0-2)}{4}$$\n - improved\n $$\partial^2_0 f(x_0) = \frac{-f(x_0+2) + 16 * f(x_0+1) - 30 * f(x_0) + 16 * f(x_0-1) - f(x_0-2)}{12}$$\n - log\n $$f(x) = \tilde{\partial}^2_0 log(f(x_0))+(\tilde{\partial}_0 log(f(x_0)))^2$$
    • \n
    \n", "signature": "(self, variant='symmetric'):", "funcdef": "def"}, "pyerrors.correlators.Corr.m_eff": {"fullname": "pyerrors.correlators.Corr.m_eff", "modulename": "pyerrors.correlators", "qualname": "Corr.m_eff", "kind": "function", "doc": "

    Returns the effective mass of the correlator as a correlator object

    \n\n
    Parameters
    \n\n
      \n
    • variant (str):\nlog : uses the standard effective mass log(C(t) / C(t+1))\ncosh, periodic : Use periodicity of the correlator by solving C(t) / C(t+1) = cosh(m * (t - T/2)) / cosh(m * (t + 1 - T/2)) for m.\nsinh : Use anti-periodicity of the correlator by solving C(t) / C(t+1) = sinh(m * (t - T/2)) / sinh(m * (t + 1 - T/2)) for m.\nSee, e.g., arXiv:1205.5380\narccosh : Uses the explicit form of the symmetrized correlator (not recommended)\nlogsym: uses the symmetric effective mass log(C(t-1) / C(t+1))/2
    • \n
    • guess (float):\nguess for the root finder, only relevant for the root variant
    • \n
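    \n\n

    A minimal usage sketch (corr is an assumed, pre-existing scalar Corr):

    \n
    m_log = corr.m_eff(variant='log')\nm_cosh = corr.m_eff(variant='cosh')  # takes the periodicity of the correlator into account\nm_cosh.gm()  # apply the gamma method before reading off errors\n
    \n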
    \n", "signature": "(self, variant='log', guess=1.0):", "funcdef": "def"}, "pyerrors.correlators.Corr.fit": {"fullname": "pyerrors.correlators.Corr.fit", "modulename": "pyerrors.correlators", "qualname": "Corr.fit", "kind": "function", "doc": "

    Fits a function to the data.

    \n\n
    Parameters
    \n\n
      \n
    • function (obj):\nfunction to fit to the data. See fits.least_squares for details.
    • \n
    • fitrange (list):\nTwo element list containing the timeslices on which the fit is supposed to start and stop.\nCaution: This range is inclusive as opposed to standard python indexing.\nfitrange=[4, 6] corresponds to the three entries 4, 5 and 6.\nIf not specified, self.prange or all timeslices are used.
    • \n
    • silent (bool):\nDecides whether output is printed to the standard output.
    • \n
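    \n\n

    A sketch of a single-exponential fit (the fit range and the name corr are illustrative):

    \n
    import autograd.numpy as anp\n\ndef single_exp(a, x):\n    return a[0] * anp.exp(-a[1] * x)\n\nfit_res = corr.fit(single_exp, fitrange=[5, 12])  # inclusive range\n
    \n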
    \n", "signature": "(self, function, fitrange=None, silent=False, **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.plateau": {"fullname": "pyerrors.correlators.Corr.plateau", "modulename": "pyerrors.correlators", "qualname": "Corr.plateau", "kind": "function", "doc": "

    Extract a plateau value from a Corr object

    \n\n
    Parameters
    \n\n
      \n
    • plateau_range (list):\nlist with two entries, indicating the first and the last timeslice\nof the plateau region.
    • \n
    • method (str):\nmethod to extract the plateau.\n 'fit' fits a constant to the plateau region\n 'avg', 'average' or 'mean' just average over the given timeslices.
    • \n
    • auto_gamma (bool):\napply gamma_method with default parameters to the Corr. Defaults to False.
    • \n
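    \n\n

    A sketch of both extraction methods (the plateau range is illustrative; corr is assumed to exist):

    \n
    p_fit = corr.plateau(plateau_range=[10, 18], method='fit')\np_avg = corr.plateau(plateau_range=[10, 18], method='avg', auto_gamma=True)\n
    \n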
    \n", "signature": "(self, plateau_range=None, method='fit', auto_gamma=False):", "funcdef": "def"}, "pyerrors.correlators.Corr.set_prange": {"fullname": "pyerrors.correlators.Corr.set_prange", "modulename": "pyerrors.correlators", "qualname": "Corr.set_prange", "kind": "function", "doc": "

    Sets the attribute prange of the Corr object.

    \n", "signature": "(self, prange):", "funcdef": "def"}, "pyerrors.correlators.Corr.show": {"fullname": "pyerrors.correlators.Corr.show", "modulename": "pyerrors.correlators", "qualname": "Corr.show", "kind": "function", "doc": "

    Plots the correlator using the tag of the correlator as label if available.

    \n\n
    Parameters
    \n\n
      \n
    • x_range (list):\nlist of two values, determining the range of the x-axis e.g. [4, 8].
    • \n
    • comp (Corr or list of Corr):\nCorrelator or list of correlators which are plotted for comparison.\nThe tags of these correlators are used as labels if available.
    • \n
    • logscale (bool):\nSets y-axis to logscale.
    • \n
    • plateau (Obs):\nPlateau value to be visualized in the figure.
    • \n
    • fit_res (Fit_result):\nFit_result object to be visualized.
    • \n
    • fit_key (str):\nKey for the fit function in Fit_result.fit_function (for combined fits).
    • \n
    • ylabel (str):\nLabel for the y-axis.
    • \n
    • save (str):\npath to file in which the figure should be saved.
    • \n
    • auto_gamma (bool):\nApply the gamma method with standard parameters to all correlators and plateau values before plotting.
    • \n
    • hide_sigma (float):\nHides data points starting from the first value that is consistent with zero within 'hide_sigma' standard errors.
    • \n
    • references (list):\nList of floating point values that are displayed as horizontal lines for reference.
    • \n
    • title (string):\nOptional title of the figure.
    • \n
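    \n\n

    A plotting sketch (the file name is illustrative; corr is assumed to exist):

    \n
    corr.show(x_range=[2, 20], logscale=True, save='correlator.pdf')\n
    \n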
    \n", "signature": "(\tself,\tx_range=None,\tcomp=None,\ty_range=None,\tlogscale=False,\tplateau=None,\tfit_res=None,\tfit_key=None,\tylabel=None,\tsave=None,\tauto_gamma=False,\thide_sigma=None,\treferences=None,\ttitle=None):", "funcdef": "def"}, "pyerrors.correlators.Corr.spaghetti_plot": {"fullname": "pyerrors.correlators.Corr.spaghetti_plot", "modulename": "pyerrors.correlators", "qualname": "Corr.spaghetti_plot", "kind": "function", "doc": "

    Produces a spaghetti plot of the correlator suited to monitor exceptional configurations.

    \n\n
    Parameters
    \n\n
      \n
    • logscale (bool):\nDetermines whether the scale of the y-axis is logarithmic or standard.
    • \n
    \n", "signature": "(self, logscale=True):", "funcdef": "def"}, "pyerrors.correlators.Corr.dump": {"fullname": "pyerrors.correlators.Corr.dump", "modulename": "pyerrors.correlators", "qualname": "Corr.dump", "kind": "function", "doc": "

    Dumps the Corr into a file of chosen type

    \n\n
    Parameters
    \n\n
      \n
    • filename (str):\nName of the file to be saved.
    • \n
    • datatype (str):\nFormat of the exported file. Supported formats include\n\"json.gz\" and \"pickle\"
    • \n
    • path (str):\nspecifies a custom path for the file (default '.')
    • \n
    \n", "signature": "(self, filename, datatype='json.gz', **kwargs):", "funcdef": "def"}, "pyerrors.correlators.Corr.print": {"fullname": "pyerrors.correlators.Corr.print", "modulename": "pyerrors.correlators", "qualname": "Corr.print", "kind": "function", "doc": "

    \n", "signature": "(self, print_range=None):", "funcdef": "def"}, "pyerrors.correlators.Corr.sqrt": {"fullname": "pyerrors.correlators.Corr.sqrt", "modulename": "pyerrors.correlators", "qualname": "Corr.sqrt", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.log": {"fullname": "pyerrors.correlators.Corr.log", "modulename": "pyerrors.correlators", "qualname": "Corr.log", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.exp": {"fullname": "pyerrors.correlators.Corr.exp", "modulename": "pyerrors.correlators", "qualname": "Corr.exp", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.sin": {"fullname": "pyerrors.correlators.Corr.sin", "modulename": "pyerrors.correlators", "qualname": "Corr.sin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.cos": {"fullname": "pyerrors.correlators.Corr.cos", "modulename": "pyerrors.correlators", "qualname": "Corr.cos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.tan": {"fullname": "pyerrors.correlators.Corr.tan", "modulename": "pyerrors.correlators", "qualname": "Corr.tan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.sinh": {"fullname": "pyerrors.correlators.Corr.sinh", "modulename": "pyerrors.correlators", "qualname": "Corr.sinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.cosh": {"fullname": "pyerrors.correlators.Corr.cosh", "modulename": "pyerrors.correlators", "qualname": "Corr.cosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.tanh": {"fullname": "pyerrors.correlators.Corr.tanh", "modulename": "pyerrors.correlators", "qualname": "Corr.tanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arcsin": {"fullname": "pyerrors.correlators.Corr.arcsin", "modulename": "pyerrors.correlators", "qualname": "Corr.arcsin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arccos": {"fullname": "pyerrors.correlators.Corr.arccos", "modulename": "pyerrors.correlators", "qualname": "Corr.arccos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arctan": {"fullname": "pyerrors.correlators.Corr.arctan", "modulename": "pyerrors.correlators", "qualname": "Corr.arctan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arcsinh": {"fullname": "pyerrors.correlators.Corr.arcsinh", "modulename": "pyerrors.correlators", "qualname": "Corr.arcsinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arccosh": {"fullname": "pyerrors.correlators.Corr.arccosh", "modulename": "pyerrors.correlators", "qualname": "Corr.arccosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.arctanh": {"fullname": "pyerrors.correlators.Corr.arctanh", "modulename": "pyerrors.correlators", "qualname": "Corr.arctanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.correlators.Corr.real": {"fullname": "pyerrors.correlators.Corr.real", "modulename": "pyerrors.correlators", "qualname": "Corr.real", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.imag": {"fullname": "pyerrors.correlators.Corr.imag", "modulename": "pyerrors.correlators", "qualname": "Corr.imag", "kind": "variable", "doc": "

    \n"}, "pyerrors.correlators.Corr.prune": {"fullname": "pyerrors.correlators.Corr.prune", "modulename": "pyerrors.correlators", "qualname": "Corr.prune", "kind": "function", "doc": "

    Project a large correlation matrix to the lowest states

    \n\n

    This method can be used to reduce the size of an (N x N) correlation matrix\nto (Ntrunc x Ntrunc) by solving a GEVP at very early times where the noise\nis still small.

    \n\n
    Parameters
    \n\n
      \n
    • Ntrunc (int):\nRank of the target matrix.
    • \n
    • tproj (int):\nTime where the eigenvectors are evaluated, corresponds to ts in the GEVP method.\nThe default value is 3.
    • \n
    • t0proj (int):\nTime where the correlation matrix is inverted. Choosing t0proj=1 is strongly\ndiscouraged for O(a) improved theories, since the correctness of the procedure\ncannot be guaranteed in this case. The default value is 2.
    • \n
    • basematrix (Corr):\nCorrelation matrix that is used to determine the eigenvectors of the\nlowest states based on a GEVP. basematrix is taken to be the Corr itself if\nit is not specified.
    • \n
    \n\n
    Notes
    \n\n

    We have the basematrix $C(t)$ and the target matrix $G(t)$. We start by solving\nthe GEVP $$C(t) v_n(t, t_0) = \lambda_n(t, t_0) C(t_0) v_n(t, t_0)$$ where $t \equiv t_\mathrm{proj}$\nand $t_0 \equiv t_{0, \mathrm{proj}}$. The target matrix is projected onto the subspace of the\nresulting eigenvectors $v_n, n=1,\dots,N_\mathrm{trunc}$ via\n$$G^\prime_{i, j}(t) = (v_i, G(t) v_j)$$. This allows one to reduce the size of a large\ncorrelation matrix and to remove some noise that is added by irrelevant operators.\nThis may allow one to use the GEVP on $G(t)$ at late times such that the theoretically motivated\nbound $t_0 \leq t/2$ holds, since the condition number of $G(t)$ is decreased compared to $C(t)$.
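    \n\n

    A sketch, assuming matrix_corr is an existing 6x6 correlator matrix:

    \n
    pruned = matrix_corr.prune(Ntrunc=3, tproj=3, t0proj=2)\nvecs = pruned.GEVP(t0=4)  # GEVP on the reduced 3x3 matrix\n
    \n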

    \n", "signature": "(self, Ntrunc, tproj=3, t0proj=2, basematrix=None):", "funcdef": "def"}, "pyerrors.correlators.Corr.N": {"fullname": "pyerrors.correlators.Corr.N", "modulename": "pyerrors.correlators", "qualname": "Corr.N", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs": {"fullname": "pyerrors.covobs", "modulename": "pyerrors.covobs", "kind": "module", "doc": "

    \n"}, "pyerrors.covobs.Covobs": {"fullname": "pyerrors.covobs.Covobs", "modulename": "pyerrors.covobs", "qualname": "Covobs", "kind": "class", "doc": "

    \n"}, "pyerrors.covobs.Covobs.__init__": {"fullname": "pyerrors.covobs.Covobs.__init__", "modulename": "pyerrors.covobs", "qualname": "Covobs.__init__", "kind": "function", "doc": "

    Initialize Covobs object.

    \n\n
    Parameters
    \n\n
      \n
    • mean (float):\nMean value of the new Obs
    • \n
    • cov (list or array):\n2d Covariance matrix or 1d diagonal entries
    • \n
    • name (str):\nidentifier for the covariance matrix
    • \n
    • pos (int):\nPosition of the variance belonging to mean in cov.\nIs taken to be 1 if cov is 0-dimensional
    • \n
    • grad (list or array):\nGradient of the Covobs wrt. the means belonging to cov.
    • \n
    \n", "signature": "(mean, cov, name, pos=None, grad=None)"}, "pyerrors.covobs.Covobs.name": {"fullname": "pyerrors.covobs.Covobs.name", "modulename": "pyerrors.covobs", "qualname": "Covobs.name", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs.Covobs.value": {"fullname": "pyerrors.covobs.Covobs.value", "modulename": "pyerrors.covobs", "qualname": "Covobs.value", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs.Covobs.errsq": {"fullname": "pyerrors.covobs.Covobs.errsq", "modulename": "pyerrors.covobs", "qualname": "Covobs.errsq", "kind": "function", "doc": "

    Return the variance (= square of the error) of the Covobs

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.covobs.Covobs.cov": {"fullname": "pyerrors.covobs.Covobs.cov", "modulename": "pyerrors.covobs", "qualname": "Covobs.cov", "kind": "variable", "doc": "

    \n"}, "pyerrors.covobs.Covobs.grad": {"fullname": "pyerrors.covobs.Covobs.grad", "modulename": "pyerrors.covobs", "qualname": "Covobs.grad", "kind": "variable", "doc": "

    \n"}, "pyerrors.dirac": {"fullname": "pyerrors.dirac", "modulename": "pyerrors.dirac", "kind": "module", "doc": "

    \n"}, "pyerrors.dirac.gammaX": {"fullname": "pyerrors.dirac.gammaX", "modulename": "pyerrors.dirac", "qualname": "gammaX", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+1.j],\n [ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, -0.-1.j, 0.+0.j, 0.+0.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gammaY": {"fullname": "pyerrors.dirac.gammaY", "modulename": "pyerrors.dirac", "qualname": "gammaY", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 0.+0.j, 0.+0.j, 0.+0.j, -1.+0.j],\n [ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [-1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gammaZ": {"fullname": "pyerrors.dirac.gammaZ", "modulename": "pyerrors.dirac", "qualname": "gammaZ", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, -0.-1.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+1.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gammaT": {"fullname": "pyerrors.dirac.gammaT", "modulename": "pyerrors.dirac", "qualname": "gammaT", "kind": "variable", "doc": "

    \n", "default_value": "array([[0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n [1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]])"}, "pyerrors.dirac.gamma": {"fullname": "pyerrors.dirac.gamma", "modulename": "pyerrors.dirac", "qualname": "gamma", "kind": "variable", "doc": "

    \n", "default_value": "array([[[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+1.j],\n [ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, -0.-1.j, 0.+0.j, 0.+0.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j]],\n\n [[ 0.+0.j, 0.+0.j, 0.+0.j, -1.+0.j],\n [ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [-1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]],\n\n [[ 0.+0.j, 0.+0.j, 0.+1.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, -0.-1.j],\n [-0.-1.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+1.j, 0.+0.j, 0.+0.j]],\n\n [[ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n [ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]]])"}, "pyerrors.dirac.gamma5": {"fullname": "pyerrors.dirac.gamma5", "modulename": "pyerrors.dirac", "qualname": "gamma5", "kind": "variable", "doc": "

    \n", "default_value": "array([[ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, -1.+0.j, 0.+0.j],\n [ 0.+0.j, 0.+0.j, 0.+0.j, -1.+0.j]])"}, "pyerrors.dirac.identity": {"fullname": "pyerrors.dirac.identity", "modulename": "pyerrors.dirac", "qualname": "identity", "kind": "variable", "doc": "

    \n", "default_value": "array([[1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n [0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j]])"}, "pyerrors.dirac.epsilon_tensor": {"fullname": "pyerrors.dirac.epsilon_tensor", "modulename": "pyerrors.dirac", "qualname": "epsilon_tensor", "kind": "function", "doc": "

    Rank-3 epsilon tensor

    \n\n

    Based on https://codegolf.stackexchange.com/a/160375

    \n\n
    Returns
    \n\n
      \n
    • elem (int):\nElement (i,j,k) of the epsilon tensor of rank 3
    • \n
    \n", "signature": "(i, j, k):", "funcdef": "def"}, "pyerrors.dirac.epsilon_tensor_rank4": {"fullname": "pyerrors.dirac.epsilon_tensor_rank4", "modulename": "pyerrors.dirac", "qualname": "epsilon_tensor_rank4", "kind": "function", "doc": "

    Rank-4 epsilon tensor

    \n\n

    Extension of https://codegolf.stackexchange.com/a/160375

    \n\n
    Returns
    \n\n
      \n
    • elem (int):\nElement (i,j,k,o) of the epsilon tensor of rank 4
    • \n
    \n", "signature": "(i, j, k, o):", "funcdef": "def"}, "pyerrors.dirac.Grid_gamma": {"fullname": "pyerrors.dirac.Grid_gamma", "modulename": "pyerrors.dirac", "qualname": "Grid_gamma", "kind": "function", "doc": "

    Returns gamma matrix in Grid labeling.

    \n", "signature": "(gamma_tag):", "funcdef": "def"}, "pyerrors.fits": {"fullname": "pyerrors.fits", "modulename": "pyerrors.fits", "kind": "module", "doc": "

    \n"}, "pyerrors.fits.Fit_result": {"fullname": "pyerrors.fits.Fit_result", "modulename": "pyerrors.fits", "qualname": "Fit_result", "kind": "class", "doc": "

    Represents fit results.

    \n\n
    Attributes
    \n\n
      \n
    • fit_parameters (list):\nresults for the individual fit parameters,\nalso accessible via indices.
    • \n
    • chisquare_by_dof (float):\nreduced chisquare.
    • \n
    • p_value (float):\np-value of the fit
    • \n
    • t2_p_value (float):\nHotelling t-squared p-value for correlated fits.
    • \n
    \n", "bases": "collections.abc.Sequence"}, "pyerrors.fits.Fit_result.fit_parameters": {"fullname": "pyerrors.fits.Fit_result.fit_parameters", "modulename": "pyerrors.fits", "qualname": "Fit_result.fit_parameters", "kind": "variable", "doc": "

    \n"}, "pyerrors.fits.Fit_result.gamma_method": {"fullname": "pyerrors.fits.Fit_result.gamma_method", "modulename": "pyerrors.fits", "qualname": "Fit_result.gamma_method", "kind": "function", "doc": "

    Apply the gamma method to all fit parameters

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.fits.Fit_result.gm": {"fullname": "pyerrors.fits.Fit_result.gm", "modulename": "pyerrors.fits", "qualname": "Fit_result.gm", "kind": "function", "doc": "

    Apply the gamma method to all fit parameters

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.fits.least_squares": {"fullname": "pyerrors.fits.least_squares", "modulename": "pyerrors.fits", "qualname": "least_squares", "kind": "function", "doc": "

    Performs a non-linear fit to y = func(x).

    \n\n
    Parameters
    \n\n
      \n
    • For an uncombined fit:
    • \n
    • x (list):\nlist of floats.
    • \n
    • y (list):\nlist of Obs.
    • \n
    • func (object):\nfit function, has to be of the form

      \n\n
      \n
      import autograd.numpy as anp\n\ndef func(a, x):\n   return a[0] + a[1] * x + a[2] * anp.sinh(x)\n
      \n
      \n\n

      For multiple x values func can be of the form

      \n\n
      \n
      def func(a, x):\n   (x1, x2) = x\n   return a[0] * x1 ** 2 + a[1] * x2\n
      \n
      \n\n

      It is important that all numpy functions refer to autograd.numpy, otherwise the differentiation\nwill not work.

    • \n
    • OR For a combined fit:
    • \n
    • x (dict):\ndict of lists.
    • \n
    • y (dict):\ndict of lists of Obs.
    • \n
    • funcs (dict):\ndict of fit functions, which have to be of the form (here a[0] is the common fit parameter)\nimport autograd.numpy as anp\nfuncs = {\"a\": func_a,\n \"b\": func_b}

      \n\n

      def func_a(a, x):\n return a[1] * anp.exp(-a[0] * x)

      \n\n

      def func_b(a, x):\n return a[2] * anp.exp(-a[0] * x)

      \n\n

      It is important that all numpy functions refer to autograd.numpy, otherwise the differentiation\nwill not work.

    • \n
    • priors (dict or list, optional):\npriors can either be a dictionary with integer keys and the corresponding priors as values or\na list with an entry for every parameter in the fit. The entries can either be\nObs (e.g. results from a previous fit) or strings containing a value and an error formatted like\n0.548(23), 500(40) or 0.5(0.4)
    • \n
    • silent (bool, optional):\nIf true all output to the console is omitted (default False).
    • \n
    • initial_guess (list):\ncan provide an initial guess for the input parameters. Relevant for\nnon-linear fits with many parameters. In case of correlated fits the guess is used to perform\nan uncorrelated fit which then serves as guess for the correlated fit.
    • \n
    • method (str, optional):\ncan be used to choose an alternative method for the minimization of chisquare.\nThe possible methods are the ones which can be used for scipy.optimize.minimize and\nmigrad of iminuit. If no method is specified, Levenberg-Marquardt is used.\nReliable alternatives are migrad, Powell and Nelder-Mead.
    • \n
    • tol (float, optional):\ncan be used (only for combined fits and methods other than Levenberg-Marquardt) to set the tolerance for convergence.\nLarger values speed up convergence at the cost of a larger error on the fitted parameters (and possibly\ninvalid estimates for parameter uncertainties); smaller values give more accurate parameter values.\nThe stopping criterion depends on the method, e.g. migrad: edm_max = 0.002 * tol * errordef (EDM criterion: edm < edm_max)
    • \n
    • correlated_fit (bool):\nIf True, use the full inverse covariance matrix in the definition of the chisquare cost function.\nFor details about how the covariance matrix is estimated see pyerrors.obs.covariance.\nIn practice the correlation matrix is Cholesky decomposed and inverted (instead of the covariance matrix).\nThis procedure should be numerically more stable as the correlation matrix is typically better conditioned (Jacobi preconditioning).
    • \n
    • expected_chisquare (bool):\nIf True estimates the expected chisquare which is\ncorrected by effects caused by correlated input data (default False).
    • \n
    • resplot (bool):\nIf True, a plot which displays fit, data and residuals is generated (default False).
    • \n
    • qqplot (bool):\nIf True, a quantile-quantile plot of the fit result is generated (default False).
    • \n
    • num_grad (bool):\nUse numerical differentiation instead of automatic differentiation to perform the error propagation (default False).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • output (Fit_result):\nParameters and information on the fitted result.
    • \n
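    \n\n

    A minimal uncombined-fit sketch (x and y are assumed to exist; y is a list of Obs):

    \n
    import pyerrors as pe\nimport autograd.numpy as anp\n\ndef func(a, x):\n    return a[0] + a[1] * anp.exp(-a[2] * x)\n\nfit_res = pe.fits.least_squares(x, y, func)\nfit_res.gamma_method()\nprint(fit_res)\n
    \n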
    \n", "signature": "(x, y, func, priors=None, silent=False, **kwargs):", "funcdef": "def"}, "pyerrors.fits.total_least_squares": {"fullname": "pyerrors.fits.total_least_squares", "modulename": "pyerrors.fits", "qualname": "total_least_squares", "kind": "function", "doc": "

    Performs a non-linear fit to y = func(x) and returns a list of Obs corresponding to the fit parameters.

    \n\n
    Parameters
    \n\n
      \n
    • x (list):\nlist of Obs, or a tuple of lists of Obs
    • \n
    • y (list):\nlist of Obs. The dvalues of the Obs are used as x- and yerror for the fit.
    • \n
    • func (object):\nfunc has to be of the form

      \n\n
      \n
      import autograd.numpy as anp\n\ndef func(a, x):\n   return a[0] + a[1] * x + a[2] * anp.sinh(x)\n
      \n
      \n\n

      For multiple x values func can be of the form

      \n\n
      \n
      def func(a, x):\n   (x1, x2) = x\n   return a[0] * x1 ** 2 + a[1] * x2\n
      \n
      \n\n

      It is important that all numpy functions refer to autograd.numpy, otherwise the differentiation\nwill not work.

    • \n
    • silent (bool, optional):\nIf true all output to the console is omitted (default False).
    • \n
    • initial_guess (list):\ncan provide an initial guess for the input parameters. Relevant for non-linear\nfits with many parameters.
    • \n
    • expected_chisquare (bool):\nIf true prints the expected chisquare which is\ncorrected by effects caused by correlated input data.\nThis can take a while as the full correlation matrix\nhas to be calculated (default False).
    • \n
    • num_grad (bool):\nUse numerical differentiation instead of automatic differentiation to perform the error propagation (default False).
    • \n
    \n\n
    Notes
    \n\n

    Based on the orthogonal distance regression module of scipy.

    \n\n
    Returns
    \n\n
      \n
    • output (Fit_result):\nParameters and information on the fitted result.
    • \n
    \n", "signature": "(x, y, func, silent=False, **kwargs):", "funcdef": "def"}, "pyerrors.fits.fit_lin": {"fullname": "pyerrors.fits.fit_lin", "modulename": "pyerrors.fits", "qualname": "fit_lin", "kind": "function", "doc": "

    Performs a linear fit to y = n + m * x and returns two Obs n, m.

    \n\n
    Parameters
    \n\n
      \n
    • x (list):\nCan either be a list of floats in which case no xerror is assumed, or\na list of Obs, where the dvalues of the Obs are used as xerror for the fit.
    • \n
    • y (list):\nList of Obs, the dvalues of the Obs are used as yerror for the fit.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • fit_parameters (list[Obs]):\nList of fitted observables.
    • \n
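    \n\n

    A usage sketch (x is an assumed list of floats, y an assumed list of Obs):

    \n
    import pyerrors as pe\n\nn, m = pe.fits.fit_lin(x, y)  # intercept n and slope m as Obs\n
    \n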
    \n", "signature": "(x, y, **kwargs):", "funcdef": "def"}, "pyerrors.fits.qqplot": {"fullname": "pyerrors.fits.qqplot", "modulename": "pyerrors.fits", "qualname": "qqplot", "kind": "function", "doc": "

    Generates a quantile-quantile plot of the fit result which can be used to\n check if the residuals of the fit are Gaussian distributed.

    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(x, o_y, func, p, title=''):", "funcdef": "def"}, "pyerrors.fits.residual_plot": {"fullname": "pyerrors.fits.residual_plot", "modulename": "pyerrors.fits", "qualname": "residual_plot", "kind": "function", "doc": "

    Generates a plot which compares the fit to the data and displays the corresponding residuals

    \n\n

    For uncorrelated data the residuals are expected to be distributed ~N(0,1).

    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(x, y, func, fit_res, title=''):", "funcdef": "def"}, "pyerrors.fits.error_band": {"fullname": "pyerrors.fits.error_band", "modulename": "pyerrors.fits", "qualname": "error_band", "kind": "function", "doc": "

    Calculate the error band for an array of sample values x, for given fit function func with optimized parameters beta.

    \n\n
    Returns
    \n\n
      \n
    • err (np.array(Obs)):\nError band for an array of sample values x
    • \n
    \n", "signature": "(x, func, beta):", "funcdef": "def"}, "pyerrors.fits.ks_test": {"fullname": "pyerrors.fits.ks_test", "modulename": "pyerrors.fits", "qualname": "ks_test", "kind": "function", "doc": "

    Performs a Kolmogorov\u2013Smirnov test for the p-values of all fit objects.

    \n\n
    Parameters
    \n\n
      \n
    • objects (list):\nList of fit results to include in the analysis (optional).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(objects=None):", "funcdef": "def"}, "pyerrors.input": {"fullname": "pyerrors.input", "modulename": "pyerrors.input", "kind": "module", "doc": "

    pyerrors includes an input submodule in which input routines and parsers for the output of various numerical programs are contained.

    \n\n

    Jackknife samples

    \n\n

    For comparison with other analysis workflows pyerrors can also generate jackknife samples from an Obs object or import jackknife samples into an Obs object.\nSee pyerrors.obs.Obs.export_jackknife and pyerrors.obs.import_jackknife for details.
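    \n\n

    A round-trip sketch (my_obs is an assumed Obs defined on a single ensemble; the ensemble name is illustrative):

    \n
    import pyerrors as pe\n\njacks = my_obs.export_jackknife()  # numpy array of jackknife samples\nnew_obs = pe.obs.import_jackknife(jacks, 'ensemble_A')\n
    \n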

    \n"}, "pyerrors.input.bdio": {"fullname": "pyerrors.input.bdio", "modulename": "pyerrors.input.bdio", "kind": "module", "doc": "

    \n"}, "pyerrors.input.bdio.read_ADerrors": {"fullname": "pyerrors.input.bdio.read_ADerrors", "modulename": "pyerrors.input.bdio", "qualname": "read_ADerrors", "kind": "function", "doc": "

    Extract generic MCMC data from a bdio file

    \n\n

    read_ADerrors requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path -- path to the bdio file
    • \n
    • bdio_path -- path to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (List[Obs]):\nExtracted data
    • \n
    \n", "signature": "(file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.bdio.write_ADerrors": {"fullname": "pyerrors.input.bdio.write_ADerrors", "modulename": "pyerrors.input.bdio", "qualname": "write_ADerrors", "kind": "function", "doc": "

    Write Obs to a bdio file according to ADerrors conventions

    \n\n

    write_ADerrors requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path -- path to the bdio file
    • \n
    • bdio_path -- path to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    \n\n
    Returns
    \n\n
      \n
    • success (int):\nreturns 0 if successful
    • \n
    \n", "signature": "(obs_list, file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.bdio.read_mesons": {"fullname": "pyerrors.input.bdio.read_mesons", "modulename": "pyerrors.input.bdio", "qualname": "read_mesons", "kind": "function", "doc": "

    Extract mesons data from a bdio file and return it as a dictionary

    \n\n

    The dictionary can be accessed with a tuple consisting of (type, source_position, kappa1, kappa2)

    \n\n

    read_mesons requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path (str):\npath to the bdio file
    • \n
    • bdio_path (str):\npath to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    • start (int):\nThe first configuration to be read (default 1)
    • \n
    • stop (int):\nThe last configuration to be read (default None)
    • \n
    • step (int):\nFixed step size between two measurements (default 1)
    • \n
    • alternative_ensemble_name (str):\nManually overwrite ensemble name
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (dict):\nExtracted meson data
    • \n
    \n", "signature": "(file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.bdio.read_dSdm": {"fullname": "pyerrors.input.bdio.read_dSdm", "modulename": "pyerrors.input.bdio", "qualname": "read_dSdm", "kind": "function", "doc": "

    Extract dSdm data from a bdio file and return it as a dictionary

    \n\n

    The dictionary can be accessed with a tuple consisting of (type, kappa)

    \n\n

    read_dSdm requires bdio to be compiled into a shared library. This can be achieved by\nadding the flag -fPIC to CC and changing the all target to

    \n\n

    all: bdio.o $(LIBDIR)\n gcc -shared -Wl,-soname,libbdio.so -o $(BUILDDIR)/libbdio.so $(BUILDDIR)/bdio.o\n cp $(BUILDDIR)/libbdio.so $(LIBDIR)/

    \n\n
    Parameters
    \n\n
      \n
    • file_path (str):\npath to the bdio file
    • \n
    • bdio_path (str):\npath to the shared bdio library libbdio.so (default ./libbdio.so)
    • \n
    • start (int):\nThe first configuration to be read (default 1)
    • \n
    • stop (int):\nThe last configuration to be read (default None)
    • \n
    • step (int):\nFixed step size between two measurements (default 1)
    • \n
    • alternative_ensemble_name (str):\nManually overwrite ensemble name
    • \n
    \n", "signature": "(file_path, bdio_path='./libbdio.so', **kwargs):", "funcdef": "def"}, "pyerrors.input.dobs": {"fullname": "pyerrors.input.dobs", "modulename": "pyerrors.input.dobs", "kind": "module", "doc": "

    \n"}, "pyerrors.input.dobs.create_pobs_string": {"fullname": "pyerrors.input.dobs.create_pobs_string", "modulename": "pyerrors.input.dobs", "qualname": "create_pobs_string", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to an xml string\naccording to the Zeuthen pobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure have to be defined on the same ensemble.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • enstag (str):\nEnstag that is written to pobs. If None, the ensemble name is used.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • xml_str (str):\nXML formatted string of the input data
    • \n
    \n", "signature": "(obsl, name, spec='', origin='', symbol=[], enstag=None):", "funcdef": "def"}, "pyerrors.input.dobs.write_pobs": {"fullname": "pyerrors.input.dobs.write_pobs", "modulename": "pyerrors.input.dobs", "qualname": "write_pobs", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to a .xml.gz file\naccording to the Zeuthen pobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure have to be defined on the same ensemble.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • enstag (str):\nEnstag that is written to pobs. If None, the ensemble name is used.
    • \n
    • gz (bool):\nIf True, the output is a gzipped xml. If False, the output is an xml file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
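    \n\n

    A write/read sketch (obs1 and obs2 are assumed Obs defined on the same ensemble; file and observable names are illustrative):

    \n
    import pyerrors as pe\n\npe.input.dobs.write_pobs([obs1, obs2], 'correlator.xml.gz', 'my_observable')\nobs_list = pe.input.dobs.read_pobs('correlator.xml.gz')\n
    \n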
    \n", "signature": "(\tobsl,\tfname,\tname,\tspec='',\torigin='',\tsymbol=[],\tenstag=None,\tgz=True):", "funcdef": "def"}, "pyerrors.input.dobs.read_pobs": {"fullname": "pyerrors.input.dobs.read_pobs", "modulename": "pyerrors.input.dobs", "qualname": "read_pobs", "kind": "function", "doc": "

    Import a list of Obs from an xml.gz file in the Zeuthen pobs format.

    \n\n

    Tags are not written or recovered automatically.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned as list.
    • \n
    • separator_insertion (str or int):\nstr: replace all occurrences of \"separator_insertion\" within the replica names\nby \"|%s\" % (separator_insertion) when constructing the names of the replica.\nint: Insert the separator \"|\" at the position given by separator_insertion.\nNone (default): Replica names remain unchanged.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (list[Obs]):\nImported data
    • \n
    • or
    • \n
    • res (dict):\nImported data and meta-data
    • \n
    \n", "signature": "(fname, full_output=False, gz=True, separator_insertion=None):", "funcdef": "def"}, "pyerrors.input.dobs.import_dobs_string": {"fullname": "pyerrors.input.dobs.import_dobs_string", "modulename": "pyerrors.input.dobs", "qualname": "import_dobs_string", "kind": "function", "doc": "

    Import a list of Obs from a string in the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically.

    \n\n
    Parameters
    \n\n
      \n
    • content (str):\nXML string containing the data
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned as list.
    • \n
    • separator_insertion (str, int or bool):\nstr: replace all occurrences of \"separator_insertion\" within the replica names\nby \"|%s\" % (separator_insertion) when constructing the names of the replica.\nint: Insert the separator \"|\" at the position given by separator_insertion.\nTrue (default): separator \"|\" is inserted after len(ensname), assuming that the\nensemble name is a prefix to the replica name.\nNone or False: No separator is inserted.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (list[Obs]):\nImported data
    • \n
    • or
    • \n
    • res (dict):\nImported data and meta-data
    • \n
    \n", "signature": "(content, full_output=False, separator_insertion=True):", "funcdef": "def"}, "pyerrors.input.dobs.read_dobs": {"fullname": "pyerrors.input.dobs.read_dobs", "modulename": "pyerrors.input.dobs", "qualname": "read_dobs", "kind": "function", "doc": "

    Import a list of Obs from an xml.gz file in the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned as list.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes XML file.
    • \n
    • separator_insertion (str, int or bool):\nstr: replace all occurrences of \"separator_insertion\" within the replica names\nby \"|%s\" % (separator_insertion) when constructing the names of the replica.\nint: Insert the separator \"|\" at the position given by separator_insertion.\nTrue (default): separator \"|\" is inserted after len(ensname), assuming that the\nensemble name is a prefix to the replica name.\nNone or False: No separator is inserted.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (list[Obs]):\nImported data
    • \n
    • or
    • \n
    • res (dict):\nImported data and meta-data
    • \n
    \n", "signature": "(fname, full_output=False, gz=True, separator_insertion=True):", "funcdef": "def"}, "pyerrors.input.dobs.create_dobs_string": {"fullname": "pyerrors.input.dobs.create_dobs_string", "modulename": "pyerrors.input.dobs", "qualname": "create_dobs_string", "kind": "function", "doc": "

    Generate the string for the export of a list of Obs or structures containing Obs\nto a .xml.gz file according to the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure do not have to be defined on the same set of configurations,\nbut the storage requirement is increased, if this is not the case.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • who (str):\nProvide the name of the person that exports the data.
    • \n
    • enstags (dict):\nProvide alternative enstag for ensembles in the form enstags = {ename: enstag}\nOtherwise, the ensemble name is used.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • xml_str (str):\nXML string generated from the data
    • \n
    \n", "signature": "(\tobsl,\tname,\tspec='dobs v1.0',\torigin='',\tsymbol=[],\twho=None,\tenstags=None):", "funcdef": "def"}, "pyerrors.input.dobs.write_dobs": {"fullname": "pyerrors.input.dobs.write_dobs", "modulename": "pyerrors.input.dobs", "qualname": "write_dobs", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to a .xml.gz file\naccording to the Zeuthen dobs format.

    \n\n

    Tags are not written or recovered automatically. The separator | is removed from the replica names.

    \n\n
    Parameters
    \n\n
      \n
    • obsl (list):\nList of Obs that will be exported.\nThe Obs inside a structure do not have to be defined on the same set of configurations,\nbut the storage requirement is increased, if this is not the case.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • name (str):\nThe name of the observable.
    • \n
    • spec (str):\nOptional string that describes the contents of the file.
    • \n
    • origin (str):\nSpecify where the data has its origin.
    • \n
    • symbol (list):\nA list of symbols that describe the observables to be written. May be empty.
    • \n
    • who (str):\nProvide the name of the person that exports the data.
    • \n
    • enstags (dict):\nProvide alternative enstag for ensembles in the form enstags = {ename: enstag}\nOtherwise, the ensemble name is used.
    • \n
    • gz (bool):\nIf True, the output is a gzipped XML. If False, the output is a XML file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(\tobsl,\tfname,\tname,\tspec='dobs v1.0',\torigin='',\tsymbol=[],\twho=None,\tenstags=None,\tgz=True):", "funcdef": "def"}, "pyerrors.input.hadrons": {"fullname": "pyerrors.input.hadrons", "modulename": "pyerrors.input.hadrons", "kind": "module", "doc": "

    \n"}, "pyerrors.input.hadrons.read_meson_hd5": {"fullname": "pyerrors.input.hadrons.read_meson_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_meson_hd5", "kind": "function", "doc": "

    Read hadrons meson hdf5 file and extract the meson labeled 'meson'

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • meson (str):\nlabel of the meson to be extracted, standard value meson_0 which\ncorresponds to the pseudoscalar pseudoscalar two-point function.
    • \n
    • gammas (tuple of strings):\nInstead of a meson label one can also provide a tuple of two strings\nindicating the gamma matrices at source and sink.\n(\"Gamma5\", \"Gamma5\") corresponds to the pseudoscalar pseudoscalar\ntwo-point function. The gammas argument dominates over meson.
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • corr (Corr):\nCorrelator of the source sink combination in question.
    • \n
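    \n\n

    A reading sketch (path, filestem and ensemble id are illustrative):

    \n
    import pyerrors as pe\n\ncorr = pe.input.hadrons.read_meson_hd5('./data', 'meson_prop', 'A653_r0', gammas=('Gamma5', 'Gamma5'))\n
    \n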
    \n", "signature": "(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):", "funcdef": "def"}, "pyerrors.input.hadrons.extract_t0_hd5": {"fullname": "pyerrors.input.hadrons.extract_t0_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "extract_t0_hd5", "kind": "function", "doc": "

    Read hadrons FlowObservables hdf5 file and extract t0

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • obs (str):\nlabel of the observable from which t0 should be extracted.\nOptions: 'Clover energy density' and 'Plaquette energy density'
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit. (Default: 5)
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of t0 is shown together with the data.
    • \n
    \n", "signature": "(\tpath,\tfilestem,\tens_id,\tobs='Clover energy density',\tfit_range=5,\tidl=None,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"fullname": "pyerrors.input.hadrons.read_DistillationContraction_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_DistillationContraction_hd5", "kind": "function", "doc": "

    Read hadrons DistillationContraction hdf5 files in given directory structure

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the directories to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • diagrams (list):\nList of strings of the diagrams to extract, e.g. [\"direct\", \"box\", \"cross\"].
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (dict):\nextracted DistillationContraction data
    • \n
    \n", "signature": "(path, ens_id, diagrams=['direct'], idl=None):", "funcdef": "def"}, "pyerrors.input.hadrons.Npr_matrix": {"fullname": "pyerrors.input.hadrons.Npr_matrix", "modulename": "pyerrors.input.hadrons", "qualname": "Npr_matrix", "kind": "class", "doc": "

    ndarray(shape, dtype=float, buffer=None, offset=0,\n strides=None, order=None)

    \n\n

    An array object represents a multidimensional, homogeneous array\nof fixed-size items. An associated data-type object describes the\nformat of each element in the array (its byte-order, how many bytes it\noccupies in memory, whether it is an integer, a floating point number,\nor something else, etc.)

    \n\n

    Arrays should be constructed using array, zeros or empty (refer\nto the See Also section below). The parameters given here refer to\na low-level method (ndarray(...)) for instantiating an array.

    \n\n

    For more information, refer to the numpy module and examine the\nmethods and attributes of an array.

    \n\n
    Parameters
    \n\n
      \n
    • (for the __new__ method; see Notes below)
    • \n
    • shape (tuple of ints):\nShape of created array.
    • \n
    • dtype (data-type, optional):\nAny object that can be interpreted as a numpy data type.
    • \n
    • buffer (object exposing buffer interface, optional):\nUsed to fill the array with data.
    • \n
    • offset (int, optional):\nOffset of array data in buffer.
    • \n
    • strides (tuple of ints, optional):\nStrides of data in memory.
    • \n
    • order ({'C', 'F'}, optional):\nRow-major (C-style) or column-major (Fortran-style) order.
    • \n
    \n\n
    Attributes
    \n\n
      \n
    • T (ndarray):\nTranspose of the array.
    • \n
    • data (buffer):\nThe array's elements, in memory.
    • \n
    • dtype (dtype object):\nDescribes the format of the elements in the array.
    • \n
    • flags (dict):\nDictionary containing information related to memory use, e.g.,\n'C_CONTIGUOUS', 'OWNDATA', 'WRITEABLE', etc.
    • \n
    • flat (numpy.flatiter object):\nFlattened version of the array as an iterator. The iterator\nallows assignments, e.g., x.flat = 3 (See ndarray.flat for\nassignment examples; TODO).
    • \n
    • imag (ndarray):\nImaginary part of the array.
    • \n
    • real (ndarray):\nReal part of the array.
    • \n
    • size (int):\nNumber of elements in the array.
    • \n
    • itemsize (int):\nThe memory use of each array element in bytes.
    • \n
    • nbytes (int):\nThe total number of bytes required to store the array data,\ni.e., itemsize * size.
    • \n
    • ndim (int):\nThe array's number of dimensions.
    • \n
    • shape (tuple of ints):\nShape of the array.
    • \n
    • strides (tuple of ints):\nThe step-size required to move from one element to the next in\nmemory. For example, a contiguous (3, 4) array of type\nint16 in C-order has strides (8, 2). This implies that\nto move from element to element in memory requires jumps of 2 bytes.\nTo move from row-to-row, one needs to jump 8 bytes at a time\n(2 * 4).
    • \n
    • ctypes (ctypes object):\nClass containing properties of the array needed for interaction\nwith ctypes.
    • \n
    • base (ndarray):\nIf the array is a view into another array, that array is its base\n(unless that array is also a view). The base array is where the\narray data is actually stored.
    • \n
    \n\n
    See Also
    \n\n

    array: Construct an array.
    \nzeros: Create an array, each element of which is zero.
    \nempty: Create an array, but leave its allocated memory unchanged (i.e.,\nit contains \"garbage\").
    \ndtype: Create a data-type.
    \nnumpy.typing.NDArray: An ndarray alias :term:generic <generic type>\nw.r.t. its dtype.type <numpy.dtype.type>.

    \n\n
    Notes
    \n\n

    There are two modes of creating an array using __new__:

    \n\n
      \n
    1. If buffer is None, then only shape, dtype, and order\nare used.
    2. \n
    3. If buffer is an object exposing the buffer interface, then\nall keywords are interpreted.
    4. \n
    \n\n

    No __init__ method is needed because the array is fully initialized\nafter the __new__ method.

    \n\n
    Examples
    \n\n

    These examples illustrate the low-level ndarray constructor. Refer\nto the See Also section above for easier ways of constructing an\nndarray.

    \n\n

    First mode, buffer is None:

    \n\n
    \n
    >>> np.ndarray(shape=(2,2), dtype=float, order='F')\narray([[0.0e+000, 0.0e+000], # random\n       [     nan, 2.5e-323]])\n
    \n
    \n\n

    Second mode:

    \n\n
    \n
    >>> np.ndarray((2,), buffer=np.array([1,2,3]),\n...            offset=np.int_().itemsize,\n...            dtype=int) # offset = 1*itemsize, i.e. skip first element\narray([2, 3])\n
    \n
    \n", "bases": "numpy.ndarray"}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"fullname": "pyerrors.input.hadrons.Npr_matrix.g5H", "modulename": "pyerrors.input.hadrons", "qualname": "Npr_matrix.g5H", "kind": "variable", "doc": "

    Gamma_5 hermitean conjugate

    \n\n

    Uses the fact that the propagator is gamma5 hermitean, so just the\nin and out momenta of the propagator are exchanged.

    \n"}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"fullname": "pyerrors.input.hadrons.read_ExternalLeg_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_ExternalLeg_hd5", "kind": "function", "doc": "

    Read hadrons ExternalLeg hdf5 file and output an array of CObs

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (Npr_matrix):\nread Cobs-matrix
    • \n
    \n", "signature": "(path, filestem, ens_id, idl=None):", "funcdef": "def"}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"fullname": "pyerrors.input.hadrons.read_Bilinear_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_Bilinear_hd5", "kind": "function", "doc": "

    Read hadrons Bilinear hdf5 file and output an array of CObs

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result_dict (dict[Npr_matrix]):\nextracted Bilinears
    • \n
    \n", "signature": "(path, filestem, ens_id, idl=None):", "funcdef": "def"}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"fullname": "pyerrors.input.hadrons.read_Fourquark_hd5", "modulename": "pyerrors.input.hadrons", "qualname": "read_Fourquark_hd5", "kind": "function", "doc": "

    Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the files to read
    • \n
    • filestem (str):\nnamestem of the files to read
    • \n
    • ens_id (str):\nname of the ensemble, required for internal bookkeeping
    • \n
    • idl (range):\nIf specified only configurations in the given range are read in.
    • \n
    • vertices (list):\nVertex functions to be extracted.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result_dict (dict):\nextracted fourquark matrices
    • \n
    \n", "signature": "(path, filestem, ens_id, idl=None, vertices=['VA', 'AV']):", "funcdef": "def"}, "pyerrors.input.json": {"fullname": "pyerrors.input.json", "modulename": "pyerrors.input.json", "kind": "module", "doc": "

    \n"}, "pyerrors.input.json.create_json_string": {"fullname": "pyerrors.input.json.create_json_string", "modulename": "pyerrors.input.json", "qualname": "create_json_string", "kind": "function", "doc": "

    Generate the string for the export of a list of Obs or structures containing Obs\nto a .json(.gz) file

    \n\n
    Parameters
    \n\n
      \n
    • ol (list):\nList of objects that will be exported. At the moment, these objects can be\neither of: Obs, list, numpy.ndarray, Corr.\nAll Obs inside a structure have to be defined on the same set of configurations.
    • \n
    • description (str):\nOptional string that describes the contents of the json file.
    • \n
    • indent (int):\nSpecify the indentation level of the json file. None or 0 is permissible and\nsaves disk space.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • json_string (str):\nString for export to .json(.gz) file
    • \n
    \n", "signature": "(ol, description='', indent=1):", "funcdef": "def"}, "pyerrors.input.json.dump_to_json": {"fullname": "pyerrors.input.json.dump_to_json", "modulename": "pyerrors.input.json", "qualname": "dump_to_json", "kind": "function", "doc": "

    Export a list of Obs or structures containing Obs to a .json(.gz) file.\nDict keys that are not JSON-serializable such as floats are converted to strings.

    \n\n
    Parameters
    \n\n
      \n
    • ol (list):\nList of objects that will be exported. At the moment, these objects can be\neither of: Obs, list, numpy.ndarray, Corr.\nAll Obs inside a structure have to be defined on the same set of configurations.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • description (str):\nOptional string that describes the contents of the json file.
    • \n
    • indent (int):\nSpecify the indentation level of the json file. None or 0 is permissible and\nsaves disk space.
    • \n
    • gz (bool):\nIf True, the output is a gzipped json. If False, the output is a json file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(ol, fname, description='', indent=1, gz=True):", "funcdef": "def"}, "pyerrors.input.json.import_json_string": {"fullname": "pyerrors.input.json.import_json_string", "modulename": "pyerrors.input.json", "qualname": "import_json_string", "kind": "function", "doc": "

    Reconstruct a list of Obs or structures containing Obs from a json string.

    \n\n

    The following structures are supported: Obs, list, numpy.ndarray, Corr\nIf the list contains only one element, it is unpacked from the list.

    \n\n
    Parameters
    \n\n
      \n
    • json_string (str):\njson string containing the data.
    • \n
    • verbose (bool):\nPrint additional information that was written to the file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nreconstructed list of observables from the json string
    • \n
    • or
    • \n
    • result (Obs):\nonly one observable if the list only has one entry
    • \n
    • or
    • \n
    • result (dict):\nif full_output=True
    • \n
    \n", "signature": "(json_string, verbose=True, full_output=False):", "funcdef": "def"}, "pyerrors.input.json.load_json": {"fullname": "pyerrors.input.json.load_json", "modulename": "pyerrors.input.json", "qualname": "load_json", "kind": "function", "doc": "

    Import a list of Obs or structures containing Obs from a .json(.gz) file.

    \n\n

    The following structures are supported: Obs, list, numpy.ndarray, Corr\nIf the list contains only one element, it is unpacked from the list.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • verbose (bool):\nPrint additional information that was written to the file.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes JSON file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nreconstructed list of observables from the json string
    • \n
    • or
    • \n
    • result (Obs):\nonly one observable if the list only has one entry
    • \n
    • or
    • \n
    • result (dict):\nif full_output=True
    • \n
    \n", "signature": "(fname, verbose=True, gz=True, full_output=False):", "funcdef": "def"}, "pyerrors.input.json.dump_dict_to_json": {"fullname": "pyerrors.input.json.dump_dict_to_json", "modulename": "pyerrors.input.json", "qualname": "dump_dict_to_json", "kind": "function", "doc": "

    Export a dict of Obs or structures containing Obs to a .json(.gz) file

    \n\n
    Parameters
    \n\n
      \n
    • od (dict):\nDict of JSON valid structures and objects that will be exported.\nAt the moment, these objects can be either of: Obs, list, numpy.ndarray, Corr.\nAll Obs inside a structure have to be defined on the same set of configurations.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • description (str):\nOptional string that describes the contents of the json file.
    • \n
    • indent (int):\nSpecify the indentation level of the json file. None or 0 is permissible and\nsaves disk space.
    • \n
    • reps (str):\nSpecify the structure of the placeholder in exported dict to be reps[0-9]+.
    • \n
    • gz (bool):\nIf True, the output is a gzipped json. If False, the output is a json file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(od, fname, description='', indent=1, reps='DICTOBS', gz=True):", "funcdef": "def"}, "pyerrors.input.json.load_json_dict": {"fullname": "pyerrors.input.json.load_json_dict", "modulename": "pyerrors.input.json", "qualname": "load_json_dict", "kind": "function", "doc": "

    Import a dict of Obs or structures containing Obs from a .json(.gz) file.

    \n\n

    The following structures are supported: Obs, list, numpy.ndarray, Corr

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • verbose (bool):\nPrint additional information that was written to the file.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes JSON file.
    • \n
    • full_output (bool):\nIf True, a dict containing auxiliary information and the data is returned.\nIf False, only the data is returned.
    • \n
    • reps (str):\nSpecify the structure of the placeholder in imported dict to be reps[0-9]+.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (Obs / list / Corr):\nRead data
    • \n
    • or
    • \n
    • data (dict):\nRead data and meta-data
    • \n
    \n", "signature": "(fname, verbose=True, gz=True, full_output=False, reps='DICTOBS'):", "funcdef": "def"}, "pyerrors.input.misc": {"fullname": "pyerrors.input.misc", "modulename": "pyerrors.input.misc", "kind": "module", "doc": "

    \n"}, "pyerrors.input.misc.fit_t0": {"fullname": "pyerrors.input.misc.fit_t0", "modulename": "pyerrors.input.misc", "qualname": "fit_t0", "kind": "function", "doc": "

    Compute the root of (flow-based) data based on a dictionary that contains\nthe necessary information as key-value pairs of the form (flow time: observable at flow time).

    \n\n

    It is assumed that the data is monotonically increasing and passes zero from below.\nNo exception is thrown if this is not the case (several roots, no monotonic increase).\nAn exception is thrown if no root can be found in the data.

    \n\n

    A linear fit in the vicinity of the root is performed to extract the root from the\ntwo fit parameters.

    \n\n
    Parameters
    \n\n
      \n
    • t2E_dict (dict):\nDictionary with pairs of (flow time: observable at flow time) where the flow times\nare of type float and the observables of type Obs.
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of t0 is shown together with the data. (Default: False)
    • \n
    • observable (str):\nKeyword to identify the observable to print the correct ylabel (if plot_fit is True)\nfor the observables 't0' and 'w0'. No y label is printed otherwise. (Default: 't0')
    • \n
    \n\n
    Returns
    \n\n
      \n
    • root (Obs):\nThe root of the data series.
    • \n
    \n", "signature": "(t2E_dict, fit_range, plot_fit=False, observable='t0'):", "funcdef": "def"}, "pyerrors.input.misc.read_pbp": {"fullname": "pyerrors.input.misc.read_pbp", "modulename": "pyerrors.input.misc", "qualname": "read_pbp", "kind": "function", "doc": "

    Read pbp format from given folder structure.

    \n\n
    Parameters
    \n\n
      \n
    • r_start (list):\nlist which contains the first config to be read for each replicum
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nlist of observables read
    • \n
    \n", "signature": "(path, prefix, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD": {"fullname": "pyerrors.input.openQCD", "modulename": "pyerrors.input.openQCD", "kind": "module", "doc": "

    \n"}, "pyerrors.input.openQCD.read_rwms": {"fullname": "pyerrors.input.openQCD.read_rwms", "modulename": "pyerrors.input.openQCD", "qualname": "read_rwms", "kind": "function", "doc": "

    Read rwms format from given folder structure. Returns a list of length nrw

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath that contains the data files
    • \n
    • prefix (str):\nall files in path that start with prefix are considered as input files.\nMay be used together with postfix to consider only special file endings.\nPrefix is ignored if the keyword 'files' is used.
    • \n
    • version (str):\nversion of openQCD, default 2.0
    • \n
    • names (list):\nlist of names that is assigned to the data according\nto the order in the file list. Use with care if you do not provide file names!
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum
    • \n
    • r_step (int):\ninteger that defines a fixed step size between two measurements (in units of configs)\nIf not given, r_step=1 is assumed.
    • \n
    • postfix (str):\npostfix of the file to read, e.g. '.ms1' for openQCD-files
    • \n
    • files (list):\nlist which contains the filenames to be read. No automatic detection of\nfiles performed if given.
    • \n
    • print_err (bool):\nPrint additional information that is useful for debugging.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • rwms (Obs):\nReweighting factors read
    • \n
    \n", "signature": "(path, prefix, version='2.0', names=None, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.extract_t0": {"fullname": "pyerrors.input.openQCD.extract_t0", "modulename": "pyerrors.input.openQCD", "qualname": "extract_t0", "kind": "function", "doc": "

    Extract t0/a^2 from given .ms.dat files. Returns t0 as Obs.

    \n\n

    It is assumed that all boundary effects have\nsufficiently decayed at x0=xmin.\nThe data around the zero crossing of t^2 <E> - c (where c=0.3 by default)\nis fitted with a linear function\nfrom which the exact root is extracted.

    \n\n

    It is assumed that one measurement is performed for each config.\nIf this is not the case, the resulting idl, as well as the handling\nof r_start, r_stop and r_step is wrong and the user has to correct\nthis in the resulting observable.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nPath to .ms.dat files
    • \n
    • prefix (str):\nEnsemble prefix
    • \n
    • dtr_read (int):\nDetermines how many trajectories should be skipped\nwhen reading the ms.dat files.\nCorresponds to dtr_cnfg / dtr_ms in the openQCD input file.
    • \n
    • xmin (int):\nFirst timeslice where the boundary\neffects have sufficiently decayed.
    • \n
    • spatial_extent (int):\nspatial extent of the lattice, required for normalization.
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit. (Default: 5)
    • \n
    • postfix (str):\nPostfix of measurement file (Default: ms)
    • \n
    • c (float):\nConstant that defines the flow scale. Default 0.3 for t_0, choose 2./3 for t_1.
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • r_step (int):\ninteger that defines a fixed step size between two measurements (in units of configs)\nIf not given, r_step=1 is assumed.
    • \n
    • plaquette (bool):\nIf true extract the plaquette estimate of t0 instead.
    • \n
    • names (list):\nlist of names that is assigned to the data according\nto the order in the file list. Use with care if you do not provide file names!
    • \n
    • files (list):\nlist which contains the filenames to be read. No automatic detection of\nfiles performed if given.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of t0 is shown together with the data.
    • \n
    • assume_thermalization (bool):\nIf True: If the first record divided by the distance between two measurements is larger than\n1, it is assumed that this is due to thermalization and the first measurement belongs\nto the first config (default).\nIf False: The config numbers are assumed to be traj_number // difference
    • \n
    \n\n
    Returns
    \n\n
      \n
    • t0 (Obs):\nExtracted t0
    • \n
    \n", "signature": "(\tpath,\tprefix,\tdtr_read,\txmin,\tspatial_extent,\tfit_range=5,\tpostfix='ms',\tc=0.3,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.extract_w0": {"fullname": "pyerrors.input.openQCD.extract_w0", "modulename": "pyerrors.input.openQCD", "qualname": "extract_w0", "kind": "function", "doc": "

    Extract w0/a from given .ms.dat files. Returns w0 as Obs.

    \n\n

    It is assumed that all boundary effects have\nsufficiently decayed at x0=xmin.\nThe data around the zero crossing of t d(t^2 <E>)/dt - c (where c=0.3 by default)\nis fitted with a linear function\nfrom which the exact root is extracted.

    \n\n

    It is assumed that one measurement is performed for each config.\nIf this is not the case, the resulting idl, as well as the handling\nof r_start, r_stop and r_step is wrong and the user has to correct\nthis in the resulting observable.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nPath to .ms.dat files
    • \n
    • prefix (str):\nEnsemble prefix
    • \n
    • dtr_read (int):\nDetermines how many trajectories should be skipped\nwhen reading the ms.dat files.\nCorresponds to dtr_cnfg / dtr_ms in the openQCD input file.
    • \n
    • xmin (int):\nFirst timeslice where the boundary\neffects have sufficiently decayed.
    • \n
    • spatial_extent (int):\nspatial extent of the lattice, required for normalization.
    • \n
    • fit_range (int):\nNumber of data points left and right of the zero\ncrossing to be included in the linear fit. (Default: 5)
    • \n
    • postfix (str):\nPostfix of measurement file (Default: ms)
    • \n
    • c (float):\nConstant that defines the flow scale. Default 0.3 for w_0, choose 2./3 for w_1.
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • r_step (int):\ninteger that defines a fixed step size between two measurements (in units of configs)\nIf not given, r_step=1 is assumed.
    • \n
    • plaquette (bool):\nIf true extract the plaquette estimate of w0 instead.
    • \n
    • names (list):\nlist of names that is assigned to the data according\nto the order in the file list. Use with care if you do not provide file names!
    • \n
    • files (list):\nlist which contains the filenames to be read. No automatic detection of\nfiles performed if given.
    • \n
    • plot_fit (bool):\nIf true, the fit for the extraction of w0 is shown together with the data.
    • \n
    • assume_thermalization (bool):\nIf True: If the first record divided by the distance between two measurements is larger than\n1, it is assumed that this is due to thermalization and the first measurement belongs\nto the first config (default).\nIf False: The config numbers are assumed to be traj_number // difference
    • \n
    \n\n
    Returns
    \n\n
      \n
    • w0 (Obs):\nExtracted w0
    • \n
    \n", "signature": "(\tpath,\tprefix,\tdtr_read,\txmin,\tspatial_extent,\tfit_range=5,\tpostfix='ms',\tc=0.3,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.read_qtop": {"fullname": "pyerrors.input.openQCD.read_qtop", "modulename": "pyerrors.input.openQCD", "qualname": "read_qtop", "kind": "function", "doc": "

    Read the topological charge based on openQCD gradient flow measurements.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath of the measurement files
    • \n
    • prefix (str):\nprefix of the measurement files, e.g. _id0_r0.ms.dat.\nIgnored if file names are passed explicitly via keyword files.
    • \n
    • c (double):\nSmearing radius in units of the lattice extent, c = sqrt(8 t0) / L.
    • \n
    • dtr_cnfg (int):\n(optional) parameter that specifies the number of measurements\nbetween two configs.\nIf it is not set, the distance between two measurements\nin the file is assumed to be the distance between two configurations.
    • \n
    • steps (int):\n(optional) Distance between two configurations in units of trajectories /\n cycles. Assumed to be the distance between two measurements * dtr_cnfg if not given
    • \n
    • version (str):\nEither openQCD or sfqcd, depending on the data.
    • \n
    • L (int):\nspatial length of the lattice in L/a.\nHAS to be set if version != sfqcd, since openQCD does not provide\nthis in the header
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • files (list):\nspecify the exact files that need to be read\nfrom path, practical if e.g. only one replicum is needed
    • \n
    • postfix (str):\npostfix of the file to read, e.g. '.gfms.dat' for openQCD-files
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length.
    • \n
    • Zeuthen_flow (bool):\n(optional) If True, the Zeuthen flow is used for Qtop. Only possible\nfor version=='sfqcd'. If False, the Wilson flow is used.
    • \n
    • integer_charge (bool):\nIf True, the charge is rounded towards the nearest integer on each config.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (Obs):\nRead topological charge
    • \n
    \n", "signature": "(path, prefix, c, dtr_cnfg=1, version='openQCD', **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.read_gf_coupling": {"fullname": "pyerrors.input.openQCD.read_gf_coupling", "modulename": "pyerrors.input.openQCD", "qualname": "read_gf_coupling", "kind": "function", "doc": "

    Read the gradient flow coupling based on sfqcd gradient flow measurements. See 1607.06423 for details.

    \n\n

    Note: The current implementation only works for c=0.3 and T=L. The definition of the coupling in 1607.06423 requires projection to topological charge zero which is not done within this function but has to be performed in a separate step.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath of the measurement files
    • \n
    • prefix (str):\nprefix of the measurement files, e.g. _id0_r0.ms.dat.\nIgnored if file names are passed explicitly via keyword files.
    • \n
    • c (double):\nSmearing radius in units of the lattice extent, c = sqrt(8 t0) / L.
    • \n
    • dtr_cnfg (int):\n(optional) parameter that specifies the number of measurements\nbetween two configs.\nIf it is not set, the distance between two measurements\nin the file is assumed to be the distance between two configurations.
    • \n
    • steps (int):\n(optional) Distance between two configurations in units of trajectories /\n cycles. Assumed to be the distance between two measurements * dtr_cnfg if not given
    • \n
    • r_start (list):\nlist which contains the first config to be read for each replicum.
    • \n
    • r_stop (list):\nlist which contains the last config to be read for each replicum.
    • \n
    • files (list):\nspecify the exact files that need to be read\nfrom path, practical if e.g. only one replicum is needed
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length.
    • \n
    • postfix (str):\npostfix of the file to read, e.g. '.gfms.dat' for openQCD-files
    • \n
    • Zeuthen_flow (bool):\n(optional) If True, the Zeuthen flow is used for the coupling. If False, the Wilson flow is used.
    • \n
    \n", "signature": "(path, prefix, c, dtr_cnfg=1, Zeuthen_flow=True, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.qtop_projection": {"fullname": "pyerrors.input.openQCD.qtop_projection", "modulename": "pyerrors.input.openQCD", "qualname": "qtop_projection", "kind": "function", "doc": "

    Returns the projection to the topological charge sector defined by target.

    \n\n
    Parameters
    \n\n
      \n
    • qtop (Obs):\nTopological charge.
    • \n
    • target (int):\nSpecifies the topological sector to be reweighted to (default 0)
    • \n
    \n\n
    Returns
    \n\n
      \n
    • reto (Obs):\nprojection to the topological charge sector defined by target
    • \n
    \n", "signature": "(qtop, target=0):", "funcdef": "def"}, "pyerrors.input.openQCD.read_qtop_sector": {"fullname": "pyerrors.input.openQCD.read_qtop_sector", "modulename": "pyerrors.input.openQCD", "qualname": "read_qtop_sector", "kind": "function", "doc": "

    Constructs reweighting factors to a specified topological sector.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath of the measurement files
    • \n
    • prefix (str):\nprefix of the measurement files, e.g. _id0_r0.ms.dat
    • \n
    • c (double):\nSmearing radius in units of the lattice extent, c = sqrt(8 t0) / L
    • \n
    • target (int):\nSpecifies the topological sector to be reweighted to (default 0)
    • \n
    • dtr_cnfg (int):\n(optional) parameter that specifies the number of trajectories\nbetween two configs.\nIf it is not set, the distance between two measurements\nin the file is assumed to be the distance between two configurations.
    • \n
    • steps (int):\n(optional) Distance between two configurations in units of trajectories /\n cycles. Assumed to be the distance between two measurements * dtr_cnfg if not given
    • \n
    • version (str):\nversion string of the openQCD (sfqcd) version used to create\nthe ensemble. Default is 2.0. May also be set to sfqcd.
    • \n
    • L (int):\nspatial length of the lattice in L/a.\nHAS to be set if version != sfqcd, since openQCD does not provide\nthis in the header
    • \n
    • r_start (list):\noffset of the first ensemble, making it easier to match\nlater on with other Obs
    • \n
    • r_stop (list):\nlast configurations that need to be read (per replicum)
    • \n
    • files (list):\nspecify the exact files that need to be read\nfrom path, practical if e.g. only one replicum is needed
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length
    • \n
    • Zeuthen_flow (bool):\n(optional) If True, the Zeuthen flow is used for Qtop. Only possible\nfor version=='sfqcd'. If False, the Wilson flow is used.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • reto (Obs):\nprojection to the topological charge sector defined by target
    • \n
    \n", "signature": "(path, prefix, c, target=0, **kwargs):", "funcdef": "def"}, "pyerrors.input.openQCD.read_ms5_xsf": {"fullname": "pyerrors.input.openQCD.read_ms5_xsf", "modulename": "pyerrors.input.openQCD", "qualname": "read_ms5_xsf", "kind": "function", "doc": "

    Read data from files in the specified directory with the specified prefix and quark combination extension, and return a Corr object containing the data.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nThe directory to search for the files in.
    • \n
    • prefix (str):\nThe prefix to match the files against.
    • \n
    • qc (str):\nThe quark combination extension to match the files against.
    • \n
    • corr (str):\nThe correlator to extract data for.
    • \n
    • sep (str, optional):\nThe separator to use when parsing the replica names.
    • \n
    • **kwargs: Additional keyword arguments. The following keyword arguments are recognized:

      \n\n
        \n
      • names (List[str]): A list of names to use for the replicas.
      • \n
      • files (List[str]): A list of files to read data from.
      • \n
    • idl (List[List[int]]): A list of idls per replicum, restricting data to the idls given.
      • \n
    • \n
    \n\n
    Returns
    \n\n
      \n
    • Corr: A complex valued Corr object containing the data read from the files. In case of boundary-to-bulk correlators.
    • \n
    • or
    • \n
    • CObs: A complex valued CObs object containing the data read from the files. In case of boundary-to-boundary correlators.
    • \n
    \n\n
    Raises
    \n\n
      \n
    • FileNotFoundError: If no files matching the specified prefix and quark combination extension are found in the specified directory.
    • \n
    • IOError: If there is an error reading a file.
    • \n
    • struct.error: If there is an error unpacking binary data.
    • \n
    \n", "signature": "(path, prefix, qc, corr, sep='r', **kwargs):", "funcdef": "def"}, "pyerrors.input.pandas": {"fullname": "pyerrors.input.pandas", "modulename": "pyerrors.input.pandas", "kind": "module", "doc": "

    \n"}, "pyerrors.input.pandas.to_sql": {"fullname": "pyerrors.input.pandas.to_sql", "modulename": "pyerrors.input.pandas", "qualname": "to_sql", "kind": "function", "doc": "

    Write DataFrame including Obs or Corr valued columns to sqlite database.

    \n\n
    Parameters
    \n\n
      \n
    • df (pandas.DataFrame):\nDataframe to be written to the database.
    • \n
    • table_name (str):\nName of the table in the database.
    • \n
    • db (str):\nPath to the sqlite database.
    • \n
    • if_exists (str):\nHow to behave if the table already exists. Options: 'fail', 'replace', 'append'.
    • \n
    • gz (bool):\nIf True the json strings are gzipped.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(df, table_name, db, if_exists='fail', gz=True, **kwargs):", "funcdef": "def"}, "pyerrors.input.pandas.read_sql": {"fullname": "pyerrors.input.pandas.read_sql", "modulename": "pyerrors.input.pandas", "qualname": "read_sql", "kind": "function", "doc": "

    Execute SQL query on sqlite database and obtain DataFrame including Obs or Corr valued columns.

    \n\n
    Parameters
    \n\n
      \n
    • sql (str):\nSQL query to be executed.
    • \n
    • db (str):\nPath to the sqlite database.
    • \n
    • auto_gamma (bool):\nIf True applies the gamma_method to all imported Obs objects with the default parameters for\nthe error analysis. Default False.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (pandas.DataFrame):\nDataframe with the content of the sqlite database.
    • \n
    \n", "signature": "(sql, db, auto_gamma=False, **kwargs):", "funcdef": "def"}, "pyerrors.input.pandas.dump_df": {"fullname": "pyerrors.input.pandas.dump_df", "modulename": "pyerrors.input.pandas", "qualname": "dump_df", "kind": "function", "doc": "

    Exports a pandas DataFrame containing Obs valued columns to a (gzipped) csv file.

    \n\n

    Before making use of pandas' to_csv functionality, Obs objects are serialized via the standardized\njson format of pyerrors.

    \n\n
    Parameters
    \n\n
      \n
    • df (pandas.DataFrame):\nDataframe to be dumped to a file.
    • \n
    • fname (str):\nFilename of the output file.
    • \n
    • gz (bool):\nIf True, the output is a gzipped csv file. If False, the output is a csv file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(df, fname, gz=True):", "funcdef": "def"}, "pyerrors.input.pandas.load_df": {"fullname": "pyerrors.input.pandas.load_df", "modulename": "pyerrors.input.pandas", "qualname": "load_df", "kind": "function", "doc": "

    Imports a pandas DataFrame from a csv.(gz) file in which Obs objects are serialized as json strings.

    \n\n
    Parameters
    \n\n
      \n
    • fname (str):\nFilename of the input file.
    • \n
    • auto_gamma (bool):\nIf True applies the gamma_method to all imported Obs objects with the default parameters for\nthe error analysis. Default False.
    • \n
    • gz (bool):\nIf True, assumes that data is gzipped. If False, assumes JSON file.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • data (pandas.DataFrame):\nDataframe with the content of the csv(.gz) file.
    • \n
    \n", "signature": "(fname, auto_gamma=False, gz=True):", "funcdef": "def"}, "pyerrors.input.sfcf": {"fullname": "pyerrors.input.sfcf", "modulename": "pyerrors.input.sfcf", "kind": "module", "doc": "

    \n"}, "pyerrors.input.sfcf.read_sfcf": {"fullname": "pyerrors.input.sfcf.read_sfcf", "modulename": "pyerrors.input.sfcf", "qualname": "read_sfcf", "kind": "function", "doc": "

    Read sfcf files from given folder structure.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\nPath to the sfcf files.
    • \n
    • prefix (str):\nPrefix of the sfcf files.
    • \n
    • name (str):\nName of the correlation function to read.
    • \n
    • quarks (str):\nLabel of the quarks used in the sfcf input file, e.g. \"quark quark\".\nFor version 0.0 this does NOT need to be given with the typical \" - \"\nthat is present in the output file;\nthis is done automatically for this version.
    • \n
    • corr_type (str):\nType of correlation function to read. Can be\n
        \n
      • 'bi' for boundary-inner
      • \n
      • 'bb' for boundary-boundary
      • \n
      • 'bib' for boundary-inner-boundary
      • \n
    • \n
    • noffset (int):\nOffset of the source (only relevant when wavefunctions are used)
    • \n
    • wf (int):\nID of wave function
    • \n
    • wf2 (int):\nID of the second wavefunction\n(only relevant for boundary-to-boundary correlation functions)
    • \n
    • im (bool):\nif True, read imaginary instead of real part\nof the correlation function.
    • \n
    • names (list):\nAlternative labeling for replicas/ensembles.\nHas to have the appropriate length
    • \n
    • ens_name (str):\nreplaces the name of the ensemble
    • \n
    • version (str):\nversion of SFCF, with which the measurement was done.\nif the compact output option (-c) was specified,\nappend a \"c\" to the version (e.g. \"1.0c\")\nif the append output option (-a) was specified,\nappend an \"a\" to the version
    • \n
    • cfg_separator (str):\nString that separates the ensemble identifier from the configuration number (default 'n').
    • \n
    • replica (list):\nlist of replica to be read, default is all
    • \n
    • files (list):\nlist of files to be read per replica, default is all.\nfor non-compact output format, hand the folders to be read here.
    • \n
    • check_configs (list[list[int]]):\nlist of lists of expected configs, e.g. [range(1, 1000)]\nfor one replicum with 1000 configs
    • \n
    \n\n
    Returns
    \n\n
      \n
    • result (list[Obs]):\nlist of Observables with length T, one observable per timeslice.\nbb-type correlators have length 1.
    • \n
    \n", "signature": "(\tpath,\tprefix,\tname,\tquarks='.*',\tcorr_type='bi',\tnoffset=0,\twf=0,\twf2=0,\tversion='1.0c',\tcfg_separator='n',\tsilent=False,\t**kwargs):", "funcdef": "def"}, "pyerrors.input.utils": {"fullname": "pyerrors.input.utils", "modulename": "pyerrors.input.utils", "kind": "module", "doc": "

    \n"}, "pyerrors.input.utils.sort_names": {"fullname": "pyerrors.input.utils.sort_names", "modulename": "pyerrors.input.utils", "qualname": "sort_names", "kind": "function", "doc": "

    Sorts a list of replica names by searching for r and id in the replicum string.\nIf this search fails, a fallback method is used\nin which the strings are simply compared and the first differing numeral is used for differentiation.

    \n\n
    Parameters
    \n\n
      \n
    • ll (list):\nlist to sort
    • \n
    \n\n
    Returns
    \n\n
      \n
    • ll (list):\nsorted list
    • \n
    \n", "signature": "(ll):", "funcdef": "def"}, "pyerrors.input.utils.check_idl": {"fullname": "pyerrors.input.utils.check_idl", "modulename": "pyerrors.input.utils", "qualname": "check_idl", "kind": "function", "doc": "

    Checks if list of configurations is contained in an idl

    \n\n
    Parameters
    \n\n
      \n
    • idl (range or list):\nidl of the current replicum
    • \n
    • che (list):\nlist of configurations to be checked against
    • \n
    \n\n
    Returns
    \n\n
      \n
    • miss_str (str):\nstring with integers of which idls are missing
    • \n
    \n", "signature": "(idl, che):", "funcdef": "def"}, "pyerrors.linalg": {"fullname": "pyerrors.linalg", "modulename": "pyerrors.linalg", "kind": "module", "doc": "

    \n"}, "pyerrors.linalg.matmul": {"fullname": "pyerrors.linalg.matmul", "modulename": "pyerrors.linalg", "qualname": "matmul", "kind": "function", "doc": "

    Matrix multiply all operands.

    \n\n
    Parameters
    \n\n
      \n
    • operands (numpy.ndarray):\nArbitrary number of 2d-numpy arrays which can be real or complex\nObs valued.
    • \n
    • This implementation is faster compared to standard multiplication via the @ operator.
    • \n
    \n", "signature": "(*operands):", "funcdef": "def"}, "pyerrors.linalg.jack_matmul": {"fullname": "pyerrors.linalg.jack_matmul", "modulename": "pyerrors.linalg", "qualname": "jack_matmul", "kind": "function", "doc": "

    Matrix multiply both operands making use of the jackknife approximation.

    \n\n
    Parameters
    \n\n
      \n
    • operands (numpy.ndarray):\nArbitrary number of 2d-numpy arrays which can be real or complex\nObs valued.
    • \n
    • For large matrices this is considerably faster compared to matmul.
    • \n
    \n", "signature": "(*operands):", "funcdef": "def"}, "pyerrors.linalg.einsum": {"fullname": "pyerrors.linalg.einsum", "modulename": "pyerrors.linalg", "qualname": "einsum", "kind": "function", "doc": "

    Wrapper for numpy.einsum

    \n\n
    Parameters
    \n\n
      \n
    • subscripts (str):\nSubscripts for summation (see numpy documentation for details)
    • \n
    • operands (numpy.ndarray):\nArbitrary number of 2d-numpy arrays which can be real or complex\nObs valued.
    • \n
    \n", "signature": "(subscripts, *operands):", "funcdef": "def"}, "pyerrors.linalg.inv": {"fullname": "pyerrors.linalg.inv", "modulename": "pyerrors.linalg", "qualname": "inv", "kind": "function", "doc": "

    Inverse of Obs or CObs valued matrices.

    \n", "signature": "(x):", "funcdef": "def"}, "pyerrors.linalg.cholesky": {"fullname": "pyerrors.linalg.cholesky", "modulename": "pyerrors.linalg", "qualname": "cholesky", "kind": "function", "doc": "

    Cholesky decomposition of Obs valued matrices.

    \n", "signature": "(x):", "funcdef": "def"}, "pyerrors.linalg.det": {"fullname": "pyerrors.linalg.det", "modulename": "pyerrors.linalg", "qualname": "det", "kind": "function", "doc": "

    Determinant of Obs valued matrices.

    \n", "signature": "(x):", "funcdef": "def"}, "pyerrors.linalg.eigh": {"fullname": "pyerrors.linalg.eigh", "modulename": "pyerrors.linalg", "qualname": "eigh", "kind": "function", "doc": "

    Computes the eigenvalues and eigenvectors of a given hermitian matrix of Obs according to np.linalg.eigh.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.linalg.eig": {"fullname": "pyerrors.linalg.eig", "modulename": "pyerrors.linalg", "qualname": "eig", "kind": "function", "doc": "

    Computes the eigenvalues of a given matrix of Obs according to np.linalg.eig.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.linalg.pinv": {"fullname": "pyerrors.linalg.pinv", "modulename": "pyerrors.linalg", "qualname": "pinv", "kind": "function", "doc": "

    Computes the Moore-Penrose pseudoinverse of a matrix of Obs.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.linalg.svd": {"fullname": "pyerrors.linalg.svd", "modulename": "pyerrors.linalg", "qualname": "svd", "kind": "function", "doc": "

    Computes the singular value decomposition of a matrix of Obs.

    \n", "signature": "(obs, **kwargs):", "funcdef": "def"}, "pyerrors.misc": {"fullname": "pyerrors.misc", "modulename": "pyerrors.misc", "kind": "module", "doc": "

    \n"}, "pyerrors.misc.print_config": {"fullname": "pyerrors.misc.print_config", "modulename": "pyerrors.misc", "qualname": "print_config", "kind": "function", "doc": "

    Print information about version of python, pyerrors and dependencies.

    \n", "signature": "():", "funcdef": "def"}, "pyerrors.misc.errorbar": {"fullname": "pyerrors.misc.errorbar", "modulename": "pyerrors.misc", "qualname": "errorbar", "kind": "function", "doc": "

    pyerrors wrapper for the errorbar method of matplotlib

    \n\n
    Parameters
    \n\n
      \n
    • x (list):\nA list of x-values which can be Obs.
    • \n
    • y (list):\nA list of y-values which can be Obs.
    • \n
    • axes (matplotlib.pyplot.axes):\nThe axes to plot on. Default is plt.
    • \n
    \n", "signature": "(\tx,\ty,\taxes=<module 'matplotlib.pyplot' from '/opt/hostedtoolcache/Python/3.10.12/x64/lib/python3.10/site-packages/matplotlib/pyplot.py'>,\t**kwargs):", "funcdef": "def"}, "pyerrors.misc.dump_object": {"fullname": "pyerrors.misc.dump_object", "modulename": "pyerrors.misc", "qualname": "dump_object", "kind": "function", "doc": "

    Dump object into pickle file.

    \n\n
    Parameters
    \n\n
      \n
    • obj (object):\nobject to be saved in the pickle file
    • \n
    • name (str):\nname of the file
    • \n
    • path (str):\nspecifies a custom path for the file (default '.')
    • \n
    \n\n
    Returns
    \n\n
      \n
    • None
    • \n
    \n", "signature": "(obj, name, **kwargs):", "funcdef": "def"}, "pyerrors.misc.load_object": {"fullname": "pyerrors.misc.load_object", "modulename": "pyerrors.misc", "qualname": "load_object", "kind": "function", "doc": "

    Load object from pickle file.

    \n\n
    Parameters
    \n\n
      \n
    • path (str):\npath to the file
    • \n
    \n\n
    Returns
    \n\n
      \n
    • object (Obs):\nLoaded Object
    • \n
    \n", "signature": "(path):", "funcdef": "def"}, "pyerrors.misc.pseudo_Obs": {"fullname": "pyerrors.misc.pseudo_Obs", "modulename": "pyerrors.misc", "qualname": "pseudo_Obs", "kind": "function", "doc": "

    Generate an Obs object with given value, dvalue and name for test purposes

    \n\n
    Parameters
    \n\n
      \n
    • value (float):\ncentral value of the Obs to be generated.
    • \n
    • dvalue (float):\nerror of the Obs to be generated.
    • \n
    • name (str):\nname of the ensemble for which the Obs is to be generated.
    • \n
    • samples (int):\nnumber of samples for the Obs (default 1000).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (Obs):\nGenerated Observable
    • \n
    \n", "signature": "(value, dvalue, name, samples=1000):", "funcdef": "def"}, "pyerrors.misc.gen_correlated_data": {"fullname": "pyerrors.misc.gen_correlated_data", "modulename": "pyerrors.misc", "qualname": "gen_correlated_data", "kind": "function", "doc": "

    Generate observables with given covariance and autocorrelation times.

    \n\n
    Parameters
    \n\n
      \n
    • means (list):\nlist containing the mean value of each observable.
    • \n
    • cov (numpy.ndarray):\ncovariance matrix for the data to be generated.
    • \n
    • name (str):\nensemble name for the data to be generated.
    • \n
    • tau (float or list):\ncan either be a real number or a list with an entry for\nevery dataset.
    • \n
    • samples (int):\nnumber of samples to be generated for each observable.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • corr_obs (list[Obs]):\nGenerated observable list
    • \n
    \n", "signature": "(means, cov, name, tau=0.5, samples=1000):", "funcdef": "def"}, "pyerrors.mpm": {"fullname": "pyerrors.mpm", "modulename": "pyerrors.mpm", "kind": "module", "doc": "

    \n"}, "pyerrors.mpm.matrix_pencil_method": {"fullname": "pyerrors.mpm.matrix_pencil_method", "modulename": "pyerrors.mpm", "qualname": "matrix_pencil_method", "kind": "function", "doc": "

    Matrix pencil method to extract k energy levels from data

    \n\n

    Implementation of the matrix pencil method based on\neq. (2.17) of Y. Hua, T. K. Sarkar, IEEE Trans. Acoust. 38, 814-824 (1990)

    \n\n
    Parameters
    \n\n
      \n
    • data (list):\ncan be a list of Obs for the analysis of a single correlator, or a list of lists\nof Obs if several correlators are to be analyzed at once.
    • \n
    • k (int):\nNumber of states to extract (default 1).
    • \n
    • p (int):\nmatrix pencil parameter which filters noise. The optimal value is expected between\nlen(data)/3 and 2*len(data)/3. The computation is more expensive the closer p is\nto len(data)/2 but could possibly suppress more noise (default len(data)//2).
    • \n
    \n\n
    Returns
    \n\n
      \n
    • energy_levels (list[Obs]):\nExtracted energy levels
    • \n
    \n", "signature": "(corrs, k=1, p=None, **kwargs):", "funcdef": "def"}, "pyerrors.obs": {"fullname": "pyerrors.obs", "modulename": "pyerrors.obs", "kind": "module", "doc": "

    \n"}, "pyerrors.obs.Obs": {"fullname": "pyerrors.obs.Obs", "modulename": "pyerrors.obs", "qualname": "Obs", "kind": "class", "doc": "

    Class for a general observable.

    \n\n

    Instances of Obs are the basic objects of a pyerrors error analysis.\nThey are initialized with a list which contains arrays of samples for\ndifferent ensembles/replica and another list of same length which contains\nthe names of the ensembles/replica. Mathematical operations can be\nperformed on instances. The result is another instance of Obs. The error of\nan instance can be computed with the gamma_method. Also contains additional\nmethods for output and visualization of the error calculation.

    \n\n
    Attributes
    \n\n
      \n
    • S_global (float):\nStandard value for S (default 2.0)
    • \n
    • S_dict (dict):\nDictionary for S values. If an entry for a given ensemble\nexists this overwrites the standard value for that ensemble.
    • \n
    • tau_exp_global (float):\nStandard value for tau_exp (default 0.0)
    • \n
    • tau_exp_dict (dict):\nDictionary for tau_exp values. If an entry for a given ensemble exists\nthis overwrites the standard value for that ensemble.
    • \n
    • N_sigma_global (float):\nStandard value for N_sigma (default 1.0)
    • \n
    • N_sigma_dict (dict):\nDictionary for N_sigma values. If an entry for a given ensemble exists\nthis overwrites the standard value for that ensemble.
    • \n
    \n"}, "pyerrors.obs.Obs.__init__": {"fullname": "pyerrors.obs.Obs.__init__", "modulename": "pyerrors.obs", "qualname": "Obs.__init__", "kind": "function", "doc": "

    Initialize Obs object.

    \n\n
    Parameters
    \n\n
      \n
    • samples (list):\nlist of numpy arrays containing the Monte Carlo samples
    • \n
    • names (list):\nlist of strings labeling the individual samples
    • \n
    • idl (list, optional):\nlist of ranges or lists on which the samples are defined
    • \n
    \n", "signature": "(samples, names, idl=None, **kwargs)"}, "pyerrors.obs.Obs.S_global": {"fullname": "pyerrors.obs.Obs.S_global", "modulename": "pyerrors.obs", "qualname": "Obs.S_global", "kind": "variable", "doc": "

    \n", "default_value": "2.0"}, "pyerrors.obs.Obs.S_dict": {"fullname": "pyerrors.obs.Obs.S_dict", "modulename": "pyerrors.obs", "qualname": "Obs.S_dict", "kind": "variable", "doc": "

    \n", "default_value": "{}"}, "pyerrors.obs.Obs.tau_exp_global": {"fullname": "pyerrors.obs.Obs.tau_exp_global", "modulename": "pyerrors.obs", "qualname": "Obs.tau_exp_global", "kind": "variable", "doc": "

    \n", "default_value": "0.0"}, "pyerrors.obs.Obs.tau_exp_dict": {"fullname": "pyerrors.obs.Obs.tau_exp_dict", "modulename": "pyerrors.obs", "qualname": "Obs.tau_exp_dict", "kind": "variable", "doc": "

    \n", "default_value": "{}"}, "pyerrors.obs.Obs.N_sigma_global": {"fullname": "pyerrors.obs.Obs.N_sigma_global", "modulename": "pyerrors.obs", "qualname": "Obs.N_sigma_global", "kind": "variable", "doc": "

    \n", "default_value": "1.0"}, "pyerrors.obs.Obs.N_sigma_dict": {"fullname": "pyerrors.obs.Obs.N_sigma_dict", "modulename": "pyerrors.obs", "qualname": "Obs.N_sigma_dict", "kind": "variable", "doc": "

    \n", "default_value": "{}"}, "pyerrors.obs.Obs.names": {"fullname": "pyerrors.obs.Obs.names", "modulename": "pyerrors.obs", "qualname": "Obs.names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.shape": {"fullname": "pyerrors.obs.Obs.shape", "modulename": "pyerrors.obs", "qualname": "Obs.shape", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.r_values": {"fullname": "pyerrors.obs.Obs.r_values", "modulename": "pyerrors.obs", "qualname": "Obs.r_values", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.deltas": {"fullname": "pyerrors.obs.Obs.deltas", "modulename": "pyerrors.obs", "qualname": "Obs.deltas", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.N": {"fullname": "pyerrors.obs.Obs.N", "modulename": "pyerrors.obs", "qualname": "Obs.N", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.idl": {"fullname": "pyerrors.obs.Obs.idl", "modulename": "pyerrors.obs", "qualname": "Obs.idl", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.ddvalue": {"fullname": "pyerrors.obs.Obs.ddvalue", "modulename": "pyerrors.obs", "qualname": "Obs.ddvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.reweighted": {"fullname": "pyerrors.obs.Obs.reweighted", "modulename": "pyerrors.obs", "qualname": "Obs.reweighted", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.tag": {"fullname": "pyerrors.obs.Obs.tag", "modulename": "pyerrors.obs", "qualname": "Obs.tag", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.value": {"fullname": "pyerrors.obs.Obs.value", "modulename": "pyerrors.obs", "qualname": "Obs.value", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.dvalue": {"fullname": "pyerrors.obs.Obs.dvalue", "modulename": "pyerrors.obs", "qualname": "Obs.dvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_names": {"fullname": "pyerrors.obs.Obs.e_names", "modulename": "pyerrors.obs", "qualname": "Obs.e_names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.cov_names": {"fullname": "pyerrors.obs.Obs.cov_names", "modulename": "pyerrors.obs", "qualname": "Obs.cov_names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.mc_names": {"fullname": "pyerrors.obs.Obs.mc_names", "modulename": "pyerrors.obs", "qualname": "Obs.mc_names", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_content": {"fullname": "pyerrors.obs.Obs.e_content", "modulename": "pyerrors.obs", "qualname": "Obs.e_content", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.covobs": {"fullname": "pyerrors.obs.Obs.covobs", "modulename": "pyerrors.obs", "qualname": "Obs.covobs", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.gamma_method": {"fullname": "pyerrors.obs.Obs.gamma_method", "modulename": "pyerrors.obs", "qualname": "Obs.gamma_method", "kind": "function", "doc": "

    Estimate the error and related properties of the Obs.

    \n\n
    Parameters
    \n\n
      \n
    • S (float):\nspecifies a custom value for the parameter S (default 2.0).\nIf set to 0 it is assumed that the data exhibits no\nautocorrelation. In this case the error estimate coincides\nwith the sample standard error.
    • \n
    • tau_exp (float):\npositive value triggers the critical slowing down analysis\n(default 0.0).
    • \n
    • N_sigma (float):\nnumber of standard deviations from zero until the tail is\nattached to the autocorrelation function (default 1).
    • \n
    • fft (bool):\ndetermines whether the fft algorithm is used for the computation\nof the autocorrelation function (default True)
    • \n
    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.obs.Obs.gm": {"fullname": "pyerrors.obs.Obs.gm", "modulename": "pyerrors.obs", "qualname": "Obs.gm", "kind": "function", "doc": "

    Estimate the error and related properties of the Obs.

    \n\n
    Parameters
    \n\n
      \n
    • S (float):\nspecifies a custom value for the parameter S (default 2.0).\nIf set to 0 it is assumed that the data exhibits no\nautocorrelation. In this case the error estimate coincides\nwith the sample standard error.
    • \n
    • tau_exp (float):\npositive value triggers the critical slowing down analysis\n(default 0.0).
    • \n
    • N_sigma (float):\nnumber of standard deviations from zero until the tail is\nattached to the autocorrelation function (default 1).
    • \n
    • fft (bool):\ndetermines whether the fft algorithm is used for the computation\nof the autocorrelation function (default True)
    • \n
    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.obs.Obs.details": {"fullname": "pyerrors.obs.Obs.details", "modulename": "pyerrors.obs", "qualname": "Obs.details", "kind": "function", "doc": "

    Output detailed properties of the Obs.

    \n\n
    Parameters
    \n\n
      \n
    • ens_content (bool):\nprint details about the ensembles and replica if true.
    • \n
    \n", "signature": "(self, ens_content=True):", "funcdef": "def"}, "pyerrors.obs.Obs.reweight": {"fullname": "pyerrors.obs.Obs.reweight", "modulename": "pyerrors.obs", "qualname": "Obs.reweight", "kind": "function", "doc": "

    Reweight the obs with the given reweighting factors.

    \n\n
    Parameters
    \n\n
      \n
    • weight (Obs):\nReweighting factor. An Observable that has to be defined on a superset of the\nconfigurations in obs[i].idl for all i.
    • \n
    • all_configs (bool):\nif True, the reweighted observables are normalized by the average of\nthe reweighting factor on all configurations in weight.idl and not\non the configurations in obs[i].idl. Default False.
    • \n
    \n", "signature": "(self, weight):", "funcdef": "def"}, "pyerrors.obs.Obs.is_zero_within_error": {"fullname": "pyerrors.obs.Obs.is_zero_within_error", "modulename": "pyerrors.obs", "qualname": "Obs.is_zero_within_error", "kind": "function", "doc": "

    Checks whether the observable is zero within 'sigma' standard errors.

    \n\n
    Parameters
    \n\n
      \n
    • sigma (int):\nNumber of standard errors used for the check.
    • \n
    • Only works properly when the gamma method was run.
    • \n
    \n", "signature": "(self, sigma=1):", "funcdef": "def"}, "pyerrors.obs.Obs.is_zero": {"fullname": "pyerrors.obs.Obs.is_zero", "modulename": "pyerrors.obs", "qualname": "Obs.is_zero", "kind": "function", "doc": "

    Checks whether the observable is zero within a given tolerance.

    \n\n
    Parameters
    \n\n
      \n
    • atol (float):\nAbsolute tolerance (for details see numpy documentation).
    • \n
    \n", "signature": "(self, atol=1e-10):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_tauint": {"fullname": "pyerrors.obs.Obs.plot_tauint", "modulename": "pyerrors.obs", "qualname": "Obs.plot_tauint", "kind": "function", "doc": "

    Plot integrated autocorrelation time for each ensemble.

    \n\n
    Parameters
    \n\n
      \n
    • save (str):\nsaves the figure to a file named 'save' if specified.
    • \n
    \n", "signature": "(self, save=None):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_rho": {"fullname": "pyerrors.obs.Obs.plot_rho", "modulename": "pyerrors.obs", "qualname": "Obs.plot_rho", "kind": "function", "doc": "

    Plot normalized autocorrelation function for each ensemble.

    \n\n
    Parameters
    \n\n
      \n
    • save (str):\nsaves the figure to a file named 'save' if specified.
    • \n
    \n", "signature": "(self, save=None):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_rep_dist": {"fullname": "pyerrors.obs.Obs.plot_rep_dist", "modulename": "pyerrors.obs", "qualname": "Obs.plot_rep_dist", "kind": "function", "doc": "

    Plot replica distribution for each ensemble with more than one replicum.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_history": {"fullname": "pyerrors.obs.Obs.plot_history", "modulename": "pyerrors.obs", "qualname": "Obs.plot_history", "kind": "function", "doc": "

    Plot derived Monte Carlo history for each ensemble

    \n\n
    Parameters
    \n\n
      \n
    • expand (bool):\nshow expanded history for irregular Monte Carlo chains (default: True).
    • \n
    \n", "signature": "(self, expand=True):", "funcdef": "def"}, "pyerrors.obs.Obs.plot_piechart": {"fullname": "pyerrors.obs.Obs.plot_piechart", "modulename": "pyerrors.obs", "qualname": "Obs.plot_piechart", "kind": "function", "doc": "

    Plot piechart which shows the fractional contribution of each\nensemble to the error and returns a dictionary containing the fractions.

    \n\n
    Parameters
    \n\n
      \n
    • save (str):\nsaves the figure to a file named 'save' if specified.
    • \n
    \n", "signature": "(self, save=None):", "funcdef": "def"}, "pyerrors.obs.Obs.dump": {"fullname": "pyerrors.obs.Obs.dump", "modulename": "pyerrors.obs", "qualname": "Obs.dump", "kind": "function", "doc": "

    Dump the Obs to a file 'name' of chosen format.

    \n\n
    Parameters
    \n\n
      \n
    • filename (str):\nname of the file to be saved.
    • \n
    • datatype (str):\nFormat of the exported file. Supported formats include\n\"json.gz\" and \"pickle\"
    • \n
    • description (str):\nDescription for output file, only relevant for json.gz format.
    • \n
    • path (str):\nspecifies a custom path for the file (default '.')
    • \n
    \n", "signature": "(self, filename, datatype='json.gz', description='', **kwargs):", "funcdef": "def"}, "pyerrors.obs.Obs.export_jackknife": {"fullname": "pyerrors.obs.Obs.export_jackknife", "modulename": "pyerrors.obs", "qualname": "Obs.export_jackknife", "kind": "function", "doc": "

    Export jackknife samples from the Obs

    \n\n
    Returns
    \n\n
      \n
    • numpy.ndarray: Returns a numpy array of length N + 1 where N is the number of samples\nfor the given ensemble and replicum. The zeroth entry of the array contains\nthe mean value of the Obs, entries 1 to N contain the N jackknife samples\nderived from the Obs. The current implementation only works for observables\ndefined on exactly one ensemble and replicum. The derived jackknife samples\nshould agree with samples from a full jackknife analysis up to O(1/N).
    • \n
    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.export_bootstrap": {"fullname": "pyerrors.obs.Obs.export_bootstrap", "modulename": "pyerrors.obs", "qualname": "Obs.export_bootstrap", "kind": "function", "doc": "

    Export bootstrap samples from the Obs

    \n\n
    Parameters
    \n\n
      \n
    • samples (int):\nNumber of bootstrap samples to generate.
    • \n
    • random_numbers (np.ndarray):\nArray of shape (samples, length) containing the random numbers to generate the bootstrap samples.\nIf not provided, the bootstrap samples are generated based on the md5 hash of the ensemble name.
    • \n
    • save_rng (str):\nSave the random numbers to a file if a path is specified.
    • \n
    \n\n
    Returns
    \n\n
      \n
    • numpy.ndarray: Returns a numpy array of length N + 1 where N is the number of bootstrap samples\ngenerated for the given ensemble and replicum. The zeroth entry of the array contains\nthe mean value of the Obs, entries 1 to N contain the N bootstrap samples\nderived from the Obs. The current implementation only works for observables\ndefined on exactly one ensemble and replicum. The derived bootstrap samples\nshould agree with samples from a full bootstrap analysis up to O(1/N).
    • \n
    \n", "signature": "(self, samples=500, random_numbers=None, save_rng=None):", "funcdef": "def"}, "pyerrors.obs.Obs.sqrt": {"fullname": "pyerrors.obs.Obs.sqrt", "modulename": "pyerrors.obs", "qualname": "Obs.sqrt", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.log": {"fullname": "pyerrors.obs.Obs.log", "modulename": "pyerrors.obs", "qualname": "Obs.log", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.exp": {"fullname": "pyerrors.obs.Obs.exp", "modulename": "pyerrors.obs", "qualname": "Obs.exp", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.sin": {"fullname": "pyerrors.obs.Obs.sin", "modulename": "pyerrors.obs", "qualname": "Obs.sin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.cos": {"fullname": "pyerrors.obs.Obs.cos", "modulename": "pyerrors.obs", "qualname": "Obs.cos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.tan": {"fullname": "pyerrors.obs.Obs.tan", "modulename": "pyerrors.obs", "qualname": "Obs.tan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arcsin": {"fullname": "pyerrors.obs.Obs.arcsin", "modulename": "pyerrors.obs", "qualname": "Obs.arcsin", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arccos": {"fullname": "pyerrors.obs.Obs.arccos", "modulename": "pyerrors.obs", "qualname": "Obs.arccos", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arctan": {"fullname": "pyerrors.obs.Obs.arctan", "modulename": "pyerrors.obs", "qualname": "Obs.arctan", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.sinh": {"fullname": "pyerrors.obs.Obs.sinh", "modulename": "pyerrors.obs", "qualname": "Obs.sinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.cosh": {"fullname": "pyerrors.obs.Obs.cosh", "modulename": "pyerrors.obs", "qualname": "Obs.cosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.tanh": {"fullname": "pyerrors.obs.Obs.tanh", "modulename": "pyerrors.obs", "qualname": "Obs.tanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arcsinh": {"fullname": "pyerrors.obs.Obs.arcsinh", "modulename": "pyerrors.obs", "qualname": "Obs.arcsinh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arccosh": {"fullname": "pyerrors.obs.Obs.arccosh", "modulename": "pyerrors.obs", "qualname": "Obs.arccosh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.arctanh": {"fullname": "pyerrors.obs.Obs.arctanh", "modulename": "pyerrors.obs", "qualname": "Obs.arctanh", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.Obs.N_sigma": {"fullname": "pyerrors.obs.Obs.N_sigma", "modulename": "pyerrors.obs", "qualname": "Obs.N_sigma", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.S": {"fullname": "pyerrors.obs.Obs.S", "modulename": "pyerrors.obs", "qualname": "Obs.S", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_ddvalue": {"fullname": "pyerrors.obs.Obs.e_ddvalue", "modulename": "pyerrors.obs", "qualname": "Obs.e_ddvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_drho": {"fullname": "pyerrors.obs.Obs.e_drho", "modulename": "pyerrors.obs", "qualname": "Obs.e_drho", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_dtauint": {"fullname": "pyerrors.obs.Obs.e_dtauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_dtauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_dvalue": {"fullname": "pyerrors.obs.Obs.e_dvalue", "modulename": "pyerrors.obs", "qualname": "Obs.e_dvalue", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_n_dtauint": {"fullname": "pyerrors.obs.Obs.e_n_dtauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_n_dtauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_n_tauint": {"fullname": "pyerrors.obs.Obs.e_n_tauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_n_tauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_rho": {"fullname": "pyerrors.obs.Obs.e_rho", "modulename": "pyerrors.obs", "qualname": "Obs.e_rho", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_tauint": {"fullname": "pyerrors.obs.Obs.e_tauint", "modulename": "pyerrors.obs", "qualname": "Obs.e_tauint", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.e_windowsize": {"fullname": "pyerrors.obs.Obs.e_windowsize", "modulename": "pyerrors.obs", "qualname": "Obs.e_windowsize", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.Obs.tau_exp": {"fullname": "pyerrors.obs.Obs.tau_exp", "modulename": "pyerrors.obs", "qualname": "Obs.tau_exp", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs": {"fullname": "pyerrors.obs.CObs", "modulename": "pyerrors.obs", "qualname": "CObs", "kind": "class", "doc": "

    Class for a complex valued observable.

    \n"}, "pyerrors.obs.CObs.__init__": {"fullname": "pyerrors.obs.CObs.__init__", "modulename": "pyerrors.obs", "qualname": "CObs.__init__", "kind": "function", "doc": "

    \n", "signature": "(real, imag=0.0)"}, "pyerrors.obs.CObs.tag": {"fullname": "pyerrors.obs.CObs.tag", "modulename": "pyerrors.obs", "qualname": "CObs.tag", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs.real": {"fullname": "pyerrors.obs.CObs.real", "modulename": "pyerrors.obs", "qualname": "CObs.real", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs.imag": {"fullname": "pyerrors.obs.CObs.imag", "modulename": "pyerrors.obs", "qualname": "CObs.imag", "kind": "variable", "doc": "

    \n"}, "pyerrors.obs.CObs.gamma_method": {"fullname": "pyerrors.obs.CObs.gamma_method", "modulename": "pyerrors.obs", "qualname": "CObs.gamma_method", "kind": "function", "doc": "

    Executes the gamma_method for the real and the imaginary part.

    \n", "signature": "(self, **kwargs):", "funcdef": "def"}, "pyerrors.obs.CObs.is_zero": {"fullname": "pyerrors.obs.CObs.is_zero", "modulename": "pyerrors.obs", "qualname": "CObs.is_zero", "kind": "function", "doc": "

    Checks whether both real and imaginary part are zero within machine precision.

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.CObs.conjugate": {"fullname": "pyerrors.obs.CObs.conjugate", "modulename": "pyerrors.obs", "qualname": "CObs.conjugate", "kind": "function", "doc": "

    \n", "signature": "(self):", "funcdef": "def"}, "pyerrors.obs.derived_observable": {"fullname": "pyerrors.obs.derived_observable", "modulename": "pyerrors.obs", "qualname": "derived_observable", "kind": "function", "doc": "

    Construct a derived Obs according to func(data, **kwargs) using automatic differentiation.

    \n\n
    Parameters
    \n\n
      \n
    • func (object):\narbitrary function of the form func(data, **kwargs). For the\nautomatic differentiation to work, all numpy functions have to have\nthe autograd wrapper (use 'import autograd.numpy as anp').
    • \n
    • data (list):\nlist of Obs, e.g. [obs1, obs2, obs3].
    • \n
    • num_grad (bool):\nif True, numerical derivatives are used instead of autograd\n(default False). To control the numerical differentiation the\nkwargs of numdifftools.step_generators.MaxStepGenerator\ncan be used.
    • \n
    • man_grad (list):\nmanually supply a list or an array which contains the jacobian\nof func. Use cautiously, supplying the wrong derivative will\nnot be intercepted.
    • \n
    \n\n
    Notes
    \n\n

    For simple mathematical operations it can be practical to use anonymous\nfunctions. For the ratio of two observables one can e.g. use

    \n\n

    new_obs = derived_observable(lambda x: x[0] / x[1], [obs1, obs2])

    \n", "signature": "(func, data, array_mode=False, **kwargs):", "funcdef": "def"}, "pyerrors.obs.reweight": {"fullname": "pyerrors.obs.reweight", "modulename": "pyerrors.obs", "qualname": "reweight", "kind": "function", "doc": "

    Reweight a list of observables.

    \n\n
    Parameters
    \n\n
      \n
    • weight (Obs):\nReweighting factor. An Observable that has to be defined on a superset of the\nconfigurations in obs[i].idl for all i.
    • \n
    • obs (list):\nlist of Obs, e.g. [obs1, obs2, obs3].
    • \n
    • all_configs (bool):\nif True, the reweighted observables are normalized by the average of\nthe reweighting factor on all configurations in weight.idl and not\non the configurations in obs[i].idl. Default False.
    • \n
    \n", "signature": "(weight, obs, **kwargs):", "funcdef": "def"}, "pyerrors.obs.correlate": {"fullname": "pyerrors.obs.correlate", "modulename": "pyerrors.obs", "qualname": "correlate", "kind": "function", "doc": "

    Correlate two observables.

    \n\n
    Parameters
    \n\n
      \n
    • obs_a (Obs):\nFirst observable
    • \n
    • obs_b (Obs):\nSecond observable
    • \n
    \n\n
    Notes
    \n\n

    Keep in mind to only correlate primary observables which have not been reweighted\nyet. The reweighting has to be applied after correlating the observables.\nCurrently only works if ensembles are identical (this is not strictly necessary).

    \n", "signature": "(obs_a, obs_b):", "funcdef": "def"}, "pyerrors.obs.covariance": {"fullname": "pyerrors.obs.covariance", "modulename": "pyerrors.obs", "qualname": "covariance", "kind": "function", "doc": "

    Calculates the error covariance matrix of a set of observables.

    \n\n

    WARNING: This function should be used with care, especially for observables with support on multiple\n ensembles with differing autocorrelations. See the notes below for details.

    \n\n

    The gamma method has to be applied first to all observables.

    \n\n
    Parameters
    \n\n
      \n
    • obs (list or numpy.ndarray):\nList or one dimensional array of Obs
    • \n
    • visualize (bool):\nIf True plots the corresponding normalized correlation matrix (default False).
    • \n
    • correlation (bool):\nIf True the correlation matrix instead of the error covariance matrix is returned (default False).
    • \n
    • smooth (None or int):\nIf smooth is an integer 'E' between 2 and the dimension of the matrix minus 1 the eigenvalue\nsmoothing procedure of hep-lat/9412087 is applied to the correlation matrix which leaves the\nlargest E eigenvalues essentially unchanged and smoothes the smaller eigenvalues to avoid extremely\nsmall ones.
    • \n
    \n\n
    Notes
    \n\n

    The error covariance is defined such that it agrees with the squared standard error for two identical observables\n$$\\operatorname{cov}(a,a)=\\sum_{s=1}^N\\delta_a^s\\delta_a^s/N^2=\\Gamma_{aa}(0)/N=\\operatorname{var}(a)/N=\\sigma_a^2$$\nin the absence of autocorrelation.\nThe error covariance is estimated by calculating the correlation matrix assuming no autocorrelation and then rescaling the correlation matrix by the full errors including the previous gamma method estimate for the autocorrelation of the observables. The covariance at windowsize 0 is guaranteed to be positive semi-definite\n$$\\sum_{i,j}v_i\\Gamma_{ij}(0)v_j=\\frac{1}{N}\\sum_{s=1}^N\\sum_{i,j}v_i\\delta_i^s\\delta_j^s v_j=\\frac{1}{N}\\sum_{s=1}^N\\sum_{i}|v_i\\delta_i^s|^2\\geq 0\\,,$$ for every $v\\in\\mathbb{R}^M$, while such an identity does not hold for larger windows/lags.\nFor observables defined on a single ensemble our approximation is equivalent to assuming that the integrated autocorrelation time of an off-diagonal element is equal to the geometric mean of the integrated autocorrelation times of the corresponding diagonal elements.\n$$\\tau_{\\mathrm{int}, ij}=\\sqrt{\\tau_{\\mathrm{int}, i}\\times \\tau_{\\mathrm{int}, j}}$$\nThis construction ensures that the estimated covariance matrix is positive semi-definite (up to numerical rounding errors).

    \n", "signature": "(obs, visualize=False, correlation=False, smooth=None, **kwargs):", "funcdef": "def"}, "pyerrors.obs.import_jackknife": {"fullname": "pyerrors.obs.import_jackknife", "modulename": "pyerrors.obs", "qualname": "import_jackknife", "kind": "function", "doc": "

    Imports jackknife samples and returns an Obs

    \n\n
    Parameters
    \n\n
      \n
    • jacks (numpy.ndarray):\nnumpy array containing the mean value as zeroth entry and\nthe N jackknife samples as first to Nth entry.
    • \n
    • name (str):\nname of the ensemble the samples are defined on.
    • \n
    \n", "signature": "(jacks, name, idl=None):", "funcdef": "def"}, "pyerrors.obs.import_bootstrap": {"fullname": "pyerrors.obs.import_bootstrap", "modulename": "pyerrors.obs", "qualname": "import_bootstrap", "kind": "function", "doc": "

    Imports bootstrap samples and returns an Obs

    \n\n
    Parameters
    \n\n
      \n
    • boots (numpy.ndarray):\nnumpy array containing the mean value as zeroth entry and\nthe N bootstrap samples as first to Nth entry.
    • \n
    • name (str):\nname of the ensemble the samples are defined on.
    • \n
    • random_numbers (np.ndarray):\nArray of shape (samples, length) containing the random numbers to generate the bootstrap samples,\nwhere samples is the number of bootstrap samples and length is the length of the original Monte Carlo\nchain to be reconstructed.
    • \n
    \n", "signature": "(boots, name, random_numbers):", "funcdef": "def"}, "pyerrors.obs.merge_obs": {"fullname": "pyerrors.obs.merge_obs", "modulename": "pyerrors.obs", "qualname": "merge_obs", "kind": "function", "doc": "

    Combine all observables in list_of_obs into one new observable

    \n\n
    Parameters
    \n\n
      \n
    • list_of_obs (list):\nlist of the Obs objects to be combined
    • \n
    \n\n
    Notes
    \n\n

    It is not possible to combine obs which are based on the same replicum

    \n", "signature": "(list_of_obs):", "funcdef": "def"}, "pyerrors.obs.cov_Obs": {"fullname": "pyerrors.obs.cov_Obs", "modulename": "pyerrors.obs", "qualname": "cov_Obs", "kind": "function", "doc": "

    Create an Obs based on mean(s) and a covariance matrix

    \n\n
    Parameters
    \n\n
      \n
    • mean (list of floats or float):\nN mean value(s) of the new Obs
    • \n
    • cov (list or array):\n2d (NxN) Covariance matrix, 1d diagonal entries or 0d covariance
    • \n
    • name (str):\nidentifier for the covariance matrix
    • \n
    • grad (list or array):\nGradient of the Covobs wrt. the means belonging to cov.
    • \n
    \n", "signature": "(means, cov, name, grad=None):", "funcdef": "def"}, "pyerrors.roots": {"fullname": "pyerrors.roots", "modulename": "pyerrors.roots", "kind": "module", "doc": "

    \n"}, "pyerrors.roots.find_root": {"fullname": "pyerrors.roots.find_root", "modulename": "pyerrors.roots", "qualname": "find_root", "kind": "function", "doc": "

    Finds the root of the function func(x, d) where d is an Obs.

    \n\n
    Parameters
    \n\n
      \n
    • d (Obs):\nObs passed to the function.
    • \n
    • func (object):\nFunction whose root is to be found. Any numpy functions have to use the autograd.numpy wrapper.\nExample:

      \n\n
      \n
      import autograd.numpy as anp\ndef root_func(x, d):\n   return anp.exp(-x ** 2) - d\n
      \n
    • \n
    • guess (float):\nInitial guess for the root search.

    • \n
    \n\n
    Returns
    \n\n
      \n
    • res (Obs):\nObs valued root of the function.
    • \n
    \n", "signature": "(d, func, guess=1.0, **kwargs):", "funcdef": "def"}, "pyerrors.version": {"fullname": "pyerrors.version", "modulename": "pyerrors.version", "kind": "module", "doc": "

    \n"}}, "docInfo": {"pyerrors": {"qualname": 0, "fullname": 1, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 8312}, "pyerrors.correlators": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 108}, "pyerrors.correlators.Corr.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 40, "bases": 0, "doc": 94}, "pyerrors.correlators.Corr.tag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.content": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.T": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.prange": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.reweighted": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.gamma_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 13}, "pyerrors.correlators.Corr.gm": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 13}, "pyerrors.correlators.Corr.projected": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 64}, "pyerrors.correlators.Corr.item": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 53}, "pyerrors.correlators.Corr.plottable": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 31}, "pyerrors.correlators.Corr.symmetric": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 9}, "pyerrors.correlators.Corr.anti_symmetric": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 10}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 13}, "pyerrors.correlators.Corr.matrix_symmetric": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 10}, "pyerrors.correlators.Corr.GEVP": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 47, "bases": 0, "doc": 326}, "pyerrors.correlators.Corr.Eigenvalue": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 50, "bases": 0, "doc": 59}, "pyerrors.correlators.Corr.Hankel": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 67}, "pyerrors.correlators.Corr.roll": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 26}, "pyerrors.correlators.Corr.reverse": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 9}, "pyerrors.correlators.Corr.thin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 43}, "pyerrors.correlators.Corr.correlate": {"qualname": 2, "fullname": 
4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 53}, "pyerrors.correlators.Corr.reweight": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 79}, "pyerrors.correlators.Corr.T_symmetry": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 51}, "pyerrors.correlators.Corr.deriv": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 25, "bases": 0, "doc": 47}, "pyerrors.correlators.Corr.second_deriv": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 25, "bases": 0, "doc": 126}, "pyerrors.correlators.Corr.m_eff": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 36, "bases": 0, "doc": 148}, "pyerrors.correlators.Corr.fit": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 110}, "pyerrors.correlators.Corr.plateau": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 47, "bases": 0, "doc": 92}, "pyerrors.correlators.Corr.set_prange": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 11}, "pyerrors.correlators.Corr.show": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 161, "bases": 0, "doc": 263}, "pyerrors.correlators.Corr.spaghetti_plot": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 42}, "pyerrors.correlators.Corr.dump": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 69}, "pyerrors.correlators.Corr.print": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.sqrt": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.log": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.exp": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.sin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.cos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.tan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.sinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.cosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.tanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arcsin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arccos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arctan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, 
"pyerrors.correlators.Corr.arcsinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arccosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.arctanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.real": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.imag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.correlators.Corr.prune": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 46, "bases": 0, "doc": 325}, "pyerrors.correlators.Corr.N": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 39, "bases": 0, "doc": 100}, "pyerrors.covobs.Covobs.name": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.value": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.errsq": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 12}, "pyerrors.covobs.Covobs.cov": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.covobs.Covobs.grad": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaX": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaY": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaZ": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gammaT": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 50, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gamma": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 210, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.gamma5": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 54, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.identity": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 50, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.dirac.epsilon_tensor": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 40}, "pyerrors.dirac.epsilon_tensor_rank4": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 41}, "pyerrors.dirac.Grid_gamma": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 
12, "bases": 0, "doc": 9}, "pyerrors.fits": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.fits.Fit_result": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 3, "doc": 75}, "pyerrors.fits.Fit_result.fit_parameters": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.fits.Fit_result.gamma_method": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 10}, "pyerrors.fits.Fit_result.gm": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 10}, "pyerrors.fits.least_squares": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 48, "bases": 0, "doc": 902}, "pyerrors.fits.total_least_squares": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 468}, "pyerrors.fits.fit_lin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 110}, "pyerrors.fits.qqplot": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 40, "bases": 0, "doc": 39}, "pyerrors.fits.residual_plot": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 40, "bases": 0, "doc": 45}, "pyerrors.fits.error_band": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 48}, "pyerrors.fits.ks_test": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 52}, "pyerrors.input": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 81}, "pyerrors.input.bdio": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.bdio.read_ADerrors": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 35, "bases": 0, "doc": 122}, "pyerrors.input.bdio.write_ADerrors": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 41, "bases": 0, "doc": 126}, "pyerrors.input.bdio.read_mesons": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 35, "bases": 0, "doc": 211}, "pyerrors.input.bdio.read_dSdm": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 35, "bases": 0, "doc": 191}, "pyerrors.input.dobs": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.dobs.create_pobs_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 62, "bases": 0, "doc": 186}, "pyerrors.input.dobs.write_pobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 85, "bases": 0, "doc": 214}, "pyerrors.input.dobs.read_pobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 164}, "pyerrors.input.dobs.import_dobs_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 33, "bases": 0, "doc": 184}, "pyerrors.input.dobs.read_dobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 207}, "pyerrors.input.dobs.create_dobs_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 82, "bases": 0, "doc": 229}, 
"pyerrors.input.dobs.write_dobs": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 99, "bases": 0, "doc": 252}, "pyerrors.input.hadrons": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.hadrons.read_meson_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 57, "bases": 0, "doc": 181}, "pyerrors.input.hadrons.extract_t0_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 73, "bases": 0, "doc": 157}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 45, "bases": 0, "doc": 106}, "pyerrors.input.hadrons.Npr_matrix": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 2, "doc": 1069}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 30}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 32, "bases": 0, "doc": 99}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 32, "bases": 0, "doc": 99}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 60, "bases": 0, "doc": 112}, "pyerrors.input.json": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.json.create_json_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 138}, "pyerrors.input.json.dump_to_json": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 49, "bases": 0, "doc": 174}, "pyerrors.input.json.import_json_string": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 33, "bases": 0, "doc": 168}, "pyerrors.input.json.load_json": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 42, "bases": 0, "doc": 188}, "pyerrors.input.json.dump_dict_to_json": {"qualname": 4, "fullname": 7, "annotation": 0, "default_value": 0, "signature": 63, "bases": 0, "doc": 184}, "pyerrors.input.json.load_json_dict": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 56, "bases": 0, "doc": 172}, "pyerrors.input.misc": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.misc.fit_t0": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 250}, "pyerrors.input.misc.read_pbp": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 75}, "pyerrors.input.openQCD": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.openQCD.read_rwms": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 48, "bases": 0, "doc": 271}, "pyerrors.input.openQCD.extract_t0": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 85, "bases": 0, "doc": 518}, "pyerrors.input.openQCD.extract_w0": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 85, "bases": 0, "doc": 520}, "pyerrors.input.openQCD.read_qtop": 
{"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 53, "bases": 0, "doc": 383}, "pyerrors.input.openQCD.read_gf_coupling": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 50, "bases": 0, "doc": 345}, "pyerrors.input.openQCD.qtop_projection": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 72}, "pyerrors.input.openQCD.read_qtop_sector": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 363}, "pyerrors.input.openQCD.read_ms5_xsf": {"qualname": 3, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 47, "bases": 0, "doc": 308}, "pyerrors.input.pandas": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.pandas.to_sql": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 54, "bases": 0, "doc": 113}, "pyerrors.input.pandas.read_sql": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 105}, "pyerrors.input.pandas.dump_df": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 111}, "pyerrors.input.pandas.load_df": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 32, "bases": 0, "doc": 115}, "pyerrors.input.sfcf": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.sfcf.read_sfcf": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 139, "bases": 0, "doc": 421}, "pyerrors.input.utils": {"qualname": 0, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.input.utils.sort_names": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 81}, "pyerrors.input.utils.check_idl": {"qualname": 2, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 70}, "pyerrors.linalg": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.linalg.matmul": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 13, "bases": 0, "doc": 54}, "pyerrors.linalg.jack_matmul": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 13, "bases": 0, "doc": 58}, "pyerrors.linalg.einsum": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 52}, "pyerrors.linalg.inv": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 10}, "pyerrors.linalg.cholesky": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 9}, "pyerrors.linalg.det": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 8}, "pyerrors.linalg.eigh": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 20}, "pyerrors.linalg.eig": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 17}, "pyerrors.linalg.pinv": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 13}, "pyerrors.linalg.svd": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, 
"bases": 0, "doc": 13}, "pyerrors.misc": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.misc.print_config": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 7, "bases": 0, "doc": 12}, "pyerrors.misc.errorbar": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 65, "bases": 0, "doc": 69}, "pyerrors.misc.dump_object": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 69}, "pyerrors.misc.load_object": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 42}, "pyerrors.misc.pseudo_Obs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 105}, "pyerrors.misc.gen_correlated_data": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 42, "bases": 0, "doc": 127}, "pyerrors.mpm": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.mpm.matrix_pencil_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 38, "bases": 0, "doc": 165}, "pyerrors.obs": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 238}, "pyerrors.obs.Obs.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 62}, "pyerrors.obs.Obs.S_global": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 2, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.S_dict": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 1, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tau_exp_global": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 2, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tau_exp_dict": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 1, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N_sigma_global": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 2, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N_sigma_dict": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 1, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.names": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.shape": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.r_values": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.deltas": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.idl": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.ddvalue": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.reweighted": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, 
"pyerrors.obs.Obs.tag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.value": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.dvalue": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_names": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.cov_names": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.mc_names": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_content": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.covobs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.gamma_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 133}, "pyerrors.obs.Obs.gm": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 133}, "pyerrors.obs.Obs.details": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 34}, "pyerrors.obs.Obs.reweight": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 16, "bases": 0, "doc": 85}, "pyerrors.obs.Obs.is_zero_within_error": {"qualname": 5, "fullname": 7, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 50}, "pyerrors.obs.Obs.is_zero": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 35}, "pyerrors.obs.Obs.plot_tauint": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 34}, "pyerrors.obs.Obs.plot_rho": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 35}, "pyerrors.obs.Obs.plot_rep_dist": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 14}, "pyerrors.obs.Obs.plot_history": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 35}, "pyerrors.obs.Obs.plot_piechart": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 21, "bases": 0, "doc": 47}, "pyerrors.obs.Obs.dump": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 51, "bases": 0, "doc": 89}, "pyerrors.obs.Obs.export_jackknife": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 101}, "pyerrors.obs.Obs.export_bootstrap": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 43, "bases": 0, "doc": 185}, "pyerrors.obs.Obs.sqrt": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.log": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.exp": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.sin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 
11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.cos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arcsin": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arccos": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arctan": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.sinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.cosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arcsinh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arccosh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.arctanh": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.N_sigma": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.S": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_ddvalue": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_drho": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_dtauint": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_dvalue": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_n_dtauint": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_n_tauint": {"qualname": 4, "fullname": 6, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_rho": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_tauint": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.e_windowsize": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.Obs.tau_exp": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 9}, "pyerrors.obs.CObs.__init__": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 20, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.tag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, 
"pyerrors.obs.CObs.real": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.imag": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.obs.CObs.gamma_method": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 14}, "pyerrors.obs.CObs.is_zero": {"qualname": 3, "fullname": 5, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 15}, "pyerrors.obs.CObs.conjugate": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 11, "bases": 0, "doc": 3}, "pyerrors.obs.derived_observable": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 184}, "pyerrors.obs.reweight": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 23, "bases": 0, "doc": 99}, "pyerrors.obs.correlate": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 18, "bases": 0, "doc": 75}, "pyerrors.obs.covariance": {"qualname": 1, "fullname": 3, "annotation": 0, "default_value": 0, "signature": 48, "bases": 0, "doc": 374}, "pyerrors.obs.import_jackknife": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 26, "bases": 0, "doc": 61}, "pyerrors.obs.import_bootstrap": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 22, "bases": 0, "doc": 107}, "pyerrors.obs.merge_obs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 13, "bases": 0, "doc": 56}, "pyerrors.obs.cov_Obs": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 31, "bases": 0, "doc": 90}, "pyerrors.roots": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}, "pyerrors.roots.find_root": {"qualname": 2, "fullname": 4, "annotation": 0, "default_value": 0, "signature": 34, "bases": 0, "doc": 181}, "pyerrors.version": {"qualname": 0, "fullname": 2, "annotation": 0, "default_value": 0, "signature": 0, "bases": 0, "doc": 3}}, "length": 244, "save": true}, "index": {"qualname": {"root": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, 
"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}}, "df": 54, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2, "d": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}}, "df": 2}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.misc.print_config": {"tf": 1}}, "df": 1}}}, "j": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}}, "df": 2}}, "v": {"docs": {"pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.covobs.Covobs": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.name": {"tf": 1}, "pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.covobs.Covobs.grad": {"tf": 1}, "pyerrors.obs.Obs.covobs": {"tf": 1}}, "df": 8}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": 
{"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 8}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.cholesky": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4}}, "v": {"docs": {"pyerrors.linalg.inv": {"tf": 1}}, "df": 1}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 4}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}}, "df": 2}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 4}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.dirac.identity": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}}, "df": 2}}}, "t": {"0": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}}, "df": 3}, "docs": {"pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}}, "df": 3}, "n": {"docs": {"pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}}, "df": 2}}, "u": {"docs": {"pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, 
"pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}, "o": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 2}}}}, "o": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}}, "df": 1}}}}}}}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 3}}}, "b": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.linalg.pinv": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, 
"df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}}, "df": 2}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 18}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.residual_plot": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"4": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}, "w": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}}, "df": 2}}}, "g": {"5": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"5": {"docs": {"pyerrors.dirac.gamma5": 
{"tf": 1}}, "df": 1}, "docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 6, "x": {"docs": {"pyerrors.dirac.gammaX": {"tf": 1}}, "df": 1}, "y": {"docs": {"pyerrors.dirac.gammaY": {"tf": 1}}, "df": 1}, "z": {"docs": {"pyerrors.dirac.gammaZ": {"tf": 1}}, "df": 1}, "t": {"docs": {"pyerrors.dirac.gammaT": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}, "n": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.covobs.Covobs.grad": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}}}, "f": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 3}}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 5}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 5}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2}}}}}, "s": {"5": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "c": {"docs": {"pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}}, "df": 3, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}}, 
"df": 4}}, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.shape": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "q": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}}, "df": 2}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}}, "df": 3}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 5}}}}}, "f": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.linalg.svd": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, 
"n": {"docs": {"pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}}, "df": 2}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 2}}}}}}}}, "e": {"docs": {"pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 11, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.linalg.eig": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}}}}}, "h": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "x": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 5, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}}}, "d": {"5": {"docs": 
{"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}, "docs": {}, "df": 0}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {"pyerrors.linalg.det": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.details": {"tf": 1}}, "df": 1}}}}}, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.deltas": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 6}}}, "s": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}}, "df": 5}}}, "f": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}}, "df": 2}}}}}}, "v": {"docs": {}, "df": 0, 
"a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.e_drho": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}}, "df": 2}}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 7}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}}, "df": 1}}}, "n": {"docs": {"pyerrors.correlators.Corr.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}}, "df": 7, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.name": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 5}}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1}}}}}}, "q": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, 
"df": 3}}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 2}}}}}}}}}, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}, "w": {"0": {"docs": {"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 1}}}}}}}}}}, "j": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 6}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1, "k": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 2}}}}}}}}}, "x": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.shape": {"tf": 1}, "pyerrors.obs.Obs.r_values": {"tf": 1}, "pyerrors.obs.Obs.deltas": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}, "pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}, "pyerrors.obs.Obs.dvalue": {"tf": 1}, 
"pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.covobs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 68, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 3}}}}}}, "fullname": {"root": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, 
"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}, "pyerrors.covobs": {"tf": 1}, "pyerrors.covobs.Covobs": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.name": {"tf": 1}, "pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.covobs.Covobs.grad": {"tf": 1}, "pyerrors.dirac": {"tf": 1}, "pyerrors.dirac.gammaX": {"tf": 1}, "pyerrors.dirac.gammaY": {"tf": 1}, "pyerrors.dirac.gammaZ": {"tf": 1}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.gamma5": {"tf": 1}, "pyerrors.dirac.identity": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, 
"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.shape": {"tf": 1}, "pyerrors.obs.Obs.r_values": {"tf": 1}, "pyerrors.obs.Obs.deltas": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}, "pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}, "pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": 
{"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, "pyerrors.obs.Obs.covobs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}, "pyerrors.version": {"tf": 1}}, "df": 244}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 2}}}}, "o": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, 
"pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.pandas": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 5}}}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 3}}}, "b": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.linalg.pinv": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, 
"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}}, "df": 54, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.prange": {"tf": 1}, "pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, 
"pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.correlators.Corr.imag": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.correlators.Corr.N": {"tf": 1}}, "df": 55}}}, "e": {"docs": {"pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2, "d": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.content": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}}, "df": 2}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.misc.print_config": {"tf": 1}}, "df": 1}}}, "j": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}}, "df": 2}}, "v": {"docs": {"pyerrors.covobs.Covobs.cov": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.covobs": {"tf": 1}, "pyerrors.covobs.Covobs": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.name": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.value": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.cov": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.grad": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.covobs": {"tf": 1}}, "df": 9}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 8}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": 
{"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.cholesky": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 4}}, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input": {"tf": 1}, "pyerrors.input.bdio": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 52}}}, "v": {"docs": {"pyerrors.linalg.inv": {"tf": 1}}, "df": 1}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 4}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.imag": {"tf": 
1}, "pyerrors.obs.CObs.imag": {"tf": 1}}, "df": 2}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 4}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.dirac.identity": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.idl": {"tf": 1}}, "df": 2}}}, "t": {"0": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}}, "df": 3}, "docs": {"pyerrors.correlators.Corr.T": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.tag": {"tf": 1}, "pyerrors.obs.Obs.tag": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}}, "df": 3}, "n": {"docs": {"pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.tanh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}}, "df": 2}}, "u": {"docs": {"pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}, "o": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.reweighted": {"tf": 1}, "pyerrors.obs.Obs.reweighted": {"tf": 1}}, "df": 2}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.real": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": 
{"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 18}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.residual_plot": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.roots": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 2}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"4": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}, "w": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}}, "df": 2}}}, "g": {"5": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"5": {"docs": {"pyerrors.dirac.gamma5": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 6, "x": {"docs": {"pyerrors.dirac.gammaX": {"tf": 1}}, "df": 1}, "y": {"docs": {"pyerrors.dirac.gammaY": {"tf": 1}}, "df": 1}, "z": {"docs": {"pyerrors.dirac.gammaZ": {"tf": 1}}, "df": 1}, "t": {"docs": {"pyerrors.dirac.gammaT": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}, "n": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.covobs.Covobs.grad": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}}}, "f": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 
0, "l": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 3}}}}}}, "m": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 5}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 5}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.misc": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.misc": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 10}}}, "s": {"5": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "p": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.mpm": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2}}, "c": {"docs": {"pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.S": {"tf": 1}}, "df": 3, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}}, "df": 4}}, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": 
{"pyerrors.obs.Obs.shape": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "q": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}}, "df": 2}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}}, "df": 3}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 5}}}}}, "f": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.sfcf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 2}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.linalg.svd": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}}, "df": 2, "h": {"docs": {"pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}}, "df": 2}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 2}}}}}}}}, "e": {"docs": {"pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.e_content": {"tf": 1}, 
"pyerrors.obs.Obs.e_ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_drho": {"tf": 1}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_rho": {"tf": 1}, "pyerrors.obs.Obs.e_tauint": {"tf": 1}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 11, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.linalg.eig": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}}}}}, "h": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "x": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.tau_exp": {"tf": 1}}, "df": 5, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 9}}}}}}, "d": {"5": {"docs": 
{"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}, "docs": {}, "df": 0}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {"pyerrors.linalg.det": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.details": {"tf": 1}}, "df": 1}}}}}, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.deltas": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 6}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.dirac": {"tf": 1}, "pyerrors.dirac.gammaX": {"tf": 1}, "pyerrors.dirac.gammaY": {"tf": 1}, "pyerrors.dirac.gammaZ": {"tf": 1}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.gamma5": {"tf": 1}, "pyerrors.dirac.identity": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 11}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}}, "df": 5}}}, "s": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 
1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 8}}}, "f": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.ddvalue": {"tf": 1}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1}}, "df": 2}}}}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.dvalue": {"tf": 1}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.e_drho": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.e_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}}, "df": 2}}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 7, "s": {"docs": {"pyerrors.fits": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}}, "df": 12}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}}, "df": 2}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.linalg": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, 
"pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 11}}}}}}, "n": {"docs": {"pyerrors.correlators.Corr.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}, "pyerrors.obs.Obs.N": {"tf": 1}, "pyerrors.obs.Obs.N_sigma": {"tf": 1}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1}}, "df": 7, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.name": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.names": {"tf": 1}, "pyerrors.obs.Obs.e_names": {"tf": 1}, "pyerrors.obs.Obs.cov_names": {"tf": 1}, "pyerrors.obs.Obs.mc_names": {"tf": 1}}, "df": 5}}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.value": {"tf": 1}, "pyerrors.obs.Obs.value": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.Obs.r_values": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.version": {"tf": 1}}, "df": 1}}}}}}}, "q": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 5}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 2}}}}}}}}}, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}, "w": {"0": {"docs": {"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": 
{"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.e_windowsize": {"tf": 1}}, "df": 1}}}}}}}}}}, "j": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}}, "df": 7}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1, "k": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 2}}}}}}}}}, "o": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 9}}}}}}, "b": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.S_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.S_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.shape": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.r_values": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.deltas": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.idl": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.ddvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweighted": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tag": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.value": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.cov_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.mc_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_content": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.covobs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 
1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.details": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.sqrt": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.log": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.exp": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.sin": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.cos": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tan": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arcsin": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arccos": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arctan": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.sinh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.cosh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tanh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arcsinh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arccosh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.arctanh": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.S": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_drho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_rho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.tau_exp": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1}, "pyerrors.obs.CObs.tag": {"tf": 1}, "pyerrors.obs.CObs.real": {"tf": 1}, "pyerrors.obs.CObs.imag": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 83, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}, "x": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.utils": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, 
"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 3}}}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 3}}}}}}, "annotation": {"root": {"docs": {}, "df": 0}}, "default_value": {"root": {"0": {"docs": {"pyerrors.dirac.gammaX": {"tf": 5.291502622129181}, "pyerrors.dirac.gammaY": {"tf": 5.291502622129181}, "pyerrors.dirac.gammaZ": {"tf": 5.291502622129181}, "pyerrors.dirac.gammaT": {"tf": 5.291502622129181}, "pyerrors.dirac.gamma": {"tf": 10.583005244258363}, "pyerrors.dirac.gamma5": {"tf": 5.291502622129181}, "pyerrors.dirac.identity": {"tf": 5.291502622129181}, "pyerrors.obs.Obs.S_global": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 10}, "1": {"docs": {"pyerrors.dirac.gammaX": {"tf": 2}, "pyerrors.dirac.gammaY": {"tf": 2}, "pyerrors.dirac.gammaZ": {"tf": 2}, "pyerrors.dirac.gammaT": {"tf": 2}, "pyerrors.dirac.gamma": {"tf": 4}, "pyerrors.dirac.gamma5": {"tf": 2}, "pyerrors.dirac.identity": {"tf": 2}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1}}, "df": 8}, "2": {"docs": {"pyerrors.obs.Obs.S_global": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.dirac.gammaX": {"tf": 2.23606797749979}, "pyerrors.dirac.gammaY": {"tf": 2.23606797749979}, "pyerrors.dirac.gammaZ": {"tf": 2.23606797749979}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 4.123105625617661}, "pyerrors.dirac.gamma5": {"tf": 2.23606797749979}, "pyerrors.dirac.identity": {"tf": 1}, "pyerrors.obs.Obs.S_dict": {"tf": 1}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1}}, "df": 10, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.dirac.gammaX": {"tf": 1}, "pyerrors.dirac.gammaY": {"tf": 1}, "pyerrors.dirac.gammaZ": {"tf": 1}, "pyerrors.dirac.gammaT": {"tf": 1}, "pyerrors.dirac.gamma": {"tf": 1}, "pyerrors.dirac.gamma5": {"tf": 1}, "pyerrors.dirac.identity": {"tf": 1}}, "df": 7}}}}}, "j": {"docs": {"pyerrors.dirac.gammaX": {"tf": 4}, "pyerrors.dirac.gammaY": {"tf": 4}, "pyerrors.dirac.gammaZ": {"tf": 4}, "pyerrors.dirac.gammaT": {"tf": 4}, "pyerrors.dirac.gamma": {"tf": 8}, "pyerrors.dirac.gamma5": {"tf": 4}, "pyerrors.dirac.identity": {"tf": 4}}, "df": 7}}}, "signature": {"root": {"0": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.CObs.__init__": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 16, "c": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "1": {"0": {"0": {"0": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}, "docs": 
{"pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 2, "/": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}, "2": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "x": {"6": {"4": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"3": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}}}}}}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 11, "e": {"docs": {"pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 1}}, "2": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 3}, "3": {"9": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.dump": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 
1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.8284271247461903}, "pyerrors.misc.errorbar": {"tf": 2}, "pyerrors.obs.Obs.dump": {"tf": 2}}, "df": 35}, "docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}, "5": {"0": {"0": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 4}, "docs": {"pyerrors.correlators.Corr.__init__": {"tf": 5.744562646538029}, "pyerrors.correlators.Corr.gamma_method": {"tf": 4}, "pyerrors.correlators.Corr.gm": {"tf": 4}, "pyerrors.correlators.Corr.projected": {"tf": 5.830951894845301}, "pyerrors.correlators.Corr.item": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.plottable": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.GEVP": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 6.324555320336759}, "pyerrors.correlators.Corr.Hankel": {"tf": 4.69041575982343}, "pyerrors.correlators.Corr.roll": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.reverse": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.thin": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr.correlate": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.reweight": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 4.69041575982343}, "pyerrors.correlators.Corr.deriv": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.m_eff": {"tf": 5.291502622129181}, "pyerrors.correlators.Corr.fit": {"tf": 6}, "pyerrors.correlators.Corr.plateau": {"tf": 6}, "pyerrors.correlators.Corr.set_prange": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.show": {"tf": 11.313708498984761}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.dump": {"tf": 5.477225575051661}, "pyerrors.correlators.Corr.print": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.sqrt": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.log": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.exp": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.sin": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.cos": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.tan": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.sinh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.cosh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.tanh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arcsin": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arccos": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arctan": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arcsinh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arccosh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.arctanh": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.prune": {"tf": 6.164414002968976}, "pyerrors.covobs.Covobs.__init__": {"tf": 5.656854249492381}, 
"pyerrors.covobs.Covobs.errsq": {"tf": 3.1622776601683795}, "pyerrors.dirac.epsilon_tensor": {"tf": 4.242640687119285}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 4.69041575982343}, "pyerrors.dirac.Grid_gamma": {"tf": 3.1622776601683795}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 4}, "pyerrors.fits.Fit_result.gm": {"tf": 4}, "pyerrors.fits.least_squares": {"tf": 6.324555320336759}, "pyerrors.fits.total_least_squares": {"tf": 5.656854249492381}, "pyerrors.fits.fit_lin": {"tf": 4.47213595499958}, "pyerrors.fits.qqplot": {"tf": 5.656854249492381}, "pyerrors.fits.residual_plot": {"tf": 5.656854249492381}, "pyerrors.fits.error_band": {"tf": 4.242640687119285}, "pyerrors.fits.ks_test": {"tf": 3.7416573867739413}, "pyerrors.input.bdio.read_ADerrors": {"tf": 5.0990195135927845}, "pyerrors.input.bdio.write_ADerrors": {"tf": 5.477225575051661}, "pyerrors.input.bdio.read_mesons": {"tf": 5.0990195135927845}, "pyerrors.input.bdio.read_dSdm": {"tf": 5.0990195135927845}, "pyerrors.input.dobs.create_pobs_string": {"tf": 7.14142842854285}, "pyerrors.input.dobs.write_pobs": {"tf": 8.426149773176359}, "pyerrors.input.dobs.read_pobs": {"tf": 5.830951894845301}, "pyerrors.input.dobs.import_dobs_string": {"tf": 5.0990195135927845}, "pyerrors.input.dobs.read_dobs": {"tf": 5.830951894845301}, "pyerrors.input.dobs.create_dobs_string": {"tf": 8.12403840463596}, "pyerrors.input.dobs.write_dobs": {"tf": 8.94427190999916}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 6.6332495807108}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 7.54983443527075}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 6}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 5.0990195135927845}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 5.0990195135927845}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 6.855654600401044}, "pyerrors.input.json.create_json_string": {"tf": 5.291502622129181}, "pyerrors.input.json.dump_to_json": {"tf": 6.324555320336759}, "pyerrors.input.json.import_json_string": {"tf": 5.0990195135927845}, "pyerrors.input.json.load_json": {"tf": 5.830951894845301}, "pyerrors.input.json.dump_dict_to_json": {"tf": 7.0710678118654755}, "pyerrors.input.json.load_json_dict": {"tf": 6.6332495807108}, "pyerrors.input.misc.fit_t0": {"tf": 5.656854249492381}, "pyerrors.input.misc.read_pbp": {"tf": 4.47213595499958}, "pyerrors.input.openQCD.read_rwms": {"tf": 6.164414002968976}, "pyerrors.input.openQCD.extract_t0": {"tf": 8.18535277187245}, "pyerrors.input.openQCD.extract_w0": {"tf": 8.18535277187245}, "pyerrors.input.openQCD.read_qtop": {"tf": 6.48074069840786}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 6.324555320336759}, "pyerrors.input.openQCD.qtop_projection": {"tf": 4.242640687119285}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 5.656854249492381}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 6.164414002968976}, "pyerrors.input.pandas.to_sql": {"tf": 6.48074069840786}, "pyerrors.input.pandas.read_sql": {"tf": 5.291502622129181}, "pyerrors.input.pandas.dump_df": {"tf": 4.69041575982343}, "pyerrors.input.pandas.load_df": {"tf": 5.0990195135927845}, "pyerrors.input.sfcf.read_sfcf": {"tf": 10.44030650891055}, "pyerrors.input.utils.sort_names": {"tf": 3.1622776601683795}, "pyerrors.input.utils.check_idl": {"tf": 3.7416573867739413}, "pyerrors.linalg.matmul": {"tf": 3.4641016151377544}, "pyerrors.linalg.jack_matmul": {"tf": 3.4641016151377544}, "pyerrors.linalg.einsum": {"tf": 4}, "pyerrors.linalg.inv": {"tf": 3.1622776601683795}, "pyerrors.linalg.cholesky": {"tf": 
3.1622776601683795}, "pyerrors.linalg.det": {"tf": 3.1622776601683795}, "pyerrors.linalg.eigh": {"tf": 4}, "pyerrors.linalg.eig": {"tf": 4}, "pyerrors.linalg.pinv": {"tf": 4}, "pyerrors.linalg.svd": {"tf": 4}, "pyerrors.misc.print_config": {"tf": 2.6457513110645907}, "pyerrors.misc.errorbar": {"tf": 6.708203932499369}, "pyerrors.misc.dump_object": {"tf": 4.47213595499958}, "pyerrors.misc.load_object": {"tf": 3.1622776601683795}, "pyerrors.misc.pseudo_Obs": {"tf": 5.0990195135927845}, "pyerrors.misc.gen_correlated_data": {"tf": 5.830951894845301}, "pyerrors.mpm.matrix_pencil_method": {"tf": 5.656854249492381}, "pyerrors.obs.Obs.__init__": {"tf": 5.0990195135927845}, "pyerrors.obs.Obs.gamma_method": {"tf": 4}, "pyerrors.obs.Obs.gm": {"tf": 4}, "pyerrors.obs.Obs.details": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.reweight": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.is_zero": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_tauint": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_rho": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.plot_history": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.plot_piechart": {"tf": 4.242640687119285}, "pyerrors.obs.Obs.dump": {"tf": 6.324555320336759}, "pyerrors.obs.Obs.export_jackknife": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 5.830951894845301}, "pyerrors.obs.Obs.sqrt": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.log": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.exp": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.sin": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.cos": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.tan": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arcsin": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arccos": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arctan": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.sinh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.cosh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.tanh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arcsinh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arccosh": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.arctanh": {"tf": 3.1622776601683795}, "pyerrors.obs.CObs.__init__": {"tf": 4}, "pyerrors.obs.CObs.gamma_method": {"tf": 4}, "pyerrors.obs.CObs.is_zero": {"tf": 3.1622776601683795}, "pyerrors.obs.CObs.conjugate": {"tf": 3.1622776601683795}, "pyerrors.obs.derived_observable": {"tf": 5.291502622129181}, "pyerrors.obs.reweight": {"tf": 4.47213595499958}, "pyerrors.obs.correlate": {"tf": 3.7416573867739413}, "pyerrors.obs.covariance": {"tf": 6.324555320336759}, "pyerrors.obs.import_jackknife": {"tf": 4.69041575982343}, "pyerrors.obs.import_bootstrap": {"tf": 4.242640687119285}, "pyerrors.obs.merge_obs": {"tf": 3.1622776601683795}, "pyerrors.obs.cov_Obs": {"tf": 5.0990195135927845}, "pyerrors.roots.find_root": {"tf": 5.291502622129181}}, "df": 159, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 2, "t": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 2}}}}}}}, "t": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1, "r": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 4}}, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}}}}}}}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}}}}}}, "f": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 2}, "b": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 3, "n": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}}}}, "d": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6, "l": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, 
"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 9}}, "f": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 20}}, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.print": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": 
{"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}, "y": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, 
"pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 33}}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 13, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 2}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.correlators.Corr.sqrt": {"tf": 1}, "pyerrors.correlators.Corr.log": {"tf": 1}, "pyerrors.correlators.Corr.exp": {"tf": 1}, "pyerrors.correlators.Corr.sin": {"tf": 1}, "pyerrors.correlators.Corr.cos": {"tf": 1}, "pyerrors.correlators.Corr.tan": {"tf": 1}, "pyerrors.correlators.Corr.sinh": {"tf": 1}, "pyerrors.correlators.Corr.cosh": {"tf": 1}, "pyerrors.correlators.Corr.tanh": 
{"tf": 1}, "pyerrors.correlators.Corr.arcsin": {"tf": 1}, "pyerrors.correlators.Corr.arccos": {"tf": 1}, "pyerrors.correlators.Corr.arctan": {"tf": 1}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1}, "pyerrors.correlators.Corr.arccosh": {"tf": 1}, "pyerrors.correlators.Corr.arctanh": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.Obs.sqrt": {"tf": 1}, "pyerrors.obs.Obs.log": {"tf": 1}, "pyerrors.obs.Obs.exp": {"tf": 1}, "pyerrors.obs.Obs.sin": {"tf": 1}, "pyerrors.obs.Obs.cos": {"tf": 1}, "pyerrors.obs.Obs.tan": {"tf": 1}, "pyerrors.obs.Obs.arcsin": {"tf": 1}, "pyerrors.obs.Obs.arccos": {"tf": 1}, "pyerrors.obs.Obs.arctan": {"tf": 1}, "pyerrors.obs.Obs.sinh": {"tf": 1}, "pyerrors.obs.Obs.cosh": {"tf": 1}, "pyerrors.obs.Obs.tanh": {"tf": 1}, "pyerrors.obs.Obs.arcsinh": {"tf": 1}, "pyerrors.obs.Obs.arccosh": {"tf": 1}, "pyerrors.obs.Obs.arctanh": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.conjugate": {"tf": 1}}, "df": 79}}, "p": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}}}}}, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}, "y": 
{"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2}}}}, "a": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 5}}, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 4}}}}}}, "q": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "k": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 3, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, 
"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 43}}}}}, "e": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}, "v": {"1": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 3}}}}}}, "a": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 3}}}}}, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}, "l": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}}}}}}, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": 
{"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 2}}}, "l": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}, "r": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 2, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.print": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7}}, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}, "l": {"docs": {"pyerrors.obs.CObs.__init__": {"tf": 1}}, "df": 1}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}}}, "f": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 19}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": 
{"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 6}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 5}}}}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 10}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}, "j": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 3, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 3}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 1}}}}}, "t": {"0": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 3, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}, "2": {"docs": {}, 
"df": 0, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 17}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}, "u": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}}}}}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 7, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}, "x": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, 
"df": 0, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}}, "df": 2, "f": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.misc.dump_object": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 10, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 6}}}}}, "l": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 1}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}}, "df": 3}}}}}}, "t": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "t": 
{"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "/": {"3": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}}}}}}}}}}}}}}}}}}}}}}}, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}}}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}, "f": {"2": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 2}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}}, "z": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 13}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}}}, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}, "s": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, 
"df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}}}}}, "a": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 4}}}, "v": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}, "x": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "x": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}}, "df": 11, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "v": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2}}}, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}, "y": {"docs": 
{"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}}, "df": 7, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}, "b": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}, "i": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 1}}}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1}}}}}}}, "q": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}}, "df": 1}}}, "c": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}}}, "bases": {"root": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}, "doc": {"root": {"0": {"0": {"0": {"0": {"0": {"0": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": 
{"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "6": {"9": {"7": {"9": {"5": {"8": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "1": {"2": {"8": {"9": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"1": {"8": {"0": {"6": {"4": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 3}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "3": {"4": {"4": {"5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "4": {"5": {"8": {"5": {"6": {"5": {"0": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "5": {"4": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "6": {"4": {"2": {"3": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"5": {"6": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr.prune": {"tf": 2.6457513110645907}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, 
"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 2}, "pyerrors.obs.Obs.gm": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 2}}, "df": 28, "+": {"1": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "2": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"0": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}, "c": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "d": {"docs": {"pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 1}}, "1": {"0": {"0": {"0": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 3}, "3": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "4": {"7": {"2": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"7": {"5": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "1": {"9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "2": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}, "3": {"4": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "4": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "5": {"0": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "6": {"0": {"7": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 2}, "7": {"6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "9": {"0": {"6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "8": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "9": {"0": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, 
"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 24, "}": {"docs": {}, "df": 0, "^": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "+": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "d": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}, "*": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}, "/": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 2}}}, "2": {"0": {"0": {"4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "1": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "2": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "4": {"1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2}}, "df": 1}, "1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "3": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "7": {"2": {"1": {"8": {"6": {"6": {"7": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"0": {"9": {"7": {"7": {"6": {"2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 2}}, "df": 1}, "9": {"9": {"0": {"9": {"7": {"0": {"3": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 
1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {"pyerrors": {"tf": 5}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 18, "x": {"2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "f": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "d": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 5}, "*": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "3": {"0": {"6": {"7": {"5": {"2": {"0": {"1": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}, "1": {"4": {"9": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"2": {"7": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "docs": {}, "df": 0}, "3": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "3": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "4": {"9": {"7": {"6": {"8": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "8": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "9": {"docs": {"pyerrors": {"tf": 7.745966692414834}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}, "docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": 
{"tf": 1.4142135623730951}}, "df": 9, "a": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "4": {"0": {"3": {"2": {"0": {"9": {"8": {"3": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "9": {"5": {"9": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 7, "x": {"4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}, "5": {"0": {"0": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}, "1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "1": {"5": {"6": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "9": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "2": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"8": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "3": {"8": {"0": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "4": {"8": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "6": {"7": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "4": {"6": {"5": {"9": {"8": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "8": {"3": {"4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "6": {"4": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "5": {"0": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "6": {"8": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}}, "df": 2}, "7": {"0": {"0": {"0": {"0": {"0": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "1": {"4": {"2": {"2": {"9": {"0": {"0": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "+": {"0": {"0": 
{"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"0": {"4": {"6": {"6": {"5": {"8": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "4": {"5": {"7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"3": {"1": {"0": {"1": {"0": {"2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"0": {"7": {"7": {"5": {"2": {"4": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"7": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "8": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "1": {"4": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "2": {"4": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "4": {"5": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 6}, "9": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "3": {"3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 1}}, "df": 1}, "4": {"7": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "5": {"9": {"3": {"0": {"3": {"5": {"7": {"8": {"5": {"1": {"6": {"0": {"9": {"3": {"6": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "4": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "7": {"6": {"8": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"3": {"1": {"9": {"8": {"8": {"1": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"1": {"0": {"0": {"7": {"1": {"2": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "5": {"8": {"3": {"6": {"5": {"4": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 2.449489742783178}, 
"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 64.02343321003646}, "pyerrors.correlators": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 3}, "pyerrors.correlators.Corr.__init__": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr.tag": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.content": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.T": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prange": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.reweighted": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gm": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.item": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.plottable": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 10.535653752852738}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 4.358898943540674}, "pyerrors.correlators.Corr.Hankel": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.roll": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.reverse": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.correlate": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.reweight": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.deriv": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.m_eff": {"tf": 5.830951894845301}, "pyerrors.correlators.Corr.fit": {"tf": 5.291502622129181}, "pyerrors.correlators.Corr.plateau": {"tf": 5}, "pyerrors.correlators.Corr.set_prange": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 9}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 3.872983346207417}, "pyerrors.correlators.Corr.dump": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr.print": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.sqrt": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.log": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.exp": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.sin": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.cos": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.tan": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.sinh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.cosh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.tanh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arcsin": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arccos": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arctan": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arcsinh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arccosh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.arctanh": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.real": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.imag": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 
6.855654600401044}, "pyerrors.correlators.Corr.N": {"tf": 1.7320508075688772}, "pyerrors.covobs": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 5.916079783099616}, "pyerrors.covobs.Covobs.name": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.value": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.cov": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.grad": {"tf": 1.7320508075688772}, "pyerrors.dirac": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaX": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaY": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaZ": {"tf": 1.7320508075688772}, "pyerrors.dirac.gammaT": {"tf": 1.7320508075688772}, "pyerrors.dirac.gamma": {"tf": 1.7320508075688772}, "pyerrors.dirac.gamma5": {"tf": 1.7320508075688772}, "pyerrors.dirac.identity": {"tf": 1.7320508075688772}, "pyerrors.dirac.epsilon_tensor": {"tf": 4.123105625617661}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 4.123105625617661}, "pyerrors.dirac.Grid_gamma": {"tf": 1.7320508075688772}, "pyerrors.fits": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result": {"tf": 5.656854249492381}, "pyerrors.fits.Fit_result.fit_parameters": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gm": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 17.86057109949175}, "pyerrors.fits.total_least_squares": {"tf": 15.427248620541512}, "pyerrors.fits.fit_lin": {"tf": 5.916079783099616}, "pyerrors.fits.qqplot": {"tf": 3.605551275463989}, "pyerrors.fits.residual_plot": {"tf": 3.872983346207417}, "pyerrors.fits.error_band": {"tf": 3.7416573867739413}, "pyerrors.fits.ks_test": {"tf": 5}, "pyerrors.input": {"tf": 4.69041575982343}, "pyerrors.input.bdio": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_ADerrors": {"tf": 6.164414002968976}, "pyerrors.input.bdio.write_ADerrors": {"tf": 6.164414002968976}, "pyerrors.input.bdio.read_mesons": {"tf": 8.12403840463596}, "pyerrors.input.bdio.read_dSdm": {"tf": 7.416198487095663}, "pyerrors.input.dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 7.745966692414834}, "pyerrors.input.dobs.write_pobs": {"tf": 8.426149773176359}, "pyerrors.input.dobs.read_pobs": {"tf": 7.280109889280518}, "pyerrors.input.dobs.import_dobs_string": {"tf": 7.280109889280518}, "pyerrors.input.dobs.read_dobs": {"tf": 7.745966692414834}, "pyerrors.input.dobs.create_dobs_string": {"tf": 8.06225774829855}, "pyerrors.input.dobs.write_dobs": {"tf": 8.774964387392123}, "pyerrors.input.hadrons": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 7.3484692283495345}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 6.855654600401044}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 6.557438524302}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 20.904544960366874}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 6.324555320336759}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 6.324555320336759}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 6.782329983125268}, "pyerrors.input.json": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 6.082762530298219}, "pyerrors.input.json.dump_to_json": {"tf": 7}, "pyerrors.input.json.import_json_string": {"tf": 7.681145747868608}, "pyerrors.input.json.load_json": 
{"tf": 8.06225774829855}, "pyerrors.input.json.dump_dict_to_json": {"tf": 7.3484692283495345}, "pyerrors.input.json.load_json_dict": {"tf": 7.937253933193772}, "pyerrors.input.misc": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 7.14142842854285}, "pyerrors.input.misc.read_pbp": {"tf": 5.477225575051661}, "pyerrors.input.openQCD": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_rwms": {"tf": 8.54400374531753}, "pyerrors.input.openQCD.extract_t0": {"tf": 11}, "pyerrors.input.openQCD.extract_w0": {"tf": 11}, "pyerrors.input.openQCD.read_qtop": {"tf": 10.246950765959598}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 8.888194417315589}, "pyerrors.input.openQCD.qtop_projection": {"tf": 5.656854249492381}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 9.797958971132712}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 10.392304845413264}, "pyerrors.input.pandas": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.to_sql": {"tf": 7}, "pyerrors.input.pandas.read_sql": {"tf": 6.244997998398398}, "pyerrors.input.pandas.dump_df": {"tf": 6.324555320336759}, "pyerrors.input.pandas.load_df": {"tf": 6.244997998398398}, "pyerrors.input.sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 11.090536506409418}, "pyerrors.input.utils": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 5.385164807134504}, "pyerrors.input.utils.check_idl": {"tf": 5.385164807134504}, "pyerrors.linalg": {"tf": 1.7320508075688772}, "pyerrors.linalg.matmul": {"tf": 4.58257569495584}, "pyerrors.linalg.jack_matmul": {"tf": 4.47213595499958}, "pyerrors.linalg.einsum": {"tf": 4.47213595499958}, "pyerrors.linalg.inv": {"tf": 1.7320508075688772}, "pyerrors.linalg.cholesky": {"tf": 1.7320508075688772}, "pyerrors.linalg.det": {"tf": 1.7320508075688772}, "pyerrors.linalg.eigh": {"tf": 1.7320508075688772}, "pyerrors.linalg.eig": {"tf": 1.7320508075688772}, "pyerrors.linalg.pinv": {"tf": 1.7320508075688772}, "pyerrors.linalg.svd": {"tf": 1.7320508075688772}, "pyerrors.misc": {"tf": 1.7320508075688772}, "pyerrors.misc.print_config": {"tf": 1.7320508075688772}, "pyerrors.misc.errorbar": {"tf": 5.0990195135927845}, "pyerrors.misc.dump_object": {"tf": 5.916079783099616}, "pyerrors.misc.load_object": {"tf": 5}, "pyerrors.misc.pseudo_Obs": {"tf": 6.557438524302}, "pyerrors.misc.gen_correlated_data": {"tf": 7.0710678118654755}, "pyerrors.mpm": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 6.324555320336759}, "pyerrors.obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 6.928203230275509}, "pyerrors.obs.Obs.__init__": {"tf": 4.898979485566356}, "pyerrors.obs.Obs.S_global": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.S_dict": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tau_exp_global": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tau_exp_dict": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N_sigma_global": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N_sigma_dict": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.shape": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.r_values": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.deltas": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.idl": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.ddvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweighted": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tag": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.value": {"tf": 1.7320508075688772}, 
"pyerrors.obs.Obs.dvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.cov_names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.mc_names": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_content": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.covobs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 5.744562646538029}, "pyerrors.obs.Obs.gm": {"tf": 5.744562646538029}, "pyerrors.obs.Obs.details": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.reweight": {"tf": 4.58257569495584}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 4.47213595499958}, "pyerrors.obs.Obs.is_zero": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.plot_tauint": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.plot_rho": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_history": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.plot_piechart": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.dump": {"tf": 5.744562646538029}, "pyerrors.obs.Obs.export_jackknife": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 6.164414002968976}, "pyerrors.obs.Obs.sqrt": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.log": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.exp": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.sin": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.cos": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tan": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arcsin": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arccos": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arctan": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.sinh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.cosh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tanh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arcsinh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arccosh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.arctanh": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.N_sigma": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.S": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_ddvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_drho": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_dtauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_dvalue": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_n_dtauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_n_tauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_rho": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_tauint": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.e_windowsize": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.tau_exp": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.__init__": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.tag": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.real": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.imag": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.is_zero": {"tf": 1.7320508075688772}, "pyerrors.obs.CObs.conjugate": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 6.4031242374328485}, "pyerrors.obs.reweight": {"tf": 5.196152422706632}, "pyerrors.obs.correlate": {"tf": 4.898979485566356}, "pyerrors.obs.covariance": {"tf": 6.6332495807108}, "pyerrors.obs.import_jackknife": {"tf": 4.47213595499958}, "pyerrors.obs.import_bootstrap": {"tf": 5.0990195135927845}, "pyerrors.obs.merge_obs": {"tf": 4.123105625617661}, "pyerrors.obs.cov_Obs": {"tf": 5.385164807134504}, "pyerrors.roots": 
{"tf": 1.7320508075688772}, "pyerrors.roots.find_root": {"tf": 10.488088481701515}, "pyerrors.version": {"tf": 1.7320508075688772}}, "df": 244, "w": {"0": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}}, "df": 2, "/": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 1}}}, "docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 3, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 17}}, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 10}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 11}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 
1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 39}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}, "o": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 6}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2}}, "df": 38, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, 
"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 9}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "/": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 12}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 6}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "f": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 3, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}}, "df": 3}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, 
"df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 6}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 12}}}, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "t": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}, "l": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}, "f": {"2": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "i": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, 
"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 11, "s": {"docs": {"pyerrors": {"tf": 8.12403840463596}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 3.4641016151377544}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_dobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.4641016151377544}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 3}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_t0": {"tf": 3.605551275463989}, "pyerrors.input.openQCD.extract_w0": {"tf": 3.605551275463989}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, 
"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 3}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 64}, "t": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 22, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 8, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}, "e": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}, "n": {"docs": {"pyerrors": {"tf": 8.366600265340756}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, 
"pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.3166247903554}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.23606797749979}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 49, "t": {"1": {"6": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, 
"pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 38, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 2}}}}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}}}, "f": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, 
"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 2}}}}}, "o": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 10}, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 1}}, "v": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}}, "df": 3}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 3}}, "df": 1, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 8}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, 
"pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2}, "pyerrors.input.dobs.read_dobs": {"tf": 2}}, "df": 3}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 4, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 3}}}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}}}}}}, "x": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 4, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}}}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": 
{"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 13}}}}}}}}}, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 15}}}, "c": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}, "d": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}}}, "f": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, 
"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 2.23606797749979}, "pyerrors.input.json.load_json": {"tf": 2.6457513110645907}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 3}, "pyerrors.input.openQCD.extract_w0": {"tf": 3}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 59}, "m": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, 
"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 12, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 6}}, "s": {"docs": {"pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 3}}}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 5}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "d": {"0": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 9, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": 
{}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "r": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3}}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}}, "df": 15, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 2}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}, "/": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 3}}}}}}, "o": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "e": 
{"docs": {}, "df": 0, "e": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "\\": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "j": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}, "^": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "|": {"docs": {}, "df": 0, "^": {"2": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "docs": {}, "df": 0}}}}, "}": {"docs": {}, "df": 0, "|": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 2}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 4, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 6.928203230275509}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 2}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 4}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 5, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.7320508075688772}}, "df": 3}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "^": {"2": {"docs": 
{"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 8, "s": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": 
{"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 101}}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 2}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "h": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 2}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2}, "pyerrors.input.bdio.read_mesons": {"tf": 2}, "pyerrors.input.bdio.read_dSdm": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 
1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 27}}, "d": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1.7320508075688772}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}}, "df": 1}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 4.123105625617661}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 2}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 4}}}}}}}, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, 
"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 11, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}}}}, "j": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 8, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2}}, "s": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1}}}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2}}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.7320508075688772}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 2}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 
0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 10}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 3}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4}}}}}}}}, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, 
"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 15, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}}, "df": 4}}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 3}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}}}}}}}}}, "t": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "p": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "^": {"0": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "n": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}}, "df": 1}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 5.477225575051661}, "pyerrors.correlators.Corr": {"tf": 1}}, "df": 2, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 6, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": 
{"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}}, "df": 4}}}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 3}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 4}}}}}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}}, "n": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}}, "df": 2}}}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 5}}}}, "s": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, 
"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 5}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 5}, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 3}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 5}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3}}}}}}, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 3}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 2}}, "df": 1}}}}}}}}}}}, "b": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}}, "df": 1}}}, "a": {"docs": {"pyerrors": {"tf": 8.426149773176359}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 4.69041575982343}, "pyerrors.fits.total_least_squares": {"tf": 3.3166247903554}, "pyerrors.fits.fit_lin": {"tf": 1.7320508075688772}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 
1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 2}, "pyerrors.input.bdio.read_dSdm": {"tf": 2}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3}, "pyerrors.input.json.create_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.6457513110645907}, "pyerrors.input.pandas.dump_df": {"tf": 2.23606797749979}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 2}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.23606797749979}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 76, "n": {"docs": {"pyerrors": {"tf": 5.0990195135927845}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, 
"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.605551275463989}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 33, "d": {"docs": {"pyerrors": {"tf": 7.211102550927978}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 3}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, 
"pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 67}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 11}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 3}, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 3}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 4}}}}, "n": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}, "p": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 5}}, "r": {"docs": {}, "df": 0, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, ":": {"1": {"0": {"0": {"9": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"0": {"5": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "8": {"0": {"9": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "2": {"0": {"0": {"4": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": 
{"tf": 2}}, "df": 1}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 5.5677643628300215}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 59}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 4.47213595499958}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, 
"pyerrors.fits.error_band": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 6.082762530298219}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 15, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 9}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}}, "c": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4}}}}}}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 5, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}}, "df": 7, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9}}}}}}}}}, "g": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": 
{"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 5}}}}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.23606797749979}}, "df": 7, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 7}}}}}}}}, "s": {"docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 22, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 11}, "s": {"docs": {"pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": 
{"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}}, "df": 4}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "o": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}}}}, "l": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1, "^": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "l": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 35, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 
1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 3}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 3}, "d": {"docs": {"pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 2}}}, "y": {"docs": {"pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}}, "df": 6}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}}}, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, 
"df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}}, "df": 2}}}}, "i": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 4}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "g": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}, "o": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}, "f": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 5}}}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 20, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.fits.Fit_result": 
{"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 3}}}}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 1}}}, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 12}}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 6}}}}}}, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 
4}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 7}}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 3}}}}}}}, "x": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.errorbar": {"tf": 1.7320508075688772}}, "df": 1}}}, "[": {"0": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.7320508075688772}}, "df": 1}, "1": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "^": {"2": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "/": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}, "a": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 3.7416573867739413}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 3, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 7.0710678118654755}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}, 
"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 2}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.7320508075688772}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 3}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1.7320508075688772}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 2}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 3.872983346207417}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.6457513110645907}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 69, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, 
"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}, "m": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 7, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1.7320508075688772}}, "df": 16, "s": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 2}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}}, "df": 2}}}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 3}}, "r": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 5}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}, 
"e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.7320508075688772}}, "df": 38}, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1, "{": {"1": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "{": {"2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "}": {"docs": {}, "df": 0, "+": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}}, "docs": {}, "df": 0, "f": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": 
{}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 2.6457513110645907}, "pyerrors.fits.Fit_result": {"tf": 2}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 3.7416573867739413}, "pyerrors.fits.total_least_squares": {"tf": 2}, "pyerrors.fits.fit_lin": {"tf": 2}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}}, "df": 18, "s": {"docs": {"pyerrors": {"tf": 3.872983346207417}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 7}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 21}}}, "x": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, 
"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 8}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 4.358898943540674}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 2}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.23606797749979}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 2.23606797749979}, "pyerrors.input.pandas.load_df": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 2}, "pyerrors.misc.load_object": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 2.23606797749979}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 41, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": 
{"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 3}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.23606797749979}}, "df": 15, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 5}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "l": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 4}}}}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}}, "df": 3, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": 
{"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}}, "df": 3}}}}}, "l": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 25}}, "l": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2}}}}}, "^": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 3}, "pyerrors.fits.total_least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 2}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 6, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.fit": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 2}}, "df": 16, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, 
"pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 7}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 1}}}}}}}}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 12, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 17, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 5}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}, "w": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}}, "df": 6, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "u": 
{"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 10}}}}, "f": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}}, "df": 2}}}, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 14, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 2, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 6.164414002968976}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, 
"pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 2.23606797749979}}, "df": 15, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 4}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.errorbar": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "x": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 6, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 3}}}}, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 2, "/": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 2}}}}, "p": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 6, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 
1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 11, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 10}}, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 3}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 4}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2}}}}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 12, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, 
"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 10}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2.8284271247461903}}, "df": 1, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}}, "df": 2}}}}, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1}, "s": {"docs": {"pyerrors.obs.CObs.gamma_method": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": 
{"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 5}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.linalg.eigh": {"tf": 1}}, "df": 4}}}}}}}}}, "h": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}}, "df": 2}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 9}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 8, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"1": {"docs": {"pyerrors": {"tf": 3.4641016151377544}}, "df": 1, "|": {"docs": {}, "df": 0, "r": {"0": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "2": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 5.5677643628300215}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, 
"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 32, "s": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 6, "/": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}}}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 4, "s": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 2}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 2}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 10}}}, "y": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}}, "df": 11}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": 
{"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 4}}}, "s": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 6, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 2}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 10, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": 
{"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 2}}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 16}}, "r": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "q": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": 
{"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}, "g": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2}, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.fits.least_squares": {"tf": 2}}, "df": 1}}, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 5}}}}, "t": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}, "c": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 3}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 12, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 4, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 4}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}}, "df": 6}, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.fits.residual_plot": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": 
{"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input": {"tf": 1}}, "df": 2}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "/": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "/": {"1": {"6": {"0": {"3": {"7": {"5": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}}}, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 4}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}}, "df": 2}}}}}}}}, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": 
{"tf": 1}}, "df": 2}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 4}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": null}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2.449489742783178}}, "df": 1}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, 
"pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}}, "df": 28}}}, "s": {"docs": {"pyerrors": {"tf": 5}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 15}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 3}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 2}}}, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 8, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 7}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}}, "df": 6, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, 
"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.utils.check_idl": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}}, "df": 20}}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 10}}}}, "j": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 1}}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 6.6332495807108}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.dump": {"tf": 1}, 
"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 25, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}}, "df": 3, "d": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 5}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.correlate": {"tf": 2}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 25, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 6}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.prune": {"tf": 2.23606797749979}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 2.449489742783178}}, "df": 6, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 2}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, 
"t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}, "s": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 6}}}, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}}}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.linalg.inv": {"tf": 1}}, "df": 6}}, "v": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.covobs.Covobs.__init__": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 4, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 4}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 4}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 2.449489742783178}, "pyerrors.obs.cov_Obs": {"tf": 2}}, "df": 6}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 3}}}}}, "s": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 1}, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 
1.4142135623730951}}, "df": 1}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 2}, "/": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": 
{"tf": 1}}, "df": 5}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}}, "df": 4}}}}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 4}}, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}}, "l": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "n": {"docs": {"pyerrors": {"tf": 5.744562646538029}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.6457513110645907}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 31, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, 
"e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 12}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 5, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": 
{}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 5}}}}}, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}}, "df": 5}}}, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}, "p": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}, "t": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 5}}}, "y": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}}, "s": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}}, "f": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 1}}}}}}}, "o": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 9, "f": {"docs": {"pyerrors": {"tf": 10.44030650891055}, "pyerrors.correlators.Corr": {"tf": 2}, "pyerrors.correlators.Corr.__init__": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, 
"pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 2}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.dump": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 2.6457513110645907}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 4.123105625617661}, "pyerrors.fits.total_least_squares": {"tf": 3.1622776601683795}, "pyerrors.fits.fit_lin": {"tf": 2.449489742783178}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.write_pobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.read_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_dobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2.8284271247461903}, "pyerrors.input.dobs.write_dobs": {"tf": 2.8284271247461903}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2.6457513110645907}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 5.0990195135927845}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 2.6457513110645907}, "pyerrors.input.json.dump_to_json": {"tf": 2.6457513110645907}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.8284271247461903}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 3}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 3.3166247903554}, "pyerrors.input.openQCD.extract_w0": {"tf": 3.3166247903554}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.6457513110645907}, 
"pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.23606797749979}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 4}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.input.utils.check_idl": {"tf": 2}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1.4142135623730951}, "pyerrors.linalg.eig": {"tf": 1.4142135623730951}, "pyerrors.linalg.pinv": {"tf": 1.4142135623730951}, "pyerrors.linalg.svd": {"tf": 1.4142135623730951}, "pyerrors.misc.print_config": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.7320508075688772}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2.6457513110645907}, "pyerrors.obs.Obs": {"tf": 2.8284271247461903}, "pyerrors.obs.Obs.__init__": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gm": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 2.6457513110645907}, "pyerrors.obs.derived_observable": {"tf": 2.449489742783178}, "pyerrors.obs.reweight": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 3.3166247903554}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 2}, "pyerrors.obs.merge_obs": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 109, "f": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {"pyerrors": {"tf": 5.291502622129181}, "pyerrors.correlators.Corr.plottable": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reweight": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, 
"pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 37, "e": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 22, "s": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3}}, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, 
"pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}}, "df": 28}}, "t": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 1}}, "df": 3}}}}}, "n": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 6}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 3}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 
1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 20}}}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 1}}}}}}, "b": {"docs": {}, "df": 0, "s": {"1": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}, "2": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}, "3": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 9.591663046625438}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.fit_lin": {"tf": 2.23606797749979}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 2}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 2}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, 
"pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2.449489742783178}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.correlate": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 2}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 2.23606797749979}}, "df": 72, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 24, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 2.449489742783178}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 21}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": 
{"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "[": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}}, "df": 3}}, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}, "j": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.misc.dump_object": {"tf": 1.7320508075688772}, "pyerrors.misc.load_object": {"tf": 1.7320508075688772}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 21, "s": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 10}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "r": {"docs": {"pyerrors": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, 
"pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2}, "pyerrors.input.dobs.read_dobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 2}}, "df": 44, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}}, "df": 4, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 4, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "w": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": 
{"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 7, "w": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 5}}}}}}}}, "d": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 23, "s": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "r": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}, "c": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3}}}}}}, "p": {"docs": {}, 
"df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "w": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 2}}, "m": {"docs": {"pyerrors": {"tf": 4.242640687119285}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.449489742783178}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}}, "df": 3, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors": {"tf": 4.795831523312719}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 2.8284271247461903}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 3}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}}, "df": 22}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}}, "df": 9}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}}}}}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "x": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": 
{"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1}}}}, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 2, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors.misc.errorbar": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 2}}}}}, "d": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "k": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 3}}}}, "n": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "y": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 2, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "y": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 8}, "j": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}}}, "x": {"docs": 
{"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 4}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 6}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}}, "df": 2}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3, "a": {"docs": {"pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 4.795831523312719}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}, 
"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 21, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 4}}}}, "a": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 4}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2.8284271247461903}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}}, "df": 3}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}}, "df": 10, "s": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 4}}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 6, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 9}}}}}}}}}, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}}, "df": 1}}}}}, "y": {"docs": {"pyerrors": 
{"tf": 7.681145747868608}}, "df": 1}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 3}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.matmul": {"tf": 1}}, "df": 1}}}}}}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 2}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.7320508075688772}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.roots.find_root": {"tf": 1}}, "df": 1}}}}}}, "d": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 1}}}}}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}, "c": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "m": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "s": {"1": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5}, "d": {"5": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 2.23606797749979}}, "df": 3, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5, "a": {"docs": {"pyerrors": {"tf": 5}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.read_dobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.1622776601683795}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 2.8284271247461903}, "pyerrors.input.misc.fit_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.8284271247461903}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 36, "t": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "f": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.dump_df": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1.7320508075688772}}, "df": 4}}}}}, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 2}, "pyerrors.input.pandas.read_sql": {"tf": 
1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 3}}}}}, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 5}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 2}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}}, "df": 19, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}, "y": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 7}}}}}}, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "[": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, 
"p": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}, "s": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}}, "df": 7}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}}}, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}}}}, "k": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 2, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, 
"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}}, "df": 2}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}}}}}}}}, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3}}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}}}}, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 4}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 3}}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.obs.Obs.details": {"tf": 1}}, "df": 2}}, "s": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 9}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": 
{"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "s": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 7}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}}, "df": 2}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}}, "df": 5}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 2}}}}}}}}}, "l": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4, "d": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, 
"pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 8}}}, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 5}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}}}}}, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 4, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 5}}}, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {"pyerrors": {"tf": 2}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 3.3166247903554}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 19}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}}, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, 
"pyerrors.input.bdio.read_mesons": {"tf": 2}, "pyerrors.input.bdio.read_dSdm": {"tf": 2}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 2}, "pyerrors.obs.Obs.gm": {"tf": 2}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 39, "s": {"docs": {"pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}}, "df": 2}, "c": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.misc.print_config": {"tf": 1}}, "df": 1}}}}}}, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}, "b": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}}}}}, "o": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 5, "w": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, 
"pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 7}}, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 5}}, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}, "f": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, "n": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 2}}, "c": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}}, "df": 2}}}}}}}}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors.correlators.Corr.dump": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}, "t": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 3.1622776601683795}}, "df": 1}}}, "r": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 5}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.misc.pseudo_Obs": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "s": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 
1.4142135623730951}}, "df": 1}}}, "f": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 2}, "b": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}}, "df": 2}}, "b": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 3, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 10}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}}, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "{": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "a": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}}, "df": 1, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.error_band": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {"pyerrors": {"tf": 6.244997998398398}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 3}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.23606797749979}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, 
"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.6457513110645907}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.23606797749979}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 2}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 65, "t": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 11}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "a": {"docs": {"pyerrors.fits.error_band": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}, "i": {"docs": {}, "df": 0, "n": 
{"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 3}}}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 2}}, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 2}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 1}}}}}, "y": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 25, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}}, "df": 1}}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 8}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "d": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}}, "df": 4}}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 3.1622776601683795}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "u": 
{"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.6457513110645907}}, "df": 5}}}}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 3}}, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 40}, "k": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}}}}}}}}, "t": {"docs": {}, "df": 0, "s": {"docs": 
{"pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 2.6457513110645907}, "pyerrors.obs.import_bootstrap": {"tf": 2}}, "df": 2}}}}}}}, "x": {"docs": {"pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}}, "df": 1}}, "i": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "g": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}}, "df": 1}}}}}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}, "b": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_mesons": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.6457513110645907}}, "df": 4}}}, "b": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}, "t": {"0": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.misc.fit_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 7, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1}}}}, "/": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}, "2": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1, "e": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}, "docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.prune": {"tf": 4.47213595499958}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 12, "h": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1, "e": {"docs": {"pyerrors": {"tf": 16.492422502470642}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 3}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gm": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.projected": {"tf": 2}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 2}, 
"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 4.58257569495584}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.thin": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.correlate": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.reweight": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 2}, "pyerrors.correlators.Corr.deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.fit": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.plateau": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.set_prange": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 3.605551275463989}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 4.795831523312719}, "pyerrors.covobs.Covobs.__init__": {"tf": 2.23606797749979}, "pyerrors.covobs.Covobs.errsq": {"tf": 1.7320508075688772}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 5.656854249492381}, "pyerrors.fits.total_least_squares": {"tf": 3.7416573867739413}, "pyerrors.fits.fit_lin": {"tf": 2.449489742783178}, "pyerrors.fits.qqplot": {"tf": 1.7320508075688772}, "pyerrors.fits.residual_plot": {"tf": 2}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 2}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2}, "pyerrors.input.bdio.read_mesons": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.create_pobs_string": {"tf": 3.605551275463989}, "pyerrors.input.dobs.write_pobs": {"tf": 3.872983346207417}, "pyerrors.input.dobs.read_pobs": {"tf": 3}, "pyerrors.input.dobs.import_dobs_string": {"tf": 3.3166247903554}, "pyerrors.input.dobs.read_dobs": {"tf": 3.3166247903554}, "pyerrors.input.dobs.create_dobs_string": {"tf": 4.58257569495584}, "pyerrors.input.dobs.write_dobs": {"tf": 4.58257569495584}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 3.3166247903554}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 3.1622776601683795}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 5.830951894845301}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 2}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.create_json_string": {"tf": 2.8284271247461903}, "pyerrors.input.json.dump_to_json": {"tf": 3}, "pyerrors.input.json.import_json_string": {"tf": 3}, "pyerrors.input.json.load_json": {"tf": 3}, "pyerrors.input.json.dump_dict_to_json": {"tf": 3.3166247903554}, "pyerrors.input.json.load_json_dict": {"tf": 
2.6457513110645907}, "pyerrors.input.misc.fit_t0": {"tf": 4.58257569495584}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 3}, "pyerrors.input.openQCD.extract_t0": {"tf": 5.477225575051661}, "pyerrors.input.openQCD.extract_w0": {"tf": 5.477225575051661}, "pyerrors.input.openQCD.read_qtop": {"tf": 4.58257569495584}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 4.47213595499958}, "pyerrors.input.openQCD.qtop_projection": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 4.358898943540674}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 4.58257569495584}, "pyerrors.input.pandas.to_sql": {"tf": 2.23606797749979}, "pyerrors.input.pandas.read_sql": {"tf": 2.449489742783178}, "pyerrors.input.pandas.dump_df": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 2.449489742783178}, "pyerrors.input.sfcf.read_sfcf": {"tf": 4.58257569495584}, "pyerrors.input.utils.sort_names": {"tf": 1.7320508075688772}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.linalg.pinv": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1.7320508075688772}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2.23606797749979}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2.23606797749979}, "pyerrors.obs.Obs": {"tf": 3.1622776601683795}, "pyerrors.obs.Obs.__init__": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 3.4641016151377544}, "pyerrors.obs.Obs.gm": {"tf": 3.4641016151377544}, "pyerrors.obs.Obs.details": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 2}, "pyerrors.obs.Obs.dump": {"tf": 2}, "pyerrors.obs.Obs.export_jackknife": {"tf": 3.3166247903554}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 4.123105625617661}, "pyerrors.obs.CObs.gamma_method": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 2.8284271247461903}, "pyerrors.obs.reweight": {"tf": 2.23606797749979}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 5.291502622129181}, "pyerrors.obs.import_jackknife": {"tf": 2}, "pyerrors.obs.import_bootstrap": {"tf": 3}, "pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 2}, "pyerrors.roots.find_root": {"tf": 2.449489742783178}}, "df": 123, "i": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 3, "f": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 
1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 6}}, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 6.324555320336759}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2}, "pyerrors.input.dobs.write_pobs": {"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 35}, "n": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 4}}, "i": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, 
"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 30}, "n": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "w": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 3}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, 
"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_history": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 35}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "j": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 5}}}}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "s": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}}}, "o": {"docs": {"pyerrors": {"tf": 8.831760866327848}, "pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.plateau": {"tf": 2}, "pyerrors.correlators.Corr.show": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 3.1622776601683795}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.7320508075688772}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 2.6457513110645907}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_mesons": {"tf": 2.6457513110645907}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.6457513110645907}, 
"pyerrors.input.dobs.create_pobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.7416573867739413}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.create_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_t0": {"tf": 3.4641016151377544}, "pyerrors.input.openQCD.extract_w0": {"tf": 3.4641016151377544}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.8284271247461903}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 3}, "pyerrors.input.openQCD.qtop_projection": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 3.605551275463989}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 3.1622776601683795}, "pyerrors.input.pandas.to_sql": {"tf": 2.23606797749979}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.dump_df": {"tf": 2}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 3.3166247903554}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.mpm.matrix_pencil_method": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 2.23606797749979}, "pyerrors.obs.derived_observable": {"tf": 2.23606797749979}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 2.8284271247461903}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1.7320508075688772}, "pyerrors.obs.merge_obs": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 96, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 
0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 6}}}}}}, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}, "l": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}, "d": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 2}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}}, "df": 4}}}}}}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}}}, "w": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 22}}, "a": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 1, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}}, "df": 3}, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "y": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "u": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2}, 
"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 6, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 8}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 7}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reverse": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 11, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 5, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}}, "df": 5}}}}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, 
"{": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 1, "}": {"docs": {}, "df": 0, "^": {"2": {"docs": {"pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0}}}}}}}}}}}}}}, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}}, "df": 3}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "y": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}}, "df": 7, "s": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}, "+": {"1": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 2}}, "df": 2}, "2": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}}, "df": 1}, "docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}}, "df": 1}}, "/": {"2": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 2}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "j": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 5}}}}, "^": {"2": {"docs": 
{"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}}, "g": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 2}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 15, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"5": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}, "docs": {"pyerrors": {"tf": 4.358898943540674}, "pyerrors.correlators.Corr.gamma_method": {"tf": 1}, "pyerrors.correlators.Corr.gm": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.Fit_result.gamma_method": {"tf": 1}, "pyerrors.fits.Fit_result.gm": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 16, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "+": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, ":": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "b": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}, "@": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, 
"pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 31}, "s": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}, "t": {"docs": {"pyerrors": {"tf": 8.306623862918075}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.8284271247461903}}, "df": 2}, "e": {"docs": {}, "df": 0, "q": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 2, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 7, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 5}, "s": {"docs": {"pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.misc.gen_correlated_data": {"tf": 1}}, "df": 1}}}}}}, "v": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": 
{"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 2.23606797749979}}, "df": 5}}, "t": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 6, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "z": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 16, "i": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, 
"pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 10}}}}}}, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1.4142135623730951}}, "df": 4}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}, "^": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}, "f": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 3, "a": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1, "t": {"docs": {}, "df": 0, "/": {"0": {"3": {"0": {"6": {"0": {"1": {"7": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "9": {"4": {"1": {"2": {"0": {"8": {"7": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "docs": {}, "df": 0}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 6}}}}, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "x": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "r": {"docs": {"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 5, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 2}}, "s": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": 
{"docs": {"pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 6}}}}}}, "y": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 11}}, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1}}, "df": 2, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}}, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3}}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 8}}, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}}, "df": 3}}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.__init__": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 2.449489742783178}, "pyerrors.covobs.Covobs.__init__": {"tf": 1.4142135623730951}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2.6457513110645907}, "pyerrors.fits.total_least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.fit_lin": {"tf": 2.23606797749979}, "pyerrors.fits.ks_test": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 
1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2.23606797749979}, "pyerrors.input.dobs.write_dobs": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 2}, "pyerrors.input.json.import_json_string": {"tf": 2.449489742783178}, "pyerrors.input.json.load_json": {"tf": 2.449489742783178}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_rwms": {"tf": 3.1622776601683795}, "pyerrors.input.openQCD.extract_t0": {"tf": 3}, "pyerrors.input.openQCD.extract_w0": {"tf": 3}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.8284271247461903}, "pyerrors.input.utils.sort_names": {"tf": 2.23606797749979}, "pyerrors.input.utils.check_idl": {"tf": 2}, "pyerrors.misc.errorbar": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 2.23606797749979}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.__init__": {"tf": 2.449489742783178}, "pyerrors.obs.derived_observable": {"tf": 2}, "pyerrors.obs.reweight": {"tf": 1.7320508075688772}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 2}, "pyerrors.obs.cov_Obs": {"tf": 1.7320508075688772}}, "df": 48, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 7}, "[": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 11}}}, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 1}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "[": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2}}}}}}}}}}}, "k": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": 
{"pyerrors.fits.fit_lin": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}}, "df": 4}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}}, "df": 4}}}, "b": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_mesons": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.23606797749979}}, "df": 4}}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 2}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "l": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 1}}}}, "n": {"docs": {"pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 3, "g": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1.7320508075688772}}, "df": 11}}}}, "q": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}, "f": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}, "o": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, 
"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}}, "df": 4, "s": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.misc.load_object": {"tf": 1}}, "df": 1}}}}, "w": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "t": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 2}, "/": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}, "l": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}}, "df": 1}}, "s": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}, "pyerrors.obs.cov_Obs": {"tf": 1.4142135623730951}}, "df": 11, "o": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.write_ADerrors": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_mesons": {"tf": 2.23606797749979}, "pyerrors.input.bdio.read_dSdm": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 5, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "l": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": 
{"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 3}}}}}, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}}, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 4}}}}}, "u": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}}}}}, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 2}}}}, "m": {"docs": {"pyerrors": {"tf": 3.605551275463989}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 2, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.einsum": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 4}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 1, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.bdio.write_ADerrors": {"tf": 1}}, "df": 1}}}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "p": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 7}}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": 
{"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 3}}}}}}, "b": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.linalg.einsum": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input": {"tf": 1}}, "df": 1}}}}}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}}}}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}}}}, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 2}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "y": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2}, "e": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}, "n": 
{"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.linalg.svd": {"tf": 1}}, "df": 2}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 4}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}, "h": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}, "k": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}}, "df": 1}}, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 8}}, "g": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 7}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}}}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 9, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1.7320508075688772}}, "df": 2, "s": {"docs": {"pyerrors": 
{"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 4}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 12, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}}, "df": 1}}}}}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 11}}, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "x": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.dump": {"tf": 1.7320508075688772}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.write_pobs": {"tf": 2.23606797749979}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.write_dobs": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, 
"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.json.create_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 2}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2.23606797749979}, "pyerrors.input.pandas.to_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.8284271247461903}, "pyerrors.input.utils.check_idl": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 2}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 56, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 17, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 9}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}}, "df": 1}}}, "c": {"docs": {}, "df": 0, "t": 
{"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 13, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}}, "df": 11}}}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}}}}, "i": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 2}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}, "a": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 3}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 10, "p": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "y": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}}, "c": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "a": {"docs": {}, 
"df": 0, "e": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}}, "i": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 3}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 4, "s": {"1": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "3": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors": {"tf": 3.872983346207417}, "pyerrors.input": {"tf": 1.7320508075688772}, "pyerrors.misc.pseudo_Obs": {"tf": 1.4142135623730951}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 2}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2.23606797749979}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 3.1622776601683795}, "pyerrors.obs.import_jackknife": {"tf": 1.7320508075688772}, "pyerrors.obs.import_bootstrap": {"tf": 2.6457513110645907}}, "df": 10}}}}, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.correlate": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 12}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_rho": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}}, "df": 5, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 6}}}, "r": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}}}, "e": 
{"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 7}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}}, "df": 2}}}}, "e": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 10}, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 14, "s": {"docs": {"pyerrors.correlators.Corr.set_prange": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}}, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 3}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 2}}}}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": 
{"tf": 2}, "pyerrors.input.dobs.import_dobs_string": {"tf": 2.449489742783178}, "pyerrors.input.dobs.read_dobs": {"tf": 2.449489742783178}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 9}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}}, "df": 3}}}, "e": {"docs": {"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 2}}}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}}}, "m": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 16}, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 10}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "y": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": 
{"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 12}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 5}}}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.thin": {"tf": 1.7320508075688772}}, "df": 1}}}, "e": {"docs": {"pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 3}}, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}}, "df": 1}}}}}}}}, "y": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.is_matrix_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 5}, "z": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.matrix_symmetric": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}}}, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1.4142135623730951}}, "df": 2}}}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.roll": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, 
"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 9}}}, "w": {"docs": {"pyerrors.obs.Obs.plot_history": {"tf": 1}}, "df": 1, "n": {"docs": {"pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}, "s": {"docs": {"pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 1}}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}}, "df": 4}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 2.6457513110645907}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 3}}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.fit": {"tf": 1}}, "df": 2}, "d": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "r": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3, "{": {"docs": {}, "df": 0, "j": {"docs": {}, "df": 0, "^": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "\\": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}}, "df": 1, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.read_sql": {"tf": 1.7320508075688772}, "pyerrors.input.pandas.load_df": {"tf": 1}}, "df": 3}}}}}, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 2}}}}}, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}}}, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, 
"df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "k": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}, "f": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2}}, "df": 3}}}, "c": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 2.23606797749979}}, "df": 1}}}}, "n": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.Hankel": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.prune": {"tf": 2.8284271247461903}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 2}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 2}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 15, "o": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 13, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 21}}, "t": {"docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.reweight": 
{"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 32, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 7}}}, "w": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 5}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}}}}}}}, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.4142135623730951}}, "df": 2}}}, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "w": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.obs.merge_obs": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 
1}}, "df": 6}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 5, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 4}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 3}}}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}, "x": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}}}}}, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "m": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 3, "p": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 3.4641016151377544}, "pyerrors.fits.least_squares": {"tf": 2.449489742783178}, "pyerrors.fits.total_least_squares": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2.23606797749979}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.jack_matmul": {"tf": 1.4142135623730951}, "pyerrors.linalg.einsum": {"tf": 2}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 23}}, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.roll": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, 
"pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 25, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.7320508075688772}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}}, "df": 5}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 6, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 1}}}, "p": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}, "pyerrors.linalg.eigh": {"tf": 1}, "pyerrors.linalg.eig": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 7, "r": {"docs": {"pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}}, "df": 1}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 2}, "pyerrors.input.dobs.write_dobs": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 
2}, "pyerrors.misc.dump_object": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 27, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 19, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 5}}}}, "d": {"docs": {"pyerrors.obs.Obs.plot_tauint": {"tf": 1}, "pyerrors.obs.Obs.plot_rho": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}}, "df": 3}}}, "n": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 3.605551275463989}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}}, "df": 17}}}}}, "i": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 3}, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "x": {"docs": {}, "df": 0, "n": 
{"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 2}}, "b": {"docs": {}, "df": 0, "y": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}, "x": {"0": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.symmetric": {"tf": 1}, "pyerrors.correlators.Corr.anti_symmetric": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 7}, "1": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}}, "df": 3}, "2": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}}, "df": 3}, "docs": {"pyerrors": {"tf": 2.8284271247461903}, "pyerrors.correlators.Corr.second_deriv": {"tf": 4.123105625617661}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 3.605551275463989}, "pyerrors.fits.total_least_squares": {"tf": 3}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 12, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.write_dobs": {"tf": 1.7320508075688772}}, "df": 7}, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 2}}}, "[": {"0": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}, "1": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}, "y": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.spaghetti_plot": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 9, "o": {"docs": {}, "df": 0, "u": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 4}}, "e": {"docs": {}, "df": 
0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1}}, "df": 2}}}}, "t": {"docs": {"pyerrors.obs.correlate": {"tf": 1}}, "df": 1}}, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2}}}}}}, "r": {"0": {"1": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}, "docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.extract_w0": {"tf": 2.6457513110645907}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 10, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.obs.CObs.gamma_method": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 9}, "d": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1.7320508075688772}, "pyerrors.input.bdio.read_dSdm": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 2}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 2.23606797749979}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 2}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 2}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 2}, "pyerrors.input.openQCD.read_rwms": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 2}, "pyerrors.input.openQCD.extract_w0": {"tf": 2}, "pyerrors.input.openQCD.read_qtop": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 2.23606797749979}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 2}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.6457513110645907}}, "df": 20, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 4}}}}}, "p": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, 
"r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.read_dobs": {"tf": 1.7320508075688772}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.details": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}}, "df": 11, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "/": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}}}}}}}}}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.read_pbp": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.obs.Obs.plot_rep_dist": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.merge_obs": {"tf": 1}}, "df": 15}}}, "k": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 2}, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.utils.sort_names": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "s": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 2}}}}}}}, "s": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2, "[": {"0": {"docs": {"pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 2}, "docs": {}, "df": 0}}}, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}, 
"pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 6, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "h": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 4}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 1.7320508075688772}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1.7320508075688772}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 1}}, "df": 15, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}}, "df": 4}}}, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}}, "df": 4}}}}, "p": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}}, "df": 3, "i": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1.4142135623730951}}, "df": 3}}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}, "c": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1}}, "df": 1}}}}}}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": 
{"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 7}}}}}, "n": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}}, "df": 1, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 3}}}}}}}}}, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}, "g": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "v": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1}}, "df": 6}}}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 3}}}}}, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 10}, "s": {"docs": {"pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}}, "df": 6}, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 2}}}}}}}}}, "g": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "l": 
{"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1}}, "df": 2}}}}}}}, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.correlators.Corr.__init__": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1.4142135623730951}}, "df": 2}}}}, "t": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.T_symmetry": {"tf": 1}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.bdio.read_dSdm": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 12, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.item": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.m_eff": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.dirac.Grid_gamma": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.input.bdio.read_ADerrors": {"tf": 1}, "pyerrors.input.bdio.write_ADerrors": {"tf": 1.4142135623730951}, "pyerrors.input.bdio.read_mesons": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.misc.read_pbp": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}, 
"pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.sort_names": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}, "pyerrors.misc.dump_object": {"tf": 1}, "pyerrors.misc.load_object": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs.plot_piechart": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.4142135623730951}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 63}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_pobs": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.read_dobs": {"tf": 1.4142135623730951}, "pyerrors.input.json.import_json_string": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json": {"tf": 1.4142135623730951}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 8}}}}}, "o": {"docs": {"pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}, "v": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.reverse": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "w": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}}, "df": 4, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.input.openQCD.qtop_projection": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 7}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.correlators.Corr.reweight": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.reweight": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 6}}}}}}}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.Obs.reweight": {"tf": 1}}, "df": 1}}}}}}}}, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}}}}}}}, "f": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": 
{"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 2}}, "df": 4, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 2}}}}}}}}, "d": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}}, "df": 1, "d": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}}, "df": 4}}}}, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.input.utils.check_idl": {"tf": 1}}, "df": 15, "s": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.obs.Obs.__init__": {"tf": 1}}, "df": 2}}}, "k": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.dirac.epsilon_tensor": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1.4142135623730951}}, "df": 3}, "d": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1.7320508075688772}, "pyerrors.obs.import_bootstrap": {"tf": 1.4142135623730951}}, "df": 3}}}}, "p": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}, "d": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 3}}}}, 
"t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors.obs.derived_observable": {"tf": 1}}, "df": 1}}}}, "h": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 2.449489742783178}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1.7320508075688772}}, "df": 6, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}}, "df": 2}}}}}, "n": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}, "w": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.7320508075688772}}, "df": 2}}, "i": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 5}}}}, "w": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}}, "df": 1}}, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}}}, "v": {"1": {"docs": {}, "df": 0, "@": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "@": {"docs": {}, "df": 0, "v": {"2": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}, "docs": {}, "df": 0}}}}}, "docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.prune": {"tf": 2.23606797749979}, "pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 3, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 3.1622776601683795}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.plottable": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.linalg.svd": {"tf": 1}, "pyerrors.misc.pseudo_Obs": {"tf": 1.7320508075688772}, "pyerrors.misc.gen_correlated_data": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1}, "pyerrors.obs.Obs": {"tf": 2.449489742783178}, "pyerrors.obs.Obs.gamma_method": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gm": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.export_jackknife": 
{"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}, "pyerrors.obs.cov_Obs": {"tf": 1}}, "df": 23, "s": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.show": {"tf": 1.7320508075688772}, "pyerrors.fits.least_squares": {"tf": 2}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.error_band": {"tf": 1.4142135623730951}, "pyerrors.fits.ks_test": {"tf": 1}, "pyerrors.misc.errorbar": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs": {"tf": 1.7320508075688772}}, "df": 8}, "d": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.read_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.linalg.einsum": {"tf": 1}, "pyerrors.linalg.inv": {"tf": 1}, "pyerrors.linalg.cholesky": {"tf": 1}, "pyerrors.linalg.det": {"tf": 1}, "pyerrors.obs.CObs": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 13}}}, "i": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}}, "df": 2}}}, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.input": {"tf": 1}}, "df": 2}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}}, "df": 3, "s": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "c": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.covobs.Covobs.__init__": {"tf": 1}, "pyerrors.covobs.Covobs.errsq": {"tf": 1}}, "df": 2}}}, "b": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}}}}, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "a": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.Fit_result": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.matmul": {"tf": 1}}, "df": 8}, "s": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.obs.Obs": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}}, "e": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}, "c": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": 
{"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.7320508075688772}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 2.23606797749979}, "pyerrors.input.sfcf.read_sfcf": {"tf": 2.449489742783178}, "pyerrors.misc.print_config": {"tf": 1}}, "df": 7}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}}, "e": {"docs": {}, "df": 0, "x": {"docs": {"pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 1}}}, "y": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 3}, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "b": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.json.load_json_dict": {"tf": 1}}, "df": 3}}}}}, "c": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.7320508075688772}}, "df": 2, "s": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1.4142135623730951}}, "df": 2}}}}}}, "\\": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "\\": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "{": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "}": {"docs": {}, "df": 0, "^": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}, "u": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "s": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 2.23606797749979}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.m_eff": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.linalg.jack_matmul": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 2}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 16, "d": {"docs": {"pyerrors": {"tf": 3}, "pyerrors.correlators.Corr.deriv": {"tf": 1}, "pyerrors.correlators.Corr.second_deriv": {"tf": 1}, "pyerrors.correlators.Corr.fit": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, 
"pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 2.23606797749979}, "pyerrors.fits.total_least_squares": {"tf": 1}, "pyerrors.fits.fit_lin": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_rwms": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.input.utils.sort_names": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 26}, "r": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 3}, "s": {"docs": {"pyerrors.correlators.Corr.m_eff": {"tf": 1.7320508075688772}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 2}, "f": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}}, "df": 1}}}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 5}}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1}}, "df": 1}}}}}}, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 5, "d": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.7320508075688772}}, "df": 1}}}}}, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "q": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2}}, "df": 1}}}}}, "t": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 6}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "s": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.projected": {"tf": 1.4142135623730951}}, "df": 1}}}}}}}, "c": {"docs": 
{}, "df": 0, "o": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.residual_plot": {"tf": 1}}, "df": 2}}}}}}}}}, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1}}}}}}}}}}, "h": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 3}}}}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "p": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}}, "df": 2}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1}}}}}}}, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}}, "df": 2}}}}}, "j": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.correlators.Corr.item": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.prune": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1.7320508075688772}}, "df": 6, "o": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "w": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}, "u": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.plateau": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1}}, "df": 3, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}}, "m": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}}, "df": 1, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1}}, "df": 2}}}}}, "k": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "f": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.input": {"tf": 2.23606797749979}, "pyerrors.linalg.jack_matmul": {"tf": 1}, 
"pyerrors.obs.Obs.export_jackknife": {"tf": 2}, "pyerrors.obs.import_jackknife": {"tf": 1.4142135623730951}}, "df": 4}}}}}, "s": {"docs": {"pyerrors.obs.import_jackknife": {"tf": 1}}, "df": 1}}}}, "s": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 3.7416573867739413}, "pyerrors.correlators.Corr.dump": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 2.23606797749979}, "pyerrors.input.json.dump_to_json": {"tf": 2.449489742783178}, "pyerrors.input.json.import_json_string": {"tf": 2}, "pyerrors.input.json.load_json": {"tf": 1.7320508075688772}, "pyerrors.input.json.dump_dict_to_json": {"tf": 2.449489742783178}, "pyerrors.input.json.load_json_dict": {"tf": 1.4142135623730951}, "pyerrors.input.pandas.to_sql": {"tf": 1}, "pyerrors.input.pandas.dump_df": {"tf": 1}, "pyerrors.input.pandas.load_df": {"tf": 1.4142135623730951}, "pyerrors.obs.Obs.dump": {"tf": 1.4142135623730951}}, "df": 12}}}, "l": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "}": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.obs.covariance": {"tf": 1.4142135623730951}}, "df": 1}}, "^": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}}, "k": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}, "pyerrors.mpm.matrix_pencil_method": {"tf": 1.7320508075688772}}, "df": 4, "u": {"docs": {}, "df": 0, "b": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "k": {"docs": {}, "df": 0, "i": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "h": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "e": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.correlators.Corr.thin": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}}, "df": 3}}, "y": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}, "pyerrors.input.misc.fit_t0": {"tf": 1}}, "df": 2, "s": {"docs": {"pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}}, "df": 2}, "w": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.input.misc.fit_t0": {"tf": 1}, "pyerrors.input.openQCD.read_rwms": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.4142135623730951}}, "df": 5, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "\u2013": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "v": {"docs": {"pyerrors.fits.ks_test": {"tf": 1}}, "df": 1}}}}}}}}}}}}}}}}}, "a": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "a": {"1": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}, "2": {"docs": {"pyerrors.input.bdio.read_mesons": {"tf": 1}}, "df": 1}, "docs": {"pyerrors.input.bdio.read_dSdm": {"tf": 1}}, "df": 1}}}}, "w": {"docs": {}, "df": 0, "a": {"docs": {}, 
"df": 0, "r": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}, "pyerrors.obs.derived_observable": {"tf": 1.7320508075688772}}, "df": 2}}}}}}, "q": {"docs": {}, "df": 0, "c": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "u": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 2.23606797749979}}, "df": 1}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 2.449489742783178}}, "df": 1}}}}, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.fits.least_squares": {"tf": 1.4142135623730951}, "pyerrors.fits.qqplot": {"tf": 1.4142135623730951}}, "df": 2}}}, "u": {"docs": {}, "df": 0, "m": {"docs": {"pyerrors.correlators.Corr.T_symmetry": {"tf": 1}}, "df": 1}}}}, "r": {"docs": {}, "df": 0, "k": {"docs": {"pyerrors.input.openQCD.read_ms5_xsf": {"tf": 1.7320508075688772}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 2, "s": {"docs": {"pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}}, "df": 1}}}}, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 4}}, "df": 1, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}}, "df": 1}}}}}, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors.input.pandas.read_sql": {"tf": 1.4142135623730951}}, "df": 1}}}}, "q": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}}, "df": 2}}}}}, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}, "h": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "y": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.obs.Obs.plot_history": {"tf": 1.4142135623730951}}, "df": 2}, "i": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}}, "d": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1.4142135623730951}}, "df": 1, "s": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}, "a": {"docs": {}, "df": 0, "v": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}, "e": {"docs": {"pyerrors": {"tf": 2.449489742783178}, "pyerrors.correlators.Corr": {"tf": 1.4142135623730951}, "pyerrors.correlators.Corr.projected": {"tf": 1}, "pyerrors.correlators.Corr.prune": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.json.create_json_string": {"tf": 1}, "pyerrors.input.json.dump_to_json": {"tf": 1}, "pyerrors.input.json.dump_dict_to_json": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, 
"pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop": {"tf": 1}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1.4142135623730951}, "pyerrors.obs.derived_observable": {"tf": 1.4142135623730951}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.roots.find_root": {"tf": 1}}, "df": 21}}, "n": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 2, "l": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1, "d": {"docs": {"pyerrors": {"tf": 1.4142135623730951}}, "df": 1}}, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}}, "df": 2}}}}}, "k": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.Hankel": {"tf": 1.4142135623730951}}, "df": 1}}}}, "s": {"docs": {"pyerrors": {"tf": 2.6457513110645907}, "pyerrors.correlators.Corr.reweight": {"tf": 1}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.fits.total_least_squares": {"tf": 1.4142135623730951}, "pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.json.import_json_string": {"tf": 1}, "pyerrors.input.json.load_json": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}, "pyerrors.obs.Obs.reweight": {"tf": 1}, "pyerrors.obs.reweight": {"tf": 1}, "pyerrors.obs.correlate": {"tf": 1}, "pyerrors.obs.covariance": {"tf": 1}}, "df": 21, "h": {"docs": {"pyerrors.obs.Obs.export_bootstrap": {"tf": 1}}, "df": 1}}, "d": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}}}}}}, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "p": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}, "r": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix.g5H": {"tf": 1.4142135623730951}}, "df": 2}}}, "i": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.linalg.eigh": {"tf": 1}}, "df": 1}}}}}}, "e": {"docs": {"pyerrors": {"tf": 1.4142135623730951}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.sfcf.read_sfcf": {"tf": 1}}, "df": 4}}, "a": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {"pyerrors.input.openQCD.read_qtop": {"tf": 1}, 
"pyerrors.input.openQCD.read_qtop_sector": {"tf": 1}}, "df": 2}}}}, "p": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1}}, "o": {"docs": {}, "df": 0, "w": {"docs": {"pyerrors": {"tf": 2}, "pyerrors.fits.least_squares": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.openQCD.extract_t0": {"tf": 1}, "pyerrors.input.openQCD.extract_w0": {"tf": 1}, "pyerrors.input.pandas.to_sql": {"tf": 1}}, "df": 6}, "s": {"docs": {}, "df": 0, "t": {"docs": {"pyerrors": {"tf": 1}}, "df": 1, "n": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "m": {"docs": {}, "df": 0, "e": {"docs": {"pyerrors": {"tf": 1}}, "df": 1}}}}}}, "r": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "z": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "a": {"docs": {}, "df": 0, "l": {"docs": {"pyerrors.correlators.Corr.show": {"tf": 1}}, "df": 1}}}}}}}}, "l": {"docs": {}, "df": 0, "d": {"docs": {"pyerrors.obs.covariance": {"tf": 1}}, "df": 1, "s": {"docs": {"pyerrors.correlators.Corr.prune": {"tf": 1}}, "df": 1}}}, "t": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "i": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "g": {"docs": {"pyerrors.fits.Fit_result": {"tf": 1}}, "df": 1}}}}}}}, "m": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "u": {"docs": {}, "df": 0, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1}}, "df": 1}}}}}}}}}}, "t": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "p": {"docs": {}, "df": 0, "s": {"docs": {}, "df": 0, ":": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "/": {"docs": {}, "df": 0, "c": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "d": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "g": {"docs": {}, "df": 0, "o": {"docs": {}, "df": 0, "l": {"docs": {}, "df": 0, "f": {"docs": {"pyerrors.dirac.epsilon_tensor": {"tf": 1}, "pyerrors.dirac.epsilon_tensor_rank4": {"tf": 1}}, "df": 2}}}}}}}}}}}}}}}, "d": {"docs": {}, "df": 0, "f": {"5": {"docs": {"pyerrors.input.hadrons.read_meson_hd5": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_DistillationContraction_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_ExternalLeg_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Bilinear_hd5": {"tf": 1}, "pyerrors.input.hadrons.read_Fourquark_hd5": {"tf": 1}}, "df": 6}, "docs": {}, "df": 0}}, "u": {"docs": {}, "df": 0, "a": {"docs": {"pyerrors.mpm.matrix_pencil_method": {"tf": 1}}, "df": 1}}}, "z": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "r": {"docs": {}, "df": 0, "o": {"docs": {"pyerrors": {"tf": 1.7320508075688772}, "pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.correlators.Corr.Eigenvalue": {"tf": 1}, "pyerrors.correlators.Corr.show": {"tf": 1}, "pyerrors.input.hadrons.extract_t0_hd5": {"tf": 1}, "pyerrors.input.hadrons.Npr_matrix": {"tf": 1}, "pyerrors.input.misc.fit_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_t0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.extract_w0": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1}, "pyerrors.obs.Obs.gamma_method": {"tf": 1}, "pyerrors.obs.Obs.gm": {"tf": 1}, "pyerrors.obs.Obs.is_zero_within_error": {"tf": 1}, "pyerrors.obs.Obs.is_zero": {"tf": 1}, "pyerrors.obs.CObs.is_zero": {"tf": 1}}, "df": 15, "t": {"docs": {}, "df": 0, "h": {"docs": 
{"pyerrors.correlators.Corr.GEVP": {"tf": 1}, "pyerrors.obs.Obs.export_jackknife": {"tf": 1}, "pyerrors.obs.Obs.export_bootstrap": {"tf": 1}, "pyerrors.obs.import_jackknife": {"tf": 1}, "pyerrors.obs.import_bootstrap": {"tf": 1}}, "df": 5}}, "s": {"docs": {"pyerrors.input.hadrons.Npr_matrix": {"tf": 1.4142135623730951}}, "df": 1}}}, "u": {"docs": {}, "df": 0, "t": {"docs": {}, "df": 0, "h": {"docs": {}, "df": 0, "e": {"docs": {}, "df": 0, "n": {"docs": {"pyerrors.input.dobs.create_pobs_string": {"tf": 1}, "pyerrors.input.dobs.write_pobs": {"tf": 1}, "pyerrors.input.dobs.read_pobs": {"tf": 1}, "pyerrors.input.dobs.import_dobs_string": {"tf": 1}, "pyerrors.input.dobs.read_dobs": {"tf": 1}, "pyerrors.input.dobs.create_dobs_string": {"tf": 1}, "pyerrors.input.dobs.write_dobs": {"tf": 1}, "pyerrors.input.openQCD.read_qtop": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_gf_coupling": {"tf": 1.4142135623730951}, "pyerrors.input.openQCD.read_qtop_sector": {"tf": 1.4142135623730951}}, "df": 10}}}}}}}}}}, "pipeline": ["trimmer"], "_isPrebuiltIndex": true}; // mirrored in build-search-index.js (part 1) // Also split on html tags. this is a cheap heuristic, but good enough.