import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import pyerrors as pe
plt.style.use('./base_style.mplstyle')
# Enable LaTeX rendering only if a working TeX installation is available
usetex = matplotlib.checkdep_usetex(True)
plt.rc('text', usetex=usetex)
Read data from the PCAC example
fP = pe.Corr(pe.input.json.load_json("./data/f_P"), padding=[1, 1])
fP.gamma_method()
We can now define a custom fit function, in this case a single exponential. Note that we have to use the autograd-wrapped version of numpy (imported as anp) so that automatic differentiation can be applied to the fit function.
import autograd.numpy as anp
def func_exp(a, x):
    # Single exponential: a[0] is the mass, a[1] the amplitude
    y = a[1] * anp.exp(-a[0] * x)
    return y
Fit a single exponential to f_P. The keyword argument resplot
generates a figure that visualizes the fit together with its residuals.
start_fit = 9
stop_fit = 18
fit_result = fP.fit(func_exp, [start_fit, stop_fit], resplot=True)
fit_result.gamma_method()
print("\n", fit_result)
The covariance of the two fit parameters can be computed in the following way
cov_01 = pe.covariance([fit_result[0], fit_result[1]])[0, 1]
print('Covariance: ', cov_01)
print('Normalized covariance: ', cov_01 / fit_result[0].dvalue / fit_result[1].dvalue)
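The normalized covariance is simply the correlation coefficient of the two parameters. The same information can also be obtained directly as a correlation matrix; a short sketch, assuming the correlation keyword of pe.covariance:
# Correlation matrix of the fit parameters
# (sketch, assuming the correlation keyword of pe.covariance)
corr = pe.covariance([fit_result[0], fit_result[1]], correlation=True)
print('Correlation matrix:\n', corr)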
Effective mass
Calculate the effective mass for comparison
m_eff_fP = fP.m_eff()
m_eff_fP.tag = r"Effective mass of f_P"
Calculate the corresponding plateau and compare the two results
m_eff_fP.gamma_method()
m_eff_plateau = m_eff_fP.plateau([start_fit, stop_fit])
m_eff_plateau.gamma_method()
print()
print('Effective mass:\t', m_eff_plateau)
print('Fitted mass:\t', fit_result[0])
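Since both results are Obs, their difference can be formed directly and checked for consistency with zero; a short sketch, assuming the is_zero_within_error method of Obs:
# Difference of plateau and fitted mass, checked for agreement within errors
mass_diff = m_eff_plateau - fit_result[0]
mass_diff.gamma_method()
print('Difference:', mass_diff)
print('Compatible with zero within one sigma:', mass_diff.is_zero_within_error(sigma=1))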
We can now visualize the effective mass compared to the result of the fit
m_eff_fP.show(plateau=fit_result[0])
Fitting with x-errors
We first generate pseudo data
ox = []
oy = []
for i in range(0, 10, 2):
    ox.append(pe.pseudo_Obs(i + 0.35 * np.random.normal(), 0.35, str(i)))
    oy.append(pe.pseudo_Obs(np.sin(i) + 0.25 * np.random.normal() - 0.2 * i + 0.17, 0.25, str(i)))
[o.gamma_method() for o in ox + oy]
[print(o) for o in zip(ox, oy)];
And choose a function to fit
def func(a, x):
    y = a[0] + a[1] * x + a[2] * anp.sin(x)
    return y
We can then fit this function to the data and obtain the fit parameters as Obs with the function total_least_squares,
which uses orthogonal distance regression.
beta = pe.fits.total_least_squares(ox, oy, func)
for i, item in enumerate(beta):
    item.gamma_method()
    print('Parameter', i + 1, ':', item)
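To see the effect of the x-errors one can compare with an ordinary least-squares fit that only uses the central values of the x data; a sketch, assuming pe.fits.least_squares with plain x values:
# Comparison: standard least-squares fit ignoring the x-errors (sketch)
beta_ls = pe.fits.least_squares([o.value for o in ox], oy, func)
for i, item in enumerate(beta_ls):
    item.gamma_method()
    print('Parameter (no x-errors)', i + 1, ':', item)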
For the visualization we evaluate the fit function over a range of x values
x_t = np.arange(min(ox).value - 1, max(ox).value + 1, 0.01)
y_t = func([o.value for o in beta], x_t)
plt.errorbar([e.value for e in ox], [e.value for e in oy], xerr=[e.dvalue for e in ox], yerr=[e.dvalue for e in oy], marker='D', lw=1, ls='none', zorder=10)
plt.plot(x_t, y_t, '--')
plt.show()
We can also take a look at how much the individual ensembles contribute to the uncertainty of the fit parameters
for i, item in enumerate(beta):
    print('Parameter', i)
    item.plot_piechart()
    print()
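The same breakdown can also be printed in text form; a minimal sketch, assuming the details method of Obs:
# Textual breakdown of the error contributions per ensemble (sketch)
for i, item in enumerate(beta):
    print('Parameter', i)
    item.details()
    print()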