mirror of
https://github.com/fjosw/pyerrors.git
synced 2025-05-15 12:03:42 +02:00
Merge branch 'develop' into documentation
This commit is contained in:
commit
cfb844dc01
2 changed files with 36 additions and 16 deletions
|
@ -96,7 +96,7 @@ def least_squares(x, y, func, priors=None, silent=False, **kwargs):
|
||||||
initial_guess : list
|
initial_guess : list
|
||||||
can provide an initial guess for the input parameters. Relevant for
|
can provide an initial guess for the input parameters. Relevant for
|
||||||
non-linear fits with many parameters.
|
non-linear fits with many parameters.
|
||||||
method : str
|
method : str, optional
|
||||||
can be used to choose an alternative method for the minimization of chisquare.
|
can be used to choose an alternative method for the minimization of chisquare.
|
||||||
The possible methods are the ones which can be used for scipy.optimize.minimize and
|
The possible methods are the ones which can be used for scipy.optimize.minimize and
|
||||||
migrad of iminuit. If no method is specified, Levenberg-Marquardt is used.
|
migrad of iminuit. If no method is specified, Levenberg-Marquardt is used.
|
||||||
|
@ -487,26 +487,21 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
|
||||||
chisq = anp.sum(((y_f - model) / dy_f) ** 2)
|
chisq = anp.sum(((y_f - model) / dy_f) ** 2)
|
||||||
return chisq
|
return chisq
|
||||||
|
|
||||||
if 'method' in kwargs and not (kwargs.get('method', 'Levenberg-Marquardt') == 'Levenberg-Marquardt'):
|
output.method = kwargs.get('method', 'Levenberg-Marquardt')
|
||||||
output.method = kwargs.get('method')
|
|
||||||
if not silent:
|
if not silent:
|
||||||
print('Method:', kwargs.get('method'))
|
print('Method:', output.method)
|
||||||
if kwargs.get('method') == 'migrad':
|
|
||||||
fit_result = iminuit.minimize(chisqfunc, x0)
|
if output.method != 'Levenberg-Marquardt':
|
||||||
fit_result = iminuit.minimize(chisqfunc, fit_result.x)
|
if output.method == 'migrad':
|
||||||
|
fit_result = iminuit.minimize(chisqfunc, x0, tol=1e-4) # Stopping crieterion 0.002 * tol * errordef
|
||||||
output.iterations = fit_result.nfev
|
output.iterations = fit_result.nfev
|
||||||
else:
|
else:
|
||||||
fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'))
|
fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'), tol=1e-12)
|
||||||
fit_result = scipy.optimize.minimize(chisqfunc, fit_result.x, method=kwargs.get('method'), tol=1e-12)
|
|
||||||
output.iterations = fit_result.nit
|
output.iterations = fit_result.nit
|
||||||
|
|
||||||
chisquare = fit_result.fun
|
chisquare = fit_result.fun
|
||||||
|
|
||||||
else:
|
else:
|
||||||
output.method = 'Levenberg-Marquardt'
|
|
||||||
if not silent:
|
|
||||||
print('Method: Levenberg-Marquardt')
|
|
||||||
|
|
||||||
if kwargs.get('correlated_fit') is True:
|
if kwargs.get('correlated_fit') is True:
|
||||||
def chisqfunc_residuals(p):
|
def chisqfunc_residuals(p):
|
||||||
model = func(p, x)
|
model = func(p, x)
|
||||||
|
|
|
@ -49,8 +49,6 @@ def test_least_squares():
|
||||||
y = a[0] * np.exp(-a[1] * x)
|
y = a[0] * np.exp(-a[1] * x)
|
||||||
return y
|
return y
|
||||||
|
|
||||||
out = pe.least_squares(x, oy, func, method='migrad')
|
|
||||||
out = pe.least_squares(x, oy, func, method='Powell')
|
|
||||||
out = pe.least_squares(x, oy, func, expected_chisquare=True, resplot=True, qqplot=True)
|
out = pe.least_squares(x, oy, func, expected_chisquare=True, resplot=True, qqplot=True)
|
||||||
beta = out.fit_parameters
|
beta = out.fit_parameters
|
||||||
|
|
||||||
|
@ -86,6 +84,33 @@ def test_least_squares():
|
||||||
assert math.isclose(pe.covariance(betac[0], betac[1]), pcov[0, 1], abs_tol=1e-3)
|
assert math.isclose(pe.covariance(betac[0], betac[1]), pcov[0, 1], abs_tol=1e-3)
|
||||||
|
|
||||||
|
|
||||||
|
def test_alternative_solvers():
|
||||||
|
dim = 192
|
||||||
|
x = np.arange(dim)
|
||||||
|
y = 2 * np.exp(-0.06 * x) + np.random.normal(0.0, 0.15, dim)
|
||||||
|
yerr = 0.1 + 0.1 * np.random.rand(dim)
|
||||||
|
|
||||||
|
oy = []
|
||||||
|
for i, item in enumerate(x):
|
||||||
|
oy.append(pe.pseudo_Obs(y[i], yerr[i], 'test'))
|
||||||
|
|
||||||
|
def func(a, x):
|
||||||
|
y = a[0] * np.exp(-a[1] * x)
|
||||||
|
return y
|
||||||
|
|
||||||
|
chisquare_values = []
|
||||||
|
out = pe.least_squares(x, oy, func, method='migrad')
|
||||||
|
chisquare_values.append(out.chisquare)
|
||||||
|
out = pe.least_squares(x, oy, func, method='Powell')
|
||||||
|
chisquare_values.append(out.chisquare)
|
||||||
|
out = pe.least_squares(x, oy, func, method='Nelder-Mead')
|
||||||
|
chisquare_values.append(out.chisquare)
|
||||||
|
out = pe.least_squares(x, oy, func, method='Levenberg-Marquardt')
|
||||||
|
chisquare_values.append(out.chisquare)
|
||||||
|
chisquare_values = np.array(chisquare_values)
|
||||||
|
assert np.all(np.isclose(chisquare_values, chisquare_values[0]))
|
||||||
|
|
||||||
|
|
||||||
def test_correlated_fit():
|
def test_correlated_fit():
|
||||||
num_samples = 400
|
num_samples = 400
|
||||||
N = 10
|
N = 10
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue