Mirror of https://github.com/fjosw/pyerrors.git, synced 2025-03-16 15:20:24 +01:00

Commit cfb844dc01: Merge branch 'develop' into documentation
2 changed files with 36 additions and 16 deletions
@@ -96,7 +96,7 @@ def least_squares(x, y, func, priors=None, silent=False, **kwargs):
     initial_guess : list
         can provide an initial guess for the input parameters. Relevant for
         non-linear fits with many parameters.
-    method : str
+    method : str, optional
         can be used to choose an alternative method for the minimization of chisquare.
         The possible methods are the ones which can be used for scipy.optimize.minimize and
         migrad of iminuit. If no method is specified, Levenberg-Marquard is used.
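As a quick illustration of the method keyword documented above (not part of this commit): a minimal usage sketch whose data setup mirrors the test added further down; the sample size and error model are illustrative choices.

import numpy as np
import pyerrors as pe

dim = 50
x = np.arange(dim)
y = 2 * np.exp(-0.06 * x) + np.random.normal(0.0, 0.15, dim)
yerr = 0.1 + 0.1 * np.random.rand(dim)
oy = [pe.pseudo_Obs(y[i], yerr[i], 'test') for i in range(dim)]

def func(a, x):
    return a[0] * np.exp(-a[1] * x)

out = pe.least_squares(x, oy, func, method='migrad')  # iminuit backend, requires iminuit
out = pe.least_squares(x, oy, func, method='Powell')  # any scipy.optimize.minimize method name
out = pe.least_squares(x, oy, func)                   # default: Levenberg-Marquardt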
@@ -487,26 +487,21 @@ def _standard_fit(x, y, func, silent=False, **kwargs):
         chisq = anp.sum(((y_f - model) / dy_f) ** 2)
         return chisq

-    if 'method' in kwargs and not (kwargs.get('method', 'Levenberg-Marquardt') == 'Levenberg-Marquardt'):
-        output.method = kwargs.get('method')
+    output.method = kwargs.get('method', 'Levenberg-Marquardt')
     if not silent:
-        print('Method:', kwargs.get('method'))
-        if kwargs.get('method') == 'migrad':
-            fit_result = iminuit.minimize(chisqfunc, x0)
-            fit_result = iminuit.minimize(chisqfunc, fit_result.x)
+        print('Method:', output.method)
+
+    if output.method != 'Levenberg-Marquardt':
+        if output.method == 'migrad':
+            fit_result = iminuit.minimize(chisqfunc, x0, tol=1e-4)  # Stopping crieterion 0.002 * tol * errordef
+            output.iterations = fit_result.nfev
         else:
-            fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'))
-            fit_result = scipy.optimize.minimize(chisqfunc, fit_result.x, method=kwargs.get('method'), tol=1e-12)
+            fit_result = scipy.optimize.minimize(chisqfunc, x0, method=kwargs.get('method'), tol=1e-12)
+            output.iterations = fit_result.nit

         chisquare = fit_result.fun

     else:
-        output.method = 'Levenberg-Marquardt'
         if not silent:
             print('Method: Levenberg-Marquardt')

         if kwargs.get('correlated_fit') is True:
             def chisqfunc_residuals(p):
                 model = func(p, x)
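The dispatch in the new code can be read in isolation with the following standalone sketch. It uses a toy chi-square function and is not pyerrors code; it assumes scipy and iminuit are installed and only relies on the calls that appear in the hunk above.

import numpy as np
import scipy.optimize
import iminuit  # only needed for method='migrad'

def chisqfunc(p):
    # toy quadratic chi-square with its minimum at (1, 2)
    return (p[0] - 1.0) ** 2 + (p[1] - 2.0) ** 2

x0 = np.array([0.0, 0.0])
method = 'migrad'  # or any scipy.optimize.minimize method name

if method == 'migrad':
    fit_result = iminuit.minimize(chisqfunc, x0, tol=1e-4)
    iterations = fit_result.nfev  # iminuit reports function evaluations
else:
    fit_result = scipy.optimize.minimize(chisqfunc, x0, method=method, tol=1e-12)
    iterations = fit_result.nit   # scipy reports iterations

print(fit_result.x, fit_result.fun, iterations)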
@@ -49,8 +49,6 @@ def test_least_squares():
         y = a[0] * np.exp(-a[1] * x)
         return y

-    out = pe.least_squares(x, oy, func, method='migrad')
-    out = pe.least_squares(x, oy, func, method='Powell')
     out = pe.least_squares(x, oy, func, expected_chisquare=True, resplot=True, qqplot=True)
     beta = out.fit_parameters

@@ -86,6 +84,33 @@ def test_least_squares():
     assert math.isclose(pe.covariance(betac[0], betac[1]), pcov[0, 1], abs_tol=1e-3)


+def test_alternative_solvers():
+    dim = 192
+    x = np.arange(dim)
+    y = 2 * np.exp(-0.06 * x) + np.random.normal(0.0, 0.15, dim)
+    yerr = 0.1 + 0.1 * np.random.rand(dim)
+
+    oy = []
+    for i, item in enumerate(x):
+        oy.append(pe.pseudo_Obs(y[i], yerr[i], 'test'))
+
+    def func(a, x):
+        y = a[0] * np.exp(-a[1] * x)
+        return y
+
+    chisquare_values = []
+    out = pe.least_squares(x, oy, func, method='migrad')
+    chisquare_values.append(out.chisquare)
+    out = pe.least_squares(x, oy, func, method='Powell')
+    chisquare_values.append(out.chisquare)
+    out = pe.least_squares(x, oy, func, method='Nelder-Mead')
+    chisquare_values.append(out.chisquare)
+    out = pe.least_squares(x, oy, func, method='Levenberg-Marquardt')
+    chisquare_values.append(out.chisquare)
+    chisquare_values = np.array(chisquare_values)
+    assert np.all(np.isclose(chisquare_values, chisquare_values[0]))
+
+
 def test_correlated_fit():
     num_samples = 400
     N = 10
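The new test only compares out.chisquare across solvers; the other fit-result attributes touched by this commit can be inspected the same way. A sketch, assuming x, oy and func are defined as in the test above:

out = pe.least_squares(x, oy, func, method='Nelder-Mead')
print(out.method)          # minimizer that was used; 'Levenberg-Marquardt' if none was given
print(out.chisquare)       # value of the minimized chisquare
print(out.iterations)      # nfev for migrad, nit for the scipy methods
print(out.fit_parameters)  # fitted parameters as Obs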