1 """
2 Simple minimizer is a wrapper around scipy.leastsq, allowing a
3 user to build a fitting model as a function of general purpose
4 Fit Parameters that can be fixed or floated, bounded, and written
5 as a simple expression of other Fit Parameters.
6
7 The user sets up a model in terms of instance of Parameters, writes a
8 function-to-be-minimized (residual function) in terms of these Parameters.
9
10 Copyright (c) 2011 Matthew Newville, The University of Chicago
11 <newville@cars.uchicago.edu>
12 """
13
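# A minimal usage sketch (assuming this module is used as part of the lmfit
# package; the model, data, and parameter names below are hypothetical):
#
#     import numpy as np
#     from lmfit import Parameters, minimize
#
#     def residual(params, x, data):
#         # return the residual array: model - data
#         amp = params['amp'].value
#         decay = params['decay'].value
#         return amp * np.exp(-x / decay) - data
#
#     params = Parameters()
#     params.add('amp', value=10.0, min=0.0)
#     params.add('decay', value=1.0)
#
#     x = np.linspace(0, 10, 101)
#     data = 9.0 * np.exp(-x / 2.3) + np.random.normal(scale=0.1, size=x.size)
#
#     result = minimize(residual, params, args=(x, data), method='leastsq')
#     print(result.success, params['amp'].value, params['amp'].stderr)
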
from numpy import (dot, eye, ndarray, ones_like,
                   sqrt, take, transpose, triu)
from numpy.linalg import inv, LinAlgError

from scipy.optimize import leastsq as scipy_leastsq
from scipy.optimize import fmin as scipy_fmin
from scipy.optimize import fmin_l_bfgs_b as scipy_lbfgsb

HAS_SCALAR_MIN = False
try:
    from scipy.optimize import minimize as scipy_minimize
    HAS_SCALAR_MIN = True
except ImportError:
    pass

from .asteval import Interpreter
from .astutils import NameFinder
from .parameter import Parameter, Parameters

from . import uncertainties


40 """
41 given values for variables, calculate object value.
42 This is used by the uncertainties package to calculate
43 the uncertainty in an object even with a complicated
44 expression.
45 """
46 _obj = kwargs.get('_obj', None)
47 _pars = kwargs.get('_pars', None)
48 _names = kwargs.get('_names', None)
49 _asteval = kwargs.get('_asteval', None)
50 if (_obj is None or
51 _pars is None or
52 _names is None or
53 _asteval is None or
54 _obj.ast is None):
55 return 0
56 for val, name in zip(vals, _names):
57 _asteval.symtable[name] = val
58 return _asteval.eval(_obj.ast)
59
60 wrap_ueval = uncertainties.wrap(asteval_with_uncertainties)
61
63 """evaluate uncertainty and set .stderr for a parameter `obj`
64 given the uncertain values `uvars` (a list of uncertainties.ufloats),
65 a list of parameter names that matches uvars, and a dict of param
66 objects, keyed by name.
67
68 This uses the uncertainties package wrapped function to evaluate
69 the uncertainty for an arbitrary expression (in obj.ast) of parameters.
70 """
71 if not isinstance(obj, Parameter) or not hasattr(obj, 'ast'):
72 return
73 uval = wrap_ueval(*uvars, _obj=obj, _names=_names,
74 _pars=_pars, _asteval=_asteval)
75 try:
76 obj.stderr = uval.std_dev()
77 except:
78 obj.stderr = 0
79
81 """General Purpose Exception"""
83 Exception.__init__(self)
84 self.msg = msg
85
87 return "\n%s" % (self.msg)
88
90 """check for errors derived from asteval, raise MinimizerException"""
91 if len(error) > 0:
92 for err in error:
93 msg = '%s: %s' % (err.get_error())
94 return msg
95 return None
96
97
99 """general minimizer"""
100 err_nonparam = \
101 "params must be a minimizer.Parameters() instance or list of Parameters()"
102 err_maxfev = """Too many function calls (max set to %i)! Use:
103 minimize(func, params, ...., maxfev=NNN)
104 or set leastsq_kws['maxfev'] to increase this maximum."""
105
106 - def __init__(self, userfcn, params, fcn_args=None, fcn_kws=None,
107 iter_cb=None, scale_covar=True, **kws):
108 self.userfcn = userfcn
109 self.userargs = fcn_args
110 if self.userargs is None:
111 self.userargs = []
112
113 self.userkws = fcn_kws
114 if self.userkws is None:
115 self.userkws = {}
116 self.kws = kws
117 self.iter_cb = iter_cb
118 self.scale_covar = scale_covar
119 self.nfev = 0
120 self.nfree = 0
121 self.message = None
122 self.var_map = []
123 self.jacfcn = None
124 self.asteval = Interpreter()
125 self.namefinder = NameFinder()
126 self.__prepared = False
127 self.__set_params(params)
128 self.prepare_fit()
129
131 """
132 update parameter value, including setting bounds.
133 For a constrained parameter (one with an expr defined),
134 this first updates (recursively) all parameters on which
135 the parameter depends (using the 'deps' field).
136 """
137
138
139
140 if self.updated[name]:
141 return
142 par = self.params[name]
143 if par.expr is not None:
144 for dep in par.deps:
145 self.__update_paramval(dep)
146 par.value = self.asteval.run(par.ast)
147 out = check_ast_errors(self.asteval.error)
148 if out is not None:
149 self.asteval.raise_exception(None)
150 self.asteval.symtable[name] = par.value
151 self.updated[name] = True
152
154 """update all constrained parameters, checking that
155 dependencies are evaluated as needed."""
156 self.updated = dict([(name, False) for name in self.params])
157 for name in self.params:
158 self.__update_paramval(name)
159
161 """
162 residual function used for least-squares fit.
163 With the new, candidate values of fvars (the fitting variables),
164 this evaluates all parameters, including setting bounds and
165 evaluating constraints, and then passes those to the
166 user-supplied function to calculate the residual.
167 """
168
169 for varname, val in zip(self.var_map, fvars):
170
171 par = self.params[varname]
172 par.value = par.from_internal(val)
173 self.nfev = self.nfev + 1
174
175 self.update_constraints()
176 out = self.userfcn(self.params, *self.userargs, **self.userkws)
177 if hasattr(self.iter_cb, '__call__'):
178 self.iter_cb(self.params, self.nfev, out,
179 *self.userargs, **self.userkws)
180 return out
181
183 """
184 analytical jacobian to be used with the Levenberg-Marquardt
185
186 modified 02-01-2012 by Glenn Jones, Aberystwyth University
187 """
188 for varname, val in zip(self.var_map, fvars):
189
190 self.params[varname].from_internal(val)
191
192 self.nfev = self.nfev + 1
193 self.update_constraints()
194
195 return self.jacfcn(self.params, *self.userargs, **self.userkws)
196
198 """ set internal self.params from a Parameters object or
199 a list/tuple of Parameters"""
200 if params is None or isinstance(params, Parameters):
201 self.params = params
202 elif isinstance(params, (list, tuple)):
203 _params = Parameters()
204 for _par in params:
205 if not isinstance(_par, Parameter):
206 raise MinimizerException(self.err_nonparam)
207 else:
208 _params[_par.name] = _par
209 self.params = _params
210 else:
211 raise MinimizerException(self.err_nonparam)
212
214 """penalty function for scalar minimizers:
215 evaluates user-supplied objective function,
216 if result is an array, return array sum-of-squares.
217 """
218 r = self.__residual(params)
219 if isinstance(r, ndarray):
220 r = (r*r).sum()
221 return r
222
224 """prepare parameters for fit"""
225
226
227 if params is None and self.params is not None and self.__prepared:
228 return
229 if params is not None and self.params is None:
230 self.__set_params(params)
231 self.nfev = 0
232 self.var_map = []
233 self.vars = []
234 self.vmin, self.vmax = [], []
235 for name, par in self.params.items():
236 if par.expr is not None:
237 par.ast = self.asteval.parse(par.expr)
238 check_ast_errors(self.asteval.error)
239 par.vary = False
240 par.deps = []
241 self.namefinder.names = []
242 self.namefinder.generic_visit(par.ast)
243 for symname in self.namefinder.names:
244 if (symname in self.params and
245 symname not in par.deps):
246 par.deps.append(symname)
247 elif par.vary:
248 self.var_map.append(name)
249 self.vars.append(par.setup_bounds())
250
251
252
253
254 self.asteval.symtable[name] = par.value
255 par.init_value = par.value
256 if par.name is None:
257 par.name = name
258
259 self.nvarys = len(self.vars)
260
261
262
263
264 self.update_constraints()
265 self.__prepared = True
266
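    # A small sketch of what prepare_fit() sorts out (hypothetical parameter
    # names, using the Parameters/Parameter API from parameter.py):
    #
    #     params = Parameters()
    #     params.add('amp', value=5.0, min=0.0)        # free, bounded
    #     params.add('cen', value=1.0, vary=False)     # fixed
    #     params.add('wid', value=0.5)                 # free
    #     params.add('fwhm', expr='2.3548200*wid')     # constrained
    #
    # 'amp' and 'wid' end up in self.var_map / self.vars, while 'fwhm' gets
    # a parsed AST and a deps list (['wid']) and is re-evaluated by
    # update_constraints() whenever 'wid' changes.
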
    def anneal(self, schedule='cauchy', **kws):
        """
        use simulated annealing
        """
        sched = 'fast'
        if schedule in ('cauchy', 'boltzmann'):
            sched = schedule

        self.prepare_fit()
        sakws = dict(full_output=1, schedule=sched,
                     maxiter=2000 * (self.nvarys + 1))

        sakws.update(self.kws)
        sakws.update(kws)
        # scipy.optimize.anneal is not imported here (it has been deprecated
        # and removed from scipy), so simulated annealing is unavailable.
        print("WARNING: scipy anneal appears unusable!")
        raise NotImplementedError("simulated annealing is not supported")

287 """
288 use l-bfgs-b minimization
289 """
290 self.prepare_fit()
291 lb_kws = dict(factr=1000.0, approx_grad=True, m=20,
292 maxfun = 2000 * (self.nvarys + 1),
293
294 )
295 lb_kws.update(self.kws)
296 lb_kws.update(kws)
297
298 xout, fout, info = scipy_lbfgsb(self.penalty, self.vars, **lb_kws)
299 self.nfev = info['funcalls']
300 self.message = info['task']
301 self.chisqr = (self.penalty(xout)**2).sum()
302
    def fmin(self, **kws):
        """
        use nelder-mead (simplex) minimization
        """
        self.prepare_fit()
        fmin_kws = dict(full_output=True, disp=False, retall=True,
                        ftol=1.e-4, xtol=1.e-4,
                        maxfun=5000 * (self.nvarys + 1))
        fmin_kws.update(kws)
        ret = scipy_fmin(self.penalty, self.vars, **fmin_kws)
        xout, fout, niter, funccalls, warnflag, allvecs = ret
        self.nfev = funccalls
        # penalty() already returns the sum of squares of the residual
        self.chisqr = self.penalty(xout)

    def scalar_minimize(self, method='Nelder-Mead', hess=None, tol=None, **kws):
        """use one of the scalar minimization methods from scipy.
        Available methods include:
          Nelder-Mead
          Powell
          CG  (conjugate gradient)
          BFGS
          Newton-CG
          Anneal
          L-BFGS-B
          TNC
          COBYLA
          SLSQP

        If the objective function returns a numpy array instead
        of the expected scalar, the sum of squares of the array
        will be used.

        Note that bounds and constraints can be set on Parameters
        for any of these methods, so they are not passed separately
        to the methods that support bounds natively.
        """
        if not HAS_SCALAR_MIN:
            raise NotImplementedError

        self.prepare_fit()

        maxfev = 1000 * (self.nvarys + 1)
        opts = {'maxiter': maxfev}
        if method not in ('L-BFGS-B', 'TNC', 'SLSQP'):
            opts['maxfev'] = maxfev

        fmin_kws = dict(method=method, tol=tol, hess=hess, options=opts)
        fmin_kws.update(self.kws)
        fmin_kws.update(kws)

        ret = scipy_minimize(self.penalty, self.vars, **fmin_kws)
        xout = ret.x
        self.message = ret.message
        self.nfev = ret.nfev
        # penalty() already returns the sum of squares of the residual
        self.chisqr = self.penalty(xout)

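    # Usage sketch (hypothetical names; `residual`, `params`, `x`, and `data`
    # are as in the module-level example above):
    #
    #     mini = Minimizer(residual, params, fcn_args=(x, data))
    #     mini.scalar_minimize(method='L-BFGS-B', tol=1.e-8)
    #     print(mini.nfev, mini.chisqr, params['amp'].value)
    #
    # Bounds and constraint expressions are applied through the Parameters
    # themselves, so no separate `bounds` argument is passed to scipy here.
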
362 """
363 use Levenberg-Marquardt minimization to perform fit.
364 This assumes that ModelParameters have been stored,
365 and a function to minimize has been properly set up.
366
367 This wraps scipy.optimize.leastsq, and keyword arguments are passed
368 directly as options to scipy.optimize.leastsq
369
370 When possible, this calculates the estimated uncertainties and
371 variable correlations from the covariance matrix.
372
373 writes outputs to many internal attributes, and
374 returns True if fit was successful, False if not.
375 """
376 self.prepare_fit()
377 lskws = dict(full_output=1, xtol=1.e-7, ftol=1.e-7,
378 gtol=1.e-7, maxfev=2000*(self.nvarys+1), Dfun=None)
379
380 lskws.update(self.kws)
381 lskws.update(kws)
382
383 if lskws['Dfun'] is not None:
384 self.jacfcn = lskws['Dfun']
385 lskws['Dfun'] = self.__jacobian
386
387 lsout = scipy_leastsq(self.__residual, self.vars, **lskws)
388 _best, _cov, infodict, errmsg, ier = lsout
389
390 self.residual = resid = infodict['fvec']
391
392
393 self.ier = ier
394 self.lmdif_message = errmsg
395 self.message = 'Fit succeeded.'
396 self.success = ier in [1, 2, 3, 4]
397
398 if ier == 0:
399 self.message = 'Invalid Input Parameters.'
400 elif ier == 5:
401 self.message = self.err_maxfev % lskws['maxfev']
402 else:
403 self.message = 'Tolerance seems to be too small.'
404
405 self.nfev = infodict['nfev']
406 self.ndata = len(resid)
407
408 sum_sqr = (resid**2).sum()
409 self.chisqr = sum_sqr
410 self.nfree = (self.ndata - self.nvarys)
411 self.redchi = sum_sqr / self.nfree
412
413
414
415 grad = ones_like(_best)
416 vbest = ones_like(_best)
417 for ivar, varname in enumerate(self.var_map):
418 par = self.params[varname]
419 grad[ivar] = par.scale_gradient(_best[ivar])
420 vbest[ivar] = par.value
421
422
423
424 infodict['fjac'] = transpose(transpose(infodict['fjac']) /
425 take(grad, infodict['ipvt'] - 1))
426 rvec = dot(triu(transpose(infodict['fjac'])[:self.nvarys, :]),
427 take(eye(self.nvarys), infodict['ipvt'] - 1, 0))
428 try:
429 cov = inv(dot(transpose(rvec), rvec))
430 except (LinAlgError, ValueError):
431 cov = None
432
433 for par in self.params.values():
434 par.stderr, par.correl = 0, None
435
436 self.covar = cov
437 if cov is None:
438 self.errorbars = False
439 self.message = '%s. Could not estimate error-bars'
440 else:
441 self.errorbars = True
442 if self.scale_covar:
443 self.covar = cov = cov * sum_sqr / self.nfree
444
445
446
447
448
449
450 uvars = uncertainties.correlated_values(vbest, self.covar)
451
452 for ivar, varname in enumerate(self.var_map):
453 par = self.params[varname]
454 par.stderr = sqrt(cov[ivar, ivar])
455 par.correl = {}
456 for jvar, varn2 in enumerate(self.var_map):
457 if jvar != ivar:
458 par.correl[varn2] = (cov[ivar, jvar]/
459 (par.stderr * sqrt(cov[jvar, jvar])))
460
461 for pname, par in self.params.items():
462 eval_stderr(par, uvars, self.var_map,
463 self.params, self.asteval)
464
465
466 for v, nam in zip(uvars, self.var_map):
467 self.asteval.symtable[nam] = v.nominal_value
468
469 for par in self.params.values():
470 if hasattr(par, 'ast'):
471 delattr(par, 'ast')
472 return self.success
473
def minimize(fcn, params, method='leastsq', args=None, kws=None,
             scale_covar=True, engine=None, iter_cb=None, **fit_kws):
    """simple minimization function, finding the values for the params
    which give the minimal sum-of-squares of the array returned by fcn
    """
    fitter = Minimizer(fcn, params, fcn_args=args, fcn_kws=kws,
                       iter_cb=iter_cb, scale_covar=scale_covar, **fit_kws)

    _scalar_methods = {'nelder': 'Nelder-Mead', 'powell': 'Powell',
                       'cg': 'CG', 'bfgs': 'BFGS',
                       'newton': 'Newton-CG', 'anneal': 'Anneal',
                       'lbfgs': 'L-BFGS-B', 'l-bfgs': 'L-BFGS-B',
                       'tnc': 'TNC', 'cobyla': 'COBYLA',
                       'slsqp': 'SLSQP'}

    _fitmethods = {'anneal': 'anneal', 'nelder': 'fmin',
                   'lbfgsb': 'lbfgsb', 'leastsq': 'leastsq'}

    if engine is not None:
        method = engine
    meth = method.lower()

    fitfunction = None
    kwargs = {}

    if meth == 'leastsq':
        fitfunction = fitter.leastsq
    else:
        # prefer the generic scipy.optimize.minimize interface when available
        if HAS_SCALAR_MIN:
            for name, scipy_name in _scalar_methods.items():
                if meth.startswith(name):
                    fitfunction = fitter.scalar_minimize
                    kwargs = dict(method=scipy_name)
        # otherwise fall back to the dedicated Minimizer methods
        if fitfunction is None:
            for name, method_name in _fitmethods.items():
                if meth.startswith(name):
                    fitfunction = getattr(fitter, method_name)
    if fitfunction is not None:
        fitfunction(**kwargs)
    return fitter

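# Usage sketch for selecting a solver by name (method strings resolve through
# the tables above; `residual`, `params`, `x`, and `data` are the hypothetical
# objects from the module-level example):
#
#     out = minimize(residual, params, args=(x, data))                   # leastsq
#     out = minimize(residual, params, args=(x, data), method='nelder')  # simplex
#     print(out.nfev, out.chisqr)
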
519 """nach
520 A function which takes a function a makes a parameters-dict
521 for it.
522
523 Takes the function fcn. A starting guess x0 for the
524 non kwargs paramter must be also given. If kwargs
525 are used, used_kwargs is dict were the keys are the
526 used kwarg and the values are the starting values.
527 """
528 import inspect
529 args = inspect.getargspec(fcn)
530 defaults = args[-1]
531 len_def = len(defaults) if defaults is not None else 0
532
533
534 args_without_defaults = len(args[0])- len_def
535
536 if len(x0) < args_without_defaults:
537 raise ValueError( 'x0 to short')
538
539 p = Parameters()
540 for i, val in enumerate(x0):
541 p.add(args[0][i], val)
542
543 if used_kwargs:
544 for arg, val in used_kwargs.items():
545 p.add(arg, val)
546 else:
547 used_kwargs = {}
548
549 def func(para):
550 "wrapped func"
551 kwdict = {}
552
553 for arg in used_kwargs.keys():
554 kwdict[arg] = para[arg].value
555
556 vals = [para[i].value for i in p]
557 return fcn(*vals[:len(x0)], **kwdict)
558
559 return p, func
560
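# Usage sketch for the helper above (`rosen`, `a`, `b`, and `scale` are
# hypothetical; the wrapped function returns a scalar cost, so a scalar
# method such as 'nelder' is appropriate):
#
#     def rosen(a, b, scale=1.0):
#         return scale * ((1.0 - a)**2 + 100.0 * (b - a**2)**2)
#
#     p, wrapped = make_paras_and_func(rosen, (0.0, 0.0),
#                                      used_kwargs={'scale': 1.0})
#     p['scale'].vary = False   # keep the keyword parameter fixed
#     out = minimize(wrapped, p, method='nelder')
#     print(p['a'].value, p['b'].value)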