def minimize(self, solver, tol, maxiter, maxfun, params_init, params_lbnd, params_ubnd, disp=_debug):
    """Minimize this loss with ``scipy.optimize.minimize``.

    The loss object itself is the objective callable; its ``gradient`` and
    ``hessian`` methods supply first/second derivatives to the solver.

    Parameters
    ----------
    solver : str
        scipy method name (e.g. ``'Newton-CG'``, ``'BFGS'``).
    tol : float
        Convergence tolerance, mapped onto the solver-specific option
        (``xtol`` for Newton-CG, ``gtol`` otherwise).
    maxiter : int
        Maximum number of solver iterations.
    maxfun : int
        Maximum number of objective evaluations (L-BFGS-B-style option).
    params_init : array_like
        Initial parameter vector.
    params_lbnd, params_ubnd : array_like
        Elementwise lower/upper bounds, zipped into ``bounds`` pairs.
    disp : bool, optional
        Solver verbosity flag (defaults to the module-level ``_debug``).

    Returns
    -------
    scipy.optimize.OptimizeResult
        The result object returned by ``scipy.optimize.minimize``.
    """
    # Each scipy method accepts a different set of option keys, so build
    # the options dict per solver.
    if solver == 'Newton-CG':
        options = {'maxiter': maxiter, 'xtol': tol, 'disp': disp}
    elif solver == 'BFGS':
        # NOTE(review): 'maxfun', 'maxls', 'maxcor', 'ftol' and 'xtol'
        # are L-BFGS-B options, not BFGS options — scipy emits an
        # "unknown options" warning for method='BFGS'. This branch was
        # presumably intended for 'L-BFGS-B'; confirm against callers
        # before renaming the key.
        options = {'maxiter': maxiter, 'maxfun': maxfun, 'maxls': 100,
                   'maxcor': 100, 'ftol': 1.e-10, 'gtol': tol,
                   'xtol': 1.e-10, 'disp': disp}
    else:
        # Fix: previously `options` was left unbound for any solver other
        # than the two above, so the call below raised UnboundLocalError.
        # Fall back to the options every scipy method understands.
        options = {'maxiter': maxiter, 'disp': disp}
    return scipy_minimize(self,
                          jac=self.gradient,
                          hess=self.hessian,
                          x0=params_init,
                          bounds=tuple(zip(params_lbnd, params_ubnd)),
                          method=solver,
                          options=options)