在scipy.optimize中恢复优化?

时间:2016-10-12 15:11:39

标签: python scipy mathematical-optimization

scipy.optimize提供了许多不同的方法,用于多变量系统的局部和全局优化。但是,我需要一个很长的优化运行可能会被中断(在某些情况下我可能想要故意中断它)。有没有办法重新启动......好吧,其中任何一个?我的意思是,显然可以提供作为初始猜测的最后一组最优化的参数,但这不是唯一的参数 - 例如,还有渐变(例如,jacobians),种群差别进化等我显然不希望这些也必须重新开始。

我认为很难证明这些是scipy,也不是为了保存它的状态。对于以jacobian为例的函数,有一个jacobian参数(" jac"),但它是一个布尔值(表示你的评估函数返回一个jacobian,我的没有' t),或一个可调用的函数(我只能提供上次运行的单个结果)。没有什么只需要最后一个jacobian阵列。随着差异演变,人口的流失对于表现和融合来说将是可怕的。

这有什么解决方案吗?有什么方法可以恢复优化吗?

2 个答案:

答案 0 :(得分:4)

一般的答案是否定的,除了正如你所说的那样,从上一次运行的最后一次估算开始,没有一般的解决方案。

具体来说,对于差异进化,您可以实例化DifferentialEvolutionSolver,您可以在检查点腌制并取消修复以恢复。 (建议来自https://github.com/scipy/scipy/issues/6517

答案 1 :(得分:0)

以下x可以保存并重新启动, 但我收集你想要保存并重新启动更多状态,例如梯度也是;你能澄清一下吗?

另见basinhopping, 它有一个漂亮的gui,pele-python

#!/usr/bin/env python
""" Funcgradmn: wrap f() and grad(), save all x[] f[] grad[] to plot or restart """

from __future__ import division
import numpy as np

__version__ = "2016-10-18 oct denis"


class Funcgradmon(object):
    """ Funcgradmn: wrap f() and grad(), save all x[] f[] grad[] to plot or restart

    Example: minimize, save, restart --

    fg = Funcgradmon( func, gradfunc, verbose=1 )
        # fg(x): f(x), g(x)  for minimize( jac=True )

        # run 100 iter (if linesearch, 200-300 calls of fg()) --
    options = dict( maxiter=100 )  # ... 
    min0 = minimize( fg, x0, jac=True, options=options )
    fg.savez( "0.npz", paramstr="..." )  # to plot or restart

        # restart from x[50] --
        # (won't repeat the previous path from 50
        # unless you save and restore the whole state of the optimizer)
    x0 = fg.restart( 50 )
    # change params ...
    min50 = minimize( fg, x0, jac=True, options=options )
    """

    def __init__( self, func, gradfunc, verbose=1 ):
        self.func = func
        self.gradfunc = gradfunc
        self.verbose = verbose
        self.x, self.f, self.g = [], [], []  # growing lists
        self.t = 0

    def __call__( self, x ):
        """ f, g = func(x), gradfunc(x); save them; return f, g """
        x = np.asarray_chkfinite( x )  # always
        f = self.func(x)
        g = self.gradfunc(x)
        g = np.asarray_chkfinite( g )
        self.x.append( np.copy(x) )
        self.f.append( _copy( f ))
        self.g.append( np.copy(g) )
        if self.verbose:
            print "%3d:" % self.t ,
            fmt = "%-12g" if np.isscalar(f)  else "%s\t"
            print fmt % f ,
            print "x: %s" % x ,  # with user's np.set_printoptions
            print "\tgrad: %s" % g
                # better df dx dg
        # callback: plot
        self.t += 1
        return f, g

    def restart( self, n ):
        """ x0 = fg.restart( n )  returns x[n] to minimize( fg, x0 )
        """
        x0 = self.x[n]  # minimize from here
        del self.x[:n]
        del self.f[:n]
        del self.g[:n]
        self.t = n
        if self.verbose:
            print "Funcgradmon: restart from x[%d] %s" % (n, x0)
        return x0

    def savez( self, npzfile, **kw ):
        """ np.savez( npzfile, x= f= g= ) """
        x, f, g = map( np.array, [self.x, self.f, self.g] )
        if self.verbose:
            asum = "f: %s \nx: %s \ng: %s" % (
                _asum(f), _asum(x), _asum(g) )
            print "Funcgradmon: saving to %s: \n%s \n" % (npzfile, asum)
        np.savez( npzfile, x=x, f=f, g=g, **kw )

    def load( self, npzfile ):
        load = np.load( npzfile )
        x, f, g = load["x"], load["f"], load["g"]
        if self.verbose:
            asum = "f: %s \nx: %s \ng: %s" % (
                _asum(f), _asum(x), _asum(g) )
            print "Funcgradmon: load %s: \n%s \n" % (npzfile, asum)
        self.x = list( x )
        self.f = list( f )
        self.g = list( g )
        self.loaddict = load
        return self.restart( len(x) - 1 )


def _asum( X ):
    """ one-line array summary: "shape type min av max" """
    if not hasattr( X, "dtype" ):
        return str(X)
    return "%s %s  min av max %.3g %.3g %.3g" % (
            X.shape, X.dtype, X.min(), X.mean(), X.max() )

def _copy( x ):
    return x if x is None  or np.isscalar(x) \
        else np.copy( x )

#...............................................................................
if __name__ == "__main__":
    import sys
    from scipy.optimize import minimize, rosen, rosen_der

    np.set_printoptions( threshold=20, edgeitems=10, linewidth=140,
            formatter = dict( float = lambda x: "%.3g" % x ))  # float arrays %.3g

    dim = 3
    method = "cg"
    maxiter = 10  # 1 linesearch -> 2-3 calls of fg

    # to change these params, run this.py a=1 b=None 'c = ...'  in sh or ipython
    for arg in sys.argv[1:]:
        exec( arg )

    print "\n", 80 * "-"
    print "Funcgradmon: dim %d  method %s  maxiter %d \n" % (
            dim, method, maxiter )
    x0 = np.zeros( dim )

    #...........................................................................
    fg = Funcgradmon( rosen, rosen_der, verbose=1 )
    options = dict( maxiter=maxiter )  # ... 

    min0 = minimize( fg, x0, jac=True, method=method, options=options )
    fg.savez( "0.npz", paramstr="..." )  # to plot or restart

    x0 = fg.restart( 5 )  # = fg.x[5]
    # change params, print them all
    min5 = minimize( fg, x0, jac=True, method=method, options=options )

    fg.savez( "5.npz", paramstr="..." )