抛物面优化需要缩放

时间:2016-02-21 21:16:01

标签: python openmdao

我想在 OpenMDAO 1.x 中测试 OpenMDAO 0.x 文档里的 Paraboloid 示例,并最终用上缩放器(scaler),但无论是否使用缩放器,我都会得到奇怪的结果。这是代码:

from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer

class Paraboloid(Component):
    """Paraboloid component with the rescaling baked into the model itself.

    The unscaled function is f(x, y) = (x-3)^2 + x*y + (y+4)^2 - 3; this
    version substitutes x -> 1000*x and y -> 0.01*y inside the model, in
    addition to the driver-level scalers set up in the script below.
    """

    def __init__(self):
        super(Paraboloid, self).__init__()

        # Two scalar design inputs and one scalar output, defaulting to zero.
        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluate the internally rescaled paraboloid into unknowns['f_xy']."""

        x = params['x']
        y = params['y']

        # Original unscaled form, kept for reference:
        #unknowns['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
        unknowns['f_xy'] = (1000.*x-3.)**2 + (1000.*x)*(0.01*y) + (0.01*y+4.)**2 - 3.

    def linearize(self, params, unknowns, resids):
        """Return the analytic Jacobian of f_xy w.r.t. x and y."""
        x = params['x']
        y = params['y']
        J = {}

        # Unscaled derivatives, kept for reference:
        #J['f_xy', 'x'] = 2.0*x - 6.0 + y
        #J['f_xy', 'y'] = 2.0*y + 8.0 + x
        # Chain-ruled derivatives of the rescaled expression above; they do
        # match the rescaled solve_nonlinear.
        J['f_xy', 'x'] = 2000000.0*x - 6000.0 + 10.0*y
        J['f_xy', 'y'] = 0.0002*y + 0.08 + 10.0*x

        return J

if __name__ == "__main__":

    # Standard OpenMDAO 1.x setup: a root Group with two independent
    # variables wired into the Paraboloid component.
    top = Problem()

    root = top.root = Group()

    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    # Gradient-based driver: scipy's SLSQP.
    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    # NOTE(review): these scaler values follow the OpenMDAO 0.x convention.
    # In 1.x the driver applies driver_value = (model_value + adder)*scaler,
    # i.e. the opposite direction, so these are inverted relative to the
    # model's internal rescaling -- this is the source of the bad result.
    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=0.001)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))

当我在我的系统上运行它时,它会给出:

2.7.11 |Anaconda 2.5.0 (64-bit)| (default, Jan 29 2016, 14:26:21) [MSC v.1500 64 bit (AMD64)]
Python Type "help", "copyright", "credits" or "license" for more information.
[evaluate paraboloid_optimize_scaled.py]
##############################################
Setup: Checking for potential issues...

No recorders have been specified, so no data will be saved.

Setup: Check complete.
##############################################

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [ 8981902.27846645]
            Iterations: 1
            Function evaluations: 12
            Gradient evaluations: 1
Optimization Complete
-----------------------------------


Minimum of 8981902.278466 found at (3.000000, -4.000000)

我错过了什么吗?

2 个答案:

答案 0 :(得分:1)

OpenMDAO 1.x 中缩放器(scaler)的定义方向与 0.x 中正好相反。在 1.x 中使用如下缩放关系:

driver_value = (model_value + adder)*scaler

因此,与旧教程相比,您需要把缩放器的取值反过来。此外还有一个次要问题:解析导数(analytic derivatives)中的错误,也在下面的代码中一并修正了。

from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer

class Paraboloid(Component):
    """Paraboloid with x -> 1000*x, y -> 0.01*y rescaling inside the model.

    Unscaled function: f(x, y) = (x-3)^2 + x*y + (y+4)^2 - 3.
    """

    def __init__(self):
        super(Paraboloid, self).__init__()

        # Scalar inputs and output, all defaulting to zero.
        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluate the internally rescaled paraboloid."""

        x = params['x']
        y = params['y']

        # Unscaled form, kept for reference:
        #unknowns['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
        unknowns['f_xy'] = (1000.*x-3.)**2 + (1000.*x)*(0.01*y) + (0.01*y+4.)**2 - 3.

    def linearize(self, params, unknowns, resids):
        """Analytic Jacobian of f_xy w.r.t. x and y (chain-ruled for the
        rescaled expression above)."""
        x = params['x']
        y = params['y']
        J = {}

        # Unscaled derivatives, kept for reference:
        #J['f_xy', 'x'] = 2.0*x - 6.0 + y
        #J['f_xy', 'y'] = 2.0*y + 8.0 + x
        J['f_xy', 'x'] = 2000000.0*x - 6000.0 + 10.0*y
        J['f_xy', 'y'] = 0.0002*y + 0.08 + 10.0*x

        return J

if __name__ == "__main__":

    top = Problem()

    root = top.root = Group()
    # Finite-difference the whole model; presumably this bypasses the
    # component's analytic linearize() entirely -- confirm against the
    # OpenMDAO 1.x fd_options docs.
    root.fd_options['force_fd'] = True

    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    # Scalers flipped relative to the question's script, matching the 1.x
    # convention driver_value = (model_value + adder)*scaler so they cancel
    # the model's internal rescaling.
    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000,scaler=.001)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))

给予:

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [-27.333333]
            Iterations: 3
            Function evaluations: 6
            Gradient evaluations: 3
Optimization Complete
-----------------------------------


Minimum of -27.333333 found at (0.006666, -733.299996)

答案 1 :(得分:0)

我无法直接复现您的问题。与您引用的 openmdao 0.x tutorial 相比,您的 solve_nonlinear 和 linearize 方法里有一些奇怪的缩放。但当我把它们清理干净后,对于合理的缩放器取值——甚至对于一些不合理的取值(您选的那些有点极端)——我都得到了正确的答案。当您使用 add_desvar 的 scaler/adder 时,根本不需要修改模型本身:这些值只是改变优化器所看到的数值以帮助缩放,在传给模型之前会被正确地转换回未缩放的值。

from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer

class Paraboloid(Component):
    """Evaluates f(x, y) = (x-3)^2 + x*y + (y+4)^2 - 3 with analytic derivatives."""

    def __init__(self):
        super(Paraboloid, self).__init__()

        # Two scalar inputs and one scalar output, all defaulting to zero.
        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)
        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Compute the paraboloid value from the current inputs."""
        xv = params['x']
        yv = params['y']
        unknowns['f_xy'] = (xv - 3.0) ** 2 + xv * yv + (yv + 4.0) ** 2 - 3.0

    def linearize(self, params, unknowns, resids):
        """Return the analytic Jacobian of f_xy w.r.t. x and y."""
        xv = params['x']
        yv = params['y']

        return {
            ('f_xy', 'x'): 2.0 * xv - 6.0 + yv,
            ('f_xy', 'y'): 2.0 * yv + 8.0 + xv,
        }

if __name__ == "__main__":

    # Build the model: two independent design variables feeding the
    # unscaled Paraboloid component.
    top = Problem()
    root = top.root = Group()

    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    for source, target in (('p1.x', 'p.x'), ('p2.y', 'p.y')):
        root.connect(source, target)

    # Drive the problem with scipy's SLSQP optimizer.
    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    # Unscaled variant, kept for comparison:
    # top.driver.add_desvar('p1.x', lower=-1000, upper=1000)
    # top.driver.add_desvar('p2.y', lower=-1000, upper=1000)

    # Deliberately extreme scalers; the driver converts back to model
    # space before each evaluation, so the optimum itself is unaffected.
    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=.001)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))

给出:

##############################################
Setup: Checking for potential issues...

No recorders have been specified, so no data will be saved.

Setup: Check complete.
##############################################

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [-27.33333333]
            Iterations: 12
            Function evaluations: 15
            Gradient evaluations: 12
Optimization Complete
-----------------------------------