需要缩放的抛物面优化

Paraboloid optimization requiring scaling

我想使用缩放器,于是用 OpenMDAO 1.x 测试了您在 OpenMDAO 0.x 文档中的 Paraboloid 示例,但无论是否使用缩放器,我都会得到奇怪的结果。这是代码:

from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer

class Paraboloid(Component):
    """Paraboloid objective with design-variable scaling baked into the model.

    NOTE(review): the 1000./0.01 factors inside solve_nonlinear duplicate
    what the driver's scaler/adder already does -- presumably carried over
    from the 0.x tutorial; see the discussion below.
    """

    def __init__(self):
        super(Paraboloid, self).__init__()

        # Two scalar design-variable inputs, defaulted to zero.
        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        # Scalar objective output.
        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluate the paraboloid on internally rescaled inputs."""

        x = params['x']
        y = params['y']

        # Original unscaled form kept for reference:
        #unknowns['f_xy'] = (x-3.0)**2 + x*y + (y+4.0)**2 - 3.0
        # Scaled form: x is multiplied by 1000 and y by 0.01 before evaluation.
        unknowns['f_xy'] = (1000.*x-3.)**2 + (1000.*x)*(0.01*y) + (0.01*y+4.)**2 - 3.

    def linearize(self, params, unknowns, resids):
        """ Jacobian for our paraboloid.

        Returns d(f_xy)/dx and d(f_xy)/dy of the scaled expression used in
        solve_nonlinear (the commented lines are the unscaled derivatives).
        """
        x = params['x']
        y = params['y']
        J = {}

        #J['f_xy', 'x'] = 2.0*x - 6.0 + y
        #J['f_xy', 'y'] = 2.0*y + 8.0 + x
        # d/dx[(1000x-3)^2 + (1000x)(0.01y)] = 2e6*x - 6000 + 10*y
        J['f_xy', 'x'] = 2000000.0*x - 6000.0 + 10.0*y
        # d/dy[(1000x)(0.01y) + (0.01y+4)^2] = 10*x + 2e-4*y + 0.08
        J['f_xy', 'y'] = 0.0002*y + 0.08 + 10.0*x

        return J

if __name__ == "__main__":

    top = Problem()

    root = top.root = Group()

    # Independent variables feeding the Paraboloid component.
    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    top.driver = ScipyOptimizer()
    top.driver.options['optimizer'] = 'SLSQP'

    # NOTE(review): OpenMDAO 1.x uses driver_value = (model_value + adder)*scaler,
    # the inverse of the 0.x convention, so these scalers appear reversed --
    # x should use 1000. and y should use 0.001 (see answer below).
    top.driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=0.001)
    top.driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=1000.)
    top.driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))

当我在我的系统上运行它时,它给出:

2.7.11 |Anaconda 2.5.0 (64-bit)| (default, Jan 29 2016, 14:26:21) [MSC v.1500 64 bit (AMD64)]
Python Type "help", "copyright", "credits" or "license" for more information.
[evaluate paraboloid_optimize_scaled.py]
##############################################
Setup: Checking for potential issues...

No recorders have been specified, so no data will be saved.

Setup: Check complete.
##############################################

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [ 8981902.27846645]
            Iterations: 1
            Function evaluations: 12
            Gradient evaluations: 1
Optimization Complete
-----------------------------------


Minimum of 8981902.278466 found at (3.000000, -4.000000)

我是不是漏掉了什么?

我无法直接复制您的问题。与您引用的 openmdao 0.x tutorial 相比,您的 solve_nonlinear 和 linearize 方法中有一些奇怪的缩放比例。但是当我清理它们时,我得到了正确的答案,对于合理的缩放器值,甚至对于一些不合理的值(你选择的值有点极端)。当您使用 add_desvar 中的 scaler/adder 时,您根本不需要修改您的模型。这些值只是修改优化器看到的有助于缩放的值,但在传递给您的模型之前会适当地转换回未缩放的值。

from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer

class Paraboloid(Component):
    """Evaluates f(x, y) = (x - 3)^2 + x*y + (y + 4)^2 - 3."""

    def __init__(self):
        super(Paraboloid, self).__init__()

        # Scalar design-variable inputs, defaulted to zero.
        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        # Scalar objective output.
        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Compute the paraboloid value from the current inputs."""
        x_val = params['x']
        y_val = params['y']
        unknowns['f_xy'] = (x_val - 3.0)**2 + x_val*y_val + (y_val + 4.0)**2 - 3.0

    def linearize(self, params, unknowns, resids):
        """Return the analytic Jacobian of f_xy with respect to x and y."""
        x_val = params['x']
        y_val = params['y']

        return {
            ('f_xy', 'x'): 2.0*x_val - 6.0 + y_val,
            ('f_xy', 'y'): 2.0*y_val + 8.0 + x_val,
        }

if __name__ == "__main__":

    top = Problem()
    root = top.root = Group()

    # Independent design variables feeding the paraboloid.
    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    # Drive the model with SciPy's SLSQP optimizer.
    driver = top.driver = ScipyOptimizer()
    driver.options['optimizer'] = 'SLSQP'

    # Unscaled variants (these converge as well):
    # driver.add_desvar('p1.x', lower=-1000, upper=1000)
    # driver.add_desvar('p2.y', lower=-1000, upper=1000)

    driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=.001)
    driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=1000.)
    driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))

给出:

##############################################
Setup: Checking for potential issues...

No recorders have been specified, so no data will be saved.

Setup: Check complete.
##############################################

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [-27.33333333]
            Iterations: 12
            Function evaluations: 15
            Gradient evaluations: 12
Optimization Complete
-----------------------------------

缩放器在 OpenMDAO 1.x 中的定义与在 0.x 中的定义相反。在 1.x 中使用了以下比例关系。

driver_value = (model_value + adder)*scaler

因此,与旧教程相比,您需要把缩放器的值反过来。此外还有一个次要问题:您的解析导数中存在错误,下面的代码也一并做了更正。

from __future__ import print_function
import sys

from openmdao.api import IndepVarComp, Component, Problem, Group, ScipyOptimizer

class Paraboloid(Component):
    """Paraboloid with the variable scaling baked into the model itself.

    The inputs are rescaled internally (x by 1000, y by 0.01) before the
    standard paraboloid expression is evaluated.
    """

    def __init__(self):
        super(Paraboloid, self).__init__()

        # Scalar design-variable inputs.
        self.add_param('x', val=0.0)
        self.add_param('y', val=0.0)

        # Scalar objective output.
        self.add_output('f_xy', val=0.0)

    def solve_nonlinear(self, params, unknowns, resids):
        """Evaluate the internally-scaled paraboloid."""
        xv = params['x']
        yv = params['y']

        # Unscaled reference: (x-3)**2 + x*y + (y+4)**2 - 3
        unknowns['f_xy'] = (1000.*xv-3.)**2 + (1000.*xv)*(0.01*yv) + (0.01*yv+4.)**2 - 3.

    def linearize(self, params, unknowns, resids):
        """Analytic Jacobian of f_xy with respect to the scaled inputs."""
        xv = params['x']
        yv = params['y']
        jac = {}

        # d/dx: 2*1000*(1000*x - 3) + 1000*(0.01*y)
        jac['f_xy', 'x'] = 2000000.0*xv - 6000.0 + 10.0*yv
        # d/dy: 0.01*(1000*x) + 2*0.01*(0.01*y + 4)
        jac['f_xy', 'y'] = 0.0002*yv + 0.08 + 10.0*xv

        return jac

if __name__ == "__main__":

    top = Problem()

    root = top.root = Group()
    # Finite-difference the whole model rather than using analytic derivatives.
    root.fd_options['force_fd'] = True

    # Independent design variables feeding the paraboloid.
    root.add('p1', IndepVarComp('x', 3.0))
    root.add('p2', IndepVarComp('y', -4.0))
    root.add('p', Paraboloid())

    root.connect('p1.x', 'p.x')
    root.connect('p2.y', 'p.y')

    driver = top.driver = ScipyOptimizer()
    driver.options['optimizer'] = 'SLSQP'

    # 1.x convention: driver_value = (model_value + adder)*scaler, so these
    # scalers are the inverse of the 0.x tutorial values.
    driver.add_desvar('p1.x', lower=-1000, upper=1000, scaler=1000.)
    driver.add_desvar('p2.y', lower=-1000, upper=1000, scaler=.001)
    driver.add_objective('p.f_xy')

    top.setup()
    top.run()

    print('\n')
    print('Minimum of %f found at (%f, %f)' % (top['p.f_xy'], top['p.x'], top['p.y']))

给出:

Optimization terminated successfully.    (Exit mode 0)
            Current function value: [-27.333333]
            Iterations: 3
            Function evaluations: 6
            Gradient evaluations: 3
Optimization Complete
-----------------------------------


Minimum of -27.333333 found at (0.006666, -733.299996)