
I'm running an optimization on a metamodel with OpenMDAO 1.4. Following the tutorials I've set up a problem that I can't get to work: I think the issue comes from a misuse of setup() and run(): I haven't managed to train my metamodel and optimize it in the same run (maybe I should use two different Groups to do that..). Here is my code:

from __future__ import print_function


from openmdao.api import Component, Group, MetaModel ,IndepVarComp, ExecComp, NLGaussSeidel, KrigingSurrogate, FloatKrigingSurrogate

import numpy as np


class KrigMM(Group):
    ''' FloatKriging gives responses as floats '''

    def __init__(self):
        super(KrigMM, self).__init__()

        # Create meta_model for f_x as the response

        pmm = self.add("pmm", MetaModel())
        pmm.add_param('x', val=0.)

        pmm.add_output('f_x:float', val=0., surrogate=FloatKrigingSurrogate())
        self.add('p1', IndepVarComp('x', 0.0))

        self.connect('p1.x','pmm.x')

        # pmm.add_output('f_xy:norm_dist', val=(0., 0.), surrogate=KrigingSurrogate())


if __name__ == '__main__':
    # Setup and run the model.

    from openmdao.core.problem import Problem
    from openmdao.drivers.scipy_optimizer import ScipyOptimizer
    from openmdao.core.driver import Driver

    import numpy as np

    #prob = Problem(root=ParaboloidProblem())
###########################################################    

    prob = Problem(root=Group())
    prob.root.add('meta',KrigMM(), promotes=['*'])

    prob.driver = ScipyOptimizer()
    prob.driver.options['optimizer'] = 'SLSQP'

    prob.driver.add_desvar('p1.x', lower=0, upper=10)

    prob.driver.add_objective('pmm.f_x:float')
    prob.setup()
    prob['pmm.train:x'] = np.linspace(0,10,20)
    prob['pmm.train:f_x:float'] = np.sin(prob['pmm.train:x'])      
    prob.run()

    print('\n')
    print('Minimum of %f found for meta at %f' % (prob['pmm.f_x:float'],prob['pmm.x'])) #predicted value 

1 Answer


I believe your problem is actually working fine. It's just that the sine wave you've chosen has a local optimum right at 0.0 (with the lower bound of 0 on the design variable, there is no feasible descent direction there), which happens to be your initial condition.
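As a quick check, independent of OpenMDAO (a minimal sketch using only NumPy): with x bounded to [0, 10], the only feasible move away from x = 0 increases sin(x), so SLSQP has no descent direction and stops where it started.

import numpy as np

# With the lower bound at 0, the only feasible direction from x = 0 is x > 0,
# and sin(x) increases there, so x = 0 is a bound-constrained local optimum.
for x in (0.0, 0.1, 0.5, 1.0):
    print('sin(%.1f) = %.4f' % (x, np.sin(x)))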

If I change the initial condition as follows:

prob.setup()
prob['p1.x'] = 5
prob['pmm.train:x'] = np.linspace(0,10,20)
prob['pmm.train:f_x:float'] = np.sin(prob['pmm.train:x'])      
prob.run()

I get:

Optimization terminated successfully.    (Exit mode 0)
        Current function value: [-1.00004544]
        Iterations: 3
        Function evaluations: 3
        Gradient evaluations: 3
Optimization Complete
-----------------------------------


Minimum of -1.000045 found for meta at 4.710483
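
For reference, the analytic minimum of sin(x) on [0, 10] is at x = 3π/2 ≈ 4.712 with a value of -1, so the Kriging surrogate trained on 20 points recovers it closely. A small check you could append after prob.run() (a sketch that only reuses names already defined in the script above):

# Compare the optimum found on the Kriging surrogate with the analytic answer.
x_opt = prob['pmm.x']                      # optimizer's best point on the surrogate
print('surrogate minimum : %f at x = %f' % (prob['pmm.f_x:float'], x_opt))
print('true sin(x) there : %f' % np.sin(x_opt))
print('analytic optimum  : %f at x = %f' % (-1.0, 1.5 * np.pi))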
Answered 2016-01-06T17:50:19.190