I want to scale the design variables in my problem. I have added the scaler, but I want to check the derivative to make sure it is doing what I intend. Is there a way to check the scaled derivative? I have tried check_total_derivatives(), but the derivative it reports is exactly the same regardless of what value I use for the scaler:
from openmdao.api import Component, Group, Problem, IndepVarComp, ExecComp
from openmdao.drivers.pyoptsparse_driver import pyOptSparseDriver
class Scaling(Component):
    """Toy component implementing y = 1000*x**2 + 2 with an analytic Jacobian."""

    def __init__(self):
        super(Scaling, self).__init__()
        self.add_param('x', shape=1)
        self.add_output('y', shape=1)

    def solve_nonlinear(self, params, unknowns, resids):
        # Evaluate the model: y = 1000*x^2 + 2.
        x = params['x']
        unknowns['y'] = 1000. * x ** 2 + 2

    def linearize(self, params, unknowns, resids):
        # Analytic derivative: dy/dx = 2000*x.
        return {('y', 'x'): 2000. * params['x']}
class ScalingGroup(Group):
    """Group wiring an independent variable 'x' into the Scaling component."""

    def __init__(self):
        super(ScalingGroup, self).__init__()
        # Promote everything so 'x' and 'y' are addressable at the group level.
        self.add('x', IndepVarComp('x', 0.0), promotes=['*'])
        self.add('g', Scaling(), promotes=['*'])
# Build and run the problem: one design variable 'x' driving y = 1000*x**2 + 2.
p = Problem()
p.root = ScalingGroup()
# Optimizer driver left disabled; the default driver is used here.
# p.driver = pyOptSparseDriver()
# p.driver.options['optimizer'] = 'SNOPT'
# NOTE(review): 'scaler' changes what the *driver* sees through its gradient
# call, not the model's own totals — which is why the check below is unchanged.
p.driver.add_desvar('x', lower=0.005, upper=100., scaler=1000)
p.driver.add_objective('y')
p.setup()
p['x'] = 3.
p.run()
# check_total_derivatives() reports the unscaled model derivatives, so its
# output does not reflect the desvar scaler.
total = p.check_total_derivatives()
# Derivative is the same regardless of what the scaler is.
Another way to see exactly what the optimizer receives is to mimic the driver's own call to calc_gradient. This is not necessarily easy to figure out, so I thought I would paste it here for reference.
# Mimic the driver's internal gradient call: passing dv_scale/cn_scale applies
# the desvar and constraint/objective conversions, so this prints the gradient
# exactly as the optimizer sees it (i.e. with the scaler applied).
print p.calc_gradient(list(p.driver.get_desvars().keys()),
list(p.driver.get_objectives().keys()) + list(p.driver.get_constraints().keys()),
dv_scale=p.driver.dv_conversions,
cn_scale=p.driver.fn_conversions)