Commit 2e985b53 authored by Mads M. Pedersen

add evaluate_gradients function in topfarm and corresponding test

parent ac834fee
Merge request !22: Evaluate gradient function
@@ -4,46 +4,66 @@ Created on 17. maj 2018
 @author: mmpe
 '''
 from topfarm import TopFarm
-import unittest
 import numpy as np
-from topfarm.cost_models.dummy import DummyCost
 from topfarm.cost_models.cost_model_wrappers import CostModelComponent
+import pytest

-class TestTopFarm(unittest.TestCase):
-    def setUp(self):
-        unittest.TestCase.setUp(self)
-        self.boundary = [(0, 0), (6, 0), (6, -10), (0, -10)]  # turbine boundaries
-        self.initial = [[6, 0], [6, -8], [1, 1], [-1, -8]]  # initial turbine layouts
-        self.optimal_with_constraints = np.array([[2.5, -3], [6, -7], [4.5, -3], [3, -7]])  # optimal turbine layout
-        self.min_spacing = 2  # min distance between turbines
-        self.optimal = np.array([[3, -3], [7, -7], [4, -3], [3, -7]])  # desired turbine layouts
+initial = [[6, 0], [6, -8], [1, 1], [-1, -8]]  # initial turbine layouts
+optimal = np.array([[3, -3], [7, -7], [4, -3], [3, -7]])  # desired turbine layouts

-    def cost(self, pos):
-        x, y = pos.T
-        opt_x, opt_y = self.optimal.T
-        return np.sum((x - opt_x)**2 + (y - opt_y)**2)
-
-    def gradients(self, pos):
-        x, y = pos.T
-        return (2 * x - 2 * self.optimal[:, 0]), (2 * y - 2 * self.optimal[:, 1])
+@pytest.fixture
+def topfarm_generator():
+    def _topfarm_obj(gradients, **kwargs):
+        boundary = [(0, 0), (6, 0), (6, -10), (0, -10)]  # turbine boundaries
+        min_spacing = 2  # min distance between turbines
+        return TopFarm(initial, CostModelComponent(4, cost, gradients), min_spacing, boundary=boundary, **kwargs)
+    return _topfarm_obj

-    def wrong_gradients(self, pos):
-        x, y = pos.T
-        return (2 * x - 2 * self.optimal[:, 0] + 1), (2 * y - 2 * self.optimal[:, 1])
-
-    def testTopFarm_default_plotcomp(self):
-        tf = TopFarm(self.initial, CostModelComponent(4, self.cost, self.gradients), self.min_spacing, boundary=self.boundary, plot_comp='default')
+def cost(pos):
+    x, y = pos.T
+    opt_x, opt_y = optimal.T
+    return np.sum((x - opt_x)**2 + (y - opt_y)**2)

-    def testTopFarm_check_gradients(self):
-        tf = TopFarm(self.initial, CostModelComponent(4, self.cost, self.gradients), self.min_spacing, boundary=self.boundary)
-        tf.check(True)
-        tf = TopFarm(self.initial, CostModelComponent(4, self.cost, self.wrong_gradients), self.min_spacing, boundary=self.boundary)
-        self.assertRaisesRegex(Warning, "Mismatch between finite difference derivative of 'cost' wrt. 'turbineX' and derivative computed in 'cost_comp' is", tf.check)
+def gradients(pos):
+    x, y = pos.T
+    return (2 * x - 2 * optimal[:, 0]), (2 * y - 2 * optimal[:, 1])

-if __name__ == "__main__":
-    #import sys;sys.argv = ['', 'Test.testName']
-    unittest.main()
+def wrong_gradients(pos):
+    x, y = pos.T
+    return (2 * x - 2 * optimal[:, 0] + 1), (2 * y - 2 * optimal[:, 1])

+def testTopFarm_default_plotcomp(topfarm_generator):
+    """Check that setting plot_comp to 'default' does not fail"""
+    topfarm_generator(gradients, plot_comp='default')

+def testTopFarm_check_gradients(topfarm_generator):
+    # Check that the gradient check does not raise an exception for correct gradients
+    tf = topfarm_generator(gradients)
+    tf.check(True)
+    # Check that the gradient check raises an exception for incorrect gradients
+    tf = topfarm_generator(wrong_gradients)
+    with pytest.raises(Warning, match="Mismatch between finite difference derivative of 'cost' wrt. 'turbineX' and derivative computed in 'cost_comp' is"):
+        tf.check()

+def testTopFarm_evaluate(topfarm_generator):
+    # Check that the evaluate function does not fail
+    tf = topfarm_generator(gradients)
+    cost, pos = tf.evaluate()
+    assert cost == 62
+    np.testing.assert_array_equal(pos, initial)

+def testTopFarm_evaluate_gradients(topfarm_generator):
+    # Check that evaluate_gradients does not fail
+    tf = topfarm_generator(gradients)
+    np.testing.assert_array_equal(tf.evaluate_gradients()['cost']['turbineX'], [[-6., -14., -8., -6.]])
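For reference, the expected cost of 62 asserted in testTopFarm_evaluate is simply the summed squared distance between the initial and the desired layouts defined above; a minimal sketch (not part of the commit) that reproduces the number:

import numpy as np

initial = np.array([[6, 0], [6, -8], [1, 1], [-1, -8]])   # initial layout from the test module
optimal = np.array([[3, -3], [7, -7], [4, -3], [3, -7]])  # desired layout from the test module
# per-turbine squared distances: 18 + 2 + 25 + 17 = 62
print(np.sum((initial - optimal)**2))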
@@ -33,9 +33,7 @@ class Test(unittest.TestCase):  # unittest version
         # when
         tf = TopFarm(initial, DummyCost(desired), min_spacing,
                      boundary=boundary)
-        with warnings.catch_warnings():  # suppress OpenMDAO/SLSQP warnings
-            warnings.simplefilter('ignore')
-            tf.optimize()
+        tf.optimize()
         tb_pos = tf.turbine_positions
         # then
......
 import time
-from openmdao.api import Problem, ScipyOptimizeDriver, IndepVarComp
 import numpy as np
+import warnings
 from wetb.utils.timing import print_time
+with warnings.catch_warnings():
+    warnings.simplefilter('ignore', FutureWarning)
+    from openmdao.api import Problem, ScipyOptimizeDriver, IndepVarComp
 from topfarm.constraint_components.boundary_component import BoundaryComp,\
     PolygonBoundaryComp
 from topfarm.constraint_components.spacing_component import SpacingComp
 from topfarm.plotting import PlotComp
-import warnings


 class TopFarm(object):
@@ -74,12 +75,16 @@ class TopFarm(object):
     def evaluate(self):
         t = time.time()
-        with warnings.catch_warnings():  # suppress OpenMDAO/SLSQP warnings
-            warnings.filterwarnings('ignore', "Inefficient choice of derivative mode. You chose 'rev' for a problem with")
-            self.problem.run_model()
+        self.problem.run_model()
         print("Evaluated in\t%.3fs" % (time.time() - t))
         return self.get_cost(), self.turbine_positions

+    def evaluate_gradients(self):
+        t = time.time()
+        res = self.problem.compute_totals(['cost'], wrt=['turbineX', 'turbineY'], return_format='dict')
+        print("Gradients evaluated in\t%.3fs" % (time.time() - t))
+        return res
+
     def optimize(self):
         t = time.time()
         self.problem.run_driver()
......
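For context (not part of the diff), the new evaluate/evaluate_gradients pair can be exercised much as in the tests above; a minimal sketch, reusing the cost, gradients, initial and boundary definitions from the test module:

tf = TopFarm(initial, CostModelComponent(4, cost, gradients), 2,
             boundary=[(0, 0), (6, 0), (6, -10), (0, -10)])
c, pos = tf.evaluate()            # runs the model once; returns (cost, turbine_positions)
totals = tf.evaluate_gradients()  # nested dict from compute_totals: {of: {wrt: jacobian}}
dcost_dx = totals['cost']['turbineX']
dcost_dy = totals['cost']['turbineY']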