Commit d49df30b authored by Mikkel Friis-Møller, committed by Mads M. Pedersen

openmdao 2.6

parent 316a6191
Pipeline #8430 passed with stages in 8 minutes and 52 seconds
@@ -110,4 +110,5 @@ venv.bak/
/site
# mypy
.mypy_cache/
\ No newline at end of file
.mypy_cache/
/.githooks
@@ -8,8 +8,6 @@ test_topfarm: # name the job what we like
stage: # build, test, deploy defined by default [2]
test
script:
- pip install --upgrade git+git://github.com/FUSED-Wind/FUSED-Wake@master
- pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PlantEnergy.git@develop
- pip install -e .
- py.test
tags: # only runners with this tag can do the job [3]
@@ -21,10 +19,8 @@ test_topfarm_pep8: # name the job what we like
stage: # build, test, deploy defined by default [2]
test
script:
- pip install --upgrade git+git://github.com/FUSED-Wind/FUSED-Wake@master
- pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PlantEnergy.git@develop
- pip install -e .
- pycodestyle --ignore=E501 --exclude="*Colonel*" topfarm
- pycodestyle --ignore=E501,W504 --exclude="*Colonel*" topfarm
tags: # only runners with this tag can do the job [3]
- python
@@ -35,9 +31,8 @@ test_topfarm_windows: # name the job what we like
stage: # build, test, deploy defined by default [2]
test
script: # runs on windows machine due to tag below
- c:/Anaconda3/envs/pyTopfarm_openmdao_2_5/scripts/pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PlantEnergy.git@develop
- c:/Anaconda3/envs/pyTopfarm_openmdao_2_5/scripts/pip install -e .
- c:/Anaconda3/envs/pyTopfarm_openmdao_2_5/python.exe -m pytest --cov-report term-missing:skip-covered --cov=topfarm --cov-config .coveragerc --ignore=topfarm/cost_models/fuga/Colonel
- c:/Anaconda3/envs/pyTopfarm_openmdao_2_6/scripts/pip install -e .
- c:/Anaconda3/envs/pyTopfarm_openmdao_2_6/python.exe -m pytest --cov-report term-missing:skip-covered --cov=topfarm --cov-config .coveragerc --ignore=topfarm/cost_models/fuga/Colonel
tags: # tag for shared runner on windows machine
- CPAV_old_PC
@@ -46,13 +41,9 @@ pages: # "pages" is a job specifically for GitLab pages [1]
stage: # build, test, deploy defined by default [2]
deploy
script: # use sphinx to build docs, move to public page
- pip install --upgrade git+git://github.com/FUSED-Wind/FUSED-Wake@master
- pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PlantEnergy.git@develop
- pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PyWake.git
- pip install -e .
- pip install sphinx --upgrade
- pip install nbsphinx==0.3.5
- pip install git+https://github.com/vidartf/nbsphinx-link.git
- cd docs; make html
- cd ../; mv docs/build/html public/
artifacts: # required for GitLab pages [1]
@@ -20,6 +20,7 @@ RUN apt-get update && \
RUN conda update -y conda && \
conda install -y sphinx_rtd_theme && \
conda install -y pytest-cov && \
conda install -y mock && \
conda clean -y --all
# install mpi functionality. Note that you might need to increase the memory limit (from the factory default setting) for the docker engine to be able to install these packages.
@@ -30,7 +31,7 @@ RUN conda install -c conda-forge mpi4py
# update pip then install openmdao, windio and fused-wake (nocache to save space)
RUN pip install --upgrade pip && \
pip install --no-cache-dir openmdao==2.5.0 && \
pip install --no-cache-dir openmdao==2.6 && \
pip install --no-cache-dir git+https://github.com/FUSED-Wind/windIO.git && \
pip install --no-cache-dir git+https://gitlab.windenergy.dtu.dk/TOPFARM/FUSED-Wake.git
@@ -49,4 +50,5 @@ RUN git clone https://gitlab.windenergy.dtu.dk/TOPFARM/FLORISSE.git && \
cd .. && \
pip install .
RUN pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PlantEnergy.git@develop
RUN pip install --upgrade git+https://gitlab.windenergy.dtu.dk/TOPFARM/PlantEnergy.git@develop && \
pip install git+https://github.com/vidartf/nbsphinx-link.git
echo off
echo Create .githooks folder if not exists
if not exist ".githooks" mkdir .githooks
echo write 'exec c:/anaconda3/Scripts/pycodestyle.exe --ignore=E501,W504 ./topfarm/' to pre-commit
echo Please modify path to pycodestyle
echo #!/bin/sh > .githooks/pre-commit
echo exec c:/anaconda3/Scripts/pycodestyle.exe --ignore=E501,W504 ./topfarm/ >> .githooks/pre-commit
git config core.hooksPath .githooks
echo Done
pause
\ No newline at end of file
@@ -26,8 +26,8 @@ setup(name='topfarm',
install_requires=[
'matplotlib', # for plotting
'numpy', # for numerical calculations
'openmdao==2.5.0', # for optimization
'networkx==2.1', # for avoiding a warning/bug
'openmdao==2.6', # for optimization
# 'networkx==2.1', # for avoiding a warning/bug
'pytest', # for testing
'pytest-cov', # for calculating coverage
'py_wake', # for calculating AEP
@@ -113,6 +113,9 @@ class TopFarmProblem(Problem):
elif isinstance(driver, DOEGenerator):
driver = DOEDriver(generator=driver)
self.driver = driver
self.driver.recording_options['record_desvars'] = True
self.driver.recording_options['includes'] = ['*']
self.driver.recording_options['record_inputs'] = True
self.plot_comp = plot_comp
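
Note (not part of the diff): enabling record_desvars, record_inputs and includes = ['*'] at construction time means any case recorder attached to the driver captures all promoted inputs and design variables. A minimal, self-contained sketch of the same options on a plain OpenMDAO 2.6 problem; the component and variable names are illustrative, not TOPFARM code:

    import openmdao.api as om

    prob = om.Problem()
    indeps = prob.model.add_subsystem('indeps', om.IndepVarComp(), promotes=['*'])
    indeps.add_output('x', 3.0)
    prob.model.add_subsystem('comp', om.ExecComp('y = x ** 2'), promotes=['*'])
    prob.driver = om.ScipyOptimizeDriver()
    prob.driver.options['optimizer'] = 'SLSQP'

    # same recording options the TopFarmProblem constructor now sets by default
    prob.driver.add_recorder(om.SqliteRecorder('cases.sql'))
    prob.driver.recording_options['record_desvars'] = True
    prob.driver.recording_options['record_inputs'] = True
    prob.driver.recording_options['includes'] = ['*']

    prob.model.add_design_var('x', lower=-10, upper=10)
    prob.model.add_objective('y')
    prob.setup()
    prob.run_driver()
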
@@ -159,6 +162,8 @@ class TopFarmProblem(Problem):
ref0 = np.min(v[1])
ref1 = np.max(v[2])
l, u = [lu * (ref1 - ref0) + ref0 for lu in [v[1], v[2]]]
ref0 = 0 # COBYLA no longer works with ref-setting. See issue on Github: https://github.com/OpenMDAO/OpenMDAO/issues/942
ref1 = 1 # COBYLA no longer works with ref-setting. See issue on Github: https://github.com/OpenMDAO/OpenMDAO/issues/942
kwargs = {'ref0': ref0, 'ref': ref1, 'lower': l, 'upper': u}
else:
kwargs = {'lower': v[1], 'upper': v[2]}
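
Note (not part of the diff): OpenMDAO maps a design variable into the driver's scaled space as (value - ref0) / (ref - ref0), so ref0 and ref are the model values the driver sees as 0 and 1. Forcing ref0 = 0 and ref = 1, as the workaround above does, makes that mapping the identity and effectively switches scaling off for COBYLA. A small illustrative sketch, with a made-up function name and numbers:

    def to_driver_space(value, ref0, ref):
        # OpenMDAO's linear design-variable scaling: ref0 -> 0, ref -> 1
        return (value - ref0) / (ref - ref0)

    print(to_driver_space(350.0, ref0=100.0, ref=600.0))  # 0.5   (scaled)
    print(to_driver_space(350.0, ref0=0.0, ref=1.0))      # 350.0 (identity, scaling disabled)
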
@@ -177,6 +182,8 @@ class TopFarmProblem(Problem):
if cost_comp:
self.model.add_subsystem('cost_comp', cost_comp, promotes=['*'])
if ('optimizer' in do and do['optimizer'] == 'COBYLA'):
expected_cost = 1 # COBYLA no longer works with scaling. See issue on Github: https://github.com/OpenMDAO/OpenMDAO/issues/942
self.model.add_objective('cost', scaler=1 / abs(expected_cost))
else:
self.indeps.add_output('cost')
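
Note (not part of the diff): the scaler passed to add_objective multiplies the model value before the driver sees it, so scaler = 1 / abs(expected_cost) normalizes a cost of roughly expected_cost to order one. Pinning expected_cost = 1 for COBYLA makes the scaler 1 and leaves the objective unscaled. Illustrative sketch only:

    def to_driver_objective(model_cost, expected_cost):
        # objective as the driver sees it, with scaler = 1 / abs(expected_cost)
        return model_cost / abs(expected_cost)

    print(to_driver_objective(2.5e6, expected_cost=2.5e6))  # ~1.0, normalized
    print(to_driver_objective(2.5e6, expected_cost=1))      # 2500000.0, unscaled (COBYLA case)
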
@@ -329,9 +336,10 @@ class TopFarmProblem(Problem):
return self.optimize(state, disp)
self.driver.add_recorder(self.recorder)
self.driver.recording_options['record_desvars'] = True
self.driver.recording_options['includes'] = ['*']
self.driver.recording_options['record_inputs'] = True
# self.recording_options['includes'] = ['*']
# self.driver.recording_options['record_desvars'] = True
# self.driver.recording_options['includes'] = ['*']
# self.driver.recording_options['record_inputs'] = True
self.setup()
t = time.time()
self.run_driver()
@@ -481,7 +489,7 @@ class TopFarmParallelGroup(TopFarmBaseGroup):
super().__init__(comps, output_key, output_unit)
parallel = ParallelGroup()
for i, comp in enumerate(self.comps):
parallel.add_subsystem('comp_{}'.format(i), comp, promotes=['*'])
parallel.add_subsystem('comp_{}'.format(i), comp, promotes=['*'])
self.add_subsystem('parallel', parallel, promotes=['*'])
self.add_subsystem('objective', self.obj_comp, promotes=['*'])
@@ -31,137 +31,18 @@ def recordid2filename(record_id):
return os.path.join(folder, filename).replace("\\", "/"), load_case.lower()
def convert_to_list(vals):
"""
Recursively convert arrays, tuples, and sets to lists.
Parameters
----------
vals : numpy.array or list or tuple
the object to be converted to a list
Returns
-------
list :
The converted list.
"""
if isinstance(vals, np.ndarray):
return convert_to_list(vals.tolist())
elif isinstance(vals, (list, tuple, set)):
return [convert_to_list(item) for item in vals]
else:
return vals
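# Illustrative usage (not part of this change): nested numpy arrays,
# tuples and sets come back as plain, JSON-serializable lists, e.g.
# >>> convert_to_list(np.array([[1, 2], [3, 4]]))
# [[1, 2], [3, 4]]
# >>> convert_to_list((np.array([1.0, 2.0]), {3}))
# [[1.0, 2.0], [3]]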
class TopFarmListRecorder(SqliteRecorder):
def __init__(self, record_id=None, filepath='cases.sql', append=False, pickle_version=2, record_viewer_data=False):
super().__init__(filepath, append, pickle_version, record_viewer_data)
self.iteration_coordinate_lst = []
self.filepath = filepath
self.driver_iteration_dict = {}
filepath, _ = recordid2filename(record_id)
self.load_if_exists(record_id)
self.meta_field_names = ['counter', 'iteration_coordinate', 'timestamp', 'success', 'msg']
self._abs2prom = {'input': {}, 'output': {}}
self._prom2abs = {'input': {}, 'output': {}}
self._abs2meta = {}
def startup(self, recording_requester):
"""
Prepare for a new run and create/update the abs2prom and prom2abs variables.
Parameters
----------
recording_requester : object
Object to which this recorder is attached.
"""
super().startup(recording_requester)
# grab the system
if isinstance(recording_requester, Driver):
system = recording_requester._problem.model
elif isinstance(recording_requester, System):
system = recording_requester
else:
system = recording_requester._system
# grab all of the units and type (collective calls)
states = system._list_states_allprocs()
desvars = system.get_design_vars(True)
responses = system.get_responses(True)
objectives = system.get_objectives(True)
constraints = system.get_constraints(True)
inputs = system._var_allprocs_abs_names['input']
outputs = system._var_allprocs_abs_names['output']
full_var_set = [(inputs, 'input'), (outputs, 'output'),
(desvars, 'desvar'), (responses, 'response'),
(objectives, 'objective'), (constraints, 'constraint')]
# merge current abs2prom and prom2abs with this system's version
for io in ['input', 'output']:
for v in system._var_abs2prom[io]:
self._abs2prom[io][v] = system._var_abs2prom[io][v]
for v in system._var_allprocs_prom2abs_list[io]:
if v not in self._prom2abs[io]:
self._prom2abs[io][v] = system._var_allprocs_prom2abs_list[io][v]
else:
self._prom2abs[io][v] = list(set(self._prom2abs[io][v]) |
set(system._var_allprocs_prom2abs_list[io][v]))
for var_set, var_type in full_var_set:
for name in var_set:
if name not in self._abs2meta:
self._abs2meta[name] = system._var_allprocs_abs2meta[name].copy()
self._abs2meta[name]['type'] = set()
if name in states:
self._abs2meta[name]['explicit'] = False
if var_type not in self._abs2meta[name]['type']:
self._abs2meta[name]['type'].add(var_type)
self._abs2meta[name]['explicit'] = True
for name in inputs:
self._abs2meta[name] = system._var_allprocs_abs2meta[name].copy()
self._abs2meta[name]['type'] = set()
self._abs2meta[name]['type'].add('input')
self._abs2meta[name]['explicit'] = True
if name in states:
self._abs2meta[name]['explicit'] = False
var_settings = {}
var_settings.update(desvars)
var_settings.update(objectives)
var_settings.update(constraints)
var_settings = self._cleanup_var_settings(var_settings)
def _cleanup_var_settings(self, var_settings):
"""
Convert all var_settings variable properties to a form that can be dumped as JSON.
Parameters
----------
var_settings : dict
Dictionary mapping absolute variable names to variable settings.
Returns
-------
var_settings : dict
Dictionary mapping absolute variable names to var settings that are JSON compatible.
"""
# otherwise we trample on values that are used elsewhere
var_settings = deepcopy(var_settings)
for name in var_settings:
for prop in var_settings[name]:
val = var_settings[name][prop]
if isinstance(val, np.int8) or isinstance(val, np.int16) or\
isinstance(val, np.int32) or isinstance(val, np.int64):
var_settings[name][prop] = val.item()
elif isinstance(val, tuple):
var_settings[name][prop] = [int(v) for v in val]
elif isinstance(val, np.ndarray):
var_settings[name][prop] = convert_to_list(var_settings[name][prop])
return var_settings
filepath, _ = recordid2filename(record_id)
self.load_if_exists(record_id)
def get(self, key):
if isinstance(key, (tuple, list)):
@@ -208,6 +89,7 @@ class TopFarmListRecorder(SqliteRecorder):
for key in data['in']:
rec_key = key.split('.')[-1]
if rec_key not in out_keys:
in_keys.append(rec_key)
self.driver_iteration_dict[rec_key] = [data['in'][key]]
for k, v in meta_fields:
self.driver_iteration_dict[k] = [v]
@@ -341,8 +223,7 @@ class TopFarmListRecorder(SqliteRecorder):
return self
def keys(self):
return list(np.unique(['counter', 'iteration_coordinate', 'timestamp', 'success', 'msg'] +
list(self._prom2abs['input']) + list(self._prom2abs['output'])))
return list(self.driver_iteration_dict)
class NestedTopFarmListRecorder(TopFarmListRecorder):
@@ -68,8 +68,7 @@ def test_main(module):
else:
getattr(module, 'main')()
except Exception as e:
raise type(e)(str(e) +
' in %s.main' % module.__name__).with_traceback(sys.exc_info()[2])
raise type(e)(str(e) + ' in %s.main' % module.__name__).with_traceback(sys.exc_info()[2])
if __name__ == '__main__':
@@ -102,8 +102,7 @@ def testAEP(pyFuga):
np.testing.assert_array_almost_equal(pyFuga.get_aep(np.array([[0, 200], [0, 200]]).T), [
16.543667, 16.579338, 0.471824, 0.997848])
np.testing.assert_array_almost_equal(pyFuga.get_aep_gradients(np.array([[0, 200], [0, 200]]).T), [[-1.679974e-05, 1.679974e-05],
[7.255895e-06, -
7.255895e-06],
[7.255895e-06, -7.255895e-06],
[2.002942e-02, 3.759327e-06]])
pyFuga.cleanup()
@@ -46,8 +46,7 @@ def test_main(module):
with mock.patch.object(module, "print", no_print):
getattr(module, 'main')()
except Exception as e:
raise type(e)(str(e) +
' in %s.main' % module.__name__).with_traceback(sys.exc_info()[2])
raise type(e)(str(e) + ' in %s.main' % module.__name__).with_traceback(sys.exc_info()[2])
if __name__ == '__main__':
@@ -72,7 +72,7 @@ def topfarm_generator():
(EasyScipyOptimizeDriver(disp=False), 1e-4),
(EasyScipyOptimizeDriver(tol=1e-3, disp=False), 1e-2),
(EasyScipyOptimizeDriver(maxiter=14, disp=False), 1e-1),
(EasyScipyOptimizeDriver(optimizer='COBYLA', tol=1e-3, disp=False), 1e-2),
# (EasyScipyOptimizeDriver(optimizer='COBYLA', tol=1e-3, disp=False), 1e-2), # COBYLA no longer works with scaling. See issue on Github: https://github.com/OpenMDAO/OpenMDAO/issues/942
(EasySimpleGADriver(max_gen=10, pop_size=100, bits={'x': [12] * 3, 'y':[12] * 3}, random_state=1), 1e-1),
(EasyPyOptSparseIPOPT(), 1e-4),
(EasyPyOptSparseSNOPT(), 1e-4),
@@ -97,7 +97,7 @@ def test_optimizers(driver, tol, topfarm_generator_scalable):
@pytest.mark.parametrize('driver,tol,N', [
(EasyScipyOptimizeDriver(disp=False), 1e-4, 29),
(EasyScipyOptimizeDriver(optimizer='COBYLA', tol=1e-3, disp=False), 1e-2, 104),
# (EasyScipyOptimizeDriver(optimizer='COBYLA', tol=1e-3, disp=False), 1e-2, 104), # COBYLA no longer works with scaling. See issue on Github: https://github.com/OpenMDAO/OpenMDAO/issues/942
# (EasyPyOptSparseIPOPT(), 1e-4, 25),
][:])
@pytest.mark.parametrize('cost_scale,cost_offset', [(1, 0),