# -*- coding: utf-8 -*-
"""
Created on Thu Aug 04 09:24:51 2016

@author: tlbl
"""
import numpy as np


class Control(object):

    def torque_controller_tuning(self, r0, lambda_opt, ksi_pole_part,
                                 omega_pole_part, j_dt):
        """THIS IS A WIP
        """
        # TODO: THIS IS STILL A WIP (select_regions() is called without the
        # pitch/omega/power arguments it requires)
        i12, i23, i34 = self.select_regions()
        # Compute K factor for optimal CP tracking in region 1 as
        # K=0.5*rho*A*Cp_opt*R^3/lambda_opt^3
        # HAWCStab2/Computation_LIB/compute_controller_input_mod.f90:83-88
        #   ! Compute K factor for optimal CP tracking in region 1 as
        #   ! K=0.5*rho*A*Cp_opt*R^3/lambda_opt^3
        #   pi_torque_controller%kfactor=0.d0
        #   do i=1,iregion2-iregion1-1
        #       !kfactor=kfactor+0.85d0*kfactor_fit(i)*substructure(3)
        #       pi_torque_controller%kfactor
        #           = pi_torque_controller%kfactor
        #           + kfactor_fit(i)*substructure(3)%opstate(i+iregion1)%r0**3/lambda_opt**3
        #   enddo
        # HAWCStab2/Computation_LIB/compute_controller_input_mod.f90:89
        #   pi_torque_controller%kfactor =
        #       pi_torque_controller%kfactor/max(dfloat(iregion2-iregion1-1), 1.d0)
        Qg = (r0*r0*r0)/(lambda_opt*lambda_opt*lambda_opt)
        # PI generator torque controller in region 2 (constant speed,
        # variable power)
        # HAWCStab2/Computation_LIB/compute_controller_input_mod.f90:104-105
        #   pi_torque_controller%kp = 2.d0*ksi_pole_part*omega_pole_part*j_dt
        #   pi_torque_controller%ki = omega_pole_part**2*j_dt
        pgTorque = 2.0*ksi_pole_part*omega_pole_part*j_dt
        igTorque = omega_pole_part**2*j_dt
        return Qg, pgTorque, igTorque

    def pitch_controller_tuning(self, pitch, I, dQdt, P, Omr, om, csi):
        """
        Compute the gains of the pitch controller of the Basic DTU Wind
        Energy Controller with the pole placement technique implemented
        in HAWCStab2.

        Parameters
        ----------
        pitch: array
            Pitch angle [deg]. The values should only be those of the full
            load region.
        I: array
            Drivetrain inertia [kg*m**2]
        dQdt: array
            Partial derivative of the aerodynamic torque with respect to the
            pitch angle [kNm/deg]. Can be computed with HAWCStab2.
        P: float
            Rated power [kW]. Set to zero in case of constant torque
            regulation.
        Omr: float
            Rated rotational speed [rpm]
        om: float
            Frequency of regulator mode [Hz]
        csi: float
            Damping ratio of regulator mode

        Returns
        -------
        kp: float
            Proportional gain [rad/(rad/s)]
        ki: float
            Integral gain [rad/rad]
        K1: float
            Linear term of the gain scheduling [deg]
        K2: float
            Quadratic term of the gain scheduling [deg**2]
        """
        # Unit conversions: deg -> rad, kg*m**2 -> kNm-consistent (1e-3),
        # rpm -> rad/s, Hz -> rad/s
        pitch = pitch * np.pi/180.
        I = I * 1e-3
        dQdt = dQdt * 180./np.pi
        Omr = Omr * np.pi/30.
        om = om * 2.*np.pi
        # Least-squares quadratic fit of dQdt as a function of pitch:
        # dQdt ~ x[0]*pitch**2 + x[1]*pitch + x[2]
        A = np.ones([dQdt.shape[0], 3])
        A[:, 0] = pitch**2
        A[:, 1] = pitch
        b = dQdt
        ATA = np.dot(A.T, A)
        iATA = np.linalg.inv(ATA)
        iATAA = np.dot(iATA, A.T)
        x = np.dot(iATAA, b)
        kp = -(2*csi*om*I[0] - P/(Omr**2))/x[2]
        ki = -(om**2*I[0])/x[2]
        K1 = x[2]/x[1]*(180./np.pi)
        K2 = x[2]/x[0]*(180./np.pi)**2
        return kp, ki, K1, K2

    @staticmethod
    def K_omega2(V, P, R, TSR):
        Va = np.array(V)
        Pa = np.array(P)
        Ra = np.array(R)
        TSRa = np.array(TSR)
        K = Ra**3 * np.mean(Pa/(TSRa*Va)**3)
        return K
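
    # The K returned above is the constant of the optimal-Cp torque law
    # Qg = K*omega**2: with omega = TSR*V/R and P = Qg*omega it follows that
    # K = P/omega**3 = R**3 * P/(TSR*V)**3, averaged here over the supplied
    # operational points.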

    def select_regions(self, pitch, omega, power):
        """Find the indices at which the controller should switch between the
        different operating mode regions.

        Parameters
        ----------
        pitch : array
            Pitch angle for each operational point.
        omega : array
            Rotational speed for each operational point.
        power : array
            Aerodynamic power for each operational point.

        Returns
        -------
        i12 : int
            Index of the switch between regions 1 and 2.
        i23 : int
            Index of the switch between regions 2 and 3.
        i34 : int
            Index of the switch between regions 3 and 4.
        """
        i12 = 0
        n = len(pitch)
        for i in range(n-1):
            if (abs(power[i]/power[i+1] - 1.) > 0.01):
                if (abs(omega[i] / omega[i+1] - 1.) > 0.01):
                    i12 = i
                    break
        i23 = n-1
        for i in range(i12, n-1):
            if (abs(omega[i] / omega[i+1] - 1.) < 0.01):
                i23 = i
                break
        i34 = i23
        for i in range(i23, n-1):
            if (abs(power[i]/power[i+1] - 1.) > 0.01):
                if (abs(omega[i] / omega[i+1] - 1.) < 0.01):
                    i34 = i+1
        return i12, i23, i34


if __name__ == '__main__':
    pass
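
    # A minimal usage sketch (the arrays are a rounded subset of the
    # full-load points from the unit tests below; kp/ki/K1/K2 are the gains
    # and gain-scheduling terms described in the docstring above):
    crt = Control()
    pitch = np.array([3.752, 6.253, 8.195, 9.858, 11.348])             # [deg]
    inertia = np.array([4.3947, 4.3958, 4.3963, 4.3964, 4.3964])*1e7   # [kg*m**2]
    dQdt = np.array([-608.5, -719.1, -814.1, -899.2, -981.5])          # [kNm/deg]
    kp, ki, K1, K2 = crt.pitch_controller_tuning(pitch, inertia, dQdt,
                                                 P=0., Omr=12.1, om=0.10, csi=0.7)
    print('kp=%g ki=%g K1=%g K2=%g' % (kp, ki, K1, K2))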


# ===========================================================================
# New file: unit tests for the Control class above
# ===========================================================================
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 04 11:09:43 2016

@author: tlbl
"""
import unittest

from wetb.control import control
import numpy as np


class TestControl(unittest.TestCase):

    def setUp(self):
        dQdt = np.array([-0.126876918E+03, -0.160463547E+03, -0.211699586E+03,
                         -0.277209984E+03, -0.351476131E+03, -0.409411354E+03,
                         -0.427060299E+03, -0.608498644E+03, -0.719141594E+03,
                         -0.814129630E+03, -0.899248007E+03, -0.981457622E+03,
                         -0.106667910E+04, -0.114961807E+04, -0.123323210E+04,
                         -0.131169210E+04, -0.138758236E+04, -0.145930419E+04,
                         -0.153029102E+04, -0.159737975E+04, -0.166846850E+04])
        pitch = np.array([0.2510780000E+00, 0.5350000000E-03, 0.535000000E-03,
                          0.5350000000E-03, 0.5350000000E-03, 0.535000000E-03,
                          0.5350000000E-03, 0.3751976000E+01, 0.625255700E+01,
                          0.8195032000E+01, 0.9857780000E+01, 0.113476710E+02,
                          0.1271615400E+02, 0.1399768300E+02, 0.152324310E+02,
                          0.1642177100E+02, 0.1755302300E+02, 0.186442750E+02,
                          0.1970333100E+02, 0.2073358600E+02, 0.217410280E+02])
        I = np.array([0.4394996114E+08, 0.4395272885E+08, 0.4395488725E+08,
                      0.4395301987E+08, 0.4394561932E+08, 0.4393327166E+08,
                      0.4391779133E+08, 0.4394706335E+08, 0.4395826989E+08,
                      0.4396263773E+08, 0.4396412693E+08, 0.4396397777E+08,
                      0.4396275304E+08, 0.4396076315E+08, 0.4395824699E+08,
                      0.4395531228E+08, 0.4395201145E+08, 0.4394837798E+08,
                      0.4394456127E+08, 0.4394060604E+08, 0.4393647769E+08])
        self.dQdt = dQdt
        self.pitch = pitch
        self.I = I

    def test_pitch_controller_tuning(self):
        crt = control.Control()
        P = 0.
        Omr = 12.1
        om = 0.10
        csi = 0.7
        i = 5
        kp, ki, K1, K2 = crt.pitch_controller_tuning(self.pitch[i:],
                                                     self.I[i:],
                                                     self.dQdt[i:],
                                                     P, Omr, om, csi)
        self.assertAlmostEqual(kp, 1.596090243644432, places=10)
        self.assertAlmostEqual(ki, 0.71632362627138424, places=10)
        self.assertAlmostEqual(K1, 10.01111637532056, places=10)
        self.assertAlmostEqual(K2, 599.53659803157643, places=10)

    def test_regions(self):
        crt = control.Control()
        pitch = np.array([0., -2., -2., -2., -2., -2., -2., -1., 0., ])
        omega = np.array([1., 1., 1., 2., 3., 3., 3., 3., 3., ])
        power = np.array([1., 2., 3., 4., 5., 6., 7., 7., 7., ])

        istart, iend = 0, -1
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 2)
        self.assertEqual(i2, 4)
        self.assertEqual(i3, 6)

        istart, iend = 3, -1
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 1)
        self.assertEqual(i3, 3)

        istart, iend = 5, -1
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 0)
        self.assertEqual(i3, 1)

        istart, iend = 6, -1
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 0)
        self.assertEqual(i3, 0)

        istart, iend = 5, -2
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 0)
        self.assertEqual(i3, 1)

        istart, iend = 3, -3
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 1)
        self.assertEqual(i3, 2)

        istart, iend = 2, -4
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 2)
        self.assertEqual(i3, 2)

        istart, iend = 0, 3
        i1, i2, i3 = crt.select_regions(pitch[istart:iend], omega[istart:iend],
                                        power[istart:iend])
        self.assertEqual(i1, 0)
        self.assertEqual(i2, 0)
        self.assertEqual(i3, 2)


if __name__ == "__main__":
    unittest.main()


# ===========================================================================
# New file: DLB post-processing module (wetb.dlb.dlb_postprocs)
# ===========================================================================
import numpy as np
import os
import re
import xarray as xr
from copy import copy
from wetb.fatigue_tools.fatigue import eq_loads_from_Markov


def nc_to_dataarray(nc_file):
    dataarray = xr.open_dataset(nc_file).to_dataarray().squeeze('variable')
    if 'sensor_unit' in dataarray.coords:
        dataarray = dataarray.assign_coords(
            sensor_unit=dataarray["sensor_unit"].astype(str))
    if 'sensor_description' in dataarray.coords:
        dataarray = dataarray.assign_coords(
            sensor_description=dataarray["sensor_description"].astype(str))
    dataarray = add_coords_from_sensor_description(dataarray)
    return dataarray


def get_params(filename, regex):
    match = re.match(regex, os.path.basename(filename))
    if match is not None:
        return match.groups()
    else:
        return np.nan


def add_coords_from_filename(dataarray, params, regex, formats=None):
    coords = xr.apply_ufunc(get_params,
                            dataarray.coords['filename'],
                            kwargs={'regex': regex},
                            input_core_dims=[[]],
                            output_core_dims=len(params)*[[]],
                            vectorize=True)
    if formats is None:
        for i in range(len(params)):
            dataarray.coords[params[i]] = ('filename', coords[i].values)
    else:
        for i in range(len(params)):
            dataarray.coords[params[i]] = ('filename',
                                           [formats[i](v) for v in coords[i].values])
    return dataarray
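

# A minimal sketch of how the two helpers above are typically combined. The
# filename pattern and the parameter names (wsp, wdir, seed) are assumptions
# for illustration; `da` must carry a 'filename' coordinate over the
# collected simulations:
#   da = add_coords_from_filename(da, params=['wsp', 'wdir', 'seed'],
#                                 regex=r'.*_wsp(\d+)_wdir(\d+)_s(\d+)',
#                                 formats=[int, int, int])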


def get_load_info(sensor_description):
    # Parse a HAWC2 sensor description string into
    # (coo, load, mbdy, node, s, s/S, center); fields that do not apply to
    # the sensor type are returned as None.
    if sensor_description.startswith('Force_intp'):
        load = sensor_description[12:14]
        mbdy_end = sensor_description.find(' ', 20)
        mbdy = sensor_description[20:mbdy_end]
        s_start = sensor_description.find('s=', mbdy_end) + 3
        s_end = s_start + 6
        s = float(sensor_description[s_start: s_end])
        ss_start = sensor_description.find('s/S=', mbdy_end) + 5
        ss_end = ss_start + 6
        ss = float(sensor_description[ss_start: ss_end])
        coo_start = sensor_description.find('coo:', ss_end) + 5
        coo_end = sensor_description.find(' ', coo_start)
        coo = sensor_description[coo_start:coo_end]
        center_start = sensor_description.find('center:', coo_end) + 7
        center_end = sensor_description.find(' ', center_start)
        center = sensor_description[center_start:center_end]
        return coo, load, mbdy, None, s, ss, center
    elif sensor_description.startswith('Moment_intp'):
        load = sensor_description[13:15]
        mbdy_end = sensor_description.find(' ', 21)
        mbdy = sensor_description[21:mbdy_end]
        s_start = sensor_description.find('s=', mbdy_end) + 3
        s_end = s_start + 6
        s = float(sensor_description[s_start: s_end])
        ss_start = sensor_description.find('s/S=', mbdy_end) + 5
        ss_end = ss_start + 6
        ss = float(sensor_description[ss_start: ss_end])
        coo_start = sensor_description.find('coo:', ss_end) + 5
        coo_end = sensor_description.find(' ', coo_start)
        coo = sensor_description[coo_start:coo_end]
        center_start = sensor_description.find('center:', coo_end) + 7
        center_end = sensor_description.find(' ', center_start)
        center = sensor_description[center_start:center_end]
        return coo, load, mbdy, None, s, ss, center
    elif sensor_description.startswith('Force'):
        load = sensor_description[7:9]
        mbdy_end = sensor_description.find(' ', 15)
        mbdy = sensor_description[15:mbdy_end]
        node_start = sensor_description.find('nodenr:', mbdy_end) + 8
        node_end = node_start + 3
        node = int(sensor_description[node_start:node_end])
        coo_start = sensor_description.find('coo:', node_end) + 5
        coo_end = sensor_description.find(' ', coo_start)
        coo = sensor_description[coo_start:coo_end]
        return coo, load, mbdy, node, None, None, None
    elif sensor_description.startswith('Moment'):
        load = sensor_description[6:8]
        mbdy_end = sensor_description.find(' ', 14)
        mbdy = sensor_description[14:mbdy_end]
        node_start = sensor_description.find('nodenr:', mbdy_end) + 8
        node_end = node_start + 3
        node = int(sensor_description[node_start:node_end])
        coo_start = sensor_description.find('coo:', node_end) + 5
        coo_end = sensor_description.find(' ', coo_start)
        coo = sensor_description[coo_start:coo_end]
        return coo, load, mbdy, node, None, None, None
    else:
        return None, None, None, None, None, None, None


def add_coords_from_sensor_description(dataarray):
    coords = xr.apply_ufunc(get_load_info,
                            dataarray.coords['sensor_description'],
                            input_core_dims=[[]],
                            output_core_dims=[[], [], [], [], [], [], []],
                            vectorize=True)
    coords_labels = ['coo', 'load', 'mbdy', 'node', 's', 's/S', 'center']
    for i in range(len(coords_labels)):
        dataarray.coords[coords_labels[i]] = ('sensor_name', coords[i].values)
    return dataarray


def get_DLC(filename):
    filename = os.path.basename(filename)
    i1 = filename.lower().find('dlc')
    i2 = filename.find('_', i1)
    if i2 == -1:
        DLC = filename[i1:]
    else:
        DLC = filename[i1:i2]
    return DLC
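

# e.g. get_DLC('res/DLC12_wsp10_wdir000_s1001.hdf5') returns 'DLC12'
# (the search is case-insensitive, so 'dlc12_...' yields 'dlc12').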


def apply_regex(filename, regex):
    filename = os.path.basename(filename)
    if re.match(regex, filename) is None:
        return False
    else:
        return True


def get_groups(filename, regex_list):
    for regex in regex_list.values():
        match = re.match(regex, os.path.basename(filename))
        if match is not None:
            break
    return match.group(0) if match is not None else None


def group_simulations(dataarray, regex_list):
    groups = np.unique(np.vectorize(get_groups)(dataarray.filename, regex_list))
    grouped_simulations = {}
    for group in groups:
        grouped_simulations[group] = dataarray.sel(
            filename=np.vectorize(apply_regex)(dataarray.filename, group))
    return grouped_simulations


def average_contemporaneous_loads(extreme_loads, driver, load, metric, safety_factor):
    if load in driver:
        return metric(extreme_loads)*safety_factor
    if driver == 'Fres_max':
        if load in ['Fx', 'Fy']:
            return metric(extreme_loads)*safety_factor
    if driver == 'Mres_max':
        if load in ['Mx', 'My']:
            return metric(extreme_loads)*safety_factor
    return np.mean(np.abs(extreme_loads))*np.sign(extreme_loads[np.argmax(np.abs(extreme_loads))])


def scale_contemporaneous_loads(extreme_loads, driver, load, scaling_factor, safety_factor):
    if load in driver:
        return extreme_loads*scaling_factor*safety_factor
    if driver == 'Fres_max':
        if load in ['Fx', 'Fy']:
            return extreme_loads*scaling_factor*safety_factor
    if driver == 'Mres_max':
        if load in ['Mx', 'My']:
            return extreme_loads*scaling_factor*safety_factor
    return extreme_loads*scaling_factor


def get_loads_by_group(extreme_loads, regex_list, metric_list, safety_factor_list,
                       contemporaneous_method='averaging'):
    grouped_simulations = group_simulations(extreme_loads, regex_list)
    loads_by_group_dict = {}
    if contemporaneous_method == 'averaging':
        for group, simulations in grouped_simulations.items():
            DLC = get_DLC(group)
            group_loads = xr.apply_ufunc(average_contemporaneous_loads,
                                         simulations,
                                         simulations.coords['driver'],
                                         simulations.coords['load'],
                                         kwargs={'metric': metric_list[DLC],
                                                 'safety_factor': safety_factor_list[DLC]},
                                         input_core_dims=[['filename'], [], []],
                                         vectorize=True)
            Fres = np.sqrt(group_loads.sel(load='Fx')**2 + group_loads.sel(load='Fy')**2)
            Fres.coords['load'] = 'Fres'
            Mres = np.sqrt(group_loads.sel(load='Mx')**2 + group_loads.sel(load='My')**2)
            Mres.coords['load'] = 'Mres'
            thetaF = np.arctan2(group_loads.sel(load='Fy'), group_loads.sel(load='Fx'))*180/np.pi
            thetaF.coords['load'] = 'Theta_F'
            thetaM = np.arctan2(group_loads.sel(load='My'), group_loads.sel(load='Mx'))*180/np.pi
            thetaM.coords['load'] = 'Theta_M'
            group_loads = xr.concat([group_loads, Fres, thetaF, Mres, thetaM], dim='load')
            loads_by_group_dict[group] = group_loads
    elif contemporaneous_method == 'scaling':
        for group, simulations in grouped_simulations.items():
            main_loads = simulations.isel(driver=range(12),
                                          load=xr.DataArray([int(i/2) for i in range(12)],
                                                            dims='driver'))
            Fres = np.sqrt(simulations.sel(driver='Fres_max').sel(load='Fx')**2 +
                           simulations.sel(driver='Fres_max').sel(load='Fy')**2)
            Mres = np.sqrt(simulations.sel(driver='Mres_max').sel(load='Mx')**2 +
                           simulations.sel(driver='Mres_max').sel(load='My')**2)
            Fres.coords['load'] = 'Fres'
            Mres.coords['load'] = 'Mres'
            main_loads = xr.concat([main_loads, Fres, Mres], dim='driver')
            DLC = get_DLC(group)
            characteristic_loads = xr.apply_ufunc(metric_list[DLC],
                                                  main_loads,
                                                  input_core_dims=[['filename']],
                                                  vectorize=True)
            scaling_factors = characteristic_loads/main_loads
            closest_load_indices = np.abs(1/scaling_factors - 1).argmin('filename')
            scaling_factors = scaling_factors.isel(filename=closest_load_indices)
            closest_load_indices = closest_load_indices.drop_vars('load')
            simulations = simulations.isel(filename=closest_load_indices)
            group_loads = xr.apply_ufunc(scale_contemporaneous_loads,
                                         simulations,
                                         simulations.coords['driver'],
                                         simulations.coords['load'],
                                         scaling_factors,
                                         kwargs={'safety_factor': safety_factor_list[DLC]},
                                         input_core_dims=[[], [], [], []],
                                         vectorize=True)
            Fres = np.sqrt(group_loads.sel(load='Fx')**2 + group_loads.sel(load='Fy')**2)
            Fres.coords['load'] = 'Fres'
            Mres = np.sqrt(group_loads.sel(load='Mx')**2 + group_loads.sel(load='My')**2)
            Mres.coords['load'] = 'Mres'
            thetaF = np.arctan2(group_loads.sel(load='Fy'), group_loads.sel(load='Fx'))*180/np.pi
            thetaF.coords['load'] = 'Theta_F'
            thetaM = np.arctan2(group_loads.sel(load='My'), group_loads.sel(load='Mx'))*180/np.pi
            thetaM.coords['load'] = 'Theta_M'
            group_loads = xr.concat([group_loads, Fres, thetaF, Mres, thetaM], dim='load')
            loads_by_group_dict[group] = group_loads
    loads_by_group = xr.concat(list(loads_by_group_dict.values()), 'group')
    if 'variable' in loads_by_group.coords:
        loads_by_group = loads_by_group.drop_vars('variable')
    if 'filename' in loads_by_group.coords:
        loads_by_group = loads_by_group.drop_vars('filename')
    loads_by_group.coords['group'] = list(loads_by_group_dict.keys())
    return loads_by_group


def get_DLB_extreme_loads(extreme_loads, regex_list, metric_list, safety_factor_list,
                          contemporaneous_method='averaging'):
    """
    Calculate the extreme loads for the whole DLB.

    Parameters
    ----------
    extreme_loads : xarray.DataArray (Nsimulations x Nsensors x 14 x 10)
        DataArray containing collected data for each simulation and load sensor.
        The 14 x 10 matrix corresponds to the extreme loading matrix in
        IEC61400-1 annex I-1, where the 10 components are Fx, Fy, Fz, Mx, My,
        Mz, Fres, Theta_F, Mres, Theta_M.
        Dims: filename, sensor_name, driver, load
    regex_list : dict
        Dictionary containing the regular expression for grouping simulations
        for each DLC.
    metric_list : dict
        Dictionary containing the function to be applied to each group of
        simulations for each DLC.
    safety_factor_list : dict
        Dictionary containing the safety factor to be applied to each group of
        simulations for each DLC.
    contemporaneous_method : str
        Method for assessing the contemporaneous loads.
        'averaging': the mean of the absolute values from each timeseries is
        used, with the sign of the value of largest magnitude applied afterwards.
        'scaling': the contemporaneous values are taken from the timeseries
        whose load is closest to the characteristic load, and are then scaled
        by the factor characteristic load / timeseries load.
        The default is 'averaging'.

    Returns
    -------
    DataArray (Nsensors x 14 x 10)
        DataArray containing the extreme loading matrix of the DLB for each
        sensor, as well as the group of simulations driving each sensor.\n
        Dims: sensor_name, driver, load
        Coords: sensor_name, driver, load, group
    """
    loads_by_group = get_loads_by_group(extreme_loads, regex_list, metric_list,
                                        safety_factor_list,
                                        contemporaneous_method=contemporaneous_method)
    driving_group_indices = []
    for driver in loads_by_group.coords['driver'].values:
        load = driver[:driver.find('_')]
        metric = driver[driver.find('_') + 1:]
        if metric == 'max':
            driving_group_indices.append(loads_by_group.sel(driver=driver, load=load).argmax('group'))
        elif metric == 'min':
            driving_group_indices.append(loads_by_group.sel(driver=driver, load=load).argmin('group'))
    driving_group_indices = xr.concat(driving_group_indices, dim='driver')
    driving_group_indices = driving_group_indices.drop_vars('load')
    DLB_extreme_loads = loads_by_group.isel(group=driving_group_indices)
    return DLB_extreme_loads.transpose(..., 'sensor_name', 'driver', 'load')
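

# A minimal sketch of the three driver dictionaries (the patterns, metrics
# and partial safety factors are illustrative assumptions), keyed by the DLC
# tag that get_DLC extracts from each group name:
#   regex_list = {'DLC12': r'DLC12_wsp\d+', 'DLC64': r'DLC64_wsp\d+'}
#   metric_list = {'DLC12': mean_upperhalf, 'DLC64': np.max}
#   safety_factor_list = {'DLC12': 1.35, 'DLC64': 1.35}
#   dlb_extremes = get_DLB_extreme_loads(extreme_loads, regex_list,
#                                        metric_list, safety_factor_list)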


def ext_or_cont(ext_and_cont, driver, sensor_description, metric, safety_factor):
    if sensor_description in driver:
        return metric(ext_and_cont)*safety_factor
    else:
        return np.mean(np.abs(ext_and_cont))*np.sign(ext_and_cont[np.argmax(np.abs(ext_and_cont))])


def get_DLB_ext_and_cont(ext_and_cont, regex_list, metric_list, safety_factor_list):
    """
    Analogous to get_DLB_extreme_loads, but generalized to any given set of
    sensors and not only the 6 load components of a node.

    Parameters
    ----------
    ext_and_cont : xarray.DataArray (Nsimulations x Nsensors*2 x Nsensors)
        DataArray containing collected data for each simulation of the extreme
        values (max and min) and their contemporaneous values for a given set
        of sensors.
    regex_list : dict
        Dictionary containing the regular expression for grouping simulations
        for each DLC.
    metric_list : dict
        Dictionary containing the function to be applied to each group of
        simulations for each DLC.
    safety_factor_list : dict
        Dictionary containing the safety factor to be applied to each group of
        simulations for each DLC.

    Returns
    -------
    DataArray (Nsensors*2 x Nsensors)
        DataArray containing the extreme values (max and min) and their
        contemporaneous values for a given set of sensors.
    """
    grouped_simulations = group_simulations(ext_and_cont, regex_list)
    values_by_group_dict = {}
    for group, simulations in grouped_simulations.items():
        DLC = get_DLC(group)
        group_values = xr.apply_ufunc(ext_or_cont,
                                      simulations,
                                      simulations.coords['driver'],
                                      simulations.coords['sensor_description'],
                                      kwargs={'metric': metric_list[DLC],
                                              'safety_factor': safety_factor_list[DLC]},
                                      input_core_dims=[['filename'], [], []],
                                      vectorize=True)
        values_by_group_dict[group] = group_values
    values_by_group = xr.concat(list(values_by_group_dict.values()), dim='group')
    values_by_group = values_by_group.drop_vars('variable')
    values_by_group['group'] = list(values_by_group_dict.keys())
    driving_group_indices = []
    for driver in values_by_group.coords['driver'].values:
        sensor_description = driver[:-4]
        metric = driver[-3:]
        if metric == 'max':
            driving_group_indices.append(values_by_group.sel(
                driver=driver, sensor_description=sensor_description).argmax('group'))
        elif metric == 'min':
            driving_group_indices.append(values_by_group.sel(
                driver=driver, sensor_description=sensor_description).argmin('group'))
    driving_group_indices = xr.concat(driving_group_indices, dim='driver')
    driving_group_indices = driving_group_indices.drop_vars('sensor_description')
    DLB_ext_and_cont = values_by_group.isel(group=driving_group_indices)
    return DLB_ext_and_cont


def get_values_by_group(dataarray, regex_list, metric_list, safety_factor_list):
    grouped_simulations = group_simulations(dataarray, regex_list)
    group_values_dict = {}
    for group, simulations in grouped_simulations.items():
        DLC = get_DLC(group)
        values = xr.apply_ufunc(metric_list[DLC],
                                simulations,
                                input_core_dims=[["filename"]],
                                vectorize=True)*safety_factor_list[DLC]
        group_values_dict[group] = values
    group_values = xr.concat(list(group_values_dict.values()), 'group')
    group_values = group_values.drop_vars('variable')
    group_values.coords['group'] = list(group_values_dict.keys())
    return group_values


def get_DLB_directional_extreme_loads(directional_extreme_loads, regex_list,
                                      metric_list, safety_factor_list):
    """
    Same procedure as in get_DLB_extreme_loads, but for the maximum loads
    along each direction.

    Parameters
    ----------
    directional_extreme_loads : xarray.DataArray
        DataArray containing the collected directional extreme loads of all
        simulations.
    regex_list : dict
        Dictionary containing the regular expression for grouping simulations
        for each DLC.
    metric_list : dict
        Dictionary containing the function to be applied to each group of
        simulations for each DLC.
    safety_factor_list : dict
        Dictionary containing the safety factor to be applied to each group of
        simulations for each DLC.

    Returns
    -------
    xarray.DataArray
        DataArray containing the maximum values of the DLB, as well as the
        group of simulations driving each load direction.
    """
    group_values = get_values_by_group(directional_extreme_loads, regex_list,
                                       metric_list, safety_factor_list)
    DLB_directional_extreme_loads = group_values.max('group')
    DLB_directional_extreme_loads.coords['group'] = group_values['group'].isel(
        group=group_values.argmax('group'))
    return DLB_directional_extreme_loads


def get_DLB_extreme_values(statistics, regex_list, metric_list, safety_factor_list):
    """
    Group the maxima and minima of each timeseries by regular expression,
    apply a metric and scale by a safety factor. Same procedure as in
    get_DLB_extreme_loads, but applicable to any sensor; only the extreme
    values are computed, not the contemporaneous values of other sensors.

    Parameters
    ----------
    statistics : xarray.DataArray (Nsimulations x *)
        DataArray containing the collected statistics of all simulations.
    regex_list : dict
        Dictionary containing the regular expression for grouping simulations
        for each DLC.
    metric_list : dict
        Dictionary containing the function to be applied to each group of
        simulations for each DLC.
    safety_factor_list : dict
        Dictionary containing the safety factor to be applied to each group of
        simulations for each DLC.

    Returns
    -------
    DataArray (Nsensors x 2)
        DataArray containing the maximum and minimum values of the DLB, as
        well as the group of simulations driving each sensor.
    """
    group_values = get_values_by_group(statistics, regex_list, metric_list,
                                       safety_factor_list)
    max_values = group_values.sel(statistic='max').max('group')
    max_indices = group_values.sel(statistic='max').argmax('group')
    max_values.coords['group'] = group_values.sel(statistic='max')['group'].isel(group=max_indices)
    min_values = group_values.sel(statistic='min').min('group')
    min_indices = group_values.sel(statistic='min').argmin('group')
    min_values.coords['group'] = group_values.sel(statistic='min')['group'].isel(group=min_indices)
    DLB_extreme_values = xr.concat([max_values, min_values], dim='statistic')
    if 'sensor_name' in DLB_extreme_values.dims:
        DLB_extreme_values = DLB_extreme_values.transpose(..., 'sensor_name', 'statistic')
    return DLB_extreme_values


def get_DLB_eq_loads(eq_loads, weight_list):
    """
    Calculate the fatigue equivalent loads for the whole DLB
    from the equivalent loads of the individual simulations.

    Parameters
    ----------
    eq_loads : xarray.DataArray (Nsimulations x * x Nwoehlerslopes)
        DataArray containing equivalent loads for each Woehler slope,
        for each sensor and for each simulation.\n
        Dims: filename, *, m\n
        Coords: filename, *, m
    weight_list : dict or xarray.DataArray
        Dictionary or DataArray containing the weight
        (real time / simulation time) for each simulation.

    Returns
    -------
    xarray.DataArray (Nsensors x Ndirections x Nwoehlerslopes)
        DataArray containing the fatigue equivalent loads of the DLB
        for each sensor and Woehler slope.\n
        Dims: *, m\n
        Coords: *, m
    """
    if isinstance(weight_list, dict):
        weight_list = xr.DataArray(data=list(weight_list.values()),
                                   dims='filename',
                                   coords={'filename': list(weight_list.keys())})
    m_list = eq_loads.m
    DLB_eq_loads = (weight_list*eq_loads**m_list).sum('filename')**(1/m_list)
    if 'variable' in DLB_eq_loads.coords:
        DLB_eq_loads = DLB_eq_loads.drop_vars('variable')
    return DLB_eq_loads
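

# In formula form, get_DLB_eq_loads computes, for each Woehler slope m,
#   L_DLB = (sum_i w_i * L_i**m)**(1/m)
# where w_i is the weight (real time / simulation time) of file i, e.g.
#   weights = {'dlc12_wsp10_s1001.hdf5': 3.3e4}  # hypothetical file/weight
#   dlb_eq = get_DLB_eq_loads(eq_loads, weights)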


def get_DLB_eq_loads_from_Markov(markov_matrices, weight_list, m_list, neq=1e7):
    """
    Calculate the fatigue equivalent loads for the whole DLB
    from the Markov matrices of the individual simulations.

    Parameters
    ----------
    markov_matrices : xarray.DataArray (Nsimulations x * x Nbins x 2)
        DataArray containing the number of cycles and the load amplitude
        for each sensor for each simulation.\n
        Dims: filename, *, bin, (cycles, amplitude)\n
        Coords: filename, *
    weight_list : dict or xarray.DataArray
        Dictionary or DataArray containing the weight
        (real time / simulation time) for each simulation.
    m_list : list (Nwoehlerslopes)
        List containing the different Woehler slopes.
    neq : int or float, optional
        Number of equivalent load cycles. The default is 1e7.

    Returns
    -------
    xarray.DataArray (Nsensors x Ndirections x Nwoehlerslopes)
        DataArray containing the fatigue equivalent loads of the DLB
        for each sensor and Woehler slope.\n
        Dims: *, m\n
        Coords: *, m
    """
    eq_loads = eq_loads_from_Markov(markov_matrix=markov_matrices,
                                    m_list=m_list,
                                    neq=neq)
    eq_loads = xr.DataArray(eq_loads)
    eq_loads = eq_loads.rename({f'dim_{i}': markov_matrices.dims[i]
                                for i in range(len(markov_matrices.dims) - 2)})
    eq_loads = eq_loads.rename({eq_loads.dims[-1]: 'm'})
    eq_loads = eq_loads.assign_coords(markov_matrices.coords)
    eq_loads.coords['m'] = m_list
    DLB_eq_loads = get_DLB_eq_loads(eq_loads, weight_list)
    return DLB_eq_loads


def mean_upperhalf(group):
    """
    Calculate the mean of the values in the upper half of a set. To be
    used as the metric for some DLCs.

    Parameters
    ----------
    group : array
        Array containing the maximum values of a given sensor for each
        simulation in a group.

    Returns
    -------
    float
        The mean value of the upper half of the simulations.
    """
    upperhalf = sorted(group, reverse=True)[0:int(len(group)/2)]
    return np.mean(upperhalf)
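

# e.g. mean_upperhalf([1., 4., 2., 3.]) averages the upper half [4., 3.]
# and returns 3.5.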


def get_weight_list(file_list,
                    n_years,
                    Vin,
                    Vout,
                    Vr,
                    wsp_list,
                    probability,
                    wsp_weights=None,
                    yaw_weights=xr.concat([xr.DataArray(data=[[0.5, 0.25, 0.25]],
                                                        dims=('dlc', 'wdir'),
                                                        coords={'dlc': ['12'], 'wdir': [0, 10, 350]}),
                                           xr.DataArray(data=[[0.5, 0.5]],
                                                        dims=('dlc', 'wdir'),
                                                        coords={'dlc': ['24'], 'wdir': [20, 340]}),
                                           xr.DataArray(data=[[0.5, 0.5]],
                                                        dims=('dlc', 'wdir'),
                                                        coords={'dlc': ['64'], 'wdir': [8, 352]})],
                                          dim='dlc'),
                    n_seeds=xr.DataArray(data=[6, 3, 6],
                                         dims=('dlc'),
                                         coords={'dlc': ['12', '24', '64']}),
                    n_events=None,
                    sim_time=xr.DataArray(data=[600, 600, 100, 100, 600],
                                          dims=('dlc'),
                                          coords={'dlc': ['12', '24', '31', '41', '64']}),
                    weight_DLC64_Vin_Vout=0.025,
                    neq=None):
    """
    Calculate the weight of each simulation for the calculation of the
    damage equivalent loads of the whole DLB.

    Parameters
    ----------
    file_list : xr.DataArray
        DataArray containing the paths to all simulations. It must also have
        coordinates for DLC, wind speed, wind direction and/or wave direction.
    n_years : int or float
        Turbine's lifetime in years.
    Vin : int or float
        Cut-in wind speed.
    Vout : int or float
        Cut-out wind speed.
    Vr : int or float
        Rated wind speed.
    wsp_list : array
        Array containing all simulated wind speeds.
    probability : xarray.DataArray
        DataArray containing the probability of each combination of wind
        speed, wind direction and/or wave direction.
    wsp_weights : xarray.DataArray, optional
        DataArray containing the fraction of time each DLC takes at each wind
        speed. The default assumes that DLC64 takes 2.5% of the time between
        Vin and Vout.
    yaw_weights : xarray.DataArray, optional
        DataArray containing the fraction of time each yaw misalignment takes
        for each DLC. The default assumes {-10: 0.25, 0: 0.5, 10: 0.25} for
        DLC12, {-20: 0.5, 20: 0.5} for DLC24 and {-8: 0.5, 8: 0.5} for DLC64.
    n_seeds : xarray.DataArray, optional
        DataArray containing the number of seeds per DLC per wind speed and
        wind direction and/or wave direction. The default assumes 6 seeds for
        DLC12, 3 seeds for DLC24 and 6 seeds for DLC64.
    n_events : xarray.DataArray, optional
        DataArray containing the number of events per DLC per wind speed.
        The default assumes {Vin: 1000, Vr: 50, Vout: 50} for both DLC31 and
        DLC41.
    sim_time : xarray.DataArray, optional
        DataArray containing the simulation time per DLC. The default assumes
        600 s for DLC12, DLC24 and DLC64 and 100 s for DLC31 and DLC41.
    weight_DLC64_Vin_Vout : int or float, optional
        Fraction of time that DLC64 takes between Vin and Vout. Only used if
        wsp_weights is not passed. The default is 0.025.
    neq : int or float, optional
        Number of equivalent load cycles. If not passed, the output weights
        can ONLY be used when calculating DLB equivalent loads from individual
        file Markov matrices. If passed, the output weights can ONLY be used
        when calculating DLB equivalent loads from individual equivalent loads
        (recommended).

    Returns
    -------
    xarray.DataArray
        DataArray containing the weight for each simulation.
    """
    if wsp_weights is None:
        weight_DLC24 = 50/(365.25*24)
        weight_DLC12 = 1 - weight_DLC64_Vin_Vout
        wsp_weights = xr.DataArray(data=[[weight_DLC12 if Vin <= v <= Vout else 0 for v in wsp_list],
                                         [weight_DLC24 if Vin <= v <= Vout else 0 for v in wsp_list],
                                         [weight_DLC64_Vin_Vout if Vin <= v <= Vout else 1 for v in wsp_list]],
                                   dims=('dlc', 'wsp'),
                                   coords={'dlc': ['12', '24', '64'],
                                           'wsp': wsp_list})
    if n_events is None:
        n_events = xr.DataArray(data=[[1000, 50, 50],
                                      [1000, 50, 50]],
                                dims=('dlc', 'wsp'),
                                coords={'dlc': ['31', '41'],
                                        'wsp': [Vin, Vr, Vout]})
    lifetime = n_years*365.25*24*3600  # convert to seconds
    weight_list = (lifetime*probability*wsp_weights*yaw_weights/n_seeds/sim_time).combine_first(n_years*n_events)
    if neq is not None:  # correct by sim_time/neq so it can be used for individual equivalent loads
        weight_list = weight_list*sim_time/neq

    # rearrange weights from parameters (DLC, wsp, wdir) to actual filenames
    # TO-DO: allow wvdir as well
    def get_weight_list_per_filename(file_list, dlc, wsp, wdir, weight_list):
        return weight_list.sel(dlc=dlc, wsp=wsp, wdir=wdir)

    weight_list = xr.apply_ufunc(get_weight_list_per_filename,
                                 file_list,
                                 file_list['dlc'],
                                 file_list['wsp'],
                                 file_list['wdir'],
                                 kwargs={'weight_list': weight_list},
                                 vectorize=True)
    return weight_list
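

# A minimal end-to-end sketch (file_list and probability are assumptions:
# file_list is a DataArray of result paths with dlc/wsp/wdir coordinates,
# probability the matching wind-climate probabilities). Without neq, the
# weights are only valid for the Markov-matrix route used below:
#   weights = get_weight_list(file_list, n_years=20, Vin=4, Vout=26, Vr=10,
#                             wsp_list=np.arange(4, 27, 2),
#                             probability=probability)
#   dlb_eq = get_DLB_eq_loads_from_Markov(markov_matrices, weights,
#                                         m_list=[3, 4, 5, 8, 10, 12])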


# ===========================================================================
# New file: Excel reports and plots of DLB results
# ===========================================================================
import numpy as np
import pandas as pd
from openpyxl import load_workbook
from openpyxl.styles import PatternFill
import matplotlib.pyplot as plt
import os
from wetb.dlb.dlb_postprocs import get_DLC


def dataarray_to_excel(dataarray, path):
    """
    Generate an Excel file from a DataArray.

    Parameters
    ----------
    dataarray : xarray.DataArray
        DataArray containing data from the DLB.
    path : str
        Path to the new Excel file to be created.

    Returns
    -------
    None.
    """
    df = dataarray.to_dataframe('value').reset_index()
    df.to_excel(path, index=False)


def DLB_extreme_loads_to_excel(DLB_extreme_loads, path):
    """
    Generate an Excel report of the DLB extreme loads.

    Parameters
    ----------
    DLB_extreme_loads : xarray.DataArray
        DataArray containing the DLB extreme loading matrices for each load
        sensor.
    path : str
        Path to the new Excel file to be created.

    Returns
    -------
    None.
    """
    with pd.ExcelWriter(path, engine="openpyxl") as writer:
        for sensor in DLB_extreme_loads.coords['sensor_name'].values:
            df = DLB_extreme_loads.sel(sensor_name=sensor).to_pandas()
            df['group'] = DLB_extreme_loads.sel(sensor_name=sensor)['group'].values
            df.to_excel(writer, sheet_name=sensor, index_label='driver')
    # Highlight the driving load component of each row: the 12 max/min
    # drivers on the diagonal plus the Fres and Mres rows.
    workbook = load_workbook(path)
    highlight = PatternFill(start_color="C0C0C0", end_color="C0C0C0", fill_type="solid")
    for sensor in DLB_extreme_loads.coords['sensor_name'].values:
        for i in range(12):
            workbook[sensor].cell(row=i + 2, column=int(i/2) + 2).fill = highlight
        workbook[sensor].cell(row=14, column=8).fill = highlight
        workbook[sensor].cell(row=15, column=10).fill = highlight
    workbook.save(path)


def plot_DLB_directional_extreme_loads(DLB_extreme_loads, folder, extension='.png'):
    """
    Plot the DLB directional extreme loads and save them to a given folder.

    Parameters
    ----------
    DLB_extreme_loads : xarray.DataArray ('sensor_name', 'angle')
        DataArray containing the extreme loads of the DLB.
    folder : str
        Path to the folder where the plots will be saved.
    extension : str, optional
        Extension of the plot files. The default is '.png'.

    Returns
    -------
    None.
    """
    angles = DLB_extreme_loads.coords['angle'].values
    for i in range(DLB_extreme_loads.shape[0]):
        x = [float(DLB_extreme_loads[i, j])*np.cos(np.deg2rad(angles[j]))
             for j in range(len(angles))]
        y = [float(DLB_extreme_loads[i, j])*np.sin(np.deg2rad(angles[j]))
             for j in range(len(angles))]
        DLC = [get_DLC(DLB_extreme_loads[i, j].group.values[()]) for j in range(len(angles))]
        plt.scatter(x, y)
        [plt.plot([0, x[k]], [0, y[k]], color='black') for k in range(len(x))]
        plt.xlabel('Mx')
        plt.ylabel('My')
        plt.title(DLB_extreme_loads[i].coords['sensor_name'].values[()])
        plt.axhline(0, color='black', linewidth=1)
        plt.axvline(0, color='black', linewidth=1)
        plt.xlim(-max(abs(min(x)), abs(max(x)))*1.2, max(abs(min(x)), abs(max(x)))*1.2)
        plt.ylim(-max(abs(min(y)), abs(max(y)))*1.2, max(abs(min(y)), abs(max(y)))*1.2)
        for j in range(len(x)):
            plt.annotate(DLC[j], (x[j], y[j]), textcoords="offset points",
                         xytext=(0, 10), ha='center')
        plt.savefig(os.path.join(folder, 'Extreme_' + DLB_extreme_loads[i].coords['sensor_name'].values[()] + extension))
        plt.show()


def plot_DLB_directional_equivalent_loads(DLB_fatigue_loads, folder, extension='.png'):
    """
    Plot the DLB directional equivalent loads and save them to a given folder.

    Parameters
    ----------
    DLB_fatigue_loads : xarray.DataArray ('sensor_name', 'angle', 'm')
        DataArray containing the fatigue loads of the DLB. It matches the
        output from get_DLB_fatigue_loads.
    folder : str
        Path to the folder where the plots will be saved.
    extension : str, optional
        Extension of the plot files. The default is '.png'.

    Returns
    -------
    None.
    """
    m_list = DLB_fatigue_loads.coords['m'].values
    angles = DLB_fatigue_loads.coords['angle'].values
    for i in range(DLB_fatigue_loads.shape[0]):
        for j in range(len(m_list)):
            x = [float(DLB_fatigue_loads[i, k, j])*np.cos(np.deg2rad(angles[k]))
                 for k in range(len(angles))]
            y = [float(DLB_fatigue_loads[i, k, j])*np.sin(np.deg2rad(angles[k]))
                 for k in range(len(angles))]
            plt.scatter(x, y, label='m = ' + str(m_list[j]))
            [plt.plot([0, x[k]], [0, y[k]], color='black') for k in range(len(angles))]
        plt.xlabel('Mx')
        plt.ylabel('My')
        plt.title(DLB_fatigue_loads[i].coords['sensor_name'].values[()])
        plt.axhline(0, color='black', linewidth=1)
        plt.axvline(0, color='black', linewidth=1)
        plt.xlim(-max(abs(min(x)), abs(max(x)))*1.2, max(abs(min(x)), abs(max(x)))*1.2)
        plt.ylim(-max(abs(min(y)), abs(max(y)))*1.2, max(abs(min(y)), abs(max(y)))*1.2)
        plt.legend()
        plt.savefig(os.path.join(folder, 'Fatigue_' + DLB_fatigue_loads[i].coords['sensor_name'].values[()] + extension))
        plt.show()


# ===========================================================================
# New file: HAWC2 input writer for the IEC 61400-1 design load cases
# ===========================================================================
import os
import warnings

from wetb.hawc2.hawc2_input_writer import HAWC2InputWriter
from wetb.hawc2.tests import test_files
from wetb.dlb.iec61400_1 import DTU_IEC61400_1_Ref_DLB

"""
TODO: delete wind ramp / replace wind section
TODO: set default turb_format = 0
"""


class HAWC2_IEC_DLC_Writer(HAWC2InputWriter):

    def __init__(self, base_htc_file, diameter,
                 time_start=100,  # Minimum 5 s cf. IEC61400-1(2005), section 7.5
                 turbulence_defaults=(33.6, 3.9, 8192, 64)):  # L, gamma, n_x, n_yz
        HAWC2InputWriter.__init__(self, base_htc_file, diameter=diameter,
                                  time_start=time_start,
                                  turbulence_defaults=turbulence_defaults)

    def set_V_hub(self, htc, V_hub, **_):
        htc.wind.wsp = V_hub
        htc.wind.wind_ramp_factor = 0, self.time_start, 8 / V_hub, 1

    def set_wdir(self, htc, wdir, **_):
        htc.wind.windfield_rotations = wdir, 0, 0

    def set_shear(self, htc, shear, **_):
        if isinstance(shear, str):
            shear = eval(shear)  # convert str to dict (if via excel)
        shear_format, shear_arg = shear['profile']
        i_shear_format = ['log', 'power'].index(shear_format.lower()) + 2
        htc.wind.shear_format = i_shear_format, shear_arg
        if shear['type'] == 'NWP':
            return
        elif shear['type'] == 'EWS':
            phi = {'++': 0, '+-': 90, '--': 180, '-+': 270}[shear['sign']]
            htc.wind.iec_gust = 'ews', shear['A'], phi, self.time_start, shear['T']
        else:
            raise NotImplementedError(shear['type'])

    def set_ti(self, htc, ti, **_):
        htc.wind.tint = ti

    def set_seed(self, htc, seed, **kwargs):
        if seed is None or seed == "":
            htc.wind.turb_format = 0
        elif isinstance(seed, int):
            L, Gamma, nx, nyz = self.turbulence_defaults
            htc.add_mann_turbulence(L, 1, Gamma, seed, no_grid_points=(nx, nyz, nyz),
                                    box_dimension=(kwargs['simulation_time'] * kwargs['V_hub'],
                                                   self.diameter, self.diameter))
        else:
            raise NotImplementedError(seed)

    def set_Gust(self, htc, Gust, **kwargs):
        if str(Gust).lower() == 'nan':
            return
        if isinstance(Gust, str):
            Gust = eval(Gust)
        if Gust['type'] == 'ECD':
            V_cg, theta_cg, T = [Gust[k] for k in ['V_cg', 'theta_cg', 'T']]
            htc.wind.iec_gust = 'ecd', V_cg, theta_cg, self.time_start, T
        elif Gust['type'] == 'EDC':
            phi, T = [Gust[k] for k in ['phi', 'T']]
            htc.wind.iec_gust = 'edc', 0, phi, self.time_start, T
        elif Gust['type'] == 'EOG':
            V_gust, T = [Gust[k] for k in ['V_gust', 'T']]
            htc.wind.iec_gust = 'eog', V_gust, 0, self.time_start, T
        else:
            raise NotImplementedError(Gust)

    def set_Fault(self, htc, Fault, **kwargs):
        if str(Fault).lower() == 'nan':
            return
        if isinstance(Fault, str):
            Fault = eval(Fault)
        if Fault['type'] == 'GridLoss':
            generator_servo = Fault['generator_servo']
            T = Fault['T']
            self.set_gridloss_time(htc, generator_servo, self.time_start + T)
        elif Fault['type'] == 'StuckBlade':
            pitch_servo = Fault['pitch_servo']
            T = Fault['T']
            self.set_stuckblade(htc, pitch_servo, T)
        elif Fault['type'] == 'PitchRunaway':
            pitch_servo = Fault['pitch_servo']
            T = Fault['T']
            self.set_pitchrunaway(htc, pitch_servo, self.time_start + T)
        else:
            raise NotImplementedError(Fault)

    def set_Operation(self, htc, Operation, **kwargs):
        if str(Operation).lower() == 'nan':
            return
        if isinstance(Operation, str):
            Operation = eval(Operation)
        if Operation['type'] == 'StartUp':
            controller = Operation['controller']
            T = Operation['T']
            self.set_startup_time(htc, controller, self.time_start + T)
        elif Operation['type'] == 'ShutDown':
            controller = Operation['controller']
            T = Operation['T']
            self.set_shutdown_time(htc, controller, self.time_start + T, 1)
        elif Operation['type'] == 'EmergencyShutDown':
            controller = Operation['controller']
            T = Operation['T']
            self.set_shutdown_time(htc, controller, self.time_start + T, 2)
        elif Operation['type'] == 'Parked':
            controller = Operation['controller']
            self.set_parked(htc, controller, self.time_start + kwargs['simulation_time'])
        elif Operation['type'] == 'RotorLocked':
            controller = Operation['controller']
            self.set_parked(htc, controller, self.time_start + kwargs['simulation_time'])
            shaft = Operation['shaft']
            shaft_constraint = Operation['shaft_constraint']
            azimuth = Operation['Azi']
            self.set_rotor_locked(htc, shaft, shaft_constraint, azimuth)
        else:
            raise NotImplementedError(Operation)

    def set_simulation_time(self, htc, simulation_time, **_):
        htc.set_time(self.time_start, simulation_time + self.time_start)

    def set_gridloss_time(self, htc, generator_servo, t):
        generator_servo = htc.dll.get_subsection_by_name(generator_servo, 'name')
        if 'time for grid loss' not in generator_servo.init.constant__7.comments.lower():
            warnings.warn('Assuming constant 7 in generator_servo DLL is time for grid loss!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "time for grid loss" in constant 7 comment.')
        generator_servo.init.constant__7 = 7, t

    def set_stuckblade(self, htc, pitch_servo, t):
        # Note: the stuck-blade angle from the DLC definition is not forwarded
        # here; constant 10 is always set to 0 deg.
        pitch_servo = htc.dll.get_subsection_by_name(pitch_servo, 'name')
        if 'time for stuck blade' not in pitch_servo.init.constant__9.comments.lower():
            warnings.warn('Assuming constant 9 in pitch_servo DLL is time for stuck blade!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "time for stuck blade" in constant 9 comment.')
        pitch_servo.init.constant__9 = 9, t
        if 'angle of stuck blade' not in pitch_servo.init.constant__10.comments.lower():
            warnings.warn('Assuming constant 10 in pitch_servo DLL is angle of stuck blade!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "angle of stuck blade" in constant 10 comment.')
        pitch_servo.init.constant__10 = 10, 0

    def set_pitchrunaway(self, htc, pitch_servo, t):
        pitch_servo = htc.dll.get_subsection_by_name(pitch_servo, 'name')
        if 'time for pitch runaway' not in pitch_servo.init.constant__8.comments.lower():
            warnings.warn('Assuming constant 8 in pitch_servo DLL is time for pitch runaway!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "time for pitch runaway" in constant 8 comment.')
        pitch_servo.init.constant__8 = 8, t

    def set_startup_time(self, htc, controller, t):
        controller = htc.dll.get_subsection_by_name(controller, 'name')
        if 'cut-in time' not in controller.init.constant__24.comments.lower():
            warnings.warn('Assuming constant 24 in controller DLL is cut-in time!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "cut-in time" in constant 24 comment.')
        controller.init.constant__24 = 24, t

    def set_shutdown_time(self, htc, controller, t, stop_type):
        controller = htc.dll.get_subsection_by_name(controller, 'name')
        if 'shut-down time' not in controller.init.constant__26.comments.lower():
            warnings.warn('Assuming constant 26 in controller DLL is shut-down time!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "shut-down time" in constant 26 comment.')
        controller.init.constant__26 = 26, t
        if 'stop type' not in controller.init.constant__28.comments.lower():
            warnings.warn('Assuming constant 28 in controller DLL is stop type!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "stop type" in constant 28 comment.')
        controller.init.constant__28 = 28, stop_type

    def set_parked(self, htc, controller, simulation_time):
        controller = htc.dll.get_subsection_by_name(controller, 'name')
        if 'cut-in time' not in controller.init.constant__24.comments.lower():
            warnings.warn('Assuming constant 24 in controller DLL is cut-in time!'
                          ' Please verify your htc file is correct. Disable warning by '
                          'placing "cut-in time" in constant 24 comment.')
        controller.init.constant__24 = 24, simulation_time + 1
        htc.aero.induction_method = 0

    def set_rotor_locked(self, htc, shaft, shaft_constraint, azimuth):
        for constraint in htc.new_htc_structure.constraint:
            try:
                if shaft_constraint in str(constraint.name):
                    constraint.name_ = 'bearing3'
                    constraint.add_line('omegas', [0])
                    break
            except Exception:
                continue
        for orientation in htc.new_htc_structure.orientation:
            try:
                if shaft in str(orientation.mbdy2):
                    command = 'mbdy2_eulerang'
                    for line in orientation:
                        if 'mbdy2_eulerang' in line.name_:
                            command = line.name_
                    orientation[command].values[2] = azimuth
                    break
            except Exception:
                continue


if __name__ == '__main__':
    dlb = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90)
    path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/'
    writer = HAWC2_IEC_DLC_Writer(path + 'htc/DTU_10MW_RWT.htc', 180)
    p = writer.from_pandas(dlb['DLC14'])
    print(p.contents)
    p.write_all(out_dir='tmp')


# ===========================================================================
# New file: IEC 61400-1 DLC/DLB definitions (wetb.dlb.iec61400_1)
# ===========================================================================
import os
import numpy as np
import pandas as pd
import itertools

"""IEC refers to IEC61400-1(2005)

Open questions:
- dlc1.4 (Hansen: 100 s, no turbulence): why not NTM, NWP, 600 s and 6 seeds?
- seed numbering and reuse?
"""


class DLC():

    def __init__(self, Description, Operation, Turb, Shear, Gust, Fault=None, variables={}):
        self.Description = Description

        def wrap(attr, value):
            # Values may be given as (function, params) tuples; in that case
            # bind the extra parameters as default keyword arguments of the
            # stored function.
            if isinstance(value, tuple):
                func, params = value

                def wrapper(*args, **kwargs):
                    combined_kwargs = {**params, **kwargs}
                    return func(*args, **combined_kwargs)
                setattr(self, attr, wrapper)
            else:
                setattr(self, attr, value)

        wrap('Turb', Turb)
        wrap('Shear', Shear)
        wrap('Gust', Gust)
        wrap('Fault', Fault)
        wrap('Operation', Operation)
        self.variables = variables
        self.variables.update({k.lower(): v for k, v in variables.items()})
        turb_class = self.iec_wt_class[1].lower()
        assert turb_class in 'abc'
        self.I_ref = {'a': .16, 'b': .14, 'c': .12}[turb_class]  # IEC61400-1(2005) table 1
        wind_class = int(self.iec_wt_class[0])
        assert 1 <= wind_class <= 3
        self.V_ref = {1: 50, 2: 42.5, 3: 37.5}[wind_class]
        if variables["seed"]:
            self.rng = np.random.default_rng(seed=variables["seed"])

    @classmethod
    def getattr(cls, name):
        try:
            return getattr(cls, name)
        except AttributeError as e:
            d = {k.lower(): k for k in dir(cls)}
            if name.lower() in d:
                return getattr(cls, d[name.lower()])
            else:
                raise e

    def __getattr__(self, name):
        try:
            return self.__getattribute__(name)
        except AttributeError as e:
            if name in self.variables:
                return self.variables[name]
            elif name.lower() in self.variables:
                return self.variables[name.lower()]
            else:
                raise e

    def get_lst(self, x):
        if isinstance(x, pd.Series):
            x = x.iloc[0]
        if ":" in str(x):
            start, step, stop = [float(eval(v, globals(), self.variables)) for v in x.split(":")]
            return list(np.arange(start, stop + step, step))
        else:
            return [float(eval(v, globals(), self.variables)) for v in str(x).replace("/", ",").split(",")]
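
    # e.g. get_lst("4:2:26") expands to [4.0, 6.0, ..., 26.0], and
    # get_lst("10/12") (or "10,12") to [10.0, 12.0]; the expressions may also
    # reference the DLC variables, e.g. "Vin:2:Vout".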

    @property
    def wsp_lst(self):
        return sorted(self.get_lst(self.WSP))

    @property
    def wdir_lst(self):
        return self.get_lst(self.Wdir)

    def case_arg_lst_product(self, **kwargs):
        case_arg_lst = []
        for dict_lst in itertools.product(
                *[m(self, **kwargs) for m in [DLC.WspWdir, self.Turb, self.Shear,
                                              self.Gust, self.Fault, self.Operation]
                  if m is not None]):
            ids = {k: v for d in dict_lst for k, v in d.items() if '_id' in k}
            d = {k: v for d in dict_lst for k, v in d.items() if '_id' not in k}
            name = [self.Name, 'wsp%02d' % d['V_hub'], "wdir%03d" % (d['wdir'] % 360)]
            if 'seed_id' in ids:
                name.append("s%04d" % d['seed'])
            if 'ews_id' in ids:
                name.append("ews%s" % d['shear']['sign'])
            if 'edc_id' in ids:
                name.append(ids['edc_id'])
            if 'T_id' in ids:
                name.append(ids['T_id'])
            if 'Azi_id' in ids:
                name.append(ids['Azi_id'])
            d['Name'] = "_".join(name)
            case_arg_lst.append(d)
        return case_arg_lst

    def to_pandas(self):
        case_dict_lst = []
        for V_hub in self.wsp_lst:
            for wdir in self.wdir_lst:
                default_kwargs = {'Folder': self.Name,
                                  'simulation_time': self.Time,
                                  'V_hub': V_hub,
                                  'wdir': wdir,
                                  }
                for case_args in self.case_arg_lst_product(**default_kwargs):
                    case_args.update({k: v for k, v in default_kwargs.items()
                                      if k not in case_args})
                    case_dict_lst.append(case_args)
        cols = ['Name', 'Folder', 'V_hub', 'wdir', 'simulation_time']
        cols += [k for k in case_args.keys() if k not in cols]
        return pd.DataFrame(case_dict_lst, columns=cols)

    # ===========================================================================
    # General
    # ===========================================================================
    def WspWdir(self, V_hub, wdir, **_):
        return [{'V_hub': V_hub, 'wdir': wdir}]

    def NONE(self, **_):
        return [{}]

    def NaN(self, **_):
        return [{}]

    # ===========================================================================
    # Turbulence models
    # ===========================================================================
    def NoTurb(self, **_):
        return [{'seed': None}]

    def ConstantTurb(self, ti, **_):
        s0 = 1001
        if self.seed:
            return [{'seed_id': 's%04d' % (s),
                     'ti': ti,
                     'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
        else:
            return [{'seed_id': 's%04d' % (s),
                     'ti': ti,
                     'seed': s} for s in range(s0, s0 + int(self.Seeds))]

    def NTM(self, V_hub, **_):
        # Normal turbulence model, IEC section 6.3.1.3
        s0 = int((V_hub - self.Vin) // self.Vstep * 100 + 1001)
        ti = (self.I_ref * (0.75 * V_hub + 5.6)) / V_hub  # IEC (11)
        if self.seed:
            return [{'seed_id': 's%04d' % (s),
                     'ti': ti,
                     'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
        else:
            return [{'seed_id': 's%04d' % (s),
                     'ti': ti,
                     'seed': s} for s in range(s0, s0 + int(self.Seeds))]

    def ETM(self, V_hub, **_):
        # Extreme turbulence model
        V_ave = .2 * self.V_ref  # IEC (9)
        c = 2  # IEC (19)
        ti = c * self.I_ref * (0.072 * (V_ave / c + 3) * (V_hub / c - 4) + 10) / V_hub
        s0 = int((V_hub - self.Vin) // self.Vstep * 100 + 1001)
        if self.seed:
            return [{'seed_id': 's%04d' % (s),
                     'ti': ti,
                     'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
        else:
            return [{'seed_id': 's%04d' % (s),
                     'ti': ti,
                     'seed': s} for s in range(s0, s0 + int(self.Seeds))]

    # ===========================================================================
    # Shear profiles
    # ===========================================================================
    def NWP(self, alpha, **_):
        # Normal wind profile model, IEC section 6.3.1.2
        return [{'shear': {'type': 'NWP', 'profile': ('power', alpha)}}]

    def EWS(self, V_hub, alpha, **_):
        # Extreme wind shear, IEC section 6.3.2.6
        beta = 6.4
        T = 12
        sigma_1 = self.I_ref * (0.75 * V_hub + 5.6)  # IEC (11)
        D = self.D
        A = (2.5 + 0.2 * beta * sigma_1 * (D / self.lambda_1)**0.25) / D
        return [{'shear': {'type': 'EWS', 'profile': ('power', alpha), 'A': A, 'T': T, 'sign': ews_sign},
                 'ews_id': ews_sign}
                for ews_sign in ['++', '+-', '-+', '--']]

    # ===========================================================================
    # Gusts
    # ===========================================================================
    def ECD(self, V_hub, **_):
        # Extreme coherent gust with direction change, IEC section 6.3.2.5
        V_cg = 15  # magnitude of gust, IEC (22)
        theta_cg = (720 / V_hub, 180)[V_hub < 4]  # direction change, IEC (24)
        T = 10  # rise time
        return [{'Gust': {'type': 'ECD', 'V_cg': V_cg, 'theta_cg': theta_cg, 'T': T}}]

    def EDC(self, V_hub, **_):
        # Extreme direction change
        phi = 4*(np.rad2deg(np.arctan(self.I_ref*(0.75*V_hub + 5.6)/V_hub/(1 + 0.1*self.D/self.lambda_1))))
        T = 10  # duration
        return [{'Gust': {'type': 'EDC', 'phi': sign*phi, 'T': T},
                 'edc_id': {-1: '-', 1: '+'}[sign]} for sign in [-1, 1]]

    def EOG(self, V_hub, **_):
        # Extreme operating gust
        V_gust = min(1.35*(0.8*1.4*self.V_ref - V_hub),
                     3.3*self.I_ref*(0.75*V_hub + 5.6)/(1 + 0.1*self.D/self.lambda_1))  # magnitude of gust
        T = 10.5  # duration
        return [{'Gust': {'type': 'EOG', 'V_gust': V_gust, 'T': T}}]

    # ===========================================================================
    # Faults
    # ===========================================================================
    def StuckBlade(self, t, pitch, **_):
        if (not isinstance(t, list)) and (not isinstance(pitch, list)):
            return [{'Fault': {'type': 'StuckBlade', 'pitch_servo': self.pitch_servo,
                               'T': t, 'pitch': pitch}}]
        else:
            return [{'Fault': {'type': 'StuckBlade', 'pitch_servo': self.pitch_servo,
                               'T': tp[0], 'pitch': tp[1]},
                     'T_id': 't' + str(t.index(tp[0])) + 'p' + str(pitch.index(tp[1]))}
                    for tp in itertools.product(t, pitch)]

    def PitchRunaway(self, t, **_):
        if not isinstance(t, list):
            return [{'Fault': {'type': 'PitchRunaway', 'pitch_servo': self.pitch_servo, 'T': t}}]
        else:
            return [{'Fault': {'type': 'PitchRunaway', 'pitch_servo': self.pitch_servo, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def GridLoss(self, t, **_):
        if not isinstance(t, list):
            return [{'Fault': {'type': 'GridLoss', 'generator_servo': self.generator_servo, 'T': t}}]
        else:
            return [{'Fault': {'type': 'GridLoss', 'generator_servo': self.generator_servo, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    # ===========================================================================
    # Operations
    # ===========================================================================
    def PowerProduction(self, **_):
        return [{}]

    def StartUp(self, t, **_):
        if not isinstance(t, list):
            return [{'Operation': {'type': 'StartUp', 'controller': self.controller, 'T': t}}]
        else:
            return [{'Operation': {'type': 'StartUp', 'controller': self.controller, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def ShutDown(self, t, **_):
        if not isinstance(t, list):
            return [{'Operation': {'type': 'ShutDown', 'controller': self.controller, 'T': t}}]
        else:
            return [{'Operation': {'type': 'ShutDown', 'controller': self.controller, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def EmergencyShutDown(self, t, **_):
        if not isinstance(t, list):
            return [{'Operation': {'type': 'EmergencyShutDown', 'controller': self.controller, 'T': t}}]
        else:
            return [{'Operation': {'type': 'EmergencyShutDown', 'controller': self.controller, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def Parked(self, **_):
        return [{'Operation': {'type': 'Parked', 'controller': self.controller}}]

    def RotorLocked(self, azimuth, **_):
        if not isinstance(azimuth, list):
            return [{'Operation': {'type': 'RotorLocked', 'controller': self.controller,
                                   'shaft': self.shaft, 'shaft_constraint': self.shaft_constraint,
                                   'Azi': azimuth}}]
        else:
            return [{'Operation': {'type': 'RotorLocked', 'controller': self.controller,
                                   'shaft': self.shaft, 'shaft_constraint': self.shaft_constraint,
                                   'Azi': azi},
                     'Azi_id': 'azi' + f"{azi:03}"} for azi in azimuth]


class DLB():

    def __init__(self, dlc_definitions, variables):
        cols = ['Name', 'Description', 'Operation', 'WSP', 'Wdir', 'Turb',
                'Seeds', 'Shear', 'Gust', 'Fault', 'Time']
        self.dlcs = pd.DataFrame(dlc_definitions, columns=cols,
                                 index=[dlc['Name'] for dlc in dlc_definitions])

        var_name_desc = [('iec_wt_class', 'IEC wind turbine class, e.g. 1A'),
                         ('tiref', 'Reference turbulence intensity'),
                         ('Vin', 'Cut-in wind speed'),
                         ('Vout', 'Cut-out wind speed'),
                         ('Vr', 'Rated wind speed'),
                         ('Vref', 'Reference wind speed'),
                         ('V1', '1-year recurrence period wind speed'),
                         ('Ve50', 'Extreme 50-year recurrence period wind speed'),
                         ('Ve1', 'Extreme 1-year recurrence period wind speed'),
                         ('Vmaint', 'Maximum wind speed for maintenance'),
                         ('Vstep', 'Wind speed distribution step'),
                         ('D', 'Rotor diameter'),
                         ('z_hub', 'Hub height'),
                         ('lambda_1', 'Longitudinal turbulence scale parameter'),
                         ('controller', 'Filename of controller DLL'),
                         ('generator_servo', 'Filename of generator servo DLL'),
                         ('pitch_servo', 'Filename of pitch servo DLL'),
                         ('best_azimuth', 'Best blade azimuth for maintenance'),
                         ('shaft', 'Name of shaft body'),
                         ('shaft_constraint', 'Name of constraint between tower and shaft'),
                         ('seed', 'Seed to initialize the RNG for turbulence seed generation')]
        self.variables = pd.DataFrame([{'Name': n, 'Value': variables[n], 'Description': d}
                                       for n, d in var_name_desc],
                                      columns=['Name', 'Value', 'Description'],
                                      index=[n for (n, d) in var_name_desc])

    @property
    def dlb(self):
        if not hasattr(self, '_dlb'):
            self.generate_DLB()
        return self._dlb

    def keys(self):
        return [n for n in self.dlcs['Name'] if not str(n).lower().strip() in ['', 'none', 'nan']]

    def to_excel(self, filename):
        if os.path.dirname(filename) != "":
            os.makedirs(os.path.dirname(filename), exist_ok=True)
        writer = pd.ExcelWriter(filename)
        self.dlcs.to_excel(writer, 'DLC', index=False)
        self.variables.to_excel(writer, 'Variables', index=False)
        writer.close()

    @staticmethod
    def from_excel(filename):
        df_vars = pd.read_excel(filename, sheet_name='Variables', index_col='Name')
        df_vars.fillna('', inplace=True)
        variables = {name: value for name, value in zip(df_vars.index, df_vars.Value.values)}
        dlb_def = pd.read_excel(filename, 'DLC')
        dlb_def.columns = [c.strip() for c in dlb_def.columns]
        return DLB([row for _, row in dlb_def.iterrows() if not pd.isna(row['Name'])], variables)
def __getitem__(self, key):
return self.dlb[key]
def _make_dlc(self, dlc_name):
dlc_definition = self.dlcs.loc[dlc_name]
kwargs = {k: (DLC.getattr(str(dlc_definition[k][0])), dlc_definition[k][1]) if isinstance(dlc_definition[k], tuple)
else DLC.getattr(str(dlc_definition[k])) for k in ['Operation', 'Turb', 'Shear', 'Gust', 'Fault']}
kwargs['Description'] = dlc_definition['Description']
variables = {v['Name']: v['Value'] for _, v in self.variables.iterrows()}
variables.update(dlc_definition)
dlc = DLC(variables=variables, **kwargs)
df = dlc.to_pandas()
name = dlc_definition['Name']
df.insert(0, 'DLC', name)
return df
def generate_DLB(self):
self._dlb = {dlc: self._make_dlc(dlc) for dlc in self.keys()}
return self._dlb
def to_pandas(self):
return pd.concat(self.dlb.values(), sort=False)
def cases_to_excel(self, filename):
if os.path.dirname(filename) != "":
os.makedirs(os.path.dirname(filename), exist_ok=True)
writer = pd.ExcelWriter(filename)
for k in self.keys():
self[k].to_excel(writer, k, index=False)
writer.close()
class DTU_IEC61400_1_Ref_DLB(DLB):
def __init__(self, iec_wt_class, Vin, Vout, Vr, Vmaint, D, z_hub,
controller, generator_servo, pitch_servo, best_azimuth,
Vstep=2, seed=None, alpha=0.2, alpha_extreme=0.11, ti_extreme=0.11,
shaft='shaft', shaft_constraint='shaft_rot'):
Name, Description, Operation, WSP, Wdir, Time = 'Name', 'Description', 'Operation', 'WSP', 'Wdir', 'Time'
Turb, Seeds, Shear, Gust, Fault = 'Turb', 'Seeds', 'Shear', 'Gust', 'Fault'
dlc_definitions = [
{Name: 'DLC12',
Description: 'Normal production',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-10/0/10',
Turb: 'NTM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC13',
Description: 'Normal production with high turbulence',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-10/0/10',
Turb: 'ETM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC14',
Description: 'Normal production with gust and direction change',
Operation: 'PowerProduction',
WSP: 'Vr/Vr+2/Vr-2',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'ECD',
Fault: None,
Time: 100},
{Name: 'DLC15',
Description: 'Normal production with extreme wind shear',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('EWS', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC21',
Description: 'Loss of electrical network',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-10/0/10',
Turb: 'NTM',
Seeds: 4,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: ('GridLoss', {'t': 10}),
Time: 100},
{Name: 'DLC22b',
Description: 'One blade stuck at minimum pitch angle',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: 0,
Turb: 'NTM',
Seeds: 12,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: ('StuckBlade', {'t': 0.1, 'pitch': 0}),
Time: 100},
{Name: 'DLC22p',
Description: 'Pitch runaway',
Operation: 'PowerProduction',
WSP: 'Vr:Vstep:Vout',
Wdir: 0,
Turb: 'NTM',
Seeds: 12,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: ('PitchRunaway', {'t': 10}),
Time: 100},
{Name: 'DLC22y',
Description: 'Abnormal yaw error',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '15:15:345',
Turb: 'NTM',
Seeds: 1,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC23',
Description: 'Loss of electrical network with extreme operating gust',
Operation: 'PowerProduction',
WSP: 'Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EOG',
Fault: ('GridLoss', {'t': [2.5, 4, 5.25]}),
Time: 100},
{Name: 'DLC24',
Description: 'Normal production with large yaw error',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-20/20',
Turb: 'NTM',
Seeds: 3,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC31',
Description: 'Start-up',
Operation: ('StartUp', {'t': 0}),
WSP: 'Vin/Vr/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC32',
Description: 'Start-up at 4 different times with extreme operating gust',
Operation: ('StartUp', {'t': [0.1, 2.5, 4, 5.25]}),
WSP: 'Vin/Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EOG',
Fault: None,
Time: 100},
{Name: 'DLC33',
Description: 'Start-up at 2 different times with extreme wind direction change',
Operation: ('StartUp', {'t': [-0.1, 5]}),
WSP: 'Vin/Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EDC',
Fault: None,
Time: 100},
{Name: 'DLC41',
Description: 'Shut-down',
Operation: ('ShutDown', {'t': 0}),
WSP: 'Vin/Vr/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC42',
Description: 'Shut-down at 6 different times with extreme operating gust',
Operation: ('ShutDown', {'t': [0.1, 2.5, 4, 5, 8, 10]}),
WSP: 'Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EOG',
Fault: None,
Time: 100},
{Name: 'DLC51',
Description: 'Emergency shut-down',
Operation: ('EmergencyShutDown', {'t': 0}),
WSP: 'Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NTM',
Seeds: 12,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC61',
Description: 'Parked with 50-year wind',
Operation: 'Parked',
WSP: 'Vref',
Wdir: '-8/8',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 6,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC62',
Description: 'Parked with 50-year wind without grid connection',
Operation: 'Parked',
WSP: 'Vref',
Wdir: '0:15:345',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 1,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC63',
Description: 'Parked with 1-year wind with large yaw error',
Operation: 'Parked',
WSP: 'V1',
Wdir: '-20/20',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 6,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC64',
Description: 'Parked',
Operation: 'Parked',
WSP: 'Vin:Vstep:0.7*Vref',
Wdir: '-8/8',
Turb: 'NTM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC71',
Description: 'Rotor locked at 4 different azimuth angles and extreme yaw',
Operation: ('RotorLocked', {'azimuth': [0, 30, 60, 90]}),
WSP: 'V1',
Wdir: '0:15:345',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 1,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC81',
Description: 'Rotor locked for maintenance',
Operation: ('RotorLocked', {'azimuth': best_azimuth}),
WSP: 'Vmaint',
Wdir: '-8/8',
Turb: 'NTM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
]
Vref = {1: 50, 2: 42.5, 3: 37.5}[int(iec_wt_class[0])]
tiref = {'a': 0.16, 'b': 0.14, 'c': 0.12}[iec_wt_class[1].lower()]
V1 = 0.8*Vref
Ve50 = 1.4*Vref
Ve1 = 0.8*Ve50
lambda_1 = 0.7*z_hub if z_hub < 60 else 42
variables = {'iec_wt_class': iec_wt_class,
'Vref': Vref,
'tiref': tiref,
'V1': V1,
'Ve50': Ve50,
'Ve1': Ve1,
'Vin': Vin,
'Vout': Vout,
'Vr': Vr,
'Vmaint': Vmaint,
'Vstep': Vstep,
'D': D,
'z_hub': z_hub,
'lambda_1': lambda_1,
'controller': controller,
'generator_servo': generator_servo,
'pitch_servo': pitch_servo,
'best_azimuth': best_azimuth,
'shaft': shaft,
'shaft_constraint': shaft_constraint}
if seed:
variables["seed"] = int(seed)
else:
variables["seed"] = seed
DLB.__init__(self, dlc_definitions, variables)
def main():
if __name__ == '__main__':
dlb = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A',
Vin=4, Vout=25, Vr=8, # cut-in, cut-out and rated wind speed
Vmaint=18, D=180, z_hub=110, # maintenance wind speed, diameter and hub height
controller='dtu_we_controller', generator_servo='generator_servo',
pitch_servo='servo_with_limits', best_azimuth=180)
print(dlb.dlcs)
print(dlb.variables)
# save dlb definition to excel
dlb.to_excel('overview.xlsx')
# you can now modify definitions in overview.xlsx in Excel and
# load the modified dlb definition back into python
dlb = DLB.from_excel('overview.xlsx')
print(dlb['DLC14'])
# save dlc14 as Excel spreadsheet
dlb['DLC14'].to_excel('dlc14.xlsx')
# you can now modify cases in dlc14.xlsx in Excel
# save all cases to excel
dlb.cases_to_excel('cases.xlsx')
# Generate and save hawc2 input files
from wetb.hawc2.tests import test_files
from wetb.dlb.hawc2_iec_dlc_writer import HAWC2_IEC_DLC_Writer
path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/'
writer = HAWC2_IEC_DLC_Writer(path + 'htc/DTU_10MW_RWT.htc', diameter=180)
# load all cases
writer.from_pandas(dlb)
# load DLC14 only
writer.from_pandas(dlb['DLC14'])
# load modified DLC14.xlsx
writer.from_excel('dlc14.xlsx')
writer.write_all('tmp')
main()
File added
from wetb.dlb import iec61400_1
import pytest
from wetb.dlb.hawc2_iec_dlc_writer import HAWC2_IEC_DLC_Writer
import os
from wetb.hawc2.tests import test_files
import shutil
from tests import npt, run_main
from wetb.hawc2.htc_file import HTCFile
from tests.run_main import run_module_main
from wetb.dlb.iec61400_1 import DTU_IEC61400_1_Ref_DLB
path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/htc/tmp/'
def clean_up():
if os.path.isdir(path):
shutil.rmtree(path)
@pytest.fixture(autouse=True)
def run_around_tests():
clean_up()
yield
clean_up()
@pytest.fixture
def writer():
return HAWC2_IEC_DLC_Writer(path + '../DTU_10MW_RWT.htc', diameter=127)
def test_main():
run_main.run_module_main(iec61400_1)
def test_DLC12(writer):
dlc12 = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller',
generator_servo='generator_servo', pitch_servo='servo_with_limits',
best_azimuth=180)['DLC12']
assert len(dlc12) == 216 # 12 wsp, 3 wdir, 6 seeds
writer.from_pandas(dlc12[::24][:2])
writer.write_all(path)
npt.assert_array_equal(sorted(os.listdir(path + "DLC12")),
['DLC12_wsp04_wdir350_s1001.htc', 'DLC12_wsp06_wdir000_s1101.htc'])
htc = HTCFile(path + "DLC12/DLC12_wsp04_wdir350_s1001.htc")
assert htc.wind.wsp[0] == 4
npt.assert_array_equal(htc.wind.windfield_rotations.values, [-10, 0, 0])
assert htc.wind.turb_format[0] == 1
assert htc.wind.mann.create_turb_parameters[3] == 1001
def test_DLC21(writer):
dlc = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller',
generator_servo='generator_servo', pitch_servo='servo_with_limits',
best_azimuth=180)['DLC21']
assert len(dlc) == 144 # 12 wsp, 3 wdir, 4 seeds
writer.from_pandas(dlc[::16][:2])
writer.write_all(path)
npt.assert_array_equal(sorted(os.listdir(path + "DLC21")),
['DLC21_wsp04_wdir350_s1001.htc', 'DLC21_wsp06_wdir000_s1101.htc'])
htc = HTCFile(path + "DLC21/DLC21_wsp04_wdir350_s1001.htc")
assert htc.wind.wsp[0] == 4
npt.assert_array_equal(htc.wind.windfield_rotations.values, [-10, 0, 0])
assert htc.wind.turb_format[0] == 1
assert htc.wind.mann.create_turb_parameters[3] == 1001
assert htc.dll.get_subsection_by_name('generator_servo', 'name').init.constant__7.values == [7, 110]
def test_DLC22y(writer):
dlc = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller',
generator_servo='generator_servo', pitch_servo='servo_with_limits',
best_azimuth=180)['DLC22y']
assert len(dlc) == 276 # 12 wsp, 23 wdir, 1 seed
writer.from_pandas(dlc[::24][:2])
writer.write_all(path)
npt.assert_array_equal(sorted(os.listdir(path + "DLC22y")),
['DLC22y_wsp04_wdir015_s1001.htc', 'DLC22y_wsp06_wdir030_s1101.htc'])
htc = HTCFile(path + "DLC22y/DLC22y_wsp04_wdir015_s1001.htc")
assert htc.wind.wsp[0] == 4
npt.assert_array_equal(htc.wind.windfield_rotations.values, [15, 0, 0])
assert htc.wind.turb_format[0] == 1
assert htc.wind.mann.create_turb_parameters[3] == 1001
@@ -3,16 +3,6 @@ Created on 01/10/2014
@author: MMPE
'''
import pandas as pd
import numpy as np
import glob
@@ -34,15 +24,51 @@ except NameError as e:
def Weibull(u, k, start, stop, step):
C = 2 * u / np.sqrt(np.pi)
def cdf(x): return -np.exp(-(x / C) ** k)
return {wsp: -cdf(wsp - step / 2) + cdf(wsp + step / 2) for wsp in np.arange(start, stop + step, step)}
def Weibull2(u, k, wsp_lst):
C = 2 * u / np.sqrt(np.pi)
def cdf(x): return -np.exp(-(x / C) ** k)
edges = [wsp_lst[0] - (wsp_lst[1] - wsp_lst[0])/2]
edges += [(wsp_lst[i] + wsp_lst[i + 1])/2 for i in range(len(wsp_lst) - 1)]
edges += [wsp_lst[-1] + (wsp_lst[-1] - wsp_lst[-2])/2]
return {wsp_lst[i]: cdf(edges[i + 1]) - cdf(edges[i]) for i in range(len(wsp_lst))}
def Weibull_IEC(Vref, Vhub_lst):
"""Weibull distribution according to IEC 61400-1:2005, page 24
Parameters
----------
Vref : int or float
Vref of wind turbine class
Vhub_lst : array_like
Wind speed at hub height. Must be equally spaced.
Returns
-------
ndarray : array of probabilities
Examples
--------
>>> Weibull_IEC(50, [4,6,8])
[ 0.11002961 0.14116891 0.15124155]
"""
Vhub_lst = np.array(Vhub_lst)
# Average wind speed
Vave = .2 * Vref
# Rayleigh distribution
def Pr(x): return 1 - np.exp(-np.pi * (x / (2 * Vave))**2)
# Wsp bin edges: [4,6,8] -> [3,5,7,9]
wsp_bin_edges = np.r_[Vhub_lst[0] - (Vhub_lst[1] - Vhub_lst[0]) / 2, (Vhub_lst[1:] +
Vhub_lst[:-1]) / 2, Vhub_lst[-1] + (Vhub_lst[-1] - Vhub_lst[-2]) / 2]
# probabilities of 3-5, 5-7, 7-9
return np.array([-Pr(e1) + Pr(e2) for e1, e2 in zip(wsp_bin_edges[:-1], wsp_bin_edges[1:])])
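# Usage sketch (illustrative numbers): annual-hour weights for 2 m/s bins of
# a class-I turbine (Vref=50). The bin probabilities are Rayleigh CDF masses
# with Vave = 0.2*Vref; multiplying by the hours in a year gives the fatigue
# weighting used by file_hour_lst below.
# probs = Weibull_IEC(50, np.arange(4, 26, 2))
# hours_per_year = probs * 365 * 24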
class DLCHighLevel(object):
@@ -55,8 +81,8 @@ class DLCHighLevel(object):
self.shape_k = shape_k
# Variables
df_vars = pd.read_excel(self.filename, 'Variables', index_col='Name')
df_vars.fillna('', inplace=True)
for name, value in zip(df_vars.index, df_vars.Value.values):
setattr(self, name.lower(), value)
@@ -66,8 +92,8 @@ class DLCHighLevel(object):
"result folder" % self.filename)
self.res_path = os.path.join(os.path.dirname(self.filename), self.res_path)
# DLC sheet
self.dlc_df = pd.read_excel(self.filename, 'DLC', skiprows=[1])
# empty strings are now nans, convert back to empty strings
self.dlc_df.fillna('', inplace=True)
# force headers to lower case
@@ -99,7 +125,7 @@ class DLCHighLevel(object):
self.dlc_df['psf'] = 1
# Sensors sheet
self.sensor_df = pd.read_excel(self.filename, 'Sensors')
# empty strings are now nans, convert back to empty strings
self.sensor_df.fillna('', inplace=True)
# force headers to lower case
@@ -107,8 +133,11 @@
for k in ['Name', 'Nr']:
assert k.lower() in self.sensor_df.keys(), "Sensor sheet must have a '%s' column" % k
self.sensor_df = self.sensor_df[self.sensor_df.name != ""]
assert not any(self.sensor_df['name'].duplicated()), "Duplicate sensor names: %s" % ",".join(
self.sensor_df['name'][self.sensor_df['name'].duplicated()].values)
for k in ['description', 'unit', 'statistic', 'ultimate', 'fatigue', 'm',
'neql', 'extremeload', 'bearingdamage', 'mindistance', 'maxdistance']:
if k not in self.sensor_df.keys():
self.sensor_df[k] = ""
for _, row in self.sensor_df[self.sensor_df['fatigue'] != ""].iterrows():
@@ -125,12 +154,16 @@
def sensor_info(self, sensors=[]):
if sensors != []:
sensors = np.atleast_1d(sensors)
empty_column = pd.DataFrame([""] * len(self.sensor_df.name))[0]
return self.sensor_df[functools.reduce(
np.logical_or, [((self.sensor_df.get(f, empty_column).values != "") | (self.sensor_df.name == f)) for f in sensors])]
else:
return self.sensor_df
def dlc_variables(self, dlc):
dlc_row = self.dlc_df[self.dlc_df['name'] == dlc]
def get_lst(x):
if isinstance(x, pd.Series):
x = x.iloc[0]
@@ -145,18 +178,22 @@
def distribution(self, value_key, dist_key, row):
values = self.dlc_df[value_key][row]
if ":" in values:
if ":" in str(values):
start, step, stop = [float(eval(v, globals(), self.__dict__)) for v in values.lower().split(":")]
values = np.arange(start, stop + step, step)
else:
try:
values = [(eval(v, globals(), self.__dict__)) for v in str(values).lower().replace("/", ",").split(",")]
except SyntaxError:
try:
values = [(eval(v.lstrip('0'), globals(), self.__dict__))
for v in str(values).lower().replace("/", ",").split(",")]
except Exception:
values = str(values).lower().replace("/", ",").split(",")
dist = self.dlc_df[dist_key][row]
if str(dist).lower() == "weibull" or str(dist).lower() == "rayleigh":
dist = Weibull_IEC(self.vref, values)
else:
def fmt(v):
if "#" in str(v):
@@ -167,36 +204,66 @@
else:
return float(v) / 100
dist = [fmt(v) for v in str(self.dlc_df[dist_key][row]).replace("/", ",").split(",")]
assert len(values) == len(dist), "Number of %s-values (%d) != number of %s-values (%d)" % (value_key,
len(values), dist_key, len(dist))
return OrderedDict(zip(map(self.format_tag_value, values), dist))
def fatigue_distribution(self):
fatigue_dist = {}
for row, load in self.dlc_df['load'].items():
if "F" not in str(load).upper():
continue
dlc = self.dlc_df[self.dist_value_keys[0][1]][row]
fatigue_dist[str(dlc)] = [self.distribution(value_key, dist_key, row)
for dist_key, value_key in self.dist_value_keys]
return fatigue_dist
def files_dict(self, files=None):
"""
Parameters
----------
files : list, default=None
When files is None, files_dict will search for files defined in
the res_folder or res_path attribute if the former is absent.
Returns
-------
files_dict : dict
Dictionary holding the file names and total run hours as key-value
pairs.
"""
fatigue_dlcs = self.dlc_df[['F' in str(l).upper() for l in self.dlc_df['load']]]['dlc']
if len(fatigue_dlcs) == 0:
return {}
ext = getattr(self, 'res_ext', ".sel")
if isinstance(files, list):
pass
elif not hasattr(self, "res_folder") or self.res_folder == "":
files = glob.glob(os.path.join(self.res_path, "*" + ext)) + \
glob.glob(os.path.join(self.res_path, "*/*" + ext))
if len(files) == 0:
raise Exception('No *%s files found in:\n%s or\n%s' %
(ext, self.res_path, os.path.join(self.res_path, "*/")))
else:
files = []
for dlc_id in fatigue_dlcs:
dlc_id = str(dlc_id)
if "%" in self.res_folder:
folder = self.res_folder % dlc_id
else:
folder = self.res_folder
dlc_files = (glob.glob(os.path.join(self.res_path, folder, "*" + ext)))
if len(dlc_files) == 0:
raise Exception('DLC%s included in fatigue analysis, but no *%s files found in:\n%s' %
(dlc_id, ext, os.path.join(self.res_path, folder)))
files.extend(dlc_files)
keys = list(zip(*self.dist_value_keys))[1]
fmt = self.format_tag_value
tags = [[fmt(tag.replace(key, "")) for tag, key in zip(os.path.basename(f).split("_"), keys)] for f in files]
tags = [[fmt(tag.replace(key, "")) for tag, key in zip(os.path.basename(f).lower().split("_"), keys)] for f in files]
dlc_tags = list(zip(*tags))[0]
files_dict = {dlc_tag: {} for dlc_tag in dlc_tags}
for tag_row, f in zip(tags, files):
d = files_dict[tag_row[0]]
for tag in tag_row[1:]:
@@ -227,7 +294,7 @@ class DLCHighLevel(object):
total_prop *= prop
return total_prop
def file_hour_lst(self, years=20, files_dict=None, dist_dict=None, files=None):
"""Create a list of (filename, hours_pr_year) that can be used as input for LifeTimeEqLoad
Returns
@@ -239,17 +306,21 @@
"""
fh_lst = []
if dist_dict is None:
dist_dict = self.fatigue_distribution()
if files_dict is None:
files_dict = self.files_dict(files=files)
for dlc_id in sorted(dist_dict.keys()):
dlc_id = str(dlc_id)
fmt = self.format_tag_value
def tag_prop_lst(dist_lst):
if len(dist_lst) == 0:
return [[]]
return [[(fmt(tag), prop)] + tl for tl in tag_prop_lst(dist_lst[1:])
for tag, prop in dist_lst[0].items()]
def files_from_tags(self, f_dict, tags):
if len(tags) == 0:
@@ -260,7 +331,7 @@ class DLCHighLevel(object):
if self.dist_value_keys[-len(tags)][1] == "wdir":
try:
return files_from_tags(self, f_dict[tags[0] % 360], tags[1:])
except Exception:
pass
raise
@@ -270,7 +341,8 @@ class DLCHighLevel(object):
files = (files_from_tags(self, files_dict, tags))
except KeyError:
if self.fail_on_resfile_not_found:
raise FileNotFoundError("Result files for %s not found" % (", ".join(["%s='%s'" % (dv[1], t) for dv, t in zip(self.dist_value_keys, tags)])))
raise FileNotFoundError("Result files for %s not found" % (
", ".join(["%s='%s'" % (dv[1], t) for dv, t in zip(self.dist_value_keys, tags)])))
else:
continue
if files:
@@ -281,19 +353,21 @@ class DLCHighLevel(object):
return fh_lst
def dlc_lst(self, load='all'):
dlc_lst = np.array(self.dlc_df['dlc'])[np.array(
[load == 'all' or load.lower() in d.lower() for d in self.dlc_df['load']])]
return [v.lower().replace('dlc', '') for v in dlc_lst]
@cache_function
def psf(self):
return {dlc: float((psf, 1)[psf == ""])
for dlc, psf in zip(self.dlc_df['dlc'], self.dlc_df['psf']) if dlc != ""}
if __name__ == "__main__":
# dlc_hl = DLCHighLevel(r'X:\DTU10MW\Q0010\DLC_post_betas1.xlsx')
# #print (DLCHighLevelInputFile(r'C:\mmpe\Projects\DLC.xlsx').sensor_info(0, 0, 1)['Name'])
# #print (dlc_dict()['64'])
# #print (dlc_hl.fatigue_distribution()['64'])
# print(dlc_hl.file_hour_lst(r"X:\DTU10MW/Q0010/res/"))
dlc = DLCHighLevel(r'C:\Users\mmpe\Downloads\Post Processing v7 - FATIGUE.xlsx', fail_on_resfile_not_found=False)
print(dlc.file_hour_lst())
File added
@@ -3,14 +3,8 @@ Created on 09/10/2014
@author: MMPE
'''
import unittest
from wetb.dlc.high_level import DLCHighLevel, Weibull, Weibull_IEC
import os
import numpy as np
@@ -26,7 +20,7 @@ class TestDLCHighLevel(unittest.TestCase):
self.assertEqual(os.path.realpath(self.dlc_hl.res_path), os.path.realpath(testfilepath + "res"))
def test_sensor_info(self):
self.assertEqual(list(self.dlc_hl.sensor_info().name), ['MxTB', 'MyTB', 'MxBR', 'PyBT', 'Power', 'Pitch', 'PitchBearing', 'Tip1TowerDistance', 'TipTowerDistance'])
def test_sensor_info_filter(self):
self.assertEqual(list(self.dlc_hl.sensor_info(['fatigue']).m), [4, 4, 10])
@@ -56,6 +50,12 @@ class TestDLCHighLevel(unittest.TestCase):
self.assertEqual(os.path.abspath(f), os.path.abspath(testfilepath + 'res/dlc31_iec61400-1ed3/dlc31_wsp25_wdir000_s0000.sel'))
self.assertAlmostEqual(h, 0.0087201928 * 1 * (50 / 1100) * 20 * 365 * 24)
def test_file_dict_flex(self):
dlc_hl = DLCHighLevel(testfilepath + 'DLC_test_flex.xlsx')
file_lst = dlc_hl.files_dict()[12][4][350]["files"]
self.assertEqual(len(file_lst), 1)
self.assertTrue(file_lst[0].endswith(".int"))
def test_dlc_lst(self):
self.assertEqual(self.dlc_hl.dlc_lst(), ['12', '13', '14', '31'])
@@ -109,7 +109,10 @@ class TestDLCHighLevel(unittest.TestCase):
p_tot = np.array([value for key, value in weibull.items()]).sum()
self.assertTrue(np.allclose(p_tot, 1.0))
def test_weibull_IEC(self):
Vref = 50
np.testing.assert_array_almost_equal(Weibull_IEC(Vref, [4,6,8]), [ 0.11002961, 0.14116891, 0.15124155])
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
...
from wetb.hawc2.htc_file import HTCFile
from wetb.hawc2.tests import test_files
import os
def main():
if __name__ == '__main__':
# ======================================================================
# load existing htc file
# ======================================================================
path = os.path.dirname(test_files.__file__) + "/simulation_setup/DTU10MWRef6.0/"
htc = HTCFile(path + "htc/DTU_10MW_RWT.htc")
# ======================================================================
# modify wind speed and turbulence intensity
# ======================================================================
htc.wind.wsp = 10
# access wind section and change turbulence intensity
wind = htc.wind
wind.tint = .1
# ======================================================================
# print contents
# ======================================================================
print(htc) # print htc file
print(htc.keys()) # print htc sections
print(wind) # print wind section
# ======================================================================
# change tilt angle
# ======================================================================
orientation = htc.new_htc_structure.orientation
# Two ways to access the relative orientation between towertop and shaft
# 1) Knowing that it is the second relative section:
rel_tt_shaft = orientation.relative__2
# 2) Knowing that the section contains a field "body1" with value "towertop"
rel_tt_shaft = orientation.get_subsection_by_name(name='towertop', field='body1')
rel_tt_shaft.body2_eulerang__2 = 6, 0, 0
print(rel_tt_shaft.body2_eulerang__2)
# ======================================================================
# set time, name and save
# ======================================================================
# set time of simulation, first output section and wind.scale_time_start
htc.set_time(start=5, stop=10, step=0.1)
# change name of logfile, animation, visualization and first output section
htc.set_name("tmp_wsp10_tint0.1_tilt6")
htc.save() # Save the modified htc file as "tmp_wsp10_tint0.1_tilt6"
# ======================================================================
# run simulation
# ======================================================================
# htc.simulate("<path2hawc2>/hawc2mb.exe")
main()
"""
Created on Wed Oct 10 12:47:10 2018
@author: dave
"""
import os
from wetb.prepost import windIO
from wetb.hawc2 import Hawc2io
from wetb.prepost import hawcstab2
if __name__ == '__main__':
# =============================================================================
# READ HAWC2 RESULT FILE
# =============================================================================
# METHOD A
fname = '../wetb/hawc2/tests/test_files/hawc2io/Hawc2ascii.sel'
res = Hawc2io.ReadHawc2(fname)
sig = res.ReadAll()
# METHOD B
path, file = os.path.dirname(fname), os.path.basename(fname)
res = windIO.LoadResults(path, file)
sig = res.sig
sel = res.ch_details
# result in dataframe with unique channel names (instead of indices)
sig_df = res.sig2df()
ch_df = res.ch_df
# =============================================================================
# READ HAWCStab2 files
# =============================================================================
res = hawcstab2.results()
fname = '../wetb/prepost/tests/data/campbell_diagram.cmb'
df_cmb = res.load_cmb_df(fname)
fname = '../wetb/prepost/tests/data/dtu10mw_v1_defl_u10000.ind'
df_ind = res.load_ind(fname)
fname = '../wetb/prepost/tests/data/dtu10mw.opt'
df_opt = res.load_operation(fname)
fname = '../wetb/prepost/tests/data/dtu10mw_v1.pwr'
df_pwr, units = res.load_pwr_df(fname)
fname = '../wetb/prepost/tests/data/controller_input_quadratic.txt'
tuning = hawcstab2.ReadControlTuning()
tuning.read_parameters(fname)
# tuning parameters are saved as attributes
tuning.pi_gen_reg1.K
tuning.pi_gen_reg2.I
tuning.pi_gen_reg2.Kp
tuning.pi_gen_reg2.Ki
tuning.pi_gen_reg2.Kd
tuning.pi_pitch_reg3.Kp
tuning.pi_pitch_reg3.Ki
tuning.pi_pitch_reg3.Kd
tuning.pi_pitch_reg3.K1
tuning.pi_pitch_reg3.K2
tuning.aero_damp.Kp2
tuning.aero_damp.Ko1
tuning.aero_damp.Ko2
# or you can get them as a dictionary
tune_tags = tuning.parameters2tags()
'''
Created on 03/09/2015
@author: MMPE
'''
import os
import numpy as np
import struct
from itertools import takewhile
def load_output(filename):
"""Load a FAST binary or ascii output file
@@ -38,32 +25,44 @@ def load_output(filename):
"""
assert os.path.isfile(filename), "File, %s, does not exists" % filename
with open(filename, 'r') as f:
try:
f.readline()
except UnicodeDecodeError:
return load_binary_output(filename)
return load_ascii_output(filename)
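# Usage sketch (file name is hypothetical): ASCII and binary files resolve to
# the same (data, info) structure, with time in column 0 of `data`:
# data, info = load_output('5MW_Land.outb')
# i = info['attribute_names'].index('RotPwr')
# time, rotpwr = data[:, 0], data[:, i]
# print(info['attribute_units'][i])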
def load_ascii_output(filename):
with open(filename) as f:
info = {}
info['name'] = os.path.splitext(os.path.basename(filename))[0]
# Header is whatever is before the keyword `time`
in_header = True
header = []
while in_header:
l = f.readline()
if not l:
raise Exception('Error finding the end of FAST out file header. Keyword Time missing.')
in_header = (l + ' dummy').lower().split()[0] != 'time'
if in_header:
header.append(l)
else:
info['description'] = header
info['attribute_names'] = l.split()
info['attribute_units'] = [unit[1:-1] for unit in f.readline().split()]
# Data, up to end of file or empty line (potential comment line at the end)
data = np.array([l.strip().split() for l in takewhile(
lambda x: len(x.strip()) > 0, f.readlines())]).astype(float)
return data, info
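# Expected ASCII layout (sketch, inferred from the parsing above): a free-form
# header of any length, a channel-name line whose first token is "Time", a
# unit line such as "(s) (kW) ...", then whitespace-separated numeric rows up
# to an empty line or the end of the file.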
def load_binary_output(filename, use_buffer=True):
"""
03/09/15: Ported from ReadFASTbinary.m by Mads M Pedersen, DTU Wind
24/10/18: Low memory/buffered version by E. Branlard, NREL
Info about ReadFASTbinary.m:
% Author: Bonnie Jonkman, National Renewable Energy Laboratory
% (c) 2012, National Renewable Energy Laboratory
@@ -71,87 +70,156 @@
% Edited for FAST v7.02.00b-bjj 22-Oct-2012
"""
def fread(fid, n, type):
fmt, nbytes = {
'uint8': ('B', 1),
'int16': ('h', 2),
'int32': ('i', 4),
'float32': ('f', 4),
'float64': ('d', 8)}[type]
return struct.unpack(fmt * n, fid.read(nbytes * n))
def freadRowOrderTableBuffered(fid, n, type_in, nCols, nOff=0, type_out='float64'):
"""
Reads a row-ordered table from a binary file.
Read `n` data of type `type_in`, assumed to be a row ordered table of `nCols` columns.
Memory usage is optimized by allocating the data only once.
Buffered reading is done for improved performance (in particular for 32-bit python).
`nOff` allows for additional column space at the beginning of the storage table.
Typically, `nOff=1` provides a column at the beginning to store the time vector.
@author E.Branlard, NREL
"""
fmt, nbytes = {'uint8': ('B', 1), 'int16': ('h', 2), 'int32': ('i', 4),
'float32': ('f', 4), 'float64': ('d', 8)}[type_in]
nLines = int(n / nCols)
GoodBufferSize = 4096 * 40
nLinesPerBuffer = int(GoodBufferSize / nCols)
BufferSize = nCols * nLinesPerBuffer
nBuffer = int(n / BufferSize)
# Allocation of data
data = np.zeros((nLines, nCols + nOff), dtype=type_out)
# Reading
try:
nIntRead = 0
nLinesRead = 0
while nIntRead < n:
nIntToRead = min(n - nIntRead, BufferSize)
nLinesToRead = int(nIntToRead / nCols)
Buffer = np.array(struct.unpack(fmt * nIntToRead, fid.read(nbytes * nIntToRead)))
Buffer = Buffer.reshape(-1, nCols)
data[nLinesRead:(nLinesRead + nLinesToRead), nOff:(nOff + nCols)] = Buffer
nLinesRead = nLinesRead + nLinesToRead
nIntRead = nIntRead + nIntToRead
except Exception:
raise Exception('Read only %d of %d values in file: %s' % (nIntRead, n, filename))
return data
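# Sizing sketch (illustrative): with nCols=10 channels,
# nLinesPerBuffer = int(4096*40/10) = 16384, so each struct.unpack call
# reads 16384*10 int16 values (~320 KB) instead of the whole record table
# at once.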
FileFmtID_WithTime = 1 # % File identifiers used in FAST
FileFmtID_WithoutTime = 2
FileFmtID_NoCompressWithoutTime = 3
FileFmtID_ChanLen_In = 4 # no time channel; channel name length is stored in the file
with open(filename, 'rb') as fid:
FileID = fread(fid, 1, 'int16')[0] # ; % FAST output file format, INT(2)
if FileID not in [FileFmtID_WithTime, FileFmtID_WithoutTime,
FileFmtID_ChanLen_In, FileFmtID_NoCompressWithoutTime]:
raise Exception('FileID not supported {}. Is it a FAST binary file?'.format(FileID))
if FileID == FileFmtID_ChanLen_In:
LenName = fread(fid, 1, 'int16')[0] # Number of characters in channel names and units
else:
LenName = 10 # default number of characters per channel name
NumOutChans = fread(fid, 1, 'int32')[0] # ; % The number of output channels, INT(4)
NT = fread(fid, 1, 'int32')[0] # ; % The number of time steps, INT(4)
if FileID == FileFmtID_WithTime:
TimeScl = fread(fid, 1, 'float64') # ; % The time slopes for scaling, REAL(8)
TimeOff = fread(fid, 1, 'float64') # ; % The time offsets for scaling, REAL(8)
else:
TimeOut1 = fread(fid, 1, 'float64') # ; % The first time in the time series, REAL(8)
TimeIncr = fread(fid, 1, 'float64') # ; % The time increment, REAL(8)
if FileID == FileFmtID_NoCompressWithoutTime:
ColScl = np.ones(NumOutChans)
ColOff = np.zeros(NumOutChans)
else:
ColScl = fread(fid, NumOutChans, 'float32') # ; % The channel slopes for scaling, REAL(4)
ColOff = fread(fid, NumOutChans, 'float32') # ; % The channel offsets for scaling, REAL(4)
LenDesc = fread(fid, 1, 'int32')[0] # ; % The number of characters in the description string, INT(4)
DescStrASCII = fread(fid, LenDesc, 'uint8') # ; % DescStr converted to ASCII
DescStr = "".join(map(chr, DescStrASCII)).strip()
ChanName = [] # initialize the ChanName cell array
for iChan in range(NumOutChans + 1):
ChanNameASCII = fread(fid, LenName, 'uint8') # ; % ChanName converted to numeric ASCII
ChanName.append("".join(map(chr, ChanNameASCII)).strip())
ChanUnit = [] # initialize the ChanUnit cell array
for iChan in range(NumOutChans + 1):
ChanUnitASCII = fread(fid, LenName, 'uint8') # ; % ChanUnit converted to numeric ASCII
ChanUnit.append("".join(map(chr, ChanUnitASCII)).strip()[1:-1])
# %-------------------------
# % get the channel time series
# %-------------------------
nPts = NT * NumOutChans # ; % number of data points in the file
if FileID == FileFmtID_WithTime:
PackedTime = fread(fid, NT, 'int32') # ; % read the time data
cnt = len(PackedTime)
if cnt < NT:
raise Exception('Could not read entire %s file: read %d of %d time values' % (filename, cnt, NT))
if use_buffer:
# Reading data using buffers, and allowing an offset for time column (nOff=1)
if FileID == FileFmtID_NoCompressWithoutTime:
data = freadRowOrderTableBuffered(fid, nPts, 'float64', NumOutChans, nOff=1, type_out='float64')
else:
data = freadRowOrderTableBuffered(fid, nPts, 'int16', NumOutChans, nOff=1, type_out='float64')
else:
# NOTE: unpacking huge data not possible on 32bit machines
if FileID == FileFmtID_NoCompressWithoutTime:
PackedData = fread(fid, nPts, 'float64') # ; % read the channel data
else:
PackedData = fread(fid, nPts, 'int16') # ; % read the channel data
cnt = len(PackedData)
if cnt < nPts:
raise Exception('Could not read entire %s file: read %d of %d values' % (filename, cnt, nPts))
data = np.array(PackedData).reshape(NT, NumOutChans)
del PackedData
if FileID == FileFmtID_WithTime:
time = (np.array(PackedTime) - TimeOff) / TimeScl
else:
time = TimeOut1 + TimeIncr * np.arange(NT)
# %-------------------------
# % Scale the packed binary to real data
# %-------------------------
if use_buffer:
# Scaling Data
for iCol in range(NumOutChans):
data[:, iCol + 1] = (data[:, iCol + 1] - ColOff[iCol]) / ColScl[iCol]
# Adding time column
data[:, 0] = time
else:
# NOTE: memory expensive due to time conversion, and concatenation
data = (data - ColOff) / ColScl
data = np.concatenate([time.reshape(NT, 1), data], 1)
info = {'name': os.path.splitext(os.path.basename(filename))[0],
'description': DescStr,
'attribute_names': ChanName,
'attribute_units': ChanUnit}
return data, info
'''
Created on 03/09/2015
@author: MMPE
'''
from tests import npt
import pytest
import numpy as np
from wetb.fast.fast_io import load_output, load_binary_output
import os
testfilepath = os.path.join(os.path.dirname(__file__), 'test_files/') # test file path
def test_load_output():
data, info = load_output(testfilepath + 'DTU10MW.out')
npt.assert_equal(data[4, 3], 4.295E-04)
npt.assert_equal(info['name'], "DTU10MW")
npt.assert_equal(info['attribute_names'][1], "RotPwr")
npt.assert_equal(info['attribute_units'][1], "kW")
def test_load_binary():
data, info = load_output(testfilepath + 'test_binary.outb')
npt.assert_equal(info['name'], 'test_binary')
npt.assert_equal(info['description'], 'Modified by mwDeriveSensors on 27-Jul-2015 16:32:06')
npt.assert_equal(info['attribute_names'][4], 'RotPwr')
npt.assert_equal(info['attribute_units'][7], 'deg/s^2')
npt.assert_almost_equal(data[10, 4], 138.822277739535)
def test_load_binary_buffered():
# The old method was not using a buffer and was also memory expensive
# Now use_buffer is set to true by default
data, info = load_binary_output(testfilepath + 'test_binary.outb', use_buffer=True)
data_old, info_old = load_binary_output(testfilepath + 'test_binary.outb', use_buffer=False)
npt.assert_equal(info['name'], info_old['name'])
np.testing.assert_array_equal(data[0, :], data_old[0, :])
np.testing.assert_array_equal(data[-1, :], data_old[-1, :])
@pytest.mark.parametrize('fid,tol', [(2, 1), (3, 1e-4)])
@pytest.mark.parametrize('buffer', [True, False])
def test_load_binary_fid(fid, tol, buffer):
data2, info2 = load_output(testfilepath + '5MW_Land_BD_DLL_WTurb_fid04.outb')
data, info = load_binary_output(testfilepath + '5MW_Land_BD_DLL_WTurb_fid%02d.outb' % fid, use_buffer=buffer)
for k, v in info2.items():
if k not in {'name', 'description'}:
npt.assert_array_equal(info[k], v)
r = data.max(0) - data.min(0) + 1e-20
npt.assert_array_less(np.abs(data - data2).max(0), r * tol)
def test_load_output2():
data, info = load_output(testfilepath + 'DTU10MW.out')
npt.assert_equal(info['name'], "DTU10MW")
npt.assert_equal(info['attribute_names'][1], "RotPwr")
npt.assert_equal(info['attribute_units'][1], "kW")
def test_load_output3():
# This file has an extra comment at the end
data, info = load_output(testfilepath + 'FASTOut_Hydro.out')
npt.assert_almost_equal(data[3, 1], -1.0E+01)