Compare revisions: WindEnergyToolbox
Showing with 1379 additions and 550 deletions
import os
import numpy as np
import pandas as pd
import itertools
"""IEC refers to IEC61400-1(2005)
Questions:
dlc1.4 (hansen: 100s, no turb), why not NTM, NWP 600s and 6 seeds
seed numbering and reuse???
"""
class DLC():
    def __init__(self, Description, Operation, Turb, Shear, Gust, Fault=None, variables=None):
        self.Description = Description
        # If a model is given as a (function, params) tuple, wrap it so that the
        # params are applied as default keyword arguments when the model is called
        for attr, model in [('Operation', Operation), ('Turb', Turb), ('Shear', Shear),
                            ('Gust', Gust), ('Fault', Fault)]:
            if isinstance(model, tuple):
                func, params = model

                def wrapper(*args, func=func, params=params, **kwargs):
                    return func(*args, **{**params, **kwargs})
                setattr(self, attr, wrapper)
            else:
                setattr(self, attr, model)
        variables = {} if variables is None else variables  # avoid mutable default argument
        self.variables = variables
        self.variables.update({k.lower(): v for k, v in variables.items()})
        turb_class = self.iec_wt_class[1].lower()
        assert turb_class in 'abc'
        self.I_ref = {'a': .16, 'b': .14, 'c': .12}[turb_class]  # IEC61400-1(2005) table 1
        wind_class = int(self.iec_wt_class[0])
        assert 1 <= wind_class <= 3
        self.V_ref = {1: 50, 2: 42.5, 3: 37.5}[wind_class]
        if variables.get("seed"):
            self.rng = np.random.default_rng(seed=variables["seed"])
    @classmethod
    def getattr(cls, name):
        try:
            return getattr(cls, name)
        except AttributeError as e:
            d = {k.lower(): k for k in dir(cls)}
            if name.lower() in d:
                return getattr(cls, d[name.lower()])
            else:
                raise e

    def __getattr__(self, name):
        try:
            return self.__getattribute__(name)
        except AttributeError as e:
            if name in self.variables:
                return self.variables[name]
            elif name.lower() in self.variables:
                return self.variables[name.lower()]
            else:
                raise e

    def get_lst(self, x):
        if isinstance(x, pd.Series):
            x = x.iloc[0]
        if ":" in str(x):
            start, step, stop = [float(eval(v, globals(), self.variables)) for v in x.split(":")]
            return list(np.arange(start, stop + step, step))
        else:
            return [float(eval(v, globals(), self.variables)) for v in str(x).replace("/", ",").split(",")]
    @property
    def wsp_lst(self):
        return sorted(self.get_lst(self.WSP))

    @property
    def wdir_lst(self):
        return self.get_lst(self.Wdir)

    def case_arg_lst_product(self, **kwargs):
        case_arg_lst = []
        for dict_lst in itertools.product(
                *[m(self, **kwargs) for m in [DLC.WspWdir, self.Turb, self.Shear,
                                              self.Gust, self.Fault, self.Operation] if m is not None]):
            ids = {k: v for d in dict_lst for k, v in d.items() if '_id' in k}
            d = {k: v for d in dict_lst for k, v in d.items() if '_id' not in k}
            name = [self.Name, 'wsp%02d' % d['V_hub'], "wdir%03d" % (d['wdir'] % 360)]
            if 'seed_id' in ids:
                name.append("s%04d" % d['seed'])
            if 'ews_id' in ids:
                name.append("ews%s" % d['shear']['sign'])
            if 'edc_id' in ids:
                name.append(ids['edc_id'])
            if 'T_id' in ids:
                name.append(ids['T_id'])
            if 'Azi_id' in ids:
                name.append(ids['Azi_id'])
            d['Name'] = "_".join(name)
            case_arg_lst.append(d)
        return case_arg_lst
    def to_pandas(self):
        case_dict_lst = []
        for V_hub in self.wsp_lst:
            for wdir in self.wdir_lst:
                default_kwargs = {'Folder': self.Name,
                                  'simulation_time': self.Time,
                                  'V_hub': V_hub,
                                  'wdir': wdir,
                                  }
                for case_args in self.case_arg_lst_product(**default_kwargs):
                    case_args.update({k: v for k, v in default_kwargs.items() if k not in case_args})
                    case_dict_lst.append(case_args)
        cols = ['Name', 'Folder', 'V_hub', 'wdir', 'simulation_time']
        cols += [k for k in case_args.keys() if k not in cols]
        return pd.DataFrame(case_dict_lst, columns=cols)

    # ===========================================================================
    # General
    # ===========================================================================
    def WspWdir(self, V_hub, wdir, **_):
        return [{'V_hub': V_hub, 'wdir': wdir}]

    def NONE(self, **_):
        return [{}]

    def NaN(self, **_):
        return [{}]
    # ===========================================================================
    # Turbulence models
    # ===========================================================================
    def NoTurb(self, **_):
        return [{'seed': None}]

    def ConstantTurb(self, ti, **_):
        s0 = 1001
        if self.seed:
            return [{'seed_id': 's%04d' % s,
                     'ti': ti,
                     'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
        else:
            return [{'seed_id': 's%04d' % s,
                     'ti': ti,
                     'seed': s} for s in range(s0, s0 + int(self.Seeds))]

    def NTM(self, V_hub, **_):
        # Normal turbulence model, IEC section 6.3.1.3
        s0 = int((V_hub - self.Vin) // self.Vstep * 100 + 1001)
        ti = (self.I_ref * (0.75 * V_hub + 5.6)) / V_hub  # IEC (11)
        if self.seed:
            return [{'seed_id': 's%04d' % s,
                     'ti': ti,
                     'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
        else:
            return [{'seed_id': 's%04d' % s,
                     'ti': ti,
                     'seed': s} for s in range(s0, s0 + int(self.Seeds))]

    def ETM(self, V_hub, **_):
        # Extreme turbulence model
        # IEC (9)
        V_ave = .2 * self.V_ref
        # IEC (19)
        c = 2
        ti = c * self.I_ref * (0.072 * (V_ave / c + 3) * (V_hub / c - 4) + 10) / V_hub
        s0 = int((V_hub - self.Vin) // self.Vstep * 100 + 1001)
        if self.seed:
            return [{'seed_id': 's%04d' % s,
                     'ti': ti,
                     'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
        else:
            return [{'seed_id': 's%04d' % s,
                     'ti': ti,
                     'seed': s} for s in range(s0, s0 + int(self.Seeds))]
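    # Worked example (illustrative, not from the source): for a class "A" turbine
    # (I_ref = 0.16) at V_hub = 10 m/s, NTM gives
    #     ti = 0.16 * (0.75 * 10 + 5.6) / 10 = 0.2096  (~21% turbulence intensity)
    # and ETM for wind class 1 (V_ref = 50, so V_ave = 10, c = 2) gives
    #     ti = 2 * 0.16 * (0.072 * (10 / 2 + 3) * (10 / 2 - 4) + 10) / 10 = 0.338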
    # ===========================================================================
    # Shear profiles
    # ===========================================================================
    def NWP(self, alpha, **_):
        # Normal wind profile model, IEC section 6.3.1.2
        return [{'shear': {'type': 'NWP', 'profile': ('power', alpha)}}]

    def EWS(self, V_hub, alpha, **_):
        # Extreme wind shear, IEC section 6.3.2.6
        beta = 6.4
        T = 12
        sigma_1 = self.I_ref * (0.75 * V_hub + 5.6)  # IEC (11)
        D = self.D
        A = (2.5 + 0.2 * beta * sigma_1 * (D / self.lambda_1)**0.25) / D
        return [{'shear': {'type': 'EWS', 'profile': ('power', alpha), 'A': A, 'T': T, 'sign': ews_sign},
                 'ews_id': ews_sign}
                for ews_sign in ['++', '+-', '-+', '--']]

    # ===========================================================================
    # Gusts
    # ===========================================================================
    def ECD(self, V_hub, **_):
        # Extreme coherent gust with direction change, IEC section 6.3.2.5
        # IEC (22)
        V_cg = 15  # magnitude of gust
        # IEC (24): 180 deg for V_hub < 4 m/s, otherwise 720/V_hub
        theta_cg = (720 / V_hub, 180)[V_hub < 4]  # direction change
        T = 10  # rise time
        return [{'Gust': {'type': 'ECD', 'V_cg': V_cg, 'theta_cg': theta_cg, 'T': T}}]

    def EDC(self, V_hub, **_):
        # Extreme direction change
        phi = 4 * (np.rad2deg(np.arctan(self.I_ref * (0.75 * V_hub + 5.6) / V_hub / (1 + 0.1 * self.D / self.lambda_1))))
        T = 10  # duration
        return [{'Gust': {'type': 'EDC', 'phi': sign * phi, 'T': T}, 'edc_id': {-1: '-', 1: '+'}[sign]} for sign in [-1, 1]]

    def EOG(self, V_hub, **_):
        # Extreme operating gust
        V_gust = min(1.35 * (0.8 * 1.4 * self.V_ref - V_hub),
                     3.3 * self.I_ref * (0.75 * V_hub + 5.6) / (1 + 0.1 * self.D / self.lambda_1))  # magnitude of gust
        T = 10.5  # duration
        return [{'Gust': {'type': 'EOG', 'V_gust': V_gust, 'T': T}}]
    # ===========================================================================
    # Faults
    # ===========================================================================
    def StuckBlade(self, t, pitch, **_):
        if (not isinstance(t, list)) and (not isinstance(pitch, list)):
            return [{'Fault': {'type': 'StuckBlade', 'pitch_servo': self.pitch_servo, 'T': t, 'pitch': pitch}}]
        else:
            # make sure both t and pitch are lists before taking their product, and
            # assign the current (T, p) combination (not the full lists) to each case
            t = t if isinstance(t, list) else [t]
            pitch = pitch if isinstance(pitch, list) else [pitch]
            return [{'Fault': {'type': 'StuckBlade', 'pitch_servo': self.pitch_servo, 'T': T, 'pitch': p},
                     'T_id': 't' + str(t.index(T)) + 'p' + str(pitch.index(p))}
                    for T, p in itertools.product(t, pitch)]

    def PitchRunaway(self, t, **_):
        if not isinstance(t, list):
            return [{'Fault': {'type': 'PitchRunaway', 'pitch_servo': self.pitch_servo, 'T': t}}]
        else:
            return [{'Fault': {'type': 'PitchRunaway', 'pitch_servo': self.pitch_servo, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def GridLoss(self, t, **_):
        if not isinstance(t, list):
            return [{'Fault': {'type': 'GridLoss', 'generator_servo': self.generator_servo, 'T': t}}]
        else:
            return [{'Fault': {'type': 'GridLoss', 'generator_servo': self.generator_servo, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    # ===========================================================================
    # Operations
    # ===========================================================================
    def PowerProduction(self, **_):
        return [{}]

    def StartUp(self, t, **_):
        if not isinstance(t, list):
            return [{'Operation': {'type': 'StartUp', 'controller': self.controller, 'T': t}}]
        else:
            return [{'Operation': {'type': 'StartUp', 'controller': self.controller, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def ShutDown(self, t, **_):
        if not isinstance(t, list):
            return [{'Operation': {'type': 'ShutDown', 'controller': self.controller, 'T': t}}]
        else:
            return [{'Operation': {'type': 'ShutDown', 'controller': self.controller, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def EmergencyShutDown(self, t, **_):
        if not isinstance(t, list):
            return [{'Operation': {'type': 'EmergencyShutDown', 'controller': self.controller, 'T': t}}]
        else:
            return [{'Operation': {'type': 'EmergencyShutDown', 'controller': self.controller, 'T': T},
                     'T_id': 't' + str(t.index(T))} for T in t]

    def Parked(self, **_):
        return [{'Operation': {'type': 'Parked', 'controller': self.controller}}]

    def RotorLocked(self, azimuth, **_):
        if not isinstance(azimuth, list):
            return [{'Operation': {'type': 'RotorLocked', 'controller': self.controller,
                                   'shaft': self.shaft, 'shaft_constraint': self.shaft_constraint, 'Azi': azimuth}}]
        else:
            return [{'Operation': {'type': 'RotorLocked', 'controller': self.controller,
                                   'shaft': self.shaft, 'shaft_constraint': self.shaft_constraint, 'Azi': azi},
                     'Azi_id': 'azi' + f"{azi:03}"} for azi in azimuth]
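# Illustrative sketch (not part of the library): DLC.get_lst expands the WSP/Wdir
# strings used in the DLC definitions below. "start:step:stop" becomes an inclusive
# range evaluated against the variables dict, and "a/b/c" an explicit list, e.g.:
#
#     def expand(expr, variables):  # minimal standalone version, numeric input assumed
#         if ":" in expr:
#             start, step, stop = [float(eval(v, {}, variables)) for v in expr.split(":")]
#             return list(np.arange(start, stop + step, step))
#         return [float(eval(v, {}, variables)) for v in expr.replace("/", ",").split(",")]
#
#     expand("Vin:Vstep:Vout", {"Vin": 4, "Vstep": 2, "Vout": 8})  # [4.0, 6.0, 8.0]
#     expand("Vr-2/Vr+2", {"Vr": 10})                              # [8.0, 12.0]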
class DLB():
    def __init__(self, dlc_definitions, variables):
        cols = ['Name', 'Description', 'Operation', 'WSP', 'Wdir', 'Turb', 'Seeds', 'Shear', 'Gust', 'Fault', 'Time']
        self.dlcs = pd.DataFrame(dlc_definitions, columns=cols, index=[dlc['Name'] for dlc in dlc_definitions])

        var_name_desc = [('iec_wt_class', 'IEC wind turbine class, e.g. 1A'),
                         ('tiref', 'Reference turbulence intensity'),
                         ('Vin', 'Cut-in wind speed'),
                         ('Vout', 'Cut-out wind speed'),
                         ('Vr', 'Rated wind speed'),
                         ('Vref', 'Reference wind speed'),
                         ('V1', '1-year recurrence period wind speed'),
                         ('Ve50', 'Extreme 50-year recurrence period wind speed'),
                         ('Ve1', 'Extreme 1-year recurrence period wind speed'),
                         ('Vmaint', 'Maximum wind speed for maintenance'),
                         ('Vstep', 'Wind speed distribution step'),
                         ('D', 'Rotor diameter'),
                         ('z_hub', 'Hub height'),
                         ('lambda_1', 'Longitudinal turbulence scale parameter'),
                         ('controller', 'Filename of controller DLL'),
                         ('generator_servo', 'Filename of generator servo DLL'),
                         ('pitch_servo', 'Filename of pitch servo DLL'),
                         ('best_azimuth', 'Best blade azimuth for maintenance'),
                         ('shaft', 'Name of shaft body'),
                         ('shaft_constraint', 'Name of constraint between tower and shaft'),
                         ('seed', 'Seed to initialize the RNG for turbulence seed generation')]
        self.variables = pd.DataFrame([{'Name': n, 'Value': variables[n], 'Description': d}
                                       for n, d in var_name_desc], columns=['Name', 'Value', 'Description'],
                                      index=[n for (n, d) in var_name_desc])

    @property
    def dlb(self):
        if not hasattr(self, '_dlb'):
            self.generate_DLB()
        return self._dlb

    def keys(self):
        return [n for n in self.dlcs['Name'] if not str(n).lower().strip() in ['', 'none', 'nan']]

    def to_excel(self, filename):
        if os.path.dirname(filename) != "":
            os.makedirs(os.path.dirname(filename), exist_ok=True)
        writer = pd.ExcelWriter(filename)
        self.dlcs.to_excel(writer, 'DLC', index=False)
        self.variables.to_excel(writer, 'Variables', index=False)
        writer.close()

    @staticmethod
    def from_excel(filename):
        df_vars = pd.read_excel(filename, sheet_name='Variables', index_col='Name')
        df_vars.fillna('', inplace=True)
        variables = {name: value for name, value in zip(df_vars.index, df_vars.Value.values)}

        dlb_def = pd.read_excel(filename, 'DLC')
        dlb_def.columns = [c.strip() for c in dlb_def.columns]
        return DLB([row for _, row in dlb_def.iterrows() if row['Name'] is not np.nan], variables)

    def __getitem__(self, key):
        return self.dlb[key]

    def _make_dlc(self, dlc_name):
        dlc_definition = self.dlcs.loc[dlc_name]
        kwargs = {k: (DLC.getattr(str(dlc_definition[k][0])), dlc_definition[k][1]) if isinstance(dlc_definition[k], tuple)
                  else DLC.getattr(str(dlc_definition[k])) for k in ['Operation', 'Turb', 'Shear', 'Gust', 'Fault']}
        kwargs['Description'] = dlc_definition['Description']
        variables = {v['Name']: v['Value'] for _, v in self.variables.iterrows()}
        variables.update(dlc_definition)
        dlc = DLC(variables=variables, **kwargs)
        df = dlc.to_pandas()
        name = dlc_definition['Name']
        df.insert(0, 'DLC', name)
        return df

    def generate_DLB(self):
        self._dlb = {dlc: self._make_dlc(dlc) for dlc in self.keys()}
        return self._dlb

    def to_pandas(self):
        return pd.concat(self.dlb.values(), sort=False)

    def cases_to_excel(self, filename):
        if os.path.dirname(filename) != "":
            os.makedirs(os.path.dirname(filename), exist_ok=True)
        writer = pd.ExcelWriter(filename)
        for k in self.keys():
            self[k].to_excel(writer, k, index=False)
        writer.close()
class DTU_IEC61400_1_Ref_DLB(DLB):
    def __init__(self, iec_wt_class, Vin, Vout, Vr, Vmaint, D, z_hub,
                 controller, generator_servo, pitch_servo, best_azimuth,
                 Vstep=2, seed=None, alpha=0.2, alpha_extreme=0.11, ti_extreme=0.11,
                 shaft='shaft', shaft_constraint='shaft_rot'):
        Name, Description, Operation, WSP, Wdir, Time = 'Name', 'Description', 'Operation', 'WSP', 'Wdir', 'Time'
        Turb, Seeds, Shear, Gust, Fault = 'Turb', 'Seeds', 'Shear', 'Gust', 'Fault'
        dlc_definitions = [
            {Name: 'DLC12', Description: 'Normal production', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: '-10/0/10', Turb: 'NTM', Seeds: 6,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC13', Description: 'Normal production with high turbulence', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: '-10/0/10', Turb: 'ETM', Seeds: 6,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC14', Description: 'Normal production with gust and direction change', Operation: 'PowerProduction',
             WSP: 'Vr/Vr+2/Vr-2', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: 'ECD', Fault: None, Time: 100},
            {Name: 'DLC15', Description: 'Normal production with extreme wind shear', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('EWS', {'alpha': alpha}), Gust: None, Fault: None, Time: 100},
            {Name: 'DLC21', Description: 'Loss of electrical network', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: '-10/0/10', Turb: 'NTM', Seeds: 4,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: ('GridLoss', {'t': 10}), Time: 100},
            {Name: 'DLC22b', Description: 'One blade stuck at minimum pitch angle', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: 0, Turb: 'NTM', Seeds: 12,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: ('StuckBlade', {'t': 0.1, 'pitch': 0}), Time: 100},
            {Name: 'DLC22p', Description: 'Pitch runaway', Operation: 'PowerProduction',
             WSP: 'Vr:Vstep:Vout', Wdir: 0, Turb: 'NTM', Seeds: 12,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: ('PitchRunaway', {'t': 10}), Time: 100},
            {Name: 'DLC22y', Description: 'Abnormal yaw error', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: '15:15:345', Turb: 'NTM', Seeds: 1,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC23', Description: 'Loss of electrical network with extreme operating gust',
             Operation: 'PowerProduction',
             WSP: 'Vr-2/Vr+2/Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: 'EOG', Fault: ('GridLoss', {'t': [2.5, 4, 5.25]}), Time: 100},
            {Name: 'DLC24', Description: 'Normal production with large yaw error', Operation: 'PowerProduction',
             WSP: 'Vin:Vstep:Vout', Wdir: '-20/20', Turb: 'NTM', Seeds: 3,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC31', Description: 'Start-up', Operation: ('StartUp', {'t': 0}),
             WSP: 'Vin/Vr/Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 100},
            {Name: 'DLC32', Description: 'Start-up at 4 different times with extreme operating gust',
             Operation: ('StartUp', {'t': [0.1, 2.5, 4, 5.25]}),
             WSP: 'Vin/Vr-2/Vr+2/Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: 'EOG', Fault: None, Time: 100},
            {Name: 'DLC33', Description: 'Start-up at 2 different times with extreme wind direction change',
             Operation: ('StartUp', {'t': [-0.1, 5]}),
             WSP: 'Vin/Vr-2/Vr+2/Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: 'EDC', Fault: None, Time: 100},
            {Name: 'DLC41', Description: 'Shut-down', Operation: ('ShutDown', {'t': 0}),
             WSP: 'Vin/Vr/Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 100},
            {Name: 'DLC42', Description: 'Shut-down at 6 different times with extreme operating gust',
             Operation: ('ShutDown', {'t': [0.1, 2.5, 4, 5, 8, 10]}),
             WSP: 'Vr-2/Vr+2/Vout', Wdir: 0, Turb: 'NoTurb', Seeds: None,
             Shear: ('NWP', {'alpha': alpha}), Gust: 'EOG', Fault: None, Time: 100},
            {Name: 'DLC51', Description: 'Emergency shut-down', Operation: ('EmergencyShutDown', {'t': 0}),
             WSP: 'Vr-2/Vr+2/Vout', Wdir: 0, Turb: 'NTM', Seeds: 12,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 100},
            {Name: 'DLC61', Description: 'Parked with 50-year wind', Operation: 'Parked',
             WSP: 'Vref', Wdir: '-8/8', Turb: ('ConstantTurb', {'ti': ti_extreme}), Seeds: 6,
             Shear: ('NWP', {'alpha': alpha_extreme}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC62', Description: 'Parked with 50-year wind without grid connection', Operation: 'Parked',
             WSP: 'Vref', Wdir: '0:15:345', Turb: ('ConstantTurb', {'ti': ti_extreme}), Seeds: 1,
             Shear: ('NWP', {'alpha': alpha_extreme}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC63', Description: 'Parked with 1-year wind with large yaw error', Operation: 'Parked',
             WSP: 'V1', Wdir: '-20/20', Turb: ('ConstantTurb', {'ti': ti_extreme}), Seeds: 6,
             Shear: ('NWP', {'alpha': alpha_extreme}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC64', Description: 'Parked', Operation: 'Parked',
             WSP: 'Vin:Vstep:0.7*Vref', Wdir: '-8/8', Turb: 'NTM', Seeds: 6,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC71', Description: 'Rotor locked at 4 different azimuth angles and extreme yaw',
             Operation: ('RotorLocked', {'azimuth': [0, 30, 60, 90]}),
             WSP: 'V1', Wdir: '0:15:345', Turb: ('ConstantTurb', {'ti': ti_extreme}), Seeds: 1,
             Shear: ('NWP', {'alpha': alpha_extreme}), Gust: None, Fault: None, Time: 600},
            {Name: 'DLC81', Description: 'Rotor locked for maintenance',
             Operation: ('RotorLocked', {'azimuth': best_azimuth}),
             WSP: 'Vmaint', Wdir: '-8/8', Turb: 'NTM', Seeds: 6,
             Shear: ('NWP', {'alpha': alpha}), Gust: None, Fault: None, Time: 600},
        ]

        Vref = {1: 50, 2: 42.5, 3: 37.5}[int(iec_wt_class[0])]
        tiref = {'a': 0.16, 'b': 0.14, 'c': 0.12}[iec_wt_class[1].lower()]
        V1 = 0.8 * Vref
        Ve50 = 1.4 * Vref
        Ve1 = 0.8 * Ve50
        lambda_1 = 0.7 * z_hub if z_hub < 60 else 42
        variables = {'iec_wt_class': iec_wt_class, 'Vref': Vref, 'tiref': tiref,
                     'V1': V1, 'Ve50': Ve50, 'Ve1': Ve1,
                     'Vin': Vin, 'Vout': Vout, 'Vr': Vr, 'Vmaint': Vmaint, 'Vstep': Vstep,
                     'D': D, 'z_hub': z_hub, 'lambda_1': lambda_1,
                     'controller': controller, 'generator_servo': generator_servo,
                     'pitch_servo': pitch_servo, 'best_azimuth': best_azimuth,
                     'shaft': shaft, 'shaft_constraint': shaft_constraint}
        variables["seed"] = int(seed) if seed else seed
        DLB.__init__(self, dlc_definitions, variables)
def main():
    if __name__ == '__main__':
        dlb = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A',
                                     Vin=4, Vout=25, Vr=8,  # cut-in, cut-out and rated wind speed
                                     Vmaint=18,  # maximum wind speed for maintenance
                                     D=180, z_hub=110,  # diameter and hub height
                                     # required DLL/servo names and azimuth (example values from the tests)
                                     controller='dtu_we_controller',
                                     generator_servo='generator_servo',
                                     pitch_servo='servo_with_limits',
                                     best_azimuth=180)
        print(dlb.dlcs)
        print(dlb.variables)

        # save dlb definition to excel
        dlb.to_excel('overview.xlsx')
        # you can now modify definitions in overview.xlsx in Excel and
        # load the modified dlb definition back into python
        dlb = DLB.from_excel('overview.xlsx')

        print(dlb['DLC14'])

        # save dlc14 as Excel spreadsheet
        dlb['DLC14'].to_excel('dlc14.xlsx')
        # you can now modify cases in dlc14.xlsx in Excel

        # save all cases to excel
        dlb.cases_to_excel('cases.xlsx')

        # generate and save hawc2 input files
        from wetb.hawc2.tests import test_files
        from wetb.dlb.hawc2_iec_dlc_writer import HAWC2_IEC_DLC_Writer
        path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/'
        writer = HAWC2_IEC_DLC_Writer(path + 'htc/DTU_10MW_RWT.htc', diameter=180)

        # load all cases
        writer.from_pandas(dlb)
        # load DLC14 only
        writer.from_pandas(dlb['DLC14'])
        # load modified DLC14.xlsx
        writer.from_excel('dlc14.xlsx')

        writer.write_all('tmp')


main()
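# Illustrative note (not from the source): DLC.case_arg_lst_product assembles each
# case name as "<Name>_wsp<V_hub>_wdir<wdir mod 360>[_s<seed>][_<gust/fault/operation id>]",
# so e.g. a DLC12 case at V_hub=4 m/s, wdir=-10 deg, seed 1001 becomes
# "DLC12_wsp04_wdir350_s1001" (cf. the file names asserted in the tests below).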
File added
from wetb.dlb import iec61400_1
import pytest
from wetb.dlb.hawc2_iec_dlc_writer import HAWC2_IEC_DLC_Writer
import os
from wetb.hawc2.tests import test_files
import shutil
from tests import npt, run_main
from wetb.hawc2.htc_file import HTCFile
from tests.run_main import run_module_main
from wetb.dlb.iec61400_1 import DTU_IEC61400_1_Ref_DLB
path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/htc/tmp/'
def clean_up():
    if os.path.isdir(path):
        shutil.rmtree(path)


@pytest.fixture(autouse=True)  # pytest.yield_fixture is deprecated; plain fixtures may yield
def run_around_tests():
    clean_up()
    yield
    clean_up()


@pytest.fixture
def writer():
    return HAWC2_IEC_DLC_Writer(path + '../DTU_10MW_RWT.htc', diameter=127)


def test_main():
    run_main.run_module_main(iec61400_1)
def test_DLC12(writer):
    dlc12 = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
                                   Vmaint=18, controller='dtu_we_controller',
                                   generator_servo='generator_servo', pitch_servo='servo_with_limits',
                                   best_azimuth=180)['DLC12']
    assert len(dlc12) == 216  # 12 wsp, 3 wdir, 6 seeds
    writer.from_pandas(dlc12[::24][:2])
    writer.write_all(path)
    npt.assert_array_equal(sorted(os.listdir(path + "DLC12")),
                           ['DLC12_wsp04_wdir350_s1001.htc', 'DLC12_wsp06_wdir000_s1101.htc'])
    htc = HTCFile(path + "DLC12/DLC12_wsp04_wdir350_s1001.htc")
    assert htc.wind.wsp[0] == 4
    npt.assert_array_equal(htc.wind.windfield_rotations.values, [-10, 0, 0])
    assert htc.wind.turb_format[0] == 1
    assert htc.wind.mann.create_turb_parameters[3] == 1001


def test_DLC21(writer):
    dlc = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
                                 Vmaint=18, controller='dtu_we_controller',
                                 generator_servo='generator_servo', pitch_servo='servo_with_limits',
                                 best_azimuth=180)['DLC21']
    assert len(dlc) == 144  # 12 wsp, 3 wdir, 4 seeds
    writer.from_pandas(dlc[::16][:2])
    writer.write_all(path)
    npt.assert_array_equal(sorted(os.listdir(path + "DLC21")),
                           ['DLC21_wsp04_wdir350_s1001.htc', 'DLC21_wsp06_wdir000_s1101.htc'])
    htc = HTCFile(path + "DLC21/DLC21_wsp04_wdir350_s1001.htc")
    assert htc.wind.wsp[0] == 4
    npt.assert_array_equal(htc.wind.windfield_rotations.values, [-10, 0, 0])
    assert htc.wind.turb_format[0] == 1
    assert htc.wind.mann.create_turb_parameters[3] == 1001
    assert htc.dll.get_subsection_by_name('generator_servo', 'name').init.constant__7.values == [7, 110]


def test_DLC22y(writer):
    dlc = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
                                 Vmaint=18, controller='dtu_we_controller',
                                 generator_servo='generator_servo', pitch_servo='servo_with_limits',
                                 best_azimuth=180)['DLC22y']
    assert len(dlc) == 276  # 12 wsp, 23 wdir, 1 seed
    writer.from_pandas(dlc[::24][:2])
    writer.write_all(path)
    npt.assert_array_equal(sorted(os.listdir(path + "DLC22y")),
                           ['DLC22y_wsp04_wdir015_s1001.htc', 'DLC22y_wsp06_wdir030_s1101.htc'])
    htc = HTCFile(path + "DLC22y/DLC22y_wsp04_wdir015_s1001.htc")
    assert htc.wind.wsp[0] == 4
    npt.assert_array_equal(htc.wind.windfield_rotations.values, [15, 0, 0])
    assert htc.wind.turb_format[0] == 1
    assert htc.wind.mann.create_turb_parameters[3] == 1001
@@ -3,16 +3,6 @@ Created on 01/10/2014
 @author: MMPE
 '''
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from builtins import int
-from builtins import map
-from builtins import str
-from builtins import zip
-from future import standard_library
-standard_library.install_aliases()
 import pandas as pd
 import numpy as np
 import glob
@@ -34,47 +24,53 @@ except NameError as e:
 def Weibull(u, k, start, stop, step):
     C = 2 * u / np.sqrt(np.pi)
-    cdf = lambda x :-np.exp(-(x / C) ** k)
-    return {wsp:-cdf(wsp - step / 2) + cdf(wsp + step / 2) for wsp in np.arange(start, stop + step, step)}
+    def cdf(x): return -np.exp(-(x / C) ** k)
+    return {wsp: -cdf(wsp - step / 2) + cdf(wsp + step / 2) for wsp in np.arange(start, stop + step, step)}

 def Weibull2(u, k, wsp_lst):
     C = 2 * u / np.sqrt(np.pi)
-    cdf = lambda x :-np.exp(-(x / C) ** k)
-    edges = np.r_[wsp_lst[0] - (wsp_lst[1] - wsp_lst[0]) / 2, (wsp_lst[1:] + wsp_lst[:-1]) / 2, wsp_lst[-1] + (wsp_lst[-1] - wsp_lst[-2]) / 2]
-    return [-cdf(e1) + cdf(e2) for wsp, e1, e2 in zip(wsp_lst, edges[:-1], edges[1:])]
+    def cdf(x): return -np.exp(-(x / C) ** k)
+    edges = [wsp_lst[0] - (wsp_lst[1] - wsp_lst[0])/2]
+    edges += [(wsp_lst[i] + wsp_lst[i + 1])/2 for i in range(len(wsp_lst) - 1)]
+    edges += [wsp_lst[-1] + (wsp_lst[-1] - wsp_lst[-2])/2]
+    return {wsp_lst[i]: cdf(edges[i + 1]) - cdf(edges[i]) for i in range(len(wsp_lst))}

 def Weibull_IEC(Vref, Vhub_lst):
     """Weibull distribution according to IEC 61400-1:2005, page 24

     Parameters
     ----------
     Vref : int or float
         Vref of wind turbine class
     Vhub_lst : array_like
         Wind speed at hub height. Must be equally spaced.

     Returns
     -------
     nd_array : list of probabilities

     Examples
     --------
     >>> Weibull_IEC(50, [4,6,8])
     [ 0.11002961  0.14116891  0.15124155]
     """
     Vhub_lst = np.array(Vhub_lst)
-    #Average wind speed
-    Vave=.2*Vref
-    #Rayleigh distribution
-    Pr = lambda x : 1 - np.exp(-np.pi*(x/(2*Vave))**2)
-    #Wsp bin edges: [4,6,8] -> [3,5,7,9]
-    wsp_bin_edges = np.r_[Vhub_lst[0] - (Vhub_lst[1] - Vhub_lst[0]) / 2, (Vhub_lst[1:] + Vhub_lst[:-1]) / 2, Vhub_lst[-1] + (Vhub_lst[-1] - Vhub_lst[-2]) / 2]
-    #probabilities of 3-5, 5-7, 7-9
+    # Average wind speed
+    Vave = .2 * Vref
+    # Rayleigh distribution
+    def Pr(x): return 1 - np.exp(-np.pi * (x / (2 * Vave))**2)
+    # Wsp bin edges: [4,6,8] -> [3,5,7,9]
+    wsp_bin_edges = np.r_[Vhub_lst[0] - (Vhub_lst[1] - Vhub_lst[0]) / 2, (Vhub_lst[1:] +
+                          Vhub_lst[:-1]) / 2, Vhub_lst[-1] + (Vhub_lst[-1] - Vhub_lst[-2]) / 2]
+    # probabilities of 3-5, 5-7, 7-9
     return np.array([-Pr(e1) + Pr(e2) for e1, e2 in zip(wsp_bin_edges[:-1], wsp_bin_edges[1:])])
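# Illustrative note (not part of the diff): Weibull_IEC bins a Rayleigh distribution
# (a Weibull with k=2 and Vave=0.2*Vref) over the given equally spaced wind-speed bins:
#     >>> Weibull_IEC(50, [4, 6, 8])
#     array([0.11002961, 0.14116891, 0.15124155])
# i.e. for a class 1 turbine, ~11% of the time falls in the 3-5 m/s bin.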
class DLCHighLevel(object):

    def __init__(self, filename, fail_on_resfile_not_found=False, shape_k=2.0):
@@ -85,8 +81,8 @@
         self.shape_k = shape_k

         # Variables
-        df_vars = pd.read_excel(self.filename, sheetname='Variables',
-                                index_col='Name')
+        df_vars = pd.read_excel(self.filename, 'Variables', index_col='Name')
         df_vars.fillna('', inplace=True)
         for name, value in zip(df_vars.index, df_vars.Value.values):
             setattr(self, name.lower(), value)
@@ -96,8 +92,8 @@
                             "result folder" % self.filename)
             self.res_path = os.path.join(os.path.dirname(self.filename), self.res_path)

-        #DLC sheet
-        self.dlc_df = pd.read_excel(self.filename, sheetname='DLC', skiprows=[1])
+        # DLC sheet
+        self.dlc_df = pd.read_excel(self.filename, 'DLC', skiprows=[1])
         # empty strings are now nans, convert back to empty strings
         self.dlc_df.fillna('', inplace=True)
         # force headers to lower case
@@ -129,17 +125,19 @@
             self.dlc_df['psf'] = 1

         # Sensors sheet
-        self.sensor_df = pd.read_excel(self.filename, sheetname='Sensors')
+        self.sensor_df = pd.read_excel(self.filename, 'Sensors')
         # empty strings are now nans, convert back to empty strings
         self.sensor_df.fillna('', inplace=True)
         # force headers to lower case
         self.sensor_df.columns = [k.lower() for k in self.sensor_df.columns]
         for k in ['Name', 'Nr']:
             assert k.lower() in self.sensor_df.keys(), "Sensor sheet must have a '%s' column" % k
-        self.sensor_df = self.sensor_df[self.sensor_df.name!=""]
-        assert not any(self.sensor_df['name'].duplicated()), "Duplicate sensor names: %s" % ",".join(self.sensor_df['name'][self.sensor_df['name'].duplicated()].values)
-        for k in ['description', 'unit', 'statistic', 'ultimate', 'fatigue', 'm', 'neql', 'extremeload', 'bearingdamage', 'mindistance', 'maxdistance']:
+        self.sensor_df = self.sensor_df[self.sensor_df.name != ""]
+        assert not any(self.sensor_df['name'].duplicated()), "Duplicate sensor names: %s" % ",".join(
+            self.sensor_df['name'][self.sensor_df['name'].duplicated()].values)
+        for k in ['description', 'unit', 'statistic', 'ultimate', 'fatigue', 'm',
+                  'neql', 'extremeload', 'bearingdamage', 'mindistance', 'maxdistance']:
             if k not in self.sensor_df.keys():
                 self.sensor_df[k] = ""
         for _, row in self.sensor_df[self.sensor_df['fatigue'] != ""].iterrows():
@@ -158,12 +156,14 @@
         if sensors != []:
             sensors = np.atleast_1d(sensors)
             empty_column = pd.DataFrame([""] * len(self.sensor_df.name))[0]
-            return self.sensor_df[functools.reduce(np.logical_or, [((self.sensor_df.get(f, empty_column).values != "") | (self.sensor_df.name == f)) for f in sensors])]
+            return self.sensor_df[functools.reduce(
+                np.logical_or, [((self.sensor_df.get(f, empty_column).values != "") | (self.sensor_df.name == f)) for f in sensors])]
         else:
             return self.sensor_df

     def dlc_variables(self, dlc):
         dlc_row = self.dlc_df[self.dlc_df['name'] == dlc]

         def get_lst(x):
             if isinstance(x, pd.Series):
                 x = x.iloc[0]
@@ -178,14 +178,18 @@
     def distribution(self, value_key, dist_key, row):
         values = self.dlc_df[value_key][row]
-        if ":" in values:
+        if ":" in str(values):
             start, step, stop = [float(eval(v, globals(), self.__dict__)) for v in values.lower().split(":")]
             values = np.arange(start, stop + step, step)
         else:
             try:
                 values = [(eval(v, globals(), self.__dict__)) for v in str(values).lower().replace("/", ",").split(",")]
             except SyntaxError:
-                values = [(eval(v.lstrip('0'), globals(), self.__dict__)) for v in str(values).lower().replace("/", ",").split(",")]
+                try:
+                    values = [(eval(v.lstrip('0'), globals(), self.__dict__))
+                              for v in str(values).lower().replace("/", ",").split(",")]
+                except Exception:
+                    values = str(values).lower().replace("/", ",").split(",")

         dist = self.dlc_df[dist_key][row]
         if str(dist).lower() == "weibull" or str(dist).lower() == "rayleigh":
@@ -200,16 +204,18 @@
             else:
                 return float(v) / 100
         dist = [fmt(v) for v in str(self.dlc_df[dist_key][row]).replace("/", ",").split(",")]
-        assert len(values) == len(dist), "Number of %s-values (%d)!= number of %s-values(%d)" % (value_key, len(values), dist_key, len(dist))
+        assert len(values) == len(dist), "Number of %s-values (%d)!= number of %s-values(%d)" % (value_key,
+                                                                                                 len(values), dist_key, len(dist))
         return OrderedDict(zip(map(self.format_tag_value, values), dist))

     def fatigue_distribution(self):
         fatigue_dist = {}
-        for row, load in self.dlc_df['load'].iteritems():
+        for row, load in self.dlc_df['load'].items():
             if "F" not in str(load).upper():
                 continue
             dlc = self.dlc_df[self.dist_value_keys[0][1]][row]
-            fatigue_dist[str(dlc)] = [self.distribution(value_key, dist_key, row) for dist_key, value_key in self.dist_value_keys]
+            fatigue_dist[str(dlc)] = [self.distribution(value_key, dist_key, row)
+                                      for dist_key, value_key in self.dist_value_keys]
         return fatigue_dist

     def files_dict(self, files=None):
@@ -235,9 +241,11 @@
         if isinstance(files, list):
             pass
         elif not hasattr(self, "res_folder") or self.res_folder == "":
-            files = glob.glob(os.path.join(self.res_path, "*"+ext)) + glob.glob(os.path.join(self.res_path, "*/*"+ext))
-            if len(files)==0:
-                raise Exception('No *%s files found in:\n%s or\n%s'%(ext, self.res_path, os.path.join(self.res_path, "*/")))
+            files = glob.glob(os.path.join(self.res_path, "*" + ext)) + \
+                glob.glob(os.path.join(self.res_path, "*/*" + ext))
+            if len(files) == 0:
+                raise Exception('No *%s files found in:\n%s or\n%s' %
+                                (ext, self.res_path, os.path.join(self.res_path, "*/")))
         else:
             files = []
             for dlc_id in fatigue_dlcs:
@@ -246,15 +254,16 @@
                     folder = self.res_folder % dlc_id
                 else:
                     folder = self.res_folder
-                dlc_files = (glob.glob(os.path.join(self.res_path , folder, "*"+ext)))
-                if len(dlc_files)==0:
-                    raise Exception('DLC%s included in fatigue analysis, but no *%s files found in:\n%s'%(dlc_id, ext, os.path.join(self.res_path , folder)))
+                dlc_files = (glob.glob(os.path.join(self.res_path, folder, "*" + ext)))
+                if len(dlc_files) == 0:
+                    raise Exception('DLC%s included in fatigue analysis, but no *%s files found in:\n%s' %
+                                    (dlc_id, ext, os.path.join(self.res_path, folder)))
                 files.extend(dlc_files)
         keys = list(zip(*self.dist_value_keys))[1]
         fmt = self.format_tag_value
-        tags = [[fmt(tag.replace(key, "")) for tag, key in zip(os.path.basename(f).split("_"), keys)] for f in files]
+        tags = [[fmt(tag.replace(key, "")) for tag, key in zip(os.path.basename(f).lower().split("_"), keys)] for f in files]
         dlc_tags = list(zip(*tags))[0]
-        files_dict = {dlc_tag:{} for dlc_tag in dlc_tags}
+        files_dict = {dlc_tag: {} for dlc_tag in dlc_tags}
         for tag_row, f in zip(tags, files):
             d = files_dict[tag_row[0]]
             for tag in tag_row[1:]:
@@ -285,7 +294,7 @@
                 total_prop *= prop
             return total_prop

-    def file_hour_lst(self, years=20, files_dict=None, dist_dict=None):
+    def file_hour_lst(self, years=20, files_dict=None, dist_dict=None, files=None):
         """Create a list of (filename, hours_pr_year) that can be used as input for LifeTimeEqLoad

         Returns
@@ -300,16 +309,18 @@
         if dist_dict is None:
             dist_dict = self.fatigue_distribution()
         if files_dict is None:
-            files_dict = self.files_dict()
+            files_dict = self.files_dict(files=files)

         for dlc_id in sorted(dist_dict.keys()):
             dlc_id = str(dlc_id)
             fmt = self.format_tag_value

             def tag_prop_lst(dist_lst):
                 if len(dist_lst) == 0:
                     return [[]]
-                return [[(fmt(tag), prop)] + tl for tl in tag_prop_lst(dist_lst[1:]) for tag, prop in dist_lst[0].items()]
+                return [[(fmt(tag), prop)] + tl for tl in tag_prop_lst(dist_lst[1:])
+                        for tag, prop in dist_lst[0].items()]

             def files_from_tags(self, f_dict, tags):
                 if len(tags) == 0:
@@ -320,7 +331,7 @@
             if self.dist_value_keys[-len(tags)][1] == "wdir":
                 try:
                     return files_from_tags(self, f_dict[tags[0] % 360], tags[1:])
-                except:
+                except Exception:
                     pass
             raise
@@ -330,7 +341,8 @@
                 files = (files_from_tags(self, files_dict, tags))
             except KeyError:
                 if self.fail_on_resfile_not_found:
-                    raise FileNotFoundError("Result files for %s not found" % (", ".join(["%s='%s'" % (dv[1], t) for dv, t in zip(self.dist_value_keys, tags)])))
+                    raise FileNotFoundError("Result files for %s not found" % (
+                        ", ".join(["%s='%s'" % (dv[1], t) for dv, t in zip(self.dist_value_keys, tags)])))
                 else:
                     continue
             if files:
@@ -341,19 +353,21 @@
         return fh_lst

     def dlc_lst(self, load='all'):
-        dlc_lst = np.array(self.dlc_df['dlc'])[np.array([load == 'all' or load.lower() in d.lower() for d in self.dlc_df['load']])]
+        dlc_lst = np.array(self.dlc_df['dlc'])[np.array(
+            [load == 'all' or load.lower() in d.lower() for d in self.dlc_df['load']])]
         return [v.lower().replace('dlc', '') for v in dlc_lst]

     @cache_function
     def psf(self):
-        return {dlc: float((psf, 1)[psf == ""]) for dlc, psf in zip(self.dlc_df['dlc'], self.dlc_df['psf']) if dlc != ""}
+        return {dlc: float((psf, 1)[psf == ""])
+                for dlc, psf in zip(self.dlc_df['dlc'], self.dlc_df['psf']) if dlc != ""}

 if __name__ == "__main__":
-    dlc_hl = DLCHighLevel(r'X:\DTU10MW\Q0010\DLC_post_betas1.xlsx')
-    #print (DLCHighLevelInputFile(r'C:\mmpe\Projects\DLC.xlsx').sensor_info(0, 0, 1)['Name'])
-    #print (dlc_dict()['64'])
-    #print (dlc_hl.fatigue_distribution()['64'])
-    print (dlc_hl.file_hour_lst(r"X:\DTU10MW/Q0010/res/"))
+    # dlc_hl = DLCHighLevel(r'X:\DTU10MW\Q0010\DLC_post_betas1.xlsx')
+    # #print (DLCHighLevelInputFile(r'C:\mmpe\Projects\DLC.xlsx').sensor_info(0, 0, 1)['Name'])
+    # #print (dlc_dict()['64'])
+    # #print (dlc_hl.fatigue_distribution()['64'])
+    # print(dlc_hl.file_hour_lst(r"X:\DTU10MW/Q0010/res/"))
+    dlc = DLCHighLevel(r'C:\Users\mmpe\Downloads\Post Processing v7 - FATIGUE.xlsx', fail_on_resfile_not_found=False)
+    print(dlc.file_hour_lst())
@@ -3,12 +3,6 @@ Created on 09/10/2014
 @author: MMPE
 '''
-from __future__ import division
-from __future__ import unicode_literals
-from __future__ import print_function
-from __future__ import absolute_import
-from future import standard_library
-standard_library.install_aliases()
 import unittest
 from wetb.dlc.high_level import DLCHighLevel, Weibull, Weibull_IEC
 import os
...
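# Illustrative note (not part of the diff): the new `files` argument lets callers
# bypass the res_path globbing by threading an explicit result-file list through
# file_hour_lst into files_dict, e.g. (hypothetical path):
#     dlc_hl = DLCHighLevel('DLC_post.xlsx')
#     fh_lst = dlc_hl.file_hour_lst(files=['res/DLC12/DLC12_wsp04_wdir350_s1001.sel'])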
from wetb.hawc2.htc_file import HTCFile
from wetb.hawc2.tests import test_files
import os
def main():
    if __name__ == '__main__':
        # ======================================================================
        # load existing htc file
        # ======================================================================
        path = os.path.dirname(test_files.__file__) + "/simulation_setup/DTU10MWRef6.0/"
        htc = HTCFile(path + "htc/DTU_10MW_RWT.htc")

        # ======================================================================
        # modify wind speed and turbulence intensity
        # ======================================================================
        htc.wind.wsp = 10
        # access wind section and change turbulence intensity
        wind = htc.wind
        wind.tint = .1

        # ======================================================================
        # print contents
        # ======================================================================
        print(htc)  # print htc file
        print(htc.keys())  # print htc sections
        print(wind)  # print wind section

        # ======================================================================
        # change tilt angle
        # ======================================================================
        orientation = htc.new_htc_structure.orientation

        # Two ways to access the relative orientation between towertop and shaft
        # 1) Knowing that it is the second relative section:
        rel_tt_shaft = orientation.relative__2
        # 2) Knowing that the section contains a field "body1" with value "towertop"
        rel_tt_shaft = orientation.get_subsection_by_name(name='towertop', field='body1')

        rel_tt_shaft.body2_eulerang__2 = 6, 0, 0
        print(rel_tt_shaft.body2_eulerang__2)

        # ======================================================================
        # set time, name and save
        # ======================================================================
        # set time of simulation, first output section and wind.scale_time_start
        htc.set_time(start=5, stop=10, step=0.1)

        # change name of logfile, animation, visualization and first output section
        htc.set_name("tmp_wsp10_tint0.1_tilt6")

        htc.save()  # save the modified htc file as "tmp_wsp10_tint0.1_tilt6"

        # ======================================================================
        # run simulation
        # ======================================================================
        # htc.simulate("<path2hawc2>/hawc2mb.exe")


main()
"""
Created on Wed Oct 10 12:47:10 2018
@author: dave
"""
import os
from wetb.prepost import windIO
from wetb.hawc2 import Hawc2io
from wetb.prepost import hawcstab2
if __name__ == '__main__':
# =============================================================================
# READ HAWC2 RESULT FILE
# =============================================================================
# METHOD A
fname = '../wetb/hawc2/tests/test_files/hawc2io/Hawc2ascii.sel'
res = Hawc2io.ReadHawc2(fname)
sig = res.ReadAll()
# METHOD B
path, file = os.path.dirname(fname), os.path.basename(fname)
res = windIO.LoadResults(path, file)
sig = res.sig
sel = res.ch_details
# result in dataframe with unique channel names (instead of indices)
sig_df = res.sig2df()
ch_df = res.ch_df
# =============================================================================
# READ HAWCStab2 files
# =============================================================================
res = hawcstab2.results()
fname = '../wetb/prepost/tests/data/campbell_diagram.cmb'
df_cmb = res.load_cmb_df(fname)
fname = '../wetb/prepost/tests/data/dtu10mw_v1_defl_u10000.ind'
df_ind = res.load_ind(fname)
fname = '../wetb/prepost/tests/data/dtu10mw.opt'
df_opt = res.load_operation(fname)
fname = '../wetb/prepost/tests/data/dtu10mw_v1.pwr'
df_pwr, units = res.load_pwr_df(fname)
fname = '../wetb/prepost/tests/data/controller_input_quadratic.txt'
tuning = hawcstab2.ReadControlTuning()
tuning.read_parameters(fname)
# tuning parameters are saved as attributes
tuning.pi_gen_reg1.K
tuning.pi_gen_reg2.I
tuning.pi_gen_reg2.Kp
tuning.pi_gen_reg2.Ki
tuning.pi_gen_reg2.Kd
tuning.pi_pitch_reg3.Kp
tuning.pi_pitch_reg3.Ki
tuning.pi_pitch_reg3.Kd
tuning.pi_pitch_reg3.K1
tuning.pi_pitch_reg3.K2
tuning.aero_damp.Kp2
tuning.aero_damp.Ko1
tuning.aero_damp.Ko2
# or you can get them as a dictionary
tune_tags = tuning.parameters2tags()
'''
Created on 03/09/2015
@author: MMPE
'''
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from io import open
from builtins import map
from builtins import range
from builtins import chr
from future import standard_library
standard_library.install_aliases()
import os import os
import numpy as np import numpy as np
import struct import struct
from itertools import takewhile
def load_output(filename): def load_output(filename):
"""Load a FAST binary or ascii output file """Load a FAST binary or ascii output file
...@@ -45,25 +32,37 @@ def load_output(filename): ...@@ -45,25 +32,37 @@ def load_output(filename):
return load_binary_output(filename) return load_binary_output(filename)
return load_ascii_output(filename) return load_ascii_output(filename)
def load_ascii_output(filename): def load_ascii_output(filename):
with open(filename) as f: with open(filename) as f:
info = {} info = {}
info['name'] = os.path.splitext(os.path.basename(filename))[0] info['name'] = os.path.splitext(os.path.basename(filename))[0]
try: # Header is whatever is before the keyword `time`
header = [f.readline() for _ in range(8)] in_header = True
info['description'] = header[4].strip() header = []
info['attribute_names'] = header[6].split() while in_header:
info['attribute_units'] = [unit[1:-1] for unit in header[7].split()] #removing "()" l = f.readline()
data = np.array([line.split() for line in f.readlines()]).astype(np.float) if not l:
raise Exception('Error finding the end of FAST out file header. Keyword Time missing.')
return data, info in_header = (l + ' dummy').lower().split()[0] != 'time'
except (ValueError, AssertionError): if in_header:
header.append(l)
raise else:
info['description'] = header
info['attribute_names'] = l.split()
info['attribute_units'] = [unit[1:-1] for unit in f.readline().split()]
# Data, up to end of file or empty line (potential comment line at the end)
data = np.array([l.strip().split() for l in takewhile(
lambda x: len(x.strip()) > 0, f.readlines())]).astype(float)
return data, info
def load_binary_output(filename, use_buffer=True):
"""
03/09/15: Ported from ReadFASTbinary.m by Mads M Pedersen, DTU Wind
24/10/18: Low memory/buffered version by E. Branlard, NREL
def load_binary_output(filename):
"""Ported from ReadFASTbinary.m by Mads M Pedersen, DTU Wind
Info about ReadFASTbinary.m: Info about ReadFASTbinary.m:
% Author: Bonnie Jonkman, National Renewable Energy Laboratory % Author: Bonnie Jonkman, National Renewable Energy Laboratory
% (c) 2012, National Renewable Energy Laboratory % (c) 2012, National Renewable Energy Laboratory
...@@ -71,87 +70,156 @@ def load_binary_output(filename): ...@@ -71,87 +70,156 @@ def load_binary_output(filename):
% Edited for FAST v7.02.00b-bjj 22-Oct-2012 % Edited for FAST v7.02.00b-bjj 22-Oct-2012
""" """
def fread(fid, n, type): def fread(fid, n, type):
fmt, nbytes = {'uint8': ('B', 1), 'int16':('h', 2), 'int32':('i', 4), 'float32':('f', 4), 'float64':('d', 8)}[type] fmt, nbytes = {
'uint8': ('B', 1),
'int16': ('h', 2),
'int32': ('i', 4),
'float32': ('f', 4),
'float64': ('d', 8)}[type]
return struct.unpack(fmt * n, fid.read(nbytes * n)) return struct.unpack(fmt * n, fid.read(nbytes * n))
FileFmtID_WithTime = 1 #% File identifiers used in FAST def freadRowOrderTableBuffered(fid, n, type_in, nCols, nOff=0, type_out='float64'):
"""
Reads of row-ordered table from a binary file.
Read `n` data of type `type_in`, assumed to be a row ordered table of `nCols` columns.
Memory usage is optimized by allocating the data only once.
Buffered reading is done for improved performances (in particular for 32bit python)
`nOff` allows for additional column space at the begining of the storage table.
Typically, `nOff=1`, provides a column at the beginning to store the time vector.
@author E.Branlard, NREL
"""
fmt, nbytes = {
'uint8': (
'B', 1), 'int16': (
'h', 2), 'int32': (
'i', 4), 'float32': (
'f', 4), 'float64': (
'd', 8)}[type_in]
nLines = int(n / nCols)
GoodBufferSize = 4096 * 40
nLinesPerBuffer = int(GoodBufferSize / nCols)
BufferSize = nCols * nLinesPerBuffer
nBuffer = int(n / BufferSize)
# Allocation of data
data = np.zeros((nLines, nCols + nOff), dtype=type_out)
# Reading
try:
nIntRead = 0
nLinesRead = 0
while nIntRead < n:
nIntToRead = min(n - nIntRead, BufferSize)
nLinesToRead = int(nIntToRead / nCols)
Buffer = np.array(struct.unpack(fmt * nIntToRead, fid.read(nbytes * nIntToRead)))
Buffer = Buffer.reshape(-1, nCols)
data[nLinesRead:(nLinesRead + nLinesToRead), nOff:(nOff + nCols)] = Buffer
nLinesRead = nLinesRead + nLinesToRead
nIntRead = nIntRead + nIntToRead
except Exception:
raise Exception('Read only %d of %d values in file: %s' % (nIntRead, n, filename))
return data
FileFmtID_WithTime = 1 # % File identifiers used in FAST
FileFmtID_WithoutTime = 2 FileFmtID_WithoutTime = 2
LenName = 10 #; % number of characters per channel name FileFmtID_NoCompressWithoutTime = 3
LenUnit = 10 #; % number of characters per unit name FileFmtID_ChanLen_In = 4 # time channel and channel length is not included
with open(filename, 'rb') as fid: with open(filename, 'rb') as fid:
FileID = fread(fid, 1, 'int16') #; % FAST output file format, INT(2) FileID = fread(fid, 1, 'int16')[0] # ; % FAST output file format, INT(2)
if FileID not in [FileFmtID_WithTime, FileFmtID_WithoutTime,
FileFmtID_ChanLen_In, FileFmtID_NoCompressWithoutTime]:
raise Exception('FileID not supported {}. Is it a FAST binary file?'.format(FileID))
NumOutChans = fread(fid, 1, 'int32')[0] #; % The number of output channels, INT(4) if FileID == FileFmtID_ChanLen_In:
NT = fread(fid, 1, 'int32')[0] #; % The number of time steps, INT(4) LenName = fread(fid, 1, 'int16')[0] # Number of characters in channel names and units
if FileID == FileFmtID_WithTime:
TimeScl = fread(fid, 1, 'float64') #; % The time slopes for scaling, REAL(8)
TimeOff = fread(fid, 1, 'float64') #; % The time offsets for scaling, REAL(8)
else: else:
TimeOut1 = fread(fid, 1, 'float64') #; % The first time in the time series, REAL(8) LenName = 10 # default number of characters per channel name
TimeIncr = fread(fid, 1, 'float64') #; % The time increment, REAL(8)
NumOutChans = fread(fid, 1, 'int32')[0] # ; % The number of output channels, INT(4)
NT = fread(fid, 1, 'int32')[0] # ; % The number of time steps, INT(4)
if FileID == FileFmtID_WithTime:
TimeScl = fread(fid, 1, 'float64') # ; % The time slopes for scaling, REAL(8)
TimeOff = fread(fid, 1, 'float64') # ; % The time offsets for scaling, REAL(8)
else:
TimeOut1 = fread(fid, 1, 'float64') # ; % The first time in the time series, REAL(8)
TimeIncr = fread(fid, 1, 'float64') # ; % The time increment, REAL(8)
ColScl = fread(fid, NumOutChans, 'float32') #; % The channel slopes for scaling, REAL(4) if FileID == FileFmtID_NoCompressWithoutTime:
ColOff = fread(fid, NumOutChans, 'float32') #; % The channel offsets for scaling, REAL(4) ColScl = np.ones(NumOutChans)
ColOff = np.zeros(NumOutChans)
else:
ColScl = fread(fid, NumOutChans, 'float32') # ; % The channel slopes for scaling, REAL(4)
ColOff = fread(fid, NumOutChans, 'float32') # ; % The channel offsets for scaling, REAL(4)
LenDesc = fread(fid, 1, 'int32')[0] #; % The number of characters in the description string, INT(4) LenDesc = fread(fid, 1, 'int32')[0] # ; % The number of characters in the description string, INT(4)
DescStrASCII = fread(fid, LenDesc, 'uint8') #; % DescStr converted to ASCII DescStrASCII = fread(fid, LenDesc, 'uint8') # ; % DescStr converted to ASCII
DescStr = "".join(map(chr, DescStrASCII)).strip() DescStr = "".join(map(chr, DescStrASCII)).strip()
ChanName = [] # initialize the ChanName cell array ChanName = [] # initialize the ChanName cell array
for iChan in range(NumOutChans + 1): for iChan in range(NumOutChans + 1):
ChanNameASCII = fread(fid, LenName, 'uint8') #; % ChanName converted to numeric ASCII ChanNameASCII = fread(fid, LenName, 'uint8') # ; % ChanName converted to numeric ASCII
ChanName.append("".join(map(chr, ChanNameASCII)).strip()) ChanName.append("".join(map(chr, ChanNameASCII)).strip())
ChanUnit = [] # initialize the ChanUnit cell array ChanUnit = [] # initialize the ChanUnit cell array
for iChan in range(NumOutChans + 1): for iChan in range(NumOutChans + 1):
ChanUnitASCII = fread(fid, LenUnit, 'uint8') #; % ChanUnit converted to numeric ASCII ChanUnitASCII = fread(fid, LenName, 'uint8') # ; % ChanUnit converted to numeric ASCII
ChanUnit.append("".join(map(chr, ChanUnitASCII)).strip()[1:-1]) ChanUnit.append("".join(map(chr, ChanUnitASCII)).strip()[1:-1])
# %------------------------- # %-------------------------
# % get the channel time series # % get the channel time series
# %------------------------- # %-------------------------
nPts = NT * NumOutChans #; % number of data points in the file nPts = NT * NumOutChans # ; % number of data points in the file
if FileID == FileFmtID_WithTime: if FileID == FileFmtID_WithTime:
PackedTime = fread(fid, NT, 'int32') #; % read the time data PackedTime = fread(fid, NT, 'int32') # ; % read the time data
cnt = len(PackedTime) cnt = len(PackedTime)
if cnt < NT: if cnt < NT:
raise Exception('Could not read entire %s file: read %d of %d time values' % (filename, cnt, NT)) raise Exception('Could not read entire %s file: read %d of %d time values' % (filename, cnt, NT))
if use_buffer:
    # Reading data using buffers, and allowing an offset for time column (nOff=1)
    if FileID == FileFmtID_NoCompressWithoutTime:
        data = freadRowOrderTableBuffered(fid, nPts, 'float64', NumOutChans, nOff=1, type_out='float64')
    else:
        data = freadRowOrderTableBuffered(fid, nPts, 'int16', NumOutChans, nOff=1, type_out='float64')
else:
    # NOTE: unpacking huge data not possible on 32bit machines
    if FileID == FileFmtID_NoCompressWithoutTime:
        PackedData = fread(fid, nPts, 'float64')  # ; % read the channel data
    else:
        PackedData = fread(fid, nPts, 'int16')  # ; % read the channel data
    cnt = len(PackedData)
    if cnt < nPts:
        raise Exception('Could not read entire %s file: read %d of %d values' % (filename, cnt, nPts))
    data = np.array(PackedData).reshape(NT, NumOutChans)
    del PackedData
if FileID == FileFmtID_WithTime:
    time = (np.array(PackedTime) - TimeOff) / TimeScl
else:
    time = TimeOut1 + TimeIncr * np.arange(NT)

# %-------------------------
# % Scale the packed binary to real data
# %-------------------------
if use_buffer:
    # Scaling Data
    for iCol in range(NumOutChans):
        data[:, iCol + 1] = (data[:, iCol + 1] - ColOff[iCol]) / ColScl[iCol]
    # Adding time column
    data[:, 0] = time
else:
    # NOTE: memory expensive due to time conversion, and concatenation
    data = (data - ColOff) / ColScl
    data = np.concatenate([time.reshape(NT, 1), data], 1)
info = {'name': os.path.splitext(os.path.basename(filename))[0],
        'description': DescStr,
        'attribute_names': ChanName,
        'attribute_units': ChanUnit}
return data, info
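For reference, a minimal usage sketch of the reader above (the file name here is a placeholder): load_binary_output returns the scaled channel matrix together with the metadata dict assembled just before the return.

import numpy as np
from wetb.fast.fast_io import load_binary_output

# 'my_sim.outb' is a hypothetical FAST binary output file
data, info = load_binary_output('my_sim.outb', use_buffer=True)

# column 0 is time; the remaining columns follow info['attribute_names']
print(info['name'], info['description'])
for name, unit in zip(info['attribute_names'], info['attribute_units']):
    print(name, unit)
print(data.shape)  # (NT, NumOutChans + 1)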
from tests import npt
import pytest
import numpy as np
from wetb.fast.fast_io import load_output, load_binary_output
import os

testfilepath = os.path.join(os.path.dirname(__file__), 'test_files/')  # test file path
def test_load_output():
    data, info = load_output(testfilepath + 'DTU10MW.out')
    npt.assert_equal(data[4, 3], 4.295E-04)
    npt.assert_equal(info['name'], "DTU10MW")
    npt.assert_equal(info['attribute_names'][1], "RotPwr")
    npt.assert_equal(info['attribute_units'][1], "kW")


def test_load_binary():
    data, info = load_output(testfilepath + 'test_binary.outb')
    npt.assert_equal(info['name'], 'test_binary')
    npt.assert_equal(info['description'], 'Modified by mwDeriveSensors on 27-Jul-2015 16:32:06')
    npt.assert_equal(info['attribute_names'][4], 'RotPwr')
    npt.assert_equal(info['attribute_units'][7], 'deg/s^2')
    npt.assert_almost_equal(data[10, 4], 138.822277739535)


def test_load_binary_buffered():
    # The old method was not using a buffer and was also memory expensive
    # Now use_buffer is set to True by default
    data, info = load_binary_output(testfilepath + 'test_binary.outb', use_buffer=True)
    data_old, info_old = load_binary_output(testfilepath + 'test_binary.outb', use_buffer=False)
    npt.assert_equal(info['name'], info_old['name'])
    np.testing.assert_array_equal(data[0, :], data_old[0, :])
    np.testing.assert_array_equal(data[-1, :], data_old[-1, :])
@pytest.mark.parametrize('fid,tol', [(2, 1), (3, 1e-4)])
@pytest.mark.parametrize('buffer', [True, False])
def test_load_binary_fid(fid, tol, buffer):
    data2, info2 = load_output(testfilepath + '5MW_Land_BD_DLL_WTurb_fid04.outb')
    data, info = load_binary_output(testfilepath + '5MW_Land_BD_DLL_WTurb_fid%02d.outb' % fid, use_buffer=buffer)
    for k, v in info2.items():
        if k not in {'name', 'description'}:
            npt.assert_array_equal(info[k], v)
    r = data.max(0) - data.min(0) + 1e-20
    npt.assert_array_less(np.abs(data - data2).max(0), r * tol)
def test_load_output2():
data, info = load_output(testfilepath + 'DTU10MW.out')
npt.assert_equal(info['name'], "DTU10MW")
npt.assert_equal(info['attribute_names'][1], "RotPwr")
npt.assert_equal(info['attribute_units'][1], "kW")
def test_load_output3():
# This file has an extra comment at the end
data, info = load_output(testfilepath + 'FASTOut_Hydro.out')
npt.assert_almost_equal(data[3, 1], -1.0E+01)
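The converted tests are plain pytest functions, so the whole module can be run directly with pytest (repository-relative path assumed):

python -m pytest wetb/fast/tests/test_fast_io.py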
These predictions were generated by HydroDyn on 19-Oct-2018 at 10:15:15.
Time Wave1Elev
(sec) (m)
0.0 0.0e+01
1.0 1.0e+01
2.0 0.0e+01
3.0 -1.0e+01
4.0 0.0e+01
This output file was closed on 19-Oct-2018 at 10:15:16.
...@@ -3,13 +3,6 @@ Created on 13/10/2014

@author: MMPE
'''


def bearing_damage(angle_moment_lst, m=3, thresshold=0.1):
    """Function ported from Matlab.
...
...@@ -14,13 +14,7 @@ or
- 'rainflow_astm' (based on the c-implementation by Adam Nieslony found at the MATLAB Central File Exchange
http://www.mathworks.com/matlabcentral/fileexchange/3026)
'''
import warnings
import numpy as np

from wetb.fatigue_tools.rainflowcounting import rainflowcount
...@@ -71,7 +65,8 @@ def eq_load(signals, no_bins=46, m=[3, 4, 6, 8, 10, 12], neq=1, rainflow_func=ra
        return [[np.nan] * len(np.atleast_1d(m))] * len(np.atleast_1d(neq))
def eq_load_and_cycles(signals, no_bins=46, m=[3, 4, 6, 8, 10, 12], neq=[
        10 ** 6, 10 ** 7, 10 ** 8], rainflow_func=rainflow_windap):
    """Calculate combined fatigue equivalent load

    Parameters
...@@ -102,10 +97,13 @@ def eq_load_and_cycles(signals, no_bins=46, m=[3, 4, 6, 8, 10, 12], neq=[10 ** 6
        Edges of the amplitude bins
    """
    cycles, ampl_bin_mean, ampl_bin_edges, _, _ = cycle_matrix(signals, no_bins, 1, rainflow_func)
    if 0:  # to be similar to windap
        ampl_bin_mean = (ampl_bin_edges[:-1] + ampl_bin_edges[1:]) / 2
    cycles, ampl_bin_mean = cycles.flatten(), ampl_bin_mean.flatten()
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        eq_loads = [[((np.nansum(cycles * ampl_bin_mean ** _m) / _neq) ** (1. / _m))
                     for _m in np.atleast_1d(m)] for _neq in np.atleast_1d(neq)]
    return eq_loads, cycles, ampl_bin_mean, ampl_bin_edges
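The list comprehension above is the standard damage-equivalent-load definition, Leq = (sum_i(n_i * S_i^m) / neq) ** (1 / m). A quick hand check with made-up cycle counts:

import numpy as np
# two amplitude bins: 100 cycles at amplitude 2, 10 cycles at amplitude 5
cycles = np.array([100., 10.])
ampl = np.array([2., 5.])
m, neq = 3., 1e3
print((np.sum(cycles * ampl ** m) / neq) ** (1 / m))  # ((800 + 1250) / 1000) ** (1/3), approx. 1.27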
...@@ -150,35 +148,108 @@ def cycle_matrix(signals, ampl_bins=10, mean_bins=10, rainflow_func=rainflow_win
    """
    if isinstance(signals[0], tuple):
        weights, ampls, means = np.array([(np.zeros_like(ampl) + weight, ampl, mean) for weight,
                                          signal in signals for ampl, mean in rainflow_func(signal[:]).T], dtype=np.float64).T
    else:
        ampls, means = rainflow_func(signals[:])
        weights = np.ones_like(ampls)
    if isinstance(ampl_bins, int):
        ampl_bins = np.linspace(0, 1, num=ampl_bins + 1) * ampls[weights > 0].max()
    cycles, ampl_edges, mean_edges = np.histogram2d(ampls, means, [ampl_bins, mean_bins], weights=weights)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        ampl_bin_sum = np.histogram2d(ampls, means, [ampl_bins, mean_bins], weights=weights * ampls)[0]
        ampl_bin_mean = np.nanmean(ampl_bin_sum / np.where(cycles, cycles, np.nan), 1)
        mean_bin_sum = np.histogram2d(ampls, means, [ampl_bins, mean_bins], weights=weights * means)[0]
        mean_bin_mean = np.nanmean(mean_bin_sum / np.where(cycles, cycles, np.nan), 1)
    cycles = cycles / 2  # to get full cycles
    return cycles, ampl_bin_mean, ampl_edges, mean_bin_mean, mean_edges
def cycle_matrix2(signal, nrb_amp, nrb_mean, rainflow_func=rainflow_windap):
"""
Same as wetb.fatigue_tools.fatigue.cycle_matrix but bin from min_amp to
max_amp instead of 0 to max_amp.
Parameters
----------
signal : ndarray(n)
1D Raw signal array
nrb_amp : int
Number of bins for the amplitudes
nrb_mean : int
Number of bins for the means
rainflow_func : {rainflow_windap, rainflow_astm}, optional
The rainflow counting function to use (default is rainflow_windap)
Returns
-------
cycles : ndarray, shape(ampl_bins, mean_bins)
A bi-dimensional histogram of load cycles (full cycles). Amplitudes are
histogrammed along the first dimension and mean values are histogrammed
along the second dimension.
ampl_edges : ndarray, shape(no_bins+1,n)
The amplitude bin edges
mean_edges : ndarray, shape(no_bins+1,n)
The mean bin edges
"""
bins = [nrb_amp, nrb_mean]
ampls, means = rainflow_func(signal)
weights = np.ones_like(ampls)
cycles, ampl_edges, mean_edges = np.histogram2d(ampls, means, bins,
weights=weights)
cycles = cycles / 2 # to get full cycles
return cycles, ampl_edges, mean_edges
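A minimal usage sketch of cycle_matrix2 (signal values made up; import path assumed to match this module):

import numpy as np
from wetb.fatigue_tools.fatigue import cycle_matrix2, rainflow_windap

signal = np.array([-2.0, 0.0, 1.0, 0.0, -3.0, 0.0, 5.0, 0.0, -1.0, 0.0, 3.0, 0.0, -4.0])
cycles, ampl_edges, mean_edges = cycle_matrix2(signal, nrb_amp=4, nrb_mean=4, rainflow_func=rainflow_windap)
print(cycles.shape)  # (4, 4) matrix of full-cycle counts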
def eq_loads_from_Markov(markov_matrix, m_list, neq=1e7):
"""
Calculate the damage equivalent loads from the cycles and amplitudes
of a Markov matrix for different Woehler slopes.
Parameters
----------
markov_matrix : array (Nbins x 2)
Array containing the number of cycles and mean amplitude for each bin.
Can have extra leading dimensions such as Nsensors x Ndimensions
m_list : list (Nwoehlerslopes)
List containing different Woehler slopes.
neq : int or float, optional
Number of equivalent load cycles. The default is 1e7.
Returns
-------
Array (Nwoehlerslopes)
Array containing the equivalent loads for each Woehler slope and
for each extra leading dimension if any
"""
cycles = markov_matrix[..., 0]
amplitudes = markov_matrix[..., 1]
eq_loads = np.array([((np.nansum(cycles*amplitudes**m, axis=-1)/neq)**(1/m))
for m in m_list])
eq_loads = np.transpose(eq_loads, list(range(1, eq_loads.ndim)) + [0])
return eq_loads
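A small usage sketch of eq_loads_from_Markov with a made-up three-bin Markov matrix:

import numpy as np
markov_matrix = np.array([[100., 2.0],   # 100 cycles at amplitude 2
                          [10., 5.0],    # 10 cycles at amplitude 5
                          [1., 8.0]])    # 1 cycle at amplitude 8
print(eq_loads_from_Markov(markov_matrix, m_list=[3, 10], neq=1e7))  # one equivalent load per Woehler slope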
if __name__ == "__main__": if __name__ == "__main__":
signal1 = np.array([-2.0, 0.0, 1.0, 0.0, -3.0, 0.0, 5.0, 0.0, -1.0, 0.0, 3.0, 0.0, -4.0, 0.0, 4.0, 0.0, -2.0]) signal1 = np.array([-2.0, 0.0, 1.0, 0.0, -3.0, 0.0, 5.0, 0.0, -1.0, 0.0, 3.0, 0.0, -4.0, 0.0, 4.0, 0.0, -2.0])
signal2 = signal1 * 1.1 signal2 = signal1 * 1.1
# equivalent load for default wohler slopes # equivalent load for default wohler slopes
print (eq_load(signal1, no_bins=50, neq=17, rainflow_func=rainflow_windap)) print(eq_load(signal1, no_bins=50, neq=17, rainflow_func=rainflow_windap))
print (eq_load(signal1, no_bins=50, neq=17, rainflow_func=rainflow_astm)) print(eq_load(signal1, no_bins=50, neq=17, rainflow_func=rainflow_astm))
# Cycle matrix with 4 amplitude bins and 4 mean value bins # Cycle matrix with 4 amplitude bins and 4 mean value bins
print (cycle_matrix(signal1, 4, 4, rainflow_func=rainflow_windap)) print(cycle_matrix(signal1, 4, 4, rainflow_func=rainflow_windap))
print (cycle_matrix(signal1, 4, 4, rainflow_func=rainflow_astm)) print(cycle_matrix(signal1, 4, 4, rainflow_func=rainflow_astm))
# Cycle matrix where signal1 and signal2 contributes with 50% each # Cycle matrix where signal1 and signal2 contributes with 50% each
print (cycle_matrix([(.5, signal1), (.5, signal2)], 4, 8, rainflow_func=rainflow_astm)) print(cycle_matrix([(.5, signal1), (.5, signal2)], 4, 8, rainflow_func=rainflow_astm))
from numba import njit
import numpy as np


@njit(cache=True)  # jit faster than previous cython compiled extension
def pair_range_amplitude(x):
""" """
Returns a list of half-cycle-amplitudes Returns a list of half-cycle-amplitudes
x: Peak-Trough sequence (integer list of local minima and maxima) x: Peak-Trough sequence (integer list of local minima and maxima)
...@@ -75,11 +67,8 @@ def pair_range_amplitude(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
    return flow
@njit(cache=True)
def pair_range_from_to(x):
    """
    Returns a list of half-cycle-amplitudes
    x: Peak-Trough sequence (integer list of local minima and maxima)
...@@ -112,9 +101,9 @@ def pair_range_from_to(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
        if q == n:
            f = 1
            while p >= 4:
                # print S[p - 3:p + 1]
                # print S[p - 2], ">", S[p - 3], ", ", S[p - 1], ">=", S[p - 3], ", ", S[p], ">=", S[p - 2], (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2])
                # print S[p - 2], "<", S[p - 3], ", ", S[p - 1], "<=", S[p - 3], ", ", S[p], "<=", S[p - 2], (S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2])
                #print (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2]) or (S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2])
                if (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2]) or \
                        (S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2]):
...@@ -134,12 +123,13 @@ def pair_range_from_to(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
            if p == q:
                break
            else:
                # print S[q], "to", S[q + 1]
                A[S[q], S[q + 1]] += 1
    return A
@njit(cache=True)
def pair_range_amplitude_mean(x):
    """
    Returns a list of half-cycle-amplitudes
    x: Peak-Trough sequence (integer list of local minima and maxima)
...@@ -165,7 +155,7 @@ def pair_range_amplitude_mean(x): # cpdef pair_range(np.ndarray[long,ndim=1] x
        p += 1
        q += 1
        # read
        S[p] = x[ptr]
        ptr += 1
...
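A quick check of the jitted counter on a small peak/trough sequence (module path assumed); the first call pays the numba compilation cost, cached runs are fast:

import numpy as np
from wetb.fatigue_tools.rainflowcounting.pair_range import pair_range_amplitude

x = np.array([0, 5, 1, 4, 2, 6, 0])  # alternating trough/peak levels
print(pair_range_amplitude(x))  # list of half-cycle amplitudes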
import cython
import numpy as np
# cimport numpy as np
@cython.locals(p=cython.int, q=cython.int, f=cython.int, flow=list, k=cython.int, n=cython.int, ptr=cython.int)
def pair_range_amplitude(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
"""
Returns a list of half-cycle-amplitudes
x: Peak-Trough sequence (integer list of local minima and maxima)
This routine is implemented according to
"Recommended Practices for Wind Turbine Testing - 3. Fatigue Loads", 2. edition 1990, Appendix A
except that a list of half-cycle-amplitudes are returned instead of a from_level-to_level-matrix
"""
x = x - np.min(x)
k = np.max(x)
n = x.shape[0]
S = np.zeros(n + 1)
#A = np.zeros(k+1)
flow = []
S[1] = x[0]
ptr = 1
p = 1
q = 1
f = 0
# phase 1
while True:
p += 1
q += 1
# read
S[p] = x[ptr]
ptr += 1
if q == n:
f = 1
while p >= 4:
if (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2]) \
or\
(S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2]):
ampl = abs(S[p - 2] - S[p - 1])
# A[ampl]+=2 #Two half cycles
flow.append(ampl)
flow.append(ampl)
S[p - 2] = S[p]
p -= 2
else:
break
if f == 0:
pass
else:
break
# phase 2
q = 0
while True:
q += 1
if p == q:
break
else:
ampl = abs(S[q + 1] - S[q])
# A[ampl]+=1
flow.append(ampl)
return flow
@cython.locals(p=cython.int, q=cython.int, f=cython.int, flow=list, k=cython.int, n=cython.int, ptr=cython.int)
def pair_range_from_to(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
"""
Returns a list of half-cycle-amplitudes
x: Peak-Trough sequence (integer list of local minima and maxima)
This routine is implemented according to
"Recommended Practices for Wind Turbine Testing - 3. Fatigue Loads", 2. edition 1990, Appendix A
except that a list of half-cycle-amplitudes are returned instead of a from_level-to_level-matrix
"""
x = x - np.min(x)
k = np.max(x)
n = x.shape[0]
S = np.zeros(n + 1)
A = np.zeros((k + 1, k + 1))
S[1] = x[0]
ptr = 1
p = 1
q = 1
f = 0
# phase 1
while True:
p += 1
q += 1
# read
S[p] = x[ptr]
ptr += 1
if q == n:
f = 1
while p >= 4:
#print S[p - 3:p + 1]
#print S[p - 2], ">", S[p - 3], ", ", S[p - 1], ">=", S[p - 3], ", ", S[p], ">=", S[p - 2], (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2])
#print S[p - 2], "<", S[p - 3], ", ", S[p - 1], "<=", S[p - 3], ", ", S[p], "<=", S[p - 2], (S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2])
#print (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2]) or (S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2])
if (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2]) or \
(S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2]):
A[S[p - 2], S[p - 1]] += 1
A[S[p - 1], S[p - 2]] += 1
S[p - 2] = S[p]
p -= 2
else:
break
if f == 1:
break # q==n
# phase 2
q = 0
while True:
q += 1
if p == q:
break
else:
#print S[q], "to", S[q + 1]
A[S[q], S[q + 1]] += 1
return A
@cython.locals(p=cython.int, q=cython.int, f=cython.int, flow=list, k=cython.int, n=cython.int, ptr=cython.int)
def pair_range_amplitude_mean(x): # cpdef pair_range(np.ndarray[long,ndim=1] x):
"""
Returns a list of half-cycle-amplitudes
x: Peak-Trough sequence (integer list of local minima and maxima)
This routine is implemented according to
"Recommended Practices for Wind Turbine Testing - 3. Fatigue Loads", 2. edition 1990, Appendix A
except that a list of half-cycle-amplitudes are returned instead of a from_level-to_level-matrix
"""
x = x - np.min(x)
k = np.max(x)
n = x.shape[0]
S = np.zeros(n + 1)
ampl_mean = []
A = np.zeros((k + 1, k + 1))
S[1] = x[0]
ptr = 1
p = 1
q = 1
f = 0
# phase 1
while True:
p += 1
q += 1
# read
S[p] = x[ptr]
ptr += 1
if q == n:
f = 1
while p >= 4:
if (S[p - 2] > S[p - 3] and S[p - 1] >= S[p - 3] and S[p] >= S[p - 2]) \
or\
(S[p - 2] < S[p - 3] and S[p - 1] <= S[p - 3] and S[p] <= S[p - 2]):
# Extract two intermediate half cycles
ampl = abs(S[p - 2] - S[p - 1])
mean = (S[p - 2] + S[p - 1]) / 2
ampl_mean.append((ampl, mean))
ampl_mean.append((ampl, mean))
S[p - 2] = S[p]
p -= 2
else:
break
if f == 0:
pass
else:
break
# phase 2
q = 0
while True:
q += 1
if p == q:
break
else:
ampl = abs(S[q + 1] - S[q])
mean = (S[q + 1] + S[q]) / 2
ampl_mean.append((ampl, mean))
return ampl_mean
import numpy as np
from numba.core.decorators import njit


@njit(cache=True)  # jit faster than previous cython compiled extension
def peak_trough(x, R):
    """
    Returns list of local maxima/minima.
...@@ -25,12 +18,12 @@ def peak_trough(x, R): #cpdef np.ndarray[long,ndim=1] peak_trough(np.ndarray[lo
    MINZO = 1
    MAXZO = 2
    ENDZO = 3
    S = np.full(x.shape[0] + 1, 0)  # np.zeros(...).astype(int) not supported by numba

    L = x.shape[0]
    goto = BEGIN

    while True:
        if goto == BEGIN:
            trough = x[0]
            peak = x[0]
...
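And a corresponding sketch for the jitted peak_trough (module path assumed), which condenses a raw signal into the alternating extrema sequence the pair-range counters expect:

import numpy as np
from wetb.fatigue_tools.rainflowcounting.peak_trough import peak_trough

x = np.array([0, 1, 3, 2, 5, 4, 1, 2, 0])
print(peak_trough(x, R=2))  # local minima/maxima separated by at least R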
import cython
import numpy as np
cimport numpy as np
@cython.locals(BEGIN=cython.int, MINZO=cython.int, MAXZO=cython.int, ENDZO=cython.int, \
R=cython.int, L=cython.int, i=cython.int, p=cython.int, f=cython.int)
cpdef np.ndarray[long,ndim=1] peak_trough(np.ndarray[long,ndim=1] x, int R):
"""
Returns list of local maxima/minima.
x: 1-dimensional numpy array containing signal
R: Thresshold (minimum difference between succeeding min and max
This routine is implemented directly as described in
"Recommended Practices for Wind Turbine Testing - 3. Fatigue Loads", 2. edition 1990, Appendix A
"""
BEGIN = 0
MINZO = 1
MAXZO = 2
ENDZO = 3
S = np.zeros(x.shape[0] + 1, dtype=np.int)
L = x.shape[0]
goto = BEGIN
while 1:
if goto == BEGIN:
trough = x[0]
peak = x[0]
i = 0
p = 1
f = 0
while goto == BEGIN:
i += 1
if i == L:
goto = ENDZO
continue
else:
if x[i] > peak:
peak = x[i]
if peak - trough >= R:
S[p] = trough
goto = MAXZO
continue
elif x[i] < trough:
trough = x[i]
if peak - trough >= R:
S[p] = peak
goto = MINZO
continue
elif goto == MINZO:
f = -1
while goto == MINZO:
i += 1
if i == L:
goto = ENDZO
continue
else:
if x[i] < trough:
trough = x[i]
else:
if x[i] - trough >= R:
p += 1
S[p] = trough
peak = x[i]
goto = MAXZO
continue
elif goto == MAXZO:
f = 1
while goto == MAXZO:
i += 1
if i == L:
goto = ENDZO
continue
else:
if x[i] > peak:
peak = x[i]
else:
if peak - x[i] >= R:
p += 1
S[p] = peak
trough = x[i]
goto = MINZO
continue
elif goto == ENDZO:
n = p + 1
if abs(f) == 1:
if f == 1:
S[n] = peak
else:
S[n] = trough
else:
S[n] = (trough + peak) / 2
S = S[1:n + 1]
return S