Compare revisions
Showing with 3447 additions and 182 deletions
[metadata]
name = wetb
summary = Wind Energy Toolbox
author = DTU Wind Energy
author-email = mmpe@dtu.dk
license = GPLv3
home-page = https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox
description-file = README
# Add here all kinds of additional classifiers as defined under
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers = Development Status :: 4 - Beta,
Programming Language :: Python,
Programming Language :: Python :: 2.7,
Programming Language :: Python :: 3,
Programming Language :: Python :: 3.3,
Programming Language :: Python :: 3.4,
Programming Language :: Python :: 3.5,
Programming Language :: Python :: 3.6,
Environment :: Console,
Intended Audience :: Education,
Intended Audience :: Science/Research,
License :: OSI Approved :: GNU General Public License v3 (GPLv3),
Operating System :: OS Independent,
Operating System :: POSIX :: Linux,
Operating System :: Unix,
Operating System :: MacOS,
Operating System :: Microsoft :: Windows
Topic :: Scientific/Engineering :: Mathematics
[entry_points]
# Add here console scripts like:
# console_scripts =
# hello_world = wetb.module:function
# as well as other entry_points.
[files]
# Add here 'data_files', 'packages' or 'namespace_packages'.
# Additional data files are defined as key value pairs of source and target:
packages =
wetb
# data_files =
# share/wetb_docs = docs/*
[extras]
# Add here additional requirements for extra features, like:
# PDF =
# ReportLab>=1.2
# RXP
#ALL =
# django
# cookiecutter
[test]
# py.test options when running `python setup.py test`
#addopts = tests
[tool:pytest]
# Options for py.test:
# Specify command line options as you would do when invoking py.test directly.
# e.g. --cov-report html (or xml) for html/xml output or --junitxml junit.xml
# in order to write a coverage file that can be read by Jenkins.
#addopts =
# --cov wetb --cov-report term-missing
# --verbose
python_files = WindEnergyToolbox/wetb/*
[aliases]
docs = build_sphinx
[bdist_wheel]
# Use this option if your package is pure-python
universal = 0
[build_sphinx]
# Options for Sphinx build
source_dir = docs
build_dir = docs/_build
[pbr]
# Let pbr run sphinx-apidoc
autodoc_tree_index_modules = True
# autodoc_tree_excludes = ...
# Let pbr itself generate the apidoc
# autodoc_index_modules = True
# autodoc_exclude_modules = ...
# Convert warnings to errors
# warnerrors = True
[devpi:upload]
# Options for the devpi: PyPI server and packaging tool
# VCS export must be deactivated since we are using setuptools-scm
no-vcs = 1
formats = bdist_wheel
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup file for wetb.
This file was generated with PyScaffold 2.5, a tool that easily
puts up a scaffold for your new Python project. Learn more under:
http://pyscaffold.readthedocs.org/
"""
import os
import sys
from setuptools import setup
try:
from pypandoc import convert_file
read_md = lambda f: convert_file(f, 'rst', format='md')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
import numpy as np
from distutils.extension import Extension
from Cython.Distutils import build_ext
def setup_package():
ex_info = [('wetb.fatigue_tools.rainflowcounting', ['pair_range', 'peak_trough', 'rainflowcount_astm']),
('wetb.signal.filters', ['cy_filters'])]
extlist = [Extension('%s.%s' % (module, n),
[os.path.join(module.replace(".","/"), n)+'.pyx'],
include_dirs=[np.get_include()]) for module, names in ex_info for n in names]
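# e.g. ('wetb.signal.filters', ['cy_filters']) above expands to
# Extension('wetb.signal.filters.cy_filters', ['wetb/signal/filters/cy_filters.pyx'])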
needs_sphinx = {'build_sphinx', 'upload_docs'}.intersection(sys.argv)
sphinx = ['sphinx'] if needs_sphinx else []
setup(setup_requires=['six', 'pyscaffold>=2.5a0,<2.6a0'] + sphinx,
cmdclass = {'build_ext': build_ext},
ext_modules = extlist,
use_pyscaffold=True,
long_description=read_md('README.md'))
if __name__ == "__main__":
setup_package()
import numpy as np
npt = np.testing
\ No newline at end of file
import os
import sys
from unittest import mock
import pytest
import matplotlib.pyplot as plt
def run_module_main(module):
# check that all main module examples run without errors
if os.name == 'posix' and "DISPLAY" not in os.environ:
pytest.xfail("No display")
def no_show(*args, **kwargs):
pass
plt.show = no_show # disable plt show that requires the user to close the plot
def no_print(s):
pass
try:
with mock.patch.object(module, "__name__", "__main__"):
with mock.patch.object(module, "print", no_print):
getattr(module, 'main')()
except Exception as e:
raise type(e)(str(e) +
' in %s.main' % module.__name__).with_traceback(sys.exc_info()[2])
import importlib
import os
import pkgutil
import warnings
import mock
import pytest
import matplotlib.pyplot as plt
import sys
from wetb import examples
from tests.run_main import run_module_main
def get_main_modules():
package = examples
modules = []
for _, modname, _ in pkgutil.walk_packages(package.__path__, package.__name__ + '.'):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
m = importlib.import_module(modname)
if 'main' in dir(m):
modules.append(m)
return modules
def print_main_modules():
print("\n".join([m.__name__ for m in get_main_modules()]))
@pytest.mark.parametrize("module", get_main_modules())
def test_main(module):
run_module_main(module)
if __name__ == '__main__':
print_main_modules()
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
test = "TEST"
try:
import pkg_resources
__version__ = pkg_resources.safe_version(pkg_resources.get_distribution(__name__).version)
except:
__version__ = 'unknown'
__version__ = '0.0.10'
# try:
# import pkg_resources
# __version__ = pkg_resources.safe_version(pkg_resources.get_distribution(__name__).version)
# except:
# __version__ = 'unknown'
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 23 11:20:28 2021
@author: dave
"""
import os
from os.path import join as pjoin
import numpy as np
import pandas as pd
# from wetb.prepost import misc
from wetb.hawc2 import (HTCFile, AEFile, PCFile, StFile)
from wetb.bladed.readprj import ReadBladedProject
# TODO: move to misc?
def get_circular_inertia(d1, t, m):
d2 = d1 - 2*t
A = np.pi/4*(d1**2-d2**2)
# area moment of inertia x=y
I_g = np.pi/64*(d1**4-d2**4)
# mass moment of inertia
I_m = I_g/A*m
return A, I_g, I_m
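# Hedged usage sketch (numbers are made up): section properties of a circular
# tube with outer diameter d1 [m] and wall thickness t [m]; with a mass per
# unit length m, I_m = I_g/A*m is the corresponding mass moment of inertia per
# unit length (get_tower_c2_def_st below passes the material density instead,
# and only uses the ratio I_m/rho = I_g/A):
#     A, I_g, I_m = get_circular_inertia(5.0, 0.05, 7850.0)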
# TODO: move to HTCFile?
def add_c2_arr_htc(htc, c2_arr, body):
"""add a c2_def in the form of an array to a htc c2_def section. Needs to
be an existing body, and the current c2_def will be overwritten.
"""
nr_nodes = c2_arr.shape[0]
c2_def = htc['new_htc_structure'].get_subsection_by_name(body)['c2_def']
keys = ['sec'] + [f'sec__{i}' for i in range(2, nr_nodes+1)]
c2_def['nsec'].values[0] = nr_nodes
for i, key in enumerate(keys):
# control the formatting, otherwise it looks very messy in the htc file
# this we do by converting the values to strings
vals_fmt = [f'{k: 15.07f}' for k in c2_arr[i,:].tolist()]
# the node number is stripped of leading spaces, so adding them here doesn't help
values = [i+1] + vals_fmt
if key in c2_def:
c2_def[key].values[:] = values[:]
else:
c2_def.add_line(name=key, values=values)
return htc
# TODO: move to HTCFile?
def get_c2def(htc, body_name):
"""
"""
# select the c2def section from the shaft
c2_def = htc['new_htc_structure'].get_subsection_by_name(body_name)['c2_def']
# save all c2_def positions in one array
c2_arr = np.zeros((len(c2_def.keys())-1,4))
# iterate over all nsec lines, but ignore the line that gives the number of points
for i, sec in enumerate(c2_def.keys()[1:]):
c2_arr[i,:] = c2_def[sec].values[1:]
return pd.DataFrame(c2_arr, columns=['x', 'y', 'z', 'twist'])
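# Hedged usage sketch (file name is hypothetical): read a body's c2_def into a
# DataFrame, modify it, and write it back (the existing c2_def is overwritten):
#     htc = HTCFile('template.htc')
#     c2 = get_c2def(htc, 'blade1')
#     c2['twist'] += 1.0
#     htc = add_c2_arr_htc(htc, c2.values, 'blade1')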
class Convert2Hawc(ReadBladedProject):
"""Based on the BLADED user manual v4.8
"""
def __init__(self, fname):
super().__init__(fname)
def get_pc(self, setname_filter=None):
# ---------------------------------------------------------------------
# we ignore the airfoils blocks at the start of the ADAT sub-section
# ---------------------------------------------------------------------
# nr_airfoils = prj.get_key('ADAT', 'NFOILS')[0,0]
# airfoil_keys = ['NFOIL', 'FTYPE', 'NTHICK', 'NREYN']
# airfoils = {}
# for k in range(nr_airfoils):
# key_app = ''
# if k > 0:
# key_app = f'__{k+1}'
# airfoils[f'NFOIL_{k+1}'] = {}
# for key in airfoil_keys:
# ---------------------------------------------------------------------
# total number of pc sets
nsets = self.get_key('ADAT', 'NSETS')[0,0]
key_app = [''] + [f'__{k}' for k in range(2, nsets+1)]
# FIXME: are all sets always named consistently? Meaning only the
# thickness changes in the setname, and the rest remains the same?
# as a work-around: ignore sets that contain setname_filter in SETNAME
if setname_filter is not None:
drop = []
for i, ka in enumerate(key_app):
setname = self.get_key('ADAT', f'SETNAME{ka}')[0,0]
if setname.find(setname_filter) > -1:
drop.append(ka)
key_app = set(key_app) - set(drop)
# update nsets
nsets = len(key_app)
# initiate PCFile objects
pc = PCFile()
tc_arr = np.ndarray((nsets), dtype=np.float32)
pc_lst = []
for i, ka in enumerate(key_app):
# and assure cm is around quarter chord point
assert self.get_key('ADAT', 'XA')[0,0]==25
# prj.get_key('ADAT', f'SETNAME{ka}')
# prj.get_key('ADAT', f'SETNB{ka}')
# prj.get_key('ADAT', f'COMMENTS{ka}')
# prj.get_key('ADAT', f'DATE{ka}')
# prj.get_key('ADAT', f'REYN{ka}')
# prj.get_key('ADAT', f'DEPL{ka}')
# prj.get_key('ADAT', f'NALPHA{ka}')
# prj.get_key('ADAT', f'NVALS{ka}')
tc_arr[i] = self.get_key('ADAT', f'THICK{ka}')
shape = (self.get_key('ADAT', f'NALPHA{ka}')[0,0], 4)
pcs = np.ndarray(shape, dtype=np.float32)
pcs[:,0] = self.get_key('ADAT', f'ALPHA{ka}')[0,:]
pcs[:,1] = self.get_key('ADAT', f'CL{ka}')[0,:]
pcs[:,2] = self.get_key('ADAT', f'CD{ka}')[0,:]
pcs[:,3] = self.get_key('ADAT', f'CM{ka}')[0,:]
pc_lst.append(pcs)
# in BGEOM the airfoil number is referred to and not the thickness
# sort properly on thickness
isort = tc_arr.argsort()
pc.pc_sets = {1:(tc_arr[isort], [pc_lst[k] for k in isort])}
return pc
def get_ae(self):
# BGEOMMB defines for each element the start and end point. We only
# need half that of course.
chord = self.get_key('BGEOMMB', 'CHORD')[0,0::2]
# FIXME: is it radius or curved length?
radius = self.get_key('BGEOMMB', 'RJ')[0,0::2]
thickness = self.get_key('BGEOMMB', 'BTHICK')[0,0::2]
# FIXME: multiple pc sets can be defined but are ignored for now.
pc_set_id = np.ones(chord.shape)
ae = AEFile()
ae.add_set(radius, chord, thickness, pc_set_id, set_id=None)
return ae
def get_blade_c2_def(self):
# BLADENAME
# DISTMODE RJ
# NBE 100
# FOIL
# MOVING
# CE_X
# CE_Y
diam = self.get_key('RCON', 'DIAM')[0,0]
cone = self.get_key('RCON', 'CONE')[0,0]
tilt = self.get_key('RCON', 'TILT')[0,0]
# in BLADED all is wrt LE, so we need chord
chord = self.get_key('BGEOMMB', 'CHORD')[0,0::2]
# c2_def = htc['new_htc_structure'].get_subsection_by_name('blade1')['c2_def']
twist = self.get_key('BGEOMMB', 'TWIST')[0,0::2]
# FIXME: what is the difference between RJ and DIST?
# radius can be given either as curved length or radius
# Distance: This can be entered as a distance along the blade or as a
# distance along the blade root Z-axis. Select the appropriate option
# at the base of the screen.
dist = self.get_key('BGEOMMB', 'DIST')[0,0::2]
rj = self.get_key('BGEOMMB', 'RJ')[0,0::2]
# reference frame is EC, that seems to be indicated on the plots of the
# blade rotor coordinates/pitch/axis etc (fig 4-1 and 4-2)
ref_x = self.get_key('BGEOMMB', 'REF_X')[0,0::2]
ref_y = self.get_key('BGEOMMB', 'REF_Y')[0,0::2]
# EC wrt to LE in percentage of chord
ce_x = self.get_key('BGEOMMB', 'CE_X')[0,0::2]
ce_y = self.get_key('BGEOMMB', 'CE_Y')[0,0::2]
# HAWC2 variables
theta = twist*-1
# theta_d = theta*180/np.pi
ea_x = (50-ce_y)*0.01*chord
ea_y = ce_x*0.01*chord
# save all c2_def positions in one array
c2_arr = np.zeros((len(twist),4))
c2_arr[:,0] = -1*(ref_y + ((ea_x*np.cos(theta) - ea_y*np.sin(theta))))
c2_arr[:,1] = (ref_x - ((ea_x*np.sin(theta) + ea_y*np.cos(theta))))
# assume DIST is curved length
c2_arr[:,2] = rj #dist
c2_arr[:,3] = theta*180/np.pi
return c2_arr
def get_blade_st(self):
# BMASSMB
# BSTIFFMB
# in HAWC2 structural pitch is relative to the twist
twist = self.get_key('BGEOMMB', 'TWIST')[0,0::2]
dist = self.get_key('BGEOMMB', 'DIST')[0,0::2]
chord = self.get_key('BGEOMMB', 'CHORD')[0,0::2]
thickness = self.get_key('BGEOMMB', 'BTHICK')[0,0::2]
# CG location, in percentage chord wrt LE
cm_x = self.get_key('BMASSMB', 'CM_X')[0,0::2]
cm_y = self.get_key('BMASSMB', 'CM_Y')[0,0::2]
mass = self.get_key('BMASSMB', 'MASS')[0,0::2]
siner = self.get_key('BMASSMB', 'SINER')[0,0::2]
# Radii of gyration ratio: the radius of gyration of mass about y_m
# divided by the radius of gyration of mass about x_m. This defaults to
# the relative profile thickness but can be over-written by un-checking
# the “Use default radii of gyration ratio” checkbox.
rgratio = self.get_key('BMASSMB', 'RGRATIO')[0,0::2]
# FIXME: DEF_RGRATIO but which value belongs to which option?
# assume 0 means "default is off"
def_rgratio = self.get_key('BMASSMB', 'DEF_RGRATIO')[0,0]
# FIXME: BETA_M: figure 3-2 from v4.8: mass axis orientation, radians?
beta_m = self.get_key('BMASSMB', 'BETA_M')[0,0::2]
# Mass axis orientation: the orientation of the principal axis of inertia.
# This defaults to the orientation of aerodynamic twist, but can be
# over-written by un-checking the “Use default mass axis orientation”
# checkbox. (See diagram below)
# FIXME: DEF_BETA_M but which value belongs to which option?
# assume 0 means "default is off"
def_beta_m = self.get_key('BMASSMB', 'DEF_BETA_M')[0,0]
# DEF_RGRATIO 0
# DEF_BETA_M 0
# RADPOS
# PITCHPOS
# DEF_RGRATIO 0
# DEF_BETA_M 0
# ICEDBLADES 0, 0, 0
# ICEDENSITY 700
# TIPCHORD 0
# NPOINT 3
# RADPOS .2, 30.12345, 40.12345
# PITCHPOS .2, 30.1, 40.1
# PM_X 0, 0, 0
# PM_Y 20, 10, 20
# PMASS 1000, 30, 1
# elastic axis, in percentage chord wrt LE
ce_x = self.get_key('BGEOMMB', 'CE_X')[0,0::2]
ce_y = self.get_key('BGEOMMB', 'CE_Y')[0,0::2]
# 3.5 Blade stiffness distribution (page 19)
# The stiffness must be defined about the principal axis of inertia at
# each blade station (see 3.1). The stiffness is the product of Young’s
# Modulus for the material and the second moment of area for the xp or
# yp directions as appropriate. The principal axis orientation is defined
# as an input, and defaults to the aerodynamic twist. In this case it is
# assumed to be parallel and perpendicular to the chord line. If the
# principal axis orientation is different from the aerodynamic twist, click
# the Use default principal axis orientation to off. (see diagram below)
eiflap = self.get_key('BSTIFFMB', 'EIFLAP')[0,0::2]
eiedge = self.get_key('BSTIFFMB', 'EIEDGE')[0,0::2]
beta_s = self.get_key('BSTIFFMB', 'BETA_S')[0,0::2]
gj = self.get_key('BSTIFFMB', 'GJ')[0,0::2]
cs_x = self.get_key('BSTIFFMB', 'CS_X')[0,0::2]
cs_y = self.get_key('BSTIFFMB', 'CS_Y')[0,0::2]
gaflap = self.get_key('BSTIFFMB', 'GAFLAP')[0,0::2]
gaedge = self.get_key('BSTIFFMB', 'GAEDGE')[0,0::2]
# FIXME: DEF_BETA_S probably says what the frame of reference is
# assume 0 means "default is off"
# self.get_key('BMASSMB', 'DEF_BETA_S')[0,0::2]
# for plotting purposes
sel = beta_m < -1
beta_m[sel] += np.pi
sel = beta_s < -1
beta_s[sel] += np.pi
# calculate the curved length
# c2_arr = self.get_blade_c2_def()
# curved_len_arr = curved_length(c2_arr)
ea_x = (50-ce_y)*0.01*chord
ea_y = ce_x*0.01*chord
cg_x = (50-cm_y)*0.01*chord
cg_y = cm_x*0.01*chord
# mass moments
Ipm_y = siner + (mass*(cg_x-ea_x)**2)
ri_y = np.sqrt( (Ipm_y / (1+rgratio**2)) / mass )
ri_x = rgratio * ri_y
# in BLADED rgratio = y/x, but in HAWC2 the x and y definitions are swapped, so we swap them
# just choose E, G so the rest follows
E, G = 1e10, 1e09
# however, we don't know EA (probably because stiff?), so assume A is
# a full box based on chord length and maximum airfoil thickness
# this will make the extensional stiffness EA quite large, but the user
# would still end up with a physically meaningful value
A = chord*chord*thickness/100
starr = np.ndarray((len(dist),19))
starr[:,0] = dist
starr[:,1] = mass
starr[:,2] = cg_x
starr[:,3] = cg_y
# radius of gyration
starr[:,4] = ri_x
starr[:,5] = ri_y
# shear center
starr[:,6] = (50-cs_y)*0.01*chord
starr[:,7] = cs_x*0.01*chord
# E, G: choose
starr[:,8] = E
starr[:,9] = G
starr[:,10] = eiflap/E
starr[:,11] = eiedge/E
starr[:,12] = gj/G
# shear factor
starr[:,13] = gaflap/(G*A)
starr[:,14] = gaedge/(G*A)
starr[:,15] = A
starr[:,16] = -1*(beta_s - twist) # structural pitch
starr[:,17] = ea_x
starr[:,18] = ea_y
st = StFile()
st.main_data_sets = {1:{1:starr}}
# st.cols = wetb.hawc2.st_file.stc.split()
return st
def get_tower_c2_def_st(self):
"""Ignores nodes that are not at the centerline
Returns
-------
c2_arr : ndarray(n, 4)
    Tower centerline c2_def positions (x, y, z, twist).
st : StFile
    Corresponding tower structural (st) data.
"""
t_n_e = int(self.get_key('TGEOM', 'NTE'))
t_n_n = int(self.get_key('TGEOM', 'NTS'))
t_el_mat = self.get_key('TGEOM', 'ELSTNS')
t_n_coord = self.get_key('TGEOM', 'TCLOCAL')
# tdiam = self.get_key('TGEOM', 'SEAL')
# tdiam = self.get_key('TGEOM', 'HYDRO')
towmgt = self.get_key('TGEOM', 'TOWMGT')
# MATERIAL, NOMATS
material = self.get_key('TMASS', 'MATERIAL')[0,0::2]
nomats = self.get_key('TMASS', 'NOMATS')
# Index of tower nodes (nodes at zero x-y positions)
t_a = np.where((abs(t_n_coord[:,0])+abs(t_n_coord[:,1])) == 0)[0]
t_nodes = t_n_coord[t_a]
# sort index according to z position of the nodes
n_ind = t_a[np.argsort(t_nodes[:,2])]
t_nodes = t_n_coord[n_ind]
# Index for tower elements
# Indices of elements that do not have both of their nodes among the tower nodes
ind_not = [i for i,j in enumerate(t_el_mat) if not (j[0]-1) in n_ind
or not (j[1]-1) in n_ind]
# The element indices excluding ind_not
ind_init = np.array([i for i in range(t_el_mat.shape[0]) if not i in ind_not])
sort_el = []
for i in n_ind[:-1]:
sort_el.append(np.where(t_el_mat[ind_init][:,0] == i+1)[0][0])
ind_el = ind_init[sort_el]
# save all c2_def positions in one array
c2_arr = np.zeros((t_nodes.shape[0],4))
c2_arr[:,0:2] = t_nodes[:,:-1].copy()
c2_arr[:,2] = -t_nodes[:,-1] + t_nodes[0,-1] # It is minus for HAWC2 model
c2_arr[:,3] = 0.0
# element properties
t_d = self.get_key('TGEOM', 'TDIAM')[ind_el]
material = self.get_key('TMASS', 'MATERIAL')[0,0::2][ind_el]
t_thick = self.get_key('TMASS', 'WALLTHICK')[ind_el]
towmgt = self.get_key('TGEOM', 'TOWMGT')[ind_el]
# Mass
t_m = self.get_key('TMASS', 'TOWM')[ind_el]
t_p_m = self.get_key('TMASS', 'TOWPMI')[ind_el] # point mass for elements and nodes
t_flood = self.get_key('TMASS', 'FLOODED')[ind_el]
# material props from prj file
name_mat = np.unique(material)
mat_prop = {}
for i in name_mat:
mat_prop[i] = self.get_key('TMASS', "%s"%i) # rho, E, G
t_mat = np.array([mat_prop[i][0] for i in material])
if np.sum(t_thick[1:,0] - t_thick[:-1,1]) > 1e-9:
print('Diameters are not continuous along the tower')
if (name_mat.shape[0] > 1):
print('There are %i materials in tower!' %name_mat.shape[0])
d = np.zeros(t_d.shape[0]+1)
t = np.zeros(t_thick.shape[0]+1)
mat = np.zeros((t_mat.shape[0]+1,t_mat.shape[1]))
d[1:] = t_d[:,1]
t[1:] = t_thick[:,1]
mat[1:,:] = t_mat.copy()
d[0] = t_d[0,1]
t[0] = t_thick[0,1]
mat[0,:] = t_mat[0,:].copy()
A, I_g, I_m = get_circular_inertia(d, t, mat[:,0])
# FIXME: MAYBE ANOTHER FIX FOR DIFFERENT MATERIALS
# return c2_arr
starr = np.zeros((c2_arr.shape[0],19))
starr[:,0] = -c2_arr[:,2] #
starr[:,1] = A*mat[:,0]
starr[:,2] = 0.0 # no cg offset
starr[:,3] = 0.0 # no cg offset
# radius of gyration
starr[:,4] = np.sqrt(I_m/mat[:,0]) # ri_x = (I_m/m)**0.5 = (I_g/A)**0.5
starr[:,5] = np.sqrt(I_m/mat[:,0]) # ri_y = (I_m/m)**0.5 = (I_g/A)**0.5
# shear center
starr[:,6] = 0.0 # no shear center offset
starr[:,7] = 0.0 # no shear center offset
# E, G: choose
starr[:,8] = mat[:,1]
starr[:,9] = mat[:,2]
starr[:,10] = I_g
starr[:,11] = I_g
starr[:,12] = I_g*2
# shear factor
starr[:,13] = 3/4 # for circular section check it again
starr[:,14] = 3/4 # for circular section check it again
starr[:,15] = A
starr[:,16] = 0.0 # structural pitch
starr[:,17] = 0.0
starr[:,18] = 0.0
st = StFile()
st.main_data_sets = {1:{1:starr}}
return c2_arr, st
def get_hub(self):
# Blade root length, in blade root coordinates (with tilt and cone)
# see figure 4-1 in the BLADED User Manual 4.8
root = self.get_key('RCON', 'ROOT')[0,0]
# Hub mass: the mass of the hub, including the spinner and any blade
# root section.
# Hub mass centre: the distance from the intersection of the shaft and
# blade axes to the centre of mass of the hub, in a direction measured
# away from the tower.
# Moments of inertia: the moment of inertia of the hub mass about the
# shaft axis must be defined.
# The inertia about an axis perpendicular to the shaft may also be
# entered with its origin about the hub centre of mass.
hubmas = self.get_key('RMASS', 'HUBMAS')[0,0]
hubine = self.get_key('RMASS', 'HUBINE')[0,0]
# FIXME: which axis is this? Would that translate to both xx and yy in H2?
hubine2 = self.get_key('RMASS', 'HUBINE2')[0,0]
# Enter the Spinner diameter. This is the diameter of any spinner or
# nose-cone, within which the blades themselves experience no
# aerodynamic forces.
spind = self.get_key('RCON', 'SPIND')[0,0]
cmass_hub = {'x':0, 'y':0, 'z':0, 'm':hubmas,
'Ixx':0, 'Iyy':0, 'Izz':hubine}
return cmass_hub, root
def get_drivertain(self, len_shaft):
# TODO: brake
# MSTART BRAKE
# DTRAIN
# dtrain = self.get_key('DTRAIN')
ginert = self.get_key('DTRAIN', 'GINERT')[0,0] # generator inertia
gratio = self.get_key('DTRAIN', 'GRATIO')[0,0]
# The additional inertia of the high speed shaft may also be specified
# along with the inertia of the gearbox which is referred to the HSS.
gbxinert = self.get_key('DTRAIN', 'GBXINERT')[0,0] # gearbox inertia
# FIXME: brake position?
bpos = self.get_key('DTRAIN', 'BPOS')[0,0]
# LSS seems to have a DOF indicator in LSSDOF, and has 6 elements
# 4th element is torsion it seems
klss = self.get_key('DTRAIN', 'KLSS')[0,3]
# FIXME: what does this damping value mean? Simply C in the dyn system
# theta_dt_dt*I + C*theta_dt + K*theta = 0 ?
dlss = self.get_key('DTRAIN', 'DLSS')[0,3]
# HSS only has one DOF
khss = self.get_key('DTRAIN', 'KHSS')[0,0]
dhss = self.get_key('DTRAIN', 'DHSS')[0,0]
# Convert torsional stiffness definition into a beam stiffness
# that applies for the considered shaft length (c2_def)
G = 80e9 # typical steel value
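# (for a uniform shaft of length L, a torsional spring stiffness K [Nm/rad]
# relates to the polar area moment as K = G*Ip/L, hence Ip = K*L/G below)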
Ip = klss*len_shaft/G
starr = np.zeros((2,19))
starr[:,0] = [0, len_shaft] # st file is always normalised
starr[:,1] = 0.5 # very light but not too light
starr[:,2] = 0.0 # no cg offset
starr[:,3] = 0.0 # no cg offset
# radius of gyration: very light but not too light
starr[:,4] = 0.001 #ri_x
starr[:,5] = 0.001 #ri_y
# shear center
starr[:,6] = 0.0 # no shear center offset
starr[:,7] = 0.0 # no shear center offset
# E, G: choose
starr[:,8] = 1e18
starr[:,9] = G
starr[:,10] = Ip/2 # area inertia's
starr[:,11] = Ip/2
starr[:,12] = Ip
# shear factor
starr[:,13] = 1000 # stiff in shear
starr[:,14] = 1000
starr[:,15] = 10 # cross-section area
starr[:,16] = 0.0 # structural pitch
starr[:,17] = 0.0 # elastic axis
starr[:,18] = 0.0
st = StFile()
st.main_data_sets = {1:{1:starr}}
# FIXME: INERTIATOCLUTCH??
inertiatoclutch = self.get_key('DTRAIN', 'INERTIATOCLUTCH')
# FIXME: what is the location of these inertia's?
# FIXME: what is the mass? It seems the mass is put elsewhere?
# cmass_gbx = {'Izz':Izz}
# cmass_gen = {'x':x, 'y':y, 'z':z, 'm':nacmas,
# 'Ixx':Ixx, 'Iyy':Iyy, 'Izz':Izz}
cmass_lss = {'x':0, 'y':0, 'z':0, 'm':0,'Ixx':0, 'Iyy':0, 'Izz':ginert}
# convert HSS inertia to LSS
cmass_hss = {'x':0, 'y':0, 'z':0, 'm':0,'Ixx':0, 'Iyy':0,
'Izz':gbxinert*gratio*gratio}
return cmass_lss, cmass_hss, st
def get_nacelle(self):
# also get the nacelle concentrated mass
nacmas = self.get_key('NMASS', 'NACMAS')[0,0]
# includes the nacelle structure and all the machinery within it.
# It does not include the rotor blades and hub. If the Direct Drive
# (see 4.1) option is selected, the mass of the generator (see 4.2)
# is also excluded.
# position of cg relative to tower top, nacelle mass coord system
# based on figure 4-7 of bladed manual 4.8
# FIXME: is this correct? z means SS, and at the edge of the nacelle?
nmx = self.get_key('NMASS', 'NMX')[0,0] # width?
nmy = self.get_key('NMASS', 'NMY')[0,0] # length?
nmz = self.get_key('NMASS', 'NMZ')[0,0] # height?
iyaw = self.get_key('NMASS', 'IYAW')[0,0]
inod = self.get_key('NMASS', 'INOD')[0,0]
iroll = self.get_key('NMASS', 'IROLL')[0,0]
# MSTART NGEOM
# NACW x.000
# NACL x.000
# NACH x.000
# NACZ 0
# NACCD x.0
# NAERO N
# height of the nacelle/towertop: the BLADED model does not have to
# have consistent shaft length with tilt angle arriving at the rotor
# center, so adjust towertop length in HAWC2 accordingly
# Tower height: from the ground or sea surface to the yaw bearing
# (only needed if the tower itself has not been defined)
towht = self.get_key('RCON', 'TOWHT')[0,0]
# Hub height: from the ground to the centre of the rotor
# (i.e. the intersection of the blade and shaft axes)
height = self.get_key('RCON', 'HEIGHT')[0,0]
# horizontal distance between rotor center and tower center line
ovrhng = self.get_key('RCON', 'OVRHNG')[0,0]
tilt = self.get_key('RCON', 'TILT')[0,0]
# FOR HAWC2, WE HAVE
len_shaft = ovrhng / np.cos(tilt)
len_towertop = height - towht - len_shaft*np.sin(tilt)
assert len_towertop > 0
# assert len_towertop + len_shaft*np.sin(tilt) + towht - height == 0
# concentrated mass in HAWC2
# 1 : nodenr
# 2-4: x, y, z offset
# 5 : mass
# 6-8: Ixx, Iyy, Izz
# 9-11: Ixy, Ixz, Iyz (optional)
# in HAWC2 we would be in the tower top coordinate system, and is
# usually the same as the global coordinate system
x = nmx
y = -nmy # move upwind wrt tower
z = -nmz # move upward
Ixx = inod
Iyy = iroll
Izz = iyaw
cmass_nacelle = {'x':x, 'y':y, 'z':z, 'm':nacmas,
'Ixx':Ixx, 'Iyy':Iyy, 'Izz':Izz}
return cmass_nacelle, len_shaft, len_towertop
def get_towershadow_drag(self):
# TODO: tower shadow and tower aerodrag
# MSTART TOWSDW
# TSMODEL x
# TDCORR x
# MAXDEF x.x
# WIDTH x
# LREF x.x
return
def get_control(self):
# MSTART CONTROL
# MSTART PCOEFF
# TSRMIN x
# TSRMAX x
# TSRSTP x.x
# PITCH -x.0E-00
# PITCH_END -x.0E-00
# PITCH_STEP 0
# OMEGA x.654321
# MSTART IDLING
# AEROINFO
# pitch actuator rate limits
pitch_actuator = self.xmlroot.PitchActuator
if pitch_actuator.SetpointTrajectory.HasRateLimits:
# pitch_actuator.SetpointTrajectory.LowerRateLimit
# pitch_actuator.SetpointTrajectory.UpperRateLimit
print(pitch_actuator.SetpointTrajectory.UpperRateLimit*30/np.pi)
if pitch_actuator.SetpointTrajectory.HasAccelerationLimits:
# pitch_actuator.SetpointTrajectory.LowerAccelerationLimit
# pitch_actuator.SetpointTrajectory.UpperAccelerationLimit
print(pitch_actuator.SetpointTrajectory.UpperAccelerationLimit*30/np.pi)
# pitch_actuator.PositionResponse.values() # 1st or 2nd order
# pitch_actuator.PositionResponse.Frequency
# pitch_actuator.PositionResponse.Damping
# pitch_actuator.RateResponse.values() # 1st or 2nd order
# pitch_actuator.RateResponse.LagTime
# pitch_actuator.PositionResponse.Damping
# and more: bearing friction, etc
return
def convert(self, fname_htc):
fname_prj = os.path.basename(fname_htc).replace('.htc', '')
# for convenience, we start from an htc template that otherwise has the
# right structure, but has only 2 nodes for each body.
# assume it is in the same folder as where the HTC will be written to
basepath = os.path.dirname(fname_htc)
htc = HTCFile(pjoin(basepath, 'template.htc'))
# prj = self.PRJ_to_HAWC(pjoin(version, fname_prj))
# plot_bladed(prj)
# cone = prj.get_key('RCON', 'CONE')
# Lateral offset: the horizontal offset between the shaft and tower axes.
# Brake
# steps = prj.get_key('BRAKE', 'STEPS')
# TORQUE = prj.get_key('BRAKE', 'TORQUE')
# TIME = prj.get_key('BRAKE', 'TIME')
# -------------------------------------------------------------------------
# BLADES
# extract blade geometry, and add to c2_def section in the htc file
c2_blade = self.get_blade_c2_def()
htc = add_c2_arr_htc(htc, c2_blade, 'blade1')
# -------------------------------------------------------------------------
# NACELLE
cmass_nacelle, len_shaft, len_towertop = self.get_nacelle()
# set towertop length
# nacelle = htc['new_htc_structure'].get_subsection_by_name('towertop')
c2_tt = np.zeros((2,4))
c2_tt[1,2] = -len_towertop
htc = add_c2_arr_htc(htc, c2_tt, 'towertop')
# -------------------------------------------------------------------------
# attach nacelle mass/inertia to towertop, first node (yaw bearing)
towertop = htc['new_htc_structure'].get_subsection_by_name('towertop')
values = [v for k,v in cmass_nacelle.items()]
cmass = towertop.add_line('concentrated_mass', [1] + values)
key = cmass.location().split('/')[-1]
towertop[key].comments = 'nacelle mass (inc rotating) and inertia (non-rotating)'
# -------------------------------------------------------------------------
# set shaft length
c2_s = np.zeros((2,4))
c2_s[1,2] = len_shaft
htc = add_c2_arr_htc(htc, c2_s, 'shaft')
# -------------------------------------------------------------------------
# generator and gearbox inertia's, shaft torsional flexibility
cmass_lss, cmass_hss, st_shaft = self.get_drivertain(len_shaft)
st_shaft.save(pjoin(basepath, f'{fname_prj}_shaft.st'))
shaft = htc['new_htc_structure'].get_subsection_by_name('shaft')
# LSS inertia
values = [v for k,v in cmass_lss.items()]
cmass = shaft.add_line('concentrated_mass', [2] + values)
key = cmass.location().split('/')[-1]
shaft[key].comments = 'inertia LSS'
# Inertia of the HSS is already expressed wrt LSS
values = [v for k,v in cmass_hss.items()]
cmass = shaft.add_line('concentrated_mass', [2] + values)
gratio = self.get_key('DTRAIN', 'GRATIO')[0,0]
key = cmass.location().split('/')[-1]
shaft[key].comments = f'inertia HSS, expressed in LSS, GBR={gratio}'
# -------------------------------------------------------------------------
# set tilt angle
tilt = self.get_key('RCON', 'TILT')[0,0]
ori = htc['new_htc_structure'].orientation
# tilt is on the 2nd relative block, and set angle on 2nd set of eulerang
# check comments of the htc file:
# print(ori['relative__2']['mbdy2_eulerang__2'].comments)
ori['relative__2']['mbdy2_eulerang__2'].values = [tilt*180/np.pi, 0, 0]
ori['relative__2']['mbdy2_eulerang__2'].comments = 'tilt angle'
# coning angle
cone = self.get_key('RCON', 'CONE')[0,0]
# coning is on 3, 4 and 5 (all hub orientations)
for k in range(3,6):
# print(ori[f'relative__{k}']['mbdy2_eulerang__3'].comments)
ori[f'relative__{k}']['mbdy2_eulerang__3'].values = [-cone*180/np.pi, 0, 0]
ori[f'relative__{k}']['mbdy2_eulerang__3'].comments = 'cone angle'
# TODO: blade mounting sweep and cone as well??
# -------------------------------------------------------------------------
# HUB mass as concentrated mass on the shaft end
# FIXME: make sure we can get the hub root loads including these inertia's
cmass_hub, hublen = self.get_hub()
values = [v for k,v in cmass_hub.items()]
cmass = shaft.add_line('concentrated_mass', [2] + values)
key = cmass.location().split('/')[-1]
shaft[key].comments = 'Hub inertia and mass'
# -------------------------------------------------------------------------
# hub length
c2_hub = np.zeros((2,4))
c2_hub[1,2] = hublen
htc = add_c2_arr_htc(htc, c2_hub, 'hub1')
# -------------------------------------------------------------------------
# TOWER ST FILE and C2_DEF section
# WARNING: only includes nodes that are at centerline, side-elements are
# ignored
# TODO: compare between TGEOM/TMASS and TSTIFF approaches, it seems that
# the shear stiffness is not the same, see check_tower()
c2_tow, st = self.get_tower_c2_def_st()
st.save(pjoin(basepath, f'{fname_prj}_tower.st'))
htc = add_c2_arr_htc(htc, c2_tow, 'tower')
# hub height
htc.wind.center_pos0.values = [0, 0, -self.get_key('RCON', 'HEIGHT')[0,0]]
# -------------------------------------------------------------------------
# set all file names correct
tower = htc['new_htc_structure'].get_subsection_by_name('tower')
tower.timoschenko_input.filename.values[0] = f'data/{fname_prj}_tower.st'
shaft = htc['new_htc_structure'].get_subsection_by_name('shaft')
shaft.timoschenko_input.filename.values[0] = f'data/{fname_prj}_shaft.st'
blade1 = htc['new_htc_structure'].get_subsection_by_name('blade1')
blade1.timoschenko_input.filename.values[0] = f'data/{fname_prj}_blade.st'
for body_name in ['towertop', 'hub1']:
body = htc['new_htc_structure'].get_subsection_by_name(body_name)
body.timoschenko_input.filename.values[0] = 'data/template.st'
htc['aero'].ae_filename.values[0] = f'data/{fname_prj}.ae'
htc['aero'].pc_filename.values[0] = f'data/{fname_prj}.pc'
# -------------------------------------------------------------------------
# SAVE HTC FILE
htc.save(pjoin(basepath, f'{fname_htc}'))
# -------------------------------------------------------------------------
# other data files
# Blade st file
st = self.get_blade_st()
st.save(pjoin(basepath, f'{fname_prj}_blade.st'))
# extract profile coefficients and save to pc file
pc = self.get_pc(setname_filter='cleanVG')
pc.save(pjoin(basepath, f'{fname_prj}.pc'))
# extract aerodynamic layout, and save to ae file
ae = self.get_ae()
ae.save(pjoin(basepath, f'{fname_prj}.ae'))
# # BLADE c2_def
# c2_def = htc['new_htc_structure'].get_subsection_by_name('blade1')['c2_def']
# # iterate over all nsec lines, but ignore the line that gives the number of points
# for i, sec in enumerate(c2_def.keys()[1:]):
# c2_arr[i,:] = c2_def[sec].values[1:]
# curvedlength = np.zeros(len(c2_arr[:,2]))
# for i in range(1,len(c2_arr[:,2])):
# curvedlength[i]=curvedlength[i-1]+np.linalg.norm(c2_arr[i,0:3]-c2_arr[i-1,0:3])
# c2_arr = np.zeros((len(c2_def.keys())-1,4))
# # iterate over all nsec lines, but ignore the line that gives the number of points
# for i, sec in enumerate(c2_def.keys()[1:]):
# c2_arr[i,:] = c2_def[sec].values[1:]
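# Hedged usage sketch (file names are hypothetical): convert() expects a
# 'template.htc' with the right structure in the same folder as the target htc
# file, and writes the htc, st, ae and pc files next to it:
#     prj = Convert2Hawc('turbine.prj')
#     prj.convert('hawc2_model/turbine.htc')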
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 23 11:07:50 2021
@author: dave
"""
# from os.path import join as pjoin
import numpy as np
from lxml import objectify#, etree)
# from matplotlib import pyplot as plt
# import pandas as pd
# import wetb
from wetb.prepost import misc
# from wetb.hawc2 import (HTCFile, AEFile, PCFile, StFile)
# TODO: this dictionary utility could go to a dict sub-class in misc?
def find_next_unique_key(d, key):
k = 1
while True:
k += 1
if key + f'__{k}' not in d:
key = key + f'__{k}'
return key
# TODO: this dictionary utility could go to a dict sub-class in misc?
def add_new_unique_key(key, values, d):
"""
Add key:values to dictionary d. If key already occurs, append __x,
where x is the number of previous occurrences of key(__x).
Parameters
----------
key : str
    Key to add.
values : list
    Values belonging to key; stored as d[key] = [values].
d : dict
    Dictionary.
Returns
-------
key : str
    The key actually used (with __x appended when needed).
d : dict
    The updated dictionary.
"""
if key in d:
key = find_next_unique_key(d, key)
d[key] = [values]
return key, d
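# Hedged usage sketch: repeated keys get a __x suffix, e.g.
#     d = {}
#     add_new_unique_key('PITCH', ['0.0'], d)   # -> ('PITCH', d)
#     add_new_unique_key('PITCH', ['1.0'], d)   # -> ('PITCH__2', d)
#     # d == {'PITCH': [['0.0']], 'PITCH__2': [['1.0']]}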
class ReadBladedProject:
def __init__(self, fname):
with open(fname, encoding='utf-8') as fobj:
xml_str = fobj.read().encode('utf-8')
self.bd, self.xmlroot = self.parse_bladeddata(xml_str)
self.set_attr_and_check()
# some things are just a little different
# TMASS has a list of materials and their properties
# get rid of the quotes
# tmp = [el.replace("'", '') for el in self.bd['TMASS']['MATERIAL'][0]]
# self.bd['TMASS']['MATERIAL'] = tmp
unique_mat = set(self.get_key('TMASS', 'MATERIAL').flatten().tolist())
self.tow_mat_prop = {k:self.get_key('TMASS', k) for k in unique_mat}
# material_props = {}
# for k in unique_mat:
# material_props[k.replace("'", '')] = self.get_key('TMASS', k)
def parse_bladeddata(self, xml_str):
"""
The XML field BladedData contains what seems like the main core input
data for BLADED, and formatted in some structured way.
Parameters
----------
xml_str : TYPE
DESCRIPTION.
Returns
-------
bd : TYPE
DESCRIPTION.
xmlroot : TYPE
DESCRIPTION.
"""
# root = etree.fromstring(xml)
# elems = root.getchildren()
# bladeddata = elems[1].text
xmlroot = objectify.fromstring(xml_str)
# the non-xml formatted BLADED model is situated in a non-xml field
# called BladedData
bd = {}
mstart = None
for i, line in enumerate(xmlroot.BladedData.text.split('\n')):
# TODO: values embedded in double quotes (") can contain entire
# MSTART/MEND sub-sections (so embedded sections)
# replace tabs with spaces, split on spaces, remove empty items
linels = misc.remove_items(line.replace('\t', ' ').split(' '), '')
# commas can also be separators, in addition to spaces
linels2 = []
for k in linels:
linels2.extend(k.split(','))
linels = misc.remove_items(linels2, '')
# ignore empty lines
if len(linels) < 1:
continue
# entries can be numbers if the previous key contains multiple data points
try:
float(linels[0])
el0isnum = True
except ValueError:
el0isnum = False
# start of a sub-section that contains (non-unique) keys as well
if linels[0].upper().startswith('MSTART'):
mtag = linels[-1]
mstart = {}
# at the end of the sub-section add the sub-section to the main dict
elif linels[0].upper().startswith('MEND'):
# FIXME: implement MSTART sections embedded in double quoted values
try:
# if the section key is not unique, make it so by appending __X
if mtag in bd:
mtag = find_next_unique_key(bd, mtag)
bd[mtag] = mstart
except UnboundLocalError:
print('warning: ignored embedded mstart/mend section')
print(f'at line: {i+1}')
mstart = None
# if we are under a sub-section
elif mstart is not None:
# if the line contains a keyword
if not el0isnum:
tag, mstart = add_new_unique_key(linels[0], linels[1:], mstart)
# line is a data point that needs to be added to the key that occurred before
else:
mstart[tag].append(linels)
# add numerical values to the key that occurred before
elif el0isnum:
bd[tag].append(linels)
else:
tag, bd = add_new_unique_key(linels[0], linels[1:], bd)
return bd, xmlroot
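# Hedged sketch of the BladedData text this parser handles (the keys are real
# BLADED keys used elsewhere in this module, the values are made up):
#     MSTART RCON
#     DIAM   178.3
#     CONE   0.0436
#     MEND
# which ends up in the returned dict as bd['RCON']['DIAM'] == [['178.3']] and
# is converted to a numpy array by get_key('RCON', 'DIAM').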
def get_key(self, key1, key2=False):
"""
Get key from the BladedData CDATA section and format to int32 or
float32 numpy arrays if possible without precision loss.
Parameters
----------
key1 : str
    Top-level key, or the name of an MSTART section.
key2 : str, optional
    Key within the MSTART section key1. The default is False.
Returns
-------
numpy.array
    Values from key1/key2 formatted as a numpy array. Converted to
    numpy.int32 or numpy.float32 if possible without precision loss,
    otherwise an object array is returned.
"""
if key1 not in self.bd:
raise KeyError(f'{key1} not found in BLADED file.')
if key2 is not False:
if key2 not in self.bd[key1]:
raise KeyError(f'{key2} not found in MSTART {key1} of BLADED file.')
data = self.bd[key1][key2]
else:
data = self.bd[key1]
# in case we defined a mstart key
if isinstance(data, dict):
return data
# i ,j = len(data), len(data[0])
# FIXME: this is a very expensive way of converting it, but it might
# not matter at all since very little model data is actually considered
data_arr = np.array(data)
try:
data_arr = data_arr.astype(np.int32)
except ValueError:
try:
data_arr = data_arr.astype(np.float32)
except ValueError:
pass
return data_arr
# return np.array(data, dtype=np.float32)
# if isinstance(data[0], list) and len(data[0]) == 1:
# data = float(data[0])
# if isinstance(data[0], list) and len(data[0]) > 1:
# data_arr = np.array(data, dtype=np.float32)
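# Hedged usage sketch: blade tables in BGEOMMB list every station twice
# (element start and end points), so the convention in this module is to take
# every second column of the returned array, e.g.:
#     chord = self.get_key('BGEOMMB', 'CHORD')[0, 0::2]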
def set_attr_and_check(self):
"""Check that BGEOMMB, BMASSMB, BSTIFFMB has indeed the same node
repated twice every time.
"""
# self.bd['BGEOMMB'].keys(), but only those relevant
keysg = ['RJ', 'DIST', 'REF_X', 'REF_Y', 'CHORD', 'TWIST', 'CE_X',
'CE_Y', 'BTHICK', 'FOIL', 'MOVING']
nbe = self.get_key('BGEOMMB', 'NBE')[0,0]
# self.bd['BMASSMB'].keys(), but only those relevant
keysm = ['CM_X', 'CM_Y', 'MASS', 'SINER', 'RGRATIO', 'BETA_M']
# self.bd['BSTIFFMB'].keys(), but only those relevant
keyss = ['EIFLAP', 'EIEDGE', 'BETA_S', 'GJ', 'CS_X', 'CS_Y', 'GAFLAP',
'GAEDGE']
mkeys = ['BGEOMMB', 'BMASSMB', 'BSTIFFMB']
for mkey, keys in zip(mkeys, [keysg, keysm, keyss]):
for key in keys:
res = self.get_key(mkey, key)
try:
assert np.allclose(res[0,0::2], res[0,1::2])
except TypeError:
# allclose doesn't make sense for text arrays
assert np.compare_chararrays(res[0,0::2], res[0,1::2],
'==', True).all()
assert res.shape[1]==nbe
if hasattr(self, key.lower()):
raise UserWarning(key, 'already exists')
setattr(self, key.lower(), res[0,0::2])
def print_xml_tree(self, fname):
"""For discovery purposes: print full tree + values/text
Parameters
----------
fname : str
    Output file name prefix; the tree is written to fname + '.structure.value'.
Returns
-------
None.
"""
# def print_rec(root):
# # if hasattr(root, 'getparent'):
# # print(root.getparent().tag.title, end='.')
# # print_rec(root.getparent())
# for el in root.getchildren():
# print(print_rec(el))
# def print_root_tree(root):
# root.getparent()
# print()
# tree = etree.fromstring(xml_str)
# els = tree.xpath('/')
# for el in els:
# print(el)
# tree = etree.fromstring(xml_str)
# xmlroot = objectify.fromstring(xml_str)
# with open(fname+'.structure', 'w') as f:
# for line in xmlroot.descendantpaths():
# f.write(line + '\n')
# Recursive XML parsing python using ElementTree
# https://stackoverflow.com/q/28194703/3156685
roottree = self.xmlroot.getroottree()
def print_tree_recursive(root):
# print(roottree.getpath(root), end=' : ')
# print(root.tag.title())
f.write(roottree.getpath(root) + ' : ')
f.writelines(root.values())
if root.text is not None:
f.write(' : ' + root.text)
f.write('\n')
for elem in root.getchildren():
print_tree_recursive(elem)
with open(fname+'.structure.value', 'w') as f:
for el in self.xmlroot.getchildren():
if el.tag.title()!='Bladeddata':
print_tree_recursive(el)
# def get_frequencies(self):
# """
# """
# blades = self.xmlroot.NewGUI.Turbine.Blades.FlexibilityModel
# blades.Settings.ModesWithDampingDefined
# blades.PartModes # with lots of BladeModeContainer
# blades.WholeBladeModes
# blades.WholeBladeModes.WholeBladeMode
# blades.WholeBladeModes.WholeBladeMode.Name
# blades.WholeBladeModes.WholeBladeMode.Frequency
# blades.WholeBladeModes.WholeBladeMode.Damping
# blades.WholeBladeModes.WholeBladeMode.Components
# len(blades.WholeBladeModes.WholeBladeMode.Components.getchildren())
# # 60 elements
# for wholeblademode in blades.WholeBladeModes.iterchildren():
# print(wholeblademode.Name)
# print(wholeblademode.Frequency, wholeblademode.Damping)
# tower = self.xmlroot.NewGUI.Turbine.SupportStructure.FlexibilityModel
# for towermode in tower.Modes.getchildren():
# print(towermode.Description)
# towermode.Frequency
# towermode.Damping
;
begin simulation;
time_stop 1;
solvertype 1 ; (newmark)
on_no_convergence continue ;
convergence_limits 1E3 1.0 1E-7 ;
logfile case0.log ;
begin newmark;
deltat 0.02;
end newmark;
end simulation;
;----------------------------------------------------------------------------------------------------------------------
begin new_htc_structure;
;----------------------------------------------------------------------------------------------------------------------
begin main_body;
name tower ;
type timoschenko ;
nbodies 1 ;
node_distribution c2_def ;
damping_posdef 0.0 0.0 0.0 0.0E-00 0.0E-00 0.0E-00;
begin timoschenko_input;
filename template.st;
set 1 1 ;
end timoschenko_input;
begin c2_def;
nsec 2;
sec 1 0 0 0.00 0 ;
sec 2 0 0 -100.00 0 ;
end c2_def ;
end main_body;
;
begin main_body;
name towertop ;
type timoschenko ;
nbodies 1 ;
node_distribution c2_def ;
damping_posdef 0.0 0.0 0.0 0.0E-00 0.0E-00 0.0E-00;
begin timoschenko_input;
filename template.st;
set 1 1 ;
end timoschenko_input;
begin c2_def;
nsec 2;
sec 1 0.0 0.0 0.0 0.0 ;
sec 2 0.0 0.0 -1.00 0.0 ;
end c2_def ;
end main_body;
;
begin main_body;
name shaft ;
type timoschenko ;
nbodies 1 ;
node_distribution c2_def ;
damping_posdef 0.0 0.0 0.0 0.0E-00 0.0E-00 0.0E-00;
begin timoschenko_input;
filename template.st;
set 1 1 ;
end timoschenko_input;
begin c2_def;
nsec 2;
sec 1 0.0 0.0 0.0 0.0 ;
sec 2 0.0 0.0 1.0 0.0 ;
end c2_def ;
end main_body;
;
begin main_body;
name hub1 ;
type timoschenko ;
nbodies 1 ;
node_distribution c2_def ;
damping_posdef 0.0 0.0 0.0 0.0E-00 0.0E-00 0.0E-00;
begin timoschenko_input;
filename template.st;
set 1 1 ;
end timoschenko_input;
begin c2_def;
nsec 2;
sec 1 0.0 0.0 0.0 0.0 ;
sec 2 0.0 0.0 1.0 0.0 ;
end c2_def ;
end main_body;
;
begin main_body;
name hub2 ;
copy_main_body hub1;
end main_body;
;
begin main_body;
name hub3 ;
copy_main_body hub1 ;
end main_body;
;
begin main_body;
name blade1 ;
type timoschenko ;
nbodies 1 ;
node_distribution c2_def;
damping_posdef 0.0 0.0 0.0 0.0E-00 0.0E-00 0.0E-00;
begin timoschenko_input ;
filename template.st;
set 1 1 ;
end timoschenko_input;
begin c2_def;
nsec 2;
sec 1 0.00000E+00 0.00000E+00 0.00000E+00 0.00000E+00 ;
sec 2 0.00000E+00 0.00000E+00 8.00000E+01 0.00000E+00 ;
end c2_def ;
end main_body;
;
begin main_body;
name blade2 ;
copy_main_body blade1;
end main_body;
;
begin main_body;
name blade3 ;
copy_main_body blade1 ;
end main_body;
;----------------------------------------------------------------------------------------------------------------------
;
begin orientation;
begin base;
mbdy tower;
inipos 0.0 0.0 0.0 ;
body_eulerang 0.0 0.0 0.0;
end base;
begin relative;
mbdy1 tower last;
mbdy2 towertop 1;
mbdy2_eulerang 0.0 0.0 0.0;
end relative;
begin relative;
mbdy1 towertop last;
mbdy2 shaft 1;
mbdy2_eulerang 90.0 0.0 0.0;
mbdy2_eulerang 0.0 0.0 0.0; [tilt] 0.0 0.0
mbdy2_eulerang 0.0 0.0 0.0; 0.0 0.0 [azi]
mbdy2_ini_rotvec_d1 0.0 0.0 -1.0 0.0 ; [init_wr];
end relative;
begin relative;
mbdy1 shaft last;
mbdy2 hub1 1;
mbdy2_eulerang -90.0 0.0 0.0;
mbdy2_eulerang 0.0 180.0 0.0;
mbdy2_eulerang 2.5 0.0 0.0; [cone] 0.0 0.0
end relative;
begin relative;
mbdy1 shaft last;
mbdy2 hub2 1;
mbdy2_eulerang -90.0 0.0 0.0;
mbdy2_eulerang 0.0 60.0 0.0;
mbdy2_eulerang 2.5 0.0 0.0;
end relative;
begin relative;
mbdy1 shaft last;
mbdy2 hub3 1;
mbdy2_eulerang -90.0 0.0 0.0;
mbdy2_eulerang 0.0 -60.0 0.0;
mbdy2_eulerang 2.5 0.0 0.0;
end relative;
begin relative;
mbdy1 hub1 last;
mbdy2 blade1 1;
mbdy2_eulerang 0.0 0.0 0;
end relative;
begin relative;
mbdy1 hub2 last;
mbdy2 blade2 1;
mbdy2_eulerang 0.0 0.0 0.0;
end relative;
begin relative;
mbdy1 hub3 last;
mbdy2 blade3 1;
mbdy2_eulerang 0.0 0.0 0.0;
end relative;
end orientation;
;----------------------------------------------------------------------------------------------------------------------
begin constraint;
begin fix0;
body tower;
end fix0;
begin fix1;
mbdy1 tower last ;
mbdy2 towertop 1;
end fix1;
begin bearing1;
name shaft_rot;
mbdy1 towertop last;
mbdy2 shaft 1;
bearing_vector 2 0.0 0.0 -1.0; (0=global.1=mbdy1.2=mbdy2)
end bearing1;
; begin bearing3;
; name shaft_rot;
; mbdy1 towertop last;
; mbdy2 shaft 1;
; bearing_vector 2 0.0 0.0 -1.0; (0=global.1=mbdy1.2=mbdy2)
; omegas 0.0 ;
; end bearing3;
begin fix1;
mbdy1 shaft last ;
mbdy2 hub1 1;
end fix1;
begin fix1;
mbdy1 shaft last ;
mbdy2 hub2 1;
end fix1;
begin fix1;
mbdy1 shaft last ;
mbdy2 hub3 1;
end fix1;
begin bearing2;
name pitch1;
mbdy1 hub1 last;
mbdy2 blade1 1;
bearing_vector 2 0.0 0.0 -1.0;
end bearing2;
begin bearing2;
name pitch2;
mbdy1 hub2 last;
mbdy2 blade2 1;
bearing_vector 2 0.0 0.0 -1.0;
end bearing2;
begin bearing2;
name pitch3;
mbdy1 hub3 last;
mbdy2 blade3 1;
bearing_vector 2 0.0 0.0 -1.0;
end bearing2;
end constraint;
end new_htc_structure;
;----------------------------------------------------------------------------------------------------------------------
begin wind ;
density 1.225 ;
wsp 1;
tint 0.01;
horizontal_input 1 ;
windfield_rotations 0.0 0.0 0.0 ; yaw, tilt, rotation
center_pos0 0.0 0.0 -119 ; hub height
shear_format 3 0.2;
turb_format 0 ; 0=none, 1=mann,2=flex
tower_shadow_method 3 ; 0=none, 1=potential flow, 2=jet
; scale_time_start 100;
; wind_ramp_factor 0.0 50 0.0 1.0 ;
;
begin tower_shadow_potential_2;
tower_mbdy_link tower;
nsec 2;
radius 0.0 4.15 ;
radius 115.63 2.75 ;
end tower_shadow_potential_2;
end wind;
;
begin aerodrag ;
begin aerodrag_element ;
mbdy_name tower;
aerodrag_sections uniform 10 ;
nsec 2 ;
sec 0.0 0.6 1.0 ; tower bottom
sec 100.0 0.6 1.0 ; tower top
end aerodrag_element;
;
begin aerodrag_element ; Nacelle drag side
mbdy_name shaft;
aerodrag_sections uniform 2 ;
nsec 2 ;
sec 0.0 0.8 10.0 ;
sec 1.00 0.8 10.0 ;
end aerodrag_element;
end aerodrag;
;
begin aero ;
nblades 3;
hub_vec shaft -3 ;
link 1 mbdy_c2_def blade1;
link 2 mbdy_c2_def blade2;
link 3 mbdy_c2_def blade3;
ae_filename template.ae ;
pc_filename template.pc ;
induction_method 0 ; 0=none, 1=normal
aerocalc_method 1 ; 0=no aerodynamics, 1=with aerodynamics
aerosections 2;
ae_sets 1 1 1;
tiploss_method 1 ; 0=none, 1=prandtl
dynstall_method 2 ; 0=none, 1=stig øye method,2=mhh method
end aero ;
;----------------------------------------------------------------------------------------------------------------------
begin output;
filename case0;
time 0 100;
data_format gtsdf;
buffer 9999;
;
general time;
constraint bearing1 shaft_rot 2; angle and angle velocity
constraint bearing2 pitch1 5; angle and angle velocity
constraint bearing2 pitch2 5; angle and angle velocity
constraint bearing2 pitch3 5; angle and angle velocity
aero omega ;
aero torque;
aero power;
aero thrust;
wind free_wind 1 0.0 0.0 -100;
; Moments:
mbdy momentvec tower 1 1 tower # tower base ;
mbdy momentvec tower 1 2 tower # tower yaw bearing ;
mbdy momentvec shaft 1 1 shaft # main bearing ;
mbdy momentvec blade1 1 1 blade1 # blade 1 root ;
mbdy momentvec blade2 1 1 blade2 # blade 2 root ;
mbdy momentvec blade3 1 1 blade3 # blade 3 root ;
mbdy momentvec blade1 1 2 local # blade 1 50% local e coo ;
mbdy momentvec blade2 1 2 local # blade 2 50% local e coo ;
mbdy momentvec blade3 1 2 local # blade 3 50% local e coo ;
; Displacements and accelerations
mbdy state pos tower 1 1.0 global only 1 # Tower top FA displ;
mbdy state pos tower 1 1.0 global only 2 # Tower top SS displ;
mbdy state acc tower 1 1.0 global only 1 # Tower top FA acc;
mbdy state acc tower 1 1.0 global only 2 # Tower top SS acc;
mbdy state pos blade1 1 1.0 global # gl blade 1 tip pos ;
mbdy state pos blade2 1 1.0 global # gl blade 2 tip pos ;
mbdy state pos blade3 1 1.0 global # gl blade 3 tip pos ;
mbdy state pos blade1 1 1.0 blade1 # blade 1 tip pos ;
end output;
;
exit;
1 number of sets, Nset
-----------------
#1 Nset number 1
================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
r [0] m [1] x_cg [2] y_cg [3] ri_x [4] ri_y [5] x_sh [6] y_sh [7] E [8] G [9] I_x [10] I_y [11] K [12] k_x [13] k_y [14] A [15] pitch [16] x_e [17] y_e [18]
================================================================================================================================================================================================================================================================================================================================================================================================================================================================================================
$1 2 subset number 1
0.000000000000000e+00 0.500000000000000e+00 0.000000000000000e+00 0.000000000000000e+00 0.500000000000000e+00 0.500000000000000e+00 0.000000000000000e+00 0.000000000000000e+00 1.000000000000000e+14 1.000000000000000e+13 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 0.000000000000000e+00 0.000000000000000e+00 0.000000000000000e+00
1.000000000000000e+00 0.500000000000000e+00 0.000000000000000e+00 0.000000000000000e+00 0.500000000000000e+00 0.500000000000000e+00 0.000000000000000e+00 0.000000000000000e+00 1.000000000000000e+14 1.000000000000000e+13 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 1.000000000000000e+00 0.000000000000000e+00 0.000000000000000e+00 0.000000000000000e+00
@@ -5,10 +5,6 @@ Created on Thu Aug 04 09:24:51 2016
@author: tlbl
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import numpy as np
@@ -5,12 +5,6 @@ Created on Thu Aug 04 11:09:43 2016
@author: tlbl
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
from wetb.control import control
import numpy as np
import os
import re
import xarray as xr
from copy import copy
from wetb.fatigue_tools.fatigue import eq_loads_from_Markov
def nc_to_dataarray(nc_file):
dataarray = xr.open_dataset(nc_file).to_dataarray().squeeze('variable')
if 'sensor_unit' in dataarray.coords:
dataarray = dataarray.assign_coords(sensor_unit=dataarray["sensor_unit"].astype(str))
if 'sensor_description' in dataarray.coords:
dataarray = dataarray.assign_coords(sensor_description=dataarray["sensor_description"].astype(str))
dataarray = add_coords_from_sensor_description(dataarray)
return dataarray
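# Hedged usage sketch (file name is hypothetical): load a netCDF result file
# into a labelled DataArray; sensor metadata ('coo', 'load', 'mbdy', 'node',
# 's', 's/S', 'center') is added as coordinates by
# add_coords_from_sensor_description below:
#     da = nc_to_dataarray('postproc/stats.nc')
#     blade_root_mx = da.where((da['load'] == 'Mx') & (da['mbdy'] == 'blade1'), drop=True)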
def get_params(filename, regex):
match = re.match(regex, os.path.basename(filename))
if match is not None:
return match.groups()
else:
return np.nan
def add_coords_from_filename(dataarray, params, regex, formats=None):
coords = xr.apply_ufunc(get_params,
dataarray.coords['filename'],
kwargs={'regex': regex},
input_core_dims=[[]],
output_core_dims=len(params)*[[]],
vectorize=True)
if formats is None:
for i in range(len(params)):
dataarray.coords[params[i]] = ('filename', coords[i].values)
else:
for i in range(len(params)):
dataarray.coords[params[i]] = ('filename', [formats[i](v) for v in coords[i].values])
return dataarray
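# Hedged usage sketch (regex and parameter names are hypothetical): tag every
# file with the wind speed and seed parsed from names like
# 'dlc12_wsp10_seed1001.nc':
#     da = add_coords_from_filename(da, params=['wsp', 'seed'],
#                                   regex=r'dlc12_wsp(\d+)_seed(\d+)',
#                                   formats=[int, int])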
def get_load_info(sensor_description):
if sensor_description.startswith('Force_intp'):
load = sensor_description[12:14]
mbdy_end = sensor_description.find(' ', 20)
mbdy = sensor_description[20:mbdy_end]
s_start = sensor_description.find('s=', mbdy_end) + 3
s_end = s_start + 6
s = float(sensor_description[s_start: s_end])
ss_start = sensor_description.find('s/S=', mbdy_end) + 5
ss_end = ss_start + 6
ss = float(sensor_description[ss_start: ss_end])
coo_start = sensor_description.find('coo:', ss_end) + 5
coo_end = sensor_description.find(' ', coo_start)
coo = sensor_description[coo_start:coo_end]
center_start = sensor_description.find('center:', coo_end) + 7
center_end = sensor_description.find(' ', center_start)
center = sensor_description[center_start:center_end]
return coo, load, mbdy, None, s, ss, center
elif sensor_description.startswith('Moment_intp'):
load = sensor_description[13:15]
mbdy_end = sensor_description.find(' ', 21)
mbdy = sensor_description[21:mbdy_end]
s_start = sensor_description.find('s=', mbdy_end) + 3
s_end = s_start + 6
s = float(sensor_description[s_start: s_end])
ss_start = sensor_description.find('s/S=', mbdy_end) + 5
ss_end = ss_start + 6
ss = float(sensor_description[ss_start: ss_end])
coo_start = sensor_description.find('coo:', ss_end) + 5
coo_end = sensor_description.find(' ', coo_start)
coo = sensor_description[coo_start:coo_end]
center_start = sensor_description.find('center:', coo_end) + 7
center_end = sensor_description.find(' ', center_start)
center = sensor_description[center_start:center_end]
return coo, load, mbdy, None, s, ss, center
elif sensor_description.startswith('Force'):
load = sensor_description[7:9]
mbdy_end = sensor_description.find(' ', 15)
mbdy = sensor_description[15:mbdy_end]
node_start = sensor_description.find('nodenr:', mbdy_end) + 8
node_end = node_start + 3
node = int(sensor_description[node_start:node_end])
coo_start = sensor_description.find('coo:', node_end) + 5
coo_end = sensor_description.find(' ', coo_start)
coo = sensor_description[coo_start:coo_end]
return coo, load, mbdy, node, None, None, None
elif sensor_description.startswith('Moment'):
load = sensor_description[6:8]
mbdy_end = sensor_description.find(' ', 14)
mbdy = sensor_description[14:mbdy_end]
node_start = sensor_description.find('nodenr:', mbdy_end) + 8
node_end = node_start + 3
node = int(sensor_description[node_start:node_end])
coo_start = sensor_description.find('coo:', node_end) + 5
coo_end = sensor_description.find(' ', coo_start)
coo = sensor_description[coo_start:coo_end]
return coo, load, mbdy, node, None, None, None
else:
return None, None, None, None, None, None, None
def add_coords_from_sensor_description(dataarray):
coords = xr.apply_ufunc(get_load_info,
dataarray.coords['sensor_description'],
input_core_dims=[[]],
output_core_dims=[[], [], [], [], [], [], []],
vectorize=True)
coords_labels = ['coo', 'load', 'mbdy', 'node', 's', 's/S', 'center']
for i in range(len(coords_labels)):
dataarray.coords[coords_labels[i]] = ('sensor_name', coords[i].values)
return dataarray
def get_DLC(filename):
filename = os.path.basename(filename)
i1 = filename.lower().find('dlc')
i2 = filename.find('_', i1)
if i2 == -1:
DLC = filename[i1:]
else:
DLC = filename[i1:i2]
return DLC
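# Example: get_DLC('DLC12_wsp04_wdir350_s1001.htc') returns 'DLC12', i.e. the substring
# starting at 'dlc'/'DLC' and ending at the first following underscore.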
def apply_regex(filename, regex):
filename = os.path.basename(filename)
if re.match(regex, filename) is None:
return False
else:
return True
def get_groups(filename, regex_list):
for regex in regex_list.values():
match = re.match(regex, os.path.basename(filename))
if match is not None:
break
return match.group(0) if match is not None else None
def group_simulations(dataarray, regex_list):
groups = np.unique(np.vectorize(get_groups)(dataarray.filename, regex_list))
grouped_simulations = {}
for group in groups:
grouped_simulations[group] = dataarray.sel(filename=np.vectorize(apply_regex)(dataarray.filename, group))
return grouped_simulations
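# Hypothetical grouping example (adapt the regexes to your own naming convention):
# regex_list = {'DLC12': r'DLC12_wsp\d+_wdir\d+', 'DLC24': r'DLC24_wsp\d+_wdir\d+'}
# groups the turbulence seeds that share DLC, wind speed and wind direction, since
# match.group(0) (the matched prefix without the seed) is identical for those files.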
def average_contemporaneous_loads(extreme_loads, driver, load, metric, safety_factor):
if load in driver:
return metric(extreme_loads)*safety_factor
if driver == 'Fres_max':
if load in ['Fx', 'Fy']:
return metric(extreme_loads)*safety_factor
if driver == 'Mres_max':
if load in ['Mx', 'My']:
return metric(extreme_loads)*safety_factor
return np.mean(np.abs(extreme_loads))*np.sign(extreme_loads[np.argmax(np.abs(extreme_loads))])
def scale_contemporaneous_loads(extreme_loads, driver, load, scaling_factor, safety_factor):
if load in driver:
return extreme_loads*scaling_factor*safety_factor
if driver == 'Fres_max':
if load in ['Fx', 'Fy']:
return extreme_loads*scaling_factor*safety_factor
if driver == 'Mres_max':
if load in ['Mx', 'My']:
return extreme_loads*scaling_factor*safety_factor
return extreme_loads*scaling_factor
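# Note: these two helpers implement the 'averaging' and 'scaling' options used by
# get_loads_by_group/get_DLB_extreme_loads for the contemporaneous loads:
# - average_contemporaneous_loads: the driving component gets metric*safety_factor,
#   the other components get the mean of their absolute values with the sign of the
#   absolute maximum.
# - scale_contemporaneous_loads: all components of the selected timeseries are scaled
#   by characteristic_load/timeseries_load; the driving component also gets the safety factor.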
def get_loads_by_group(extreme_loads, regex_list, metric_list, safety_factor_list, contemporaneous_method='averaging'):
grouped_simulations = group_simulations(extreme_loads, regex_list)
loads_by_group_dict = {}
if contemporaneous_method == 'averaging':
for group, simulations in grouped_simulations.items():
DLC = get_DLC(group)
group_loads = xr.apply_ufunc(average_contemporaneous_loads,
simulations,
simulations.coords['driver'],
simulations.coords['load'],
kwargs={'metric': metric_list[DLC], 'safety_factor': safety_factor_list[DLC]},
input_core_dims=[['filename'], [], []],
vectorize=True)
Fres = np.sqrt(group_loads.sel(load='Fx')**2 + group_loads.sel(load='Fy')**2)
Fres.coords['load'] = 'Fres'
Mres = np.sqrt(group_loads.sel(load='Mx')**2 + group_loads.sel(load='My')**2)
Mres.coords['load'] = 'Mres'
thetaF = np.arctan2(group_loads.sel(load='Fy'), group_loads.sel(load='Fx'))*180/np.pi
thetaF.coords['load'] = 'Theta_F'
thetaM = np.arctan2(group_loads.sel(load='My'), group_loads.sel(load='Mx'))*180/np.pi
thetaM.coords['load'] = 'Theta_M'
group_loads = xr.concat([group_loads, Fres, thetaF, Mres, thetaM], dim='load')
loads_by_group_dict[group] = group_loads
elif contemporaneous_method == 'scaling':
for group, simulations in grouped_simulations.items():
main_loads = simulations.isel(driver=range(12), load=xr.DataArray([int(i/2) for i in range(12)], dims='driver'))
Fres = np.sqrt(simulations.sel(driver='Fres_max').sel(load='Fx')**2 + simulations.sel(driver='Fres_max').sel(load='Fy')**2)
Mres = np.sqrt(simulations.sel(driver='Mres_max').sel(load='Mx')**2 + simulations.sel(driver='Mres_max').sel(load='My')**2)
Fres.coords['load'] = 'Fres'
Mres.coords['load'] = 'Mres'
main_loads = xr.concat([main_loads, Fres, Mres], dim='driver')
DLC = get_DLC(group)
characteristic_loads = xr.apply_ufunc(metric_list[DLC],
main_loads,
input_core_dims=[['filename']],
vectorize=True)
scaling_factors = characteristic_loads/main_loads
closest_load_indices = np.abs(1/scaling_factors - 1).argmin('filename')
scaling_factors = scaling_factors.isel(filename=closest_load_indices)
closest_load_indices = closest_load_indices.drop_vars('load')
simulations = simulations.isel(filename=closest_load_indices)
group_loads = xr.apply_ufunc(scale_contemporaneous_loads,
simulations,
simulations.coords['driver'],
simulations.coords['load'],
scaling_factors,
kwargs={'safety_factor': safety_factor_list[DLC]},
input_core_dims=[[], [], [], []],
vectorize=True)
Fres = np.sqrt(group_loads.sel(load='Fx')**2 + group_loads.sel(load='Fy')**2)
Fres.coords['load'] = 'Fres'
Mres = np.sqrt(group_loads.sel(load='Mx')**2 + group_loads.sel(load='My')**2)
Mres.coords['load'] = 'Mres'
thetaF = np.arctan2(group_loads.sel(load='Fy'), group_loads.sel(load='Fx'))*180/np.pi
thetaF.coords['load'] = 'Theta_F'
thetaM = np.arctan2(group_loads.sel(load='My'), group_loads.sel(load='Mx'))*180/np.pi
thetaM.coords['load'] = 'Theta_M'
group_loads = xr.concat([group_loads, Fres, thetaF, Mres, thetaM], dim='load')
loads_by_group_dict[group] = group_loads
loads_by_group = xr.concat(list(loads_by_group_dict.values()), 'group')
if 'variable' in loads_by_group.coords:
loads_by_group = loads_by_group.drop_vars('variable')
if 'filename' in loads_by_group.coords:
loads_by_group = loads_by_group.drop_vars('filename')
loads_by_group.coords['group'] = list(loads_by_group_dict.keys())
return loads_by_group
def get_DLB_extreme_loads(extreme_loads, regex_list, metric_list, safety_factor_list, contemporaneous_method='averaging'):
"""
Calculate the extreme loads for the whole DLB.
Parameters
----------
extreme_loads : xarray.DataArray (Nsimulations x Nsensors x 14 x 10)
DataArray containing collected data for each simulation and load sensor.
The 14 x 10 matrix corresponds to the extreme loading matrix in IEC61400-1
in annex I-1, where the 10 components are Fx, Fy, Fz, Mx, My, Mz, Fr, theta_F, Mr, theta_M.
Dims: filename, sensor_name, driver, load
regex_list : dict
Dictionary containing the regular expression for grouping simulations
for each DLC.
metric_list : dict
Dictionary containing the function to be applied to each group of simulations
for each DLC.
safety_factor_list : dict
Dictionary containing the safety factor to be applied to each group of simulations
for each DLC.
contemporaneous_method : str
Method for assessing the contemporaneous loads.
'averaging': the mean of the absolute values from each timeseries is used,
then the sign of the absolute maximum value is applied.
'scaling': the contemporaneous values from the timeseries whose load is
closest to the characteristic load are selected, then scaled
by the factor characteristic load / timeseries load.
The default is 'averaging'.
Returns
-------
DataArray (Nsensors x 14 x 10)
DataArray containing the extreme loading matrix of the DLB for each sensor,
as well as the group of simulations driving each sensor.\n
Dims: sensor_name, driver, load
Coords: sensor_name, driver, load, group
"""
loads_by_group = get_loads_by_group(extreme_loads, regex_list, metric_list,
safety_factor_list, contemporaneous_method=contemporaneous_method)
driving_group_indices = []
for driver in loads_by_group.coords['driver'].values:
load = driver[:driver.find('_')]
metric = driver[driver.find('_') + 1:]
if metric == 'max':
driving_group_indices.append(loads_by_group.sel(driver=driver, load=load).argmax('group'))
elif metric == 'min':
driving_group_indices.append(loads_by_group.sel(driver=driver, load=load).argmin('group'))
driving_group_indices = xr.concat(driving_group_indices, dim='driver')
driving_group_indices = driving_group_indices.drop_vars('load')
DLB_extreme_loads = loads_by_group.isel(group=driving_group_indices)
return DLB_extreme_loads.transpose(..., 'sensor_name', 'driver', 'load')
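# Minimal usage sketch (names and values below are hypothetical; choose metrics and
# partial safety factors according to your own load basis):
# regex_list = {'DLC12': r'DLC12_wsp\d+_wdir\d+', 'DLC14': r'DLC14_wsp\d+_wdir\d+'}
# metric_list = {'DLC12': mean_upperhalf, 'DLC14': np.mean}
# safety_factor_list = {'DLC12': 1.35, 'DLC14': 1.35}
# DLB_extreme_loads = get_DLB_extreme_loads(extreme_loads, regex_list,
#                                           metric_list, safety_factor_list)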
def ext_or_cont(ext_and_cont, driver, sensor_description, metric, safety_factor):
if sensor_description in driver:
return metric(ext_and_cont)*safety_factor
else:
return np.mean(np.abs(ext_and_cont))*np.sign(ext_and_cont[np.argmax(np.abs(ext_and_cont))])
def get_DLB_ext_and_cont(ext_and_cont, regex_list, metric_list, safety_factor_list):
"""
Analogous to get_DLB_extreme_loads, but generalized for any given
set of sensors and not only the 6 load components of a node.
Parameters
----------
ext_and_cont : xarray.DataArray (Nsimulations x Nsensors*2 x Nsensors)
DataArray containing collected data for each simulation of the extreme values
(max and min) and their contemporaneous values for a given set of sensors.
regex_list : dict
Dictionary containing the regular expression for grouping simulations
for each DLC.
metric_list : dict
Dictionary containing the function to be applied to each group of simulations
for each DLC.
safety_factor_list : dict
Dictionary containing the safety factor to be applied to each group of simulations
for each DLC.
Returns
-------
DataArray (Nsensors*2 x Nsensors)
DataArray containing the extreme values (max and min) and their contemporaneous
values for a given set of sensors.
"""
grouped_simulations = group_simulations(ext_and_cont, regex_list)
values_by_group_dict = {}
for group, simulations in grouped_simulations.items():
DLC = get_DLC(group)
group_values = xr.apply_ufunc(ext_or_cont,
simulations,
simulations.coords['driver'],
simulations.coords['sensor_description'],
kwargs={'metric': metric_list[DLC], 'safety_factor': safety_factor_list[DLC]},
input_core_dims=[['filename'], [], []],
vectorize=True)
values_by_group_dict[group] = group_values
values_by_group = xr.concat(list(values_by_group_dict.values()), dim='group')
values_by_group = values_by_group.drop_vars('variable')
values_by_group['group'] = list(values_by_group_dict.keys())
driving_group_indices = []
for driver in values_by_group.coords['driver'].values:
sensor_description = driver[:-4]
metric = driver[-3:]
if metric == 'max':
driving_group_indices.append(values_by_group.sel(driver=driver, sensor_description=sensor_description).argmax('group'))
elif metric == 'min':
driving_group_indices.append(values_by_group.sel(driver=driver, sensor_description=sensor_description).argmin('group'))
driving_group_indices = xr.concat(driving_group_indices, dim='driver')
driving_group_indices = driving_group_indices.drop_vars('sensor_description')
DLB_ext_and_cont = values_by_group.isel(group=driving_group_indices)
return DLB_ext_and_cont
def get_values_by_group(dataarray, regex_list, metric_list, safety_factor_list):
grouped_simulations = group_simulations(dataarray, regex_list)
group_values_dict = {}
for group, simulations in grouped_simulations.items():
DLC = get_DLC(group)
values = xr.apply_ufunc(
metric_list[DLC],
simulations,
input_core_dims=[["filename"]],
vectorize=True)*safety_factor_list[DLC]
group_values_dict[group] = values
group_values = xr.concat(list(group_values_dict.values()), 'group')
group_values = group_values.drop_vars('variable')
group_values.coords['group'] = list(group_values_dict.keys())
return group_values
def get_DLB_directional_extreme_loads(directional_extreme_loads, regex_list, metric_list, safety_factor_list):
"""
Identical procedure to get_DLB_extreme_loads, but for the maximum loads
along each direction.
Parameters
----------
directional_extreme_loads : xarray.DataArray
DataArray containing the collected directional extreme loads of all simulations.
regex_list : dict
Dictionary containing the regular expression for grouping simulations
for each DLC.
metric_list : dict
Dictionary containing the function to be applied to each group of simulations
for each DLC.
safety_factor_list : dict
Dictionary containing the safety factor to be applied to each group of simulations
for each DLC.
Returns
-------
xarray.DataArray
DataArray containing the maximum values of the DLB, as well as
the group of simulations driving each load direction
"""
group_values = get_values_by_group(directional_extreme_loads, regex_list, metric_list, safety_factor_list)
DLB_directional_extreme_loads = group_values.max('group')
DLB_directional_extreme_loads.coords['group'] = group_values['group'].isel(group=group_values.argmax('group'))
return DLB_directional_extreme_loads
def get_DLB_extreme_values(statistics, regex_list, metric_list, safety_factor_list):
"""
Group sensor maxima and minima of each timeseries by regular expression,
apply a metric and scale by a safety factor. Identical procedure to
get_DLB_extreme_loads, but applicable to any sensor; only extreme values
are computed, not contemporaneous values of other sensors.
Parameters
----------
statistics : xarray.DataArray (Nsimulations x *)
DataArray containing the collected statistics of all simulations.
regex_list : dict
Dictionary containing the regular expression for grouping simulations
for each DLC.
metric_list : dict
Dictionary containing the function to be applied to each group of simulations
for each DLC.
safety_factor_list : dict
Dictionary containing the safety factor to be applied to each group of simulations
for each DLC.
Returns
-------
DataArray (Nsensors x 2)
DataArray containing the maximum and minimum values of the DLB, as well as
the group of simulations driving each sensor
"""
group_values = get_values_by_group(statistics, regex_list, metric_list, safety_factor_list)
max_values = group_values.sel(statistic='max').max('group')
max_indices = group_values.sel(statistic='max').argmax('group')
max_values.coords['group'] = group_values.sel(statistic='max')['group'].isel(group=max_indices)
min_values = group_values.sel(statistic='min').min('group')
min_indices = group_values.sel(statistic='min').argmin('group')
min_values.coords['group'] = group_values.sel(statistic='min')['group'].isel(group=min_indices)
DLB_extreme_values = xr.concat([max_values, min_values], dim='statistic')
if 'sensor_name' in DLB_extreme_values.dims:
DLB_extreme_values = DLB_extreme_values.transpose(..., 'sensor_name', 'statistic')
return DLB_extreme_values
def get_DLB_eq_loads(eq_loads, weight_list):
"""
Calculate the fatigue equivalent loads for the whole DLB
from equivalent loads of individual simulations.
Parameters
----------
eq_loads : xarray.DataArray (Nsimulations x * x Nwoehlerslopes)
DataArray containing equivalent loads for each Woehler slope,
for each sensor and for each simulation.\n
Dims: filename, *, m\n
Coords: filename, *, m
weight_list : dict or xarray.DataArray
Dictionary or DataArray containing the weight (real time / simulation time) for each simulation
Returns
-------
xarray.DataArray (Nsensors x Ndirections x Nwoehlerslopes)
DataArray containing the fatigue equivalent loads of the DLB
for each sensor and Woehler slope.\n
Dims: *, m\n
Coords: *, m
"""
if isinstance(weight_list, dict):
weight_list = xr.DataArray(data=list(weight_list.values()),
dims='filename',
coords={'filename': list(weight_list.keys())})
m_list = eq_loads.m
DLB_eq_loads = (weight_list*eq_loads**m_list).sum('filename')**(1/m_list)
if 'variable' in DLB_eq_loads.coords:
DLB_eq_loads = DLB_eq_loads.drop_vars('variable')
return DLB_eq_loads
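# The combination above is the lifetime-weighted equivalent load
#     L_DLB = (sum_i w_i * L_i**m)**(1/m)
# e.g. two simulations with equivalent loads 100 and 200 (same unit), weights 3 and 1
# and m=4 give (3*100**4 + 1*200**4)**(1/4) ~= 209.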
def get_DLB_eq_loads_from_Markov(markov_matrices, weight_list, m_list, neq=1e7):
"""
Calculate the fatigue equivalent loads for the whole DLB
from Markov matrices of individual simulations.
Parameters
----------
markov_matrices : xarray.DataArray (Nsimulations x * x Nbins x 2)
DataArray containing number of cycles and load amplitude
for each sensor for each simulation.\n
Dims: filename, *, bin, (cycles, amplitude)\n
Coords: filename, *
weight_list : dict or xarray.DataArray
Dictionary or DataArray containing the weight (real time / simulation time) for each simulation
m_list : list (Nwoehlerslopes)
List containing the different woehler slopes
neq : int or float, optional
Number of equivalent load cycles. The default is 1e7.
Returns
-------
xarray.DataArray (Nsensors x Ndirections x Nwoehlerslopes)
DataArray containing the fatigue equivalent loads of the DLB
for each sensor and Woehler slope.\n
Dims: *, m\n
Coords: *, m
"""
eq_loads = eq_loads_from_Markov(markov_matrix=markov_matrices,
m_list=m_list,
neq=neq)
eq_loads = xr.DataArray(eq_loads)
eq_loads = eq_loads.rename({f'dim_{i}': markov_matrices.dims[i]
for i in range(len(markov_matrices.dims) - 2)})
eq_loads = eq_loads.rename({eq_loads.dims[-1]: 'm'})
eq_loads = eq_loads.assign_coords(markov_matrices.coords)
eq_loads.coords['m'] = m_list
DLB_eq_loads = get_DLB_eq_loads(eq_loads, weight_list)
return DLB_eq_loads
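# Sketch of the underlying relation (assuming eq_loads_from_Markov implements the
# usual damage-equivalent-load definition): for n_i cycles of amplitude S_i,
#     L_eq = (sum_i n_i * S_i**m / neq)**(1/m)
# The per-file equivalent loads are then combined by get_DLB_eq_loads as above.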
def mean_upperhalf(group):
"""
Calculate the mean of the values in the upper half of a set. To be
used as the metric for some DLCs.
Parameters
----------
group : array
Array containing the maximum values of a given sensor for each
simulation in a group
Returns
-------
float
The mean value of the upper half of simulations
"""
upperhalf = sorted(group, reverse=True)[0:int(len(group)/2)]
return np.mean(upperhalf)
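# Example: mean_upperhalf([1, 2, 3, 4]) averages the two largest values -> 3.5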
def get_weight_list(file_list,
n_years,
Vin,
Vout,
Vr,
wsp_list,
probability,
wsp_weights=None,
yaw_weights=xr.concat([xr.DataArray(data=[[0.5, 0.25, 0.25]],
dims=('dlc', 'wdir'),
coords={'dlc': ['12'], 'wdir': [0, 10, 350]}),
xr.DataArray(data=[[0.5, 0.5]],
dims=('dlc', 'wdir'),
coords={'dlc': ['24'], 'wdir': [20, 340]}),
xr.DataArray(data=[[0.5, 0.5]],
dims=('dlc', 'wdir'),
coords={'dlc': ['64'], 'wdir': [8, 352]})],
dim='dlc'),
n_seeds=xr.DataArray(data=[6, 3, 6],
dims=('dlc'),
coords={'dlc': ['12', '24', '64']}),
n_events=None,
sim_time=xr.DataArray(data=[600, 600, 100, 100, 600],
dims=('dlc'),
coords={'dlc': ['12', '24', '31', '41', '64']}),
weight_DLC64_Vin_Vout=0.025,
neq=None):
"""
Calculate the weights for each simulation for the
calculation of damage equivalent loads of the whole DLB.
Parameters
----------
file_list : xr.DataArray
DataArray containing the paths to all simulations. It must also have
coordinates for DLC, wind speed, wind direction and/or wave direction
n_years : int or float
Turbine's lifetime in years
Vin: int or float
Cut-in wind speed
Vout: int or float
Cut-out wind speed
Vr: int or float
Rated wind speed
wsp_list: array
Array containing all simulated wind speeds
probability: xarray.DataArray
DataArray containing the probability of each combination of wind speed,
wind direction and/or wave direction
wsp_weights: xarray.DataArray, optional
DataArray containing the percentage of time each DLC takes for each wind speed.
The default assumes that DLC64 takes 2.5% of time between Vin and Vout
yaw_weights: xarray.DataArray, optional
DataArray containing the percentage of time each yaw misalignment takes for each DLC.
The default assumes {-10: 0.25, 0: 0.5, 10: 0.25} for DLC12,
{-20: 0.5, 20: 0.5} for DLC24 and {-8: 0.5, 8: 0.5} for DLC64
n_seeds: xarray.DataArray, optional
DataArray containing the number of seeds per DLC per wind speed and wind direction and/or wave direction.
The default assumes 6 seeds for DLC12, 3 seeds for DLC24 and 6 seeds for DLC64
n_events: xarray.DataArray, optional
DataArray containing the number of events per DLC per windspeed.
The default assumes {Vin: 1000, Vr: 50, Vout: 50} for both DLC31 and DLC41.
sim_time: xarray.DataArray, optional
DataArray containing the simulation time per DLC.
The default assumes 600s for DLC12, DLC24, DLC64 and 100s for DLC31 and DLC41
weight_DLC64_Vin_Vout: int or float, optional
Percentage of time that DLC64 takes between Vin and Vout. Only used if
wsp_weights is not passed.
The default is 0.025.
neq : int or float, optional
Number of equivalent load cycles. If not passed, the output weights
can ONLY be used when calculating DLB equivalent loads from individual file
Markov matrices. If passed, the output weights can ONLY be used when
calculating DLB equivalent loads from individual equivalent loads (recommended)
Returns
-------
xarray.DataArray
DataArray containing the weight for each simulation.
"""
if wsp_weights is None:
weight_DLC24 = 50/(365.25*24)
weight_DLC12 = 1 - weight_DLC64_Vin_Vout
wsp_weights = xr.DataArray(data=[[weight_DLC12 if Vin <= v <= Vout else 0 for v in wsp_list],
[weight_DLC24 if Vin <= v <= Vout else 0 for v in wsp_list],
[weight_DLC64_Vin_Vout if Vin <= v <= Vout else 1 for v in wsp_list]],
dims=('dlc', 'wsp'),
coords={'dlc': ['12', '24', '64'],
'wsp': wsp_list})
if n_events is None:
n_events = xr.DataArray(data=[[1000, 50, 50],
[1000, 50, 50]],
dims=('dlc', 'wsp'),
coords={'dlc': ['31', '41'],
'wsp': [Vin, Vr, Vout]})
lifetime = n_years*365.25*24*3600 # convert to seconds
weight_list = (lifetime*probability*wsp_weights*yaw_weights/n_seeds/sim_time).combine_first(n_years*n_events)
if neq is not None: # correct by sim_time/neq so it can be used for individual equivalent loads
weight_list = weight_list*sim_time/neq
# rearrange weights from parameters (DLC, wsp, wdir) to actual filenames
# TO-DO: allow wvdir as well
def get_weight_list_per_filename(file_list, dlc, wsp, wdir, weight_list):
return weight_list.sel(dlc=dlc, wsp=wsp, wdir=wdir)
weight_list = xr.apply_ufunc(get_weight_list_per_filename,
file_list,
file_list['dlc'],
file_list['wsp'],
file_list['wdir'],
kwargs={'weight_list': weight_list},
vectorize=True)
return weight_list
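# Minimal usage sketch (hypothetical names; 'probability' must cover the simulated
# wind speed / wind direction combinations of DLC12, DLC24 and DLC64):
# weight_list = get_weight_list(file_list, n_years=20, Vin=4, Vout=26, Vr=10,
#                               wsp_list=list(range(4, 28, 2)), probability=probability,
#                               neq=1e7)
# DLB_eq_loads = get_DLB_eq_loads(eq_loads, weight_list)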
import numpy as np
import pandas as pd
from openpyxl import load_workbook
from openpyxl.styles import PatternFill
import matplotlib.pyplot as plt
import os
from wetb.dlb.dlb_postprocs import get_DLC
def dataarray_to_excel(dataarray, path):
"""
Generate excel file from a DataArray.
Parameters
----------
dataarray : xarray.DataArray
DataArray containing data from the DLB.
path : str
Path to the new excel file to be created
Returns
-------
None.
"""
df = dataarray.to_dataframe('value').reset_index()
df.to_excel(path, index=False)
def DLB_extreme_loads_to_excel(DLB_extreme_loads, path):
"""
Generate excel report of DLB extreme loads.
Parameters
----------
DLB_extreme_loads : xarray.DataArray
DataArray containing the DLB extreme loading matrices for each load sensor.
path : str
Path to the new excel file to be created
Returns
-------
None.
"""
with pd.ExcelWriter(path, engine="openpyxl") as writer:
for sensor in DLB_extreme_loads.coords['sensor_name'].values:
df = DLB_extreme_loads.sel(sensor_name=sensor).to_pandas()
df['group'] = DLB_extreme_loads.sel(sensor_name=sensor)['group'].values
df.to_excel(writer, sheet_name=sensor, index_label='driver')
workbook = load_workbook(path)
highlight = PatternFill(start_color="C0C0C0", end_color="C0C0C0", fill_type="solid")
for sensor in DLB_extreme_loads.coords['sensor_name'].values:
for i in range(12):
workbook[sensor].cell(row=i + 2, column=int(i/2) + 2).fill = highlight
workbook[sensor].cell(row=14, column=8).fill = highlight
workbook[sensor].cell(row=15, column=10).fill = highlight
workbook.save(path)
def plot_DLB_directional_extreme_loads(DLB_extreme_loads, folder, extension='.png'):
"""
Plot the DLB directional extreme loads and save them to a given folder.
Parameters
----------
DLB_extreme_loads : xarray.DataArray ('sensor_name', 'angle')
DataArray containing the extreme loads of the DLB.
folder : str
Path to the folder where the plots will be saved
extension: str, optional
Extension of the plot files. The default is '.png'.
Returns
-------
None.
"""
angles = DLB_extreme_loads.coords['angle'].values
for i in range(DLB_extreme_loads.shape[0]):
x = [float(DLB_extreme_loads[i, j])*np.cos(np.deg2rad(angles[j]))
for j in range(len(angles))]
y = [float(DLB_extreme_loads[i, j])*np.sin(np.deg2rad(angles[j]))
for j in range(len(angles))]
DLC = [get_DLC(DLB_extreme_loads[i, j].group.values[()]) for j in range(len(angles))]
plt.scatter(x, y)
[plt.plot([0, x[k]], [0, y[k]], color='black') for k in range(len(x))]
plt.xlabel('Mx')
plt.ylabel('My')
plt.title(DLB_extreme_loads[i].coords['sensor_name'].values[()])
plt.axhline(0, color='black',linewidth=1)
plt.axvline(0, color='black',linewidth=1)
plt.xlim(-max(abs(min(x)), abs(max(x)))*1.2, max(abs(min(x)), abs(max(x)))*1.2)
plt.ylim(-max(abs(min(y)), abs(max(y)))*1.2, max(abs(min(y)), abs(max(y)))*1.2)
for j in range(len(x)):
plt.annotate(DLC[j], (x[j], y[j]), textcoords="offset points", xytext=(0,10), ha='center')
plt.savefig(os.path.join(folder, 'Extreme_' + DLB_extreme_loads[i].coords['sensor_name'].values[()] + extension))
plt.show()
def plot_DLB_directional_equivalent_loads(DLB_fatigue_loads, folder, extension='.png'):
"""
Plot the DLB directional equivalent loads and save them to a given folder.
Parameters
----------
DLB_fatigue_loads : xarray.DataArray ('sensor_name', 'angle', 'm')
DataArray containing the fatigue loads of the DLB. It matches the
output from get_DLB_fatigue_loads
folder : str
Path to the folder where the plots will be saved
extension: str, optional
Extension of the plot files. The default is '.png'.
Returns
-------
None.
"""
m_list = DLB_fatigue_loads.coords['m'].values
angles = DLB_fatigue_loads.coords['angle'].values
for i in range(DLB_fatigue_loads.shape[0]):
for j in range(len(m_list)):
x = [float(DLB_fatigue_loads[i, k, j])*np.cos(np.deg2rad(angles[k]))
for k in range(len(angles))]
y = [float(DLB_fatigue_loads[i, k, j])*np.sin(np.deg2rad(angles[k]))
for k in range(len(angles))]
plt.scatter(x, y, label='m = ' + str(m_list[j]))
[plt.plot([0, x[k]], [0, y[k]], color='black') for k in range(len(angles))]
plt.xlabel('Mx')
plt.ylabel('My')
plt.title(DLB_fatigue_loads[i].coords['sensor_name'].values[()])
plt.axhline(0, color='black',linewidth=1)
plt.axvline(0, color='black',linewidth=1)
plt.xlim(-max(abs(min(x)), abs(max(x)))*1.2, max(abs(min(x)), abs(max(x)))*1.2)
plt.ylim(-max(abs(min(y)), abs(max(y)))*1.2, max(abs(min(y)), abs(max(y)))*1.2)
plt.legend()
plt.savefig(os.path.join(folder, 'Fatigue_' + DLB_fatigue_loads[i].coords['sensor_name'].values[()] + extension))
plt.show()
import os
import warnings
from wetb.hawc2.hawc2_input_writer import HAWC2InputWriter
from wetb.hawc2.tests import test_files
from wetb.dlb.iec61400_1 import DTU_IEC61400_1_Ref_DLB
"""
TODO: delete wind ramp / replace wind section
TODO: set default turb_format = 0
"""
class HAWC2_IEC_DLC_Writer(HAWC2InputWriter):
def __init__(self, base_htc_file, diameter,
time_start=100, # Minimum 5s cf. IEC61400-1(2005), section 7.5
turbulence_defaults=(33.6, 3.9, 8192, 64)  # L, gamma, n_x, n_yz
):
HAWC2InputWriter.__init__(self, base_htc_file, diameter=diameter,
time_start=time_start,
turbulence_defaults=turbulence_defaults)
def set_V_hub(self, htc, V_hub, **_):
htc.wind.wsp = V_hub
htc.wind.wind_ramp_factor = 0, self.time_start, 8 / V_hub, 1
def set_wdir(self, htc, wdir, **_):
htc.wind.windfield_rotations = wdir, 0, 0
def set_shear(self, htc, shear, **_):
if isinstance(shear, str):
shear = eval(shear) # convert str to dict (if via excel)
shear_format, shear_arg = shear['profile']
i_shear_format = ['log', 'power'].index(shear_format.lower()) + 2
htc.wind.shear_format = i_shear_format, shear_arg
if shear['type'] == 'NWP':
return
elif shear['type'] == 'EWS':
phi = {'++': 0, '+-': 90, '--': 180, '-+': 270}[shear['sign']]
htc.wind.iec_gust = 'ews', shear['A'], phi, self.time_start, shear['T']
else:
raise NotImplementedError(shear['type'])
def set_ti(self, htc, ti, **_):
htc.wind.tint = ti
def set_seed(self, htc, seed, **kwargs):
if seed is None or seed == "":
htc.wind.turb_format = 0
elif isinstance(seed, int):
L, Gamma, nx, nyz = self.turbulence_defaults
htc.add_mann_turbulence(L, 1, Gamma, seed, no_grid_points=(nx, nyz, nyz),
box_dimension=(kwargs['simulation_time'] * kwargs['V_hub'],
self.diameter, self.diameter))
else:
raise NotImplementedError(seed)
def set_Gust(self, htc, Gust, **kwargs):
if str(Gust).lower() == 'nan':
return
if isinstance(Gust, str):
Gust = eval(Gust)
if Gust['type'] == 'ECD':
V_cg, theta_cg, T = [Gust[k] for k in ['V_cg', 'theta_cg', 'T']]
htc.wind.iec_gust = 'ecd', V_cg, theta_cg, self.time_start, T
elif Gust['type'] == 'EDC':
phi, T = [Gust[k] for k in ['phi', 'T']]
htc.wind.iec_gust = 'edc', 0, phi, self.time_start, T
elif Gust['type'] == 'EOG':
V_gust, T = [Gust[k] for k in ['V_gust', 'T']]
htc.wind.iec_gust = 'eog', V_gust, 0, self.time_start, T
else:
raise NotImplementedError(Gust)
def set_Fault(self, htc, Fault, **kwargs):
if str(Fault).lower() == 'nan':
return
if isinstance(Fault, str):
Fault = eval(Fault)
if Fault['type'] == 'GridLoss':
generator_servo = Fault['generator_servo']
T = Fault['T']
self.set_gridloss_time(htc, generator_servo, self.time_start + T)
elif Fault['type'] == 'StuckBlade':
pitch_servo = Fault['pitch_servo']
T = Fault['T']
self.set_stuckblade(htc, pitch_servo, T)
elif Fault['type'] == 'PitchRunaway':
pitch_servo = Fault['pitch_servo']
T = Fault['T']
self.set_pitchrunaway(htc, pitch_servo, self.time_start + T)
else:
raise NotImplementedError(Fault)
def set_Operation(self, htc, Operation, **kwargs):
if str(Operation).lower() == 'nan':
return
if isinstance(Operation, str):
Operation = eval(Operation)
if Operation['type'] == 'StartUp':
controller = Operation['controller']
T = Operation['T']
self.set_startup_time(htc, controller, self.time_start + T)
elif Operation['type'] == 'ShutDown':
controller = Operation['controller']
T = Operation['T']
self.set_shutdown_time(htc, controller, self.time_start + T, 1)
elif Operation['type'] == 'EmergencyShutDown':
controller = Operation['controller']
T = Operation['T']
self.set_shutdown_time(htc, controller, self.time_start + T, 2)
elif Operation['type'] == 'Parked':
controller = Operation['controller']
self.set_parked(htc, controller, self.time_start + kwargs['simulation_time'])
elif Operation['type'] == 'RotorLocked':
controller = Operation['controller']
self.set_parked(htc, controller, self.time_start + kwargs['simulation_time'])
shaft = Operation['shaft']
shaft_constraint = Operation['shaft_constraint']
azimuth = Operation['Azi']
self.set_rotor_locked(htc, shaft, shaft_constraint, azimuth)
else:
raise NotImplementedError(Operation)
def set_simulation_time(self, htc, simulation_time, **_):
htc.set_time(self.time_start, simulation_time + self.time_start)
def set_gridloss_time(self, htc, generator_servo, t):
generator_servo = htc.dll.get_subsection_by_name(generator_servo, 'name')
if 'time for grid loss' not in generator_servo.init.constant__7.comments.lower():
warnings.warn('Assuming constant 7 in generator_servo DLL is time for grid loss!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "time for grid loss" in constant 7 comment.')
generator_servo.init.constant__7 = 7, t
def set_stuckblade(self, htc, pitch_servo, t):
pitch_servo = htc.dll.get_subsection_by_name(pitch_servo, 'name')
if 'time for stuck blade' not in pitch_servo.init.constant__9.comments.lower():
warnings.warn('Assuming constant 9 in pitch_servo DLL is time for stuck blade!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "time for stuck blade" in constant 9 comment.')
pitch_servo.init.constant__9 = 9, t
if 'angle of stuck blade' not in pitch_servo.init.constant__10.comments.lower():
warnings.warn('Assuming constant 10 in pitch_servo DLL is angle of stuck blade!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "angle of stuck blade" in constant 10 comment.')
pitch_servo.init.constant__10 = 10, 0
def set_pitchrunaway(self, htc, pitch_servo, t):
pitch_servo = htc.dll.get_subsection_by_name(pitch_servo, 'name')
if 'time for pitch runaway' not in pitch_servo.init.constant__8.comments.lower():
warnings.warn('Assuming constant 8 in pitch_servo DLL is time for pitch runaway!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "time for pitch runaway" in constant 8 comment.')
pitch_servo.init.constant__8 = 8, t
def set_startup_time(self, htc, controller, t):
controller = htc.dll.get_subsection_by_name(controller, 'name')
if 'cut-in time' not in controller.init.constant__24.comments.lower():
warnings.warn('Assuming constant 24 in controller DLL is cut-in time!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "cut-in time" in constant 24 comment.')
controller.init.constant__24 = 24, t
def set_shutdown_time(self, htc, controller, t, stop_type):
controller = htc.dll.get_subsection_by_name(controller, 'name')
if 'shut-down time' not in controller.init.constant__26.comments.lower():
warnings.warn('Assuming constant 26 in controller DLL is shut-down time!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "shut-down time" in constant 26 comment.')
controller.init.constant__26 = 26, t
if 'stop type' not in controller.init.constant__28.comments.lower():
warnings.warn('Assuming constant 28 in controller DLL is stop type!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "stop type" in constant 28 comment.')
controller.init.constant__28 = 28, stop_type
def set_parked(self, htc, controller, simulation_time):
controller = htc.dll.get_subsection_by_name(controller, 'name')
if 'cut-in time' not in controller.init.constant__24.comments.lower():
warnings.warn('Assuming constant 24 in controller DLL is cut-in time!'
' Please verify your htc file is correct. Disable warning by '
+ 'placing "cut-in time" in constant 24 comment.')
controller.init.constant__24 = 24, simulation_time + 1
htc.aero.induction_method = 0
def set_rotor_locked(self, htc, shaft, shaft_constraint, azimuth):
for constraint in htc.new_htc_structure.constraint:
try:
if shaft_constraint in str(constraint.name):
constraint.name_ = 'bearing3'
constraint.add_line('omegas', [0])
break
except:
continue
for orientation in htc.new_htc_structure.orientation:
try:
if shaft in str(orientation.mbdy2):
command = 'mbdy2_eulerang'
for line in orientation:
if 'mbdy2_eulerang' in line.name_:
command = line.name_
orientation[command].values[2] = azimuth
break
except:
continue
if __name__ == '__main__':
dlb = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller', generator_servo='generator_servo',
pitch_servo='servo_with_limits', best_azimuth=180)
path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/'
writer = HAWC2_IEC_DLC_Writer(path + 'htc/DTU_10MW_RWT.htc', 180)
p = writer.from_pandas(dlb['DLC14'])
print(p.contents)
p.write_all(out_dir='tmp')
import os
import numpy as np
import pandas as pd
import itertools
"""IEC refers to IEC61400-1(2005)
Questions:
dlc1.4 (hansen: 100s, no turb), why not NTM, NWP 600s and 6 seeds
seed numbering and reuse???
"""
class DLC():
def __init__(self, Description, Operation, Turb, Shear, Gust, Fault=None, variables={}):
self.Description = Description
if isinstance(Turb, tuple):
func_Turb, params_Turb = Turb
def Turb_wrapper(*args, **kwargs):
combined_kwargs = {**params_Turb, **kwargs}
return func_Turb(*args, **combined_kwargs)
setattr(self, 'Turb', Turb_wrapper)
else:
setattr(self, 'Turb', Turb)
if isinstance(Shear, tuple):
func_Shear, params_Shear = Shear
def Shear_wrapper(*args, **kwargs):
combined_kwargs = {**params_Shear, **kwargs}
return func_Shear(*args, **combined_kwargs)
setattr(self, 'Shear', Shear_wrapper)
else:
setattr(self, 'Shear', Shear)
if isinstance(Gust, tuple):
func_Gust, params_Gust = Gust
def Gust_wrapper(*args, **kwargs):
combined_kwargs = {**params_Gust, **kwargs}
return func_Gust(*args, **combined_kwargs)
setattr(self, 'Gust', Gust_wrapper)
else:
setattr(self, 'Gust', Gust)
if isinstance(Fault, tuple):
func_Fault, params_Fault = Fault
def Fault_wrapper(*args, **kwargs):
combined_kwargs = {**params_Fault, **kwargs}
return func_Fault(*args, **combined_kwargs)
setattr(self, 'Fault', Fault_wrapper)
else:
setattr(self, 'Fault', Fault)
if isinstance(Operation, tuple):
func_Operation, params_Operation = Operation
def Operation_wrapper(*args, **kwargs):
combined_kwargs = {**params_Operation, **kwargs}
return func_Operation(*args, **combined_kwargs)
setattr(self, 'Operation', Operation_wrapper)
else:
setattr(self, 'Operation', Operation)
self.variables = variables
self.variables.update({k.lower(): v for k, v in variables.items()})
turb_class = self.iec_wt_class[1].lower()
assert turb_class in 'abc'
self.I_ref = {'a': .16, 'b': .14, 'c': .12}[turb_class] # IEC61400-1(2005) table 1
wind_class = int(self.iec_wt_class[0])
assert 1 <= wind_class <= 3
self.V_ref = {1: 50, 2: 42.5, 3: 37.5}[wind_class]
if variables["seed"]:
self.rng = np.random.default_rng(seed=variables["seed"])
@classmethod
def getattr(cls, name):
try:
return getattr(cls, name)
except AttributeError as e:
d = {k.lower(): k for k in dir(cls)}
if name.lower() in d:
return getattr(cls, d[name.lower()])
else:
raise e
def __getattr__(self, name):
try:
return self.__getattribute__(name)
except AttributeError as e:
if name in self.variables:
return self.variables[name]
elif name.lower() in self.variables:
return self.variables[name.lower()]
else:
raise e
def get_lst(self, x):
if isinstance(x, pd.Series):
x = x.iloc[0]
if ":" in str(x):
start, step, stop = [float(eval(v, globals(), self.variables)) for v in x.split(":")]
return list(np.arange(start, stop + step, step))
else:
return [float(eval(v, globals(), self.variables)) for v in str(x).replace("/", ",").split(",")]
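# Example: with Vin=4, Vstep=2, Vout=26, get_lst('Vin:Vstep:Vout') -> [4.0, 6.0, ..., 26.0]
# and get_lst('-10/0/10') -> [-10.0, 0.0, 10.0] (slash- or comma-separated values).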
@property
def wsp_lst(self):
return sorted(self.get_lst(self.WSP))
@property
def wdir_lst(self):
return self.get_lst(self.Wdir)
def case_arg_lst_product(self, **kwargs):
case_arg_lst = []
for dict_lst in itertools.product(
*[m(self, **kwargs) for m in [DLC.WspWdir, self.Turb, self.Shear, self.Gust, self.Fault, self.Operation] if m is not None]):
ids = {k: v for d in dict_lst for k, v in d.items() if '_id' in k}
d = {k: v for d in dict_lst for k, v in d.items() if '_id' not in k}
name = [self.Name, 'wsp%02d' % d['V_hub'], "wdir%03d" % (d['wdir'] % 360)]
if 'seed_id' in ids:
name.append("s%04d" % d['seed'])
if 'ews_id' in ids:
name.append("ews%s" % d['shear']['sign'])
if 'edc_id' in ids:
name.append(ids['edc_id'])
if 'T_id' in ids:
name.append(ids['T_id'])
if 'Azi_id' in ids:
name.append(ids['Azi_id'])
d['Name'] = "_".join(name)
case_arg_lst.append(d)
return case_arg_lst
def to_pandas(self):
case_dict_lst = []
for V_hub in self.wsp_lst:
for wdir in self.wdir_lst:
default_kwargs = {'Folder': self.Name,
'simulation_time': self.Time,
'V_hub': V_hub,
'wdir': wdir,
}
for case_args in self.case_arg_lst_product(**default_kwargs):
case_args.update({k: v for k, v in default_kwargs.items() if k not in case_args})
case_dict_lst.append(case_args)
cols = ['Name', 'Folder', 'V_hub', 'wdir', 'simulation_time']
cols += [k for k in case_args.keys() if k not in cols]
return pd.DataFrame(case_dict_lst, columns=cols)
# ===============================================================================
# General
# ===============================================================================
def WspWdir(self, V_hub, wdir, **_):
return [{'V_hub': V_hub, 'wdir': wdir}]
def NONE(self, **_):
return [{}]
def NaN(self, **_):
return [{}]
# ===============================================================================
# Turbulence models
# ===============================================================================
def NoTurb(self, **_):
return [{'seed': None}]
def ConstantTurb(self, ti, **_):
s0 = 1001
if self.seed:
return [{'seed_id': 's%04d' % (s),
'ti': ti,
'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
else:
return [{'seed_id': 's%04d' % (s),
'ti': ti,
'seed': s} for s in range(s0, s0 + int(self.Seeds))]
def NTM(self, V_hub, **_):
# Normal turbulence model IEC section 6.3.1.3
s0 = int((V_hub - self.Vin) // self.Vstep * 100 + 1001)
ti = (self.I_ref * (0.75 * V_hub + 5.6)) / V_hub # IEC (11)
if self.seed:
return [{'seed_id': 's%04d' % (s),
'ti': ti,
'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
else:
return [{'seed_id': 's%04d' % (s),
'ti': ti,
'seed': s} for s in range(s0, s0 + int(self.Seeds))]
def ETM(self, V_hub, **_):
# Extreme Turbulence model
# IEC (9)
V_ave = .2 * self.V_ref
# IEC (19)
c = 2
ti = c * self.I_ref * (0.072 * (V_ave / c + 3) * (V_hub / c - 4) + 10) / V_hub
s0 = int((V_hub - self.Vin) // self.Vstep * 100 + 1001)
if self.seed:
return [{'seed_id': 's%04d' % (s),
'ti': ti,
'seed': s} for s in self.rng.integers(low=0, high=10000, size=int(self.Seeds))]
else:
return [{'seed_id': 's%04d' % (s),
'ti': ti,
'seed': s} for s in range(s0, s0 + int(self.Seeds))]
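# Worked example for turbulence class A (I_ref=0.16), V_ref=50 m/s, V_hub=10 m/s:
#   NTM: ti = 0.16*(0.75*10 + 5.6)/10 = 0.2096
#   ETM: ti = 2*0.16*(0.072*(10/2 + 3)*(10/2 - 4) + 10)/10 ~= 0.338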
# ===============================================================================
# Shear profiles
# ===============================================================================
def NWP(self, alpha, **_):
# The normal wind profile model IEC section 6.3.1.2
return [{'shear': {'type': 'NWP', 'profile': ('power', alpha)}}]
def EWS(self, V_hub, alpha, **_):
# Extreme wind shear, IEC section 6.3.2.6
beta = 6.4
T = 12
sigma_1 = self.I_ref * (0.75 * V_hub + 5.6) # IEC (11)
D = self.D
A = (2.5 + 0.2 * beta * sigma_1 * (D / self.lambda_1)**0.25) / D
return [{'shear': {'type': 'EWS', 'profile': ('power', alpha), 'A': A, 'T': T, 'sign': ews_sign},
'ews_id': ews_sign}
for ews_sign in ['++', '+-', '-+', '--']]
# ===========================================================================
# Gusts
# ===========================================================================
def ECD(self, V_hub, **_):
# Extreme coherent gust with direction change, IEC section 6.3.2.5
# IEC (22)
V_cg = 15 # magnitude of gust
# IEC (24)
theta_cg = (720 / V_hub, 180)[V_hub < 4] # direction change
T = 10 # rise time
return [{'Gust': {'type': 'ECD', 'V_cg': V_cg, 'theta_cg': theta_cg, 'T': T}}]
def EDC(self, V_hub, **_):
# Extreme direction change
phi = 4*(np.rad2deg(np.arctan(self.I_ref*(0.75*V_hub + 5.6)/V_hub/(1 + 0.1*self.D/self.lambda_1))))
T = 10 # Duration
return [{'Gust': {'type': 'EDC', 'phi': sign*phi, 'T': T}, 'edc_id': {-1: '-', 1: '+'}[sign]} for sign in [-1, 1]]
def EOG(self, V_hub, **_):
# Extreme operation gust
V_gust = min(1.35*(0.8*1.4*self.V_ref - V_hub), 3.3*self.I_ref*(0.75*V_hub + 5.6)/(1 + 0.1*self.D/self.lambda_1)) # magnitude of gust
T = 10.5 # duration
return [{'Gust': {'type': 'EOG', 'V_gust': V_gust, 'T': T}}]
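# Note: 0.8*1.4*V_ref equals Ve1, the 1-year extreme wind speed (cf. Ve50 = 1.4*Vref and
# Ve1 = 0.8*Ve50 below), so V_gust = min(1.35*(Ve1 - V_hub), 3.3*sigma_1/(1 + 0.1*D/lambda_1)).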
# ===============================================================================
# Faults
# ===============================================================================
def StuckBlade(self, t, pitch, **_):
if (not isinstance(t, list)) and (not isinstance(pitch, list)):
return [{'Fault': {'type': 'StuckBlade', 'pitch_servo': self.pitch_servo, 'T': t, 'pitch': pitch}}]
else:
return [{'Fault': {'type': 'StuckBlade', 'pitch_servo': self.pitch_servo, 'T': t, 'pitch': pitch},
'T_id': 't' + str(t.index(tp[0])) + 'p' + str(pitch.index(tp[1]))} for tp in itertools.product(t, pitch)]
def PitchRunaway(self, t, **_):
if not isinstance(t, list):
return [{'Fault': {'type': 'PitchRunaway', 'pitch_servo': self.pitch_servo, 'T': t}}]
else:
return [{'Fault': {'type': 'PitchRunaway', 'pitch_servo': self.pitch_servo, 'T': T},
'T_id': 't' + str(t.index(T))} for T in t]
def GridLoss(self, t, **_):
if not isinstance(t, list):
return [{'Fault': {'type': 'GridLoss', 'generator_servo': self.generator_servo, 'T': t}}]
else:
return [{'Fault': {'type': 'GridLoss', 'generator_servo': self.generator_servo, 'T': T}, 'T_id': 't' + str(t.index(T))} for T in t]
# ===============================================================================
# Operations
# ===============================================================================
def PowerProduction(self, **_):
return [{}]
def StartUp(self, t, **_):
if not isinstance(t, list):
return [{'Operation': {'type': 'StartUp', 'controller': self.controller, 'T': t}}]
else:
return [{'Operation': {'type': 'StartUp', 'controller': self.controller, 'T': T},
'T_id': 't' + str(t.index(T))} for T in t]
def ShutDown(self, t, **_):
if not isinstance(t, list):
return [{'Operation': {'type': 'ShutDown', 'controller': self.controller, 'T': t}}]
else:
return [{'Operation': {'type': 'ShutDown', 'controller': self.controller, 'T': T},
'T_id': 't' + str(t.index(T))} for T in t]
def EmergencyShutDown(self, t, **_):
if not isinstance(t, list):
return [{'Operation': {'type': 'EmergencyShutDown', 'controller': self.controller, 'T': t}}]
else:
return [{'Operation': {'type': 'EmergencyShutDown', 'controller': self.controller, 'T': T},
'T_id': 't' + str(t.index(T))} for T in t]
def Parked(self, **_):
return [{'Operation': {'type': 'Parked', 'controller': self.controller}}]
def RotorLocked(self, azimuth, **_):
if not isinstance(azimuth, list):
return [{'Operation': {'type': 'RotorLocked', 'controller': self.controller,
'shaft': self.shaft, 'shaft_constraint': self.shaft_constraint, 'Azi': azimuth}}]
else:
return [{'Operation': {'type': 'RotorLocked', 'controller': self.controller,
'shaft': self.shaft, 'shaft_constraint': self.shaft_constraint, 'Azi': azi},
'Azi_id': 'azi' + f"{azi:03}"} for azi in azimuth]
class DLB():
def __init__(self, dlc_definitions, variables):
cols = ['Name', 'Description', 'Operation', 'WSP', 'Wdir', 'Turb', 'Seeds', 'Shear', 'Gust', 'Fault', 'Time']
self.dlcs = pd.DataFrame(dlc_definitions, columns=cols, index=[dlc['Name'] for dlc in dlc_definitions])
var_name_desc = [('iec_wt_class', 'IEC wind turbine class, e.g. 1A'),
('tiref', 'Reference turbulence intensity'),
('Vin', 'Cut-in wind speed'),
('Vout', 'Cut-out wind speed'),
('Vr', 'Rated wind speed'),
('Vref', 'Reference wind speed'),
('V1', '1-year recurrence period wind speed'),
('Ve50', 'Extreme 50-year recurrence period wind speed'),
('Ve1', 'Extreme 1-year recurrence period wind speed'),
('Vmaint', 'Maximum wind speed for maintenance'),
('Vstep', 'Wind speed distribution step'),
('D', 'Rotor diameter'),
('z_hub', 'Hub height'),
('lambda_1', 'Longitudinal turbulence scale parameter'),
('controller', 'Filename of controller DLL'),
('generator_servo', 'Filename of generator servo DLL'),
('pitch_servo', 'Filename of pitch servo DLL'),
('best_azimuth', 'Best blade azimuth for maintenance'),
('shaft', 'Name of shaft body'),
('shaft_constraint', 'Name of constraint between tower and shaft'),
("seed", "Seed to initialize the RNG for turbulence seed generation")
]
self.variables = pd.DataFrame([{'Name': n, 'Value': variables[n], 'Description': d}
for n, d in var_name_desc], columns=['Name', 'Value', 'Description'],
index=[n for (n, d) in var_name_desc])
@property
def dlb(self):
if not hasattr(self, '_dlb'):
self.generate_DLB()
return self._dlb
def keys(self):
return [n for n in self.dlcs['Name'] if not str(n).lower().strip() in ['', 'none', 'nan']]
def to_excel(self, filename):
if os.path.dirname(filename) != "":
os.makedirs(os.path.dirname(filename), exist_ok=True)
writer = pd.ExcelWriter(filename)
self.dlcs.to_excel(writer, 'DLC', index=False)
self.variables.to_excel(writer, 'Variables', index=False)
writer.close()
@staticmethod
def from_excel(filename):
df_vars = pd.read_excel(filename, sheet_name='Variables',
index_col='Name')
df_vars.fillna('', inplace=True)
variables = {name: value for name, value in zip(df_vars.index, df_vars.Value.values)}
dlb_def = pd.read_excel(filename, 'DLC')
dlb_def.columns = [c.strip() for c in dlb_def.columns]
return DLB([row for _, row in dlb_def.iterrows() if row['Name'] is not np.nan], variables)
def __getitem__(self, key):
return self.dlb[key]
def _make_dlc(self, dlc_name):
dlc_definition = self.dlcs.loc[dlc_name]
kwargs = {k: (DLC.getattr(str(dlc_definition[k][0])), dlc_definition[k][1]) if isinstance(dlc_definition[k], tuple)
else DLC.getattr(str(dlc_definition[k])) for k in ['Operation', 'Turb', 'Shear', 'Gust', 'Fault']}
kwargs['Description'] = dlc_definition['Description']
variables = {v['Name']: v['Value'] for _, v in self.variables.iterrows()}
variables.update(dlc_definition)
dlc = DLC(variables=variables, **kwargs)
df = dlc.to_pandas()
name = dlc_definition['Name']
df.insert(0, 'DLC', name)
return df
def generate_DLB(self):
self._dlb = {dlc: self._make_dlc(dlc) for dlc in self.keys()}
return self._dlb
def to_pandas(self):
return pd.concat(self.dlb.values(), sort=False)
def cases_to_excel(self, filename):
if os.path.dirname(filename) != "":
os.makedirs(os.path.dirname(filename), exist_ok=True)
writer = pd.ExcelWriter(filename)
for k in self.keys():
self[k].to_excel(writer, k, index=False)
writer.close()
class DTU_IEC61400_1_Ref_DLB(DLB):
def __init__(self, iec_wt_class, Vin, Vout, Vr, Vmaint, D, z_hub,
controller, generator_servo, pitch_servo, best_azimuth,
Vstep=2, seed=None, alpha=0.2, alpha_extreme=0.11, ti_extreme=0.11,
shaft='shaft', shaft_constraint='shaft_rot'):
Name, Description, Operation, WSP, Wdir, Time = 'Name', 'Description', 'Operation', 'WSP', 'Wdir', 'Time'
Turb, Seeds, Shear, Gust, Fault = 'Turb', 'Seeds', 'Shear', 'Gust', 'Fault'
dlc_definitions = [
{Name: 'DLC12',
Description: 'Normal production',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-10/0/10',
Turb: 'NTM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC13',
Description: 'Normal production with high turbulence',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-10/0/10',
Turb: 'ETM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC14',
Description: 'Normal production with gust and direction change',
Operation: 'PowerProduction',
WSP: 'Vr/Vr+2/Vr-2',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'ECD',
Fault: None,
Time: 100},
{Name: 'DLC15',
Description: 'Normal production with extreme wind shear',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('EWS', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC21',
Description: 'Loss of electrical network',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-10/0/10',
Turb: 'NTM',
Seeds: 4,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: ('GridLoss', {'t': 10}),
Time: 100},
{Name: 'DLC22b',
Description: 'One blade stuck at minimum pitch angle',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: 0,
Turb: 'NTM',
Seeds: 12,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: ('StuckBlade', {'t': 0.1, 'pitch': 0}),
Time: 100},
{Name: 'DLC22p',
Description: 'Pitch runaway',
Operation: 'PowerProduction',
WSP: 'Vr:Vstep:Vout',
Wdir: 0,
Turb: 'NTM',
Seeds: 12,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: ('PitchRunaway', {'t': 10}),
Time: 100},
{Name: 'DLC22y',
Description: 'Abnormal yaw error',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '15:15:345',
Turb: 'NTM',
Seeds: 1,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC23',
Description: 'Loss of electrical network with extreme operating gust',
Operation: 'PowerProduction',
WSP: 'Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EOG',
Fault: ('GridLoss', {'t': [2.5, 4, 5.25]}),
Time: 100},
{Name: 'DLC24',
Description: 'Normal production with large yaw error',
Operation: 'PowerProduction',
WSP: 'Vin:Vstep:Vout',
Wdir: '-20/20',
Turb: 'NTM',
Seeds: 3,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC31',
Description: 'Start-up',
Operation: ('StartUp', {'t': 0}),
WSP: 'Vin/Vr/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC32',
Description: 'Start-up at 4 different times with extreme operating gust',
Operation: ('StartUp', {'t': [0.1, 2.5, 4, 5.25]}),
WSP: 'Vin/Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EOG',
Fault: None,
Time: 100},
{Name: 'DLC33',
Description: 'Start-up at 2 different times with extreme wind direction change',
Operation: ('StartUp', {'t': [-0.1, 5]}),
WSP: 'Vin/Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EDC',
Fault: None,
Time: 100},
{Name: 'DLC41',
Description: 'Shut-down',
Operation: ('ShutDown', {'t': 0}),
WSP: 'Vin/Vr/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC42',
Description: 'Shut-down at 6 different times with extreme operating gust',
Operation: ('ShutDown', {'t': [0.1, 2.5, 4, 5, 8, 10]}),
WSP: 'Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NoTurb',
Seeds: None,
Shear: ('NWP', {'alpha': alpha}),
Gust: 'EOG',
Fault: None,
Time: 100},
{Name: 'DLC51',
Description: 'Emergency shut-down',
Operation: ('EmergencyShutDown', {'t': 0}),
WSP: 'Vr-2/Vr+2/Vout',
Wdir: 0,
Turb: 'NTM',
Seeds: 12,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 100},
{Name: 'DLC61',
Description: 'Parked with 50-year wind',
Operation: 'Parked',
WSP: 'Vref',
Wdir: '-8/8',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 6,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC62',
Description: 'Parked with 50-year wind without grid connection',
Operation: 'Parked',
WSP: 'Vref',
Wdir: '0:15:345',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 1,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC63',
Description: 'Parked with 1-year wind with large yaw error',
Operation: 'Parked',
WSP: 'V1',
Wdir: '-20/20',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 6,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC64',
Description: 'Parked',
Operation: 'Parked',
WSP: 'Vin:Vstep:0.7*Vref',
Wdir: '-8/8',
Turb: 'NTM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC71',
Description: 'Rotor locked at 4 different azimuth angles and extreme yaw',
Operation: ('RotorLocked', {'azimuth': [0, 30, 60, 90]}),
WSP: 'V1',
Wdir: '0:15:345',
Turb: ('ConstantTurb', {'ti': ti_extreme}),
Seeds: 1,
Shear: ('NWP', {'alpha': alpha_extreme}),
Gust: None,
Fault: None,
Time: 600},
{Name: 'DLC81',
Description: 'Rotor locked for maintenance',
Operation: ('RotorLocked', {'azimuth': best_azimuth}),
WSP: 'Vmaint',
Wdir: '-8/8',
Turb: 'NTM',
Seeds: 6,
Shear: ('NWP', {'alpha': alpha}),
Gust: None,
Fault: None,
Time: 600},
]
Vref = {1: 50, 2: 42.5, 3: 37.5}[int(iec_wt_class[0])]
tiref = {'a': 0.16, 'b': 0.14, 'c': 0.12}[iec_wt_class[1].lower()]
V1 = 0.8*Vref
Ve50 = 1.4*Vref
Ve1 = 0.8*Ve50
lambda_1 = 0.7*z_hub if z_hub < 60 else 42
variables = {'iec_wt_class': iec_wt_class,
'Vref': Vref,
'tiref': tiref,
'V1': V1,
'Ve50': Ve50,
'Ve1': Ve1,
'Vin': Vin,
'Vout': Vout,
'Vr': Vr,
'Vmaint': Vmaint,
'Vstep': Vstep,
'D': D,
'z_hub': z_hub,
'lambda_1': lambda_1,
'controller': controller,
'generator_servo': generator_servo,
'pitch_servo': pitch_servo,
'best_azimuth': best_azimuth,
'shaft': shaft,
'shaft_constraint': shaft_constraint}
if seed:
variables["seed"] = int(seed)
else:
variables["seed"] = seed
DLB.__init__(self, dlc_definitions, variables)
def main():
if __name__ == '__main__':
dlb = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A',
Vin=4, Vout=25, Vr=8, Vmaint=18,  # cut-in, cut-out, rated and maintenance wind speed
D=180, z_hub=110,  # diameter and hub height
controller='dtu_we_controller', generator_servo='generator_servo',
pitch_servo='servo_with_limits', best_azimuth=180)
print(dlb.dlcs)
print(dlb.variables)
# save dlb definition to excel
dlb.to_excel('overview.xlsx')
# you can now modify definitions in overview.xlsx in Excel and
# load the modified dlb definition back into python
dlb = DLB.from_excel('overview.xlsx')
print(dlb['DLC14'])
# save dlc14 as Excel spreadsheet
dlb['DLC14'].to_excel('dlc14.xlsx')
# you can now modify cases in dlc14.xlsx in Excel
# save all cases to excel
dlb.cases_to_excel('cases.xlsx')
# Generate and save hawc2 input files
from wetb.hawc2.tests import test_files
from wetb.dlb.hawc2_iec_dlc_writer import HAWC2_IEC_DLC_Writer
path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/'
writer = HAWC2_IEC_DLC_Writer(path + 'htc/DTU_10MW_RWT.htc', diameter=180)
# load all cases
writer.from_pandas(dlb)
# load DLC14 only
writer.from_pandas(dlb['DLC14'])
# load modified DLC14.xlsx
writer.from_excel('dlc14.xlsx')
writer.write_all('tmp')
main()
from wetb.dlb import iec61400_1
import pytest
from wetb.dlb.hawc2_iec_dlc_writer import HAWC2_IEC_DLC_Writer
import os
from wetb.hawc2.tests import test_files
import shutil
from tests import npt, run_main
from wetb.hawc2.htc_file import HTCFile
from tests.run_main import run_module_main
from wetb.dlb.iec61400_1 import DTU_IEC61400_1_Ref_DLB
path = os.path.dirname(test_files.__file__) + '/simulation_setup/DTU10MWRef6.0/htc/tmp/'
def clean_up():
if os.path.isdir(path):
shutil.rmtree(path)
@pytest.yield_fixture(autouse=True)
def run_around_tests():
clean_up()
yield
clean_up()
@pytest.fixture
def writer():
return HAWC2_IEC_DLC_Writer(path + '../DTU_10MW_RWT.htc', diameter=127)
def test_main():
run_main.run_module_main(iec61400_1)
def test_DLC12(writer):
dlc12 = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller',
generator_servo='generator_servo', pitch_servo='servo_with_limits',
best_azimuth=180)['DLC12']
assert len(dlc12) == 216 # 12 wsp, 3 wdir, 6 seeds
writer.from_pandas(dlc12[::24][:2])
writer.write_all(path)
npt.assert_array_equal(sorted(os.listdir(path + "DLC12")),
['DLC12_wsp04_wdir350_s1001.htc', 'DLC12_wsp06_wdir000_s1101.htc'])
htc = HTCFile(path + "DLC12/DLC12_wsp04_wdir350_s1001.htc")
assert htc.wind.wsp[0] == 4
npt.assert_array_equal(htc.wind.windfield_rotations.values, [-10, 0, 0])
assert htc.wind.turb_format[0] == 1
assert htc.wind.mann.create_turb_parameters[3] == 1001
def test_DLC21(writer):
dlc = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller',
generator_servo='generator_servo', pitch_servo='servo_with_limits',
best_azimuth=180)['DLC21']
assert len(dlc) == 144 # 12 wsp, 3 wdir, 4 seeds
writer.from_pandas(dlc[::16][:2])
writer.write_all(path)
npt.assert_array_equal(sorted(os.listdir(path + "DLC21")),
['DLC21_wsp04_wdir350_s1001.htc', 'DLC21_wsp06_wdir000_s1101.htc'])
htc = HTCFile(path + "DLC21/DLC21_wsp04_wdir350_s1001.htc")
assert htc.wind.wsp[0] == 4
npt.assert_array_equal(htc.wind.windfield_rotations.values, [-10, 0, 0])
assert htc.wind.turb_format[0] == 1
assert htc.wind.mann.create_turb_parameters[3] == 1001
assert htc.dll.get_subsection_by_name('generator_servo', 'name').init.constant__7.values == [7, 110]
def test_DLC22y(writer):
dlc = DTU_IEC61400_1_Ref_DLB(iec_wt_class='1A', Vin=4, Vout=26, Vr=10, D=180, z_hub=90,
Vmaint=18, controller='dtu_we_controller',
generator_servo='generator_servo', pitch_servo='servo_with_limits',
best_azimuth=180)['DLC22y']
assert len(dlc) == 276 # 12 wsp, 23 wdir, 1 seeds
writer.from_pandas(dlc[::24][:2])
writer.write_all(path)
npt.assert_array_equal(sorted(os.listdir(path + "DLC22y")),
['DLC22y_wsp04_wdir015_s1001.htc', 'DLC22y_wsp06_wdir030_s1101.htc'])
htc = HTCFile(path + "DLC22y/DLC22y_wsp04_wdir015_s1001.htc")
assert htc.wind.wsp[0] == 4
npt.assert_array_equal(htc.wind.windfield_rotations.values, [15, 0, 0])
assert htc.wind.turb_format[0] == 1
assert htc.wind.mann.create_turb_parameters[3] == 1001