# // Free wind speed Vy, gl. coo, of gl. pos 0.00, 0.00, -2.31
# WSP gl. coo.,Vdir_hor deg
# Free wind speed Vdir_hor, gl. coo, of gl. pos 0.00, 0.00, -2.31
# -----------------------------------------------------------------
# WATER SURFACE gl. coo, at gl. coo, x,y= 0.00, 0.00
elif self.ch_details[ch, 2].startswith('Water'):
units = self.ch_details[ch, 1]
# but remove the comma
x = items[-2][:-1]
y = items[-1]
# and tag it
tag = 'watersurface-global-%s-%s' % (x, y)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = 'global'
channelinfo['pos'] = (float(x), float(y))
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# WIND SPEED
# WSP gl. coo.,Vx
elif self.ch_details[ch, 0].startswith('WSP gl.'):
units = self.ch_details[ch, 1]
direction = self.ch_details[ch, 0].split(',')[1]
tmp = self.ch_details[ch, 2].split('pos')[1]
x, y, z = tmp.split(',')
x, y, z = x.strip(), y.strip(), z.strip()
# and tag it
tag = 'windspeed-global-%s-%s-%s-%s' % (direction, x, y, z)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = 'global'
channelinfo['pos'] = (x, y, z)
channelinfo['units'] = units
channelinfo['chi'] = ch
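# Example (illustrative, based on the sample channel above): a channel with
# ch_details[ch, 0] == 'WSP gl. coo.,Vx' and a description ending in
# 'gl. pos  0.00,  0.00, -2.31' ends up with the tag:
#   windspeed-global-Vx-0.00-0.00--2.31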
# WIND SPEED AT BLADE
# 0: WSP Vx, glco, R= 61.5
# 2: Wind speed Vx of blade 1 at radius 61.52, global coo.
elif self.ch_details[ch, 0].startswith('WSP V'):
units = self.ch_details[ch, 1].strip()
direction = self.ch_details[ch, 0].split(' ')[1].strip()
blade_nr = self.ch_details[ch, 2].split('blade')[1].strip()[:2]
radius = self.ch_details[ch, 2].split('radius')[1].split(',')[0]
coord = self.ch_details[ch, 2].split(',')[1].strip()
radius = radius.strip()
blade_nr = blade_nr.strip()
# and tag it
rpl = (direction, blade_nr, radius, coord)
tag = 'wsp-blade-%s-%s-%s-%s' % rpl
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['direction'] = direction
channelinfo['blade_nr'] = int(blade_nr)
channelinfo['radius'] = float(radius)
channelinfo['units'] = units
channelinfo['chi'] = ch
# FLAP ANGLE
# 2: Flap angle for blade 3 flap number 1
elif self.ch_details[ch, 0][:7] == 'setbeta':
units = self.ch_details[ch, 1].strip()
blade_nr = self.ch_details[ch, 2].split('blade')[1].strip()
blade_nr = blade_nr.split(' ')[0].strip()
radius = radius.strip()
blade_nr = blade_nr.strip()
# and tag it
tag = 'setbeta-bladenr-%s-flapnr-%s' % (blade_nr, flap_nr)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['flap_nr'] = int(flap_nr)
channelinfo['blade_nr'] = int(blade_nr)
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# ignore all the other cases we don't know how to deal with
else:
# if we get here, we don't have support yet for that sensor
# and hence we can't save it. Continue with next channel
continue
# -----------------------------------------------------------------
# if the tag is not unique, append a version suffix _v1, _v2, ...
if tag in self.ch_dict:
jj = 1
while True:
tag_new = tag + '_v%i' % jj
if tag_new in self.ch_dict:
jj += 1
else:
tag = tag_new
break
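# Example: a duplicate of the tag 'windspeed-global-Vx-0.00-0.00--2.31' is
# stored as 'windspeed-global-Vx-0.00-0.00--2.31_v1', the next duplicate
# gets the suffix '_v2', and so on.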
# msg = 'non unique tag for HAWC2 results, ignoring: %s' % tag
# logging.warn(msg)
# else:
self.ch_dict[tag] = copy.copy(channelinfo)
# -----------------------------------------------------------------
# also store the channel info for the DataFrame format
cols_ch = set(channelinfo.keys())
for col in cols_ch:
df_dict[col].append(channelinfo[col])
# fill in blanks for the remaining columns not present in this channel
for col in (self.cols - cols_ch):
df_dict[col].append('')
df_dict['unique_ch_name'].append(tag)
self.ch_df = pd.DataFrame(df_dict)
self.ch_df.set_index('chi', inplace=True)
def _ch_dict2df(self):
"""
Create a DataFrame version of the ch_dict, with the chi column set as
the index.
"""
# identify all the different columns
cols = set()
for ch_name, channelinfo in self.ch_dict.items():
cols.update(set(channelinfo.keys()))
df_dict = {col: [] for col in cols}
df_dict['unique_ch_name'] = []
for ch_name, channelinfo in self.ch_dict.items():
cols_ch = set(channelinfo.keys())
for col in cols_ch:
df_dict[col].append(channelinfo[col])
# fill in blanks for the remaining columns not present in this channel
for col in (cols - cols_ch):
df_dict[col].append('')
df_dict['unique_ch_name'].append(ch_name)
self.ch_df = pd.DataFrame(df_dict)
self.ch_df.set_index('chi', inplace=True)
def _data_window(self, nr_rev=None, time=None):
"""
Based on a time interval, create a proper slice object
======================================================
The window starts at zero and ends with the time range covered by the
time input.
Parameters
----------
nr_rev : int, default=None
NOT IMPLEMENTED YET
time : list, default=None
time = [time start, time stop]
Returns
-------
slice_
window
zoomtype
time_range
time_range = time[1] - time[0]
"""
# -------------------------------------------------
# determine zoom range if necessary
# -------------------------------------------------
time_range = None
if nr_rev:
raise NotImplementedError
# input is a number of revolutions, get RPM and sample rate to
# calculate the required range
# TODO: automatic detection of RPM channel!
time_range = nr_rev/(self.rpm_mean/60.)
# convert to indices instead of seconds
i_range = int(self.Freq*time_range)
window = [0, time_range]
# in case the first datapoint is not at 0 seconds
slice_ = np.r_[i_zero:i_range+i_zero]
zoomtype = '_nrrev_' + format(nr_rev, '1.0f') + 'rev'
elif time.any():
time_range = time[1] - time[0]
i_start = int(time[0]*self.Freq)
i_end = int(time[1]*self.Freq)
slice_ = np.r_[i_start:i_end]
window = [time[0], time[1]]
zoomtype = '_zoom_%1.1f-%1.1fsec' % (time[0], time[1])
return slice_, window, zoomtype, time_range
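# Minimal usage sketch (assuming `res` is a loaded results instance):
#   slice_, window, zoomtype, time_range = res._data_window(time=np.array([100.0, 200.0]))
#   sig_zoom = res.sig[slice_, :]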
# TODO: general signal method, this is not HAWC2 specific, move out
def calc_stats(self, sig, i0=0, i1=None):
stats = {}
# calculate the statistics values:
stats['max'] = sig[i0:i1, :].max(axis=0)
stats['min'] = sig[i0:i1, :].min(axis=0)
stats['mean'] = sig[i0:i1, :].mean(axis=0)
stats['std'] = sig[i0:i1, :].std(axis=0)
stats['range'] = stats['max'] - stats['min']
stats['absmax'] = np.absolute(sig[i0:i1, :]).max(axis=0)
stats['rms'] = np.sqrt(np.mean(sig[i0:i1, :]*sig[i0:i1, :], axis=0))
stats['int'] = integrate.trapz(sig[i0:i1, :], x=sig[i0:i1, 0], axis=0)
def statsdel_df(self, i0=0, i1=None, statchans='all', delchans='all',
m=[3, 4, 6, 8, 10, 12], neq=None, no_bins=46):
"""Calculate statistics and equivalent loads for the current loaded
signal.
Parameters
----------
i0 : int, default=0
i1 : int, default=None
statchans : list, default='all'
All channels are selected when set to 'all'; otherwise pass a list of
unique channel names for which the statistics are calculated.
delchans : list, default='all'
Channels for which the damage equivalent loads are calculated. Has to
be a subset of statchans.
neq : int, default=None
Number of equivalent cycles. When None, the signal duration in seconds
is used.
no_bins : int, default=46
Returns
-------
statsdel : pd.DataFrame
Pandas DataFrame with the statistical parameters and the different
fatigue coefficients as columns, and channels as rows. As index the
unique channel name is used.
"""
stats = ['max', 'min', 'mean', 'std', 'range', 'absmax', 'rms', 'int']
if statchans == 'all':
statchans = self.ch_df['unique_ch_name'].tolist()
statchis = self.ch_df['unique_ch_name'].index.values
else:
sel = self.ch_df['unique_ch_name']
statchis = self.ch_df[sel.isin(statchans)].index.values
if delchans == 'all':
delchans = self.ch_df['unique_ch_name'].tolist()
delchis = self.ch_df.index.values
else:
sel = self.ch_df['unique_ch_name']
delchis = self.ch_df[sel.isin(delchans)].index.values
# delchans has to be a subset of statchans!
if len(set(delchans) - set(statchans)) > 0:
raise ValueError('delchans has to be a subset of statchans')
tmp = np.ndarray((len(statchans), len(stats+m)))
tmp[:,:] = np.nan
m_cols = ['m=%i' % m_ for m_ in m]
statsdel = pd.DataFrame(tmp, columns=stats+m_cols)
statsdel.index = statchans
datasel = self.sig[i0:i1,statchis]
time = self.sig[i0:i1,0]
statsdel['max'] = datasel.max(axis=0)
statsdel['min'] = datasel.min(axis=0)
statsdel['mean'] = datasel.mean(axis=0)
statsdel['std'] = datasel.std(axis=0)
statsdel['range'] = statsdel['max'] - statsdel['min']
statsdel['absmax'] = np.abs(datasel).max(axis=0)
statsdel['rms'] = np.sqrt(np.mean(datasel*datasel, axis=0))
statsdel['int'] = integrate.trapz(datasel, x=time, axis=0)
statsdel['intabs'] = integrate.trapz(np.abs(datasel), x=time, axis=0)
if neq is None:
neq = self.sig[-1,0] - self.sig[0,0]
for chi, chan in zip(delchis, delchans):
signal = self.sig[i0:i1,chi]
eq = self.calc_fatigue(signal, no_bins=no_bins, neq=neq, m=m)
statsdel.loc[chan, m_cols] = eq
return statsdel
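# Usage sketch (assuming `res` holds a loaded HAWC2 result):
#   statsdel = res.statsdel_df(m=[3, 10], neq=600)
#   statsdel['m=10']   # damage equivalent loads for Woehler slope m=10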
# TODO: general signal method, this is not HAWC2 specific, move out
def calc_fatigue(self, signal, no_bins=46, m=[3, 4, 6, 8, 10, 12], neq=1):
"""
Parameters
----------
signal: 1D array
One dimensional array containing the signal.
no_bins: int
Number of bins for the binning of the amplitudes.
m: list
Values of the slope of the SN curve.
neq: int
Number of equivalent cycles
Returns
-------
eq: list
Damage equivalent loads for each m value.
"""
return eq_load(signal, no_bins=no_bins, m=m, neq=neq)[0]
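# For reference, the damage equivalent load for each slope m follows the
# usual rainflow-based convention (a sketch of the definition, not of the
# eq_load implementation itself):
#   S_eq = (sum_i(n_i * S_i**m) / neq)**(1/m)
# with n_i the rainflow cycle counts in amplitude bin S_i.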
def blade_deflection(self):
"""
"""
# first select all the z position channels
db = misc.DictDB(self.ch_dict)
db.search({'sensortype': 'state pos', 'component': 'z'})
# sort the keys and save the mean values to an array/list
chiz, zvals = [], []
for key in sorted(db.dict_sel.keys()):
zvals.append(-self.sig[:, db.dict_sel[key]['chi']].mean())
chiz.append(db.dict_sel[key]['chi'])
# now select the y deflection channels
db.search({'sensortype': 'state pos', 'component': 'y'})
# sort the keys and save the mean values to an array/list
chiy, yvals = [], []
for key in sorted(db.dict_sel.keys()):
yvals.append(self.sig[:, db.dict_sel[key]['chi']].mean())
chiy.append(db.dict_sel[key]['chi'])
return np.array(zvals), np.array(yvals)

def save_chan_names(self, fname):
"""Save unique channel names to text file.
"""
channels = self.ch_df.unique_ch_name.values
channels.sort()
np.savetxt(fname, channels, fmt='%-100s')
def save_channel_info(self, fname):
"""Save all channel info: unique naming + HAWC2 description from *.sel.
"""
p1 = self.ch_df.copy()
# but ignore the units column, we already have that
p2 = pd.DataFrame(self.ch_details,
columns=['Description1', 'units', 'Description2'])
# merge on the index
tmp = pd.merge(p1, p2, right_index=True, how='outer', left_index=True)
tmp.to_excel(fname)
# for a fixed-width text format instead of csv
# header = ''.join(['%100s' % k for k in tmp.columns])
# header = ' windspeed' + header
# np.savetxt(fname, tmp.to_records(), header=header,
# fmt='% 01.06e ')
return tmp

def load_chan_names(self, fname):
dtype = np.dtype('U100')
return np.genfromtxt(fname, dtype=dtype, delimiter=';').tolist()
def save_csv(self, fname, fmt='%.18e', delimiter=','):
"""
Save to csv and use the unified channel names as columns
"""
map_sorting = {}
# first, sort on channel index
for ch_key, ch in self.ch_dict.items():
map_sorting[ch['chi']] = ch_key
header = []
# not all channels might be present...iterate again over map_sorting
for chi in map_sorting:
try:
sensortag = self.ch_dict[map_sorting[chi]]['sensortag']
header.append(map_sorting[chi] + ' // ' + sensortag)
except KeyError:
header.append(map_sorting[chi])
# and save
print('saving...', end='')
np.savetxt(fname, self.sig[:, list(map_sorting.keys())], fmt=fmt,
delimiter=delimiter, header=delimiter.join(header))
print(fname)
def save_df(self, fname):
"""
Save the HAWC2 data and sel file in a DataFrame that contains all the
data, and all the channel information (both the descriptions from the
sel file and the unified channel names parsed by this class)
"""
self.sig
self.ch_details
self.ch_dict
def ReadOutputAtTime(fname):
"""Distributed blade loading as generated by the HAWC2 output_at_time
command. From HAWC2 12.3-beta and onwards, there are 7 header columns,
earlier versions only have 3.
Parameters
----------
fname : str
"""
# data = pd.read_fwf(fname, skiprows=3, header=None)
# pd.read_table(fname, sep=' ', skiprows=3)
# data.index.names = cols
# because the formatting is really weird, we need to sanitize it a bit
with opent(fname, 'r') as f:
# read the header from line 3
for k in range(7):
line = f.readline()
if line[0:12].lower().replace('#', '').strip() == 'radius_s':
header_lnr = k + 1
break
header = line.replace('\r', '').replace('\n', '')
cols = [k.strip().replace(' ', '_') for k in header.split('#')[1:]]
data = np.loadtxt(fname, skiprows=header_lnr)
return pd.DataFrame(data, columns=cols)
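# Usage sketch (file name is hypothetical):
#   df = ReadOutputAtTime('output_at_time.dat')
#   df['radius_s']   # first column holds the radial stations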
def ReadEigenBody(fname, debug=False):
"""
Read HAWC2 body eigenanalysis result file
=========================================
Parameters
----------
fname : str
Returns
-------
results : DataFrame
Columns: body, Fd_hz, Fn_hz, log_decr_pct
"""
# Body data for body number : 3 with the name :nacelle
# Results: fd [Hz] fn [Hz] log.decr [%]
# Mode nr: 1: 1.45388E-21 1.74896E-03 6.28319E+02
FILE = opent(fname)
lines = FILE.readlines()
FILE.close()
df_dict = {'Fd_hz': [], 'Fn_hz': [], 'log_decr_pct': [], 'body': []}
for i, line in enumerate(lines):
if debug: print('line nr: %5i' % i)
# identify for which body we will read the data
if line[:25] == 'Body data for body number':
body = line.split(':')[2].rstrip().lstrip()
# remove any annoying characters
if debug: print('modes for body: %s' % body)
# identify mode number and read the eigenfrequencies
elif line[:8] == 'Mode nr:':
linelist = line.replace('\n', '').replace('\r', '').split(':')
# modenr = linelist[1].rstrip().lstrip()
# text after Mode nr can be empty
try:
eigenmodes = linelist[2].rstrip().lstrip().split(' ')
except IndexError:
eigenmodes = ['0', '0', '0']
if debug: print(eigenmodes)
# in case we have more than 3, remove all the empty ones
# this can happen when there are NaN values
if not len(eigenmodes) == 3:
eigenmodes = linelist[2].rstrip().lstrip().split(' ')
eigmod = []
for k in eigenmodes:
if len(k) > 1:
eigmod.append(k)
else:
eigmod = eigenmodes
# remove any trailing spaces for each element
for k in range(len(eigmod)):
df_dict['body'].append(body)
df_dict['Fd_hz'].append(eigmod[0])
df_dict['Fn_hz'].append(eigmod[1])
df_dict['log_decr_pct'].append(eigmod[2])
return pd.DataFrame(df_dict)
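# Usage sketch (file name is hypothetical):
#   df = ReadEigenBody('eigen_body.dat')
#   df[df['body'] == 'nacelle'][['Fd_hz', 'Fn_hz', 'log_decr_pct']]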
def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500):
"""
Read HAWC2 structure eigenanalysis result file
==============================================
The file looks as follows:
#0 Version ID : HAWC2MB 11.3
#1 ___________________________________________________________________
#2 Structure eigenanalysis output
#3 ___________________________________________________________________
#4 Time : 13:46:59
#5 Date : 28:11.2012
#6 ___________________________________________________________________
#7 Results: fd [Hz] fn [Hz] log.decr [%]
#8 Mode nr: 1: 3.58673E+00 3.58688E+00 5.81231E+00
#...
#302 Mode nr:294: 0.00000E+00 6.72419E+09 6.28319E+02
Parameters
----------
file_path : str
file_name : str
debug : boolean, default=False
max_modes : int
Stop evaluating the result after max_modes number of modes have been
identified
Returns
-------
modes_arr : ndarray(3,n)
An ndarray(3,n) holding Fd, Fn [Hz] and the logarithmic damping
decrement [%] for n different structural eigenmodes
"""
# 0 Version ID : HAWC2MB 11.3
# 1 ___________________________________________________________________
# 2 Structure eigenanalysis output
# 3 ___________________________________________________________________
# 4 Time : 13:46:59
# 5 Date : 28:11.2012
# 6 ___________________________________________________________________
# 7 Results: fd [Hz] fn [Hz] log.decr [%]
# 8 Mode nr: 1: 3.58673E+00 3.58688E+00 5.81231E+00
# Mode nr:294: 0.00000E+00 6.72419E+09 6.28319E+02
FILE = opent(os.path.join(file_path, file_name))
lines = FILE.readlines()
FILE.close()
header_lines = 8
# we know the number of modes from the number of lines
nrofmodes = len(lines) - header_lines
modes_arr = np.ndarray((3, nrofmodes))
for i, line in enumerate(lines):
if i > max_modes:
# cut off the unused rest
break
# ignore the header
if i < header_lines:
continue
# split up mode nr from the rest
parts = line.split(':')
# get fd, fn and damping, but remove all empty items on the list
modes_arr[:, i-header_lines]=misc.remove_items(parts[2].split(' '), '')
return modes_arr
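# Usage sketch (path and file name are hypothetical):
#   modes = ReadEigenStructure('res', 'eigen_structure.dat')
#   fd, fn, log_decr = modes[0, :], modes[1, :], modes[2, :]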
"""
"""
def __init__(self):
pass
def __call__(self, z_h, r_blade_tip, a_phi=None, shear_exp=None, nr_hor=3,
nr_vert=20, h_ME=500.0, io=None, wdir=None):
"""
Parameters
----------
z_h : float
Hub height
r_blade_tip : float
Blade tip radius
a_phi : float, default=None
:math:`a_{\\varphi} \\approx 0.5` parameter for the modified
Ekman veer distribution. Values vary between -1.2 and 0.5.
shear_exp : float, default=None
nr_vert : int, default=20
nr_hor : int, default=3
h_ME : float, default=500
Modified Ekman parameter. Take roughly 500 for off shore sites,
1000 for on shore sites.
io : str or io buffer, default=None
When specified, the HAWC2 user defined shear input file will be
written.
wdir : float, default=None
A constant veer angle, or yaw angle. Equivalent to setting the
wind direction. Angle in degrees.
Returns
-------
uu, vv, ww, xx, zz
"""
x, z = self.create_coords(z_h, r_blade_tip, nr_vert=nr_vert,
nr_hor=nr_hor)
if a_phi is not None:
phi_rad = WindProfiles.veer_ekman_mod(z, z_h, h_ME=h_ME, a_phi=a_phi)
assert len(phi_rad) == nr_vert
else:
nr_vert = len(z)
phi_rad = np.zeros((nr_vert,))
# add any yaw error on top of the veer
if wdir is not None:
# because wdir cw positive, and phi veer ccw positive
phi_rad -= (wdir*np.pi/180.0)
u, v, w = self.decompose_veer(phi_rad, nr_hor)
# when no shear is defined
if shear_exp is None:
uu = u
vv = v
ww = w
else:
# scale the shear on top of the veer
shear = WindProfiles.powerlaw(z, z_h, shear_exp)
uu = u*shear[:,np.newaxis]
vv = v*shear[:,np.newaxis]
ww = w*shear[:,np.newaxis]
# and write to a file
if isinstance(io, str):
with open(io, 'wb') as fid:
fid = self.write(fid, uu, vv, ww, x, z)
self.fid = None
elif io is not None:
io = self.write(io, uu, vv, ww, x, z)
self.fid = io
return uu, vv, ww, x, z
def create_coords(self, z_h, r_blade_tip, nr_vert=3, nr_hor=20):
"""
Utility to create the coordinates of the wind field based on hub height
and blade length. Add 15% to r_blade_tip to make sure horizontal edges
are defined wide enough.
"""
# take 15% extra space after the blade tip
z = np.linspace(0, z_h + r_blade_tip*1.15, nr_vert)
# along the horizontal, coordinates with 0 at the rotor center
x = np.linspace(-r_blade_tip*1.15, r_blade_tip*1.15, nr_hor)
return x, z
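# Example (illustrative numbers): create_coords(z_h=100.0, r_blade_tip=60.0)
# returns z from 0 to 169 m (100 + 1.15*60) over nr_vert points and x from
# -69 to +69 m over nr_hor points.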
def deltaphi2aphi(self, d_phi, z_h, r_blade_tip, h_ME=500.0):
"""For a given `\\Delta \\varphi` over the rotor diameter, estimate
the corresponding `a_{\\varphi}`.
Parameters
----------
`\\Delta \\varphi` : ndarray or float
Veer angle difference over the rotor plane from lowest to highest
blade tip position.
z_h : float
Hub height in meters.
r_blade_tip : float
Blade tip radius/length.
h_ME : float, default=500.0
Modified Ekman parameter. For on shore,
:math:`h_{ME} \\approx 1000`, for off-shore,
:math:`h_{ME} \\approx 500`
Returns
-------
`a_{\\varphi}` : ndarray or float
"""
t1 = r_blade_tip * 2.0 * np.exp(-z_h/(h_ME))
a_phi = d_phi * np.sqrt(h_ME*z_h) / t1
return a_phi
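# Rough worked example (illustrative numbers, not from the source):
#   z_h = 100 m, r_blade_tip = 60 m, h_ME = 500, d_phi = 10 deg = 0.175 rad
#   a_phi = 0.175*sqrt(500*100) / (2*60*exp(-100/500)) ≈ 0.4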
def deltaphi2aphi_opt(self, deltaphi, z, z_h, r_blade_tip, h_ME):
"""
convert delta_phi over a given interval z to a_phi using
scipy.optimize.fsolve on veer_ekman_mod.
Parameters
----------
deltaphi : float
Desired delta phi in rad over interval z[0] at bottom to z[1] at
the top.
"""
def func(a_phi, z, z_h, h_ME, deltaphi_target):
phis = WindProfiles.veer_ekman_mod(z, z_h, h_ME=h_ME, a_phi=a_phi)
return np.abs(deltaphi_target - (phis[1] - phis[0]))
args = (z, z_h, h_ME, deltaphi)
return sp.optimize.fsolve(func, [0], args=args)[0]
def decompose_veer(self, phi_rad, nr_hor):
"""
Convert a veer angle into u, v, and w components, ready for the
HAWC2 user defined veer input file. nr_vert refers to the number of
vertical grid points.
Parameters
----------
phi_rad : ndarray(nr_vert)
veer angle in radians as function of height
nr_hor : int
Number of horizontal grid points
Returns
-------
u : ndarray(nr_hor, nr_vert)
v : ndarray(nr_hor, nr_vert)
w : ndarray(nr_hor, nr_vert)
"""
nr_vert = len(phi_rad)
tan_phi = np.tan(phi_rad)
# convert veer angles to veer components in v, u. Make sure the
# normalized wind speed remains 1!
# u = sympy.Symbol('u')
# v = sympy.Symbol('v')
# tan_phi = sympy.Symbol('tan_phi')
# eq1 = u**2.0 + v**2.0 - 1.0
# eq2 = (tan_phi*u/v) - 1.0
# sol = sympy.solvers.solve([eq1, eq2], [u,v], dict=True)
# # proposed solution is:
# u2 = np.sqrt(tan_phi**2/(tan_phi**2 + 1.0))/tan_phi
# v2 = np.sqrt(tan_phi**2/(tan_phi**2 + 1.0))
# # but that gives the sign switch wrong, simplify/rewrite to:
u = np.sqrt(1.0/(tan_phi**2 + 1.0))
v = np.sqrt(1.0/(tan_phi**2 + 1.0))*tan_phi
# verify they are actually the same but the sign:
# assert np.allclose(np.abs(u), np.abs(u2))
# assert np.allclose(np.abs(v), np.abs(v2))
u_full = u[:, np.newaxis] + np.zeros((nr_hor,))[np.newaxis, :]
v_full = v[:, np.newaxis] + np.zeros((nr_hor,))[np.newaxis, :]
w_full = np.zeros((nr_vert, nr_hor))
return u_full, v_full, w_full
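# Note: by construction u**2 + v**2 == 1 and v/u == tan(phi_rad), so the
# veer only rotates the normalized wind vector in the horizontal plane
# without changing its magnitude.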
def read(self, fname):
"""Read a user defined shear input file as used for HAWC2.
Returns
-------
u_comp, v_comp, w_comp, v_coord, w_coord, phi_deg
"""
# read the header
with opent(fname) as f:
for i, line in enumerate(f.readlines()):
if line.strip()[0] != '#':
nr_v, nr_w = misc.remove_items(line.split('#')[0].split(), '')
nr_hor, nr_vert = int(nr_v), int(nr_w)
i_header = i
break
# u,v and w components on 2D grid
tmp = np.genfromtxt(fname, skip_header=i_header+1, comments='#',
max_rows=nr_vert*3)
if not tmp.shape == (nr_vert*3, nr_hor):
raise AssertionError('user defined shear input file inconsistent')
v_comp = tmp[:nr_vert,:]
u_comp = tmp[nr_vert:nr_vert*2,:]
w_comp = tmp[nr_vert*2:nr_vert*3,:]
# coordinates of the 2D grid
tmp = np.genfromtxt(fname, skip_header=3*(nr_vert+1)+2,
max_rows=nr_hor+nr_vert)
if not tmp.shape == (nr_vert+nr_hor,):
raise AssertionError('user defined shear input file inconsistent')
v_coord = tmp[:nr_hor]
w_coord = tmp[nr_hor:]
phi_deg = np.arctan(v_comp[:, 0]/u_comp[:, 0])*180.0/np.pi
return u_comp, v_comp, w_comp, v_coord, w_coord, phi_deg
def write(self, fid, u, v, w, v_coord, w_coord, fmt_uvw='% 08.05f',
fmt_coord='% 8.02f'):
"""Write a user defined shear input file for HAWC2.
"""
nr_hor = len(v_coord)
nr_vert = len(w_coord)
try:
assert u.shape == v.shape
assert u.shape == w.shape
assert u.shape[0] == nr_vert
assert u.shape[1] == nr_hor
except AssertionError:
raise ValueError('u, v, w shapes should be consistent with '
'nr_hor and nr_vert: u.shape: %s, nr_hor: %i, '
'nr_vert: %i' % (str(u.shape), nr_hor, nr_vert))
fid.write(b'# User defined shear file\n')
tmp = '%i %i # nr_hor (v), nr_vert (w)\n' % (nr_hor, nr_vert)
fid.write(tmp.encode())
h1 = b'normalized with U_mean, nr_hor (v) rows, nr_vert (w) columns'
fid.write(b'# v component, %s\n' % h1)
np.savetxt(fid, v, fmt=fmt_uvw, delimiter=' ')
fid.write(b'# u component, %s\n' % h1)
np.savetxt(fid, u, fmt=fmt_uvw, delimiter=' ')
fid.write(b'# w component, %s\n' % h1)
np.savetxt(fid, w, fmt=fmt_uvw, delimiter=' ')
h2 = b'# v coordinates (along the horizontal, nr_hor, 0 rotor center)'
fid.write(b'%s\n' % h2)
np.savetxt(fid, v_coord.reshape((v_coord.size, 1)), fmt=fmt_coord)
h3 = b'# w coordinates (zero is at ground level, height, nr_hor)'
fid.write(b'%s\n' % h3)
np.savetxt(fid, w_coord.reshape((w_coord.size, 1)), fmt=fmt_coord)
return fid
class WindProfiles(object):
def logarithmic(z, z_ref, r_0):
return np.log10(z/r_0)/np.log10(z_ref/r_0)
def powerlaw(z, z_ref, a):
profile = np.power(z/z_ref, a)
# when a is negative, make sure we return zero and not inf
profile[np.isinf(profile)] = 0.0
return profile
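# Example: powerlaw(z, z_h, 0.2) gives a power-law shear profile normalized
# to 1 at hub height (z == z_h); an exponent of roughly 0.2 is a common
# onshore assumption.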
def veer_ekman_mod(z, z_h, h_ME=500.0, a_phi=0.5):
"""
Modified Ekman veer profile, as defined by Mark C. Kelly in email on
10 October 2014 15:10 (RE: veer profile)
.. math::
\\varphi(z) - \\varphi(z_H) \\approx a_{\\varphi}
e^{-\\sqrt{z_H/h_{ME}}}
\\frac{z-z_H}{\\sqrt{z_H h_{ME}}}
\\left( 1 - \\frac{z-z_H}{2 \\sqrt{z_H h_{ME}}}
- \\frac{z-z_H}{4 z_H} \\right)
where:
:math:`h_{ME} \\equiv \\frac{\\kappa u_*}{f}`
and :math:`f = 2 \\Omega \\sin \\varphi` is the Coriolis parameter,
:math:`\\kappa = 0.41` is the von Karman constant,
and :math:`u_\\star = \\sqrt{\\frac{\\tau_w}{\\rho}}` is the friction velocity.
For on shore, :math:`h_{ME} \\approx 1000`, for off-shore,
:math:`h_{ME} \\approx 500`
:math:`a_{\\varphi} \\approx 0.5`
Parameters
----------
z : ndarray(n)
z-coordinates (height) of the grid on which the veer angle should
be calculated.
z_h : float
Hub height in meters.
a_phi : float, default=0.5
Parameter for the modified Ekman veer distribution. Value varies
between -1.2 and 0.5.
Returns
-------
phi_rad : ndarray
Veer angle in radians as function of z.
"""
t1 = np.exp(-math.sqrt(z_h / h_ME))
t2 = (z - z_h) / math.sqrt(z_h * h_ME)
t3 = (1.0 - (z-z_h)/(2.0*math.sqrt(z_h*h_ME)) - (z-z_h)/(4.0*z_h))
return a_phi * t1 * t2 * t3
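# Note that the returned veer angle is zero at hub height (z == z_h, where
# t2 vanishes): the profile describes the veer relative to the hub height
# wind direction.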
class Turbulence(object):
def __init__(self):
pass
def read_hawc2(self, fpath, shape):
"""
Read the HAWC2 turbulence format
"""
fid = open(fpath, 'rb')
tmp = np.fromfile(fid, 'float32', shape[0]*shape[1]*shape[2])
fid.close()
turb = np.reshape(tmp, shape)
return turb
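# Usage sketch (file name and box dimensions are assumptions):
#   turb = Turbulence().read_hawc2('./turb/mann_u.bin', shape=(8192, 32, 32))
# The HAWC2/Mann binary box is a plain float32 dump, so the shape has to be
# known beforehand; it is not stored in the file itself.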
def read_bladed(self, fpath, basename):
fid = open(fpath + basename + '.wnd', 'rb')
R1 = struct.unpack('h', fid.read(2))[0]
R2 = struct.unpack('h', fid.read(2))[0]
turb = struct.unpack('i', fid.read(4))[0]
lat = struct.unpack('f', fid.read(4))[0]
rough = struct.unpack('f', fid.read(4))[0]
refh = struct.unpack('f', fid.read(4))[0]
longti = struct.unpack('f', fid.read(4))[0]
latti = struct.unpack('f', fid.read(4))[0]
vertti = struct.unpack('f', fid.read(4))[0]
dv = struct.unpack('f', fid.read(4))[0]
dw = struct.unpack('f', fid.read(4))[0]
du = struct.unpack('f', fid.read(4))[0]
halfalong = struct.unpack('i', fid.read(4))[0]
mean_ws = struct.unpack('f', fid.read(4))[0]
VertLongComp = struct.unpack('f', fid.read(4))[0]
LatLongComp = struct.unpack('f', fid.read(4))[0]
LongLongComp = struct.unpack('f', fid.read(4))[0]
Int = struct.unpack('i', fid.read(4))[0]
seed = struct.unpack('i', fid.read(4))[0]
VertGpNum = struct.unpack('i', fid.read(4))[0]
LatGpNum = struct.unpack('i', fid.read(4))[0]
VertLatComp = struct.unpack('f', fid.read(4))[0]
LatLatComp = struct.unpack('f', fid.read(4))[0]
LongLatComp = struct.unpack('f', fid.read(4))[0]
VertVertComp = struct.unpack('f', fid.read(4))[0]
LatVertComp = struct.unpack('f', fid.read(4))[0]
LongVertComp = struct.unpack('f', fid.read(4))[0]
points = np.fromfile(fid, 'int16', 2*halfalong*VertGpNum*LatGpNum*3)
fid.close()
return points
def convert2bladed(self, fpath, basename, shape=(4096,32,32)):
"""
Convert turbulence box to BLADED format
"""