Compare revisions
Commits on Source (3)
@@ -50,13 +50,14 @@ import pandas as pd
import tables as tbl
# custom libraries
from wetb.fatigue_tools.bearing_damage import bearing_damage
from wetb.prepost import misc
from wetb.prepost import windIO
from wetb.prepost import prepost
from wetb.dlc import high_level as dlc
from wetb.prepost.GenerateHydro import hydro_input
#from wetb.prepost.GenerateHydro import hydro_input
from wetb.utils.envelope import compute_envelope
from os.path import join as os_path_join
#from os.path import join as os_path_join
#def join_path(*args):
# return os_path_join(*args).replace("\\","/")
@@ -3688,7 +3689,8 @@ class Cases(object):
"""Given the log file analysis and the Cases tag list, generate a list
of failed cases. This is usefull when either some cases have been
re-run or when the post-processing is done at the same time as the
simulations (e.g. zipchunks approach).
simulations (e.g. zipchunks approach). Cases for which the elapsted_time
column of the error logs is 0 or smaller are also considered as failed
Parameters
----------
@@ -3724,6 +3726,9 @@ class Cases(object):
# convert case_id to log file names
# logids = pd.DataFrame(columns=[''])
df_cases['logid'] = df_cases['[log_dir]'] + df_cases['[case_id]'] + '.log'
# remove cases for which the log file did not end with the final
# "Elapsed time" statement (elapsted_time then remains 0)
df_err = df_err[df_err['elapsted_time'] > 0]
# we only need to merge with errorlogs using a portion of the data
# join error logs and df_cases on the logid
df = pd.merge(df_cases[['logid', '[case_id]']], df_err[['file_name']],
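In other words, a case counts as failed when its log id has no healthy counterpart in the error-log table. A minimal, self-contained sketch of that filter-and-match pattern (toy data; only the column names come from the snippet above, and note that elapsted_time is the column's actual, misspelled name):

import pandas as pd

# toy inputs: the generated cases and the error-log analysis
df_cases = pd.DataFrame({'[log_dir]': ['logs/', 'logs/'],
                         '[case_id]': ['case_a', 'case_b']})
df_err = pd.DataFrame({'file_name': ['logs/case_a.log', 'logs/case_b.log'],
                       'elapsted_time': [123.4, 0.0]})

# a run only counts as finished if its log ends with "Elapsed time" (> 0)
df_err = df_err[df_err['elapsted_time'] > 0]

# cases whose log is missing from the healthy set have failed
df_cases['logid'] = df_cases['[log_dir]'] + df_cases['[case_id]'] + '.log'
failed = df_cases[~df_cases['logid'].isin(df_err['file_name'])]
print(failed['[case_id]'].tolist())  # -> ['case_b']

The real code uses a left merge on logid/file_name rather than isin, which gives the same selection but keeps the merged columns around for the subsequent bookkeeping.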
@@ -3743,23 +3748,28 @@ class Cases(object):
save_pickle(os.path.join(self.post_dir, self.sim_id + '_fail.pkl'),
self.cases_fail)
def remove_failed(self):
def remove_failed(self, verbose=False):
# don't do anything if there is nothing defined
if self.cases_fail is None:
print('no failed cases to remove')
return
nr_cases = len(self.cases)
# ditch all the failed cases out of the htc_dict
# otherwise reading the results data files will fail
for k in self.cases_fail:
try:
self.cases_fail[k] = copy.copy(self.cases[k])
del self.cases[k]
print('removed from htc_dict due to error: ' + k)
if verbose:
print('removed from htc_dict due to error: ' + k)
except KeyError:
print('WARNING: failed case does not occur in cases')
print(' ', k)
if verbose:
print('WARNING: failed case does not occur in cases')
print(' ', k)
rpl = (len(self.cases_fail), nr_cases)
print('removed %i failed cases (out of %i)' % rpl)
def load_failed(self, sim_id):
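With the new flag, a clean-up pass stays quiet unless asked otherwise. A usage sketch, assuming Cases is imported via wetb.prepost.Simulations as in the script further below (POST_DIR and sim_id are placeholders):

from wetb.prepost import Simulations as sim

cc = sim.Cases(POST_DIR, sim_id)        # POST_DIR, sim_id: placeholders
cc.find_failed(df_cases=cc.cases2df())  # now also flags runs with elapsted_time <= 0
cc.remove_failed(verbose=True)          # print every removed case, not just the summary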
@@ -3831,7 +3841,8 @@ class Cases(object):
ch_fatigue={}, update=False, add_sensor=None,
chs_resultant=[], i0=0, i1=None, saveinterval=1000,
csv=True, suffix=None, A=None, add_sigs={},
ch_wind=None, save_new_sigs=False, xlsx=False):
ch_wind=None, save_new_sigs=False, xlsx=False,
bearing_damage_lst=()):
"""
Calculate statistics and save them in a pandas DataFrame. The
statistics file is also written to disk every saveinterval cases.
@@ -3855,7 +3866,11 @@ class Cases(object):
add_sigs : dict, default={}
channel name, expression key/value pairs. For example,
'[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'
'p1-p1-node-002-forcevec-z*3 + p1-p1-node-002-forcevec-y'
bearing_damage_lst : iterable, default=()
Input for wetb.fatigue_tools.bearing_damage: angle and moment
channels of the bearing of interest.
chs_resultant
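Concretely, add_sigs synthesizes a new channel before the statistics pass. A minimal sketch (the key my-combined-force is a made-up channel name; the expression reuses the docstring's example):

add_sigs = {'my-combined-force':
            'p1-p1-node-002-forcevec-z*3 + p1-p1-node-002-forcevec-y'}
df_stats = cc.statistics(add_sigs=add_sigs, save_new_sigs=True)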
@@ -4168,6 +4183,15 @@ class Cases(object):
# calculate the statistics values
stats = self.res.calc_stats(self.sig, i0=i0, i1=i1)
# calculate any bearing damage
for name, angle_moment_lst in bearing_damage_lst:
angle_moment_timeseries_lst = []
for aa, mm in angle_moment_lst:
angle = self.sig[:,self.res.ch_dict[aa]['chi']]
moment = self.sig[:,self.res.ch_dict[mm]['chi']]
angle_moment_timeseries_lst.append((angle, moment))
stats[name] = bearing_damage(angle_moment_timeseries_lst)
# Because each channel is a new row, it doesn't matter how many
# data channels each case has, and this approach does not break
# when different cases have a different number of output channels
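The loop fixes the expected shape of bearing_damage_lst: an iterable of (name, angle_moment_lst) tuples, where name becomes an extra entry in the per-case statistics and angle_moment_lst pairs angle and moment channel names that are resolved via ch_dict. A hypothetical single-bearing example (all channel names are placeholders):

bearing_damage_lst = (
    ('pitch-bearing-damage',                 # extra key in the statistics
     [('pitch1-angle', 'blade1-root-mx'),    # (angle, moment) channel names
      ('pitch2-angle', 'blade2-root-mx')]),
)
df_stats = cc.statistics(bearing_damage_lst=bearing_damage_lst)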
@@ -622,11 +622,27 @@ def postpro_node_merge(tqdm=False, zipchunks=False, m=[3,4,6,8,9,10,12]):
fname = os.path.join(POST_DIR, '%s_unique-channel-names.csv' % sim_id)
pd.DataFrame(chans).to_csv(fname)
def prepare_failed(compress=False, wine_arch='win32', wine_prefix='~/.wine32',
prelude='', zipchunks=False):
cc = sim.Cases(POST_DIR, sim_id)
df_tags = cc.cases2df()
# -------------------------------------------------------------------------
# find failed cases and create pbs_in_failed dir
cc.find_failed(df_cases=df_tags)
sim.copy_pbs_in_failedcases(cc.cases_fail, path=opt.pbs_failed_path)
if zipchunks:
# and for chunks as well
sorts_on = ['[DLC]', '[Windspeed]']
create_chunks_htc_pbs(cc.cases_fail, sort_by_values=sorts_on,
ppn=20, nr_procs_series=3, walltime='20:00:00',
chunks_dir='zip-chunks-jess-fail', compress=compress,
wine_arch=wine_arch, wine_prefix=wine_prefix,
prelude=prelude, queue='windq', i0=1000)
if __name__ == '__main__':
@@ -635,6 +651,10 @@ if __name__ == '__main__':
dest='prep', help='create htc, pbs, files')
parser.add_argument('--check_logs', action='store_true', default=False,
dest='check_logs', help='check the log files')
parser.add_argument('--failed', action='store_true', default=False,
dest='failed', help='Create new pbs_in files for all '
'failed cases. Combine with --zipchunks to also create '
'new zipchunks for the failed cases.')
parser.add_argument('--pbs_failed_path', default='pbs_in_fail', type=str,
action='store', dest='pbs_failed_path',
help='Copy pbs launch files of the failed cases to a '
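As the --failed help text notes, the flag is designed to be combined with --zipchunks when the original launch used the chunks approach. Assuming the script is started the usual way (the file name dlctemplate.py is an assumption), preparing the re-runs is a single call:

python dlctemplate.py --failed --zipchunks

This copies the pbs_in launch files of the failed cases into pbs_in_fail (override with --pbs_failed_path) and also writes a fresh chunk set under zip-chunks-jess-fail.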
@@ -776,7 +796,7 @@ if __name__ == '__main__':
launch_dlcs_excel(sim_id, silent=False, zipchunks=opt.zipchunks,
pbs_turb=opt.pbs_turb, walltime=opt.walltime,
postpro_node=opt.postpro_node, runmethod=RUNMETHOD,
dlcs_dir=os.path.join(P_SOURCE, 'htc', 'DLCs'),
dlcs_dir=os.path.join(P_SOURCE, opt.dlcfolder),
postpro_node_zipchunks=opt.no_postpro_node_zipchunks,
wine_arch=opt.wine_arch, wine_prefix=opt.wine_prefix,
compress=opt.compress, linux=opt.linux)
@@ -793,6 +813,9 @@ if __name__ == '__main__':
envelopeblade=opt.envelopeblade)
if opt.postpro_node_merge:
postpro_node_merge(zipchunks=opt.zipchunks, m=m)
if opt.failed:
prepare_failed(zipchunks=opt.zipchunks, compress=opt.compress,
wine_arch=opt.wine_arch, wine_prefix=opt.wine_prefix)
if opt.dlcplot:
plot_chans = {}
plot_chans['$B1_{flap}$'] = ['setbeta-bladenr-1-flapnr-1']