From 5545e3b6d662e09f372584d212d1f99e6c3ab950 Mon Sep 17 00:00:00 2001
From: David Robert Verelst <dave@dtu.dk>
Date: Thu, 31 Jan 2019 11:33:21 +0100
Subject: [PATCH] prepost: postpro bearing damage, zipchunks for failed cases

---
 wetb/prepost/Simulations.py | 29 ++++++++++++++++++++++++-----
 wetb/prepost/dlctemplate.py | 25 ++++++++++++++++++++++++-
 2 files changed, 48 insertions(+), 6 deletions(-)

diff --git a/wetb/prepost/Simulations.py b/wetb/prepost/Simulations.py
index 80cfd64b..9145531a 100755
--- a/wetb/prepost/Simulations.py
+++ b/wetb/prepost/Simulations.py
@@ -50,13 +50,14 @@ import pandas as pd
 import tables as tbl
 
 # custom libraries
+from wetb.fatigue_tools.bearing_damage import bearing_damage
 from wetb.prepost import misc
 from wetb.prepost import windIO
 from wetb.prepost import prepost
 from wetb.dlc import high_level as dlc
-from wetb.prepost.GenerateHydro import hydro_input
+#from wetb.prepost.GenerateHydro import hydro_input
 from wetb.utils.envelope import compute_envelope
-from os.path import join as os_path_join
+#from os.path import join as os_path_join
 
 #def join_path(*args):
 #    return os_path_join(*args).replace("\\","/")
@@ -3688,7 +3689,8 @@ class Cases(object):
         """Given the log file analysis and the Cases tag list, generate a list
         of failed cases. This is useful when either some cases have been
         re-run or when the post-processing is done at the same time as the
-        simulations (e.g. zipchunks approach).
+        simulations (e.g. zipchunks approach). Cases for which the elapsted_time
+        column of the error logs is 0 or smaller are also considered failed.
 
         Parameters
         ----------
@@ -3724,6 +3726,9 @@ class Cases(object):
         # convert case_id to log file names
         # logids = pd.DataFrame(columns=[''])
         df_cases['logid'] = df_cases['[log_dir]'] + df_cases['[case_id]'] + '.log'
+        # remove the cases for which the log file did not end with the final
+        # "Elapsed time" statement: for those the elapsted_time value is 0
+        df_err = df_err[df_err['elapsted_time'] > 0]
         # we only need to merge with errorlogs using a portion of the data
         # join error logs and df_cases on the logid
         df = pd.merge(df_cases[['logid', '[case_id]']], df_err[['file_name']],
@@ -3831,7 +3836,8 @@ class Cases(object):
                    ch_fatigue={}, update=False, add_sensor=None,
                    chs_resultant=[], i0=0, i1=None, saveinterval=1000,
                    csv=True, suffix=None, A=None, add_sigs={},
-                   ch_wind=None, save_new_sigs=False, xlsx=False):
+                   ch_wind=None, save_new_sigs=False, xlsx=False,
+                   bearing_damage_lst=()):
         """
         Calculate statistics and save them in a pandas DataFrame. The
         statistics file is also saved every saveinterval cases.
@@ -3855,7 +3861,11 @@ class Cases(object):
 
         add_sigs : dict, default={}
             channel name, expression key/value pairs. For example,
-            '[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'
+            'p1-p1-node-002-forcevec-z*3 + p1-p1-node-002-forcevec-y'
+
+        bearing_damage_lst : iterable, default=()
+            (name, angle_moment_lst) pairs: the (angle, moment) channel names
+            of each bearing of interest, for wetb.fatigue_tools.bearing_damage.
 
         chs_resultant
 
@@ -4168,6 +4178,15 @@ class Cases(object):
             # calculate the statistics values
             stats = self.res.calc_stats(self.sig, i0=i0, i1=i1)
 
+            # calculate the bearing damage for each given set of channels
+            for name, angle_moment_lst in bearing_damage_lst:
+                angle_moment_timeseries_lst = []
+                for aa, mm in angle_moment_lst:
+                    angle = self.sig[:, self.res.ch_dict[aa]['chi']]
+                    moment = self.sig[:, self.res.ch_dict[mm]['chi']]
+                    angle_moment_timeseries_lst.append((angle, moment))
+                stats[name] = bearing_damage(angle_moment_timeseries_lst)
+
             # Because each channel is a new row, it doesn't matter how many
             # data channels each case has, and this approach does not break
             # when different cases have a different number of output channels
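
For reference, a minimal sketch of how the new bearing_damage_lst argument of
Cases.statistics() could be used. The bearing name and the angle/moment channel
names are hypothetical placeholders, and POST_DIR/sim_id stand in for the
values that dlctemplate.py defines at module level:

    from wetb.prepost import Simulations as sim

    cc = sim.Cases(POST_DIR, sim_id)
    # one entry per bearing: (statistics column name, list of
    # (angle, moment) channel name pairs as found in res.ch_dict)
    bearings = [('bearing_damage_pitch_b1',
                 [('pitch-angle-b1', 'pitch-moment-b1')])]
    df_stats = cc.statistics(bearing_damage_lst=bearings)
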
diff --git a/wetb/prepost/dlctemplate.py b/wetb/prepost/dlctemplate.py
index 2d91740b..c3e2c166 100644
--- a/wetb/prepost/dlctemplate.py
+++ b/wetb/prepost/dlctemplate.py
@@ -622,11 +622,27 @@ def postpro_node_merge(tqdm=False, zipchunks=False, m=[3,4,6,8,9,10,12]):
     fname = os.path.join(POST_DIR, '%s_unique-channel-names.csv' % sim_id)
     pd.DataFrame(chans).to_csv(fname)
 
+
+def prepare_failed(compress=False, wine_arch='win32', wine_prefix='~/.wine32',
+                   prelude='', zipchunks=False):
+    """Create new pbs_in files (and optionally zipchunks) for the failed cases."""
+    cc = sim.Cases(POST_DIR, sim_id)
+    df_tags = cc.cases2df()
+
     # -------------------------------------------------------------------------
     # find failed cases and create pbs_in_failed dir
     cc.find_failed(df_cases=df_tags)
     sim.copy_pbs_in_failedcases(cc.cases_fail, path=opt.pbs_failed_path)
 
+    if zipchunks:
+        # also create new zipchunks launch scripts for the failed cases
+        sorts_on = ['[DLC]', '[Windspeed]']
+        create_chunks_htc_pbs(cc.cases_fail, sort_by_values=sorts_on,
+                              ppn=20, nr_procs_series=3, walltime='20:00:00',
+                              chunks_dir='zip-chunks-jess-fail', compress=compress,
+                              wine_arch=wine_arch, wine_prefix=wine_prefix,
+                              prelude=prelude, queue='windq')
+
 
 if __name__ == '__main__':
 
@@ -635,6 +651,10 @@ if __name__ == '__main__':
                         dest='prep', help='create htc, pbs, files')
     parser.add_argument('--check_logs', action='store_true', default=False,
                         dest='check_logs', help='check the log files')
+    parser.add_argument('--failed', action='store_true', default=False,
+                        dest='failed', help='Create new pbs_in files for all '
+                        'failed cases. Combine with --zipchunks to also create '
+                        'new zipchunks for the failed cases.')
     parser.add_argument('--pbs_failed_path', default='pbs_in_fail', type=str,
                         action='store', dest='pbs_failed_path',
                         help='Copy pbs launch files of the failed cases to a '
@@ -776,7 +796,7 @@ if __name__ == '__main__':
         launch_dlcs_excel(sim_id, silent=False, zipchunks=opt.zipchunks,
                           pbs_turb=opt.pbs_turb, walltime=opt.walltime,
                           postpro_node=opt.postpro_node, runmethod=RUNMETHOD,
-                          dlcs_dir=os.path.join(P_SOURCE, 'htc', 'DLCs'),
+                          dlcs_dir=os.path.join(P_SOURCE, opt.dlcfolder),
                           postpro_node_zipchunks=opt.no_postpro_node_zipchunks,
                           wine_arch=opt.wine_arch, wine_prefix=opt.wine_prefix,
                           compress=opt.compress, linux=opt.linux)
@@ -791,6 +811,9 @@ if __name__ == '__main__':
                     save_new_sigs=opt.save_new_sigs, save_iter=False,
                     envelopeturbine=opt.envelopeturbine,
                     envelopeblade=opt.envelopeblade)
+    if opt.failed:
+        prepare_failed(zipchunks=opt.zipchunks, compress=opt.compress,
+                       wine_arch=opt.wine_arch, wine_prefix=opt.wine_prefix)
     if opt.postpro_node_merge:
         postpro_node_merge(zipchunks=opt.zipchunks, m=m)
     if opt.dlcplot:
-- 
GitLab
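
As a usage note, the failed-case recovery flow wired up by the new --failed
flag could also be driven manually along these lines. This is a sketch under
the standard dlctemplate assumptions (module-level POST_DIR and sim_id, and
create_chunks_htc_pbs imported from wetb.prepost.simchunks, as dlctemplate.py
does); kwargs not shown keep their defaults:

    from wetb.prepost import Simulations as sim
    from wetb.prepost.simchunks import create_chunks_htc_pbs

    cc = sim.Cases(POST_DIR, sim_id)
    # flag the failed cases, including those with elapsted_time <= 0
    cc.find_failed(df_cases=cc.cases2df())
    # copy the pbs launch scripts of the failed cases to pbs_in_fail
    sim.copy_pbs_in_failedcases(cc.cases_fail, path='pbs_in_fail')
    # regroup only the failed cases into new zipchunks launch scripts
    create_chunks_htc_pbs(cc.cases_fail, sort_by_values=['[DLC]', '[Windspeed]'],
                          ppn=20, nr_procs_series=3, walltime='20:00:00',
                          chunks_dir='zip-chunks-jess-fail')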