From e84ee479552c918ee0bb8ea1bd7a06f3ef9e73ed Mon Sep 17 00:00:00 2001
From: David Robert Verelst <dave@dtu.dk>
Date: Mon, 3 Apr 2017 15:38:44 +0200
Subject: [PATCH] prepost: statistics now includes an option to combine
 several channels into one

---
 wetb/prepost/Simulations.py | 32 ++++++++++++++++++++++++++++----
 wetb/prepost/dlctemplate.py | 12 ++++++++++--
 2 files changed, 38 insertions(+), 6 deletions(-)

diff --git a/wetb/prepost/Simulations.py b/wetb/prepost/Simulations.py
index 63db7cd5..f17905e0 100755
--- a/wetb/prepost/Simulations.py
+++ b/wetb/prepost/Simulations.py
@@ -4007,7 +4007,7 @@ class Cases(object):
                    save=True, m=[3, 4, 6, 8, 10, 12], neq=None, no_bins=46,
                    ch_fatigue={}, update=False, add_sensor=None,
                    chs_resultant=[], i0=0, i1=None, saveinterval=1000,
-                   csv=True, suffix=None, A=None,
+                   csv=True, suffix=None, A=None, add_sigs={},
                    ch_wind=None, save_new_sigs=False, xlsx=False):
         """
         Calculate statistics and save them in a pandas dataframe. Save also
@@ -4030,6 +4030,10 @@ class Cases(object):
             needs to be calculated. When set to None, ch_fatigue = ch_sel,
             and hence all channels will have a fatigue analysis.
 
+        add_sigs : dict, default={}
+            Channel name (key) / expression (value) pairs. For example:
+            '[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'
+
         chs_resultant
 
         add_sensor
@@ -4125,6 +4129,8 @@ class Cases(object):
 
         df_dict = None
         add_stats = True
+        # for finding the [channel name] tags in the add_sigs expressions
+        regex = re.compile(r'(\[.*?\])')
 
         for ii, (cname, case) in enumerate(self.cases.items()):
 
@@ -4161,6 +4167,25 @@ class Cases(object):
             sig_size = self.res.N  # len(self.sig[i0:i1,0])
             new_sigs = np.ndarray((sig_size, 0))
 
+            for name, expr in add_sigs.items():
+                channel_tags = regex.findall(expr)
+                # replace each [channel name] tag by an indexing expression
+                template = "self.sig[:,self.res.ch_dict['{}']['chi']]"
+                for chan in channel_tags:
+                    # substitute in a single step so a bare channel name that
+                    # occurs elsewhere in the expression is never touched
+                    expr = expr.replace(chan, template.format(chan[1:-1]))
+
+                sig_add = np.empty((len(self.sig[:,0]), 1))
+                sig_add[:,0] = eval(expr)
+
+                ch_dict_new[name] = {}
+                ch_dict_new[name]['chi'] = i_new_chans
+                ch_df_new = add_df_row(ch_df_new, **{'chi':i_new_chans,
+                                                   'ch_name':name})
+                i_new_chans += 1
+                new_sigs = np.append(new_sigs, sig_add, axis=1)
+
             if add_sensor is not None:
                 chi1 = self.res.ch_dict[add_sensor['ch1_name']]['chi']
                 chi2 = self.res.ch_dict[add_sensor['ch2_name']]['chi']
@@ -4301,10 +4326,9 @@ class Cases(object):
                 df_new_sigs = pd.DataFrame(new_sigs, columns=keys)
                 respath = os.path.join(case['[run_dir]'], case['[res_dir]'])
                 resfile = case['[case_id]']
-                fname = os.path.join(respath, resfile + '_postres.h5')
+                fname = os.path.join(respath, resfile + '_postres.csv')
                 print('    saving post-processed res: %s...' % fname, end='')
-                df_new_sigs.to_hdf(fname, 'table', mode='w', format='table',
-                                   complevel=9, complib=self.complib)
+                df_new_sigs.to_csv(fname, sep='\t')
                 print('done!')
                 del df_new_sigs
 
diff --git a/wetb/prepost/dlctemplate.py b/wetb/prepost/dlctemplate.py
index 87c4ba9f..c7827ed6 100644
--- a/wetb/prepost/dlctemplate.py
+++ b/wetb/prepost/dlctemplate.py
@@ -344,6 +344,9 @@ def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
                               nr_procs_series=15, processes=1,
                               walltime='20:00:00', chunks_dir='zip-chunks-gorm')
 
+    df = sim.Cases(cases).cases2df()
+    df.to_excel(os.path.join(POST_DIR, sim_id + '.xls'))
+
 
 def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
                 force_dir=False, update=False, saveinterval=2000, csv=False,
@@ -385,6 +388,11 @@ def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
     if statistics:
         i0, i1 = 0, -1
 
+        # example for combination of signals
+#        name = 'stress1'
+#        expr = '[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'
+#        add_sigs = {name:expr}
+
         # in addition, sim_id and case_id are always added by default
         tags = ['[Case folder]']
         add = None
@@ -395,7 +403,7 @@ def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
                                  update=update, saveinterval=saveinterval,
                                  suffix=suffix, save_new_sigs=save_new_sigs,
                                  csv=csv, m=m, neq=None, no_bins=no_bins,
-                                 chs_resultant=[], A=A)
+                                 chs_resultant=[], A=A, add_sigs={})
         # annual energy production
         if AEP:
             df_AEP = cc.AEP(df_stats, csv=csv, update=update, save=True)
@@ -486,7 +494,7 @@ if __name__ == '__main__':
                         'using the 64-bit Mann turbulence box generator. '
                         'This can be usefull if your turbulence boxes are too '
                         'big for running in HAWC2 32-bit mode. Only works on '
-                        'Jess. ')
+                        'Jess.')
     parser.add_argument('--walltime', default='04:00:00', type=str,
                         action='store', dest='walltime', help='Queue walltime '
                         'for each case/pbs file, format: HH:MM:SS '
-- 
GitLab
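
Usage note: a minimal sketch of how the new add_sigs option can be driven
from a post-processing script, following the commented-out example in
dlctemplate.py. The POST_DIR, sim_id and 'stress1' values, the channel
names, and the exact Cases() constructor arguments are illustrative
assumptions, not prescribed by this patch.

    from wetb.prepost import Simulations as sim

    POST_DIR = 'postpro/'   # hypothetical post-processing directory
    sim_id = 'demo01'       # hypothetical simulation id

    cc = sim.Cases(POST_DIR, sim_id, rem_failed=True)
    # combine two force components into one derived channel: each
    # [channel name] tag is replaced internally with the matching result
    # column and the expression is then evaluated per time step
    add_sigs = {'stress1': '[p1-p1-node-002-forcevec-z]*3 + '
                           '[p1-p1-node-002-forcevec-y]'}
    df_stats = cc.statistics(add_sigs=add_sigs, save_new_sigs=True,
                             csv=True, update=False)

The combined signal appears in the statistics dataframe under the name
given as the dictionary key, and with save_new_sigs=True it is also
written to a '_postres' file next to the original result file.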