diff --git a/wetb/fatigue_tools/tests/test_fatigue.py b/wetb/fatigue_tools/tests/test_fatigue.py
index 2a7bfb253db90e72a582f9db3550e2c85e77ab9d..a25745e157215dc1666b40e66dee3bfb4d5247dc 100644
--- a/wetb/fatigue_tools/tests/test_fatigue.py
+++ b/wetb/fatigue_tools/tests/test_fatigue.py
@@ -14,8 +14,8 @@ standard_library.install_aliases()
 import unittest
 
 import numpy as np
-from wetb.fatigue_tools.fatigue import (eq_load, rainflow_astm,
-                                        rainflow_windap, cycle_matrix)
+from wetb.fatigue_tools.fatigue import eq_load, rainflow_astm, rainflow_windap, \
+    cycle_matrix
 from wetb.hawc2 import Hawc2io
 import os
 
@@ -31,50 +31,28 @@ class TestFatigueTools(unittest.TestCase):
         m = 1
         point_per_deg = 100
 
-        for amplitude in [1,2,3]:
-            peak2peak = amplitude * 2
-            # sine signal with 10 periods (20 peaks)
-            nr_periods = 10
-            time = np.linspace(0, nr_periods*2*np.pi, point_per_deg*180)
-            neq = time[-1]
-            # mean value of the signal shouldn't matter
-            signal = amplitude * np.sin(time) + 5
-            r_eq_1hz = eq_load(signal, no_bins=1, m=m, neq=neq)[0]
-            r_eq_1hz_expected = ((2*nr_periods*amplitude**m)/neq)**(1/m)
-            np.testing.assert_allclose(r_eq_1hz, r_eq_1hz_expected)
-
-            # sine signal with 20 periods (40 peaks)
-            nr_periods = 20
-            time = np.linspace(0, nr_periods*2*np.pi, point_per_deg*180)
-            neq = time[-1]
-            # mean value of the signal shouldn't matter
-            signal = amplitude * np.sin(time) + 9
-            r_eq_1hz2 = eq_load(signal, no_bins=1, m=m, neq=neq)[0]
-            r_eq_1hz_expected2 = ((2*nr_periods*amplitude**m)/neq)**(1/m)
-            np.testing.assert_allclose(r_eq_1hz2, r_eq_1hz_expected2)
-
-            # 1hz equivalent should be independent of the length of the signal
-            np.testing.assert_allclose(r_eq_1hz, r_eq_1hz2)
-
-    def test_rainflow_combi(self):
-        """Signal with two frequencies and amplitudes
-        """
-
-        amplitude = 1
-        # peak2peak = amplitude * 2
-        m = 1
-        point_per_deg = 100
-
+        # sine signal with 10 periods (20 peaks)
         nr_periods = 10
         time = np.linspace(0, nr_periods*2*np.pi, point_per_deg*180)
-
-        signal = (amplitude*np.sin(time)) + 5 + (amplitude*0.2*np.cos(5*time))
-        cycles, ampl_bin_mean, ampl_edges, mean_bin_mean, mean_edges = \
-            cycle_matrix(signal, ampl_bins=10, mean_bins=5)
-
-        cycles.sum()
-
-
+        neq = time[-1]
+        # mean value of the signal shouldn't matter
+        signal = amplitude * np.sin(time) + 5
+        r_eq_1hz = eq_load(signal, no_bins=1, m=m, neq=neq)[0]
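+        # rainflow counting a pure sine gives nr_periods cycles with range
+        # 2*amplitude, so for m=1 the expected 1 Hz equivalent load is
+        # 2*nr_periods*amplitude/neq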
+        r_eq_1hz_expected = ((2*nr_periods*amplitude**m)/neq)**(1/m)
+        np.testing.assert_allclose(r_eq_1hz, r_eq_1hz_expected)
+
+        # sine signal with 20 periods (40 peaks)
+        nr_periods = 20
+        time = np.linspace(0, nr_periods*2*np.pi, point_per_deg*180)
+        neq = time[-1]
+        # mean value of the signal shouldn't matter
+        signal = amplitude * np.sin(time) + 9
+        r_eq_1hz2 = eq_load(signal, no_bins=1, m=m, neq=neq)[0]
+        r_eq_1hz_expected2 = ((2*nr_periods*amplitude**m)/neq)**(1/m)
+        np.testing.assert_allclose(r_eq_1hz2, r_eq_1hz_expected2)
+
+        # 1hz equivalent load should be independent of the length of the signal
+        np.testing.assert_allclose(r_eq_1hz, r_eq_1hz2)
 
     def test_astm1(self):
 
diff --git a/wetb/prepost/Simulations.py b/wetb/prepost/Simulations.py
index cc50682eb7047f9d73100a00457ee70d76cafe47..c754717e9e5dfab3a8c8abc7c19d07d1a65ed87b 100755
--- a/wetb/prepost/Simulations.py
+++ b/wetb/prepost/Simulations.py
@@ -57,7 +57,6 @@ from wetb.dlc import high_level as dlc
 from wetb.prepost.GenerateHydro import hydro_input
 from wetb.utils.envelope import compute_envelope
 
-
 def load_pickled_file(source):
     FILE = open(source, 'rb')
     result = pickle.load(FILE)
@@ -399,6 +398,7 @@ def run_local_ram(cases, check_log=True):
 
     return cases
 
+
 def run_local(cases, silent=False, check_log=True):
     """
     Run all HAWC2 simulations locally from cases
@@ -552,17 +552,17 @@ def run_local(cases, silent=False, check_log=True):
 
     return cases
 
+
 def prepare_launch(iter_dict, opt_tags, master, variable_tag_func,
                 write_htc=True, runmethod='none', verbose=False,
                 copyback_turb=True, msg='', silent=False, check_log=True,
                 update_cases=False, ignore_non_unique=False,
-                run_only_new=False, windows_nr_cpus=2,
+                run_only_new=False, windows_nr_cpus=2, wine_64bit=False,
                 pbs_fname_appendix=True, short_job_names=True, qsub='',
                 update_model_data=True, maxcpu=1, pyenv='wetb_py3',
                 m=[3,4,6,8,9,10,12], postpro_node_zipchunks=True,
                 postpro_node=False, exesingle=None, exechunks=None,
-                wine_arch='win32', wine_prefix='~/.wine32',
-                pyenv_cmd='source /home/python/miniconda3/bin/activate'):
+                wine_arch='win32', wine_prefix='~/.wine32'):
     """
     Create the htc files, pbs scripts and replace the tags in master file
     =====================================================================
@@ -802,20 +802,213 @@ def prepare_launch(iter_dict, opt_tags, master, variable_tag_func,
            copyback_turb=copyback_turb, qsub=qsub,
            windows_nr_cpus=windows_nr_cpus, short_job_names=short_job_names,
            pbs_fname_appendix=pbs_fname_appendix, silent=silent, maxcpu=maxcpu,
-           pyenv=pyenv, m=[3,4,6,8,9,10,12],
+           pyenv=pyenv, wine_64bit=wine_64bit, m=[3,4,6,8,9,10,12],
            postpro_node_zipchunks=postpro_node_zipchunks,
            postpro_node=postpro_node, exesingle=exesingle, exechunks=exechunks,
-           wine_arch=wine_arch, wine_prefix=wine_prefix, pyenv_cmd=pyenv_cmd)
+           wine_arch=wine_arch, wine_prefix=wine_prefix)
 
     return cases
 
+def prepare_relaunch(cases, runmethod='gorm', verbose=False, write_htc=True,
+                     copyback_turb=True, silent=False, check_log=True):
+    """
+    Instead of redoing everything, we now only recreate the htc files for
+    the cases in the given cases dict. Nothing else changes. The data and
+    zip files are not updated, and the convenience tagfile is not recreated.
+    However, the saved (pickled) cases dict corresponding to the sim_id is
+    updated!
+
+    This method is useful to correct mistakes made for some cases.
+
+    It is advised not to change the case_id or sim_id of the cases.
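+
+    Example (a minimal sketch; assumes the cases dict was loaded from the
+    sim_id pickle file, e.g. with load_pickled_file)::
+
+        cases = load_pickled_file(post_dir + sim_id + '.pkl')
+        prepare_relaunch(cases, runmethod='gorm', write_htc=True)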
+    """
+
+    # initiate the HtcMaster object, load the master file
+    master = HtcMaster()
+    # for invariant tags, load a random case. Necessary before we can load
+    # the master file, otherwise we don't know which master to load
+    master.tags = cases[list(cases.keys())[0]]
+    master.loadmaster()
+
+    # load the original cases dict
+    post_dir = master.tags['[post_dir]']
+    FILE = open(post_dir + master.tags['[sim_id]'] + '.pkl', 'rb')
+    cases_orig = pickle.load(FILE)
+    FILE.close()
+
+    sim_nr = 0
+    sim_total = len(cases)
+    for case, casedict in cases.items():
+        sim_nr += 1
+
+        # set all the tags in the HtcMaster file
+        master.tags = casedict
+        # returns a dictionary with all the tags used for this
+        # specific case
+        htc = master.createcase(write_htc=write_htc)
+        #htc=master.createcase_check(cases_repo,write_htc=write_htc)
+
+        if not silent:
+            print('htc progress: ' + format(sim_nr, '3.0f') + '/' + \
+                   format(sim_total, '3.0f'))
+
+        if verbose:
+            print('===master.tags===\n', master.tags)
+
+        # make sure the current case already exists, otherwise we are not
+        # relaunching!
+        if case not in cases_orig:
+            msg = 'relaunch only works for existing cases: %s' % case
+            raise KeyError(msg)
+
+        # save in the big cases. Note that values() gives a copy!
+        # remark, what about the copying done at the end of master.createcase?
+        # is that redundant then?
+        cases[list(htc.keys())[0]] = list(htc.values())[0]
+
+        if verbose:
+            print('created cases for: %s.htc\n' % master.tags['[case_id]'])
+
+    launch(cases, runmethod=runmethod, verbose=verbose, check_log=check_log,
+           copyback_turb=copyback_turb, silent=silent)
+
+    # update the original file: overwrite the newly set cases
+    FILE = open(post_dir + master.tags['[sim_id]'] + '.pkl', 'wb')
+    cases_orig.update(cases)
+    pickle.dump(cases_orig, FILE, protocol=2)
+    FILE.close()
+
+def prepare_launch_cases(cases, runmethod='gorm', verbose=False, write_htc=True,
+                         copyback_turb=True, silent=False, check_log=True,
+                         variable_tag_func=None, sim_id_new=None):
+    """
+    Same as prepare_launch, but now the input is just a cases object (cao).
+    If relaunching some earlier defined simulations, make sure to at least
+    rename the sim_id, otherwise it could become messy: things end up in the
+    same folder, the sim_id post file gets overwritten, ...
+
+    In case you do not use a variable_tag_func, make sure all your tags are
+    defined in cases. First and foremost, this means that the case_id does
+    not get updated to a new sim_id, the paths are not updated, etc.
+
+    When given a variable_tag_func, make sure it is properly defined: do not
+    base a variable tag's value on itself, to avoid chains of updated values.
+
+    The master htc file will be loaded and all tags defined in the cases dict
+    will be applied to it as is.
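+
+    Example (a minimal sketch; my_variable_tag_func is a hypothetical user
+    function that takes and returns an HtcMaster instance)::
+
+        cases = load_pickled_file(post_dir + sim_id + '.pkl')
+        prepare_launch_cases(cases, runmethod='gorm',
+                             variable_tag_func=my_variable_tag_func)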
+    """
+
+    # initiate the HtcMaster object, load the master file
+    master = HtcMaster()
+    # for invariant tags, load a random case. Necessary before we can load
+    # the master file, otherwise we don't know which master to load
+    master.tags = cases[list(cases.keys())[0]]
+    # load the master htc file as a string under the master.tags
+    master.loadmaster()
+    # create the execution folder structure and copy all data to it
+    # but reset to the correct launch dirs first
+    sim_id = master.tags['[sim_id]']
+    if runmethod in ['local', 'local-script', 'none']:
+        path = '/home/dave/PhD_data/HAWC2_results/ojf_post/%s/' % sim_id
+        master.tags['[run_dir]'] = path
+    elif runmethod == 'jess':
+        master.tags['[run_dir]'] = '/mnt/jess/HAWC2/ojf_post/%s/' % sim_id
+    elif runmethod == 'gorm':
+        master.tags['[run_dir]'] = '/mnt/gorm/HAWC2/ojf_post/%s/' % sim_id
+    else:
+        msg = 'unsupported runmethod, options: none, local, local-script, ' \
+              'jess, gorm'
+        raise ValueError(msg)
+
+    master.create_run_dir()
+    master.copy_model_data()
+    # create the zip file
+    master.create_model_zip()
+
+    sim_nr = 0
+    sim_total = len(cases)
+
+    # for safety, create a new cases dict. At the end of the ride both cases
+    # and cases_new should be identical!
+    cases_new = {}
+
+    # cycle through all the combinations
+    for case, casedict in cases.items():
+        sim_nr += 1
+
+        sim_id = casedict['[sim_id]']
+        # reset the launch dirs
+        if runmethod in ['local', 'local-script', 'none']:
+            path = '/home/dave/PhD_data/HAWC2_results/ojf_post/%s/' % sim_id
+            casedict['[run_dir]'] = path
+        elif runmethod == 'thyra':
+            casedict['[run_dir]'] = '/mnt/thyra/HAWC2/ojf_post/%s/' % sim_id
+        elif runmethod == 'gorm':
+            casedict['[run_dir]'] = '/mnt/gorm/HAWC2/ojf_post/%s/' % sim_id
+        else:
+            msg = 'unsupported runmethod, options: none, local, ' \
+                  'local-script, thyra, gorm'
+            raise ValueError(msg)
+
+        # -----------------------------------------------------------
+        # set all the tags in the HtcMaster file
+        master.tags = casedict
+        # apply the variable tags if applicable
+        if variable_tag_func:
+            master = variable_tag_func(master)
+        elif sim_id_new:
+            # TODO: finish this
+            # replace all the sim_id occurrences with the updated one
+            # this means also the case_id tag changes!
+            pass
+        # -----------------------------------------------------------
+
+        # returns a dictionary with all the tags used for this specific case
+        htc = master.createcase(write_htc=write_htc)
+
+        if not silent:
+            print('htc progress: ' + format(sim_nr, '3.0f') + '/' + \
+                   format(sim_total, '3.0f'))
+
+        if verbose:
+            print('===master.tags===\n', master.tags)
+
+        # make sure the current case is unique!
+        if list(htc.keys())[0] in cases_new:
+            msg = 'non unique case in cases: %s' % list(htc.keys())[0]
+            raise KeyError(msg)
+        # save in the big cases. Note that values() gives a copy!
+        # remark, what about the copying done at the end of master.createcase?
+        # is that redundant then?
+        cases_new[list(htc.keys())[0]] = list(htc.values())[0]
+
+        if verbose:
+            print('created cases for: %s.htc\n' % master.tags['[case_id]'])
+
+    post_dir = master.tags['[post_dir]']
+
+    # create directory if post_dir does not exist
+    try:
+        os.makedirs(post_dir)
+    except OSError:
+        pass
+    FILE = open(post_dir + master.tags['[sim_id]'] + '.pkl', 'wb')
+    pickle.dump(cases_new, FILE, protocol=2)
+    FILE.close()
+
+    if not silent:
+        print('\ncases saved at:')
+        print(post_dir + master.tags['[sim_id]'] + '.pkl')
+
+    launch(cases_new, runmethod=runmethod, verbose=verbose,
+           copyback_turb=copyback_turb, check_log=check_log)
+
+    return cases_new
+
+
 def launch(cases, runmethod='none', verbose=False, copyback_turb=True,
            silent=False, check_log=True, windows_nr_cpus=2, qsub='time',
            pbs_fname_appendix=True, short_job_names=True,
-           maxcpu=1, pyenv='wetb_py3', m=[3,4,6,8,9,10,12],
+           maxcpu=1, pyenv='wetb_py3', wine_64bit=False, m=[3,4,6,8,9,10,12],
            postpro_node_zipchunks=True, postpro_node=False, exesingle=None,
-           exechunks=None, wine_arch='win32', wine_prefix='~/.wine32',
-           pyenv_cmd='source /home/python/miniconda3/bin/activate'):
+           exechunks=None, wine_arch='win32', wine_prefix='~/.wine32'):
     """
     The actual launching of all cases in the Cases dictionary. Note that here
     only the PBS files are written and not the actuall htc files.
@@ -856,12 +1049,11 @@ def launch(cases, runmethod='none', verbose=False, copyback_turb=True,
         # create the pbs object
         pbs = PBS(cases, short_job_names=short_job_names, pyenv=pyenv,
                   pbs_fname_appendix=pbs_fname_appendix, qsub=qsub,
-                  verbose=verbose, silent=silent,
+                  verbose=verbose, silent=silent, wine_64bit=wine_64bit,
                   m=m, postpro_node_zipchunks=postpro_node_zipchunks,
                   postpro_node=postpro_node, exesingle=exesingle,
                   exechunks=exechunks, wine_arch=wine_arch,
                   wine_prefix=wine_prefix)
-        pbs.pyenv_cmd = pyenv_cmd
         pbs.copyback_turb = copyback_turb
         pbs.pbs_out_dir = pbs_out_dir
         pbs.maxcpu = maxcpu
@@ -877,6 +1069,7 @@ def launch(cases, runmethod='none', verbose=False, copyback_turb=True,
               'windows-script, local-ram, none, pbs'
         raise ValueError(msg)
 
+
 def post_launch(cases, save_iter=False, silent=False, suffix=None,
                 path_errorlog=None):
     """
@@ -1013,6 +1206,7 @@ def post_launch(cases, save_iter=False, silent=False, suffix=None,
 
     return cases_fail
 
+
 def copy_pbs_in_failedcases(cases_fail, path='pbs_in_fail', silent=True):
     """
     Copy all the pbs_in files from failed cases to a new directory so it
@@ -1036,6 +1230,7 @@ def copy_pbs_in_failedcases(cases_fail, path='pbs_in_fail', silent=True):
             os.makedirs(os.path.dirname(dst))
         shutil.copy2(src, dst)
 
+
 def logcheck_case(errorlogs, cases, case, silent=False):
     """
     Check logfile of a single case
@@ -1125,7 +1320,6 @@ class Log(object):
         for k in self.log:
             print(k)
 
-
 class HtcMaster(object):
     """
     """
@@ -1748,7 +1942,7 @@ class PBS(object):
 
     def __init__(self, cases, qsub='time', silent=False, pyenv='wetb_py3',
                  pbs_fname_appendix=True, short_job_names=True, verbose=False,
-                 m=[3,4,6,8,9,10,12], exesingle=None,
+                 wine_64bit=False, m=[3,4,6,8,9,10,12], exesingle=None,
                  postpro_node_zipchunks=True, postpro_node=False,
                  exechunks=None, wine_arch='win32', wine_prefix='~/.wine32'):
         """
@@ -1791,6 +1985,8 @@ class PBS(object):
         # run in 32-bit or 64-bit mode. Note this uses the same assumptions
         # on how to configure wine in toolbox/pbsutils/config-wine-hawc2.sh
         wineparam = (wine_arch, wine_prefix)
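+        # wine_64bit is a convenience switch: it overrides wine_arch and
+        # wine_prefix with the 64-bit defaults (only supported on Jess)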
+        if wine_64bit:
+            wineparam = ('win64', '~/.wine')
         self.winebase = 'time WINEARCH=%s WINEPREFIX=%s ' % wineparam
 
         self.wine = self.winebase + 'wine'
@@ -5115,6 +5311,83 @@ class EnvelopeClass(object):
         Fz = tbl.Float32Col()
 
 
+# TODO: implement this
+class Results(object):
+    """
+    Move all Hawc2io to here? NO: this should be the wrapper, to interface
+    the htc_dict with the io functions.
+
+    There should be a bare-metal module/class for those who only want basic
+    Python support for HAWC2 result files and/or launching simulations.
+
+    How to properly design this module? Change each class into a module? Or
+    leave it like this?
+    """
+
+    # OK, for now use this to do operations on HAWC2 results files
+
+    def __init__(self):
+        """
+        """
+        pass
+
+    def m_equiv(self, st_arr, load, pos):
+        r"""Centrifugal corrected equivalent moment
+
+        Convert beam loading into a single equivalent bending moment. Note that
+        this is dependent on the location in the cross section. Due to the
+        way we measure the strain on the blade and how we did the calibration
+        of those sensors.
+
+        .. math::
+
+            \epsilon = \frac{M_{x_{equiv}}y}{EI_{xx}} = \frac{M_x y}{EI_{xx}}
+            + \frac{M_y x}{EI_{yy}} + \frac{F_z}{EA}
+
+            M_{x_{equiv}} = M_x + \frac{I_{xx}}{I_{yy}} M_y \frac{x}{y}
+            + \frac{I_{xx}}{Ay} F_z
+
+        Parameters
+        ----------
+
+        st_arr : np.ndarray(19)
+            Only one line of the st_arr is allowed and it should correspond
+            to the correct radial position of the strain gauge.
+
+        load : list(6)
+            list containing the load time series of following components
+            .. math:: load = F_x, F_y, F_z, M_x, M_y, M_z
+            and where each component is an ndarray(m)
+
+        pos : np.ndarray(2)
+            x,y position wrt neutral axis in the cross section for which the
+            equivalent load should be calculated
+
+        Returns
+        -------
+
+        m_eq : ndarray(m)
+            Centrifugally corrected equivalent bending moment time series.
+
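+        Examples
+        --------
+        A minimal sketch; ``sig``, ``st_arr``, ``x`` and ``y`` are assumed to
+        hold the six load channels, the matching st-file row and the sensor
+        position in the cross section::
+
+            load = [sig[:, i] for i in range(6)]  # Fx, Fy, Fz, Mx, My, Mz
+            m_eq = Results().m_equiv(st_arr, load, np.array([x, y]))
+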
+        """
+
+        F_z = load[2]
+        M_x = load[3]
+        M_y = load[4]
+
+        x, y = pos[0], pos[1]
+
+        A = st_arr[ModelData.st_headers.A]
+        I_xx = st_arr[ModelData.st_headers.Ixx]
+        I_yy = st_arr[ModelData.st_headers.Iyy]
+
+        M_x_equiv = M_x + ( (I_xx/I_yy)*M_y*(x/y) ) + ( F_z*I_xx/(A*y) )
+        # or ignore edgewise moment
+        #M_x_equiv = M_x + ( F_z*I_xx/(A*y) )
+
+        return M_x_equiv
+
+
 class MannTurb64(prepost.PBSScript):
     """
     alfaeps, L, gamma, seed, nr_u, nr_v, nr_w, du, dv, dw high_freq_comp
@@ -5262,7 +5535,6 @@ def eigenbody(cases, debug=False):
 
     return cases
 
-
 def eigenstructure(cases, debug=False):
     """
     Read HAWC2 structure eigenalysis result file
diff --git a/wetb/prepost/dlcplots.py b/wetb/prepost/dlcplots.py
index b166d88cdd81395af76bf0a69e52440c17f9c0f2..de17561a12a8f1dac837399daa98c358609fd74c 100644
--- a/wetb/prepost/dlcplots.py
+++ b/wetb/prepost/dlcplots.py
@@ -52,7 +52,7 @@ plt.rc('legend', numpoints=1)
 plt.rc('legend', borderaxespad=0)
 
 
-def merge_sim_ids(sim_ids, post_dirs, post_dir_save=False, columns=None):
+def merge_sim_ids(sim_ids, post_dirs, post_dir_save=False):
     """
     """
 
@@ -73,9 +73,7 @@ def merge_sim_ids(sim_ids, post_dirs, post_dir_save=False, columns=None):
             else:
                 post_dir = post_dirs
             cc = sim.Cases(post_dir, sim_id, rem_failed=True)
-            df_stats, _, _ = cc.load_stats(leq=False)
-            if columns is not None:
-                df_stats = df_stats[columns]
+            df_stats, _, _ = cc.load_stats(columns=None, leq=False)
 
             # stats has only a few columns identifying the different cases
             # add some more for selecting them
@@ -139,9 +137,7 @@ def merge_sim_ids(sim_ids, post_dirs, post_dir_save=False, columns=None):
         if isinstance(post_dirs, list):
             post_dir = post_dirs[0]
         cc = sim.Cases(post_dir, sim_id, rem_failed=True)
-        df_stats, _, _ = cc.load_stats(columns=columns, leq=False)
-        if columns is not None:
-            df_stats = df_stats[columns]
+        df_stats, _, _ = cc.load_stats(leq=False)
         run_dirs = [df_stats['[run_dir]'].unique()[0]]
 
         # stats has only a few columns identifying the different cases
diff --git a/wetb/prepost/dlctemplate.py b/wetb/prepost/dlctemplate.py
index a0cb6abb9734a4e6039e02fe5f121052f5e70e0e..8f7685635cecf817dbea21332cac6fad97a32e6b 100644
--- a/wetb/prepost/dlctemplate.py
+++ b/wetb/prepost/dlctemplate.py
@@ -266,9 +266,9 @@ def variable_tag_func_mod1(master, case_id_short=False):
 def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
                       runmethod=None, write_htc=True, zipchunks=False,
                       walltime='04:00:00', postpro_node=False, compress=False,
-                      dlcs_dir='htc/DLCs', postpro_node_zipchunks=True,
-                      wine_arch='win32', wine_prefix='~/.wine32',
-                      m=[3,4,6,8,9,10,12]):
+                      dlcs_dir='htc/DLCs', wine_64bit=False,
+                      m=[3,4,6,8,9,10,12], postpro_node_zipchunks=True,
+                      wine_arch='win32', wine_prefix='~/.wine32'):
     """
     Launch load cases defined in Excel files
     """
@@ -331,10 +331,11 @@ def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
                                ignore_non_unique=False, run_only_new=False,
                                pbs_fname_appendix=False, short_job_names=False,
                                silent=silent, verbose=verbose, pyenv=pyenv,
-                               m=[3,4,6,8,9,10,12], postpro_node=postpro_node,
-                               exechunks=None, exesingle=None,
+                               wine_64bit=wine_64bit, m=[3,4,6,8,9,10,12],
                                postpro_node_zipchunks=postpro_node_zipchunks,
-                               wine_arch=wine_arch, wine_prefix=wine_prefix)
+                               postpro_node=postpro_node, exesingle=None,
+                               exechunks=None, wine_arch=wine_arch,
+                               wine_prefix=wine_prefix)
 
     if pbs_turb:
         # to avoid confusing HAWC2 simulations and Mann64 generator PBS files,
@@ -350,13 +351,15 @@ def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
         # note that walltime here is for running all cases assigned to the
         # respective nodes. It is not walltime per case.
         sorts_on = ['[DLC]', '[Windspeed]']
-        create_chunks_htc_pbs(cases, sort_by_values=sorts_on, queue='workq',
-                              ppn=20, nr_procs_series=3, walltime='20:00:00',
+        create_chunks_htc_pbs(cases, sort_by_values=sorts_on, ppn=20,
+                              nr_procs_series=3, walltime='20:00:00',
                               chunks_dir='zip-chunks-jess', compress=compress,
+                              queue='workq', wine_64bit=wine_64bit,
                               wine_arch=wine_arch, wine_prefix=wine_prefix)
-        create_chunks_htc_pbs(cases, sort_by_values=sorts_on, queue='workq',
-                              ppn=12, nr_procs_series=3, walltime='20:00:00',
+        create_chunks_htc_pbs(cases, sort_by_values=sorts_on, ppn=12,
+                              nr_procs_series=3, walltime='20:00:00',
                               chunks_dir='zip-chunks-gorm', compress=compress,
+                              queue='workq', wine_64bit=wine_64bit,
                               wine_arch=wine_arch, wine_prefix=wine_prefix)
 
     df = sim.Cases(cases).cases2df()
@@ -671,8 +674,7 @@ if __name__ == '__main__':
                         'generated DLC exchange files, default: htc/DLCs/')
     parser.add_argument('--wine_64bit', default=False, action='store_true',
                         dest='wine_64bit', help='Run wine in 64-bit mode. '
-                        'Only works on Jess. Sets --wine_arch and '
-                        '--wine_prefix to win64 and ~/.wine respectively.')
+                        'Only works on Jess.')
     parser.add_argument('--wine_arch', action='store', default='win32', type=str,
                         dest='wine_arch', help='Set to win32 for 32-bit, and '
                         'win64 for 64-bit. 64-bit only works on Jess. '
@@ -714,9 +716,6 @@ if __name__ == '__main__':
     P_RUN, P_SOURCE, PROJECT, sim_id, P_MASTERFILE, MASTERFILE, POST_DIR \
         = dlcdefs.configure_dirs(verbose=True)
 
-    if opt.wine_64bit:
-        opt.wine_arch, opt.wine_prefix = ('win64', '~/.wine')
-
     if opt.gendlcs:
         DLB = GenerateDLCCases()
         DLB.execute(filename=os.path.join(P_SOURCE, opt.dlcmaster),
@@ -729,9 +728,9 @@ if __name__ == '__main__':
                           pbs_turb=opt.pbs_turb, walltime=opt.walltime,
                           postpro_node=opt.postpro_node, runmethod=RUNMETHOD,
                           dlcs_dir=os.path.join(P_SOURCE, 'htc', 'DLCs'),
+                          compress=opt.compress, wine_64bit=opt.wine_64bit,
                           postpro_node_zipchunks=opt.no_postpro_node_zipchunks,
-                          wine_arch=opt.wine_arch, wine_prefix=opt.wine_prefix,
-                          compress=opt.compress)
+                          wine_arch=opt.wine_arch, wine_prefix=opt.wine_prefix)
     # post processing: check log files, calculate statistics
     if opt.check_logs or opt.stats or opt.fatigue or opt.envelopeblade \
         or opt.envelopeturbine or opt.AEP:
diff --git a/wetb/prepost/hawcstab2.py b/wetb/prepost/hawcstab2.py
index 825f4e9a65c3a97e725857b06b9fd300e192b7eb..5d630e929b31eab836a357f799c2c29642c18b9c 100644
--- a/wetb/prepost/hawcstab2.py
+++ b/wetb/prepost/hawcstab2.py
@@ -410,14 +410,10 @@ class ReadControlTuning(object):
     def __init__(self):
         """
         """
-        self._aerogains = False
+        pass
 
     def parse_line(self, line, controller):
 
-        if line.startswith('Aerodynamic gains'):
-            self._aerogains = True
-            return
-
         split1 = line.split('=')
         var1 = split1[0].strip()
         try:
@@ -453,17 +449,8 @@ class ReadControlTuning(object):
                 elif i == 10:
                     controller = 'aero_damp'
                     setattr(self, controller, dummy())
-                elif not self._aerogains:
+                else:
                     self.parse_line(line, controller)
-                elif self._aerogains:
-                    break
-
-        arr = np.loadtxt(fpath, skiprows=17)
-        columns = ['theta', 'dq/dtheta', 'dq/dtheta_fit', 'dq/domega',
-                   'dq/domega_fit']
-        self.aero_gains_units = ['[deg]', '[kNm/deg]', '[kNm/deg]',
-                                 '[kNm/(rad/s)]', '[kNm/(rad/s)]']
-        self.aero_gains = pd.DataFrame(arr, columns=columns)
 
         # set some parameters to zero for the linear case, or when aerodynamic
         # gain scheduling is not used
diff --git a/wetb/prepost/simchunks.py b/wetb/prepost/simchunks.py
index 1cf983a723efc994c4cfe4d7e0568d0ee748985a..785e9dd6b4ec544bd32af7e353b0c744baec0e89 100644
--- a/wetb/prepost/simchunks.py
+++ b/wetb/prepost/simchunks.py
@@ -35,11 +35,10 @@ from wetb.prepost.Simulations import Cases
 
 
 def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
-                          nr_procs_series=9, queue='workq', compress=False,
+                          nr_procs_series=9, queue='workq', pyenv='wetb_py3',
                           walltime='24:00:00', chunks_dir='zip-chunks-jess',
-                          wine_arch='win32', wine_prefix='~/.wine32',
-                          pyenv_cmd='source /home/python/miniconda3/bin/activate',
-                          pyenv='wetb_py3'):
+                          compress=False, wine_64bit=False, wine_arch='win32',
+                          wine_prefix='~/.wine32'):
     """Group a large number of simulations htc and pbs launch scripts into
     different zip files so we can run them with find+xargs on various nodes.
     """
@@ -165,7 +164,8 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
 # """
 
     def make_pbs_chunks(df, ii, sim_id, run_dir, model_zip, compress=False,
-                        wine_arch='win32', wine_prefix='~/.wine32'):
+                        wine_64bit=False, wine_arch='win32',
+                        wine_prefix='~/.wine32'):
         """Create a PBS that:
             * copies all required files (zip chunk) to scratch disk
             * copies all required turbulence files to scratch disk
@@ -178,6 +178,8 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
         jobid = '%s_chnk_%05i' % (sim_id, ii)
 
         wineparam = (wine_arch, wine_prefix)
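+        # as in Simulations.PBS: wine_64bit overrides wine_arch and
+        # wine_prefix with the 64-bit defaults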
+        if wine_64bit:
+            wineparam = ('win64', '~/.wine')
 
         pbase = os.path.join('/scratch','$USER', '$PBS_JOBID', '')
         post_dir_base = post_dir.split(sim_id)[1]
@@ -216,8 +218,7 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
         # activate the python environment
         if pyenv is not None:
             pbs += 'echo "activate python environment %s"\n' % pyenv
-            rpl = (pyenv_cmd, pyenv)
-            pbs += '%s %s\n' % rpl
+            pbs += 'source /home/python/miniconda3/bin/activate %s\n' % pyenv
             # sometimes activating an environment fails due to a FileExistsError
             # is this because it is activated at the same time on another node?
             # check twice if the environment got activated for real
@@ -225,9 +226,9 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
             pbs += 'CMD=\"from distutils.sysconfig import get_python_lib;'
             pbs += 'print (get_python_lib().find(\'%s\'))"\n' % pyenv
             pbs += 'ACTIVATED=`python -c "$CMD"`\n'
-            pbs += 'if [ $ACTIVATED -eq -1 ]; then %s %s;fi\n' % rpl
+            pbs += 'if [ $ACTIVATED -eq -1 ]; then source activate %s;fi\n' % pyenv
             pbs += 'ACTIVATED=`python -c "$CMD"`\n'
-            pbs += 'if [ $ACTIVATED -eq -1 ]; then %s %s;fi\n' % rpl
+            pbs += 'if [ $ACTIVATED -eq -1 ]; then source activate %s;fi\n' % pyenv
 
         # =====================================================================
         # create all necessary directories at CPU_NR dirs
@@ -509,8 +510,8 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
     for ii, dfi in enumerate(df_iter):
         fname, ind = make_zip_chunks(dfi, i0+ii, sim_id, run_dir, model_zip)
         make_pbs_chunks(dfi, i0+ii, sim_id, run_dir, model_zip,
-                        wine_arch=wine_arch, wine_prefix=wine_prefix,
-                        compress=compress)
+                        compress=compress, wine_64bit=wine_64bit,
+                        wine_arch=wine_arch, wine_prefix=wine_prefix)
         df_ind = df_ind.append(ind)
         print(fname)
 
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-gorm/remote_chnk_00000.p b/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-gorm/remote_chnk_00000.p
index 2e74bfeb2dcb2f0f7e543d3a85c25533d67f8b7e..d81898a3b7f9eff5fb8aab66e9d52563c17f46d5 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-gorm/remote_chnk_00000.p
+++ b/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-gorm/remote_chnk_00000.p
@@ -18,9 +18,9 @@ source /home/python/miniconda3/bin/activate wetb_py3
 echo "CHECK 2x IF wetb_py3 IS ACTIVE, IF NOT TRY AGAIN"
 CMD="from distutils.sysconfig import get_python_lib;print (get_python_lib().find('wetb_py3'))"
 ACTIVATED=`python -c "$CMD"`
-if [ $ACTIVATED -eq -1 ]; then source /home/python/miniconda3/bin/activate wetb_py3;fi
+if [ $ACTIVATED -eq -1 ]; then source activate wetb_py3;fi
 ACTIVATED=`python -c "$CMD"`
-if [ $ACTIVATED -eq -1 ]; then source /home/python/miniconda3/bin/activate wetb_py3;fi
+if [ $ACTIVATED -eq -1 ]; then source activate wetb_py3;fi
 
 echo "----------------------------------------------------------------------"
 cd /scratch/$USER/$PBS_JOBID/
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-jess/remote_chnk_00000.p b/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-jess/remote_chnk_00000.p
index fa2fb7783f7c89914a0095e3b41ac39b3c4d3092..55f11f24915307d2a0f1ffb4bf7c028cf526481b 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-jess/remote_chnk_00000.p
+++ b/wetb/prepost/tests/data/demo_dlc/ref/zip-chunks-jess/remote_chnk_00000.p
@@ -18,9 +18,9 @@ source /home/python/miniconda3/bin/activate wetb_py3
 echo "CHECK 2x IF wetb_py3 IS ACTIVE, IF NOT TRY AGAIN"
 CMD="from distutils.sysconfig import get_python_lib;print (get_python_lib().find('wetb_py3'))"
 ACTIVATED=`python -c "$CMD"`
-if [ $ACTIVATED -eq -1 ]; then source /home/python/miniconda3/bin/activate wetb_py3;fi
+if [ $ACTIVATED -eq -1 ]; then source activate wetb_py3;fi
 ACTIVATED=`python -c "$CMD"`
-if [ $ACTIVATED -eq -1 ]; then source /home/python/miniconda3/bin/activate wetb_py3;fi
+if [ $ACTIVATED -eq -1 ]; then source activate wetb_py3;fi
 
 echo "----------------------------------------------------------------------"
 cd /scratch/$USER/$PBS_JOBID/