diff --git a/README b/README
index 71f6bfd589c92b6f17da40d045abd67a6aacb175..1d8ea5d83e3b1aee6266a9fc1342880878e610bf 100644
--- a/README
+++ b/README
@@ -1,87 +1,132 @@
+|build status| |coverage report|
 
-[![build status](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/badges/master/build.svg)](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/commits/master)
-[![coverage report](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/badges/master/coverage.svg)](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/commits/master)
+Introduction
+============
 
-# Introduction
+The Wind Energy Toolbox (or ``wetb``, pronounced wee-tee-bee) is a
+collection of Python scripts that facilitate working with (potentially a
+lot of) HAWC2, HAWCStab2, FAST or other text input based simulation
+tools.
 
-The Wind Energy Toolbox (or ```wetb```, pronounce as wee-tee-bee) is a collection
-of Python scripts that facilitate working with (potentially a lot) of HAWC2,
-HAWCStab2, FAST or other text input based simulation tools.
-
-Note that this toolbox is very much a WIP (work in progress). For example,
-some of the functions in the [prepost](#prepost) module have a similar functions
-in [Hawc2io](wetb/hawc2/Hawc2io.py). These different implementations will be
-merged in due time.
+Note that this toolbox is very much a WIP (work in progress). For
+example, some of the functions in the `prepost <#prepost>`__ module have
+similar counterparts in `Hawc2io <wetb/hawc2/Hawc2io.py>`__. These
+different implementations will be merged in due time.
 
 Both Python2 and Python3 are supported.
 
-# Installation
-
-For a more detailed overview, see: [docs/install](docs/install.md)
-
-
-# Contents of WindEnergyToolbox, [wetb](wetb)
-
-### Overview
-
-- [hawc2](#hawc2)
-- [gtsdf](#gtsdf)
-- [fatigue_tools](#fatigue_tools)
-- [wind](#wind)
-- [dlc](#dlc)
-- [prepost](#prepost)
-- [fast](#fast)
-- [utils](#utils)
-
-### [hawc2](wetb/hawc2)
-- [Hawc2io](wetb/hawc2/Hawc2io.py): Read binary, ascii and flex result files
-- [sel_file](wetb/hawc2/sel_file.py): Read/write *.sel (sensor list) files
-- [htc_file](wetb/hawc2/htc_file.py): Read/write/manipulate htc files
-- [ae_file](wetb/hawc2/ae_file.py): Read AE (aerodynamic blade layout) files
-- [pc_file](wetb/hawc2/pc_file.py): Read PC (profile coefficient) files
-- [shear_file](wetb/hawc2/shear_file.py): Create user defined shear file
-- [at_time_file](wetb/hawc2/at_time_file.py): Read at output_at_time files
-- [log_file](wetb/hawc2/log_file.py): Read and interpret log files
-- [ascii2bin](wetb/hawc2/ascii2bin): Compress HAWC2 ascii result files to binary
-
-### [gtsdf](wetb/gtsdf)
-General Time Series Data Format, a binary hdf5 data format for storing time series data.
-- [gtsdf](wetb/gtsdf/gtsdf.py): read/write/append gtsdf files
-- [unix_time](wetb/gtsdf/unix_time.py): convert between datetime and unix time (seconds since 1/1/1970)
-
-### [fatigue_tools](wetb/fatigue_tools)
-- [fatigue](wetb/fatigue_tools/fatigue.py): Rainflow counting, cycle matrix and equivalent loads
-- [bearing_damage](wetb/fatigue_tools/bearing_damage.py): Calculate a comparable measure of bearing damage
-
-### [wind](wetb/wind)
-- [shear](wetb/wind/shear.py): Calculate and fit wind shear
-
-### [dlc](wetb/dlc)
-Module for working with "Design load cases" (Code independent)
-- [high_level](wetb/dlc/high_level.py) Class for working with the highlevel dlc excell sheet
-
-### [prepost](wetb/prepost)
-Module for creating an arbitrary number of HAWC2 simulations, and optionally
-corresponding execution scripts for a PBS Torque cluster (Linux), simple bash
-(Linux), or Windows batch scripts. A post-processing module is also included
-that calculates statistical parameters, performs rainflow counting for fatigue
-load calculations, and create load envelopes.
+Installation
+============
+
+-  `Simple user <docs/install.md>`__
+-  `Developer/contributor <docs/developer-guide.md>`__
+
+Contents of WindEnergyToolbox, `wetb <wetb>`__
+==============================================
+
+Overview
+~~~~~~~~
+
+-  `hawc2 <#hawc2>`__
+-  `gtsdf <#gtsdf>`__
+-  `fatigue\_tools <#fatigue_tools>`__
+-  `wind <#wind>`__
+-  `dlc <#dlc>`__
+-  `prepost <#prepost>`__
+-  `fast <#fast>`__
+-  `utils <#utils>`__
+
+`hawc2 <wetb/hawc2>`__
+~~~~~~~~~~~~~~~~~~~~~~
+
+-  `Hawc2io <wetb/hawc2/Hawc2io.py>`__: Read binary, ascii and flex
+   result files
+-  `sel\_file <wetb/hawc2/sel_file.py>`__: Read/write \*.sel (sensor
+   list) files
+-  `htc\_file <wetb/hawc2/htc_file.py>`__: Read/write/manipulate htc
+   files
+-  `ae\_file <wetb/hawc2/ae_file.py>`__: Read AE (aerodynamic blade
+   layout) files
+-  `pc\_file <wetb/hawc2/pc_file.py>`__: Read PC (profile coefficient)
+   files
+-  `st\_file <wetb/hawc2/st_file.py>`__: Read ST (structural properties)
+   files
+-  `shear\_file <wetb/hawc2/shear_file.py>`__: Create user defined shear
+   file
+-  `at\_time\_file <wetb/hawc2/at_time_file.py>`__: Read
+   output\_at\_time files
+-  `log\_file <wetb/hawc2/log_file.py>`__: Read and interpret log files
+-  `ascii2bin <wetb/hawc2/ascii2bin>`__: Compress HAWC2 ascii result
+   files to binary
+
+`gtsdf <wetb/gtsdf>`__
+~~~~~~~~~~~~~~~~~~~~~~
+
+General Time Series Data Format, a binary hdf5 data format for storing
+time series data.
+
+-  `gtsdf <wetb/gtsdf/gtsdf.py>`__: read/write/append gtsdf files
+-  `unix\_time <wetb/gtsdf/unix_time.py>`__: convert between datetime
+   and unix time (seconds since 1/1/1970)
+
+`fatigue\_tools <wetb/fatigue_tools>`__
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+-  `fatigue <wetb/fatigue_tools/fatigue.py>`__: Rainflow counting, cycle
+   matrix and equivalent loads
+-  `bearing\_damage <wetb/fatigue_tools/bearing_damage.py>`__: Calculate
+   a comparable measure of bearing damage
+
+`wind <wetb/wind>`__
+~~~~~~~~~~~~~~~~~~~~
+
+-  `shear <wetb/wind/shear.py>`__: Calculate and fit wind shear
+
+`dlc <wetb/dlc>`__
+~~~~~~~~~~~~~~~~~~
+
+Module for working with "Design load cases" (code independent).
+
+-  `high\_level <wetb/dlc/high_level.py>`__: Class for working with the
+   high-level DLC Excel sheet
+
+`prepost <wetb/prepost>`__
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Module for creating an arbitrary number of HAWC2 simulations, and
+optionally corresponding execution scripts for a PBS Torque cluster
+(Linux), simple bash (Linux), or Windows batch scripts. A
+post-processing module is also included that calculates statistical
+parameters, performs rainflow counting for fatigue load calculations,
+and creates load envelopes.
 
 Additional documentation can be found here:
 
-- [Auto-generation of Design Load Cases](docs/howto-make-dlcs.md)
-- [How to use the Statistics DataFrame](docs/using-statistics-df.md)
-- [Generate DLB spreadsheets](docs/generate-spreadsheet.md)
-
-
-### [fast](wetb/fast)
-Tools for working with NREL's FAST code (An aeroelastic computer-aided engineering (CAE) tool for horizontal axis wind turbines)
-- [fast_io](wetb/fast/fast_io.py): Read binary and ascii result files
-
-### [utils](wetb/utils)
-Other functions
-- [geometry](wetb/utils/geometry.py): Different kind of geometry conversion functions
-- [process_exec](wetb/utils/process_exec.py): Run system command in subprocess
-- [timing](wetb/utils/timing.py): Decorators for evaluating execution time of functions
-- [caching](wetb/utils/caching.py): Decorators to create cached (calculate once) functions and properties
-
+-  `Getting started with DLBs <docs/getting-started-with-dlbs.md>`__
+
+   -  `Generate DLB spreadsheets <docs/generate-spreadsheet.md>`__
+   -  `Auto-generation of Design Load Cases <docs/howto-make-dlcs.md>`__
+   -  `House rules for storing results on
+      ``mimer/hawc2sim`` <docs/houserules-mimerhawc2sim.md>`__
+   -  `How to use the Statistics
+      DataFrame <docs/using-statistics-df.md>`__
+
+`fast <wetb/fast>`__
+~~~~~~~~~~~~~~~~~~~~
+
+Tools for working with NREL's FAST code (an aeroelastic computer-aided
+engineering (CAE) tool for horizontal axis wind turbines).
+
+-  `fast\_io <wetb/fast/fast_io.py>`__: Read binary and ascii result
+   files
+
+`utils <wetb/utils>`__
+~~~~~~~~~~~~~~~~~~~~~~
+
+Other functions:
+
+-  `geometry <wetb/utils/geometry.py>`__: Different kinds of geometry
+   conversion functions
+-  `process\_exec <wetb/utils/process_exec.py>`__: Run system commands
+   in a subprocess
+-  `timing <wetb/utils/timing.py>`__: Decorators for evaluating the
+   execution time of functions
+-  `caching <wetb/utils/caching.py>`__: Decorators to create cached
+   (calculate once) functions and properties
+
+.. |build status| image:: https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/badges/master/build.svg
+   :target: https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/commits/master
+.. |coverage report| image:: https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/badges/master/coverage.svg
+   :target: https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/commits/master
diff --git a/README.md b/README.md
index 99450330f46c42d32e59182d82259a3487b0ac44..715fc334c9b296316c7eccc7c94d71d0d376672b 100644
--- a/README.md
+++ b/README.md
@@ -71,10 +71,11 @@ load calculations, and create load envelopes.
 
 Additional documentation can be found here:
 
-- [Auto-generation of Design Load Cases](docs/howto-make-dlcs.md)
-- [How to use the Statistics DataFrame](docs/using-statistics-df.md)
-- [Generate DLB spreadsheets](docs/generate-spreadsheet.md)
-
+- [Getting started with DLBs](docs/getting-started-with-dlbs.md)
+    - [Generate DLB spreadsheets](docs/generate-spreadsheet.md)
+    - [Auto-generation of Design Load Cases](docs/howto-make-dlcs.md)
+    - [House rules for storing results on ```mimer/hawc2sim```](docs/houserules-mimerhawc2sim.md)
+    - [How to use the Statistics DataFrame](docs/using-statistics-df.md)
 
 ### [fast](wetb/fast)
 Tools for working with NREL's FAST code (An aeroelastic computer-aided engineering (CAE) tool for horizontal axis wind turbines)
diff --git a/docs/developer-guide.md b/docs/developer-guide.md
index 8156fe0e38f85985661f008f89a1aa5720004a06..708d90652080cf7007325e5f0064c72a734628f0 100644
--- a/docs/developer-guide.md
+++ b/docs/developer-guide.md
@@ -58,6 +58,7 @@ is a gui integrated into the windows explorer.
 
 
 ## Install Python
+
 For all platforms we recommend that you download and install the Anaconda -
 a professional grade, full blown scientific Python distribution.
 
@@ -157,8 +158,7 @@ Install the necessary Python dependencies using the conda package manager:
 ```
 >> conda install setuptools_scm future h5py pytables pytest pytest-cov nose sphinx blosc pbr paramiko
 >> conda install scipy pandas matplotlib cython xlrd coverage xlwt openpyxl psutil pandoc
->> conda install -c conda-forge pyscaffold pypandoc sshtunnel --no-deps
->> conda install --channel https://conda.anaconda.org/pbrod twine --no-deps
+>> conda install -c conda-forge pyscaffold sshtunnel twine pypandoc --no-deps
 ```
 
 Note that ```--no-deps``` avoids that newer packages from the channel
@@ -167,6 +167,11 @@ channel. Depending on which packages get overwritten, this might brake your
 Anaconda root environment. As such, using ```--no-deps``` should be
 used for safety (especially when operating from the root environment).
 
+Note that:
+
+- With Python 2.7, blosc fails to install.
+- With Python 3.6, twine and pypandoc fail to install.
+
 
 ## Get wetb
 
@@ -195,6 +200,34 @@ above for the ```conda-forge``` channel: it is to avoid that pip will replace
 newer packages compared to the ones as available in the ```Anaconda``` channel.
 
 
+## Run tests
+
+Note that the tests should be executed from a clean repository that is not
+used as a development installation with ```pip install -e .```. For example,
+create a clone of the local git repository in which your development takes
+place, but give the top level folder a different name:
+
+```
+>> git clone WindEnergyToolbox/ wetb_tests
+>> cd wetb_tests
+```
+
+In order to make sure your git repository is clean, the following removes all
+untracked files and undoes all uncommitted changes. WARNING: you will lose all
+untracked files and changes!!
+```
+>> git clean -df && git checkout .
+```
+
+Now we have a clean repository that is not used as a development installation
+directory, and we simply track our own local development git repository.
+Use ```git pull``` to get the latest local commits.
+
+```
+>> python -m pytest --cov=wetb
+```
+
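+To run only the tests of a single sub-package, point pytest to its directory
+(using the ```hawc2``` sub-package as an example):
+
+```
+>> python -m pytest --cov=wetb wetb/hawc2
+```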
+
 ## Contributions
 
 If you make a change in the toolbox, that others can benefit from please make a merge request.
diff --git a/docs/howto-make-dlcs.md b/docs/howto-make-dlcs.md
index e9623a4b733a88ae2c49704d3fad418cb257f069..9e2db0e02878b38789312f8327470d4f0b32a90f 100644
--- a/docs/howto-make-dlcs.md
+++ b/docs/howto-make-dlcs.md
@@ -454,7 +454,7 @@ tags:
 * ```[MannAlfaEpsilon]```
 * ```[MannL]```
 * ```[MannGamma]```
-* ```[tu_seed]```
+* ```[seed]```
 * ```[turb_nr_u]``` : number of grid points in the u direction
 * ```[turb_nr_v]``` : number of grid points in the v direction
 * ```[turb_nr_w]``` : number of grid points in the w direction
diff --git a/setup.cfg b/setup.cfg
index 067c278a30fecfa5f91d5d995563b045048335d3..e287a0de26861609e3a471d3983825bffa2209e3 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -15,6 +15,7 @@ classifiers = Development Status :: 4 - Beta,
               Programming Language :: Python :: 3.3,
               Programming Language :: Python :: 3.4,
 			  Programming Language :: Python :: 3.5,
+			  Programming Language :: Python :: 3.6,
               Environment :: Console,
               Intended Audience :: Education,
               Intended Audience :: Science/Research,
diff --git a/setup.py b/setup.py
index 368d369cda4e8a41f9348e793f5c8f2fdc436fbc..21fe89c4fe556521732d566f8d8f3d033161db7a 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ from setuptools import setup
 
 try:
     from pypandoc import convert_file
-    read_md = lambda f: convert_file(f, 'rst')
+    read_md = lambda f: convert_file(f, 'rst', format='md')
 except ImportError:
     print("warning: pypandoc module not found, could not convert Markdown to RST")
     read_md = lambda f: open(f, 'r').read()
diff --git a/wetb/prepost/Simulations.py b/wetb/prepost/Simulations.py
index 5a24ef095c5a2fb8047e43bdcee8ea6c79273d13..ad22964dcbb270fde671d6143b54772c75741d5e 100755
--- a/wetb/prepost/Simulations.py
+++ b/wetb/prepost/Simulations.py
@@ -553,7 +553,7 @@ def run_local(cases, silent=False, check_log=True):
 
 
 def prepare_launch(iter_dict, opt_tags, master, variable_tag_func,
-                write_htc=True, runmethod='local', verbose=False,
+                write_htc=True, runmethod='none', verbose=False,
                 copyback_turb=True, msg='', silent=False, check_log=True,
                 update_cases=False, ignore_non_unique=False, wine_appendix='',
                 run_only_new=False, windows_nr_cpus=2, qsub='',
@@ -603,11 +603,14 @@ def prepare_launch(iter_dict, opt_tags, master, variable_tag_func,
 
     verbose : boolean, default=False
 
-    runmethod : {'local' (default),'thyra','gorm','local-script','none'}
+    runmethod : {'none' (default),'pbs','linux-script','local',
+                 'local-ram', 'windows-script'}
         Specify how/what to run where. For local, each case in cases is
-        run locally via python directly. If set to 'local-script' a shell
+        run locally via python directly. If set to 'linux-script' a shell
         script is written to run all cases locally sequential. If set to
-        'thyra' or 'gorm', PBS scripts are written to the respective server.
+        'pbs', PBS scripts are written for a cluster (e.g. Gorm/Jess).
+        A Windows batch script is written in case of windows-script, and is
+        used in combination with windows_nr_cpus.
 
     msg : str, default=''
         A descriptive message of the simulation series is saved at
@@ -1010,12 +1013,12 @@ def launch(cases, runmethod='none', verbose=False, copyback_turb=True,
 
     verbose : boolean, default=False
 
-    runmethod : {'none' (default),'jess','gorm','linux-script','local',
+    runmethod : {'none' (default),'pbs','linux-script','local',
                  'local-ram', 'windows-script'}
         Specify how/what to run where. For local, each case in cases is
         run locally via python directly. If set to 'linux-script' a shell
         script is written to run all cases locally sequential. If set to
-        'jess' or 'gorm', PBS scripts are written to the respective server.
+        'pbs', PBS scripts are written for a cluster (e.g. Gorm/Jess).
         A Windows batch script is written in case of windows-script, and is
         used in combination with windows_nr_cpus.
 
@@ -1032,11 +1035,11 @@ def launch(cases, runmethod='none', verbose=False, copyback_turb=True,
         local_shell_script(cases, sim_id)
     elif runmethod == 'windows-script':
         local_windows_script(cases, sim_id, nr_cpus=windows_nr_cpus)
-    elif runmethod in ['jess','gorm']:
+    elif runmethod in ['pbs','jess','gorm']:
         # create the pbs object
-        pbs = PBS(cases, server=runmethod, short_job_names=short_job_names,
+        pbs = PBS(cases, short_job_names=short_job_names, pyenv=pyenv,
                   pbs_fname_appendix=pbs_fname_appendix, qsub=qsub,
-                  verbose=verbose, silent=silent, pyenv=pyenv)
+                  verbose=verbose, silent=silent)
         pbs.wine_appendix = wine_appendix
         pbs.copyback_turb = copyback_turb
         pbs.pbs_out_dir = pbs_out_dir
@@ -1049,8 +1052,8 @@ def launch(cases, runmethod='none', verbose=False, copyback_turb=True,
     elif runmethod == 'none':
         pass
     else:
-        msg = 'unsupported runmethod, valid options: local, local-script, ' \
-              'linux-script, windows-script, local-ram, none'
+        msg = 'unsupported runmethod, valid options: local, linux-script, ' \
+              'windows-script, local-ram, none, pbs'
         raise ValueError(msg)
 
 
@@ -1402,7 +1405,7 @@ class HtcMaster(object):
 
         # create all the necessary directories
         for dirkey in dirkeys:
-            if self.tags[dirkey]:
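+            # only create the directory if the tag actually holds a path (str)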
+            if isinstance(self.tags[dirkey], str):
                 path = os.path.join(self.tags['[run_dir]'], self.tags[dirkey])
                 if not os.path.exists(path):
                     os.makedirs(path)
@@ -1430,7 +1433,7 @@ class HtcMaster(object):
 
             # copy special files with changing file names
             if '[ESYSMooring_init_fname]' in self.tags:
-                if self.tags['[ESYSMooring_init_fname]'] is not None:
+                if isinstance(self.tags['[ESYSMooring_init_fname]'], str):
                     fname_source = self.tags['[ESYSMooring_init_fname]']
                     fname_target = 'ESYSMooring_init.dat'
                     shutil.copy2(model_root + fname_source,
@@ -1922,9 +1925,8 @@ class PBS(object):
     such as the turbulence file and folder, htc folder and others
     """
 
-    def __init__(self, cases, server='gorm', qsub='time', silent=False,
-                 pbs_fname_appendix=True, short_job_names=True, verbose=False,
-                 pyenv='wetb_py3'):
+    def __init__(self, cases, qsub='time', silent=False, pyenv='wetb_py3',
+                 pbs_fname_appendix=True, short_job_names=True, verbose=False):
         """
         Define the settings here. This should be done outside, but how?
         In a text file, paramters list or first create the object and than set
@@ -1953,7 +1955,6 @@ class PBS(object):
             case_id will be used as job name.
 
         """
-        self.server = server
         self.verbose = verbose
         self.silent = silent
         self.pyenv = pyenv
@@ -1962,14 +1963,9 @@ class PBS(object):
         self.wine = self.winebase + 'wine'
         self.winenumactl = self.winebase + 'numactl --physcpubind=$CPU_NR wine'
 
-        if server == 'gorm':
-            self.maxcpu = 1
-            self.secperiter = 0.012
-        elif server == 'jess':
-            self.maxcpu = 1
-            self.secperiter = 0.012
-        else:
-            raise UserWarning('server support only for jess or gorm')
+        # TODO: based on a certain host/architecture you can change these
+        self.maxcpu = 1
+        self.secperiter = 0.012
 
         # determine at runtime if winefix has to be ran
         self.winefix = '  _HOSTNAME_=`hostname`\n'
@@ -4007,11 +4003,11 @@ class Cases(object):
         return stats_df, Leq_df, AEP_df
 
     def statistics(self, new_sim_id=False, silent=False, ch_sel=None,
-                   tags=['[turb_seed]','[windspeed]'], calc_mech_power=False,
+                   tags=['[seed]','[windspeed]'], calc_mech_power=False,
                    save=True, m=[3, 4, 6, 8, 10, 12], neq=None, no_bins=46,
                    ch_fatigue={}, update=False, add_sensor=None,
                    chs_resultant=[], i0=0, i1=None, saveinterval=1000,
-                   csv=True, suffix=None, A=None,
+                   csv=True, suffix=None, A=None, add_sigs={},
                    ch_wind=None, save_new_sigs=False, xlsx=False):
         """
         Calculate statistics and save them in a pandas dataframe. Save also
@@ -4024,7 +4020,7 @@ class Cases(object):
             If defined, only add defined channels to the output data frame.
             The list should contain valid channel names as defined in ch_dict.
 
-        tags : list, default=['[turb_seed]','[windspeed]']
+        tags : list, default=['[seed]','[windspeed]']
             Select which tag values from cases should be included in the
             dataframes. This will help in selecting and identifying the
             different cases.
@@ -4034,6 +4030,10 @@ class Cases(object):
             needs to be calculated. When set to None, ch_fatigue = ch_sel,
             and hence all channels will have a fatigue analysis.
 
+        add_sigs : dict, default={}
+            key/value pairs of channel name and expression. For example,
+            '[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'
+
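+            A minimal sketch of the expected format (channel name and
+            expression are illustrative only)::
+
+                add_sigs = {'stress1':
+                    '[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'}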
         chs_resultant
 
         add_sensor
@@ -4129,6 +4129,8 @@ class Cases(object):
 
         df_dict = None
         add_stats = True
+        # for finding [] tags
+        regex = re.compile('(\\[.*?\\])')
 
         for ii, (cname, case) in enumerate(self.cases.items()):
 
@@ -4165,6 +4167,26 @@ class Cases(object):
             sig_size = self.res.N  # len(self.sig[i0:i1,0])
             new_sigs = np.ndarray((sig_size, 0))
 
+            for name, expr in add_sigs.items():
+                channel_tags = regex.findall(expr)
+                # replace each channel tag with its signal column expression
+                template = "self.sig[:,self.res.ch_dict['{}']['chi']]"
+                for chan in channel_tags:
+                    # first remove the [] from the tag
+                    # FIXME: fails when the same channel occurs more than once
+                    expr = expr.replace(chan, chan[1:-1])
+                    expr = expr.replace(chan[1:-1], template.format(chan[1:-1]))
+
+                sig_add = np.ndarray((len(self.sig[:,0]), 1))
+                sig_add[:,0] = eval(expr)
+
+                ch_dict_new[name] = {}
+                ch_dict_new[name]['chi'] = i_new_chans
+                ch_df_new = add_df_row(ch_df_new, **{'chi':i_new_chans,
+                                                   'ch_name':name})
+                i_new_chans += 1
+                new_sigs = np.append(new_sigs, sig_add, axis=1)
+
             if add_sensor is not None:
                 chi1 = self.res.ch_dict[add_sensor['ch1_name']]['chi']
                 chi2 = self.res.ch_dict[add_sensor['ch2_name']]['chi']
@@ -4305,10 +4327,9 @@ class Cases(object):
                 df_new_sigs = pd.DataFrame(new_sigs, columns=keys)
                 respath = os.path.join(case['[run_dir]'], case['[res_dir]'])
                 resfile = case['[case_id]']
-                fname = os.path.join(respath, resfile + '_postres.h5')
+                fname = os.path.join(respath, resfile + '_postres.csv')
                 print('    saving post-processed res: %s...' % fname, end='')
-                df_new_sigs.to_hdf(fname, 'table', mode='w', format='table',
-                                   complevel=9, complib=self.complib)
+                df_new_sigs.to_csv(fname, sep='\t')
                 print('done!')
                 del df_new_sigs
 
@@ -4892,7 +4913,7 @@ class Cases(object):
 
         return df_AEP
 
-    def stats2dataframe(self, ch_sel=None, tags=['[turb_seed]','[windspeed]']):
+    def stats2dataframe(self, ch_sel=None, tags=['[seed]','[windspeed]']):
         """
         Convert the archaic statistics dictionary of a group of cases to
         a more convienent pandas dataframe format.
@@ -4910,7 +4931,7 @@ class Cases(object):
             defined, only those channels are considered.
             ch_sel[short name] = full ch_dict identifier
 
-        tags : list, default=['[turb_seed]','[windspeed]']
+        tags : list, default=['[seed]','[windspeed]']
             Select which tag values from cases should be included in the
             dataframes. This will help in selecting and identifying the
             different cases.
@@ -5357,7 +5378,7 @@ class MannTurb64(prepost.PBSScript):
         * [MannAlfaEpsilon]
         * [MannL]
         * [MannGamma]
-        * [tu_seed]
+        * [seed]
         * [turb_nr_u]
         * [turb_nr_v]
         * [turb_nr_w]
@@ -5369,7 +5390,7 @@ class MannTurb64(prepost.PBSScript):
 
     def __init__(self, silent=False):
         super(MannTurb64, self).__init__()
-        self.exe = 'time wine mann_turb_x64.exe'
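+        # run the 64-bit Mann turbulence generator in a 64-bit wine prefix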
+        self.exe = 'time WINEARCH=win64 WINEPREFIX=~/.wine wine mann_turb_x64.exe'
         self.winefix = 'winefix\n'
         # PBS configuration
         self.umask = '0003'
@@ -5434,7 +5455,7 @@ class MannTurb64(prepost.PBSScript):
             rpl = (float(case['[MannAlfaEpsilon]']),
                    float(case['[MannL]']),
                    float(case['[MannGamma]']),
-                   int(case['[tu_seed]']),
+                   int(case['[seed]']),
                    int(case['[turb_nr_u]']),
                    int(case['[turb_nr_v]']),
                    int(case['[turb_nr_w]']),
diff --git a/wetb/prepost/dlcdefs.py b/wetb/prepost/dlcdefs.py
index 6443ab5d8a127e8ab4cc8fa451dfa018480e4487..e1a8b99b332ee0a419271d9ffa439f38fefff9f7 100644
--- a/wetb/prepost/dlcdefs.py
+++ b/wetb/prepost/dlcdefs.py
@@ -13,10 +13,9 @@ from future.utils import viewitems
 from future import standard_library
 standard_library.install_aliases()
 
-
-
 import os
 import unittest
+from glob import glob
 
 import pandas as pd
 
@@ -28,7 +27,7 @@ def casedict2xlsx():
     """
 
 
-def configure_dirs(verbose=False):
+def configure_dirs(verbose=False, pattern_master='*_master_*'):
     """
     Automatically configure required directories to launch simulations
     """
@@ -41,7 +40,7 @@ def configure_dirs(verbose=False):
     PROJECT = P_RUN.split(os.sep)[-2]
     sim_id = P_RUN.split(os.sep)[-1]
 
-    master = find_master_file(P_SOURCE)
+    master = find_master_file(P_SOURCE, pattern=pattern_master)
     if master is None:
         raise ValueError('Could not find master file in htc/_master')
     MASTERFILE = master
@@ -62,16 +61,30 @@ def configure_dirs(verbose=False):
 
 
 def find_master_file(proot, htc_dir='htc', master_dir='_master',
-                     master_contains='_master_'):
+                     pattern='*_master_*'):
     """
     Find the master file name. It is assumed that the master file is in the
-    folder _master, under htc, and contains _master_ in the file name.
+    folder _master, under htc, and matches the given pattern (by default it
+    contains _master_ in the file name). If multiple files match the pattern,
+    the last file of the alphabetically sorted list is returned.
+
+    Parameters
+    ----------
+
+    proot : str
+        Path to the project root folder that contains the htc directory.
+
+    htc_dir : str, default: htc
+
+    master_dir : str, default: _master
+
+    pattern : str, default: *_master_*
+
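+    Examples
+    --------
+
+    With the default arguments the search pattern becomes
+    proot/htc/_master/*_master_*, and the full path of the last match of
+    the sorted list is returned. A hypothetical call::
+
+        fmaster = find_master_file('/path/to/demo_model')
+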
     """
 
-    for root, dirs, files in os.walk(os.path.join(proot, htc_dir, master_dir)):
-        for fname in files:
-            if fname.find(master_contains) > -1:
-                return fname
+    fpath_search = os.path.join(proot, htc_dir, master_dir, pattern)
+    files = glob(fpath_search)
+    if len(files) > 0:
+        return sorted(files)[-1]
     return None
 
 
@@ -156,7 +169,7 @@ def tags_dlcs(master):
     master.tags['[Windspeed]'] = 8
     master.tags['[wdir]'] = 0 # used for the user defined wind
     master.tags['[wdir_rot]'] = 0 # used for the windfield rotations
-    master.tags['[tu_seed]'] = 0
+    master.tags['[seed]'] = None
     master.tags['[tu_model]'] = 0
     master.tags['[TI]'] = 0
     master.tags['[Turb base name]'] = 'none'
@@ -350,8 +363,21 @@ def excel_stabcon(proot, fext='xlsx', pignore=None, pinclude=None, sheet=0,
                     elif tags_dict[str(key)].lower() == 'nan':
                         tags_dict[str(key)] = True
 
+            # FIXME: this horrible mess requires a nice and clearly defined
+            # tag spec/naming convention, with a special tag prefix
             if '[Windspeed]' not in tags_dict and '[wsp]' in tags_dict:
                 tags_dict['[Windspeed]'] = tags_dict['[wsp]']
+            # make sure the seed tags from the spreadsheet take precedence
+            # over any possible wetb default tags
+            if '[seed]' in tags_dict:
+                tags_dict['[tu_seed]'] = tags_dict['[seed]']
+            # in case people are using other turbulence tag names in the sheet
+            elif '[tu_seed]' in tags_dict:
+                tags_dict['[seed]'] = tags_dict['[tu_seed]']
+            elif '[turb_seed]' in tags_dict:
+                tags_dict['[seed]'] = tags_dict['[turb_seed]']
+            else:
+                raise KeyError('[seed] should be used as tag for turb. seed')
 
             tags_dict['[Case folder]'] = tags_dict['[Case folder]'].lower()
             tags_dict['[Case id.]'] = tags_dict['[Case id.]'].lower()
diff --git a/wetb/prepost/dlcplots.py b/wetb/prepost/dlcplots.py
index 7ba1f68fb7259396522171e239171eb9b13322ee..97cf0bf8ad8273192bcba61368fd1e8f4c138e2f 100644
--- a/wetb/prepost/dlcplots.py
+++ b/wetb/prepost/dlcplots.py
@@ -44,15 +44,15 @@ plt.rc('font', family='serif')
 plt.rc('xtick', labelsize=10)
 plt.rc('ytick', labelsize=10)
 plt.rc('axes', labelsize=12)
-# do not use tex on Gorm
-if not socket.gethostname()[:2] in ['g-', 'je']:
+# do not use tex on Gorm or Jess
+if not socket.gethostname()[:2] in ['g-', 'je', 'j-']:
     plt.rc('text', usetex=True)
 plt.rc('legend', fontsize=11)
 plt.rc('legend', numpoints=1)
 plt.rc('legend', borderaxespad=0)
 
 
-def merge_sim_ids(post_dirs, sim_ids, post_dir_save=False):
+def merge_sim_ids(sim_ids, post_dirs, post_dir_save=False):
     """
     """
     # map the run_dir to the same order as the post_dirs, labels
@@ -502,7 +502,9 @@ def plot_dlc_stats(df_stats, plot_chans, fig_dir_base, labels=None,
             # when only one of the channels was present, but the set is still
             # complete.
             # FIXME: what if both channels are present?
-            if len(ch_names) > 1 and (lens[0] < 1) or (lens[1] < 1):
+            if len(ch_names) > 1 and (lens[0] < 1):
+                continue
+            elif len(ch_names) > 1 and len(lens) == 2 and lens[1] < 1:
                 continue
 
             print('start plotting:  %s %s' % (str(dlc_name).ljust(7), ch_dscr))
diff --git a/wetb/prepost/dlctemplate.py b/wetb/prepost/dlctemplate.py
index dc5a6f430ba32580bff16c2c01c8e94b213e01f6..beb9d5424b28d5ff539b26b4d3ab1c6d202f0ef4 100644
--- a/wetb/prepost/dlctemplate.py
+++ b/wetb/prepost/dlctemplate.py
@@ -34,16 +34,12 @@ plt.rc('xtick', labelsize=10)
 plt.rc('ytick', labelsize=10)
 plt.rc('axes', labelsize=12)
 # on Gorm tex printing doesn't work
-if socket.gethostname()[:2] == 'g-':
-    RUNMETHOD = 'gorm'
-elif socket.gethostname()[:1] == 'j':
-    RUNMETHOD = 'jess'
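+# on the cluster (Gorm/Jess) simulations are submitted via PBS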
+if socket.gethostname()[:2] in ['g-', 'je', 'j-']:
+    RUNMETHOD = 'pbs'
 else:
     plt.rc('text', usetex=True)
     # set runmethod based on the platform host
-    if platform == "linux" or platform == "linux2":
-        RUNMETHOD = 'local-script'
-    elif platform == "darwin":
+    if platform in ["linux", "linux2", "darwin"]:
         RUNMETHOD = 'linux-script'
     elif platform == "win32":
         RUNMETHOD = 'windows-script'
@@ -131,7 +127,7 @@ def master_tags(sim_id, runmethod='local', silent=False, verbose=False):
     master.tags['[MannAlfaEpsilon]'] = 1.0
     master.tags['[MannL]'] = 29.4
     master.tags['[MannGamma]'] = 3.0
-    master.tags['[tu_seed]'] = 0
+    master.tags['[seed]'] = None
     master.tags['[turb_nr_u]'] = 8192
     master.tags['[turb_nr_v]'] = 32
     master.tags['[turb_nr_w]'] = 32
@@ -268,7 +264,8 @@ def variable_tag_func_mod1(master, case_id_short=False):
 # =============================================================================
 
 def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
-                      runmethod=None, write_htc=True, zipchunks=False):
+                      runmethod=None, write_htc=True, zipchunks=False,
+                      walltime='04:00:00'):
     """
     Launch load cases defined in Excel files
     """
@@ -307,6 +304,7 @@ def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
     master = master_tags(sim_id, runmethod=runmethod, silent=silent,
                          verbose=verbose)
     master.tags['[sim_id]'] = sim_id
+    master.tags['[walltime]'] = walltime
     master.output_dirs.append('[Case folder]')
     master.output_dirs.append('[Case id.]')
 
@@ -336,13 +334,18 @@ def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=False,
     if zipchunks:
         # create chunks
         # sort so we have minimal copying turb files from mimer to node/scratch
+        # note that walltime here is for running all cases assigned to the
+        # respective nodes. It is not walltime per case.
         sorts_on = ['[DLC]', '[Windspeed]']
         create_chunks_htc_pbs(cases, sort_by_values=sorts_on, ppn=20,
-                              nr_procs_series=9, processes=1,
-                              walltime='20:00:00', chunks_dir='zip-chunks-jess')
+                              nr_procs_series=9, walltime='20:00:00',
+                              chunks_dir='zip-chunks-jess')
         create_chunks_htc_pbs(cases, sort_by_values=sorts_on, ppn=12,
-                              nr_procs_series=15, processes=1,
-                              walltime='20:00:00', chunks_dir='zip-chunks-gorm')
+                              nr_procs_series=15, walltime='20:00:00',
+                              chunks_dir='zip-chunks-gorm')
+
+    df = sim.Cases(cases).cases2df()
+    df.to_excel(os.path.join(POST_DIR, sim_id + '.xls'))
 
 
 def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
@@ -385,6 +388,11 @@ def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
     if statistics:
         i0, i1 = 0, -1
 
+        # example for combining signals into a new channel via add_sigs
+#        name = 'stress1'
+#        expr = '[p1-p1-node-002-forcevec-z]*3 + [p1-p1-node-002-forcevec-y]'
+#        add_sigs = {name:expr}
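+        # (pass the dict via the add_sigs argument of cc.statistics() below)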
+
         # in addition, sim_id and case_id are always added by default
         tags = ['[Case folder]']
         add = None
@@ -395,7 +403,7 @@ def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
                                  update=update, saveinterval=saveinterval,
                                  suffix=suffix, save_new_sigs=save_new_sigs,
                                  csv=csv, m=m, neq=None, no_bins=no_bins,
-                                 chs_resultant=[], A=A)
+                                 chs_resultant=[], A=A, add_sigs={})
         # annual energy production
         if AEP:
             df_AEP = cc.AEP(df_stats, csv=csv, update=update, save=True)
@@ -486,7 +494,11 @@ if __name__ == '__main__':
                         'using the 64-bit Mann turbulence box generator. '
                         'This can be usefull if your turbulence boxes are too '
                         'big for running in HAWC2 32-bit mode. Only works on '
-                        'Jess. ')
+                        'Jess.')
+    parser.add_argument('--walltime', default='04:00:00', type=str,
+                        action='store', dest='walltime', help='Queue walltime '
+                        'for each case/pbs file, format: HH:MM:SS. '
+                        'Default: 04:00:00')
     opt = parser.parse_args()
 
     # TODO: use arguments to determine the scenario:
@@ -526,7 +538,7 @@ if __name__ == '__main__':
     if opt.prep:
         print('Start creating all the htc files and pbs_in files...')
         launch_dlcs_excel(sim_id, silent=False, zipchunks=opt.zipchunks,
-                          pbs_turb=opt.pbs_turb)
+                          pbs_turb=opt.pbs_turb, walltime=opt.walltime)
     # post processing: check log files, calculate statistics
     if opt.check_logs or opt.stats or opt.fatigue or opt.envelopeblade or opt.envelopeturbine:
         post_launch(sim_id, check_logs=opt.check_logs, update=False,
diff --git a/wetb/prepost/h2_vs_hs2.py b/wetb/prepost/h2_vs_hs2.py
index cd065486d076d1bd168c9183202cd2e47eaee165..8db023d5be593aef3564f3d3806206e23647d389 100644
--- a/wetb/prepost/h2_vs_hs2.py
+++ b/wetb/prepost/h2_vs_hs2.py
@@ -311,10 +311,10 @@ class Sims(object):
             mt['[hawc2]'] = False
             mt['[output]'] = False
             mt['[copyback_files]'] = ['./*.ind', './*.pwr', './*.log',
-                                      './*.cmb', './*.bea']
+                                      './*.cmb', './*.bea', './*.amp']
             mt['[copyback_frename]'] = [mt['[res_dir]'], mt['[res_dir]'],
                                         mt['[log_dir]'], mt['[res_dir]'],
-                                        mt['[res_dir]']]
+                                        mt['[res_dir]'], mt['[res_dir]']]
             if mt['[hs2_bladedeform_switch]']:
                 mt['[hs2_bladedeform]'] = 'bladedeform'
             else:
@@ -347,32 +347,20 @@ class Sims(object):
         relevant HAWC2 model
         and assume we are in a simulation case of a certain turbine/project
         """
-        (self.P_RUN, self.P_SOURCE, self.PROJECT,
-             self.sim_id, self.P_MASTERFILE,
-             self.MASTERFILE, self.POST_DIR) = dlcdefs.configure_dirs(verbose=verbose)
 
-    def _set_path_config(self, runmethod='here'):
+        tmp = dlcdefs.configure_dirs(verbose=verbose)
+        (self.P_RUN, self.P_SOURCE, self.PROJECT, self.sim_id,
+             self.P_MASTERFILE, self.MASTERFILE, self.POST_DIR) = tmp
+
+    def _set_path_config(self, p_root_run='auto'):
         """
         Set the path configuration into the tags
         """
 
-        self.runmethod = runmethod
-
-        if runmethod == 'here':
+        if p_root_run == 'auto':
             self._set_path_auto_config()
-        elif runmethod in ['local', 'local-script', 'none', 'local-ram']:
-            self.p_root = '/home/dave/SimResults/h2_vs_hs2/'
-        elif runmethod == 'windows-script':
-            self.p_root = '/mnt/D16731/dave/Documents/_SimResults'
-        elif runmethod == 'gorm':
-            self.p_root = '/mnt/hawc2sim/h2_vs_hs2'
-        elif runmethod == 'jess':
-            self.p_root = '/mnt/hawc2sim/h2_vs_hs2'
         else:
-            msg='unsupported runmethod, options: none, local, gorm or opt'
-            raise ValueError(msg)
-
-        if not runmethod == 'here':
+            self.p_root = p_root_run
             self.P_RUN = os.path.join(self.p_root, self.PROJECT, self.sim_id)
 
         self.master.tags['[master_htc_file]'] = self.MASTERFILE
@@ -430,10 +418,11 @@ class Sims(object):
 
         return iter_dict, opt_tags
 
-    def create_inputs(self, iter_dict, opt_tags):
+    def create_inputs(self, iter_dict, opt_tags, runmethod='pbs'):
 
+        self.runmethod = runmethod
         sim.prepare_launch(iter_dict, opt_tags, self.master, self._var_tag_func,
-                           write_htc=True, runmethod=self.runmethod, verbose=False,
+                           write_htc=True, runmethod=runmethod, verbose=False,
                            copyback_turb=False, msg='', update_cases=False,
                            ignore_non_unique=False, run_only_new=False,
                            pbs_fname_appendix=False, short_job_names=False)
@@ -444,25 +433,7 @@ class Sims(object):
         """
         tuning = hs2.ReadControlTuning()
         tuning.read_parameters(fpath)
-
-        tune_tags = {}
-
-        tune_tags['[pi_gen_reg1.K]'] = tuning.pi_gen_reg1.K
-
-        tune_tags['[pi_gen_reg2.I]'] = tuning.pi_gen_reg2.I
-        tune_tags['[pi_gen_reg2.Kp]'] = tuning.pi_gen_reg2.Kp
-        tune_tags['[pi_gen_reg2.Ki]'] = tuning.pi_gen_reg2.Ki
-
-        tune_tags['[pi_pitch_reg3.Kp]'] = tuning.pi_pitch_reg3.Kp
-        tune_tags['[pi_pitch_reg3.Ki]'] = tuning.pi_pitch_reg3.Ki
-        tune_tags['[pi_pitch_reg3.K1]'] = tuning.pi_pitch_reg3.K1
-        tune_tags['[pi_pitch_reg3.K2]'] = tuning.pi_pitch_reg3.K2
-
-        tune_tags['[aero_damp.Kp2]'] = tuning.aero_damp.Kp2
-        tune_tags['[aero_damp.Ko1]'] = tuning.aero_damp.Ko1
-        tune_tags['[aero_damp.Ko2]'] = tuning.aero_damp.Ko2
-
-        return tune_tags
+        return tuning.parameters2tags()
 
     def post_processing(self, statistics=True, resdir=None, complib='blosc',
                         calc_mech_power=False):
@@ -485,23 +456,8 @@ class Sims(object):
         # load the file saved in post_dir
         cc = sim.Cases(post_dir, self.sim_id, rem_failed=False, complib=complib)
 
-        if resdir is None:
-            # we keep the run_dir as defined during launch
-            run_root = None
-        elif resdir in ['local', 'local-script', 'none', 'local-ram']:
-            run_root = '/home/dave/SimResults'
-        elif resdir == 'windows-script':
-            run_root = '/mnt/D16731/dave/Documents/_SimResults'
-        elif resdir == 'gorm':
-            run_root = '/mnt/hawc2sim/h2_vs_hs2'
-        elif resdir == 'jess':
-            run_root = '/mnt/hawc2sim/h2_vs_hs2'
-        else:
-            run_root = None
-            cc.change_results_dir(resdir)
-
-        if isinstance(run_root, str):
-            forcedir = os.path.join(run_root, self.PROJECT, self.sim_id)
+        if isinstance(resdir, str):
+            forcedir = os.path.join(resdir, self.PROJECT, self.sim_id)
             cc.change_results_dir(forcedir)
 
         cc.post_launch()
diff --git a/wetb/prepost/hawcstab2.py b/wetb/prepost/hawcstab2.py
index 1129dc7e0daf1299454975527580ef9ac5e0f3be..ec1c7020e1ec19c6284df9b0e192472b769e6df5 100644
--- a/wetb/prepost/hawcstab2.py
+++ b/wetb/prepost/hawcstab2.py
@@ -28,36 +28,40 @@ class dummy(object):
     def __init__(self, name='dummy'):
         self.__name__ = name
 
+regex_units = re.compile('(\\[.*?\\])')
 
-def ReadFileHAWCStab2Header(fname, widths):
+
+def ReadFileHAWCStab2Header(fname):
     """
     Read a file with a weird HAWCStab2 header that starts with a #, and
     includes the column number and units between square brackets.
     """
 
-    regex = re.compile('(\\[.*?\\])')
-
-    def _newformat(fname):
-        df = pd.read_fwf(fname, header=0, widths=[20]*15)
-        # find all units
-        units = regex.findall(''.join(df.columns))
-        df.columns = [k[:-2].replace('#', '').strip() for k in df.columns]
-        return df, units
-
-    def _oldformat(fname):
-        df = pd.read_fwf(fname, header=0, widths=[14]*13)
-        # find all units
-        units = regex.findall(''.join(df.columns))
-        df.columns = [k.replace('#', '').strip() for k in df.columns]
+    def _read(fname, header=0, widths=[20]*15, skipfooter=0):
+        df = pd.read_fwf(fname, header=header, widths=widths,
+                         skipfooter=skipfooter)
+        units = regex_units.findall(''.join(df.columns))
         return df, units
 
     with open(fname) as f:
         line = f.readline()
 
-    if len(line) > 200:
-        return _newformat(fname)
+    # when gradients are included in the output
+    if len(line) > 800:
+        df, units = _read(fname, header=1, widths=[30]*27)
+        # column name has the name, unit and column number in it...
+        df.columns = [k[:-2].replace('#', '').strip() for k in df.columns]
+        return df, units
+    elif len(line) > 200:
+        df, units = _read(fname, header=0, widths=[20]*15)
+        # column name has the name, unit and column number in it...
+        df.columns = [k[:-2].replace('#', '').strip() for k in df.columns]
+        return df, units
+    # older versions of HS2 seem to have two fewer columns
     else:
-        return _oldformat(fname)
+        df, units = _read(fname, header=0, widths=[14]*13)
+        df.columns = [k.replace('#', '').strip() for k in df.columns]
+        return df, units
 
 
 class InductionResults(object):
@@ -99,7 +103,7 @@ class results(object):
         return res
 
     def load_pwr_df(self, fname):
-        return ReadFileHAWCStab2Header(fname, [20]*15)
+        return ReadFileHAWCStab2Header(fname)
 
     def load_cmb(self, fname):
         cmb = np.loadtxt(fname)
@@ -149,6 +153,32 @@ class results(object):
         self.ind = InductionResults()
         self.ind.read(fname)
 
+    def load_amp(self, fname):
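+        """Read a HAWCStab2 .amp result file.
+
+        Returns the DataFrame and the list of units found in the header.
+        Columns that repeat per mode (U_x, u_y, phase and theta) get the
+        mode number appended to their name.
+        """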
+
+        with open(fname) as f:
+            line = f.readline()
+
+        width = 14
+        nrcols = int((len(line)-1)/width)
+        # the first column has one extra character
+        # col nr1: rotor speed, col nr2: radius
+        widths = [width+1] + [width]*(nrcols-1)
+        # last line is empty
+        df = pd.read_fwf(fname, header=2, widths=widths, skipfooter=1)
+        units = regex_units.findall(''.join(df.columns))
+        # no column number in the column name
+        # since U_x, u_y, phase and theta will be repeated as many times as
+        # there are modes, add the mode number in the column name
+        columns = [k.replace('#', '').strip() for k in df.columns]
+        nrmodes = int((len(columns) - 2 )/6)
+        for k in range(nrmodes):
+            for i in range(6):
+                j = 2+k*6+i
+                columns[j] = columns[j].split('.')[0] + ' nr%i' % (k+1)
+        df.columns = columns
+
+        return df, units
+
     def load_operation(self, fname):
 
         operation = np.loadtxt(fname, skiprows=1)
@@ -341,12 +371,42 @@ class ReadControlTuning(object):
                 else:
                     self.parse_line(line, controller)
 
-        # set some parameters to zero for the linear case
+        # set some parameters to zero for the linear case, or when aerodynamic
+        # gain scheduling is not used
         if not hasattr(self.pi_pitch_reg3, 'K2'):
             setattr(self.pi_pitch_reg3, 'K2', 0.0)
+        if not hasattr(self.aero_damp, 'Kp2'):
+            setattr(self.aero_damp, 'Kp2', 0.0)
+        if not hasattr(self.aero_damp, 'Ko1'):
+            setattr(self.aero_damp, 'Ko1', 0.0)
         if not hasattr(self.aero_damp, 'Ko2'):
             setattr(self.aero_damp, 'Ko2', 0.0)
 
+    def parameters2tags(self):
+        """Convert the tuning parameters into a dictionary whos keys are
+        compatible with tag names in a HAWC2 master file.
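+
+        A short usage sketch (the tuning file name is illustrative only)::
+
+            tuning = ReadControlTuning()
+            tuning.read_parameters('controller_tuning.txt')
+            tune_tags = tuning.parameters2tags()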
+        """
+
+        tune_tags = {}
+
+        tune_tags['[pi_gen_reg1.K]'] = self.pi_gen_reg1.K
+
+        tune_tags['[pi_gen_reg2.I]'] = self.pi_gen_reg2.I
+        tune_tags['[pi_gen_reg2.Kp]'] = self.pi_gen_reg2.Kp
+        tune_tags['[pi_gen_reg2.Ki]'] = self.pi_gen_reg2.Ki
+        tune_tags['[pi_gen_reg2.Kd]'] = 0.0
+
+        tune_tags['[pi_pitch_reg3.Kp]'] = self.pi_pitch_reg3.Kp
+        tune_tags['[pi_pitch_reg3.Ki]'] = self.pi_pitch_reg3.Ki
+        tune_tags['[pi_pitch_reg3.K1]'] = self.pi_pitch_reg3.K1
+        tune_tags['[pi_pitch_reg3.K2]'] = self.pi_pitch_reg3.K2
+
+        tune_tags['[aero_damp.Kp2]'] = self.aero_damp.Kp2
+        tune_tags['[aero_damp.Ko1]'] = self.aero_damp.Ko1
+        tune_tags['[aero_damp.Ko2]'] = self.aero_damp.Ko2
+
+        return tune_tags
+
 
 if __name__ == '__main__':
 
diff --git a/wetb/prepost/misc.py b/wetb/prepost/misc.py
index d95b2b4fdc30686fada422ccf5930fa875e23b66..c6ce5759e02e3ad45abb38a248cc7d2443a2b3ea 100644
--- a/wetb/prepost/misc.py
+++ b/wetb/prepost/misc.py
@@ -24,6 +24,7 @@ import sys
 import shutil
 import unittest
 import pickle
+import re
 
 import numpy as np
 import scipy as sp
@@ -786,7 +787,24 @@ def find_tags(fname):
     """
     Find all unqiue tags in a text file.
     """
-    pass
+
+    with open(fname, 'r') as f:
+        lines = f.readlines()
+
+    # regex for finding all tags in a line
+    regex = re.compile('(\\[.*?\\])')
+    tags_in_master = {}
+
+    for i, line in enumerate(lines):
+        # are there any tags on this line? Ignore comment AND label section
+        tags = regex.findall(line.split(';')[0].split('#')[0])
+        for tag in tags:
+            try:
+                tags_in_master[tag].append(i)
+            except KeyError:
+                tags_in_master[tag] = [i]
+
+    return tags_in_master
 
 
 def read_mathematica_3darray(fname, shape=None, data=None, dtype=None):
diff --git a/wetb/prepost/mplutils.py b/wetb/prepost/mplutils.py
index 02e60e24a26c64b18b2f94ffeb9023f5e79a4ff3..08dcf1f990065ac67c932b4d1fc1de792c1ef374 100644
--- a/wetb/prepost/mplutils.py
+++ b/wetb/prepost/mplutils.py
@@ -24,7 +24,11 @@ import numpy as np
 import matplotlib as mpl
 # use a headless backend
 from matplotlib.backends.backend_agg import FigureCanvasAgg as FigCanvas
-#import wafo
+# wafo is an optional dependency, only required for non-default PSD peak marking
+try:
+    import wafo
+except ImportError:
+    pass
 
 
 def make_fig(nrows=1, ncols=1, figsize=(12,8), dpi=120):
@@ -117,6 +121,15 @@ def one_legend(*args, **kwargs):
     """First list all the axes as arguments. Any keyword arguments will be
     passed on to ax.legend(). Legend will be placed on the last axes that was
     passed as an argument.
+
+    Parameters
+    ----------
+
+    *args : matplotlib axes
+        The axes whose handles and labels are gathered into a single legend.
+
+    **kwargs
+        Passed on to ax.legend().
+
+    Returns
+    -------
+
+    legend : matplotlib legend object
+
     """
     # or more general: not only simple line plots (bars, hist, ...)
     objs = []
@@ -267,7 +280,8 @@ def match_yticks(ax1, ax2, nr_ticks_forced=None, extend=False):
 
 def time_psd(results, labels, axes, alphas=[1.0, 0.7], colors=['k-', 'r-'],
              NFFT=None, res_param=250, f0=0, f1=None, nr_peaks=10, min_h=15,
-             mark_peaks=True):
+             mark_peaks=False, xlabels=['frequency [Hz]', 'time [s]'],
+             ypos_peaks=[0.04, 0.9], ypos_peaks_delta=0.12):
     """
     Plot time series and the corresponding PSD of the channel.
 
@@ -297,7 +311,6 @@ def time_psd(results, labels, axes, alphas=[1.0, 0.7], colors=['k-', 'r-'],
     """
 
     axes = axes.ravel()
-    ypos = [0.04, 0.90]
 
     for i, res in enumerate(results):
         time, data = res
@@ -328,14 +341,15 @@ def time_psd(results, labels, axes, alphas=[1.0, 0.7], colors=['k-', 'r-'],
         if mark_peaks:
             axes[0] = peaks(axes[0], freqs[i0:i1], Pxx[i0:i1], fn_max=f1,
                             nr_peaks=nr_peaks, col_line=col[:1],
-                            ypos_delta=0.04, bbox_alpha=0.5, col_text='w',
-                            ypos_mean=ypos[i], min_h=min_h)
+                            ypos_delta=ypos_peaks_delta, bbox_alpha=0.5,
+                            ypos_mean=ypos_peaks[i], min_h=min_h, col_text='w')
         # plotting time series
         axes[1].plot(time, data, col, label=label, alpha=alpha)
 
     axes[0].set_yscale('log')
-    axes[0].set_xlabel('frequency [Hz]')
-    axes[1].set_xlabel('time [s]')
+    if isinstance(xlabels, list):
+        axes[0].set_xlabel(xlabels[0])
+        axes[1].set_xlabel(xlabels[1])
     for ax in axes:
         leg = ax.legend(loc='best', borderaxespad=0)
         # leg is None when no labels have been defined
diff --git a/wetb/prepost/simchunks.py b/wetb/prepost/simchunks.py
index 4e4d560313bf88756054d24e0dd5981a4bca617b..0fc2f8eff7501e3ccc26cb14de71518a6c2df8f1 100644
--- a/wetb/prepost/simchunks.py
+++ b/wetb/prepost/simchunks.py
@@ -32,10 +32,9 @@ import pandas as pd
 from wetb.prepost.Simulations import Cases
 
 
-def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20,
-                          nr_procs_series=9, processes=1, queue='workq',
-                          walltime='24:00:00', chunks_dir='zip-chunks-jess',
-                          pyenv='wetb_py3', i0=0):
+def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20, i0=0,
+                          nr_procs_series=9, queue='workq', pyenv='wetb_py3',
+                          walltime='24:00:00', chunks_dir='zip-chunks-jess'):
     """Group a large number of simulations htc and pbs launch scripts into
     different zip files so we can run them with find+xargs on various nodes.
     """
@@ -187,7 +186,7 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20,
         # =====================================================================
         # activate the python environment
         pbs += 'echo "activate python environment %s"\n' % pyenv
-        pbs += 'source activate %s\n' % pyenv
+        pbs += 'source /home/python/miniconda3/bin/activate %s\n' % pyenv
         # sometimes activating an environment fails due to a FileExistsError
         # is this because it is activated at the same time on another node?
         # check twice if the environment got activated for real
@@ -396,6 +395,10 @@ def create_chunks_htc_pbs(cases, sort_by_values=['[Windspeed]'], ppn=20,
     except (FileExistsError, OSError):
         pass
 
+    fpath = os.path.join(df['[run_dir]'].iloc[0], 'pbs_out_chunks')
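+    # make sure the directory for the chunk pbs_out files exists in run_dir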
+    if not os.path.exists(fpath):
+        os.makedirs(fpath)
+
     df_iter = chunker(df, nr_procs_series*ppn)
     sim_id = df['[sim_id]'].iloc[0]
     run_dir = df['[run_dir]'].iloc[0]
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx b/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx
index 2725ebf94c486118c288c7b1bf73bfba9fb79afd..6b4bb2d9fe4f7b5b2e347ecb8da55e2cd3695669 100755
Binary files a/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx and b/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx differ
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p
index 79b15e566904b8e0b365b70c46c31bea42f99ea4..995b9af1d2669b3ed8d57c578ebb08c151d988c3 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p
+++ b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p
@@ -35,7 +35,7 @@ echo "------------------------------------------------------------------------"
 echo "EXECUTION"
 echo "------------------------------------------------------------------------"
 
-time wine mann_turb_x64.exe turb_s100_10ms 1.000000 29.400000 3.000000 100 8192 32 32 0.7812 6.5000 6.5000 1
+time WINEARCH=win64 WINEPREFIX=~/.wine wine mann_turb_x64.exe turb_s100_10ms 1.000000 29.400000 3.000000 100 8192 32 32 0.7812 6.5000 6.5000 1
 ### wait for jobs to finish
 wait
 
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s101_11ms.p b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s101_11ms.p
index 7997dc29dc535d9a5136282e8d8744b4d876a0d8..6372504ddce65b8599f6d4ee196d451f8411af30 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s101_11ms.p
+++ b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s101_11ms.p
@@ -35,7 +35,7 @@ echo "------------------------------------------------------------------------"
 echo "EXECUTION"
 echo "------------------------------------------------------------------------"
 
-time wine mann_turb_x64.exe turb_s101_11ms 1.000000 29.400000 3.000000 100 8192 32 32 0.8594 6.5000 6.5000 1
+time WINEARCH=win64 WINEPREFIX=~/.wine wine mann_turb_x64.exe turb_s101_11ms 1.000000 29.400000 3.000000 100 8192 32 32 0.8594 6.5000 6.5000 1
 ### wait for jobs to finish
 wait
 
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote.pkl b/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote.pkl
index 424870abe4d235d40b0bc86884127247ace71cde..76f3458974d302669780c4e764e56dec5a7d545b 100644
Binary files a/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote.pkl and b/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote.pkl differ
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt b/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt
index eebe622e1cd2307810c65f8ed345bec9f81ebe5d..c9bef2a6ba10c1291db7d183aa091af4f542d6ec 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt
+++ b/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt
@@ -50,6 +50,7 @@
                   [pbs_in_dir] : pbs_in/dlc01_demos/ 
                  [pbs_out_dir] : pbs_out/dlc01_demos/
                      [res_dir] : res/dlc01_demos/    
+                        [seed] : 0                   
                    [shear_exp] : 0                   
                    [staircase] : False               
                    [t flap on] : -1                  
@@ -106,6 +107,7 @@
                   [pbs_in_dir] : pbs_in/dlc01_demos/ 
                  [pbs_out_dir] : pbs_out/dlc01_demos/
                      [res_dir] : res/dlc01_demos/    
+                        [seed] : 0                   
                    [shear_exp] : 0                   
                    [staircase] : False               
                    [t flap on] : -1                  
@@ -162,6 +164,7 @@
                   [pbs_in_dir] : pbs_in/dlc01_demos/ 
                  [pbs_out_dir] : pbs_out/dlc01_demos/
                      [res_dir] : res/dlc01_demos/    
+                        [seed] : 100                 
                    [shear_exp] : 0                   
                    [staircase] : False               
                    [t flap on] : -1                  
@@ -218,6 +221,7 @@
                   [pbs_in_dir] : pbs_in/dlc01_demos/ 
                  [pbs_out_dir] : pbs_out/dlc01_demos/
                      [res_dir] : res/dlc01_demos/    
+                        [seed] : 100                 
                    [shear_exp] : 0                   
                    [staircase] : False               
                    [t flap on] : -1                  
diff --git a/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip b/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip
index 3c02a1d7a11acaeff10fdae72416c4856fcbc4bc..653dd7fa920d3d0154225072507087b8fbf3cfa0 100644
Binary files a/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip and b/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip differ
diff --git a/wetb/prepost/windIO.py b/wetb/prepost/windIO.py
index cb267752a3ea2974d0377ab80be53b81672d71f4..03b8e9a219236154d52ae98031749215b89f743d 100755
--- a/wetb/prepost/windIO.py
+++ b/wetb/prepost/windIO.py
@@ -98,8 +98,8 @@ class LogFile(object):
         self.err_init[' Error opening PC data file'] = len(self.err_init)
         #  *** ERROR *** error reading mann turbulence
         self.err_init[' *** ERROR *** error readin'] = len(self.err_init)
-        #  *** INFO *** The DLL subroutine
-        self.err_init[' *** INFO *** The DLL subro'] = len(self.err_init)
+#        #  *** INFO *** The DLL subroutine
+#        self.err_init[' *** INFO *** The DLL subro'] = len(self.err_init)
         #  ** WARNING: FROM ESYS ELASTICBAR: No keyword
         self.err_init[' ** WARNING: FROM ESYS ELAS'] = len(self.err_init)
         #  *** ERROR *** DLL ./control/killtrans.dll could not be loaded - error!
@@ -643,6 +643,9 @@ class LoadResults(ReadHawc2):
         of spaces, use colon (;) to seperate the different commands.
 
         THIS IS STILL A WIP
+
+        see also issue #11:
+        https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/issues/11
         """
 
         index = {}