diff --git a/.gitignore b/.gitignore
index 0395feace30cb972b1648e59acf909aa13b426e8..6d3feb6cc699a5af67da7cbd28e2fcf98554c058 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,17 +3,19 @@ dist/*
 *.pyc
 *.pyd
 *egg-info*
-
+*.so
 .eggs
 doctrees
-/docs/_build
-/.project
-/.pydevproject
-/.settings/org.eclipse.core.resources.prefs
-/wetb/gtsdf/tests/tmp
-/wetb/dlc/tests/test_files/res_all
-/wetb/dlc/tests/test_files/res2_all
-/wetb/hawc2/ascii2bin/ascii2bin_dist
-/wetb/hawc2/tests/test_files/htcfiles/tmp.htc
-/wetb/hawc2/ascii2bin/tests/test_files/Hawc2ascii_bin.sel
-/wetb/hawc2/ascii2bin/tests/test_files/Hawc2ascii_bin.dat
+docs/_build
+.project
+.pydevproject
+.settings/org.eclipse.core.resources.prefs
+wetb/gtsdf/tests/tmp
+wetb/dlc/tests/test_files/res_all
+wetb/dlc/tests/test_files/res2_all
+wetb/hawc2/ascii2bin/ascii2bin_dist
+wetb/hawc2/tests/test_files/htcfiles/tmp.htc
+wetb/hawc2/ascii2bin/tests/test_files/Hawc2ascii_bin.sel
+wetb/hawc2/ascii2bin/tests/test_files/Hawc2ascii_bin.dat
+wetb/prepost/tests/data/demo_dlc/remote*
+
diff --git a/README.md b/README.md
index 638cadaf0d58f942e92134df763ad96cf358b2e6..b8f2fbbb678a74baacf870f4f16355e27f5c5cc1 100644
--- a/README.md
+++ b/README.md
@@ -11,6 +11,25 @@ in [Hawc2io](wetb/hawc2/Hawc2io.py). These different implementations will be
 merged in due time.
 
 
+# How to create HAWC2 DLB's and run them on a cluster
+
+The process of generating, running and post-processing a design load basis (DLB)
+of HAWC2 simulations on a DTU Wind Energy cluster is outlined in more detail
+in the documentation:
+
+* [Auto-generation of Design Load Cases](docs/howto-make-dlcs.md)
+* [House rules mimer/hawc2sim and HAWC2 folder structure](docs/houserules-mimerhawc2sim.md)
+* [Generate DLB spreadsheets](docs/generate-spreadsheet.md)
+* [How to use the Statistics DataFrame](docs/using-statistics-df.md)
+
+You can also use Pdap for post-processing, which includes an MS Word report
+generator based on a full DLB, a GUI for easy plotting of HAWC2 result files,
+and a Python scripting interface:
+
+* [Pdap](http://www.hawc2.dk/Download/Post-processing-tools/Pdap)
+* [Pdap report/docs](http://orbit.dtu.dk/en/publications/post-processing-of-design-load-cases-using-pdap%28827c432b-cf7d-44eb-899b-93e9c0648ca5%29.html)
+
+
 # Works with Python 2 and Python 3
 
 This module is tested for Python 2 and 3 compatibility, and works on both
@@ -40,7 +59,9 @@ in Python 2.7 by default. You can also write code that is compatible with both
 
 * [pandas](http://pandas.pydata.org/)
 
-* xlrd
+* xlrd and xlwt from [python-excel](http://www.python-excel.org/)
+
+* [openpyxl](http://openpyxl.readthedocs.org/en/default/)
 
 * h5py
 
@@ -61,12 +82,19 @@ Detailed installation instructions, including how to install Python from scratch
 are described in the [detailed installation manual](docs/install-manual-detailed.md).
 
 
-If you know what you are doing, you can install as follows:
+If you know what you are doing, you can install as a package as follows:
 
 ```
-python setup.py
+python setup.py install
 ```
 
+Or install it in development mode from your working directory:
+
+```
+pip install -e ./
+```
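+
+To quickly verify that the installation works, you could for example try to
+import the package (a minimal check; it assumes you run it in the same Python
+environment you installed into):
+
+```
+python -c "import wetb"
+```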
+
+
 Or create a binary wheel distribution package with:
 
 ```
diff --git a/docs/configure-wine.md b/docs/configure-wine.md
new file mode 100644
index 0000000000000000000000000000000000000000..b3d2225cbc3bace5306bd0241259a740eea405ef
--- /dev/null
+++ b/docs/configure-wine.md
@@ -0,0 +1,54 @@
+
+Configure Wine for HAWC2
+------------------------
+
+> Note that the steps described here are executed automatically by the
+configuration script
+[```config-wine-hawc2.sh```](https://gitlab.windenergy.dtu.dk/toolbox/pbsutils/blob/master/config-wine-hawc2.sh)
+in ```pbsutils```.
+
+
+You will also need to configure wine and place the HAWC2 executables in a
+directory that wine knows about. First, activate the correct wine environment
+by typing the following in a shell in your Gorm home directory (which you can
+access via ssh (Linux, Mac) or PuTTY (MS Windows)):
+
+```
+g-000 $ WINEARCH=win32 WINEPREFIX=~/.wine32 wine test.exe
+```
+
+Optionally, you can also make an alias (a shorthand for a longer, more complex
+command). In the ```.bashrc``` file in your home directory
+(```/home/$USER/.bashrc```), add at the bottom of the file:
+
+```
+alias wine32='WINEARCH=win32 WINEPREFIX=~/.wine32 wine'
+```
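+
+With the alias in place (after sourcing the file or logging in again), HAWC2
+can be run through wine with, for example (assuming the executables have been
+copied to ```c:\bin``` as described below):
+
+```
+g-000 $ wine32 hawc2-latest htc/some-input-file.htc
+```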
+
+Add a folder called ```bin``` to your wine system's PATH so that we can copy
+all the HAWC2 executables into it:
+
+```
+WINEDIRNAME=".wine32"
+printf 'REGEDIT4\n[HKEY_CURRENT_USER\\Environment]\n"PATH"="c:\\bin"' >> ./tmp.reg
+WINEARCH=win32 WINEPREFIX=~/$WINEDIRNAME wine regedit ./tmp.reg
+rm ./tmp.reg
+```
+
+Now copy all the HAWC2 executables and DLL's (including the license manager)
+to your wine directory. All the required executables, DLL's and the license
+manager are located at ```/home/MET/hawc2exe```. The following command will do
+the copying:
+
+```
+g-000 $ cp /home/MET/hawc2exe/* /home/$USER/.wine32/drive_c/bin/
+```
+
+Notice that the HAWC2 executable names are ```hawc2-latest.exe```,
+```hawc2-118.exe```, etc. By default the latest version will be used and the
+user does not need to specify this. However, when you need to compare different
+versions you can easily do so by specifying which case should be run with which
+executable. The file ```hawc2-latest.exe``` will always be the latest HAWC2
+version at ```/home/MET/hawc2exe/```. When a new HAWC2 is released you can
+simply copy all the files from there again to update.
+
diff --git a/docs/generate-spreadsheet.md b/docs/generate-spreadsheet.md
new file mode 100644
index 0000000000000000000000000000000000000000..ff87c08675324d45a2836853a08e5b1746dd0fa3
--- /dev/null
+++ b/docs/generate-spreadsheet.md
@@ -0,0 +1,47 @@
+Auto-generation of DLB Spreadsheets
+===================================
+
+Introduction
+------------
+
+This manual explains how to automatically generate the set of spreadsheets that
+defines a DLB and is required as input to the pre-processor.
+
+This tool comes in handy in the following scenarios:
+* a DLB for a new turbine needs to be generated;
+* a different wind turbine class needs to be evaluated;
+* a new parameter needs to be included in the htc file;
+* different parameter variations are required, e.g. a different wind speed range or a different number of turbulence seeds.
+
+The generator of the cases uses an input spreadsheet where the cases are defined
+in a more compact way.
+The tool is based on the "tags" concept that is used for the generation of the htc files.
+
+Main spreadsheet
+----------------
+
+A main spreadsheet is used to define all the DLCs of the DLB. The file specifies the tags that are then required in the htc files.
+
+The file has:
+* a Main sheet where some wind turbine parameters are defined, the tags are initialized, and the definitions of turbulence and gusts are given.
+* a series of other sheets, each defining a DLC. In these sheets the tags that change in that DLC are defined.
+
+The tags are divided into three categories:
+* Constants (C). Constants are tags that do not change in a DLC, e.g. simulation time, output format, ...;
+* Variables (V). Variables are tags that define the number of cases in a DLC through their combinations, e.g. wind speed, number of turbulence seeds, wind direction, ...;
+* Functions (F). Functions are tags that depend on other tags through an expression, e.g. turbulence intensity, case name, ....
+
+In each sheet the type of tag is defined in the line above the tag by typing one of the letters C, V, or F.
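+
+For illustration only, the top of a DLC sheet could hence look like the
+following sketch (the tag names here are hypothetical and will differ per
+turbine and DLB):
+
+```
+C              V         F
+[time stop]    [wsp]     [Case id.]
+```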
+
+Generate the files
+------------------
+
+To generate the files defining the different DLCs the following lines need to be executed:
+    
+    export PATH=/home/python/miniconda3/bin:$PATH
+    source activate wetb_py3
+    python /home/MET/repositories/toolbox/WindEnergyToolbox/wetb/prepost/GenerateDLCs.py --folder=DLCs 
+    
+The first two lines activate the virtual environment. The third calls the routine *GenerateDLCs.py* that generates the files.
+The routine should be called from the folder *htc*, where the master spreadsheet *DLCs.xlsx* also needs to be located.
+The generated files are placed in the folder *DLCs*.
diff --git a/docs/houserules-mimerhawc2sim.md b/docs/houserules-mimerhawc2sim.md
new file mode 100644
index 0000000000000000000000000000000000000000..6b5e2e91932f9a1eb097c122bbd0d3abb9725d48
--- /dev/null
+++ b/docs/houserules-mimerhawc2sim.md
@@ -0,0 +1,135 @@
+
+House Rules for ```mimer/hawc2sim``` and HAWC2 model folder structure
+=====================================================================
+
+
+Objectives
+----------
+
+* Re-use turbulence boxes (save disk space)
+* Find each others simulations, review, re-run
+* Find working examples of simulations, DLB's
+* Avoid running the same DLB, simulations more than once
+
+* Disk usage quota review: administrators will create an overview of disk usage
+per turbine and user.
+
+
+Basic structure
+---------------
+
+The HAWC2 simulations are located on the data capacitor
+[mimer](http://mimer.risoe.dk/mimerwiki), at the following address:
+
+```
+# on Windows, use the following address when mapping a new network drive
+\\mimer\hawc2sim
+
+# on Linux you can use sshfs or mount -t cifs
+//mimer.risoe.dk/hawc2sim
+```
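+
+On Linux you could, for example, mount the share over ```sshfs``` via Gorm
+(a sketch only; the local mount point is your choice):
+
+```
+mkdir -p ~/mnt/hawc2sim
+sshfs $USER@gorm.risoe.dk:/mnt/mimer/hawc2sim ~/mnt/hawc2sim
+```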
+
+The following structure is currently used for this ```hawc2sim``` directory:
+* turbine model (e.g. DTU10MW, NREL5MW, etc)
+    * set ID: 2 alphabetic characters followed by 4 numbers (e.g. AA0001)
+* letters are task/project oriented, numbers are case oriented
+
+For example:
+* DTU10MW
+    * AA0001
+    * AA0002
+    * AB0001
+    * log_AA.xlsx
+    * log_BB.xlsx
+    * log_overview.xlsx
+* NREL5MW
+    * AA0001
+    * AA0002
+    * BA0001
+    * log_AA.xlsx
+    * log_BB.xlsx
+    * log_overview.xlsx
+
+
+House rules
+-----------
+
+* Create a new turbine model folder when a new size of the turbulence box is
+required (i.e. when the rotor size is different)
+* One "set ID" refers to one analysis, and it might contain more than one DLB
+	* If you realize more cases have to be included, add them in the same
+	"set ID". Don't start new "set ID" numbers.
+	* Each "set ID" consists of 2 alphabetic characters followed by 4
+	numerical characters.
+* Log file
+	* Indicate in the log file which DLB is used for the given "set ID"
+	* Indicate the changes with respect to a previous "set ID"
+	* Write clear and concise log messages so others can understand what
+	analysis or which DLB is considered
+	* Indicate in the log if something works or not.
+	* Indicate if a certain "set ID" is used for a certain publication or report
+* Keep a log file of the different letters. For instance AA might refer to load
+simulations carried out within a certain project
+* When results are outdated or wrong, delete the log and result files, but keep
+the htc, data and pbs input files so the "set ID" could be re-run again in the
+future. This is especially important if the given "set ID" has been used in a
+publication, report or Master/PhD thesis.
+
+
+File permissions
+----------------
+
+* By default only the person who generated the simulations within a given
+"set ID" can delete or modify the input files; other users have read access
+only. If you want to give everyone read and write access, you do:
+
+```
+# replace demo/AA0001 with the relevant turbine/set id
+g-000 $ cd /mnt/mimer/hawc2sim/demo
+g-000 $ chmod 777 -R AA0001
+```
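+
+If you later want to go back to read-only access for everyone else, a plain
+```chmod``` call along these lines would do (a sketch, adjust as needed):
+
+```
+g-000 $ cd /mnt/mimer/hawc2sim/demo
+g-000 $ chmod 755 -R AA0001
+```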
+
+
+HAWC2 folder structure
+----------------------
+
+The current DLB setup assumes the following HAWC2 model folder structure:
+
+```
+|-- control
+|   |-- ...
+|-- data
+|   |-- ...
+|-- htc
+|   |-- DLCs
+|   |   |-- dlc12_iec61400-1ed3.xlsx
+|   |   |-- dlc13_iec61400-1ed3.xlsx
+|   |   |-- ...
+|   |-- _master
+|   |   `-- dtu10mw_master_C0013.htc
+```
+
+The load case definitions should be placed in Excel spreadsheets with a
+```*.xlsx``` extension. The above example shows one possible scenario whereby
+all the load case definitions are placed in ```htc/DLCs``` (all folder names
+are case sensitive). Alternatively, one can also place the spreadsheets in
+separate sub folders, for example:
+
+```
+|-- control
+|   |-- ...
+|-- data
+|   |-- ...
+|-- htc
+|   |-- dlc12_iec61400-1ed3
+|   |   |-- dlc12_iec61400-1ed3.xlsx
+|   |-- dlc13_iec61400-1ed3
+|   |   |-- dlc13_iec61400-1ed3.xlsx
+```
+
+In order to use this auto-configuration mode, there can only be one master file
+in ```_master``` that contains ```_master_``` in its file name.
+
+For the NREL5MW and the DTU10MW HAWC2 models, you can find their respective
+master files and DLC definition spreadsheet files on ```mimer/hawc2sim```.
+
diff --git a/docs/howto-make-dlcs.md b/docs/howto-make-dlcs.md
index e31b020f97b74b16021a9c2ae97a32aeab84ff3e..2c9ee437bf6b89882885f7604b6ab40fae3e67de 100644
--- a/docs/howto-make-dlcs.md
+++ b/docs/howto-make-dlcs.md
@@ -11,10 +11,11 @@ point to the gorm/jess wiki's
 explain the difference in the paths seen from a windows computer and the cluster
 -->
 
-WARNING: these notes contain configuration settings that are specif to the
+> WARNING: these notes contain configuration settings that are specific to the
 DTU Wind Energy cluster Gorm. Only follow this guide in another environment if
 you know what you are doing!
 
+
 Introduction
 ------------
 
@@ -40,7 +41,7 @@ in the Excel spreadsheets): ```[Case folder]```,  ```[Case id.]```, and
 ```[Turb base name]```.
 
 The system will always force the values of the tags to be lower case anyway, and
-when working on Windows, this might cause some confusing and unexpected behaviour.
+when working on Windows, this might cause some confusing and unexpected behavior.
 The tags themselves can have lower and upper case characters as can be seen
 in the example above.
 
@@ -75,7 +76,7 @@ On Linux/Mac connecting to the cluster is as simple as running the following
 command in the terminal:
 
 ```
-g-000 $ ssh $USER@gorm.risoe.dk
+ssh $USER@gorm.risoe.dk
 ```
 
 Use your DTU password when asked. This will give you terminal access to the
@@ -93,12 +94,13 @@ Mounting the cluster discs
 --------------------------
 
 You need to be connected to the DTU network in order for this to work. You can
-also connect to the DTU network over VPN.
+also connect to the DTU network over VPN. When doing the HAWC2 simulations, you
+will interact regularly with the cluster file system and discs.
+
+### Windows
 
-When doing the HAWC2 simulations, you will interact regularly with the cluster
-file system and discs. It is convenient to map these discs as network
-drives (in Windows terms). Map the following network drives (replace ```$USER```
-with your user name):
+It is convenient to map these discs as network drives (in Windows terms).
+Map the following network drives (replace ```$USER``` with your user name):
 
 ```
 \\mimer\hawc2sim
@@ -110,13 +112,15 @@ with the cluster discs.
 
 Note that by default Windows Explorer will hide some of the files you will need edit.
 In order to show all files on your Gorm home drive, you need to un-hide system files:
-Explorer > Organize > Folder and search options > select tab "view" > select the option to show hidden files and folders.
+Explorer > Organize > Folder and search options > select tab "view" > select the
+option to show hidden files and folders.
+
+### Unix
 
 From Linux/Mac, you should be able to mount using either of the following
 addresses:
 ```
 //mimer.risoe.dk/hawc2sim
-//mimer.risoe.dk/well/hawc2sim
 //gorm.risoe.dk/$USER
 ```
 You can use either ```sshfs``` or ```mount -t cifs``` to mount the discs.
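+
+For example, a minimal ```sshfs``` mount of your Gorm home directory could look
+like this (a sketch only; pick any local mount point you like):
+
+```
+mkdir -p ~/mnt/gorm
+sshfs $USER@gorm.risoe.dk:/home/$USER ~/mnt/gorm
+```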
@@ -131,61 +135,49 @@ by editing the file ```.bash_profile``` file in your Gorm’s home directory
 or create a new file with this file name in case it doesn't exist):
 
 ```
-export PATH=$PATH:/home/MET/STABCON/repositories/toolbox/pbsutils/
+export PATH=$PATH:/home/MET/repositories/toolbox/pbsutils/
 ```
 
 (The corresponding open repository is on the DTU Wind Energy Gitlab server:
-[pbsutils](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox). Please
+[pbsutils](https://gitlab.windenergy.dtu.dk/toolbox/pbsutils). Please
-considering reporting bugs and/or suggest improvements there. You're contributions
+consider reporting bugs and/or suggesting improvements there. Your contributions
 are much appreciated!)
 
-If you have been using an old version of this how-to, you might be pointing
-to an earlier version of these tools/utils and its reference should be removed
-from your ```.bash_profile``` file:
-
-```
-export PATH=$PATH:/home/MET/STABCON/repositories/cluster-tools/
-```
+> If you have been using an old version of this how-to, you might be pointing
+to an earlier version of these tools/utils; any references containing
+```cluster-tools``` or ```prepost``` should be removed
+from your ```.bash_profile``` and/or ```.bashrc``` file on your Gorm home drive.
 
-After modifying ```.bash_profile```, save and close it. Then, in the terminal, run the command:
+After modifying ```.bash_profile```, save and close it. Then, in the terminal,
+run the following command (or log out and in again to be safe):
 ```
 g-000 $ source ~/.bash_profile
 ```
-In order for any changes made in ```.bash_profile``` to take effect, you need to either ```source``` it (as shown above), or log out and in again.  
 
-You will also need to configure wine and place the HAWC2 executables in a
-directory that wine knows about. First, activate the correct wine environment by
-typing in a shell in the Gorm's home directory (it can be activated with
-ssh (Linux, Mac) or putty (MS Windows)):
+You will also need to configure wine and place the HAWC2 executables in your
+local wine directory, which by default is assumed to be ```~/.wine32```.
+```pbsutils``` contains an automatic configuration script you can run:
 
 ```
-g-000 $ WINEARCH=win32 WINEPREFIX=~/.wine32 wine test.exe
+g-000 $ config-wine-hawc2.sh
 ```
 
-Optionally, you can also make an alias (a short format for a longer, more complex
-command). In the ```.bashrc``` file in your home directory
-(```/home/$USER/.bash_profile```), add at the bottom of the file:
+If you need more information on what is going on, you can read a more detailed
+description
+[here](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/blob/master/docs/configure-wine.md).
 
-```
-alias wine32='WINEARCH=win32 WINEPREFIX=~/.wine32 wine'
-```
-
-And now copy all the HAWC2 executables, DLL's (including the license manager)
-to your wine directory. You can copy all the required executables, dll's and
-the license manager are located at ```/home/MET/hawc2exe```. The following
-command will do this copying:
-
-```
-g-000 $ cp /home/MET/hawc2exe/* /home/$USER/.wine32/drive_c/windows/system32
-```
+All your HAWC2 executables and DLL's are now located
+at ```/home/$USER/.wine32/drive_c/bin```.
 
 Notice that the HAWC2 executable names are ```hawc2-latest.exe```,
 ```hawc2-118.exe```, etc. By default the latest version will be used and the user
 does not need to specify this. However, when you need to compare different version
 you can easily do so by specifying which case should be run with which
-executable. The file ```hawc2-latest.exe``` will always be the latest HAWC2
-version at ```/home/MET/hawc2exe/```. When a new HAWC2 is released you can
-simply copy all the files from there again to update.
+executable.
+
+Alternatively you can also include all the DLL's and executables in the root of
+your HAWC2 model folder. Executables and DLL's placed in the root folder take
+precedence over the ones placed in ```/home/$USER/.wine32/drive_c/bin```.
 
 Log out and in again from the cluster (close and restart PuTTY).
 
@@ -196,73 +188,70 @@ g-000 $ wine32 hawc2-latest htc/some-intput-file.htc
 ```
 
 
-Method A: Generating htc input files on the cluster
----------------------------------------------------
-
-Use ssh (Linux, Mac) or putty (MS Windows) to connect to the cluster.
-
-With qsub-wrap.py the user can wrap a PBS launch script around any executable or
-Python/Matlab/... script. In doing so, the executable/Python script will be
-immediately submitted to the cluster for execution. By default, the Anaconda
-Python environment in ```/home/MET/STABCON/miniconda``` will be activated. The
-Anaconda Python environment is not relevant, and can be safely ignored, if the
-executable does not have anything to do with Python.
+Updating local HAWC2 executables
+--------------------------------
 
-In order to see the different options of this qsub-wrap utility, do:
+When there is a new version of HAWC2, or when a new license manager is released,
+you can update your local wine directory as follows:
 
 ```
-g-000 $ qsub-wrap.py --help
+g-000 $ cp /home/MET/hawc2exe/* /home/$USER/.wine32/drive_c/bin/
 ```
 
-For example, in order to generate the default IEC DLCs:
+The file ```hawc2-latest.exe``` will always be the latest HAWC2
+version at ```/home/MET/hawc2exe/```. When a new HAWC2 is released you can
+simply copy all the files from there again to update.
+
+
+HAWC2 model folder structure and results on mimer/hawc2sim
+----------------------------------------------------------
+
+See [house rules on mimer/hawc2sim](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/blob/master/docs/houserules-mimerhawc2sim.md)
+for a more detailed description.
+
+
+Method A: Generating htc input files on the cluster (recommended)
+-----------------------------------------------------------------
+
+Use ssh (Linux, Mac) or putty (MS Windows) to connect to the cluster.
+
+In order to simplify things, we're using ```qsub-wrap.py``` from ```pbsutils```
+(which we added to the PATH in the [preparation](#preparation) section) to
+generate the htc files. It will execute, on a compute node, any given Python
+script in a pre-installed Python environment that has the Wind Energy Toolbox
+installed.
+
+For the current implementation of the DLB the following template is available:
 
 ```
-g-000 $ cd path/to/HAWC2/model # folder where the hawc2 model is located
-g-000 $ qsub-wrap.py -f /home/MET/STABCON/repositories/prepost/dlctemplate.py -c python --prep
+/home/MET/repositories/toolbox/WindEnergyToolbox/wetb/prepost/dlctemplate.py
 ```
 
-Note that the following folder structure for the HAWC2 model is assumed:
+And the corresponding definitions of all the different load cases can be copied
+from here (valid for the DTU10MW):
 
 ```
-|-- control
-|   |-- ...
-|-- data
-|   |-- ...
-|-- htc
-|   |-- DLCs
-|   |   |-- dlc12_iec61400-1ed3.xlsx
-|   |   |-- dlc13_iec61400-1ed3.xlsx
-|   |   |-- ...
-|   |-- _master
-|   |   `-- dtu10mw_master_C0013.htc
+/mnt/mimer/hawc2sim/DTU10MW/C0020/htc/DLCs
 ```
 
-The load case definitions should be placed in Excel spreadsheets with a
-```*.xlsx``` extension. The above example shows one possible scenario whereby
-all the load case definitions are placed in ```htc/DLCs``` (all folder names
-are case sensitive). Alternatively, one can also place the spreadsheets in
-separate sub folders, for example:
+For example, in order to generate all the HAWC2 htc input files and the
+corresponding ```*.p``` cluster launch files using this default DLB setup, run:
 
 ```
-|-- control
-|   |-- ...
-|-- data
-|   |-- ...
-|-- htc
-|   |-- dlc12_iec61400-1ed3
-|   |   |-- dlc12_iec61400-1ed3.xlsx
-|   |-- dlc13_iec61400-1ed3
-|   |   |-- dlc13_iec61400-1ed3.xlsx
+g-000 $ cd /mnt/mimer/hawc2sim/demo/A0001 # folder where the hawc2 model is located
+g-000 $ qsub-wrap.py -f /home/MET/repositories/toolbox/WindEnergyToolbox/wetb/prepost/dlctemplate.py --prep
 ```
 
-In order to use this auto-configuration mode, there can only be one master file
-in ```_master``` that contains ```_master_``` in its file name.
+For convenience, you could consider copying ```dlctemplate.py``` into the
+turbine folder or into the simulation set ID folder:
 
-For the NREL5MW and the DTU10MW HAWC2 models, you can find their respective
-master files and DLC definition spreadsheet files on Mimer. When connected
-to Gorm over SSH/PuTTY, you will find these files at:
 ```
-/mnt/mimer/hawc2sim # (when on Gorm)
+g-000 $ cd /mnt/mimer/hawc2sim/demo/
+# copy the dlctemplate to your turbine model folder and rename to myturbine.py
+g-000 $ cp /home/MET/repositories/toolbox/WindEnergyToolbox/wetb/prepost/dlctemplate.py ./myturbine.py
+g-000 $ cd A0001
+g-000 $ qsub-wrap.py -f ../myturbine.py --prep
 ```
 
 
@@ -278,20 +267,17 @@ First activate the Anaconda Python environment by typing:
 
 ```bash
 # add the Anaconda Python environment paths to the system PATH
-g-000 $ export PATH=/home/MET/STABCON/miniconda/bin:$PATH
+g-000 $ export PATH=/home/python/miniconda3/bin:$PATH
 # activate the custom python environment:
-g-000 $ source activate anaconda
-# add the Pythone libraries to the PYTHONPATH
-g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/prepost:$PYTHONPATH
-g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/pythontoolbox/fatigue_tools:$PYTHONPATH
-g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/pythontoolbox:$PYTHONPATH
-g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/MMPE:$PYTHONPATH
+g-000 $ source activate wetb_py3
 ```
 For example, launch the auto-generation of DLCs input files:
 
 ```
-g-000 $ cd path/to/HAWC2/model # folder where the hawc2 model is located
-g-000 $ python /home/MET/STABCON/repositories/prepost/dlctemplate.py --prep
+# folder where the HAWC2 model is located
+g-000 $ cd /mnt/mimer/hawc2sim/demo/AA0001
+# assuming myturbine.py is copy of dlctemplate.py and is placed one level up
+g-000 $ python ../myturbine.py --prep
 ```
 
 Or start an interactive IPython shell:
@@ -309,16 +295,18 @@ jammed.
 Method C: Generating htc input files locally
 --------------------------------------------
 
-This approach gives you total freedom, but is also more difficult since you
-will have to have fully configured Python environment installed locally.
+This approach gives you more flexibility and room for customizations, but you
+will need to install a Python environment with all its dependencies locally.
 Additionally, you need access to the cluster discs from your local workstation.
-Method C is not documented yet.
+
+The installation procedure for wetb is outlined in the
+[installation manual](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/blob/master/docs/install-manual-detailed.md).
 
 
 Optional configuration
 ----------------------
 
-Optional tags that can be set in the Excel spreadsheet, and their corresponding
+Optional tags that can be set in the Excel spreadsheet and their corresponding
 default values are given below. Beside a replacement value in the master htc
 file, there are also special actions connected to these values. Consequently,
 these tags have to be present. When removed, the system will stop working properly.
@@ -362,8 +350,23 @@ This zip file will be extracted into the execution directory (```[run_dir]```).
 After the model has ran on the node, only the files that have been created
 during simulation time in the ```[log_dir]```, ```[res_dir]```,
 ```[animation_dir]```, and ```[eigenfreq_dir]``` will be copied back.
-Optionally, on can also copy back the turbulence files, and other explicitly
-defined files [TODO: expand manual here].
+
+
+### Advanced configuration options
+
+> Note that not all features are documented yet...
+
+Special tags: copy special result files from the compute node back to the HAWC2
+working directory on the network drive, and optionally rename the file in case
+it would otherwise be overwritten by other cases in your DLB:
+* ```[copyback_files] = ['ESYSMooring_init.dat']```
+* ```[copyback_frename] = ['path/to/ESYSMooring_init_vXYZ.dat']```, optionally specify
+a different file path/name
+
+Copy files with a special name from the HAWC2 working directory to the compute
+node, for which a fixed file name is assumed:
+* ```[copyto_files] = ['path/to/ESYSMooring_init_vXYZ.dat']```
+* ```[copyto_generic] = ['ESYSMooring_init.dat']```
 
 
 Launching the jobs on the cluster
@@ -429,12 +432,20 @@ options:
 ```
 
 Then launch the actual jobs (each job is a ```*.p``` file in ```pbs_in```) using
-100 cpu's, and using a compute node instead of the login node (see you can exit
-the ssh/putty session without interrupting the launching process):
+100 cpu's:
 
 ```bash
-g-000 $ cd path/to/HAWC2/model
-g-000 $ launch.py -n 100 --node
+g-000 $ cd /mnt/mimer/hawc2sim/demo/A0001
+g-000 $ launch.py -n 100 -p pbs_in/
+```
+
+If the launching process takes hours and you have to close your SSH/PuTTY
+session before it reaches the end, you should use the ```--node``` argument so
+that the launching process takes place on a dedicated node:
+
+```bash
+g-000 $ cd /mnt/mimer/hawc2sim/demo/A0001
+g-000 $ launch.py -n 100 -p pbs_in/ --node
 ```
 
 
@@ -520,10 +531,11 @@ htc files, but now we set different flags. For example, for checking the log
 files, calculating the statistics, the AEP and the life time equivalent loads:
 
 ```
-g-000 $ qsub-wrap.py -f /home/MET/STABCON/repositories/prepost/dlctemplate.py -c python --years=25 --neq=1e7 --stats --check_logs --fatigue
+# myturbine.py (copy of dlctemplate.py) is assumed to be located one folder up
+g-000 $ qsub-wrap.py -f ../myturbine.py --years=25 --neq=1e7 --stats --check_logs --fatigue
 ```
 
-Other options for the ```dlctemplate.py``` script:
+Other options for the original ```dlctemplate.py``` script:
 
 ```
 usage: dlctemplate.py [-h] [--prep] [--check_logs] [--stats] [--fatigue]
@@ -536,32 +548,34 @@ optional arguments:
   -h, --help         show this help message and exit
   --prep             create htc, pbs, files (default=False)
   --check_logs       check the log files (default=False)
-  --stats            calculate statistics (default=False)
+  --stats            calculate statistics and 1Hz equivalent loads (default=False)
   --fatigue          calculate Leq for a full DLC (default=False)
   --csv              Save data also as csv file (default=False)
   --years YEARS      Total life time in years (default=20)
   --no_bins NO_BINS  Number of bins for fatigue loads (default=46)
-  --neq NEQ          Equivalent cycles neq (default=1e6)
+  --neq NEQ          Equivalent cycles neq, default 1 Hz equivalent load
+                     (neq = simulation duration in seconds)
   --envelopeblade    calculate the load envelope for sensors on the blades
   --envelopeturbine  calculate the load envelope for sensors on the turbine
 ```
 
-The load envelopes are computed for sensors specified in the 
-```dlctemplate.py``` file. The sensors are specified in a list of lists. The 
+The load envelopes are computed for sensors specified in the
+```myturbine.py``` file. The sensors are specified in a list of lists. The
 inner list contains the sensors at one location. The envelope is computed for
 the first two sensors of the inner list and the other sensors are used to
-retrieve the remaining loads defining the load state occurring at the same 
+retrieve the remaining loads defining the load state occurring at the same
 instant. The outer list is used to specify sensors at different locations.
 The default values for the blade envelopes are used to compute the Mx-My
-envelopes and retrieve the Mz-Fx-Fy-Fz loads occuring at the same moment.
+envelopes and retrieve the Mz-Fx-Fy-Fz loads occurring at the same moment.
+
 
 Debugging
 ---------
 
 Any output (everything that involves print statements) generated during the
-post-processing of the simulations using ```dlctemplate.py``` is captured in
-the ```pbs_out/qsub-wrap_dlctemplate.py.out``` file, while exceptions and errors
-are redirected to the ```pbs_out/qsub-wrap_dlctemplate.py.err``` text file.
+post-processing of the simulations using ```myturbine.py``` is captured in
+the ```pbs_out/qsub-wrap_myturbine.py.out``` file, while exceptions and errors
+are redirected to the ```pbs_out/qsub-wrap_myturbine.py.err``` text file.
 
 The output and errors of HAWC2 simulations can also be found in the ```pbs_out```
 directory. The ```.err``` and ```.out``` files will be named exactly the same
diff --git a/docs/install-anaconda.md b/docs/install-anaconda.md
new file mode 100644
index 0000000000000000000000000000000000000000..4ba6915c47ce027619eb075d9cf8f10f9ac7bc8f
--- /dev/null
+++ b/docs/install-anaconda.md
@@ -0,0 +1,15 @@
+
+# Installation manual
+
+
+## Anaconda or Miniconda
+
+```
+conda update --all
+conda create -n wetb_py3 python=3.5
+source activate wetb_py3
+conda install setuptools_scm future h5py pytables pytest nose sphinx
+conda install scipy pandas matplotlib cython xlrd coverage xlwt openpyxl
+pip install pyscaffold pytest-cov
+```
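+
+With the ```wetb_py3``` environment activated you can then install the toolbox
+itself, for example in development mode from a clone of this repository
+(assuming you are standing in the repository root):
+
+```
+pip install -e ./
+```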
+
diff --git a/docs/install-manual-detailed.md b/docs/install-manual-detailed.md
index 150e9266a42c4195e7797d3b6a91a194219b8b56..81acbbbf1f66f1df6eca725f17ec283dc09f15e6 100644
--- a/docs/install-manual-detailed.md
+++ b/docs/install-manual-detailed.md
@@ -1,5 +1,5 @@
 
-!! This guide is not finished, and might contain innacuracies. Please report
+> !! This guide is not finished, and might contain inaccuracies. Please report
 any mistakes/bugs by creating an
 [issue](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/issues).
 This is a WIP (Work In Progress) !!
@@ -29,7 +29,7 @@ python-pytables.
 
 * Other tools you will need:
 
-> git gcc
+> git gcc gcc-fortran (gfortran)
 
 
 ## Mac
@@ -165,14 +165,15 @@ activate py27
 * Install the necessary Python dependencies using the conda package manager:
 
 ```
-conda install scipy pandas matplotlib cython xlrd pytables sphinx nose setuptools_scm future h5py
+conda install setuptools_scm future h5py pytables pytest nose sphinx
+conda install scipy pandas matplotlib cython xlrd coverage xlwt openpyxl
 ```
 
 * Not all packages are available in the conda repositories, but they can be
 easily installed with pip:
 
 ```
-pip install pyscaffold pytest pytest-cov
+pip install pyscaffold pytest-cov
 ```
 
 
diff --git a/requirements.txt b/requirements.txt
index 4f4ab5838748fa63eb598afca49c3f6cd8148ef6..1c12bab28021d3dd10594cea6cb59de041bd0684 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,6 +5,8 @@ scipy>=0.9
 matplotlib
 pytest
 xlrd
+xlwt
+openpyxl
 h5py
 pandas
 tables
diff --git a/wetb/hawc2/cluster_simulation.py b/wetb/hawc2/cluster_simulation.py
index c67377f58a723b324b91f1cafe3665520770dcd1..544cde2fb5761169ba2b58ec2f9d2cf0fd8fbb8c 100644
--- a/wetb/hawc2/cluster_simulation.py
+++ b/wetb/hawc2/cluster_simulation.py
@@ -1,5 +1,5 @@
 from wetb.hawc2.simulation import Simulation, SimulationThread
-import os
+import os  
 import sys
 from threading import  Thread
 class ClusterSimulation(Simulation):
@@ -11,13 +11,14 @@ class ClusterSimulation(Simulation):
         self.non_blocking_simulation_thread = Thread(target=self.simulate)
         self.start(1)
         self.wait()
+        self.is_done = True
         print (self.host.simulationThread.res[1])  # print hawc2 output to stdout
         sys.exit(self.host.simulationThread.res[0])
 
     def update_status(self, *args, **kwargs):
         Simulation.update_status(self, *args, **kwargs)
         with open("/home/mmpe/.hawc2launcher/status_%s" % self.simulation_id, 'w') as fid:
-            fid.write (";".join([self.status] + [str(getattr(self.logFile, v)) for v in ['status', 'pct', 'remaining_time', 'lastline']]) + "\n")
+            fid.write (";".join([self.simulation_id, self.status] + [str(getattr(self.logFile, v)) for v in ['status', 'pct', 'remaining_time', 'lastline']]) + "\n")
 
     def show_status(self):
         pass
diff --git a/wetb/hawc2/htc_contents.py b/wetb/hawc2/htc_contents.py
index ee1f06d3834f2df2d04d7c843d22180d6bb212f2..54c338b47826883c96a64e3db4fa055c6593e2ba 100644
--- a/wetb/hawc2/htc_contents.py
+++ b/wetb/hawc2/htc_contents.py
@@ -171,7 +171,7 @@ class HTCLine(HTCContents):
                                   ("", "\t" + self.str_values())[bool(self.values)],
                                   ("", "\t" + self.comments)[bool(self.comments.strip())])
     def str_values(self):
-        return " ".join([str(v) for v in self.values])
+        return " ".join([str(v).lower() for v in self.values])
 
     def __getitem__(self, key):
         return self.values[key]
@@ -351,7 +351,10 @@ class HTCDefaults(object):
         else:
             mann.add_line('create_turb_parameters', [L, ae23, Gamma, seed, int(high_frq_compensation)], "L, alfaeps, gamma, seed, highfrq compensation")
         if filenames is None:
-            filenames = ["./turb/turb_wsp%d_s%04d%s.bin" % (self.wind.wsp[0], seed, c) for c in ['u', 'v', 'w']]
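+            # default turbulence file names encode the Mann parameters (L, alpha-epsilon,
+            # gamma, high frequency compensation), the grid points, the spatial resolution and the seed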
+            fmt = "l%.1f_ae%.2f_g%.1f_h%d_%dx%dx%d_%.3fx%.2fx%.2f_s%04d%c.turb"
+            import numpy as np
+            dxyz = tuple(np.array(box_dimension) / no_grid_points)
+            filenames = ["./turb/" + fmt % ((L, ae23, Gamma, high_frq_compensation) + no_grid_points + dxyz + (seed, uvw)) for uvw in ['u', 'v', 'w']]
         if isinstance(filenames, str):
             filenames = ["./turb/%s_s%04d%s.bin" % (filenames, seed, c) for c in ['u', 'v', 'w']]
         for filename, c in zip(filenames, ['u', 'v', 'w']):
diff --git a/wetb/hawc2/htc_file.py b/wetb/hawc2/htc_file.py
index 95e3fb7485eca3f670b95e98174977cffb617b31..10eb514ce850b58647367475c58d960550e30bd7 100644
--- a/wetb/hawc2/htc_file.py
+++ b/wetb/hawc2/htc_file.py
@@ -24,6 +24,14 @@ from copy import copy
 
 
 class HTCFile(HTCContents, HTCDefaults):
+    """Wrapper for HTC files
+
+    Examples:
+    ---------
+    >>> htcfile = HTCFile('htc/test.htc')
+    >>> htcfile.wind.wsp = 10
+    >>> htcfile.save()
+    """
 
     filename = None
     htc_inputfiles = []
@@ -95,13 +103,16 @@ class HTCFile(HTCContents, HTCDefaults):
         with open(filename, 'w', encoding='utf-8') as fid:
             fid.write(str(self))
 
-    def set_name(self, name, folder="htc"):
-        self.filename = os.path.join(self.modelpath, folder, "%s.htc" % name).replace("\\", "/")
+    def set_name(self, name, folder="htc/"):
+        #if os.path.isabs(folder) is False and os.path.relpath(folder).startswith("htc" + os.path.sep):
+        folder = "./" + os.path.relpath(folder).replace("\\", "/")
+
+        self.filename = os.path.relpath(os.path.join(self.modelpath, folder, "%s.htc" % name)).replace("\\", "/")
         if 'simulation' in self and 'logfile' in self.simulation:
-            self.simulation.logfile = "./log/%s.log" % name
+            self.simulation.logfile = os.path.join(folder.replace("htc", "log", 1), "%s.log" % name).replace("\\", "/")
         elif 'test_structure' in self and 'logfile' in self.test_structure:
-            self.test_structure.logfile = "./log/%s.log" % name
-        self.output.filename = "./res/%s" % name
+            self.test_structure.logfile = os.path.join(folder.replace("htc", "log", 1), "%s.log" % name).replace("\\", "/")
+        self.output.filename = os.path.join(folder.replace("htc", "res", 1), "%s" % name).replace("\\", "/")
 
     def input_files(self):
         files = self.htc_inputfiles
diff --git a/wetb/hawc2/log_file.py b/wetb/hawc2/log_file.py
index 48a810c8455258c636a9f521835bfbb63ab6c8c8..12dac0ab0be8b0bdda5acd154a733ca999fd9ff1 100644
--- a/wetb/hawc2/log_file.py
+++ b/wetb/hawc2/log_file.py
@@ -181,4 +181,4 @@ class LogInfo(LogFile):
         self.errors = []
 
     def update_status(self):
-        pass
\ No newline at end of file
+        pass
diff --git a/wetb/hawc2/simulation.py b/wetb/hawc2/simulation.py
index f7d8a444032777b06f4cec184426cac11f17bd64..aae4a60932023bf471ce231ad0806dc34101fc11 100755
--- a/wetb/hawc2/simulation.py
+++ b/wetb/hawc2/simulation.py
@@ -13,14 +13,13 @@ import re
 import shutil
 import subprocess
 import sys
-import threading
 import time
 from wetb.hawc2 import log_file
 from wetb.hawc2.htc_file import HTCFile
 from wetb.hawc2.log_file import LogFile, LogInfo
 
 from future import standard_library
-import psutil
+
 
 from wetb.utils.cluster_tools import pbsjob
 from wetb.utils.cluster_tools.cluster_resource import LocalResource
@@ -33,10 +32,13 @@ QUEUED = "queued"  #until start
 PREPARING = "Copy to host"  # during prepare simulation
 INITIALIZING = "Initializing"  #when starting
 SIMULATING = "Simulating"  # when logfile.status=simulating
-FINISH = "Finish"  # when HAWC2 finish
+FINISHING = "Copy from host"  # while copying results back from the host
+FINISH = "Simulation finish"  # when HAWC2 finishes
 ERROR = "Error"  # when hawc2 returns error
 ABORTED = "Aborted"  # when stopped and logfile.status != Done
 CLEANED = "Cleaned"  # after copy back
+def unix_path(path):
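+    """Convert a path to unix style: forward slashes and lower case"""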
+    return path.replace("\\", "/").lower()
 
 class Simulation(object):
     """Class for doing hawc2 simulations
@@ -72,6 +74,7 @@ class Simulation(object):
     """
 
     is_simulating = False
+    is_done = False
     status = QUEUED
     def __init__(self, modelpath, htcfilename, hawc2exe="HAWC2MB.exe", copy_turbulence=True):
         self.modelpath = os.path.abspath(modelpath) + "/"
@@ -83,8 +86,9 @@ class Simulation(object):
         self.time_stop = self.htcFile.simulation.time_stop[0]
         self.hawc2exe = hawc2exe
         self.copy_turbulence = copy_turbulence
-        self.simulation_id = os.path.relpath(htcfilename, self.modelpath).replace("\\", "_").replace("/", "_") + "_%d" % id(self)
-        self.stdout_filename = "stdout/%s.out" % self.simulation_id
+        self.simulation_id = unix_path(os.path.relpath(htcfilename, self.modelpath) + "_%d" % id(self)).replace("/", "_")
+        self.stdout_filename = os.path.splitext(unix_path(os.path.relpath(htcfilename, self.modelpath)).replace('htc', 'stdout', 1))[0] + ".out"
+        #self.stdout_filename = "stdout/%s.out" % self.simulation_id
         if 'logfile' in self.htcFile.simulation:
             self.log_filename = self.htcFile.simulation.logfile[0]
         else:
@@ -93,7 +97,7 @@ class Simulation(object):
             self.log_filename = os.path.relpath(self.log_filename, self.modelpath)
         else:
             self.log_filename = os.path.relpath(self.log_filename)
-        self.log_filename = self.log_filename.replace("\\", "/")
+        self.log_filename = unix_path(self.log_filename)
 
         self.logFile = LogFile(os.path.join(self.modelpath, self.log_filename), self.time_stop)
         self.logFile.clear()
@@ -107,22 +111,26 @@ class Simulation(object):
     def start(self, update_interval=1):
         """Start non blocking distributed simulation"""
         self.is_simulating = True
-        self.updateStatusThread.start()
+        if update_interval > 0:
+            self.updateStatusThread.interval = update_interval
+            self.updateStatusThread.start()
         self.non_blocking_simulation_thread.start()
 
     def wait(self):
         self.non_blocking_simulation_thread.join()
         self.update_status()
 
-    def abort(self):
-        self.host.stop()
-        for _ in range(100):
-            if self.is_simulating:
-                break
-            time.sleep(0.1)
+    def abort(self, update_status=True):
+        if self.status != QUEUED:
+            self.host.stop()
+            for _ in range(100):
+                if self.is_simulating is False:
+                    break
+                time.sleep(0.1)
         if self.logFile.status not in [log_file.DONE]:
             self.status = ABORTED
-        self.update_status()
+        if update_status:
+            self.update_status()
 
     def show_status(self):
         #print ("log status:", self.logFile.status)
@@ -147,11 +155,34 @@ class Simulation(object):
         self.last_status = self.logFile.status
 
 
+
+
     def prepare_simulation(self):
         self.status = PREPARING
         self.tmp_modelpath = os.path.join(".hawc2launcher/%s/" % self.simulation_id)
         self.set_id(self.simulation_id, str(self.host), self.tmp_modelpath)
-        self.host._prepare_simulation()
+
+        def fmt(src):
+            if os.path.isabs(src):
+                src = os.path.relpath(os.path.abspath(src), self.modelpath)
+            else:
+                src = os.path.relpath (src)
+            assert not src.startswith(".."), "%s refers to a file outside the model path\nAll input files must be inside the model path" % src
+            return src
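+        # expand the input file patterns (htc includes, turbulence files and
+        # user-specified additional files) into the set of actual files to copy to the host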
+        input_patterns = [fmt(src) for src in self.htcFile.input_files() + self.htcFile.turbulence_files() + self.additional_files().get('input', [])]
+        input_files = set([f for pattern in input_patterns for f in glob.glob(os.path.join(self.modelpath, pattern))])
+        if not os.path.isdir(os.path.dirname(self.modelpath + self.stdout_filename)):
+            os.makedirs(os.path.dirname(self.modelpath + self.stdout_filename))
+        self.host._prepare_simulation(input_files)
+
+
+#        return [fmt(src) for src in self.htcFile.input_files() + self.htcFile.turbulence_files() + self.additional_files().get('input', [])]
+#
+#        for src in self._input_sources():
+#            for src_file in glob.glob(os.path.join(self.modelpath, src)):
+#
+#
+#        self.host._prepare_simulation()
 
     def simulate(self):
         #starts blocking simulation
@@ -181,13 +212,25 @@ class Simulation(object):
 
 
     def finish_simulation(self):
-        lock = threading.Lock()
-        with lock:
-            if self.status == CLEANED: return
-            if self.status != ERROR:
-                self.status = CLEANED
-        self.host._finish_simulation()
-        self.set_id(self.simulation_id)
+        if self.status == ABORTED:
+            return
+        if self.status != ERROR:
+            self.status = FINISHING
+
+        def fmt(dst):
+            if os.path.isabs(dst):
+                dst = os.path.relpath(os.path.abspath(dst), self.modelpath)
+            else:
+                dst = os.path.relpath (dst)
+            dst = unix_path(dst)
+            assert not dst.startswith(".."), "%s refers to a file outside the model path\nAll output files must be inside the model path" % dst
+            return dst
+        output_patterns = [fmt(dst) for dst in self.htcFile.output_files() + ([], self.htcFile.turbulence_files())[self.copy_turbulence] + [self.stdout_filename]]
+        output_files = set([f for pattern in output_patterns for f in self.host.glob(unix_path(os.path.join(self.tmp_modelpath, pattern)))])
+        self.host._finish_simulation(output_files)
+        self.set_id(self.filename)
+        if self.status != ERROR:
+            self.status = CLEANED
 
 
 
@@ -200,28 +243,6 @@ class Simulation(object):
             if self.logFile.status == log_file.DONE and self.is_simulating is False:
                 self.status = FINISH
 
-    def _input_sources(self):
-        def fmt(src):
-            if os.path.isabs(src):
-                src = os.path.relpath(os.path.abspath(src), self.modelpath)
-            else:
-                src = os.path.relpath (src)
-            assert not src.startswith(".."), "%s referes to a file outside the model path\nAll input files be inside model path" % src
-            return src
-        return [fmt(src) for src in self.htcFile.input_files() + self.htcFile.turbulence_files() + self.additional_files().get('input', [])]
-
-    def _output_sources(self):
-        def fmt(dst):
-            if os.path.isabs(dst):
-                dst = os.path.relpath(os.path.abspath(dst), self.modelpath)
-            else:
-                dst = os.path.relpath (dst)
-            dst = dst.replace("\\", "/")
-            assert not dst.startswith(".."), "%s referes to a file outside the model path\nAll input files be inside model path" % dst
-            return dst
-        return [fmt(dst) for dst in self.htcFile.output_files() + ([], self.htcFile.turbulence_files())[self.copy_turbulence] + [self.stdout_filename]]
-
-
 
     def __str__(self):
         return "Simulation(%s)" % self.filename
@@ -234,7 +255,7 @@ class Simulation(object):
         additional_files = {}
         if os.path.isfile(additional_files_file):
             with open(additional_files_file, encoding='utf-8') as fid:
-                additional_files = json.load(fid)
+                additional_files = json.loads(fid.read().replace("\\", "/"))
         return additional_files
 
     def add_additional_input_file(self, file):
@@ -246,15 +267,33 @@ class Simulation(object):
 
 
     def simulate_distributed(self):
-        self.prepare_simulation()
-        self.simulate()
-        self.finish_simulation()
+        try:
+            self.prepare_simulation()
+            try:
+                self.simulate()
+            except Warning as e:
+                print ("simulation failed", str(self))
+                print ("Trying to finish")
+                raise
+            finally:
+                try:
+                    self.finish_simulation()
+                except:
+                    print ("finish_simulation failed", str(self))
+                    raise
+        except:
+            self.status = ERROR
+            raise
+        finally:
+            self.is_done = True
+
+
 
 
     def fix_errors(self):
         def confirm_add_additional_file(folder, file):
             if os.path.isfile(os.path.join(self.modelpath, folder, file)):
-                filename = os.path.join(folder, file).replace(os.path.sep, "/")
+                filename = unix_path(os.path.join(folder, file))
                 if self.get_confirmation("File missing", "'%s' seems to be missing in the temporary working directory. \n\nDo you want to add it to additional_files.txt" % filename):
                     self.add_additional_input_file(filename)
                     self.show_message("'%s' is now added to additional_files.txt.\n\nPlease restart the simulation" % filename)
@@ -282,6 +321,8 @@ class Simulation(object):
     def show_message(self, msg, title="Information"):
         print (msg)
 
+    def set_id(self):
+        pass
 
 
 class UpdateStatusThread(Thread):
@@ -294,7 +335,8 @@ class UpdateStatusThread(Thread):
         Thread.start(self)
 
     def run(self):
-        while self.simulation.is_simulating:
+        print ("Wrong updatestatus")
+        while self.simulation.is_done is False:
             self.simulation.update_status()
             time.sleep(self.interval)
 
@@ -320,24 +362,28 @@ class SimulationResource(object):
     def __str__(self):
         return self.host
 class LocalSimulationHost(SimulationResource):
-    def __init__(self, simulation):
+    def __init__(self, simulation, resource=None):
         SimulationResource.__init__(self, simulation)
         LocalResource.__init__(self, "hawc2mb")
+        self.resource = resource
         self.simulationThread = SimulationThread(self.sim)
 
-    def _prepare_simulation(self):
+
+    def glob(self, path):
+        return glob.glob(path)
+
+    def _prepare_simulation(self, input_files):
         # must be called through simulation object
         self.tmp_modelpath = os.path.join(self.modelpath, self.tmp_modelpath)
         self.sim.set_id(self.simulation_id, 'Localhost', self.tmp_modelpath)
-        for src in self._input_sources():
-            for src_file in glob.glob(os.path.join(self.modelpath, src)):
-                dst = os.path.join(self.tmp_modelpath, os.path.relpath(src_file, self.modelpath))
-                # exist_ok does not exist in Python27
-                if not os.path.exists(os.path.dirname(dst)):
-                    os.makedirs(os.path.dirname(dst))  #, exist_ok=True)
-                shutil.copy(src_file, dst)
-                if not os.path.isfile(dst) or os.stat(dst).st_size != os.stat(src_file).st_size:
-                    print ("error copy ", dst)
+        for src_file in input_files:
+            dst = os.path.join(self.tmp_modelpath, os.path.relpath(src_file, self.modelpath))
+            # exist_ok does not exist in Python27
+            if not os.path.exists(os.path.dirname(dst)):
+                os.makedirs(os.path.dirname(dst))  #, exist_ok=True)
+            shutil.copy(src_file, dst)
+            if not os.path.isfile(dst) or os.stat(dst).st_size != os.stat(src_file).st_size:
+                print ("error copy ", dst)
 
         if not os.path.exists(os.path.join(self.tmp_modelpath, 'stdout')):
             os.makedirs(os.path.join(self.tmp_modelpath, 'stdout'))  #, exist_ok=True)
@@ -355,17 +401,14 @@ class LocalSimulationHost(SimulationResource):
         self.errors.extend(list(set(self.logFile.errors)))
 
 
-    def _finish_simulation(self):
-        for dst in self._output_sources():
-            src = os.path.join(self.tmp_modelpath, dst)
-
-            for src_file in glob.glob(src):
-                dst_file = os.path.join(self.modelpath, os.path.relpath(src_file, self.tmp_modelpath))
-                # exist_ok does not exist in Python27
-                if not os.path.isdir(os.path.dirname(dst_file)):
-                    os.makedirs(os.path.dirname(dst_file))  #, exist_ok=True)
-                if not os.path.isfile(dst_file) or os.path.getmtime(dst_file) != os.path.getmtime(src_file):
-                    shutil.copy(src_file, dst_file)
+    def _finish_simulation(self, output_files):
+        for src_file in output_files:
+            dst_file = os.path.join(self.modelpath, os.path.relpath(src_file, self.tmp_modelpath))
+            # exist_ok does not exist in Python27
+            if not os.path.isdir(os.path.dirname(dst_file)):
+                os.makedirs(os.path.dirname(dst_file))  #, exist_ok=True)
+            if not os.path.isfile(dst_file) or os.path.getmtime(dst_file) != os.path.getmtime(src_file):
+                shutil.copy(src_file, dst_file)
 
         self.logFile.filename = os.path.join(self.modelpath, self.log_filename)
 
@@ -378,8 +421,9 @@ class LocalSimulationHost(SimulationResource):
         self.logFile.update_status()
 
     def stop(self):
-        self.simulationThread.stop()
-        self.simulationThread.join()
+        if self.simulationThread.is_alive():
+            self.simulationThread.stop()
+            self.simulationThread.join()
 
 
 
@@ -394,11 +438,13 @@ class SimulationThread(Thread):
 
 
     def start(self):
-        CREATE_NO_WINDOW = 0x08000000
+        #CREATE_NO_WINDOW = 0x08000000
         modelpath = self.modelpath
         htcfile = os.path.relpath(self.sim.htcFile.filename, self.sim.modelpath)
         hawc2exe = self.sim.hawc2exe
         stdout = self.sim.stdout_filename
+        if not os.path.isdir(os.path.dirname(self.modelpath + self.sim.stdout_filename)):
+            os.makedirs(os.path.dirname(self.modelpath + self.sim.stdout_filename))
         if os.name == "nt":
             self.process = subprocess.Popen('"%s" %s 1> %s 2>&1' % (hawc2exe, htcfile, stdout), stdout=None, stderr=None, shell=True, cwd=modelpath)  #, creationflags=CREATE_NO_WINDOW)
         else:
@@ -407,38 +453,39 @@ class SimulationThread(Thread):
 
 
     def run(self):
+        import psutil
         p = psutil.Process(os.getpid())
         if self.low_priority:
             p.set_nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
         self.process.communicate()
         errorcode = self.process.returncode
-        if not os.path.isdir(os.path.dirname(self.modelpath + self.sim.stdout_filename)):
-            os.makedirs(os.path.dirname(self.modelpath + self.sim.stdout_filename))
+
         with open(self.modelpath + self.sim.stdout_filename, encoding='utf-8') as fid:
             stdout = fid.read()
         self.res = errorcode, stdout
 
     def stop(self):
-        subprocess.Popen("TASKKILL /F /PID {pid} /T".format(pid=self.process.pid))
+        if hasattr(self, 'process'):
+            subprocess.Popen("TASKKILL /F /PID {pid} /T".format(pid=self.process.pid))
 
 
 class PBSClusterSimulationHost(SimulationResource, SSHClient):
-    def __init__(self, simulation, host, username, password, port=22):
+    def __init__(self, simulation, resource, host, username, password, port=22):
         SimulationResource.__init__(self, simulation)
         SSHClient.__init__(self, host, username, password, port=port)
         self.pbsjob = SSHPBSJob(host, username, password, port)
+        self.resource = resource
 
     hawc2exe = property(lambda self : os.path.basename(self.sim.hawc2exe))
 
 
-    def _prepare_simulation(self):
+    def _prepare_simulation(self, input_files):
         with self:
             self.execute(["mkdir -p .hawc2launcher/%s" % self.simulation_id], verbose=False)
             self.execute("mkdir -p %s%s" % (self.tmp_modelpath, os.path.dirname(self.log_filename)))
 
-            for src in self._input_sources():
-                for src_file in glob.glob(os.path.join(self.modelpath, src)):
-                    dst = (self.tmp_modelpath + os.path.relpath(src_file, self.modelpath)).replace("\\", "/")
+            for src_file in input_files:
+                    dst = unix_path(self.tmp_modelpath + os.path.relpath(src_file, self.modelpath))
                     self.execute("mkdir -p %s" % os.path.dirname(dst), verbose=False)
                     self.upload(src_file, dst, verbose=False)
                     ##assert self.ssh.file_exists(dst)
@@ -446,23 +493,27 @@ class PBSClusterSimulationHost(SimulationResource, SSHClient):
             f = io.StringIO(self.pbsjobfile())
             f.seek(0)
             self.upload(f, self.tmp_modelpath + "%s.in" % self.simulation_id)
-            self.execute("mkdir -p .hawc2launcher/%s/stdout" % self.simulation_id)
+            self.execute("mkdir -p .hawc2launcher/%s/%s" % (self.simulation_id, os.path.dirname(self.stdout_filename)))
             remote_log_filename = "%s%s" % (self.tmp_modelpath, self.log_filename)
             self.execute("rm -f %s" % remote_log_filename)
 
 
 
-    def _finish_simulation(self):
+    def _finish_simulation(self, output_files):
         with self:
-            for dst in self._output_sources():
-
-                src = os.path.join(self.tmp_modelpath, dst).replace("\\", "/")
-                for src_file in self.glob(src):
+            for src_file in output_files:
+                try:
                     dst_file = os.path.join(self.modelpath, os.path.relpath(src_file, self.tmp_modelpath))
                     os.makedirs(os.path.dirname(dst_file), exist_ok=True)
                     self.download(src_file, dst_file, verbose=False)
-            self.execute('rm -r .hawc2launcher/%s' % self.simulation_id)
-            self.execute('rm .hawc2launcher/status_%s' % self.simulation_id)
+                except ValueError as e:
+                    print(self.modelpath, src_file, self.tmp_modelpath)
+                    raise
+            try:
+                self.execute('rm -r .hawc2launcher/%s' % self.simulation_id)
+                self.execute('rm .hawc2launcher/status_%s' % self.simulation_id)
+            except Exception:
+                pass
 
 
     def _simulate(self):
@@ -475,10 +526,10 @@ class PBSClusterSimulationHost(SimulationResource, SSHClient):
             #self.__update_logFile_status()
             time.sleep(sleeptime)
 
-        local_out_file = self.modelpath + self.sim.stdout_filename
+        local_out_file = self.modelpath + self.stdout_filename
         with self:
             try:
-                self.download(self.tmp_modelpath + self.sim.stdout_filename, local_out_file)
+                self.download(self.tmp_modelpath + self.stdout_filename, local_out_file)
                 with open(local_out_file) as fid:
                     _, self.stdout, returncode_str, _ = fid.read().split("---------------------")
                     self.returncode = returncode_str.strip() != "0"
@@ -488,7 +539,7 @@ class PBSClusterSimulationHost(SimulationResource, SSHClient):
             try:
                 self.download(self.tmp_modelpath + self.log_filename, self.modelpath + self.log_filename)
             except Exception:
-                raise Exception ("Logfile not found")
+                raise Warning ("Logfile not found", self.tmp_modelpath + self.log_filename)
         self.sim.logFile = LogFile.from_htcfile(self.htcFile, self.modelpath)
 
 
@@ -536,10 +587,10 @@ class PBSClusterSimulationHost(SimulationResource, SSHClient):
 
     def pbsjobfile(self):
         cp_back = ""
-        for folder in set([os.path.relpath(os.path.dirname(f)) for f in self.htcFile.output_files() + self.htcFile.turbulence_files()]):
+        for folder in set([unix_path(os.path.relpath(os.path.dirname(f))) for f in self.htcFile.output_files() + self.htcFile.turbulence_files()]):
             cp_back += "mkdir -p $PBS_O_WORKDIR/%s/. \n" % folder
             cp_back += "cp -R -f %s/. $PBS_O_WORKDIR/%s/.\n" % (folder, folder)
-        rel_htcfilename = os.path.relpath(self.htcFile.filename, self.modelpath).replace("\\", "/")
+        rel_htcfilename = unix_path(os.path.relpath(self.htcFile.filename, self.modelpath))
         return """
 ### Standard Output
 #PBS -N h2l_%s
diff --git a/wetb/hawc2/tests/test_htc_file.py b/wetb/hawc2/tests/test_htc_file.py
index ba878df683153fe88a28c756065865d75faae290..f0ecc3d0611190d02287d5cd8cf53f6dfa18aee6 100644
--- a/wetb/hawc2/tests/test_htc_file.py
+++ b/wetb/hawc2/tests/test_htc_file.py
@@ -118,9 +118,9 @@ class TestHtcFile(unittest.TestCase):
         htcfile.add_mann_turbulence(30.1, 1.1, 3.3, 102, False)
         s = """begin mann;
     create_turb_parameters\t30.1 1.1 3.3 102 0;\tL, alfaeps, gamma, seed, highfrq compensation
-    filename_u\t./turb/turb_wsp10_s0102u.bin;
-    filename_v\t./turb/turb_wsp10_s0102v.bin;
-    filename_w\t./turb/turb_wsp10_s0102w.bin;
+    filename_u\t./turb/l30.1_ae1.10_g3.3_h0_4096x32x32_1.465x3.12x3.12_s0102u.turb;
+    filename_v\t./turb/l30.1_ae1.10_g3.3_h0_4096x32x32_1.465x3.12x3.12_s0102v.turb;
+    filename_w\t./turb/l30.1_ae1.10_g3.3_h0_4096x32x32_1.465x3.12x3.12_s0102w.turb;
     box_dim_u\t4096 1.4652;
     box_dim_v\t32 3.2258;
     box_dim_w\t32 3.2258;
@@ -212,7 +212,7 @@ class TestHtcFile(unittest.TestCase):
         self.assertIn(os.path.abspath(self.testfilepath + 'orientation.dat'), [os.path.abspath(f) for f in htcfile.input_files()])
         self.assertIn('./data/NREL_5MW_st1.txt', htcfile.input_files())
         self.assertEqual(str(htcfile).count("exit"), 1)
-        self.assertIn('filename\t./res/oc4_p2_Load_case_eq;', str(htcfile))
+        self.assertIn('filename\t./res/oc4_p2_load_case_eq;', str(htcfile))
 
     def test_tjul_example(self):
         htcfile = HTCFile(self.testfilepath + "./tjul.htc", ".")
diff --git a/wetb/prepost/GenerateDLCs.py b/wetb/prepost/GenerateDLCs.py
index 3bcf9bd2d5db7d36c437f551bc6f0994da094196..4563ccbe28deb9db7f929de67cc4cecdaf270b5b 100644
--- a/wetb/prepost/GenerateDLCs.py
+++ b/wetb/prepost/GenerateDLCs.py
@@ -14,6 +14,8 @@ from __future__ import absolute_import
 from numpy import floor, arctan, pi
 import pandas as pd
 import xlrd
+from argparse import ArgumentParser
+import os
 
 
 def multi_for(iterables):
@@ -230,7 +232,9 @@ class GenerateDLCCases(GeneralDLC):
             self.add_formulas(dlc, general_functions)
             self.eval_formulas(dlc)
             df = pd.DataFrame(dlc)
-            df.to_excel(folder+sheet.name+'.xls', index=False)
+            if not os.path.exists(folder):
+                os.makedirs(folder)
+            df.to_excel(os.path.join(folder, sheet.name+'.xlsx'), index=False)
 
 
 class RunTest():
@@ -257,6 +261,12 @@ class RunTest():
             assert_frame_equal(book1, book2, check_dtype=False)
 
 if __name__ == '__main__':
+
+    parser = ArgumentParser(description="generator of DLB spreadsheets")
+    parser.add_argument('--master', type=str, default='DLCs.xlsx', action='store',
+                        dest='filename', help='Master spreadsheet file')
+    parser.add_argument('--folder', type=str, default='', action='store',
+                        dest='folder', help='Destination folder name')
+    opt = parser.parse_args()
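+    # example invocation (file and folder names are illustrative):
+    #   python GenerateDLCs.py --master DLCs.xlsx --folder DLCs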
     DLB = GenerateDLCCases()
-    DLB.execute()
-    pass
+    DLB.execute(filename=opt.filename, folder=opt.folder)
\ No newline at end of file
diff --git a/wetb/prepost/Simulations.py b/wetb/prepost/Simulations.py
index 89d526576e273952f8179718a5513f973501c8f0..6e49517283da654ffcaa07a163b12f7f59f751fd 100755
--- a/wetb/prepost/Simulations.py
+++ b/wetb/prepost/Simulations.py
@@ -1043,10 +1043,11 @@ def launch(cases, runmethod='local', verbose=False, copyback_turb=True,
     elif runmethod == 'none':
         pass
     else:
-        msg = 'unsupported runmethod, valid options: local, thyra, gorm or opt'
+        msg = 'unsupported runmethod, valid options: local, local-script, ' \
+              'linux-script, windows-script, local-ram, none'
         raise ValueError(msg)
 
-def post_launch(cases, save_iter=False):
+def post_launch(cases, save_iter=False, silent=False):
     """
     Do some basics checks: do all launched cases have a result and LOG file
     and are there any errors in the LOG files?
@@ -1101,8 +1102,9 @@ def post_launch(cases, save_iter=False):
     nr_tot = len(cases)
 
     tmp = list(cases.keys())[0]
-    print('checking logs, path (from a random item in cases):')
-    print(os.path.join(run_dir, log_dir))
+    if not silent:
+        print('checking logs, path (from a random item in cases):')
+        print(os.path.join(run_dir, log_dir))
 
     for k in sorted(cases.keys()):
         # a case could not have a result, but a log file might still exist
@@ -1117,12 +1119,15 @@ def post_launch(cases, save_iter=False):
         errorlogs.PathToLogs = os.path.join(run_dir, log_dir, kk)
         try:
             errorlogs.check(save_iter=save_iter)
-            print('checking logfile progress: ' + str(nr) + '/' + str(nr_tot))
+            if not silent:
+                print('checking logfile progress: % 6i/% 6i' % (nr, nr_tot))
         except IOError:
-            print('           no logfile for:  %s' % (errorlogs.PathToLogs))
+            if not silent:
+                print('           no logfile for:  %s' % (errorlogs.PathToLogs))
         except Exception as e:
-            print('  log analysis failed for: %s' % kk)
-            print(e)
+            if not silent:
+                print('  log analysis failed for: %s' % kk)
+                print(e)
         nr += 1
 
         # if simulation did not ended correctly, put it on the fail list
@@ -1161,6 +1166,29 @@ def post_launch(cases, save_iter=False):
 
     return cases_fail
 
+
+def copy_pbs_in_failedcases(cases_fail, pbs_fail='pbs_in_fail', silent=True):
+    """
+    Copy all the pbs_in files from failed cases to a new directory so it
+    is easy to re-launch them
+    """
+    if not silent:
+        print('Following failed cases pbs_in files are copied:')
+    for cname in cases_fail.keys():
+        case = cases_fail[cname]
+        pbs_in_fname = '%s.p' % (case['[case_id]'])
+        run_dir = case['[run_dir]']
+
+        src = os.path.join(run_dir, case['[pbs_in_dir]'], pbs_in_fname)
+
+        pbs_in_dir_fail = case['[pbs_in_dir]'].replace('pbs_in', pbs_fail)
+        dst = os.path.join(run_dir, pbs_in_dir_fail, pbs_in_fname)
+
+        if not silent:
+            print(dst)
+        if not os.path.exists(os.path.dirname(dst)):
+            os.makedirs(os.path.dirname(dst))
+        shutil.copy2(src, dst)
+
+
 def logcheck_case(errorlogs, cases, case, silent=False):
     """
     Check logfile of a single case
@@ -1721,9 +1749,9 @@ class HtcMaster(object):
         # load the file:
         if not self.silent:
             print('loading master: ' + fpath)
-        FILE = open(fpath, 'r')
-        lines = FILE.readlines()
-        FILE.close()
+
+        with open(fpath, 'r') as f:
+            lines = f.readlines()
 
         # regex for finding all tags in a line
         regex = re.compile('(\\[.*?\\])')
@@ -1919,7 +1947,6 @@ class PBS(object):
         # in case you want to redirect stdout to /dev/nul
 #        self.wine_appendix = '> /dev/null 2>&1'
         self.wine_appendix = ''
-        self.wine_dir = '/home/dave/.wine32/drive_c/bin'
         # /dev/shm should be the RAM of the cluster
 #        self.node_run_root = '/dev/shm'
         self.node_run_root = '/scratch'
@@ -2280,11 +2307,6 @@ class PBS(object):
         self.pbs += 'echo ""\n'
         self.pbs += 'echo "Execute commands on scratch nodes"\n'
         self.pbs += 'cd %s/$USER/$PBS_JOBID\n' % self.node_run_root
-#        # also copy all the HAWC2 exe's to the scratch dir
-#        self.pbs += "cp -R %s/* ./\n" % self.wine_dir
-#        # custom name hawc2 exe
-#        self.h2_new = tag_dict['[hawc2_exe]'] + '-' + jobid + '.exe'
-#        self.pbs += "mv %s.exe %s\n" % (tag_dict['[hawc2_exe]'], self.h2_new)
 
     def ending(self, pbs_path):
         """
@@ -2669,7 +2691,7 @@ class ErrorLogs(object):
             if self.cases is not None:
                 case = self.cases[fname.replace('.log', '.htc')]
                 dt = float(case['[dt_sim]'])
-                time_steps = float(case['[time_stop]']) / dt
+                time_steps = int(float(case['[time_stop]']) / dt)
                 iterations = np.ndarray( (time_steps+1,3), dtype=np.float32 )
             else:
                 iterations = np.ndarray( (len(lines),3), dtype=np.float32 )
@@ -3635,7 +3657,7 @@ class Cases(object):
         launch(self.cases, runmethod=runmethod, verbose=verbose, silent=silent,
                check_log=check_log, copyback_turb=copyback_turb)
 
-    def post_launch(self, save_iter=False):
+    def post_launch(self, save_iter=False, copy_pbs_failed=True):
         """
         Post Launching Maintenance
 
@@ -3645,6 +3667,10 @@ class Cases(object):
         # TODO: integrate global post_launch in here
         self.cases_fail = post_launch(self.cases, save_iter=save_iter)
 
+        if copy_pbs_failed:
+            copy_pbs_in_failedcases(self.cases_fail, pbs_fail='pbs_in_fail',
+                                    silent=self.silent)
+
         if self.rem_failed:
             self.remove_failed()
 
@@ -4662,7 +4688,7 @@ class Cases(object):
 
             # we assume the run_dir (root) is the same every where
             run_dir = self.cases[case]['[run_dir]']
-            fname = os.path.join(run_dir, 'htc', 'DLCs', 'dlc_config.xlsx')
+            fname = os.path.join(run_dir, 'dlc_config.xlsx')
             dlc_cfg = dlc.DLCHighLevel(fname, shape_k=wb.shape_k)
             # if you need all DLCs, make sure to have %s in the file name
             dlc_cfg.res_folder = os.path.join(run_dir, res_dir, dlc_folder)
@@ -4802,7 +4828,7 @@ class Cases(object):
 
             # we assume the run_dir (root) is the same every where
             run_dir = self.cases[list(self.cases.keys())[0]]['[run_dir]']
-            fname = os.path.join(run_dir, 'htc', 'DLCs', 'dlc_config.xlsx')
+            fname = os.path.join(run_dir, 'dlc_config.xlsx')
             dlc_cfg = dlc.DLCHighLevel(fname, shape_k=wb.shape_k)
             # if you need all DLCs, make sure to have %s in the file name
             dlc_cfg.res_folder = os.path.join(run_dir, res_dir, dlc_folder)
@@ -5127,7 +5153,7 @@ class Cases(object):
                                                        closed_contour[:,ich],Nx)
                     es = np.atleast_2d(np.array(extra_sensor[:,1])).T                                        
                     closed_contour_int = np.append(closed_contour_int,es,axis=1)
-                
+
             if int_env:
                 envelope[ch[0]] = closed_contour_int
             else:
@@ -5135,6 +5161,39 @@ class Cases(object):
                 
         return envelope
     
+    def int_envelope(ch1,ch2,Nx):
+        # Function to interpolate envelopes and output arrays of same length
+
+        # Number of points is defined by Nx + 1, where the + 1 is needed to
+        # close the curve
+
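+        # illustrative example: for two 1D load channels Mx and My that trace
+        # a closed envelope, int_envelope(Mx, My, Nx=100) resamples the upper
+        # and lower branches onto a common grid and returns an (Nx+1, 2)
+        # array, so envelopes of different cases can be compared point by
+        # point.
+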
+        upper = []
+        lower = []
+
+        indmax = np.argmax(ch1)
+        indmin = np.argmin(ch1)
+        if indmax > indmin:
+            lower = np.array([ch1[indmin:indmax+1],ch2[indmin:indmax+1]]).T
+            upper = np.concatenate((np.array([ch1[indmax:],ch2[indmax:]]).T,\
+                            np.array([ch1[:indmin+1],ch2[:indmin+1]]).T),axis=0)
+        else:
+            upper = np.array([ch1[indmax:indmin+1],ch2[indmax:indmin+1]]).T
+            lower = np.concatenate((np.array([ch1[indmin:],ch2[indmin:]]).T,\
+                                np.array([ch1[:indmax+1],ch2[:indmax+1]]).T),axis=0)
+
+
+        int_1 = np.linspace(min(min(upper[:,0]),min(lower[:,0])),\
+                            max(max(upper[:,0]),max(lower[:,0])),int(Nx/2)+1)
+        upper = np.flipud(upper)
+        int_2_up = np.interp(int_1,np.array(upper[:,0]),np.array(upper[:,1]))
+        int_2_low = np.interp(int_1,np.array(lower[:,0]),np.array(lower[:,1]))
+
+        int_env = np.concatenate((np.array([int_1[:-1],int_2_up[:-1]]).T,\
+                                np.array([int_1[::-1],int_2_low[::-1]]).T),axis=0)
+
+        return int_env
+
     def envelope(self, silent=False, ch_list=[], append=''):
         """
         Calculate envelopes and save them in a table.
@@ -5304,37 +5363,68 @@ class Results(object):
         return M_x_equiv
 
 
-class ManTurb64(object):
+class MannTurb64(prepost.PBSScript):
     """
     alfaeps, L, gamma, seed, nr_u, nr_v, nr_w, du, dv, dw high_freq_comp
     mann_turb_x64.exe fname 1.0 29.4 3.0 1209 256 32 32 2.0 5 5 true
     """
 
-    def __init__(self):
-        self.man64_exe = 'mann_turb_x64.exe'
-        self.wine = 'WINEARCH=win64 WINEPREFIX=~/.wine64 wine'
-
-    def run():
-        pass
+    def __init__(self, silent=False):
+        super(MannTurb64, self).__init__()
+        self.exe = 'time wine mann_turb_x64.exe'
+        # PBS configuration
+        self.umask = '003'
+        self.walltime = '00:59:59'
+        self.queue = 'workq'
+        self.lnodes = '1'
+        self.ppn = '1'
+        self.silent = silent
+        self.pbs_in_dir = 'pbs_in_turb/'
 
-    def gen_pbs(cases):
+    def gen_pbs(self, cases):
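+        """Write one PBS launch script per turbulence case to self.pbs_in_dir
+        ('pbs_in_turb/' by default), calling mann_turb_x64.exe with the
+        parameters taken from the case tags (see rpl below)."""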
 
         case0 = cases[list(cases.keys())[0]]
-        pbs = prepost.PBSScript()
-        # make sure the path's end with a trailing separator
-        pbs.pbsworkdir = os.path.join(case0['[run_dir]'], '')
-        pbs.path_pbs_e = os.path.join(case0['[pbs_out_dir]'], '')
-        pbs.path_pbs_o = os.path.join(case0['[pbs_out_dir]'], '')
-        pbs.path_pbs_i = os.path.join(case0['[pbs_in_dir]'], '')
-        pbs.check_dirs()
+        # make sure the paths end with a trailing separator
+        self.pbsworkdir = os.path.join(case0['[run_dir]'], '')
+        if not self.silent:
+            print('\nStart creating PBS files for turbulence with Mann64...')
         for cname, case in cases.items():
-            base = case['[case_id]']
-            pbs.path_pbs_e = os.path.join(case['[pbs_out_dir]'], base + '.err')
-            pbs.path_pbs_o = os.path.join(case['[pbs_out_dir]'], base + '.out')
-            pbs.path_pbs_i = os.path.join(case['[pbs_in_dir]'], base + '.pbs')
 
-            pbs.execute()
-            pbs.create()
+            # only relevant for cases with turbulence
+            if '[tu_model]' in case and int(case['[tu_model]']) == 0:
+                continue
+            if '[Turb base name]' not in case:
+                continue
+
+            base_name = case['[Turb base name]']
+            # pbs_in/out dir can contain subdirs, only take the top-level directory
+            out_base = misc.path_split_dirs(case['[pbs_out_dir]'])[0]
+            turb = case['[turb_dir]']
+
+            self.path_pbs_e = os.path.join(out_base, turb, base_name + '.err')
+            self.path_pbs_o = os.path.join(out_base, turb, base_name + '.out')
+            self.path_pbs_i = os.path.join(self.pbs_in_dir, base_name + '.p')
+
+            if case['[turb_db_dir]'] is not None:
+                self.prelude = 'cd %s' % case['[turb_db_dir]']
+            else:
+                self.prelude = 'cd %s' % case['[turb_dir]']
+
+            # alfaeps, L, gamma, seed, nr_u, nr_v, nr_w, du, dv, dw high_freq_comp
+            rpl = (float(case['[AlfaEpsilon]']),
+                   float(case['[L_mann]']),
+                   float(case['[Gamma]']),
+                   int(case['[tu_seed]']),
+                   int(case['[turb_nr_u]']),
+                   int(case['[turb_nr_v]']),
+                   int(case['[turb_nr_w]']),
+                   float(case['[turb_dx]']),
+                   float(case['[turb_dy]']),
+                   float(case['[turb_dz]']),
+                   int(case['[high_freq_comp]']))
+            params = '%1.6f %1.6f %1.6f %i %i %i %i %1.4f %1.4f %1.4f %i' % rpl
+            self.execution = '%s %s %s' % (self.exe, base_name, params)
+            self.create(check_dirs=True)
 
 
 def eigenbody(cases, debug=False):
diff --git a/wetb/prepost/dlcdefs.py b/wetb/prepost/dlcdefs.py
index 747e775320bb335f4f80eaed9063400ff4c4ca81..e7ef0a6a43b75347d75473d136007016c6f3032c 100644
--- a/wetb/prepost/dlcdefs.py
+++ b/wetb/prepost/dlcdefs.py
@@ -46,7 +46,7 @@ def configure_dirs(verbose=False):
         raise ValueError('Could not find master file in htc/_master')
     MASTERFILE = master
     P_MASTERFILE = os.path.join(P_SOURCE, 'htc%s_master%s' % (os.sep, os.sep))
-    POST_DIR = os.path.join(p_run_root, PROJECT, 'prepost-data%s' % os.sep)
+    POST_DIR = os.path.join(p_run_root, PROJECT, sim_id, 'prepost-data%s' % os.sep)
 
     if verbose:
         print('='*79)
@@ -208,6 +208,7 @@ def tags_defaults(master):
     master.tags['[log_dir]']       = 'logfiles/'
     master.tags['[meander_dir]']   = False
     master.tags['[opt_dir]']       = False
+    master.tags['[pbs_in_dir]']    = 'pbs_in/'
     master.tags['[pbs_out_dir]']   = 'pbs_out/'
     master.tags['[res_dir]']       = 'res/'
     master.tags['[iter_dir]']      = 'iter/'
diff --git a/wetb/prepost/dlcplots.py b/wetb/prepost/dlcplots.py
index 1d5b3e12fd6703793139c665d6b62281b9cb694e..8594b90e0ab00a14f5d7ffe280178f0c00e4970c 100644
--- a/wetb/prepost/dlcplots.py
+++ b/wetb/prepost/dlcplots.py
@@ -12,8 +12,6 @@ from builtins import str
 from future import standard_library
 standard_library.install_aliases()
 
-
-
 #print(*objects, sep=' ', end='\n', file=sys.stdout)
 
 import os
@@ -47,7 +45,7 @@ plt.rc('xtick', labelsize=10)
 plt.rc('ytick', labelsize=10)
 plt.rc('axes', labelsize=12)
 # do not use tex on Gorm
-if not socket.gethostname()[:2] == 'g-':
+if socket.gethostname()[:2] not in ['g-', 'je']:
     plt.rc('text', usetex=True)
 plt.rc('legend', fontsize=11)
 plt.rc('legend', numpoints=1)
diff --git a/wetb/prepost/dlctemplate.py b/wetb/prepost/dlctemplate.py
index b6240f46840503754662d6b80b23c03bee436105..ed8f40472c932572ba0837ac9ff7e4b5d6ff2db4 100755
--- a/wetb/prepost/dlctemplate.py
+++ b/wetb/prepost/dlctemplate.py
@@ -14,8 +14,6 @@ from builtins import range
 from future import standard_library
 standard_library.install_aliases()
 
-
-
 import os
 import socket
 from argparse import ArgumentParser
@@ -34,7 +32,11 @@ plt.rc('xtick', labelsize=10)
 plt.rc('ytick', labelsize=10)
 plt.rc('axes', labelsize=12)
 # on Gorm tex printing doesn't work
-if not socket.gethostname()[:2] == 'g-':
+if socket.gethostname()[:2] == 'g-':
+    RUNMETHOD = 'gorm'
+elif socket.gethostname()[:4] == 'jess':
+    RUNMETHOD = 'jess'
+else:
+    # make sure RUNMETHOD is always defined; fall back to the previous default
+    RUNMETHOD = 'gorm'
     plt.rc('text', usetex=True)
 plt.rc('legend', fontsize=11)
 plt.rc('legend', numpoints=1)
@@ -111,6 +113,22 @@ def master_tags(sim_id, runmethod='local', silent=False, verbose=False):
     master.tags['[model_zip]'] = PROJECT
     master.tags['[model_zip]'] += '_' + master.tags['[sim_id]'] + '.zip'
     # -------------------------------------------------------------------------
+    # FIXME: this is very ugly. We should read default values set in the htc
+    # master file with the HAWC2Wrapper !!
+    # default tags turbulence generator (required for 64-bit Mann generator)
+    # alfaeps, L, gamma, seed, nr_u, nr_v, nr_w, du, dv, dw high_freq_comp
+    master.tags['[AlfaEpsilon]'] = 1.0
+    master.tags['[L_mann]'] = 29.4
+    master.tags['[Gamma]'] = 3.0
+    master.tags['[tu_seed]'] = 0
+    master.tags['[turb_nr_u]'] = 8192
+    master.tags['[turb_nr_v]'] = 32
+    master.tags['[turb_nr_w]'] = 32
+    master.tags['[turb_dx]'] = 1
+    master.tags['[turb_dy]'] = 6.5
+    master.tags['[turb_dz]'] = 6.5
+    master.tags['[high_freq_comp]'] = 1
+    # -------------------------------------------------------------------------
 
     return master
 
@@ -164,7 +182,8 @@ def variable_tag_func(master, case_id_short=False):
 ### PRE- POST
 # =============================================================================
 
-def launch_dlcs_excel(sim_id, silent=False):
+def launch_dlcs_excel(sim_id, silent=False, verbose=False, pbs_turb=True,
+                      runmethod=None, write_htc=True):
     """
     Launch load cases defined in Excel files
     """
@@ -197,12 +216,11 @@ def launch_dlcs_excel(sim_id, silent=False):
     for opt in opt_tags:
         opt['[zip_root_files]'] = f_ziproot
 
-    runmethod = 'gorm'
-#    runmethod = 'local-script'
-#    runmethod = 'windows-script'
-#    runmethod = 'jess'
+    if runmethod is None:
+        runmethod = RUNMETHOD
+
     master = master_tags(sim_id, runmethod=runmethod, silent=silent,
-                         verbose=False)
+                         verbose=verbose)
     master.tags['[sim_id]'] = sim_id
     master.output_dirs.append('[Case folder]')
     master.output_dirs.append('[Case id.]')
@@ -217,12 +235,18 @@ def launch_dlcs_excel(sim_id, silent=False):
     # variable_tag func is not required because everything is already done
     # in dlcdefs.excel_stabcon
     no_variable_tag_func = None
-    sim.prepare_launch(iter_dict, opt_tags, master, no_variable_tag_func,
-                       write_htc=True, runmethod=runmethod, verbose=False,
-                       copyback_turb=True, msg='', update_cases=False,
-                       ignore_non_unique=False, run_only_new=False,
-                       pbs_fname_appendix=False, short_job_names=False,
-                       silent=silent)
+    cases = sim.prepare_launch(iter_dict, opt_tags, master, no_variable_tag_func,
+                               write_htc=write_htc, runmethod=runmethod,
+                               copyback_turb=True, update_cases=False, msg='',
+                               ignore_non_unique=False, run_only_new=False,
+                               pbs_fname_appendix=False, short_job_names=False,
+                               silent=silent, verbose=verbose)
+
+    if pbs_turb:
+        # to avoid confusing HAWC2 simulations and Mann64 generator PBS files,
+        # MannTurb64 places PBS launch scripts in a "pbs_in_turb" folder
+        mann64 = sim.MannTurb64(silent=silent)
+        mann64.gen_pbs(cases)
 
 
 def launch_param(sim_id):
@@ -267,11 +291,14 @@ def launch_param(sim_id):
 
 def post_launch(sim_id, statistics=True, rem_failed=True, check_logs=True,
                 force_dir=False, update=False, saveinterval=2000, csv=False,
-                m=[1, 3, 4, 5, 6, 8, 10, 12, 14], neq=1e6, no_bins=46,
+                m=[1, 3, 4, 5, 6, 8, 10, 12, 14], neq=None, no_bins=46,
                 years=20.0, fatigue=True, nn_twb=1, nn_twt=20, nn_blr=4, A=None,
                 save_new_sigs=False, envelopeturbine=False, envelopeblade=False,
                 save_iter=False, AEP=False):
 
+    if neq is not None and neq < 0:
+        neq = None
+
     # =========================================================================
     # check logfiles, results files, pbs output files
     # logfile analysis is written to a csv file in logfiles directory
@@ -395,7 +422,8 @@ if __name__ == '__main__':
     parser.add_argument('--check_logs', action='store_true', default=False,
                         dest='check_logs', help='check the log files')
     parser.add_argument('--stats', action='store_true', default=False,
-                        dest='stats', help='calculate statistics')
+                        dest='stats', help='calculate statistics and 1Hz '
+                                           'equivalent loads')
     parser.add_argument('--fatigue', action='store_true', default=False,
                         dest='fatigue', help='calculate Leq for a full DLC')
     parser.add_argument('--AEP', action='store_true', default=False,
@@ -407,8 +435,10 @@ if __name__ == '__main__':
                         dest='years', help='Total life time in years')
     parser.add_argument('--no_bins', type=float, default=46.0, action='store',
                         dest='no_bins', help='Number of bins for fatigue loads')
-    parser.add_argument('--neq', type=float, default=1e6, action='store',
-                        dest='neq', help='Equivalent cycles neq')
+    parser.add_argument('--neq', type=float, default=-1.0, action='store',
+                        dest='neq', help='Equivalent cycles neq, default 1 Hz '
+                                         'equivalent load (neq = simulation '
+                                         'duration in seconds)')
     parser.add_argument('--nn_twt', type=float, default=20, action='store',
                         dest='nn_twt', help='Node number tower top')
     parser.add_argument('--nn_blr', type=float, default=4, action='store',
@@ -425,12 +455,6 @@ if __name__ == '__main__':
                         dest='envelopeturbine', help='Compute envelopeturbine')
     opt = parser.parse_args()
 
-    # auto configure directories: assume you are running in the root of the
-    # relevant HAWC2 model
-    # and assume we are in a simulation case of a certain turbine/project
-    P_RUN, P_SOURCE, PROJECT, sim_id, P_MASTERFILE, MASTERFILE, POST_DIR \
-        = dlcdefs.configure_dirs(verbose=True)
-
     # TODO: use arguments to determine the scenario:
     # --plots, --report, --...
 
@@ -457,10 +481,16 @@ if __name__ == '__main__':
 #                saveinterval=2000, csv=True, fatigue_cycles=True, fatigue=False)
     # -------------------------------------------------------------------------
 
+    # auto configure directories: assume you are running in the root of the
+    # relevant HAWC2 model
+    # and assume we are in a simulation case of a certain turbine/project
+    P_RUN, P_SOURCE, PROJECT, sim_id, P_MASTERFILE, MASTERFILE, POST_DIR \
+        = dlcdefs.configure_dirs(verbose=True)
+
     # create HTC files and PBS launch scripts (*.p)
     if opt.prep:
         print('Start creating all the htc files and pbs_in files...')
-        launch_dlcs_excel(sim_id)
+        launch_dlcs_excel(sim_id, silent=False)
     # post processing: check log files, calculate statistics
     if opt.check_logs or opt.stats or opt.fatigue or opt.envelopeblade or opt.envelopeturbine:
         post_launch(sim_id, check_logs=opt.check_logs, update=False,
@@ -473,5 +503,5 @@ if __name__ == '__main__':
                     envelopeblade=opt.envelopeblade)
     if opt.dlcplot:
         sim_ids = [sim_id]
-        figdir = os.path.join(P_RUN, '..', 'figures/%s' % '-'.join(sim_ids))
+        figdir = os.path.join(POST_DIR, 'figures/%s' % '-'.join(sim_ids))
         dlcplots.plot_stats2(sim_ids, [POST_DIR], fig_dir_base=figdir)
diff --git a/wetb/prepost/misc.py b/wetb/prepost/misc.py
index 0c1ab9b0d9c3b95738847d0c5d7127027d453d43..73aca12551262ff28b8124ab3f3d6deb5f8a3032 100644
--- a/wetb/prepost/misc.py
+++ b/wetb/prepost/misc.py
@@ -68,6 +68,16 @@ class Logger(object):
         self.logFile.flush()
 
 
+def path_split_dirs(path):
+    """
+    Return a list with dirnames. Ignore any leading "./"
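+
+    Example (assuming a '/' separator, i.e. os.path.sep on Linux):
+        path_split_dirs('./pbs_out/dlc01_demos/') -> ['pbs_out', 'dlc01_demos', '']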
+    """
+    dirs = path.split(os.path.sep)
+    if dirs[0] == '.':
+        dirs.pop(0)
+    return dirs
+
+
 def print_both(f, text, end='\n'):
     """
     Print both to a file and the console
diff --git a/wetb/prepost/prepost.py b/wetb/prepost/prepost.py
index bb6a864633a2f837c53622ea8b5793f2a0395367..c706aa79cb69e6f97edcc2a9e50cc8763c5a3a8b 100644
--- a/wetb/prepost/prepost.py
+++ b/wetb/prepost/prepost.py
@@ -12,9 +12,6 @@ from io import open
 from future import standard_library
 standard_library.install_aliases()
 
-
-
-
 import os
 import copy
 
@@ -37,6 +34,9 @@ class PBSScript(object):
 ### Queue name
 #PBS -q [queue]
 
+### #PBS -a [start_time]
+### #PBS -W depend=afterany:[job_id]
+
 ### Browse to current working dir
 echo ""
 cd $PBS_O_WORKDIR
@@ -75,13 +75,13 @@ exit
     def __init__(self):
 
         # PBS configuration
-        self.jobname = None
+        self.jobname = 'no_name_job'
         # relative paths with respect to PBS working directory
         self.path_pbs_o = 'pbs_out/dummy.out'
         self.path_pbs_e = 'pbs_out/dummy.err'
         self.path_pbs_i = 'pbs_in/dummy.pbs'
         # absolute path of the PBS working directory
-        self.pbsworkdir = None
+        self.pbsworkdir = './'
         self.umask = '003'
         self.walltime = '00:59:59'
         self.queue = 'workq'
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/demo_dlc_remote.zip b/wetb/prepost/tests/data/demo_dlc/ref/demo_dlc_remote.zip
index 60fe434ad3dc15cf5e1dd1977c630107c611ac55..3c02a1d7a11acaeff10fdae72416c4856fcbc4bc 100644
Binary files a/wetb/prepost/tests/data/demo_dlc/ref/demo_dlc_remote.zip and b/wetb/prepost/tests/data/demo_dlc/ref/demo_dlc_remote.zip differ
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx b/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx
index 1f4c2311367ccea9d06f57db32793e8948f0e959..246cbf5e6e5c94b732349be0a6a09336fb1f715f 100755
Binary files a/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx and b/wetb/prepost/tests/data/demo_dlc/ref/htc/DLCs/dlc01_demos.xlsx differ
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/htc/_master/demo_dlc_master_A0001.htc b/wetb/prepost/tests/data/demo_dlc/ref/htc/_master/demo_dlc_master_A0001.htc
index 6e956f80d62257709ddd129408b8d77305da4339..bc61a68f9b4130bdf42526dec5101aeacbfeadcd 100755
--- a/wetb/prepost/tests/data/demo_dlc/ref/htc/_master/demo_dlc_master_A0001.htc
+++ b/wetb/prepost/tests/data/demo_dlc/ref/htc/_master/demo_dlc_master_A0001.htc
@@ -382,7 +382,7 @@ begin wind ;
     filename_u    ./[turb_dir][Turb base name]u.bin ;
     filename_v    ./[turb_dir][Turb base name]v.bin ;
     filename_w    ./[turb_dir][Turb base name]w.bin ;
-    box_dim_u    8192 [turb_dx] ;
+    box_dim_u    512 [turb_dx] ;
     box_dim_v    32 7.5;
     box_dim_w    32 7.5;
     std_scaling   1.0 0.7 0.5 ;
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp10_noturb.htc b/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp10_s100.htc
similarity index 98%
rename from wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp10_noturb.htc
rename to wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp10_s100.htc
index c54c20d0df50daf94a80962feaefcd924123559f..83b325ad95ebc9df30b2f10f26d1365d6aa05b65 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp10_noturb.htc
+++ b/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp10_s100.htc
@@ -4,7 +4,7 @@ begin simulation;
   solvertype   1 ;    (newmark)
   on_no_convergence continue ;
 ;  convergence_limits 1E3 1.0 1E-7 ;
-  logfile ./logfiles/dlc01_demos/dlc01_steady_wsp10_noturb.log ;
+  logfile ./logfiles/dlc01_demos/dlc01_steady_wsp10_s100.log ;
   begin newmark;
     deltat    0.02;
   end newmark;
@@ -13,11 +13,11 @@ end simulation;
 ;----------------------------------------------------------------------------------------------------------------------------------------------------------------
 begin new_htc_structure;
 ;--------------------------------------------------------------------------------------------------
-;  beam_output_file_name  ./res_eigen/dlc01_demos/dlc01_steady_wsp10_noturb/dlc01_steady_wsp10_noturb_beam.dat;
-;  body_output_file_name  ./res_eigen/dlc01_demos/dlc01_steady_wsp10_noturb/dlc01_steady_wsp10_noturb_body.dat;
-;  struct_inertia_output_file_name ./res_eigen/dlc01_demos/dlc01_steady_wsp10_noturb/dlc01_steady_wsp10_noturb_struct.dat;
-;  body_eigenanalysis_file_name ./res_eigen/dlc01_demos/dlc01_steady_wsp10_noturb/dlc01_steady_wsp10_noturb_body_eigen.dat;
-;  structure_eigenanalysis_file_name ./res_eigen/dlc01_demos/dlc01_steady_wsp10_noturb/dlc01_steady_wsp10_noturb_strc_eigen.dat;
+;  beam_output_file_name  ./res_eigen/dlc01_demos/dlc01_steady_wsp10_s100/dlc01_steady_wsp10_s100_beam.dat;
+;  body_output_file_name  ./res_eigen/dlc01_demos/dlc01_steady_wsp10_s100/dlc01_steady_wsp10_s100_body.dat;
+;  struct_inertia_output_file_name ./res_eigen/dlc01_demos/dlc01_steady_wsp10_s100/dlc01_steady_wsp10_s100_struct.dat;
+;  body_eigenanalysis_file_name ./res_eigen/dlc01_demos/dlc01_steady_wsp10_s100/dlc01_steady_wsp10_s100_body_eigen.dat;
+;  structure_eigenanalysis_file_name ./res_eigen/dlc01_demos/dlc01_steady_wsp10_s100/dlc01_steady_wsp10_s100_strc_eigen.dat;
 ;---------------------------------------------------------------------------------------------------
   begin main_body;         tower 123.6m
     name        tower ;
@@ -346,7 +346,7 @@ begin wind ;
   windfield_rotations     0 8.0 0.0 ;    yaw, tilt (positive=upflow=wind coming from below), rotation
   center_pos0             0.0 0.0 -127 ; hub heigth
   shear_format            3 0 ;
-  turb_format             0     ;  0=none, 1=mann,2=flex
+  turb_format             1     ;  0=none, 1=mann,2=flex
   tower_shadow_method     3     ;  0=none, 1=potential flow, 2=jet
   scale_time_start       20 ;
   wind_ramp_factor   0.0 20 0.8 1.0 ;
@@ -378,11 +378,11 @@ begin wind ;
 ;  wind_ramp_abs  2400.0  4200.0  0.0  -21.0 ;   wsp. after the step: 25.0
   ;
   begin mann ;
-    create_turb_parameters 29.4 1.0 3.9 0 1.0 ;      L, alfaeps, gamma, seed, highfrq compensation
-    filename_u    ./turb/noneu.bin ;
-    filename_v    ./turb/nonev.bin ;
-    filename_w    ./turb/nonew.bin ;
-    box_dim_u    8192 0.048828125 ;
+    create_turb_parameters 29.4 1.0 3.9 100 1.0 ;      L, alfaeps, gamma, seed, highfrq compensation
+    filename_u    ./turb/turb_s100_10msu.bin ;
+    filename_v    ./turb/turb_s100_10msv.bin ;
+    filename_w    ./turb/turb_s100_10msw.bin ;
+    box_dim_u    512 0.78125 ;
     box_dim_v    32 7.5;
     box_dim_w    32 7.5;
     std_scaling   1.0 0.7 0.5 ;
@@ -645,7 +645,7 @@ end dll;
 ;----------------------------------------------------------------------------------------------------------------------------------------------------------------
 ;
 begin output;
-  filename ./res/dlc01_demos/dlc01_steady_wsp10_noturb ;
+  filename ./res/dlc01_demos/dlc01_steady_wsp10_s100 ;
   time 20 40 ;
   data_format  hawc_binary;
   buffer 1 ;
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp8_noturb.htc b/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp8_noturb.htc
index 94abe5e13faf4dbd2ffcf86f0bb95df884a573a8..670f943f7bf345a12acbf4095f98e2d51b41144a 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp8_noturb.htc
+++ b/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp8_noturb.htc
@@ -382,7 +382,7 @@ begin wind ;
     filename_u    ./turb/noneu.bin ;
     filename_v    ./turb/nonev.bin ;
     filename_w    ./turb/nonew.bin ;
-    box_dim_u    8192 0.0390625 ;
+    box_dim_u    512 0.0390625 ;
     box_dim_v    32 7.5;
     box_dim_w    32 7.5;
     std_scaling   1.0 0.7 0.5 ;
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp9_noturb.htc b/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp9_noturb.htc
index 537e8ef73085b26598c46f2986c11fc8ad738464..11e58632838311500ae0549609c245cf0620ead0 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp9_noturb.htc
+++ b/wetb/prepost/tests/data/demo_dlc/ref/htc/dlc01_demos/dlc01_steady_wsp9_noturb.htc
@@ -382,7 +382,7 @@ begin wind ;
     filename_u    ./turb/noneu.bin ;
     filename_v    ./turb/nonev.bin ;
     filename_w    ./turb/nonew.bin ;
-    box_dim_u    8192 0.0439453125 ;
+    box_dim_u    512 0.0439453125 ;
     box_dim_v    32 7.5;
     box_dim_w    32 7.5;
     std_scaling   1.0 0.7 0.5 ;
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in/dlc01_demos/dlc01_steady_wsp10_noturb.p b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in/dlc01_demos/dlc01_steady_wsp10_s100.p
similarity index 80%
rename from wetb/prepost/tests/data/demo_dlc/ref/pbs_in/dlc01_demos/dlc01_steady_wsp10_noturb.p
rename to wetb/prepost/tests/data/demo_dlc/ref/pbs_in/dlc01_demos/dlc01_steady_wsp10_s100.p
index 1bc115f781b7821f729e97bddcf49683bb623c20..6a62ecceb1be436ba34a622fdb95db9f66890924 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in/dlc01_demos/dlc01_steady_wsp10_noturb.p
+++ b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in/dlc01_demos/dlc01_steady_wsp10_s100.p
@@ -1,8 +1,8 @@
 ### Standard Output 
-#PBS -N dlc01_steady_wsp10_noturb 
-#PBS -o ./pbs_out/dlc01_demos/dlc01_steady_wsp10_noturb.out
+#PBS -N dlc01_steady_wsp10_s100 
+#PBS -o ./pbs_out/dlc01_demos/dlc01_steady_wsp10_s100.out
 ### Standard Error 
-#PBS -e ./pbs_out/dlc01_demos/dlc01_steady_wsp10_noturb.err
+#PBS -e ./pbs_out/dlc01_demos/dlc01_steady_wsp10_s100.err
 #PBS -W umask=003
 ### Maximum wallclock time format HOURS:MINUTES:SECONDS
 #PBS -l walltime=04:00:00
@@ -25,9 +25,9 @@ mkdir -p htc/dlc01_demos/
 mkdir -p res/dlc01_demos/
 mkdir -p logfiles/dlc01_demos/
 mkdir -p turb/
-cp -R $PBS_O_WORKDIR/htc/dlc01_demos/dlc01_steady_wsp10_noturb.htc ./htc/dlc01_demos/
-cp -R $PBS_O_WORKDIR/../turb/none*.bin turb/ 
-time WINEARCH=win32 WINEPREFIX=~/.wine32 wine hawc2-latest ./htc/dlc01_demos/dlc01_steady_wsp10_noturb.htc  &
+cp -R $PBS_O_WORKDIR/htc/dlc01_demos/dlc01_steady_wsp10_s100.htc ./htc/dlc01_demos/
+cp -R $PBS_O_WORKDIR/../turb/turb_s100_10ms*.bin turb/ 
+time WINEARCH=win32 WINEPREFIX=~/.wine32 wine hawc2-latest ./htc/dlc01_demos/dlc01_steady_wsp10_s100.htc  &
 ### wait for jobs to finish 
 wait
 echo ""
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p
new file mode 100644
index 0000000000000000000000000000000000000000..df16aba35748c37417076913bbd9b514fce34301
--- /dev/null
+++ b/wetb/prepost/tests/data/demo_dlc/ref/pbs_in_turb/turb_s100_10ms.p
@@ -0,0 +1,49 @@
+
+### Standard Output
+#PBS -N no_name_job
+#PBS -o ./pbs_out/turb/turb_s100_10ms.out
+### Standard Error
+#PBS -e ./pbs_out/turb/turb_s100_10ms.err
+#PBS -W umask=003
+### Maximum wallclock time format HOURS:MINUTES:SECONDS
+#PBS -l walltime=00:59:59
+#PBS -lnodes=1:ppn=1
+### Queue name
+#PBS -q workq
+
+### #PBS -a [start_time]
+### #PBS -W depend=afterany:[job_id]
+
+### Browse to current working dir
+echo ""
+cd $PBS_O_WORKDIR
+echo "current working dir:"
+pwd
+echo ""
+
+### ===========================================================================
+echo "------------------------------------------------------------------------"
+echo "PRELUDE"
+echo "------------------------------------------------------------------------"
+
+cd ../turb/
+
+echo ""
+echo "------------------------------------------------------------------------"
+echo "EXECUTION"
+echo "------------------------------------------------------------------------"
+
+time wine mann_turb_x64.exe turb_s100_10ms 1.000000 29.400000 3.000000 100 8192 32 32 0.7812 6.5000 6.5000 1
+### wait for jobs to finish
+wait
+
+echo ""
+echo "------------------------------------------------------------------------"
+echo "CODA"
+echo "------------------------------------------------------------------------"
+
+
+
+echo ""
+### ===========================================================================
+exit
diff --git a/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt b/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt
index 21c9f3dd8e9452763faab41e0e8920e87c22a63d..c191125c1237fc1732a98ab1327ff1ae1734004a 100644
--- a/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt
+++ b/wetb/prepost/tests/data/demo_dlc/ref/prepost/remote_tags.txt
@@ -125,7 +125,7 @@
                  opt_tags set
 -------------------------------------------------------------------------------
                  [Case folder] : dlc01_demos         
-                    [Case id.] : dlc01_steady_wsp10_noturb
+                    [Case id.] : dlc01_steady_wsp10_s100
                  [Cut-in time] : -1                  
                 [Cut-out time] : -1                  
                          [DLC] : 01                  
@@ -146,9 +146,9 @@
                           [TI] : 0.2096              
           [Time pitch runaway] : 5000                
            [Time stuck DLC22b] : -1                  
-              [Turb base name] : none                
+              [Turb base name] : turb_s100_10ms      
                    [Windspeed] : 10                  
-                     [case_id] : dlc01_steady_wsp10_noturb
+                     [case_id] : dlc01_steady_wsp10_s100
                     [data_dir] : data/               
                  [dis_setbeta] : True                
                     [duration] : 20.0                
@@ -168,10 +168,10 @@
                           [t0] : 20                  
                    [time stop] : 40                  
                    [time_stop] : 40                  
-                    [tu_model] : 0                   
-                     [tu_seed] : 0                   
-              [turb_base_name] : none                
-                     [turb_dx] : 0.048828125         
+                    [tu_model] : 1                   
+                     [tu_seed] : 100                 
+              [turb_base_name] : turb_s100_10ms      
+                     [turb_dx] : 0.78125             
                         [wdir] : 0                   
                     [windramp] : False               
                   [wsp factor] : 0.8                 
diff --git a/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip b/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip
index 60fe434ad3dc15cf5e1dd1977c630107c611ac55..3c02a1d7a11acaeff10fdae72416c4856fcbc4bc 100644
Binary files a/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip and b/wetb/prepost/tests/data/demo_dlc/source/demo_dlc_remote.zip differ
diff --git a/wetb/prepost/tests/data/demo_dlc/source/htc/DLCs/dlc01_demos.xlsx b/wetb/prepost/tests/data/demo_dlc/source/htc/DLCs/dlc01_demos.xlsx
index 1f4c2311367ccea9d06f57db32793e8948f0e959..246cbf5e6e5c94b732349be0a6a09336fb1f715f 100755
Binary files a/wetb/prepost/tests/data/demo_dlc/source/htc/DLCs/dlc01_demos.xlsx and b/wetb/prepost/tests/data/demo_dlc/source/htc/DLCs/dlc01_demos.xlsx differ
diff --git a/wetb/prepost/tests/data/demo_dlc/source/htc/_master/demo_dlc_master_A0001.htc b/wetb/prepost/tests/data/demo_dlc/source/htc/_master/demo_dlc_master_A0001.htc
index 6e956f80d62257709ddd129408b8d77305da4339..bc61a68f9b4130bdf42526dec5101aeacbfeadcd 100755
--- a/wetb/prepost/tests/data/demo_dlc/source/htc/_master/demo_dlc_master_A0001.htc
+++ b/wetb/prepost/tests/data/demo_dlc/source/htc/_master/demo_dlc_master_A0001.htc
@@ -382,7 +382,7 @@ begin wind ;
     filename_u    ./[turb_dir][Turb base name]u.bin ;
     filename_v    ./[turb_dir][Turb base name]v.bin ;
     filename_w    ./[turb_dir][Turb base name]w.bin ;
-    box_dim_u    8192 [turb_dx] ;
+    box_dim_u    512 [turb_dx] ;
     box_dim_v    32 7.5;
     box_dim_w    32 7.5;
     std_scaling   1.0 0.7 0.5 ;
diff --git a/wetb/prepost/tests/test_Simulations.py b/wetb/prepost/tests/test_Simulations.py
index 61628546afc1110717316b1ec79f6269a2926fc5..a439a53e04a92992cab62e9637ce8a11ad5e3e89 100644
--- a/wetb/prepost/tests/test_Simulations.py
+++ b/wetb/prepost/tests/test_Simulations.py
@@ -13,7 +13,8 @@ standard_library.install_aliases()
 import unittest
 import os
 import filecmp
-import pickle
+import shutil
+#import pickle
 
 from wetb.prepost import dlctemplate as tmpl
 
@@ -40,17 +41,25 @@ class TestGenerateInputs(unittest.TestCase):
         # location of the pre and post processing data
         tmpl.POST_DIR = os.path.join(p_root, tmpl.PROJECT, 'remote',
                                      'prepost/')
+
+        # make sure the remote dir is empty so a test does not pass on data
+        # generated during a previous cycle
+        if os.path.exists(os.path.join(p_root, tmpl.PROJECT, 'remote')):
+            shutil.rmtree(os.path.join(p_root, tmpl.PROJECT, 'remote'))
+
         tmpl.force_dir = tmpl.P_RUN
-        tmpl.launch_dlcs_excel('remote', silent=True)
+        tmpl.launch_dlcs_excel('remote', silent=True, runmethod='gorm')
 
-        # we can not check-in empty dirs in git
-        for subdir in ['control', 'data', 'htc', 'pbs_in']:
+        # we can not check-in empty dirs so we can not compare the complete
+        # directory structure without manually creating the empty dirs here
+        for subdir in ['control', 'data', 'htc', 'pbs_in', 'pbs_in_turb',
+                       'htc/_master', 'htc/dlc01_demos', 'pbs_in/dlc01_demos']:
             remote = os.path.join(p_root, tmpl.PROJECT, 'remote', subdir)
             ref = os.path.join(p_root, tmpl.PROJECT, 'ref', subdir)
             cmp = filecmp.dircmp(remote, ref)
-            self.assertTrue(len(cmp.diff_files)==0)
-            self.assertTrue(len(cmp.right_only)==0)
-            self.assertTrue(len(cmp.left_only)==0)
+            self.assertEqual(len(cmp.diff_files), 0, cmp.diff_files)
+            self.assertEqual(len(cmp.right_only), 0, cmp.right_only)
+            self.assertEqual(len(cmp.left_only), 0, cmp.left_only)
 
         # for the pickled file we can just read it
         remote = os.path.join(p_root, tmpl.PROJECT, 'remote', 'prepost')
diff --git a/wetb/utils/cluster_tools/cluster_resource.py b/wetb/utils/cluster_tools/cluster_resource.py
index 804c6ed904e531b4e025ba5c8d3d22ee3386d925..162036fd95edafba7bd1e7e45a1d961b9fbefca1 100644
--- a/wetb/utils/cluster_tools/cluster_resource.py
+++ b/wetb/utils/cluster_tools/cluster_resource.py
@@ -3,16 +3,22 @@ Created on 04/04/2016
 
 @author: MMPE
 '''
-from wetb.utils.cluster_tools.ssh_client import SSHClient
-from wetb.utils.cluster_tools import pbswrap
 import multiprocessing
+import threading
+
 import psutil
 
+from wetb.utils.cluster_tools import pbswrap
+from wetb.utils.cluster_tools.ssh_client import SSHClient
+
+
 class Resource(object):
 
     def __init__(self, min_cpu, min_free):
         self.min_cpu = min_cpu
         self.min_free = min_free
+        self.acquired = 0
+        self.lock = threading.Lock()
 
     def ok2submit(self):
         """Always ok to have min_cpu cpus and ok to have more if there are min_free free cpus"""
@@ -25,6 +31,13 @@ class Resource(object):
         else:
             return False
 
+    def acquire(self):
+        with self.lock:
+            self.acquired += 1
+
+    def release(self):
+        with self.lock:
+            self.acquired -= 1
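+
+    # note: 'acquired' counts the cpus reserved through acquire()/release();
+    # check_resources() uses it as a lower bound on the user's cpu count and
+    # LocalResource reports cpu_free = cpu_count - acquired.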
 
 
 
@@ -32,35 +45,52 @@ class SSHPBSClusterResource(Resource, SSHClient):
     def __init__(self, host, username, password, port, min_cpu, min_free):
         Resource.__init__(self, min_cpu, min_free)
         SSHClient.__init__(self, host, username, password, port=port)
+        self.lock = threading.Lock()
 
     def new_ssh_connection(self):
         return SSHClient(self.host, self.username, self.password, self.port)
 
     def check_resources(self):
-        with self:
-            _, output, _ = self.execute('pbsnodes -l all')
-            pbsnodes, nodes = pbswrap.parse_pbsnode_lall(output.split("\n"))
+        with self.lock:
+            try:
+                with self:
+                    _, output, _ = self.execute('pbsnodes -l all')
+                    pbsnodes, nodes = pbswrap.parse_pbsnode_lall(output.split("\n"))
+
+                    _, output, _ = self.execute('qstat -n1')
+                    users, host, nodesload = pbswrap.parse_qstat_n1(output.split("\n"))
+
 
-            _, output, _ = self.execute('qstat -n1')
-            users, host, nodesload = pbswrap.parse_qstat_n1(output.split("\n"))
+                # if the user does not have any jobs, this will not exist
+                try:
+                    cpu_user = users[self.username]['cpus']
+                    cpu_user += users[self.username]['Q']
+                except KeyError:
+                    cpu_user = 0
+                cpu_user = max(cpu_user, self.acquired)
+                cpu_free, nodeSum = pbswrap.count_cpus(users, host, pbsnodes)
 
+                return nodeSum['used_cpu'] + cpu_free, cpu_free, cpu_user
+            except IOError:
+                raise
+            except Exception:
+                raise EnvironmentError("check resources failed")
 
-        # if the user does not have any jobs, this will not exist
-        try:
-            cpu_user = users[self.username]['cpus']
-            cpu_user += users[self.username]['Q']
-        except KeyError:
-            cpu_user = 0
-        cpu_free, nodeSum = pbswrap.count_cpus(users, host, pbsnodes)
+    def jobids(self, jobname_prefix):
+        _, output, _ = self.execute('qstat -u %s' % self.username)
+        return [l.split()[0].split(".")[0] for l in output.split("\n")[5:]
+                if l.strip() != "" and l.split()[3].startswith(jobname_prefix)]
 
-        return nodeSum['used_cpu'] + cpu_free, cpu_free, cpu_user
+    def stop_pbsjobs(self, jobids):
+        if not hasattr(jobids, "len"):
+            jobids = list(jobids)
+        self.execute("qdel %s" % (" ".join(jobids)))
 
 
 
 class LocalResource(Resource):
     def __init__(self, process_name):
-        N = max(1, multiprocessing.cpu_count() / 4)
-        Resource.__init__(self, N, N)
+        N = max(1, multiprocessing.cpu_count() // 2)
+        Resource.__init__(self, N, multiprocessing.cpu_count())
         self.process_name = process_name
         self.host = 'Localhost'
 
@@ -68,10 +98,10 @@ class LocalResource(Resource):
         def name(i):
             try:
                 return psutil.Process(i).name
-            except psutil._error.AccessDenied:
+            except (psutil._error.AccessDenied, psutil._error.NoSuchProcess):
                 return ""
 
         no_cpu = multiprocessing.cpu_count()
-        cpu_free = (1 - psutil.cpu_percent(.5) / 100) * no_cpu
+        cpu_free = no_cpu - self.acquired  #(1 - psutil.cpu_percent(.5) / 100) * no_cpu
         no_current_process = len([i for i in psutil.get_pid_list() if name(i).lower().startswith(self.process_name.lower())])
         return no_cpu, cpu_free, no_current_process
diff --git a/wetb/utils/cluster_tools/ssh_client.py b/wetb/utils/cluster_tools/ssh_client.py
index ea125bb77ade4aeeb662d91e9701ce201d552b8e..cdda5b515e55fb9a73279d7dc122f93a091ed0c5 100644
--- a/wetb/utils/cluster_tools/ssh_client.py
+++ b/wetb/utils/cluster_tools/ssh_client.py
@@ -13,7 +13,7 @@ class SSHClient(object):
     "A wrapper of paramiko.SSHClient"
     TIMEOUT = 4
 
-    def __init__(self, host, username, password, port=22, key=None, passphrase=None):
+    def __init__(self, host, username, password=None, port=22, key=None, passphrase=None):
         self.host = host
         self.username = username
         self.password = password
@@ -33,6 +33,8 @@ class SSHClient(object):
             self.connect()
 
     def connect(self):
+        if self.password is None:
+            raise IOError("Password not set")
         self.client = paramiko.SSHClient()
         self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         self.client.connect(self.host, self.port, username=self.username, password=self.password, pkey=self.key, timeout=self.TIMEOUT)
@@ -136,7 +138,8 @@ class SSHClient(object):
         _, out, _ = self.execute(r'find %s -maxdepth 1 -type f -name "%s"' % (cwd, filepattern))
         files = []
         for file in out.strip().split("\n"):
-            files.append(file.strip())
+            if file != "":
+                files.append(file.strip())
         return files