From fc1fff2a01d5537e9d92aed1b4adc2327d131f69 Mon Sep 17 00:00:00 2001
From: dave <dave@dtu.dk>
Date: Wed, 27 Jan 2016 14:33:45 +0100
Subject: [PATCH] Something went really wrong with the merge at e0e8c0d3. As a
 result, changes made by @dave were reverted in the merge. This commit
 restores those changes

---
 README.md                   |  12 +-
 docs/howto-make-dlcs.md     | 569 +++++++++++++++++++++++++++
 docs/using-statistics-df.md | 179 +++++++++
 wetb/prepost/Simulations.py |   5 +-
 wetb/prepost/h2_vs_hs2.py   | 739 +++++++++++++++---------------------
 wetb/prepost/hawcstab2.py   |   5 +-
 6 files changed, 1074 insertions(+), 435 deletions(-)
 create mode 100644 docs/howto-make-dlcs.md
 create mode 100644 docs/using-statistics-df.md

diff --git a/README.md b/README.md
index 5ebf5f2..2e63183 100644
--- a/README.md
+++ b/README.md
@@ -23,13 +23,23 @@ Switching to Python 3 is in general a very good idea especially since Python 3.5
 was released. Some even dare to say it
 [is like eating your vegetables](http://nothingbutsnark.svbtle.com/porting-to-python-3-is-like-eating-your-vegetables).
 
-You can automatically convert you code from Python 2 to 3 using the
+You can automatically convert your code from Python 2 to 3 using the
 [2to3](https://docs.python.org/2/library/2to3.html) utility which is included
 in Python 2.7 by default. You can also write code that is compatible with both
 2 and 3 at the same time (you can find additional resources in
 [issue 1](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/issues/1)).
 
 
+# Dependencies
+
+[numpy](http://www.numpy.org/)
+[scipy](http://scipy.org/scipylib/)
+[pandas](http://pandas.pydata.org/)
+[matplotlib](http://matplotlib.org/)
+[pytables](http://www.pytables.org/)
+[pyscaffold](http://pyscaffold.readthedocs.org/en/)
+
+
 # Installation
 
 ```
diff --git a/docs/howto-make-dlcs.md b/docs/howto-make-dlcs.md
new file mode 100644
index 0000000..e31b020
--- /dev/null
+++ b/docs/howto-make-dlcs.md
@@ -0,0 +1,569 @@
+Auto-generation of Design Load Cases
+====================================
+
+
+<!---
+TODO, improvements:
+putty reference and instructions (fill in username in the address username@gorm
+how to mount gorm home on windows
+do as on Arch Linux wiki: top line is the file name where you need to add stuff
+point to the gorm/jess wiki's
+explain the difference in the paths seen from a windows computer and the cluster
+-->
+
+WARNING: these notes contain configuration settings that are specific to the
+DTU Wind Energy cluster Gorm. Only follow this guide in another environment if
+you know what you are doing!
+
+Introduction
+------------
+
+For the auto generation of load cases and the corresponding execution on the
+cluster, the following events will take place:
+* Create an htc master file, and define the various tags in the exchange files
+(spreadsheets).
+* Generate the htc files for all the corresponding load cases based on the
+master file and the tags defined in the exchange files. Besides the HAWC2 htc
+input file, a corresponding pbs script is created that includes the instructions
+to execute the relevant HAWC2 simulation on a cluster node. This includes copying
+the model to the node scratch disc, executing HAWC2, copying the results from
+the node scratch disc back to the network drive.
+* Submit all the load cases (or the pbs launch scripts) to the cluster queueing
+system. This is also referred to as launching the jobs.
+
+Important note regarding file names. On Linux, file names and paths are case
+sensitive, but on Windows they are not. Additionally, HAWC2 will always generate
+result and log files with lower case file names, regardless of the user input.
+Hence, in order to avoid possible ambiguities at all times, make sure that there
+are no upper case symbols defined in the value of the following tags (as defined
+in the Excel spreadsheets): ```[Case folder]```,  ```[Case id.]```, and
+```[Turb base name]```.
+
+The system will always force the values of the tags to be lower case anyway, and
+when working on Windows, this might cause some confusing and unexpected behaviour.
+The tags themselves can have lower and upper case characters as can be seen
+in the example above.
+
+Notice that throughout the document ```$USER``` refers to your user name. You can
+either let the system fill that in for you (by using the variable ```$USER```),
+or explicitly use your user name instead. This user name is the same as your
+DTU account name (or student account/number).
+
+This document refers to commands to be entered in the terminal on Gorm when the
+line starts with ```g-000 $```. The command that needs to be entered starts
+after the ```$```.
+
+
+Connecting to the cluster
+-------------------------
+
+You connect to the cluster via an SSH terminal. SSH is supported out of the box
+for Linux and Mac OSX terminals (such as bash), but requires a separate
+terminal client under Windows. Windows users are advised to use PuTTY and can
+be downloaded at:
+[http://www.chiark.greenend.org.uk/~sgtatham/putty/](http://www.chiark.greenend.org.uk/~sgtatham/putty/).
+Here's a random
+[tutorial](http://www.ghacks.net/2008/02/09/about-putty-and-tutorials-including-a-putty-tutorial/),
+you can use your favourite search engine if you need more or different instructions.
+More answers regarding PuTTY can also be found in the online
+[documentation](http://the.earth.li/~sgtatham/putty/latest/htmldoc/).
+
+The cluster that is setup for using the pre- and post-processing tools for HAWC2
+has the following address: ```gorm.risoe.dk```.
+
+On Linux/Mac connecting to the cluster is as simple as running the following
+command in the terminal:
+
+```
+g-000 $ ssh $USER@gorm.risoe.dk
+```
+
+Use your DTU password when asked. This will give you terminal access to the
+cluster called Gorm.
+
+The cluster can only be reached when on the DTU network (wired, or only from a
+DTU computer when using a wireless connection), when connected to the DTU VPN,
+or from one of the DTU [databars](http://www.databar.dtu.dk/).
+
+More information about the cluster can be found on the
+[Gorm-wiki](http://gorm.risoe.dk/gormwiki)
+
+
+Mounting the cluster discs
+--------------------------
+
+You need to be connected to the DTU network in order for this to work. You can
+also connect to the DTU network over VPN.
+
+When doing the HAWC2 simulations, you will interact regularly with the cluster
+file system and discs. It is convenient to map these discs as network
+drives (in Windows terms). Map the following network drives (replace ```$USER```
+with your user name):
+
+```
+\\mimer\hawc2sim
+\\gorm\$USER # this is your Gorm home directory
+```
+
+Alternatively, on Windows you can use [WinSCP](http://winscp.net) to interact
+with the cluster discs.
+
+Note that by default Windows Explorer will hide some of the files you will need to edit.
+In order to show all files on your Gorm home drive, you need to un-hide system files:
+Explorer > Organize > Folder and search options > select tab "view" > select the option to show hidden files and folders.
+
+From Linux/Mac, you should be able to mount using either of the following
+addresses:
+```
+//mimer.risoe.dk/hawc2sim
+//mimer.risoe.dk/well/hawc2sim
+//gorm.risoe.dk/$USER
+```
+You can use either ```sshfs``` or ```mount -t cifs``` to mount the discs.
+
+
+Preparation
+-----------
+
+Add the cluster-tools script to your system's PATH of your Gorm environment,
+by editing the ```.bash_profile``` file in your Gorm's home directory
+(```/home/$USER/.bash_profile```), and add the following lines (add at the end,
+or create a new file with this file name in case it doesn't exist):
+
+```
+export PATH=$PATH:/home/MET/STABCON/repositories/toolbox/pbsutils/
+```
+
+(The corresponding open repository is on the DTU Wind Energy Gitlab server:
+[pbsutils](https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox). Please
+consider reporting bugs and/or suggesting improvements there. Your contributions
+are much appreciated!)
+
+If you have been using an old version of this how-to, you might be pointing
+to an earlier version of these tools/utils and its reference should be removed
+from your ```.bash_profile``` file:
+
+```
+export PATH=$PATH:/home/MET/STABCON/repositories/cluster-tools/
+```
+
+After modifying ```.bash_profile```, save and close it. Then, in the terminal, run the command:
+```
+g-000 $ source ~/.bash_profile
+```
+In order for any changes made in ```.bash_profile``` to take effect, you need to either ```source``` it (as shown above), or log out and in again.  
+
+You will also need to configure wine and place the HAWC2 executables in a
+directory that wine knows about. First, activate the correct wine environment by
+typing in a shell in the Gorm's home directory (it can be activated with
+ssh (Linux, Mac) or putty (MS Windows)):
+
+```
+g-000 $ WINEARCH=win32 WINEPREFIX=~/.wine32 wine test.exe
+```
+
+Optionally, you can also make an alias (a short format for a longer, more complex
+command). In the ```.bashrc``` file in your home directory
+(```/home/$USER/.bash_profile```), add at the bottom of the file:
+
+```
+alias wine32='WINEARCH=win32 WINEPREFIX=~/.wine32 wine'
+```
+
+And now copy all the HAWC2 executables, DLL's (including the license manager)
+to your wine directory. All the required executables, dll's and
+the license manager are located at ```/home/MET/hawc2exe```. The following
+command will do this copying:
+
+```
+g-000 $ cp /home/MET/hawc2exe/* /home/$USER/.wine32/drive_c/windows/system32
+```
+
+Notice that the HAWC2 executable names are ```hawc2-latest.exe```,
+```hawc2-118.exe```, etc. By default the latest version will be used and the user
+does not need to specify this. However, when you need to compare different version
+you can easily do so by specifying which case should be run with which
+executable. The file ```hawc2-latest.exe``` will always be the latest HAWC2
+version at ```/home/MET/hawc2exe/```. When a new HAWC2 is released you can
+simply copy all the files from there again to update.
+
+Log out and in again from the cluster (close and restart PuTTY).
+
+At this stage you can run HAWC2 as follows:
+
+```
+g-000 $ wine32 hawc2-latest htc/some-input-file.htc
+```
+
+
+Method A: Generating htc input files on the cluster
+---------------------------------------------------
+
+Use ssh (Linux, Mac) or putty (MS Windows) to connect to the cluster.
+
+With qsub-wrap.py the user can wrap a PBS launch script around any executable or
+Python/Matlab/... script. In doing so, the executable/Python script will be
+immediately submitted to the cluster for execution. By default, the Anaconda
+Python environment in ```/home/MET/STABCON/miniconda``` will be activated. The
+Anaconda Python environment is not relevant, and can be safely ignored, if the
+executable does not have anything to do with Python.
+
+In order to see the different options of this qsub-wrap utility, do:
+
+```
+g-000 $ qsub-wrap.py --help
+```
+
+For example, in order to generate the default IEC DLCs:
+
+```
+g-000 $ cd path/to/HAWC2/model # folder where the hawc2 model is located
+g-000 $ qsub-wrap.py -f /home/MET/STABCON/repositories/prepost/dlctemplate.py -c python --prep
+```
+
+Note that the following folder structure for the HAWC2 model is assumed:
+
+```
+|-- control
+|   |-- ...
+|-- data
+|   |-- ...
+|-- htc
+|   |-- DLCs
+|   |   |-- dlc12_iec61400-1ed3.xlsx
+|   |   |-- dlc13_iec61400-1ed3.xlsx
+|   |   |-- ...
+|   |-- _master
+|   |   `-- dtu10mw_master_C0013.htc
+```
+
+The load case definitions should be placed in Excel spreadsheets with a
+```*.xlsx``` extension. The above example shows one possible scenario whereby
+all the load case definitions are placed in ```htc/DLCs``` (all folder names
+are case sensitive). Alternatively, one can also place the spreadsheets in
+separate sub folders, for example:
+
+```
+|-- control
+|   |-- ...
+|-- data
+|   |-- ...
+|-- htc
+|   |-- dlc12_iec61400-1ed3
+|   |   |-- dlc12_iec61400-1ed3.xlsx
+|   |-- dlc13_iec61400-1ed3
+|   |   |-- dlc13_iec61400-1ed3.xlsx
+```
+
+In order to use this auto-configuration mode, there can only be one master file
+in ```_master``` that contains ```_master_``` in its file name.
+
+For the NREL5MW and the DTU10MW HAWC2 models, you can find their respective
+master files and DLC definition spreadsheet files on Mimer. When connected
+to Gorm over SSH/PuTTY, you will find these files at:
+```
+/mnt/mimer/hawc2sim # (when on Gorm)
+```
+
+
+Method B: Generating htc input files interactively on the cluster
+-----------------------------------------------------------------
+
+Use ssh (Linux, Mac) or putty (MS Windows) to connect to the cluster.
+
+This approach gives you more flexibility, but requires more commands, and is hence
+considered more difficult compared to method A.
+
+First activate the Anaconda Python environment by typing:
+
+```bash
+# add the Anaconda Python environment paths to the system PATH
+g-000 $ export PATH=/home/MET/STABCON/miniconda/bin:$PATH
+# activate the custom python environment:
+g-000 $ source activate anaconda
+# add the Python libraries to the PYTHONPATH
+g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/prepost:$PYTHONPATH
+g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/pythontoolbox/fatigue_tools:$PYTHONPATH
+g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/pythontoolbox:$PYTHONPATH
+g-000 $ export PYTHONPATH=/home/MET/STABCON/repositories/MMPE:$PYTHONPATH
+```
+For example, launch the auto-generation of DLCs input files:
+
+```
+g-000 $ cd path/to/HAWC2/model # folder where the hawc2 model is located
+g-000 $ python /home/MET/STABCON/repositories/prepost/dlctemplate.py --prep
+```
+
+Or start an interactive IPython shell:
+
+```
+g-000 $ ipython
+```
+
+Users should be aware that running computational heavy loads on the login node
+is strictly discouraged. By overloading the login node other users will
+experience slow login procedures, and the whole cluster could potentially be
+jammed.
+
+
+Method C: Generating htc input files locally
+--------------------------------------------
+
+This approach gives you total freedom, but is also more difficult since you
+will have to have fully configured Python environment installed locally.
+Additionally, you need access to the cluster discs from your local workstation.
+Method C is not documented yet.
+
+
+Optional configuration
+----------------------
+
+Optional tags that can be set in the Excel spreadsheet, and their corresponding
+default values are given below. Beside a replacement value in the master htc
+file, there are also special actions connected to these values. Consequently,
+these tags have to be present. When removed, the system will stop working properly.
+
+Relevant for the generation of the PBS launch scripts (```*.p``` files):
+* ```[walltime] = '04:00:00' (format: HH:MM:SS)```
+* ```[hawc2_exe] = 'hawc2-latest'```
+
+Following directories have to be defined, and their default values are used when
+they are not set explicitly in the spreadsheets.
+* ```[animation_dir] = 'animation/'```
+* ```[control_dir] = 'control/'```, all files and sub-folders copied to node
+* ```[data_dir] = 'data/'```, all files and sub-folders copied to node
+* ```[eigenfreq_dir] = False```
+* ```[htc_dir] = 'htc/'```
+* ```[log_dir] = 'logfiles/'```
+* ```[res_dir] = 'res/'```
+* ```[turb_dir] = 'turb/'```
+* ```[turb_db_dir] = '../turb/'```
+* ```[turb_base_name] = 'turb_'```
+
+Required, and used for the PBS output and post-processing
+* ```[pbs_out_dir] = 'pbs_out/'```
+* ```[iter_dir] = 'iter/'```
+
+Optional
+* ```[turb_db_dir] = '../turb/'```
+* ```[wake_dir] = False```
+* ```[wake_db_dir] = False```
+* ```[wake_base_name] = 'turb_'```
+* ```[meander_dir] = False```
+* ```[meand_db_dir] = False```
+* ```[meand_base_name] = 'turb_'```
+* ```[mooring_dir] = False```, all files and sub-folders copied to node
+* ```[hydro_dir] = False```, all files and sub-folders copied to node
+
+A zip file will be created which contains all files in the model root directory,
+and all the contents (files and folders) of the following directories:
+```[control_dir], [mooring_dir], [hydro_dir], 'externalforce/', [data_dir]```.
+This zip file will be extracted into the execution directory (```[run_dir]```).
+After the model has ran on the node, only the files that have been created
+during simulation time in the ```[log_dir]```, ```[res_dir]```,
+```[animation_dir]```, and ```[eigenfreq_dir]``` will be copied back.
+Optionally, one can also copy back the turbulence files, and other explicitly
+defined files [TODO: expand manual here].
+
+
+Launching the jobs on the cluster
+---------------------------------
+
+Use ssh (Linux, Mac) or putty (MS Windows) to connect to the cluster.
+
+The ```launch.py``` is a generic tool that helps with launching an arbitrary
+number of pbs launch script on a PBS Torque cluster. Launch scripts here
+are defined as files with a ```.p``` extension. The script will look for any
+```.p``` files in a specified folder (```pbs_in/``` by default, which the user
+can change using the  ```-p``` or ```--path_pbs``` flag) and save them in a
+file list called ```pbs_in_file_cache.txt```. When using the option ```-c``` or
+```--cache```, the script will not look for pbs files, but instead read them
+directly from the ```pbs_in_file_cache.txt``` file.
+
+The launch script has a simple built-in scheduler that has been successfully
+used to launch 50.000 jobs. This scheduler is configured by two parameters:
+number of cpu's requested (using ```-c``` or ```--nr_cpus```) and minimum
+of required free cpu's on the cluster (using ```--cpu_free```, 48 by default).
+Jobs will be launched after a predefined sleep time (as set by the
+```--tsleep``` option, and set to 5 seconds by default). After the initial sleep
+time a new job will be launched every 0.1 second. If the launch condition is not
+met (```nr_cpus > cpu's used by user AND cpu's free on cluster > cpu_free```),
+the program will wait 5 seconds before trying to launch a new job again.
+
+Depending on the amount of jobs and the required computation time, it could
+take a while before all jobs are launched. When running the launch script from
+the login node, this might be a problem when you have to close your ssh/putty
+session before all jobs are launched. In that case the user should use a
+dedicated compute node for launching jobs. To run the launch script on a
+compute instead of the login node, use the ```--node``` option. You can inspect
+the progress in the ```launch_scheduler_log.txt``` file.
+
+The ```launch.py``` script has some different options, and you can read about
+them by using the help function (the output is included for your convenience):
+
+```bash
+g-000 $ launch.py --help
+
+usage: launch.py -n nr_cpus
+
+options:
+  -h, --help            show this help message and exit
+  --depend              Switch on for launch depend method
+  -n NR_CPUS, --nr_cpus=NR_CPUS
+                        number of cpus to be used
+  -p PATH_PBS_FILES, --path_pbs_files=PATH_PBS_FILES
+                        optionally specify location of pbs files
+  --re=SEARCH_CRIT_RE   regular expression search criterium applied on the
+                        full pbs file path. Escape backslashes! By default it
+                        will select all *.p files in pbs_in/.
+  --dry                 dry run: do not alter pbs files, do not launch
+  --tsleep=TSLEEP       Sleep time [s] after qsub command. Default=5 seconds
+  --logfile=LOGFILE     Save output to file.
+  -c, --cache           If on, files are read from cache
+  --cpu_free=CPU_FREE   No more jobs will be launched when the cluster does
+                        not have the specified amount of cpus free. This will
+                        make sure there is room for others on the cluster, but
+                        might mean less cpus available for you. Default=48.
+  --qsub_cmd=QSUB_CMD   Is set automatically by --node flag
+  --node                If executed on dedicated node.
+```
+
+Then launch the actual jobs (each job is a ```*.p``` file in ```pbs_in```) using
+100 cpu's, and using a compute node instead of the login node (so you can exit
+the ssh/putty session without interrupting the launching process):
+
+```bash
+g-000 $ cd path/to/HAWC2/model
+g-000 $ launch.py -n 100 --node
+```
+
+
+Inspecting running jobs
+-----------------------
+
+There are a few tools you can use from the command line to see what is going on
+the cluster. How many nodes are free, how many nodes do I use as a user, etc.
+
+* ```cluster-status.py``` overview dashboard of the cluster: nodes free, running,
+length of the queue, etc
+* ```qstat -u $USER``` list all the running and queued jobs of the user
+* ```nnsqdel $USER all``` delete all the jobs from the user
+* ```qdel_range JOBID_FROM JOBID_TIL``` delete a range of job id's
+
+Notice that the pbs output files in ```pbs_out``` are only created when the job
+has ended (or failed). When you want to inspect a running job, you can ssh from
+the Gorm login node to node that runs the job. First, find the job id by listing
+all your current jobs (```qstat -u $USER```). The job id can be found in the
+first column, and you only need to consider the number, not the domain name
+attached to it. Now find on which node it runs with (replace 123456 with the
+relevant job id):
+```
+g-000 $ qstat -f 123456 | grep exec_host
+```
+
+From here you login into the node as follows (replace g-078 with the relevant
+node):
+```
+g-000 $ ssh g-078
+```
+
+And browse to the scratch directory which lands you in the root directory of
+your running HAWC2 model (replace 123456 with the relevant job id):
+```
+g-000 $ cd /scratch/$USER/123456.g-000.risoe.dk
+```
+
+
+Re-launching failed jobs
+------------------------
+
+In case you want to re-launch only a subset of a previously generated set of
+load cases, there are several methods:
+
+1. Copy the PBS launch scripts (they have the ```*.p``` extension and can be
+found in the ```pbs_in``` folder) of the failed cases to a new folder (for
+example ```pbs_in_failed```). Now run ```launch.py``` again, but instead point
+to the folder that contains the ```*.p``` files of the failed cases, for example:
+```
+g-000 $ launch.py -n 100 --node -p pbs_in_failed
+```
+
+2. Use the ```--cache``` option, and edit the PBS file list in the file
+```pbs_in_file_cache.txt``` so that only the simulations remain that have to be
+run again. Note that the ```pbs_in_file_cache.txt``` file is created every time
+you run a ```launch.py```. Note that you can use the option ```--dry``` to make
+a practice launch run, and that will create a ```pbs_in_file_cache.txt``` file,
+but not a single job will be launched.
+
+3. Each pbs file can be launched manually as follows:
+```
+g-000 $ qsub path/to/pbs_file.p
+```
+
+Alternatively, one can use the following options in ```launch.py```:
+
+* ```-p some/other/folder```: specify from which folder the pbs files should be taken
+* ```--re=SEARCH_CRIT_RE```: advanced filtering based on the pbs file names. It
+requires some notion of regular expressions (some random tutorials:
+[1](http://www.codeproject.com/Articles/9099/The-Minute-Regex-Tutorial),
+[2](http://regexone.com/))
+    * ```launch.py -n 10 --re=.SOMENAME.``` will launch all pbs files that
+    contain ```SOMENAME```. Notice the leading and trailing dot, which in
+    regular expressions is equivalent to the bash wild card (*).
+
+
+Post-processing
+---------------
+
+The post-processing happens through the same script as used for generating the
+htc files, but now we set different flags. For example, for checking the log
+files, calculating the statistics, the AEP and the life time equivalent loads:
+
+```
+g-000 $ qsub-wrap.py -f /home/MET/STABCON/repositories/prepost/dlctemplate.py -c python --years=25 --neq=1e7 --stats --check_logs --fatigue
+```
+
+Other options for the ```dlctemplate.py``` script:
+
+```
+usage: dlctemplate.py [-h] [--prep] [--check_logs] [--stats] [--fatigue]
+                      [--csv] [--years YEARS] [--no_bins NO_BINS] [--neq NEQ]
+                      [--envelopeblade] [--envelopeturbine]
+
+pre- or post-processes DLC's
+
+optional arguments:
+  -h, --help         show this help message and exit
+  --prep             create htc, pbs, files (default=False)
+  --check_logs       check the log files (default=False)
+  --stats            calculate statistics (default=False)
+  --fatigue          calculate Leq for a full DLC (default=False)
+  --csv              Save data also as csv file (default=False)
+  --years YEARS      Total life time in years (default=20)
+  --no_bins NO_BINS  Number of bins for fatigue loads (default=46)
+  --neq NEQ          Equivalent cycles neq (default=1e6)
+  --envelopeblade    calculate the load envelope for sensors on the blades
+  --envelopeturbine  calculate the load envelope for sensors on the turbine
+```
+
+The load envelopes are computed for sensors specified in the 
+```dlctemplate.py``` file. The sensors are specified in a list of lists. The 
+inner list contains the sensors at one location. The envelope is computed for
+the first two sensors of the inner list and the other sensors are used to
+retrieve the remaining loads defining the load state occurring at the same 
+instant. The outer list is used to specify sensors at different locations.
+The default values for the blade envelopes are used to compute the Mx-My
+envelopes and retrieve the Mz-Fx-Fy-Fz loads occurring at the same moment.
+
+Debugging
+---------
+
+Any output (everything that involves print statements) generated during the
+post-processing of the simulations using ```dlctemplate.py``` is captured in
+the ```pbs_out/qsub-wrap_dlctemplate.py.out``` file, while exceptions and errors
+are redirected to the ```pbs_out/qsub-wrap_dlctemplate.py.err``` text file.
+
+The output and errors of HAWC2 simulations can also be found in the ```pbs_out```
+directory. The ```.err``` and ```.out``` files will be named exactly the same
+as the ```.htc``` input files, and the ```.sel```/```.dat``` output files.
+
diff --git a/docs/using-statistics-df.md b/docs/using-statistics-df.md
new file mode 100644
index 0000000..aa78af0
--- /dev/null
+++ b/docs/using-statistics-df.md
@@ -0,0 +1,179 @@
+How to use the Statistics DataFrame
+===================================
+
+
+Introduction
+------------
+
+The statistical data of your post-processed load cases are saved in the HDF
+format. You can use Pandas to retrieve and organize that data. Pandas organizes
+the data in a DataFrame, and the library is powerful, comprehensive and requires
+some learning. There are extensive resources out in the wild that will help
+you get started:
+
+* [list](http://pandas.pydata.org/pandas-docs/stable/tutorials.html)
+of good tutorials can be found in the Pandas
+[documentation](http://pandas.pydata.org/pandas-docs/version/0.16.2/).
+* short and simple
+[tutorial](https://github.com/DTUWindEnergy/Python4WindEnergy/blob/master/lesson%204/pandas.ipynb)
+as used for the Python 4 Wind Energy course
+
+
+The data is organized in a simple 2-dimensional table. However, since the statistics
+of each channel is included for multiple simulations, the data set is actually
+3-dimensional. As an example, this is how a table could look like:
+
+```
+   [case_id]  [channel name]  [mean]  [std]    [windspeed]
+       sim_1          pitch        0      1              8
+       sim_1            rpm        1      7              8
+       sim_2          pitch        2      9              9
+       sim_2            rpm        3      2              9
+       sim_3          pitch        0      1              7
+```
+
+Each row is a channel of a certain simulation, and the columns represent the
+following:
+
+* a tag from the master file and the corresponding value for the given simulation
+* the channel name, description, units and unique identifier
+* the statistical parameters of the given channel
+
+
+Load the statistics as a pandas DataFrame
+-----------------------------------------
+
+Pandas has some very powerful functions that will help analysing large and
+complex DataFrames. The documentation is extensive and is supplemented with
+various tutorials. You can use
+[10 Minutes to pandas](http://pandas.pydata.org/pandas-docs/stable/10min.html)
+as a first introduction.
+
+Loading the pandas DataFrame table works as follows:
+
+```python
+import pandas as pd
+df = pd.read_hdf('path/to/file/name.h5', 'table')
+```
+
+Some tips for inspecting the data:
+
+```python
+import numpy as np
+
+# Check the available data columns:
+for colname in sorted(df.keys()):
+    print colname
+
+# list all available channels:
+print np.sort(df['channel'].unique())
+
+# list the different load cases
+df['[Case folder]'].unique()
+```
+
+
+Reduce memory footprint using categoricals
+------------------------------------------
+
+When the DataFrame is consuming too much memory, you can try to reduce its
+size by using categoricals. A more extensive introduction to categoricals can be
+found
+[here](http://pandas.pydata.org/pandas-docs/stable/faq.html#dataframe-memory-usage)
+and [here](http://matthewrocklin.com/blog/work/2015/06/18/Categoricals/).
+The basic idea is to replace all string values with an integer,
+and have an index that maps the string value to the index. This trick only works
+when you have long strings that occur multiple times throughout your data set.
+
+The following example shows how you can use categoricals to reduce the memory
+usage of a pandas DataFrame:
+
+```python
+# load a certain DataFrame
+df = pd.read_hdf(fname, 'table')
+# Return the total estimated memory usage
+print '%10.02f MB' % (df.memory_usage(index=True).sum()/1024.0/1024.0)
+# the data type of column that contains strings is called object
+# convert objects to categories to reduce memory consumption
+for column_name, column_dtype in df.dtypes.iteritems():
+    # applying categoricals mostly makes sense for objects, we ignore all others
+    if column_dtype.name == 'object':
+        df[column_name] = df[column_name].astype('category')
+print '%10.02f MB' % (df.memory_usage(index=True).sum()/1024.0/1024.0)
+```
+
+Python has a garbage collector working in the background that deletes
+un-referenced objects. In some cases it might help to actively trigger the
+garbage collector as follows, in an attempt to free up memory during a run of
+a script that is almost flooding the memory:
+
+```python
+import gc
+gc.collect()
+```
+
+Load a DataFrame that is too big for memory in chunks
+-----------------------------------------------------
+
+When a DataFrame is too big to load into memory at once, and you already
+compressed your data using categoricals (as explained above), you can read
+the DataFrame one chunk at the time. A chunk is a selection of rows. For
+example, you can read 1000 rows at the time by setting ```chunksize=1000```
+when calling ```pd.read_hdf()```. For example:
+
+```python
+# load a large DataFrame in chunks of 1000 rows
+for df_chunk in pd.read_hdf(fname, 'table', chunksize=1000):
+    print 'DataFrame chunk contains %i rows' % (len(df_chunk))
+```
+
+We will read a large DataFrame as chunks into memory, and select only those
+rows who belong to dlc12:
+
+```python
+# only select one DLC, and place them in one DataFrame. If the data
+# containing one DLC is still too big for memory, this approach will fail
+
+# create an empty DataFrame, here we collect the results we want
+df_dlc12 = pd.DataFrame()
+for df_chunk in pd.read_hdf(fname, 'table', chunksize=1000):
+    # organize the chunk: all rows for which [Case folder] is the same
+    # in a single group. Each group is now a DataFrame for which
+    # [Case folder] has the same value.
+    for group_name, group_df in df_chunk.groupby(df_chunk['[Case folder]']):
+        # if we have the group with dlc12, save them for later
+        if group_name == 'dlc12_iec61400-1ed3':
+            df_dlc12 = pd.concat([df_dlc12, group_df])#, ignore_index=True)
+```
+
+Plot wind speed vs rotor speed
+------------------------------
+
+```python
+# select the channels of interest
+for group_name, group_df in df_dlc12.groupby(df_dlc12['channel']):
+    # iterate over all channel groups, but only do something with the channels
+    # we are interested in
+    if group_name == 'Omega':
+        # we save the case_id tag, the mean value of channel Omega
+        df_rpm = group_df[['[case_id]', 'mean']].copy()
+        # note we made a copy because we will change the DataFrame in the next line
+        # rename the column mean to something more useful
+        df_rpm.rename(columns={'mean': 'Omega-mean'}, inplace=True)
+    elif group_name == 'windspeed-global-Vy-0.00-0.00--127.00':
+        # we save the case_id tag, the mean value of channel wind, and the 
+        # value of the Windspeed tag
+        df_wind = group_df[['[case_id]', 'mean', '[Windspeed]']].copy()
+        # note we made a copy because we will change the DataFrame in the next line
+        # rename the mean of the wind channel to something more useful
+        df_wind.rename(columns={'mean': 'wind-mean'}, inplace=True)
+
+# join both results on the case_id value so the mean RPM and mean wind speed
+# are referring to the same simulation/case_id.
+df_res = pd.merge(df_wind, df_rpm, on='[case_id]', how='inner')
+
+# now we can plot RPM vs wind speed
+from matplotlib import pyplot as plt
+plt.plot(df_res['wind-mean'].values, df_res['Omega-mean'].values, '*')
+```
+
diff --git a/wetb/prepost/Simulations.py b/wetb/prepost/Simulations.py
index ebe3944..5847a5a 100755
--- a/wetb/prepost/Simulations.py
+++ b/wetb/prepost/Simulations.py
@@ -4776,8 +4776,9 @@ class Cases:
             nn_shaft = 4
         itorque = self.res.ch_dict['shaft-shaft-node-%3.3i-momentvec-z'%nn_shaft]['chi']
         torque = self.res.sig[:,itorque]
-
-        return torque*rads
+        # negative means power is being extracted, which is exactly what a wind
+        # turbine is about, we call that positive
+        return -1.0*torque*rads
 
     def calc_torque_const(self, save=False, name='ojf'):
         """
diff --git a/wetb/prepost/h2_vs_hs2.py b/wetb/prepost/h2_vs_hs2.py
index 8bbce7b..752bca8 100644
--- a/wetb/prepost/h2_vs_hs2.py
+++ b/wetb/prepost/h2_vs_hs2.py
@@ -5,9 +5,6 @@ Created on Mon Nov  2 15:23:15 2015
 @author: dave
 """
 
-
-
-
 import os
 
 import numpy as np
@@ -21,189 +18,71 @@ from wetb.prepost import hawcstab2 as hs2
 from wetb.prepost import mplutils
 
 
-class Configurations:
-    # HAWC2
-    eigenan = {'[eigen_analysis]':True, '[time stop]':0,
-               '[t0]'            :0,     '[output]'  :False}
-    control = {'[Free shaft rot]':True,  '[dll]'        :True,
-               '[fixed_op]'      :False, '[fixed_shaft]':False,
-               '[init_wr]'       :0.5,   '[pitch_bearing]':True}
-    opt_h2 = {'[output]'         :True,  '[hs2]'        :False,
-              '[hawc2_exe]'      :'hawc2-latest',
-              '[Case folder]'    :'HAWC2', '[hawc2]'    :True}
-    fix_op = {'[Free shaft rot]' :False, '[dll]'        :False,
-              '[fixed_op]'       :True,  '[fixed_shaft]':False,
-              '[init_wr]'        :0.5,   '[fixed_omega]':0.5,
-              '[pitch_bearing]'  :False}
-    # HAWCStab2
-    opt_hs2 = {'[output]'         :False, '[hs2]'        :True,
-               '[Free shaft rot]' :True,  '[dll]'        :False,
-               '[fixed_op]'       :False, '[fixed_shaft]':False,
-               '[init_wr]'        :0.5,   '[fixed_omega]':0.5,
-               '[pitch_angle]'    :0.0,   '[hawc2_exe]'  :'hs2cmd-latest',
-               '[Case folder]'    :'HAWCStab2', '[hawc2]':False,
-               '[pitch_bearing]'  :True}
-
-    # AERODYNAMIC MODELLING OPTIONS
-    aero_simple = {'[aerocalc]':1, '[Induction]':0, '[tip_loss]':0,
-                   '[Dyn stall]':0, '[t0]':100, '[time stop]':150}
-    # when induction is on, especially the low wind speeds will need more time
-    # to reach steady state in HAWC2 compared to when there is no induction.
-    aero_full = {'[aerocalc]':1, '[Induction]':1, '[tip_loss]':1,
-                 '[Dyn stall]':1, '[t0]':500, '[time stop]':550}
-
-    blade_stiff_pitchC4 = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                           '[blade_damp_z]':0.01, '[blade_set]':4,
-                           '[blade_subset]':1, '[blade_posx]':-0.75}
-
-    blade_stiff_pitchC2 = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                           '[blade_damp_z]':0.01, '[blade_set]':4,
-                           '[blade_subset]':1, '[blade_posx]':0.0}
-
-    blade_stiff_pitch3C4 = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                           '[blade_damp_z]':0.01, '[blade_set]':4,
-                           '[blade_subset]':1, '[blade_posx]':0.75}
-
-    blade_flex50_tstiff_C14 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                               '[blade_damp_z]':0.03, '[blade_set]':1,
-                               '[blade_subset]':15, '[blade_posx]':-0.75,
-                               '[blade_nbodies]': 17,
-                               '[c12]':False, '[c14]':True}
-    blade_flex50_tstiff_C12 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                               '[blade_damp_z]':0.03, '[blade_set]':1,
-                               '[blade_subset]':13, '[blade_posx]':0.0,
-                               '[blade_nbodies]': 17,
-                               '[c12]':True, '[c14]':False}
-    blade_flex50_C14 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                        '[blade_damp_z]':0.03, '[blade_set]':1,
-                        '[blade_subset]':17, '[blade_posx]':-0.75,
-                        '[blade_nbodies]': 17,
-                        '[c12]':False, '[c14]':True}
-    blade_flex50_C12 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                        '[blade_damp_z]':0.03, '[blade_set]':1,
-                        '[blade_subset]':16, '[blade_posx]':0.0,
-                        '[blade_nbodies]': 17,
-                        '[c12]':True, '[c14]':False}
-    blade_flex89_tstiff_C14 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                            '[blade_damp_z]':0.03, '[blade_set]':1,
-                            '[blade_subset]':23, '[blade_posx]':-0.75,
-                            '[blade_nbodies]': 17,
-                            '[c12]':False, '[c14]':True}
-    blade_flex89_tstiff_C12 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                            '[blade_damp_z]':0.03, '[blade_set]':1,
-                            '[blade_subset]':22, '[blade_posx]':0.0,
-                            '[blade_nbodies]': 17,
-                            '[c12]':True, '[c14]':False}
-    blade_flex89_t50_C14 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                            '[blade_damp_z]':0.03, '[blade_set]':1,
-                            '[blade_subset]':19, '[blade_posx]':-0.75,
-                            '[blade_nbodies]': 17,
-                            '[c12]':False, '[c14]':True}
-    blade_flex89_t50_C12 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                            '[blade_damp_z]':0.03, '[blade_set]':1,
-                            '[blade_subset]':18, '[blade_posx]':0.0,
-                            '[blade_nbodies]': 17,
-                            '[c12]':True, '[c14]':False}
-    blade_flex89_t50_C12_allstC14 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                            '[blade_damp_z]':0.03, '[blade_set]':1,
-                            '[blade_subset]':21, '[blade_posx]':-0.75,
-                            '[blade_nbodies]': 17,
-                            '[c12]':False, '[c14]':True}
-    blade_flex89_t50_C12_allstC12 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                            '[blade_damp_z]':0.03, '[blade_set]':1,
-                            '[blade_subset]':19, '[blade_posx]':0.0,
-                            '[blade_nbodies]': 17,
-                            '[c12]':True, '[c14]':False}
-
-    blade_flex89_t50_C12_cgshC14_eaC12 = {'[blade_damp_x]':0.03,
-                                          '[blade_damp_y]':0.03,
-                                          '[blade_damp_z]':0.03,
-                                          '[blade_set]'   :1,
-                                          '[blade_subset]':24,
-                                          '[blade_posx]'  :0.0,
-                                          '[blade_nbodies]': 17,
-                                          '[c12]':True, '[c14]':False}
-
-    blade_flex = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                  '[blade_damp_z]':0.01, '[blade_set]':1,
-                  '[blade_subset]':1, '[blade_posx]':-0.75}
-
-    blade_flex_allac = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                        '[blade_damp_z]':0.01, '[blade_set]':1,
-                        '[blade_subset]':2, '[blade_posx]':-0.75}
-
-    blade_flex_allac_11 = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                           '[blade_damp_z]':0.01, '[blade_set]':1,
-                           '[blade_subset]':7, '[blade_posx]':-0.75}
-
-    blade_flex_allac_33 = {'[blade_damp_x]':0.02, '[blade_damp_y]':0.02,
-                           '[blade_damp_z]':0.02, '[blade_set]':1,
-                           '[blade_subset]':8, '[blade_posx]':-0.75}
-
-    blade_flex_allac_50 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                           '[blade_damp_z]':0.03, '[blade_set]':1,
-                           '[blade_subset]':9, '[blade_posx]':-0.75}
-
-    blade_flex_allac_50_pitchC2 = {'[blade_damp_x]':0.03, '[blade_damp_y]':0.03,
-                                   '[blade_damp_z]':0.03, '[blade_set]':1,
-                                   '[blade_subset]':9, '[blade_posx]':0.0}
-
-    # configurations for the B-series (which has quite a few changes)
-    # B0001
-    stiff_pc14_cgsheac14 = {'[blade_damp_x]':0.01, '[blade_damp_y]':0.01,
-                            '[blade_damp_z]':0.01, '[blade_set]':1,
-                            '[blade_subset]':3,    '[blade_posx]':-0.75,
-                            '[blade_nbodies]': 17,
-                            '[st_file]'     :'blade_flex_rect.st',
-                            '[c12]':False, '[c14]':True,
-                            '[ae_tolrel]': 1e-7,
-                            '[ae_itmax]' : 2000,
-                            '[ae_1relax]': 0.2}
-    # B0002
-    flex_tstiff_pc14_cgsheac14 = {'[blade_damp_x]':0.13, '[blade_damp_y]':0.13,
-                                  '[blade_damp_z]':0.15, '[blade_set]':1,
-                                  '[blade_subset]':5,    '[blade_posx]':-0.75,
-                                  '[blade_nbodies]': 17,
-                                  '[st_file]'     :'blade_flex_rect.st',
-                                  '[c12]':False, '[c14]':True,
-                                  '[ae_tolrel]': 1e-7,
-                                  '[ae_itmax]' : 2000,
-                                  '[ae_1relax]': 0.7}
-    # B0003
-    flex_pc14_cgsheac14 = {'[blade_damp_x]':0.13, '[blade_damp_y]':0.13,
-                           '[blade_damp_z]':0.15, '[blade_set]':1,
-                           '[blade_subset]':6,    '[blade_posx]':-0.75,
-                           '[blade_nbodies]': 17,
-                           '[st_file]'     :'blade_flex_rect.st',
-                           '[c12]':False, '[c14]':True,
-                           '[ae_tolrel]': 1e-7,
-                           '[ae_itmax]' : 2000,
-                           '[ae_1relax]': 0.7}
-    # B0004
-    flex_pc12_cgsheac12 = {'[blade_damp_x]':0.15, '[blade_damp_y]':0.15,
-                           '[blade_damp_z]':0.17, '[blade_set]':1,
-                           '[blade_subset]':7,    '[blade_posx]':0.00,
-                           '[blade_nbodies]': 17,
-                           '[st_file]'     :'blade_flex_rect.st',
-                           '[c12]':True, '[c14]':False,
-                           '[ae_tolrel]': 1e-7,
-                           '[ae_itmax]' : 2000,
-                           '[ae_1relax]': 0.7}
-    # B0005, B0006
-    flex_pc12_cgshc14_eac12 = {'[blade_damp_x]':0.15, '[blade_damp_y]':0.15,
-                               '[blade_damp_z]':0.17, '[blade_set]':1,
-                               '[blade_subset]':8,    '[blade_posx]':0.00,
-                               '[blade_nbodies]': 17,
-                               '[st_file]'     :'blade_flex_rect.st',
-                               '[c12]':True, '[c14]':False,
-                               '[ae_tolrel]': 1e-7,
-                               '[ae_itmax]' : 2000,
-                               '[ae_1relax]': 0.98}
-
+class ConfigBase:
 
     def __init__(self):
         pass
 
+    def set_master_defaults(self):
+        """Create a set of default master tags that are required for proper
+        compatibility with Simulations.py
+        """
+        mt = {}
+        # =====================================================================
+        # required tags and their defaults
+        # =====================================================================
+        mt['[dt_sim]'] = 0.01
+        mt['[hawc2_exe]'] = 'hawc2-latest'
+        # convergence_limits  0.001  0.005  0.005 ;
+        # critical one, residual on the forces: 0.0001 = 1e-4
+        mt['[epsresq]'] = '1.0' # default=10.0
+        # increment residual
+        mt['[epsresd]'] = '0.5' # default= 1.0
+        # constraint equation residual
+        mt['[epsresg]'] = '1e-8' # default= 1e-7
+        # folder names for the saved results, htc, data, zip files
+        # Following dirs are relative to the model_dir_server and they specify
+        # the location of where the results, logfiles, animation files that were
+        # run on the server should be copied to after the simulation has finished.
+        # on the node, it will try to copy the turbulence files from these dirs
+        mt['[animation_dir]'] = 'animation/'
+        mt['[control_dir]']   = 'control/'
+        mt['[data_dir]']      = 'data/'
+        mt['[eigen_analysis]'] = False
+        mt['[eigenfreq_dir]'] = False
+        mt['[htc_dir]']       = 'htc/'
+        mt['[log_dir]']       = 'logfiles/'
+        mt['[meander_dir]']   = False
+        mt['[opt_dir]']       = False
+        mt['[pbs_out_dir]']   = 'pbs_out/'
+        mt['[res_dir]']       = 'res/'
+        mt['[iter_dir]']      = 'iter/'
+        mt['[turb_dir]']      = 'turb/'
+        mt['[turb_db_dir]']   = '../turb/'
+        mt['[wake_dir]']      = False
+        mt['[hydro_dir]']     = False
+        mt['[mooring_dir]']   = False
+        mt['[externalforce]'] = False
+        mt['[Case folder]']   = 'NoCaseFolder'
+        # zip_root_files is only used when copying to the run_dir and when
+        # creating the zip file; it is defined in the HtcMaster object
+        mt['[zip_root_files]'] = []
+        # only active on PBS level, so files have to be present in the run_dir
+        mt['[copyback_files]'] = []   # copyback_resultfile
+        mt['[copyback_frename]'] = [] # copyback_resultrename
+        mt['[copyto_files]'] = []     # copyto_inputfile
+        mt['[copyto_generic]'] = []   # copyto_input_required_defaultname
+        # =====================================================================
+        # required tags by HtcMaster and PBS in order to function properly
+        # =====================================================================
+        # the express queue ('#PBS -q xpresq') has a maximum walltime of 1h
+        mt['[pbs_queue_command]'] = '#PBS -q workq'
+        # walltime should have following format: hh:mm:ss
+        mt['[walltime]'] = '04:00:00'
+        mt['[auto_walltime]'] = False
+
+        return mt
+
     def opt_tags_h2_eigenanalysis(self, basename):
         """Return opt_tags suitable for a standstill HAWC2 eigen analysis.
         """
@@ -215,7 +94,7 @@ class Configurations:
         opt_tags[0]['[blade_damp_z]'] = 0.0
         opt_tags[0]['[blade_nbodies]'] = 1
         opt_tags[0]['[Windspeed]'] = 0.0
-        opt_tags[0]['[init_wr]'] = 0.0
+        opt_tags[0]['[initspeed_rotor_rads]'] = 0.0
         opt_tags[0]['[operational_data]'] = 'case-turbine2-empty.opt'
 
         return opt_tags
@@ -231,8 +110,8 @@ class Configurations:
         opt_tags[0]['[blade_damp_z]'] = 0.0
         opt_tags[0]['[blade_nbodies]'] = 1
         opt_tags[0]['[Windspeed]'] = 0.0
-        opt_tags[0]['[init_wr]'] = 0.0
-        opt_tags[0]['[fixed_omega]'] = 0.0
+        opt_tags[0]['[initspeed_rotor_rads]'] = 0.0
+        opt_tags[0]['[fixspeed_rotor_rads]'] = 0.0
         opt_tags[0]['[operational_data]'] = 'case-turbine2-empty.opt'
 
         return opt_tags
@@ -257,7 +136,7 @@ class Configurations:
         hs2_res.load_operation(fpath)
         omegas = hs2_res.operation.rotorspeed_rpm.values*np.pi/30.0
         winds = hs2_res.operation.windspeed.values
-        pitchs = hs2_res.operation.pitch_deg.values
+        pitchs = -1.0*hs2_res.operation.pitch_deg.values
 
         return self.set_opdata(winds, pitchs, omegas, basename=basename)
 
@@ -298,9 +177,9 @@ class Configurations:
                 tmp = '%s_%02.0fms_%04.01fdeg_%04.02frads_hawc2' % rpl
                 opt_dict['[Case id.]'] = tmp
             opt_dict['[Windspeed]'] = wind
-            opt_dict['[pitch_angle]'] = pitch
-            opt_dict['[fixed_omega]'] = omega
-            opt_dict['[init_wr]'] = omega
+            opt_dict['[blade_pitch_deg]'] = pitch
+            opt_dict['[fixspeed_rotor_rads]'] = omega
+            opt_dict['[initspeed_rotor_rads]'] = omega
 #            opt_dict['[t0]'] = int(2000.0/opt_dict['[Windspeed]']) # or 2000?
 #            opt_dict['[time stop]'] = opt_dict['[t0]']+100
 #            opt_dict['[time_stop]'] = opt_dict['[t0]']+100
@@ -311,7 +190,7 @@ class Configurations:
 class Sims(object):
 
     def __init__(self, sim_id, P_MASTERFILE, MASTERFILE, P_SOURCE, P_RUN,
-                 PROJECT, POST_DIR):
+                 PROJECT, POST_DIR, master_tags_default):
         """
         Create HtcMaster() object
         =========================
@@ -326,6 +205,27 @@ class Sims(object):
         It is considered as good practice to define the default values for all
         the variable tags in the master_tags
 
+        Parameters
+        ----------
+
+        sim_id : str
+
+        P_MASTERFILE : str
+
+        MASTERFILE : str
+
+        P_SOURCE : str
+
+        P_RUN : str
+
+        PROJECT : str
+
+        POST_DIR : str
+
+        master_tags_default : dict
+            Dictionary with the default master tag values. Should be created
+            by the turbine specific class Configurations.set_master_defaults()
+
         Members
         -------
 
@@ -347,7 +247,7 @@ class Sims(object):
         # FIXME: some tags are still variable! Only static tags here that do
         # not depent on any other variable that can change
         self.master = sim.HtcMaster()
-        self.set_tag_defaults()
+        self.master.tags.update(master_tags_default)
 
     def _var_tag_func(self, master, case_id_short=False):
         """
@@ -476,130 +376,6 @@ class Sims(object):
         rpl = (self.PROJECT, self.master.tags['[sim_id]'])
         self.master.tags['[model_zip]'] = '%s_%s.zip' % rpl
 
-    def set_tag_defaults(self):
-        """
-        Set the default values of the required master tags
-        """
-        mt = self.master.tags
-
-        # other required tags and their defaults
-        mt['[dt_sim]'] = 0.01
-        mt['[hawc2_exe]'] = 'hawc2-latest'
-        # convergence_limits  0.001  0.005  0.005 ;
-        # critical one, risidual on the forces: 0.0001 = 1e-4
-        mt['[epsresq]'] = '1.0' # default=10.0
-        # increment residual
-        mt['[epsresd]'] = '0.5' # default= 1.0
-        # constraint equation residual
-        mt['[epsresg]'] = '1e-8' # default= 1e-7
-        # folder names for the saved results, htc, data, zip files
-        # Following dirs are relative to the model_dir_server and they specify
-        # the location of where the results, logfiles, animation files that where
-        # run on the server should be copied to after the simulation has finished.
-        # on the node, it will try to copy the turbulence files from these dirs
-        mt['[animation_dir]'] = 'animation/'
-        mt['[control_dir]']   = 'control/'
-        mt['[data_dir]']      = 'data/'
-        mt['[eigen_analysis]'] = False
-        mt['[eigenfreq_dir]'] = False
-        mt['[htc_dir]']       = 'htc/'
-        mt['[log_dir]']       = 'logfiles/'
-        mt['[meander_dir]']   = False
-        mt['[opt_dir]']       = False
-        mt['[pbs_out_dir]']   = 'pbs_out/'
-        mt['[res_dir]']       = 'res/'
-        mt['[iter_dir]']      = 'iter/'
-        mt['[turb_dir]']      = 'turb/'
-        mt['[turb_db_dir]']   = '../turb/'
-        mt['[wake_dir]']      = False
-        mt['[hydro_dir]']     = False
-        mt['[mooring_dir]']   = False
-        mt['[externalforce]'] = False
-        mt['[Case folder]']   = 'NoCaseFolder'
-        # zip_root_files only is used when copy to run_dir and zip creation, define
-        # in the HtcMaster object
-        mt['[zip_root_files]'] = []
-        # only active on PBS level, so files have to be present in the run_dir
-        mt['[copyback_files]'] = []   # copyback_resultfile
-        mt['[copyback_frename]'] = [] # copyback_resultrename
-        mt['[copyto_files]'] = []     # copyto_inputfile
-        mt['[copyto_generic]'] = []   # copyto_input_required_defaultname
-
-        # In master file tags within the HAWC2 vs HAWCStab2 context
-        mt['[hawc2]'] = False
-        mt['[output]'] = False
-        mt['[eigen_analysis]'] = False
-        mt['[system_eigen_analysis]'] = False
-        mt['[operational_data]'] = 'case_name.opt'
-
-        mt['[gravity]'] = 0.0
-        mt['[shaft_tilt]'] = 0.0 # 5.0
-        mt['[coning]'] = 0.0 # 2.5
-        mt['[Windspeed]'] = 1.0
-        mt['[wtilt]'] = 0.0
-        mt['[wdir]'] = 0.0
-        mt['[aerocalc]'] = 1
-        mt['[Induction]'] = 0
-        mt['[tip_loss]'] = 0
-        mt['[Dyn stall]'] = 0
-        mt['[tu_model]'] = 0
-        mt['[shear_exp]'] = 0
-        mt['[tower_shadow]'] = 0
-        mt['[TI]'] = 1
-        mt['[fixed_omega]'] = 1.0
-        mt['[init_wr]'] = 0
-        mt['[pc_file_name]'] = 'hawc_pc.mhh'
-        mt['[ae_file_name]'] = 'hawc2_ae.mhh'
-        mt['[nr_ae_sections]'] = 30
-        mt['[use_nr_ae_sections]'] = True
-        mt['[use_ae_distrb_file]'] = False
-        mt['[ae_set_nr]'] = 1
-        # tors_e output depends on the pitch axis configuration
-        mt['[c12]'] = False
-        mt['[c14]'] = False
-
-        mt['[t0]'] = 500
-        mt['[time stop]'] = 600
-
-        mt['[hs2]'] = False
-        mt['[nr_blade_modes_hs2]'] = 10
-        mt['[stab_analysis]'] = False
-        mt['[steady_states]'] = True
-        mt['[hs2_bladedeform_switch]'] = True
-        mt['[hs2_gradients_switch]'] = False
-        # by default take the stiff set
-        mt['[st_file]'] = 'hawc2_st.mhh'
-        mt['[tower_set]'] = 4 # 1
-        mt['[shaft_set]'] = 4 # 2
-        mt['[blade_set]'] = 4 # 3
-        mt['[tower_subset]'] = 1
-        mt['[shaft_subset]'] = 1
-        mt['[blade_subset]'] = 1
-        mt['[blade_nbodies]'] = 1
-        mt['[blade_posx]'] = -0.75
-        mt['[blade_damp_x]'] = 0.01
-        mt['[blade_damp_y]'] = 0.01
-        mt['[blade_damp_z]'] = 0.01
-        # HAWCStab2 convergence criteria
-        mt['[bem_tol]'] = 1e-12
-        mt['[bem_itmax]'] = 10000
-        mt['[bem_1relax]'] = 0.02
-        mt['[ae_tolrel]'] = 1e-7
-        mt['[ae_itmax]'] = 2000
-        mt['[ae_1relax]'] = 0.5
-        mt['[tol_7]'] = 10
-        mt['[tol_8]'] = 5
-        mt['[tol_9]'] = 1e-8
-
-        # =========================================================================
-        # basic required tags by HtcMaster and PBS in order to function properly
-        # =========================================================================
-        # the express queue ('#PBS -q xpresq') has a maximum walltime of 1h
-        mt['[pbs_queue_command]'] = '#PBS -q workq'
-        # walltime should have following format: hh:mm:ss
-        mt['[walltime]'] = '04:00:00'
-        mt['[auto_walltime]'] = False
-
     def get_dlc_casedefs(self):
         """
         Create iter_dict and opt_tags based on spreadsheets
@@ -671,7 +447,8 @@ class Sims(object):
 
         return tune_tags
 
-    def post_processing(self, statistics=True, resdir=None):
+    def post_processing(self, statistics=True, resdir=None,
+                        calc_mech_power=False):
         """
         Parameters
         ----------
@@ -715,19 +492,50 @@ class Sims(object):
 
         if statistics:
             tags=['[windspeed]']
-            stats_df = cc.statistics(calc_mech_power=False, ch_fatigue=[],
-                                     tags=tags, update=False)
+            stats_df = cc.statistics(calc_mech_power=calc_mech_power,
+                                     ch_fatigue=[], tags=tags, update=False)
             ftarget = os.path.join(self.POST_DIR, '%s_statistics.xlsx')
             stats_df.to_excel(ftarget % self.sim_id)
 
 
 class MappingsH2HS2(object):
 
-    def __init__(self, chord_length=3.0):
+    def __init__(self, config):
         """
+
+        Parameters
+        ----------
+
+        config : Config class based on ConfigBase
+
         """
         self.hs2_res = hs2.results()
-        self.chord_length = chord_length
+        self.h2_maps = config.h2_maps
+
+        self.units = {'curved_s': '[m]',
+                      'Cl': '[-]',
+                      'Cd': '[-]',
+                      'Ct': '[-]',
+                      'Cp': '[-]',
+                      'ax_ind': '[-]',
+                      'tan_ind': '[-]',
+                      'vrel': '[m/s]',
+                      'inflow_angle': '[deg]',
+                      'AoA': '[deg]',
+                      'pos_x': '[m]',
+                      'pos_y': '[m]',
+                      'pos_z': '[m]',
+                      'def_x': '[m]',
+                      'def_y': '[m]',
+                      'def_z': '[m]',
+                      'torsion': '[deg]',
+                      'twist': '[deg]',
+                      'ax_ind_vel': '[m/s]',
+                      'tan_ind_vel': '[m/s]',
+                      'F_x': '[N/m]',
+                      'F_y': '[N/m]',
+                      'M': '[Nm/m]',
+                      'chord': '[m]'}
 
     def powercurve(self, h2_df_stats, fname_hs):
 
@@ -736,19 +544,16 @@ class MappingsH2HS2(object):
 
     def _powercurve_h2(self, df_stats):
 
-        mappings = {'Ae rot. power' : 'P_aero',
-                    'Ae rot. thrust': 'T_aero',
-                    'Vrel-1-39.03'  : 'vrel_39',
-                    'Omega'         : 'rotorspeed',
-                    'tower-tower-node-010-forcevec-y' : 'T_towertop',
-                    'tower-shaft-node-003-forcevec-y' : 'T_shafttip'}
-
         df_stats.sort_values('[windspeed]', inplace=True)
         df_mean = pd.DataFrame()
         df_std = pd.DataFrame()
 
-        for key, value in mappings.items():
+        for key, value in self.h2_maps.items():
             tmp = df_stats[df_stats['channel']==key]
+            if len(tmp) == 0:
+                rpl = (key, value)
+                msg = 'HAWC2 channel %s is needed for %s but is missing' % rpl
+                raise ValueError(msg)
             df_mean[value] = tmp['mean'].values.copy()
             df_std[value] = tmp['std'].values.copy()
 
@@ -782,10 +587,12 @@ class MappingsH2HS2(object):
         if h2_df_stats is not None:
             self.h2_df_stats = h2_df_stats
             if fname_h2_tors is not None:
-                self.distribution_torsion_h2(fname_h2_tors)
+                self.distribution_stats_h2(fname_h2_tors, 'Tors_e', 'torsion')
 
     def _distribution_hs2(self):
         """Read a HAWCStab2 *.ind file (blade distribution loading)
+
+        rot_angle and rot_vec_123 in HS2 should be in rotor polar coordinates
         """
 
         mapping_hs2 =  {'s [m]'       :'curved_s',
@@ -803,35 +610,44 @@ class MappingsH2HS2(object):
                         'Z_AC0 [m]'   :'pos_z',
                         'UX0 [m]'     :'def_x',
                         'UY0 [m]'     :'def_y',
+                        'UZ0 [m]'     :'def_z',
                         'Tors. [rad]' :'torsion',
                         'Twist[rad]'  :'twist',
                         'V_a [m/s]'   :'ax_ind_vel',
                         'V_t [m/s]'   :'tan_ind_vel',
                         'FX0 [N/m]'   :'F_x',
                         'FY0 [N/m]'   :'F_y',
-                        'M0 [Nm/m]'   :'M'}
+                        'M0 [Nm/m]'   :'M',
+                        'chord [m]'   :'chord',
+                        'angle [rad]' :'rot_angle',
+                        'v_1 [-]'     :'rot_vec_1',
+                        'v_2 [-]'     :'rot_vec_2',
+                        'v_3 [-]'     :'rot_vec_3'}
 
         try:
-            hs2_cols = [k for k in mapping_hs2]
+            hs2_cols = list(mapping_hs2)
             # select only the HS channels that will be used for the mapping
-            std_cols = [mapping_hs2[k] for k in hs2_cols]
+            std_cols = list(mapping_hs2.values())
             self.hs_aero = self.hs2_res.ind.df_data[hs2_cols].copy()
         except KeyError:
             # some results have been created with older HAWCStab2 that did not
             # include CT and CP columns
             mapping_hs2.pop('CT [-]')
             mapping_hs2.pop('CP [-]')
-            hs2_cols = [k for k in mapping_hs2]
-            std_cols = [mapping_hs2[k] for k in hs2_cols]
+            hs2_cols = list(mapping_hs2)
+            std_cols = list(mapping_hs2.values())
             # select only the HS channels that will be used for the mapping
             self.hs_aero = self.hs2_res.ind.df_data[hs2_cols].copy()
 
         # change column names to the standard form that is shared with H2
         self.hs_aero.columns = std_cols
+        chord12 = self.hs_aero['chord'] / 2.0
+        self.hs_aero['pos_x'] -= (np.cos(self.hs_aero['twist'])*chord12)
+        self.hs_aero['pos_y'] += (np.sin(self.hs_aero['twist'])*chord12)
         self.hs_aero['AoA'] *= (180.0/np.pi)
         self.hs_aero['inflow_angle'] *= (180.0/np.pi)
         self.hs_aero['torsion'] *= (180.0/np.pi)
-#        self.hs_aero['pos_x'] = (-1.0) # self.chord_length / 4.0
+        self.hs_aero['twist'] *= (180.0/np.pi)
 
     def _distribution_h2(self):
         mapping_h2 =  { 'Radius_s'  :'curved_s',
@@ -847,11 +663,12 @@ class MappingsH2HS2(object):
                         'pos_RP_x'  :'pos_x',
                         'pos_RP_y'  :'pos_y',
                         'pos_RP_z'  :'pos_z',
+                        'Chord'     :'chord',
                         'Secfrc_RPx':'F_x',
                         'Secfrc_RPy':'F_y',
                         'Secmom_RPz':'M'}
-        h2_cols = [k for k in mapping_h2]
-        std_cols = [mapping_h2[k] for k in h2_cols]
+        h2_cols = list(mapping_h2)
+        std_cols = list(mapping_h2.values())
 
         # select only the h2 channels that will be used for the mapping
         h2_aero = self.h2_res[h2_cols].copy()
@@ -861,17 +678,30 @@ class MappingsH2HS2(object):
         h2_aero['def_y'] = self.h2_res['Pos_B_y'] - self.h2_res['Inipos_y_y']
         h2_aero['def_z'] = self.h2_res['Pos_B_z'] - self.h2_res['Inipos_z_z']
         h2_aero['ax_ind_vel'] *= (-1.0)
-        h2_aero['pos_x'] += (self.chord_length / 2.0)
+#        h2_aero['pos_x'] += (self.h2_res['Chord'] / 2.0)
         h2_aero['F_x'] *= (1e3)
         h2_aero['F_y'] *= (1e3)
         h2_aero['M'] *= (1e3)
+        h2_aero['M'] -= (h2_aero['F_y']*h2_aero['chord']/2.0)
+        h2_aero['twist'] = np.nan
 #        # HAWC2 includes root and tip nodes, while HAWC2 doesn't. Remove them
 #        h2_aero = h2_aero[1:-1]
         self.h2_aero = h2_aero
 
-    def distribution_torsion_h2(self, fname_h2):
-        """Determine torsion distribution from the HAWC2 result statistics.
-        tors_e is in degrees.
+    def distribution_stats_h2(self, fname_h2, sensortype, newname):
+        """Determine blade distribution sensor from the HAWC2 statistics.
+        This requires that for each aerodynamic calculation point there should
+        be an output sensor defined manually in the output section.
+
+        Parameters
+        ----------
+
+        fname_h2
+
+        sensortype
+
+        newname
+
         """
         if not hasattr(self, 'h2_aero'):
             raise UserWarning('first run blade_distribution')
@@ -880,10 +710,11 @@ class MappingsH2HS2(object):
         fpath = os.path.dirname(fname_h2)
         fname = os.path.basename(fname_h2)
         res = sim.windIO.LoadResults(fpath, fname, readdata=False)
-        sel = res.ch_df[res.ch_df.sensortype == 'Tors_e'].copy()
+        sel = res.ch_df[res.ch_df.sensortype == sensortype].copy()
+        if len(sel) == 0:
+            msg = 'HAWC2 sensor type "%s" is missing, are they defined?'
+            raise ValueError(msg % sensortype)
         sel.sort_values(['radius'], inplace=True)
-        self.h2_aero['Radius_s_tors'] = sel.radius.values.copy()
-        self.h2_aero['tors_e'] = sel.radius.values.copy()
         tors_e_channels = sel.ch_name.tolist()
 
         # find the current case in the statistics DataFrame
@@ -905,15 +736,9 @@ class MappingsH2HS2(object):
 
         # FIXME: what if number of torsion outputs is less than aero
         # calculation points?
-#        df_tmp = pd.DataFrame()
-        self.h2_aero['torsion'] = df_tors_e['mean'].values.copy()
-        self.h2_aero['torsion_std'] = df_tors_e['std'].values.copy()
-        self.h2_aero['torsion_radius_s'] = df_tors_e['radius'].values.copy()
-#        df_tmp = pd.DataFrame()
-#        df_tmp['torsion'] = df_tors_e['mean'].copy()
-#        df_tmp['torsion_std'] = df_tors_e['std'].copy()
-#        df_tmp['torsion_radius_s'] = df_tors_e['radius'].copy()
-#        df_tmp.set_index('')
+        self.h2_aero['%s' % newname] = df_tors_e['mean'].values.copy()
+        self.h2_aero['%s_std' % newname] = df_tors_e['std'].values.copy()
+        self.h2_aero['%s_radius_s' % newname] = df_tors_e['radius'].values.copy()
 
     def body_structure_modes(self, fname_h2, fname_hs):
         self._body_structure_modes_h2(fname_h2)
@@ -941,7 +766,13 @@ class Plots(object):
     the HAWC2 output output_at_time, and HAWCStab2 output *.ind
     """
 
-    def __init__(self):
+    def __init__(self, config):
+        """
+        Parameters
+        ----------
+
+        config : Config class based on ConfigBase
+        """
 
         self.h2c = 'b'
         self.h2ms = '+'
@@ -951,30 +782,38 @@ class Plots(object):
         self.hsls = '--'
         self.errls = '-'
         self.errc = 'k'
-        self.errlab = 'diff [\\%]'
+        self.errms = 'x'
+#        self.errlab = 'diff [\\%]'
+        self.errlab = 'diff'
         self.interactive = False
 
+        self.config = config
+
         self.dist_size = (16, 11)
+        self.dist_nrows = 3
+        self.dist_ncols = 4
         self.dist_channels = ['pos_x', 'pos_y', 'AoA', 'inflow_angle',
                               'Cl', 'Cd', 'vrel', 'ax_ind_vel',
-                              'F_x', 'F_y', 'M']
+                              'F_x', 'F_y', 'M', 'torsion']
 
     def load_h2(self, fname_h2, h2_df_stats=None, fname_h2_tors=None):
 
-        res = MappingsH2HS2()
+        res = MappingsH2HS2(self.config)
         res.h2_res = sim.windIO.ReadOutputAtTime(fname_h2)
+        self.units = res.units
         res._distribution_h2()
         if h2_df_stats is not None:
             res.h2_df_stats = h2_df_stats
             if fname_h2_tors is not None:
-                res.distribution_torsion_h2(fname_h2_tors)
+                res.distribution_stats_h2(fname_h2_tors, 'Tors_e', 'torsion')
 
         return res
 
     def load_hs(self, fname_hs):
 
-        res = MappingsH2HS2()
+        res = MappingsH2HS2(self.config)
         res.hs2_res.load_ind(fname_hs)
+        self.units = res.units
         res._distribution_hs2()
 
         return res
@@ -1009,7 +848,8 @@ class Plots(object):
         print('saved:', fname)
 
     def distribution(self, results, labels, title, channels, x_ax='pos_z',
-                     xlabel='Z-coordinate [m]', nrows=2, ncols=4, size=(16, 5)):
+                     xlabel='Z-coordinate [m]', nrows=2, ncols=4, size=(16, 5),
+                     i0=1):
         """
         Compare blade distribution results
         """
@@ -1029,7 +869,9 @@ class Plots(object):
                     label=lab1, alpha=0.9, marker=self.h2ms, ls=self.h2ls)
             ax.plot(radius2, res2[chan].values, color=self.hsc,
                     label=lab2, alpha=0.7, marker=self.hsms, ls=self.hsls)
-            ax.set_ylabel(chan.replace('_', '\\_'))
+            ax.set_ylabel('%s %s' % (chan.replace('_', '\\_'), self.units[chan]))
+            xlim = max(radius1.max(), radius2.max())
+            ax.set_xlim([0, xlim])
 
 #            if len(radius1) > len(radius2):
 #                radius = res1.hs_aero['pos_z'].values[n0:]
@@ -1055,27 +897,23 @@ class Plots(object):
 #                    qq1 = res1.hs_aero[chan].values[n0:]
 #                    qq2 = interpolate.griddata(x, y, radius)
 
-            # calculate moment arm
-            if chan == 'M':
-                arm = res1.M / res1.F_y
-                axr = ax.twinx()
-                labr = lab1 + ' moment arm'
-                axr.plot(radius1, arm, color=self.errc, label=labr, alpha=0.6,
-                         ls=self.errls, marker=self.h2ms)
-            else:
-                # relative errors on the right axes
-                err = np.abs(1.0 - (res1[chan].values / res2[chan].values))*100.0
-                axr = ax.twinx()
-                axr.plot(radius1, err, color=self.errc, ls=self.errls,
-                         alpha=0.6, label=self.errlab)
-                if err.max() > 50:
-                    axr.set_ylim([0, 35])
-
-            # use axr for the legend
-            lines = ax.lines + axr.lines
-            labels = [l.get_label() for l in lines]
-            leg = axr.legend(lines, labels, loc='best')
-            leg.get_frame().set_alpha(0.5)
+            # relative errors on the right axes
+#            err = np.abs(1.0 - (res1[chan].values / res2[chan].values))*100.0
+            # absolute errors on the right axes
+            err = res1[chan].values[i0:] - res2[chan].values[i0:]
+            axr = ax.twinx()
+            axr.plot(radius1[i0:], err, color=self.errc, ls=self.errls,
+                     alpha=0.6, label=self.errlab, marker=self.errms)
+#            if err.max() > 50:
+#                axr.set_ylim([0, 35])
+
+            # use axr for the legend, but only for the first plot
+            if i == 0:
+                lines = ax.lines + axr.lines
+                labels = [l.get_label() for l in lines]
+                leg = axr.legend(lines, labels, loc='best')
+                leg.get_frame().set_alpha(0.5)
+
         # x-label only on the last row
         for k in range(ncols):
             axesflat[-k-1].set_xlabel(xlabel)
@@ -1083,7 +921,8 @@ class Plots(object):
         axes = self.set_axes_label_grid(axes)
         return fig, axes
 
-    def all_h2_channels(self, results, labels, fpath, channels=None):
+    def all_h2_channels(self, results, labels, fpath, channels=None,
+                        size=(10,5)):
         """Results is a list of res (=HAWC2 results object)"""
 
         for chan, details in results[0].ch_dict.items():
@@ -1093,7 +932,8 @@ class Plots(object):
             for res in results:
                 resp.append([res.sig[:,0], res.sig[:,details['chi']]])
 
-            fig, axes = self.new_fig(title=chan.replace('_', '\\_'))
+            fig, axes = self.new_fig(title=chan.replace('_', '\\_'),
+                                     size=size)
             try:
                 mplutils.time_psd(resp, labels, axes, alphas=[1.0, 0.7], NFFT=None,
                                    colors=['k-', 'r-'], res_param=250, f0=0, f1=5,
@@ -1121,7 +961,9 @@ class Plots(object):
 
         fig, axes = self.distribution(results, labels, title, self.dist_channels,
                                       x_ax='pos_z', xlabel='Z-coordinate [m]',
-                                      nrows=3, ncols=4, size=self.dist_size)
+                                      nrows=self.dist_nrows,
+                                      ncols=self.dist_ncols,
+                                      size=self.dist_size)
 
         return fig, axes
 
@@ -1136,7 +978,9 @@ class Plots(object):
 
         fig, axes = self.distribution(results, labels, title, self.dist_channels,
                                       x_ax='pos_z', xlabel='Z-coordinate [m]',
-                                      nrows=3, ncols=4, size=self.dist_size)
+                                      nrows=self.dist_nrows,
+                                      ncols=self.dist_ncols,
+                                      size=self.dist_size)
 
         return fig, axes
 
@@ -1145,11 +989,24 @@ class Plots(object):
         """Compare aerodynamics, blade deflections between HAWC2 and HAWCStab2.
         This is based on HAWCStab2 *.ind files, and an HAWC2 output_at_time
         output file.
+
+        Parameters
+        ----------
+
+        fname_h2
+
+        fname_hs2
+
+        title
+
+        n0 : int, default=0
+            Number of nodes to ignore at the blade root section
         """
 
-        results = MappingsH2HS2()
+        results = MappingsH2HS2(self.config)
         results.blade_distribution(fname_h2, fname_hs2, h2_df_stats=h2_df_stats,
                                    fname_h2_tors=fname_h2_tors)
+        self.units = results.units
         res = [results.h2_aero[n0+1:-1], results.hs_aero[n0:]]
 
 #        channels = ['pos_x', 'pos_y', 'AoA', 'inflow_angle', 'Cl', 'Cd',
@@ -1158,7 +1015,9 @@ class Plots(object):
 
         fig, axes = self.distribution(res, labels, title, self.dist_channels,
                                       x_ax='pos_z', xlabel='Z-coordinate [m]',
-                                      nrows=3, ncols=4, size=self.dist_size)
+                                      nrows=self.dist_nrows,
+                                      ncols=self.dist_ncols,
+                                      size=self.dist_size)
 
         return fig, axes
 
@@ -1168,7 +1027,7 @@ class Plots(object):
         output file.
         """
 
-        results = MappingsH2HS2()
+        results = MappingsH2HS2(self.config)
         results.blade_distribution(fname_h2, fname_hs2)
         res = [results.h2_aero[n0+1:-1], results.hs_aero[n0:]]
 
@@ -1185,7 +1044,7 @@ class Plots(object):
 
     def powercurve(self, h2_df_stats, fname_hs, title, size=(8.6, 4)):
 
-        results = MappingsH2HS2()
+        results = MappingsH2HS2(self.config)
         results.powercurve(h2_df_stats, fname_hs)
 
         fig, axes = self.new_fig(title=title, nrows=1, ncols=2, size=size)
@@ -1193,67 +1052,85 @@ class Plots(object):
         wind_h2 = results.pwr_h2_mean['windspeed'].values
         wind_hs = results.pwr_hs['windspeed'].values
 
-        # POWER
+        # POWER ---------------------------------------------------------------
         ax = axes[0]
-        key = 'P_aero'
+        ax.set_title('Power [kW]')
         # HAWC2
-        yerr = results.pwr_h2_std[key]
-        ax.errorbar(wind_h2, results.pwr_h2_mean[key], color=self.h2c, yerr=yerr,
-                    marker=self.h2ms, ls=self.h2ls, label='HAWC2', alpha=0.9)
+        keys = ['P_aero', 'P_mech']
+        lss = [self.h2ls, '--', ':']
+        # HAWC2
+        for key, ls in zip(keys, lss):
+            # it is possible the mechanical power has not been calculated
+            if key not in results.pwr_h2_mean:
+                continue
+            label = 'HAWC2 %s' % (key.replace('_', '$_{') + '}$')
+            yerr = results.pwr_h2_std[key].values
+            c = self.h2c
+            ax.errorbar(wind_h2, results.pwr_h2_mean[key].values, color=c, ls=ls,
+                        label=label, alpha=0.9, yerr=yerr, marker=self.h2ms)
         # HAWCSTAB2
-        ax.plot(wind_hs, results.pwr_hs[key], label='HAWCStab2',
+        key = 'P_aero'
+        ax.plot(wind_hs, results.pwr_hs[key].values, label='HAWCStab2',
                 alpha=0.7, color=self.hsc, ls=self.hsls, marker=self.hsms)
-        ax.set_title('Power [kW]')
-        # relative errors on the right axes
+
+        # errors on the right axes
         axr = ax.twinx()
         assert np.allclose(wind_h2, wind_hs)
         qq1 = results.pwr_h2_mean[key].values
         qq2 = results.pwr_hs[key].values
-        err = np.abs(1.0 - qq1 / qq2)*100.0
+        err = qq1 - qq2
         axr.plot(wind_hs, err, color=self.errc, ls=self.errls, alpha=0.6,
-                 label=self.errlab)
-#        axr.set_ylabel('absolute error []')
-#        axr.set_ylim([])
+                 label=self.errlab + ' P$_{aero}$')
+        ax.set_xlim([wind_h2.min(), wind_h2.max()])
 
-        # THRUST
+        # legends
+        lines, labels = ax.get_legend_handles_labels()
+        linesr, labelsr = axr.get_legend_handles_labels()
+        leg = axr.legend(lines + linesr, labels + labelsr, loc='lower right')
+        leg.get_frame().set_alpha(0.5)
+
+        # THRUST --------------------------------------------------------------
         ax = axes[1]
+        ax.set_title('Thrust [kN]')
         keys = ['T_aero', 'T_shafttip']
         lss = [self.h2ls, '--', ':']
         # HAWC2
         for key, ls in zip(keys, lss):
             label = 'HAWC2 %s' % (key.replace('_', '$_{') + '}$')
-            yerr = results.pwr_h2_std[key]
+            yerr = results.pwr_h2_std[key].values
             c = self.h2c
-            ax.errorbar(wind_h2, results.pwr_h2_mean[key], color=c, ls=ls,
+            ax.errorbar(wind_h2, results.pwr_h2_mean[key].values, color=c, ls=ls,
                         label=label, alpha=0.9, yerr=yerr, marker=self.h2ms)
         # HAWCStab2
-        ax.plot(wind_hs, results.pwr_hs['T_aero'], color=self.hsc, alpha=0.7,
+        ax.plot(wind_hs, results.pwr_hs['T_aero'].values, color=self.hsc, alpha=0.7,
                 label='HAWCStab2 T$_{aero}$', marker=self.hsms, ls=self.hsls)
-        # relative errors on the right axes
+
+        # errors on the right axes
         axr = ax.twinx()
         qq1 = results.pwr_h2_mean['T_aero'].values
         qq2 = results.pwr_hs['T_aero'].values
-        err = np.abs(1.0 - (qq1 / qq2))*100.0
+        err = qq1 - qq2
         axr.plot(wind_hs, err, color=self.errc, ls=self.errls, alpha=0.6,
-                 label=self.errlab)
-        ax.set_title('Thrust [kN]')
+                 label=self.errlab + ' T$_{aero}$')
+        ax.set_xlim([wind_h2.min(), wind_h2.max()])
 
-        axes = self.set_axes_label_grid(axes, setlegend=True)
-#        # use axr for the legend
-#        lines = [ax.lines[2]] + [ax.lines[5]] + [ax.lines[6]] + axr.lines
-#        labels = keys + ['HAWCStab2 T$_{aero}$', self.errlab]
-#        leg = axr.legend(lines, labels, loc='best')
-#        leg.get_frame().set_alpha(0.5)
+        # legends
+        lines, labels = ax.get_legend_handles_labels()
+        linesr, labelsr = axr.get_legend_handles_labels()
+        leg = axr.legend(lines + linesr, labels + labelsr, loc='lower right')
+        leg.get_frame().set_alpha(0.5)
+
+        axes = self.set_axes_label_grid(axes, setlegend=False)
 
         return fig, axes
 
     def h2_powercurve(self, h2_df_stats1, h2_df_stats2, title, labels,
                       size=(8.6,4)):
-        res1 = MappingsH2HS2()
+        res1 = MappingsH2HS2(self.config)
         res1._powercurve_h2(h2_df_stats1)
         wind1 = res1.pwr_h2_mean['windspeed'].values
 
-        res2 = MappingsH2HS2()
+        res2 = MappingsH2HS2(self.config)
         res2._powercurve_h2(h2_df_stats2)
         wind2 = res2.pwr_h2_mean['windspeed'].values
 
@@ -1263,11 +1140,11 @@ class Plots(object):
         ax = axes[0]
         key = 'P_aero'
         # HAWC2
-        yerr1 = res1.pwr_h2_std[key]
-        ax.errorbar(wind1, res1.pwr_h2_mean[key], color=self.h2c, yerr=yerr1,
+        yerr1 = res1.pwr_h2_std[key].values
+        ax.errorbar(wind1, res1.pwr_h2_mean[key].values, color=self.h2c, yerr=yerr1,
                     marker=self.h2ms, ls=self.h2ls, label=labels[0], alpha=0.9)
         yerr2 = res2.pwr_h2_std[key]
-        ax.errorbar(wind2, res2.pwr_h2_mean[key], color=self.hsc, yerr=yerr2,
+        ax.errorbar(wind2, res2.pwr_h2_mean[key].values, color=self.hsc, yerr=yerr2,
                     marker=self.hsms, ls=self.hsls, label=labels[1], alpha=0.7)
         ax.set_title('Power [kW]')
         # relative errors on the right axes
@@ -1285,15 +1162,15 @@ class Plots(object):
         lss = [self.h2ls, '--', ':']
         for key, ls in zip(keys, lss):
             label = '%s %s' % (labels[0], key.replace('_', '$_{') + '}$')
-            yerr = res1.pwr_h2_std[key]
+            yerr = res1.pwr_h2_std[key].values
             c = self.h2c
-            ax.errorbar(wind1, res1.pwr_h2_mean[key], color=c, ls=ls,
+            ax.errorbar(wind1, res1.pwr_h2_mean[key].values, color=c, ls=ls,
                         label=label, alpha=0.9, yerr=yerr, marker=self.h2ms)
         for key, ls in zip(keys, lss):
             label = '%s %s' % (labels[1], key.replace('_', '$_{') + '}$')
-            yerr = res2.pwr_h2_std[key]
+            yerr = res2.pwr_h2_std[key].values
             c = self.hsc
-            ax.errorbar(wind2, res2.pwr_h2_mean[key], color=c, ls=ls,
+            ax.errorbar(wind2, res2.pwr_h2_mean[key].values, color=c, ls=ls,
                         label=label, alpha=0.9, yerr=yerr, marker=self.hsms)
         # relative errors on the right axes
         axr = ax.twinx()
@@ -1315,11 +1192,11 @@ class Plots(object):
 
     def hs_powercurve(self, fname1, fname2, title, labels, size=(8.6, 4)):
 
-        res1 = MappingsH2HS2()
+        res1 = MappingsH2HS2(self.config)
         res1._powercurve_hs2(fname1)
         wind1 = res1.pwr_hs['windspeed'].values
 
-        res2 = MappingsH2HS2()
+        res2 = MappingsH2HS2(self.config)
         res2._powercurve_hs2(fname2)
         wind2 = res2.pwr_hs['windspeed'].values
 
@@ -1328,9 +1205,9 @@ class Plots(object):
         # POWER
         ax = axes[0]
         key = 'P_aero'
-        ax.plot(wind1, res1.pwr_hs['P_aero'], label=labels[0],
+        ax.plot(wind1, res1.pwr_hs['P_aero'].values, label=labels[0],
                 alpha=0.9, color=self.h2c, ls=self.h2ls, marker=self.h2ms)
-        ax.plot(wind2, res2.pwr_hs['P_aero'], label=labels[1],
+        ax.plot(wind2, res2.pwr_hs['P_aero'].values, label=labels[1],
                 alpha=0.7, color=self.hsc, ls=self.hsls, marker=self.hsms)
         ax.set_title('Power [kW]')
         # relative errors on the right axes
@@ -1345,9 +1222,9 @@ class Plots(object):
 
         # THRUST
         ax = axes[1]
-        ax.plot(wind1, res1.pwr_hs['T_aero'], color=self.h2c, alpha=0.9,
+        ax.plot(wind1, res1.pwr_hs['T_aero'].values, color=self.h2c, alpha=0.9,
                 label=labels[0], marker=self.h2ms, ls=self.h2ls)
-        ax.plot(wind2, res2.pwr_hs['T_aero'], color=self.hsc, alpha=0.7,
+        ax.plot(wind2, res2.pwr_hs['T_aero'].values, color=self.hsc, alpha=0.7,
                 label=labels[1], marker=self.hsms, ls=self.hsls)
         # relative errors on the right axes
         axr = ax.twinx()
diff --git a/wetb/prepost/hawcstab2.py b/wetb/prepost/hawcstab2.py
index 516be89..c1afefe 100644
--- a/wetb/prepost/hawcstab2.py
+++ b/wetb/prepost/hawcstab2.py
@@ -59,7 +59,10 @@ class InductionResults(object):
     def read(self, fname):
         self.data = np.loadtxt(fname)
         self.wsp = int(fname.split('_u')[-1][:-4]) / 1000.0
-        self.df_data = pd.read_fwf(fname, header=0, widths=[14]*34)
+        try:
+            self.df_data = pd.read_fwf(fname, header=0, widths=[14]*38)
+        except:
+            self.df_data = pd.read_fwf(fname, header=0, widths=[14]*34)
         # sanitize the headers
         cols = self.df_data.columns
         self.df_data.columns = [k[:-2].replace('#', '').strip() for k in cols]
-- 
GitLab