
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 766 additions and 1362 deletions
......@@ -6,201 +6,222 @@
#PBS -e ./pbs_out_chunks/remote_chnk_00000.err
#PBS -W umask=0003
### Maximum wallclock time format HOURS:MINUTES:SECONDS
#PBS -l walltime=20:00:00
#PBS -l walltime=09:00:00
#PBS -l nodes=1:ppn=20
### Queue name
#PBS -q workq
echo "----------------------------------------------------------------------"
echo "activate python environment wetb_py3"
source /home/python/miniconda3/bin/activate wetb_py3
echo "CHECK 2x IF wetb_py3 IS ACTIVE, IF NOT TRY AGAIN"
CMD="from distutils.sysconfig import get_python_lib;print (get_python_lib().find('wetb_py3'))"
echo "activate python environment py36-wetb"
source /home/python/miniconda3/bin/activate py36-wetb
echo "CHECK 2x IF py36-wetb IS ACTIVE, IF NOT TRY AGAIN"
CMD="from distutils.sysconfig import get_python_lib;print (get_python_lib().find('/usr/lib/python'))"
ACTIVATED=`python -c "$CMD"`
if [ $ACTIVATED -eq -1 ]; then source activate wetb_py3;fi
if [ $ACTIVATED -eq 0 ]; then source /home/python/miniconda3/bin/activate py36-wetb;fi
ACTIVATED=`python -c "$CMD"`
if [ $ACTIVATED -eq -1 ]; then source activate wetb_py3;fi
if [ $ACTIVATED -eq 0 ]; then source /home/python/miniconda3/bin/activate py36-wetb;fi
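# editor's note (hedged): the python one-liner above asks where site-packages
# lives; a result of 0 means the path still starts with /usr/lib/python (the
# system python is active), so the environment is activated a second time.
# The older variant instead re-activated when find('wetb_py3') returned -1,
# i.e. when the active site-packages path did not contain wetb_py3.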
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/
cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:'
pwd
echo "create CPU directories on the scratch disk"
mkdir -p /scratch/$USER/$PBS_JOBID/remote/
mkdir -p /scratch/$USER/$PBS_JOBID/0/
mkdir -p /scratch/$USER/$PBS_JOBID/1/
mkdir -p /scratch/$USER/$PBS_JOBID/2/
mkdir -p /scratch/$USER/$PBS_JOBID/3/
mkdir -p /scratch/$USER/$PBS_JOBID/4/
mkdir -p /scratch/$USER/$PBS_JOBID/5/
mkdir -p /scratch/$USER/$PBS_JOBID/6/
mkdir -p /scratch/$USER/$PBS_JOBID/7/
mkdir -p /scratch/$USER/$PBS_JOBID/8/
mkdir -p /scratch/$USER/$PBS_JOBID/9/
mkdir -p /scratch/$USER/$PBS_JOBID/10/
mkdir -p /scratch/$USER/$PBS_JOBID/11/
mkdir -p /scratch/$USER/$PBS_JOBID/12/
mkdir -p /scratch/$USER/$PBS_JOBID/13/
mkdir -p /scratch/$USER/$PBS_JOBID/14/
mkdir -p /scratch/$USER/$PBS_JOBID/15/
mkdir -p /scratch/$USER/$PBS_JOBID/16/
mkdir -p /scratch/$USER/$PBS_JOBID/17/
mkdir -p /scratch/$USER/$PBS_JOBID/18/
mkdir -p /scratch/$USER/$PBS_JOBID/19/
mkdir -p "/scratch/$USER/$PBS_JOBID/remote/"
mkdir -p "/scratch/$USER/$PBS_JOBID/0/"
mkdir -p "/scratch/$USER/$PBS_JOBID/1/"
mkdir -p "/scratch/$USER/$PBS_JOBID/2/"
mkdir -p "/scratch/$USER/$PBS_JOBID/3/"
mkdir -p "/scratch/$USER/$PBS_JOBID/4/"
mkdir -p "/scratch/$USER/$PBS_JOBID/5/"
mkdir -p "/scratch/$USER/$PBS_JOBID/6/"
mkdir -p "/scratch/$USER/$PBS_JOBID/7/"
mkdir -p "/scratch/$USER/$PBS_JOBID/8/"
mkdir -p "/scratch/$USER/$PBS_JOBID/9/"
mkdir -p "/scratch/$USER/$PBS_JOBID/10/"
mkdir -p "/scratch/$USER/$PBS_JOBID/11/"
mkdir -p "/scratch/$USER/$PBS_JOBID/12/"
mkdir -p "/scratch/$USER/$PBS_JOBID/13/"
mkdir -p "/scratch/$USER/$PBS_JOBID/14/"
mkdir -p "/scratch/$USER/$PBS_JOBID/15/"
mkdir -p "/scratch/$USER/$PBS_JOBID/16/"
echo "----------------------------------------------------------------------"
cd $PBS_O_WORKDIR
echo 'current working directory:'
pwd
echo "get the zip-chunk file from the PBS_O_WORKDIR"
cp ./zip-chunks-jess/remote_chnk_00000.zip /scratch/$USER/$PBS_JOBID/
cp "zip-chunks-jess/remote_chnk_00000.zip" "/scratch/$USER/$PBS_JOBID/"
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/
cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:'
pwd
echo "unzip chunk, create dirs in cpu and sim_id folders"
/usr/bin/unzip remote_chnk_00000.zip -d 0/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 1/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 2/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 3/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 4/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 5/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 6/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 7/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 8/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 9/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 10/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 11/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 12/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 13/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 14/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 15/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 16/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 17/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 18/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 19/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d remote/. >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "0/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "1/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "2/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "3/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "4/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "5/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "6/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "7/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "8/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "9/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "10/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "11/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "12/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "13/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "14/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "15/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "16/." >> /dev/null
/usr/bin/unzip "remote_chnk_00000.zip" -d "remote/." >> /dev/null
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/remote/
cd "/scratch/$USER/$PBS_JOBID/remote/"
echo 'current working directory:'
pwd
echo "create turb_db directories"
mkdir -p ../turb/
mkdir -p "../turb/"
echo "----------------------------------------------------------------------"
cd $PBS_O_WORKDIR
cd "$PBS_O_WORKDIR"
echo 'current working directory:'
pwd
# copy to scratch db directory for [turb_db_dir], [turb_base_name]
cp ../turb/none* /scratch/$USER/$PBS_JOBID/remote/../turb/.
cp ../turb/turb_s100_10ms* /scratch/$USER/$PBS_JOBID/remote/../turb/.
cp ../turb/turb_s101_11ms* /scratch/$USER/$PBS_JOBID/remote/../turb/.
cp "../turb/turb_s100_10ms"*.bin "/scratch/$USER/$PBS_JOBID/remote/../turb/."
cp "../turb/turb_s101_11ms"*.bin "/scratch/$USER/$PBS_JOBID/remote/../turb/."
# copy to scratch db directory for [meand_db_dir], [meand_base_name]
# copy to scratch db directory for [meander_db_dir], [meander_base_name]
# copy to scratch db directory for [wake_db_dir], [wake_base_name]
# copy to scratch db directory for [micro_db_dir], [micro_base_name]
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/remote/
cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:'
pwd
echo "create turb directories in CPU dirs"
mkdir -p 0/turb/
mkdir -p 1/turb/
mkdir -p 2/turb/
mkdir -p 3/turb/
mkdir -p 4/turb/
mkdir -p 5/turb/
mkdir -p 6/turb/
mkdir -p 7/turb/
mkdir -p 8/turb/
mkdir -p 9/turb/
mkdir -p 10/turb/
mkdir -p 11/turb/
mkdir -p 12/turb/
mkdir -p 13/turb/
mkdir -p 14/turb/
mkdir -p 15/turb/
mkdir -p 16/turb/
mkdir -p 17/turb/
mkdir -p 18/turb/
mkdir -p 19/turb/
mkdir -p "0/turb/"
mkdir -p "0/turb_meander/"
mkdir -p "0/turb_micro/"
mkdir -p "1/turb/"
mkdir -p "1/turb_meander/"
mkdir -p "1/turb_micro/"
mkdir -p "2/turb/"
mkdir -p "2/turb_meander/"
mkdir -p "2/turb_micro/"
mkdir -p "3/turb/"
mkdir -p "3/turb_meander/"
mkdir -p "3/turb_micro/"
mkdir -p "4/turb/"
mkdir -p "4/turb_meander/"
mkdir -p "4/turb_micro/"
mkdir -p "5/turb/"
mkdir -p "5/turb_meander/"
mkdir -p "5/turb_micro/"
mkdir -p "6/turb/"
mkdir -p "6/turb_meander/"
mkdir -p "6/turb_micro/"
mkdir -p "7/turb/"
mkdir -p "7/turb_meander/"
mkdir -p "7/turb_micro/"
mkdir -p "8/turb/"
mkdir -p "8/turb_meander/"
mkdir -p "8/turb_micro/"
mkdir -p "9/turb/"
mkdir -p "9/turb_meander/"
mkdir -p "9/turb_micro/"
mkdir -p "10/turb/"
mkdir -p "10/turb_meander/"
mkdir -p "10/turb_micro/"
mkdir -p "11/turb/"
mkdir -p "11/turb_meander/"
mkdir -p "11/turb_micro/"
mkdir -p "12/turb/"
mkdir -p "12/turb_meander/"
mkdir -p "12/turb_micro/"
mkdir -p "13/turb/"
mkdir -p "13/turb_meander/"
mkdir -p "13/turb_micro/"
mkdir -p "14/turb/"
mkdir -p "14/turb_meander/"
mkdir -p "14/turb_micro/"
mkdir -p "15/turb/"
mkdir -p "15/turb_meander/"
mkdir -p "15/turb_micro/"
mkdir -p "16/turb/"
mkdir -p "16/turb_meander/"
mkdir -p "16/turb_micro/"
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/remote/
cd "/scratch/$USER/$PBS_JOBID/remote/"
echo 'current working directory:'
pwd
echo "Link all turb files into CPU dirs"
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/0/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/1/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/2/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/3/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/4/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/5/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/6/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/7/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/8/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/9/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/10/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/11/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/12/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/13/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/14/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/15/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/16/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/17/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/18/turb/ \;
find /scratch/$USER/$PBS_JOBID/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/remote/19/turb/ \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/0/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/1/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/2/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/3/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/4/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/5/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/6/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/7/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/8/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/9/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/10/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/11/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/12/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/13/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/14/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/15/turb/" \;
find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/16/turb/" \;
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/
cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:'
pwd
echo "START RUNNING JOBS IN find+xargs MODE"
WINEARCH=win32 WINEPREFIX=~/.wine32 winefix
WINEARCH="win32" WINEPREFIX="$HOME/.wine32" winefix
# run all the PBS *.p files in find+xargs mode
echo "following cases will be run from following path:"
echo "remote/pbs_in/dlc01_demos/"
export LAUNCH_PBS_MODE=false
/home/MET/sysalt/bin/find remote/pbs_in/dlc01_demos/ -type f -name '*.p' | sort -z
/home/MET/sysalt/bin/find 'remote/pbs_in/dlc01_demos/' -type f -name '*.p' | sort -z
echo "number of files to be launched: "`find remote/pbs_in/dlc01_demos/ -type f | wc -l`
/home/MET/sysalt/bin/find remote/pbs_in/dlc01_demos/ -type f -name '*.p' -print0 | sort -z | /home/MET/sysalt/bin/xargs -0 -I{} --process-slot-var=CPU_NR -n 1 -P 20 sh {}
echo "number of files to be launched: "`find "remote/pbs_in/dlc01_demos/" -type f | wc -l`
/home/MET/sysalt/bin/find 'remote/pbs_in/dlc01_demos/' -type f -name '*.p' -print0 | sort -z | /home/MET/sysalt/bin/xargs -0 -I{} --process-slot-var=CPU_NR -n 1 -P 17 sh {}
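# editor's note (hedged): find lists the generated launch scripts NUL-separated,
# sort orders them, and GNU xargs runs them one per invocation (-n 1) with at
# most 17 in parallel (-P 17). --process-slot-var=CPU_NR exports the slot number
# (0-16) into each job's environment, which is presumably how a job selects its
# matching /scratch/$USER/$PBS_JOBID/$CPU_NR/ directory. A minimal stand-alone
# sketch of the same pattern (illustrative only, not part of the generated script):
#   printf '%s\0' job_a.sh job_b.sh | xargs -0 -n 1 -P 17 --process-slot-var=CPU_NR sh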
echo "END OF JOBS IN find+xargs MODE"
echo "----------------------------------------------------------------------"
echo 'total scratch disk usage:'
du -hs /scratch/$USER/$PBS_JOBID/
cd /scratch/$USER/$PBS_JOBID/remote
echo 'current working directory:'
echo "total scratch disk usage:"
du -hs "/scratch/$USER/$PBS_JOBID/"
cd "/scratch/$USER/$PBS_JOBID/remote"
echo "current working directory:"
pwd
echo "Results saved at sim_id directory:"
find
find .
echo "move statsdel into compressed archive"
find res/dlc01_demos/ -name "*.csv" -print0 | xargs -0 tar --remove-files -rf prepost/statsdel_chnk_00000.tar
xz -z2 -T 20 prepost/statsdel_chnk_00000.tar
find "res/dlc01_demos/" -name "*.csv" -print0 | xargs -0 tar --remove-files -rf "prepost/statsdel_chnk_00000.tar"
xz -z2 -T 17 "prepost/statsdel_chnk_00000.tar"
echo "move log analysis into compressed archive"
find logfiles/dlc01_demos/ -name "*.csv" -print0 | xargs -0 tar --remove-files -rf prepost/loganalysis_chnk_00000.tar
xz -z2 -T 20 prepost/loganalysis_chnk_00000.tar
find "logfiles/dlc01_demos/" -name "*.csv" -print0 | xargs -0 tar --remove-files -rf "prepost/loganalysis_chnk_00000.tar"
xz -z2 -T 17 "prepost/loganalysis_chnk_00000.tar"
echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/
cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:'
pwd
echo "move results back from node scratch/sim_id to origin, but ignore htc, and pbs_in directories."
echo "copy from remote/* to $PBS_O_WORKDIR/"
time rsync -au --remove-source-files remote/* $PBS_O_WORKDIR/ \
--exclude pbs_in/dlc01_demos/* \
--exclude *.htc
echo "copy from remote/ to $PBS_O_WORKDIR/"
time rsync -au "remote/" "$PBS_O_WORKDIR/" \
--exclude "pbs_in/dlc01_demos/*" \
--exclude *.htc
source deactivate
echo "DONE !!"
......
File deleted
File deleted
[seed],[wsp],[wave_seed],[wdir],[ReferenceTI],[ReferenceWindSpeed],[t0],[duration],[hub_height],[Case folder],[Duration],[Case id.],[Turb base name],[time_stop]
1001,4,101,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa1001_sw0101,turb_wsp04_s1001,3700
1001,4,101,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa1001_sw0101,turb_wsp04_s1001,3700
1001,4,102,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa1001_sw0102,turb_wsp04_s1001,3700
2001,4,102,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa2001_sw0102,turb_wsp04_s2001,3700
2001,4,103,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa2001_sw0103,turb_wsp04_s2001,3700
2001,4,103,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa2001_sw0103,turb_wsp04_s2001,3700
3002,6,201,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa3002_sw0201,turb_wsp06_s3002,3700
3002,6,201,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa3002_sw0201,turb_wsp06_s3002,3700
3002,6,202,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa3002_sw0202,turb_wsp06_s3002,3700
4002,6,202,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa4002_sw0202,turb_wsp06_s4002,3700
4002,6,203,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa4002_sw0203,turb_wsp06_s4002,3700
4002,6,203,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa4002_sw0203,turb_wsp06_s4002,3700
5003,8,301,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa5003_sw0301,turb_wsp08_s5003,3700
5003,8,301,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa5003_sw0301,turb_wsp08_s5003,3700
5003,8,302,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa5003_sw0302,turb_wsp08_s5003,3700
6003,8,302,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa6003_sw0302,turb_wsp08_s6003,3700
6003,8,303,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa6003_sw0303,turb_wsp08_s6003,3700
6003,8,303,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa6003_sw0303,turb_wsp08_s6003,3700
7001,4,101,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa7001_sw0101,turb_wsp04_s7001,3700
7001,4,101,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa7001_sw0101,turb_wsp04_s7001,3700
7001,4,102,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa7001_sw0102,turb_wsp04_s7001,3700
8001,4,102,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa8001_sw0102,turb_wsp04_s8001,3700
8001,4,103,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa8001_sw0103,turb_wsp04_s8001,3700
8001,4,103,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa8001_sw0103,turb_wsp04_s8001,3700
9002,6,201,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa9002_sw0201,turb_wsp06_s9002,3700
9002,6,201,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa9002_sw0201,turb_wsp06_s9002,3700
9002,6,202,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa9002_sw0202,turb_wsp06_s9002,3700
10002,6,202,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa10002_sw0202,turb_wsp06_s10002,3700
10002,6,203,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa10002_sw0203,turb_wsp06_s10002,3700
10002,6,203,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa10002_sw0203,turb_wsp06_s10002,3700
11003,8,301,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa11003_sw0301,turb_wsp08_s11003,3700
11003,8,301,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa11003_sw0301,turb_wsp08_s11003,3700
11003,8,302,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa11003_sw0302,turb_wsp08_s11003,3700
12003,8,302,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa12003_sw0302,turb_wsp08_s12003,3700
12003,8,303,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa12003_sw0303,turb_wsp08_s12003,3700
12003,8,303,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa12003_sw0303,turb_wsp08_s12003,3700
[seed],[wave_seed],[wsp],[wdir],[ReferenceTI],[ReferenceWindSpeed],[t0],[duration],[hub_height],[Case folder],[Duration],[Case id.],[Turb base name],[time_stop]
1001,101,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa1001_sw0101,turb_wsp04_s1001,3700
1001,101,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa1001_sw0101,turb_wsp04_s1001,3700
1002,201,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa1002_sw0201,turb_wsp06_s1002,3700
2002,201,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa2002_sw0201,turb_wsp06_s2002,3700
2003,301,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa2003_sw0301,turb_wsp08_s2003,3700
2003,301,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa2003_sw0301,turb_wsp08_s2003,3700
3001,102,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa3001_sw0102,turb_wsp04_s3001,3700
3001,102,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa3001_sw0102,turb_wsp04_s3001,3700
3002,202,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa3002_sw0202,turb_wsp06_s3002,3700
4002,202,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa4002_sw0202,turb_wsp06_s4002,3700
4003,302,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa4003_sw0302,turb_wsp08_s4003,3700
4003,302,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa4003_sw0302,turb_wsp08_s4003,3700
5001,101,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa5001_sw0101,turb_wsp04_s5001,3700
5001,101,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa5001_sw0101,turb_wsp04_s5001,3700
5002,201,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa5002_sw0201,turb_wsp06_s5002,3700
6002,201,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa6002_sw0201,turb_wsp06_s6002,3700
6003,301,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa6003_sw0301,turb_wsp08_s6003,3700
6003,301,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa6003_sw0301,turb_wsp08_s6003,3700
7001,102,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa7001_sw0102,turb_wsp04_s7001,3700
7001,102,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa7001_sw0102,turb_wsp04_s7001,3700
7002,202,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa7002_sw0202,turb_wsp06_s7002,3700
8002,202,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa8002_sw0202,turb_wsp06_s8002,3700
8003,302,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa8003_sw0302,turb_wsp08_s8003,3700
8003,302,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa8003_sw0302,turb_wsp08_s8003,3700
# identifiers for HawcStab2 modes of DTU10MW B0002 with HS2 2-15
# mode_number ; description
1 ;
2 ; 1st Tower FA
3 ; 1st Tower SS
4 ; 1st BF B whirling
5 ; 1st BF collective
6 ; 1st BF F whirling # with yawing
7 ; 1st BE B whirling
8 ; 1st BE F whirling
9 ; 2nd BF B whirling # with yawing
10 ; 2nd BF F whirling
11 ; 2nd BF collective
12 ; 1st shaft / BE collective
13 ; 2nd Tower FA
14 ; 2nd Tower SS
15 ; Tower torsion
16 ; 16
17 ; 17
18 ; 18
19 ; 19
20 ; 20
21 ; 21
22 ; 22
23 ; 23
24 ; 24
25 ; 25
21 wind speed [m/s] pitch [deg] rot. speed [rpm]
5.0 1.52 6.00
6.0 0.46 6.00
7.0 0.00 6.37
8.0 0.00 7.28
9.0 0.00 8.19
10.0 0.00 9.10
11.0 0.00 9.60
12.0 4.10 9.60
13.0 6.69 9.60
14.0 8.62 9.60
15.0 10.26 9.60
16.0 11.74 9.60
17.0 13.10 9.60
18.0 14.38 9.60
19.0 15.59 9.60
20.0 16.76 9.60
21.0 17.88 9.60
22.0 18.97 9.60
23.0 20.03 9.60
24.0 21.05 9.60
25.0 22.05 9.60
22 wind speed [m/s] pitch [deg] rot. speed [rpm] aero power [kw] aero thrust [kn]
0.4000000000000000E+01 0.5349309632994260E-03 0.3385905837982068E+02 0.7201472127920620E+01 0.3238197412297314E+01
0.5000000000000000E+01 0.5349309755366098E-03 0.4210546666682672E+02 0.1415607774011505E+02 0.5017229492571652E+01
0.6000000000000000E+01 0.5349309755382001E-03 0.5043274240183678E+02 0.2459236895692140E+02 0.7182292864353710E+01
0.7000000000000000E+01 0.5349309755382001E-03 0.5876454811562412E+02 0.3923045950432546E+02 0.9727470498252524E+01
0.8000000000000000E+01 0.5349309755382001E-03 0.6711475958726750E+02 0.5879853446478288E+02 0.1266120196263318E+02
0.9000000000000000E+01 0.5349309755382001E-03 0.7000000000000000E+02 0.8323079728956920E+02 0.1498841355250623E+02
0.1000000000000000E+02 0.1163755866416270E+01 0.7000000000000000E+02 0.1064017983536164E+03 0.1559515367779657E+02
0.1100000000000000E+02 0.5244219122437820E+01 0.7000000000000000E+02 0.1064000990173816E+03 0.1261946763533536E+02
0.1200000000000000E+02 0.7805516131255263E+01 0.7000000000000000E+02 0.1064034294717032E+03 0.1109351229548101E+02
0.1300000000000000E+02 0.9888742159166926E+01 0.7000000000000000E+02 0.1063986284106002E+03 0.1004076469859912E+02
0.1400000000000000E+02 0.1171931406170756E+02 0.7000000000000000E+02 0.1063986146999500E+03 0.9242429622326702E+01
0.1500000000000000E+02 0.1338395969300175E+02 0.7000000000000000E+02 0.1063937200866419E+03 0.8609049584190897E+01
0.1600000000000000E+02 0.1493218874586698E+02 0.7000000000000000E+02 0.1064043646466927E+03 0.8091730136938368E+01
0.1700000000000000E+02 0.1639469013123552E+02 0.7000000000000000E+02 0.1064013899168824E+03 0.7658983927004478E+01
0.1800000000000000E+02 0.1778792307025338E+02 0.7000000000000000E+02 0.1064047949236002E+03 0.7292116562690632E+01
0.1900000000000000E+02 0.1912315185121887E+02 0.7000000000000000E+02 0.1063985234138157E+03 0.6977317857551209E+01
0.2000000000000000E+02 0.2040918213796199E+02 0.7000000000000000E+02 0.1064031491365678E+03 0.6705055846501972E+01
0.2100000000000000E+02 0.2165209915924424E+02 0.7000000000000000E+02 0.1063998138679043E+03 0.6468359027748811E+01
0.2200000000000000E+02 0.2285648140629532E+02 0.7000000000000000E+02 0.1064240428325993E+03 0.6262034347042015E+01
0.2300000000000000E+02 0.2402752818115222E+02 0.7000000000000000E+02 0.1064005848616629E+03 0.6078053602135600E+01
0.2400000000000000E+02 0.2516336964663254E+02 0.7000000000000000E+02 0.1064334951382803E+03 0.5918201578325677E+01
0.2500000000000000E+02 0.2627214012465693E+02 0.7000000000000000E+02 0.1064694335445150E+03 0.5775852683705587E+01
import unittest
import os
#import shutil
import numpy as np
import pandas as pd
from wetb.prepost.GenerateDLCs import GenerateDLCCases
class Template(unittest.TestCase):
def setUp(self):
self.basepath = os.path.dirname(__file__)
class TestGenerateDLCCases(Template):
def test_dlcs(self):
# manually configure paths, HAWC2 model root path is then constructed as
# p_root_remote/PROJECT/sim_id, and p_root_local/PROJECT/sim_id
# adapt accordingly when you have configured your directories differently
p_root = os.path.join(self.basepath, 'data/demo_gendlc/')
# project name, sim_id, master file name
dlc_master = os.path.join(p_root, 'DLCs.xlsx')
dlc_folder = os.path.join(p_root, 'DLCs')
dlc_gen12 = os.path.join(dlc_folder, 'DLC12.xlsx')
dlc_gen13 = os.path.join(dlc_folder, 'DLC13.xlsx')
DLB = GenerateDLCCases()
DLB.execute(filename=dlc_master, folder=dlc_folder)
df12 = pd.read_excel(dlc_gen12)
# df12.to_csv('data/demo_gendlc/ref/DLC12.csv', index=False)
df12_ref = pd.read_csv(os.path.join(p_root, 'ref/DLC12.csv'))
# df12_ref2 = pd.read_excel(p2)[df12.columns]
pd.testing.assert_frame_equal(df12, df12_ref)
# df2 = df[['[Case id.]', '[wdir]', '[wsp]', '[seed]', '[wave_seed]']]
self.assertEqual(df12['[ReferenceWindSpeed]'].unique(), np.array([44]))
self.assertEqual(df12['[t0]'].unique(), np.array([100]))
self.assertEqual(len(df12['[Case id.]'].unique()), 2*3*3*2)
self.assertEqual(df12['[Case id.]'].values[0],
'DLC12_wsp04_wdir010_sa1001_sw0101')
df13 = pd.read_excel(dlc_gen13)
# df13.to_csv('data/demo_gendlc/ref/DLC13.csv', index=False)
df13_ref = pd.read_csv(os.path.join(p_root, 'ref/DLC13.csv'))
pd.testing.assert_frame_equal(df13, df13_ref)
if __name__ == "__main__":
unittest.main()
......@@ -3,13 +3,6 @@ Created on 05/11/2015
@author: MMPE
'''
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
import os
import filecmp
......@@ -63,21 +56,52 @@ class TestGenerateInputs(Template):
shutil.rmtree(os.path.join(p_root, tmpl.PROJECT, 'remote'))
tmpl.force_dir = tmpl.P_RUN
tmpl.launch_dlcs_excel('remote', silent=True, runmethod='gorm',
pbs_turb=True, zipchunks=True,
tmpl.launch_dlcs_excel('remote', silent=True, runmethod='pbs',
pbs_turb=True, zipchunks=True, ppn=17,
postpro_node_zipchunks=False,
postpro_node=False)
# we can not check-in empty dirs so we can not compare the complete
postpro_node=False, update_model_data=True)
def cmp_dir(dir1, dir2):
lst1, lst2 = map(os.listdir, (dir1, dir2))
self.assertEqual(";".join(lst1), ";".join(lst2))
for f1, f2 in zip(lst1, lst2):
if f1.endswith(".zip") or f1.endswith(".xlsx"):
continue
if os.path.isdir(os.path.join(dir1, f1)):
cmp_dir(os.path.join(dir1, f1), os.path.join(dir2, f2))
else:
try:
with open(os.path.join(dir1, f1)) as fid1:
l1 = fid1.readlines()
with open(os.path.join(dir2, f2)) as fid2:
l2 = fid2.readlines()
self.assertEqual(len(l1), len(l2))
self.assertTrue(all([l1_ == l2_ for l1_, l2_ in zip(l1, l2)]))
except:
print("=" * 30)
print(os.path.join(dir1, f1))
print(os.path.join(dir2, f2))
print(dir1[[d1 != d2 for d1, d2 in zip(dir1, dir2)].index(True):])
print(f1)
for i in range(len(l1)):
if l1[i] != l2[i]:
print("%03d, rem: %s" % (i, l1[i].strip()))
print("%03d, ref: %s" % (i, l2[i].strip()))
print()
raise
# we can not git check-in empty dirs so we can not compare the complete
# directory structure without manually creating the empty dirs here
for subdir in ['control', 'data', 'htc', 'pbs_in', 'pbs_in_turb',
'htc/_master', 'htc/dlc01_demos', 'pbs_in/dlc01_demos',
'zip-chunks-gorm', 'zip-chunks-jess']:
'zip-chunks-jess']:
remote = os.path.join(p_root, tmpl.PROJECT, 'remote', subdir)
ref = os.path.join(p_root, tmpl.PROJECT, 'ref', subdir)
# the zipfiles are taken care of separately
ignore = ['remote_chnk_00000.zip']
cmp = filecmp.dircmp(remote, ref, ignore=ignore)
cmp_dir(remote, ref)
self.assertEqual(len(cmp.diff_files), 0,
"{} {}".format(subdir, cmp.diff_files))
self.assertEqual(len(cmp.right_only), 0,
......@@ -87,7 +111,6 @@ class TestGenerateInputs(Template):
# compare the zip files
for fname in ['demo_dlc_remote.zip',
'zip-chunks-gorm/remote_chnk_00000.zip',
'zip-chunks-jess/remote_chnk_00000.zip']:
remote = os.path.join(p_root, tmpl.PROJECT, 'remote', fname)
ref = os.path.join(p_root, tmpl.PROJECT, 'ref', fname)
......
......@@ -3,20 +3,15 @@ Created on 05/11/2015
@author: MMPE
'''
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
from os.path import join as pjoin
from os.path import dirname as pdirname
import numpy as np
from wetb.prepost.hawcstab2 import results, ReadControlTuning
from wetb.prepost.hawcstab2 import (results, ReadControlTuning, read_cmb_all,
read_modid, PlotCampbell, plot_add_ps)
from wetb.prepost.mplutils import subplots
class Tests(unittest.TestCase):
......@@ -24,13 +19,12 @@ class Tests(unittest.TestCase):
"""
def setUp(self):
self.fpath_linear = pjoin(pdirname(__file__),
'data/controller_input_linear.txt')
self.fpath_quad = pjoin(pdirname(__file__),
'data/controller_input_quadratic.txt')
self.fbase = pdirname(__file__)
self.fpath_linear = pjoin(self.fbase, 'data/controller_input_linear.txt')
self.fpath_quad = pjoin(self.fbase, 'data/controller_input_quadratic.txt')
def test_cmb_df(self):
fname1 = pjoin(pdirname(__file__), 'data/campbell_diagram.cmb')
fname1 = pjoin(self.fbase, 'data/campbell_diagram.cmb')
speed, freq, damp, real_eig = results().load_cmb(fname1)
self.assertIsNone(real_eig)
......@@ -40,7 +34,7 @@ class Tests(unittest.TestCase):
ops = freq.shape[0]
self.assertEqual(len(speed), ops)
self.assertEqual(ops, 22)
self.assertEqual(ops, 21)
self.assertEqual(mods, 10)
for k in range(ops):
......@@ -51,6 +45,30 @@ class Tests(unittest.TestCase):
self.assertEqual(len(df_oper['wind_ms'].unique()), 1)
self.assertEqual(df_oper['wind_ms'].unique()[0], speed[k])
def test_read_cmb_all(self):
f_pwr = pjoin(self.fbase, 'data/dtu10mw_v1.pwr')
f_cmb = pjoin(self.fbase, 'data/campbell_diagram.cmb')
f_modid = pjoin(self.fbase, 'data/dtu10mw.modid')
dfp, dff, dfd = read_cmb_all(f_cmb, f_pwr=f_pwr, f_modid=f_modid)
self.assertEqual(dfp.shape, (21, 27))
self.assertEqual(dff.shape, (21, 10))
self.assertEqual(dfd.shape, (21, 10))
dfp, dff, dfd = read_cmb_all(f_cmb, f_pwr=None)
self.assertIsNone(dfp)
def test_read_modid(self):
fname = pjoin(self.fbase, 'data/dtu10mw.modid')
modes = read_modid(fname)
ref = ['', '1st Tower FA', '1st Tower SS', '1st BF B whirling',
'1st BF collective', '1st BF F whirling', '1st BE B whirling',
'1st BE F whirling', '2nd BF B whirling', '2nd BF F whirling',
'2nd BF collective', '1st shaft / BE collective',
'2nd Tower FA', '2nd Tower SS', 'Tower torsion']
self.assertEqual(len(modes), 25)
self.assertEqual(modes[:15], ref)
def test_linear_file(self):
hs2 = ReadControlTuning()
......@@ -71,6 +89,8 @@ class Tests(unittest.TestCase):
self.assertEqual(hs2.aero_damp.Ko1, -4.21472)
self.assertEqual(hs2.aero_damp.Ko2, 0.0)
self.assertEqual(hs2.aero_gains.shape, (0, 0))
def test_quadratic_file(self):
hs2 = ReadControlTuning()
......@@ -91,6 +111,38 @@ class Tests(unittest.TestCase):
self.assertEqual(hs2.aero_damp.Ko1, -1.69769)
self.assertEqual(hs2.aero_damp.Ko2, -15.02688)
self.assertEqual(hs2.aero_gains.shape, (15, 5))
cols = ['theta', 'dq/dtheta', 'dq/dtheta_fit', 'dq/domega',
'dq/domega_fit']
self.assertEqual(hs2.aero_gains.columns.tolist(), cols)
tmp = np.array([0, 4.1, 6.69, 8.62, 10.26, 11.74, 13.1, 14.38, 15.59,
16.76, 17.88, 18.97, 20.03, 21.05, 22.05])
np.testing.assert_allclose(hs2.aero_gains['theta'].values, tmp)
tmp = [-1165.0486, -1665.72575, -2012.86015, -2290.61883,
-2535.50152, -2757.11114, -2991.31463, -3213.58048,
-3428.46978, -3642.914, -3858.46084, -4075.53879,
-4295.293, -4524.66782, -4758.6268]
np.testing.assert_allclose(hs2.aero_gains['dq/dtheta'].values, tmp)
tmp = [-1182.80164, -1655.44826, -1998.12171, -2275.67536, -2526.42508,
-2764.46364, -2993.03195, -3216.75546, -3435.9122, -3654.91116,
-3871.07886, -4087.58722, -4303.93692, -4517.52214, -4732.06052]
np.testing.assert_allclose(hs2.aero_gains['dq/dtheta_fit'].values, tmp)
tmp = [-393.03157, -6919.03943, -13119.30826, -18911.31597,
-24632.87239, -30186.31522, -36257.79933, -42410.9345,
-48626.47812, -55070.40445, -61702.38984, -68581.71761,
-75700.65394, -83045.36607, -90639.34883]
np.testing.assert_allclose(hs2.aero_gains['dq/domega'].values, tmp)
tmp = [-950.85937, -6544.84749, -12659.67192, -18515.75425,
-24364.04365, -30329.6103, -36386.82912, -42591.10977,
-48904.89826, -55424.76312, -62048.0563, -68852.77188,
-75809.68369, -82820.10608, -89993.97031]
np.testing.assert_allclose(hs2.aero_gains['dq/domega_fit'].values, tmp)
def test_ind_file(self):
fnames = ['dtu10mw_nofull_defl_u10000.ind',
'dtu10mw_nofull_fext_u10000.ind',
......@@ -104,11 +156,11 @@ class Tests(unittest.TestCase):
]
for fname in fnames:
fname = pjoin(pdirname(__file__), 'data', fname)
fname = pjoin(self.fbase, 'data', fname)
res = results()
df_data = res.load_ind(fname)
data = np.loadtxt(fname)
np.testing.assert_almost_equal(data, df_data.values)
np.testing.assert_allclose(data, df_data.values)
def test_pwr_file(self):
fnames = ['dtu10mw_nofull.pwr',
......@@ -116,24 +168,54 @@ class Tests(unittest.TestCase):
'dtu10mw_nogradient_v2.pwr',
'dtu10mw_v1.pwr',]
for fname in fnames:
fname = pjoin(pdirname(__file__), 'data', fname)
fname = pjoin(self.fbase, 'data', fname)
res = results()
df_data, units = res.load_pwr_df(fname)
data = np.loadtxt(fname)
self.assertEqual(data.shape, df_data.shape)
print(fname)
print(data.dtype)
print(df_data.values.dtype)
for i in range(data.shape[0]):
a = data[i,:]
b = df_data.values[i,:]
if not np.allclose(a,b):
print(i)
print(a-b)
print(a)
print(b)
np.testing.assert_almost_equal(a, b)
np.testing.assert_almost_equal(data, df_data.values, decimal=6)
np.testing.assert_allclose(data, df_data.values)
def test_opt_file(self):
res = results()
fname = pjoin(self.fbase, 'data', 'dtu10mw.opt')
df = res.load_operation(fname)
tmp = [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25]
np.testing.assert_allclose(tmp, df['windspeed'].values)
self.assertEqual(df.values.shape, (21, 3))
fname = pjoin(pdirname(__file__), 'data', 'kb6.opt')
df = res.load_operation(fname)
tmp = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25]
np.testing.assert_allclose(tmp, df['windspeed'].values)
tmp = [7.20147212792062, 14.1560777401151, 24.5923689569214,
39.2304595043255, 58.7985344647829, 83.2307972895692,
106.401798353616, 106.400099017382, 106.403429471703,
106.3986284106, 106.39861469995, 106.393720086642,
106.404364646693, 106.401389916882, 106.4047949236,
106.398523413816, 106.403149136568, 106.399813867904,
106.424042832599, 106.400584861663, 106.43349513828,
106.469433544515]
np.testing.assert_allclose(tmp, df['P_aero'].values)
self.assertEqual(df.values.shape, (22, 5))
def test_plot_cmb(self):
base = pdirname(__file__)
f_pwr = pjoin(base, 'data/dtu10mw_v1.pwr')
f_cmb = pjoin(base, 'data/campbell_diagram.cmb')
dfp, dff, dfd = read_cmb_all(f_cmb, f_pwr=f_pwr)
cmb = PlotCampbell(dfp['V'].values, dff, dfd)
fig, axes = subplots(nrows=2, ncols=1, figsize=(8,10))
ax = axes[0,0]
ax = cmb.plot_freq(ax, col='k', mark='^', ls='-', modes='all')
ax = plot_add_ps(ax, dfp['V'], dfp['Speed'], ps=[1,3,6])
ax = axes[1,0]
ax = cmb.plot_damp(ax, col='k', mark='^', ls='-', modes=10)
if __name__ == "__main__":
......
......@@ -3,19 +3,13 @@ Created on 05/11/2015
@author: MMPE
'''
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
import io
import os
import tempfile
import numpy as np
import pandas as pd
from wetb.prepost import windIO
......@@ -197,7 +191,7 @@ class TestsLoadResults(unittest.TestCase):
# ---------------------------------------------------------------------
res = windIO.LoadResults(self.respath, self.f1_chant, readdata=False)
self.assertFalse(hasattr(res, 'sig'))
np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,422))
np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,432))
self.assertEqual(res.ch_df.unique_ch_name.values[0], 'Time')
df = res.ch_df
self.assertEqual(2, len(df[df['bearing_name']=='shaft_rot']))
......@@ -208,7 +202,12 @@ class TestsLoadResults(unittest.TestCase):
[200, 'blade2-blade2-node-017-momentvec-z', 'kNm'],
[296, 'blade1-blade1-node-008-forcevec-z', 'kN'],
[415, 'Cl-1-55.7', 'deg'],
[421, 'qwerty-is-azerty', 'is']
[421, 'qwerty', 'is'],
[422, 'wind_wake-wake_pos_x_1', 'm'],
[423, 'wind_wake-wake_pos_y_2', 'm'],
[424, 'wind_wake-wake_pos_z_5', 'm'],
[425, 'statevec_new-blade1-c2def-blade1-absolute-014.00-Dx', 'm'],
[429, 'statevec_new-blade1-c2def-blade1-elastic-014.00-Ry', 'deg'],
]
for k in exp:
self.assertEqual(df.loc[k[0], 'unique_ch_name'], k[1])
......@@ -216,12 +215,17 @@ class TestsLoadResults(unittest.TestCase):
self.assertEqual(res.ch_dict[k[1]]['chi'], k[0])
self.assertEqual(res.ch_dict[k[1]]['units'], k[2])
# also check we have the tag from a very long description because
# we truncate after 150 characters
self.assertEqual(df.loc[426, 'sensortag'], 'this is a tag')
# ---------------------------------------------------------------------
res = windIO.LoadResults(self.respath, self.f2_chant, readdata=False)
self.assertFalse(hasattr(res, 'sig'))
np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,217))
df = res.ch_df
self.assertEqual(4, len(df[df['sensortype']=='wsp-global']))
self.assertEqual(13, len(df[df['sensortype']=='wsp-global']))
self.assertEqual(3, len(df[df['sensortype']=='wsp-blade']))
self.assertEqual(2, len(df[df['sensortype']=='harmonic']))
self.assertEqual(2, len(df[df['blade_nr']==3]))
......@@ -229,11 +233,77 @@ class TestsLoadResults(unittest.TestCase):
res = windIO.LoadResults(self.respath, self.f3_chant, readdata=False)
self.assertFalse(hasattr(res, 'sig'))
np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,294))
df = res.ch_df
self.assertEqual(8, len(df[df['sensortype']=='CT']))
self.assertEqual(8, len(df[df['sensortype']=='CQ']))
self.assertEqual(8, len(df[df['sensortype']=='a_grid']))
self.assertEqual(84, len(df[df['blade_nr']==1]))
df1 = res.ch_df
self.assertEqual(8, len(df1[df1['sensortype']=='CT']))
self.assertEqual(8, len(df1[df1['sensortype']=='CQ']))
self.assertEqual(8, len(df1[df1['sensortype']=='a_grid']))
self.assertEqual(84, len(df1[df1['blade_nr']==1]))
def test_unified_chan_names_extensive2(self):
res = windIO.LoadResults(self.respath, self.f3_chant, readdata=False)
df1 = res.ch_df
fname = os.path.join(self.respath, self.f3_chant.replace('.sel',
'.ch_df.csv'))
# # when changing the tests, update the reference, check, and commit
# # but keep the same column ordering to not make the diffs too big
# cols = ['azimuth', 'bearing_name', 'blade_nr', 'bodyname',
# 'component', 'coord', 'direction', 'dll', 'flap_nr', 'io',
# 'io_nr', 'output_type', 'pos', 'radius','radius_actual', 'sensortag',
# 'sensortype', 'unique_ch_name', 'units', ]
# df1[cols].to_csv(fname)
# df1.to_excel(fname.replace('.csv', '.xlsx'))
# FIXME: read_csv for older pandas versions fails on reading the
# mixed str/tuple column. Ignore the pos column for now
colref = ['azimuth', 'bearing_name', 'blade_nr', 'bodyname',
'component', 'coord', 'direction', 'dll', 'flap_nr', 'io',
'io_nr', 'output_type', 'radius', 'radius_actual', 'sensortag',
'sensortype', 'unique_ch_name', 'units'] # 'pos',
# keep_default_na: leave empty strings as empty strings and not nan's
# you can't have nice things: usecols in combination with index_col
# doesn't work
df2 = pd.read_csv(fname, usecols=['chi']+colref, keep_default_na=False)
df2.index = df2['chi']
df2.drop(labels='chi', inplace=True, axis=1)
# for the comparison we need to have the columns with empty/number
# mixed data types in a consistent data type
for col in ['azimuth', 'radius', 'blade_nr', 'io_nr', 'flap_nr',
'dll', 'radius_actual']:
df1.loc[df1[col]=='', col] = np.nan
df1[col] = df1[col].astype(np.float32)
df2.loc[df2[col]=='', col] = np.nan
df2[col] = df2[col].astype(np.float32)
# print(df1.pos[14], df2.pos[14])
# the pos columns contains also tuples, read from csv doesn't get that
# df1['pos'] = df1['pos'].astype(str)
# df1['pos'] = df1['pos'].str.replace("'", "")
# print(df1.pos[14], df2.pos[14])
# sort columns in the same way so we can assert the df are equal
# df1 = df1[colref].copy()
# df2 = df2[colref].copy()
# FIXME: when pandas is more recent we can use assert_frame_equal
# pd.testing.assert_frame_equal(df1, df2)
# ...but there is no testing.assert_frame_equal in pandas 0.14
for col in colref:
# ignore nan values for float cols
if df1[col].dtype == np.dtype('float32'):
sel = ~np.isnan(df1[col].values)
np.testing.assert_array_equal(df1[col].values[sel],
df2[col].values[sel])
else:
np.testing.assert_array_equal(df1[col].values,
df2[col].values)
def test_df_stats(self):
"""
"""
res = windIO.LoadResults(self.respath, self.fascii)
df_stats = res.statsdel_df()
class TestUserWind(unittest.TestCase):
......
......@@ -4,34 +4,23 @@ Created on Thu Apr 3 19:53:59 2014
@author: dave
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from builtins import dict
from io import open as opent
from builtins import range
from builtins import str
from builtins import int
from future import standard_library
standard_library.install_aliases()
from builtins import object
__author__ = 'David Verelst'
__license__ = 'GPL'
__version__ = '0.5'
import os
import copy
import struct
import math
from time import time
import codecs
from itertools import chain
import numpy as np
import scipy as sp
import scipy.integrate as integrate
import scipy.io as sio
# scipy changed interface name from 1.14 to 1.15
try:
from scipy.integrate import trapezoid # scipy >1.15
except ImportError:
from scipy.integrate import trapz as trapezoid # scipy <=1.14
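# editor's note (hedged): with either import above the call is identical, e.g.
# area = trapezoid(y, x) integrates the samples y over the coordinates x with
# the composite trapezoidal rule.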
import pandas as pd
# misc is part of prepost, which is available on the dtu wind gitlab server:
......@@ -39,6 +28,7 @@ import pandas as pd
from wetb.prepost import misc
# wind energy python toolbox, available on the dtu wind redmine server:
# http://vind-redmine.win.dtu.dk/projects/pythontoolbox/repository/show/fatigue_tools
from wetb.hawc2.sensor_names import unified_channel_names
from wetb.hawc2.Hawc2io import ReadHawc2
from wetb.fatigue_tools.fatigue import (eq_load, cycle_matrix2)
......@@ -133,6 +123,12 @@ class LogFile(object):
# *** ERROR *** Out of limits in user defined shear field - limit value used
self.err_sim[' *** ERROR *** Out of limit'] = len(self.err_sim)
# NEAR WAKE ERRORS
# ERROR in Near Wake! The radius of the tip is smaller (or equal) to
self.err_sim[' ERROR in Near Wake! The ra'] = len(self.err_sim)
# ERROR: Maximum number of near wake iterations reached
self.err_sim[' ERROR: Maximum number of n'] = len(self.err_sim)
# TODO: error message from a non existing channel output/input
# add more messages if required...
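# editor's note (hedged): the keys above appear to be roughly the first
# 27 characters of the offending log line, so an additional message
# would be registered the same way, e.g. (hypothetical message text):
# self.err_sim[' *** ERROR *** Unknown chan'] = len(self.err_sim)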
......@@ -143,9 +139,8 @@ class LogFile(object):
def readlog(self, fname, case=None, save_iter=False):
"""
"""
# open the current log file
with open(fname, 'r') as f:
lines = f.readlines()
# be cautious and try a few encodings when reading the file
lines = misc.readlines_try_encodings(fname)
# keep track of the messages already found in this file
tempLog = []
......@@ -252,7 +247,8 @@ class LogFile(object):
iterations[time_step-1,2] = 1
# method of last resort, we have no idea what message
elif line[:10] == ' *** ERROR' or line[:10]==' ** WARNING':
elif line[:10] == ' *** ERROR' or line[:10]==' ** WARNING' \
or line[:6] == ' ERROR':
icol = subcols_sim*self.sim_cols
icol += subcols_init*self.init_cols + 1
# line number of the message
......@@ -441,7 +437,7 @@ class LogFile(object):
gr = ('first_tstep_%i', 'last_step_%i', 'nr_%i', 'msg_%i')
colnames.extend(list(chain_iter( (k % i for k in gr)
for i in range(100,105,1))) )
for i in range(100,100+nr_sim,1))) )
colnames.extend(['nr_extra', 'msg_extra'])
colnames.extend(['elapsted_time',
'last_time_step',
......@@ -483,8 +479,7 @@ class LoadResults(ReadHawc2):
Usage:
obj = LoadResults(file_path, file_name)
This class is called like a function:
HawcResultData() will read the specified file upon object initialization.
This is a subclass of wetb.hawc2.Windio:ReadHawc2.
Available output:
obj.sig[timeStep,channel] : complete result file in a numpy array
......@@ -535,11 +530,6 @@ class LoadResults(ReadHawc2):
ch_dict[tag]['units']
"""
# ch_df columns, these are created by LoadResults._unified_channel_names
cols = set(['bearing_name', 'sensortag', 'bodyname', 'chi', 'component',
'pos', 'coord', 'sensortype', 'radius', 'blade_nr', 'units',
'output_type', 'io_nr', 'io', 'dll', 'azimuth', 'flap_nr',
'direction'])
# start with reading the .sel file, containing the info regarding
# how to read the binary file and the channel information
......@@ -576,7 +566,7 @@ class LoadResults(ReadHawc2):
if not (not readdata and (self.FileType == 'GTSDF')):
self.N = int(self.NrSc)
self.Nch = int(self.NrCh)
self.ch_details = np.ndarray(shape=(self.Nch, 3), dtype='<U100')
self.ch_details = np.ndarray(shape=(self.Nch, 3), dtype='<U150')
for ic in range(self.Nch):
self.ch_details[ic, 0] = self.ChInfo[0][ic]
self.ch_details[ic, 1] = self.ChInfo[1][ic]
......@@ -588,737 +578,20 @@ class LoadResults(ReadHawc2):
stop = time() - start
print('time to load HAWC2 file:', stop, 's')
# TODO: THIS IS STILL A WIP
def _make_channel_names(self):
"""Give every channel a unique channel name which is (nearly) identical
to the channel names as defined in the htc output section. Instead
of spaces, use a semicolon (;) to separate the different commands.
THIS IS STILL A WIP
see also issue #11:
https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/issues/11
"""
index = {}
names = {'htc_name':[], 'chi':[], 'label':[], 'unit':[], 'index':[],
'name':[], 'description':[]}
constraint_fmts = {'bea1':'constraint;bearing1',
'bea2':'constraint;bearing2',
'bea3':'constraint;bearing3',
'bea4':'constraint;bearing4'}
# mbdy momentvec tower 1 1 global
force_fmts = {'F':'mbdy;forcevec;{body};{nodenr:03i};{coord};{comp}',
'M':'mbdy;momentvec;{body};{nodenr:03i};{coord};{comp}'}
state_fmt = 'mbdy;{state};{typ};{body};{elnr:03i};{zrel:01.02f};{coord}'
wind_coord_map = {'Vx':'1', 'Vy':'2', 'Vz':'3'}
wind_fmt = 'wind;{typ};{coord};{x};{y};{z};{comp}'
for ch in range(self.Nch):
name = self.ch_details[ch, 0]
name_items = misc.remove_items(name.split(' '), '')
description = self.ch_details[ch, 2]
descr_items = misc.remove_items(description.split(' '), '')
unit = self.ch_details[ch, 1]
# default names
htc_name = ' '.join(name_items+descr_items)
label = ''
coord = ''
typ = ''
elnr = ''
nodenr = ''
zrel = ''
state = ''
# CONSTRAINTS: BEARINGS
if name_items[0] in constraint_fmts:
htc_name = constraint_fmts[name_items[0]] + ';'
htc_name += (descr_items[0] + ';')
htc_name += unit
# MBDY FORCES/MOMENTS
elif name_items[0][0] in force_fmts:
comp = name_items[0]
if comp[0] == 'F':
i0 = 1
else:
i0 = 0
label = description.split('coo: ')[1].split(' ')[1]
coord = descr_items[i0+5]
body = descr_items[i0+1][5:]#.replace('Mbdy:', '')
nodenr = int(descr_items[i0+3])
htc_name = force_fmts[comp[0]].format(body=body, coord=coord,
nodenr=nodenr, comp=comp)
# STATE: POS, VEL, ACC, STATE_ROT
elif descr_items[0][:5] == 'State':
if name_items[0] == 'State':
i0 = 1
state = 'state'
else:
i0 = 0
state = 'state_rot'
typ = name_items[i0+0]
comp = name_items[i0+1]
coord = name_items[i0+3]
body = descr_items[3][5:]#.replace('Mbdy:', '')
elnr = int(descr_items[5])
zrel = float(descr_items[6][6:])#.replace('Z-rel:', ''))
if len(descr_items) > 8:
label = ' '.join(descr_items[9:])
htc_name = state_fmt.format(typ=typ, body=body, elnr=elnr,
zrel=zrel, coord=coord,
state=state)
# WINDSPEED
elif description[:9] == 'Free wind':
if descr_items[4] == 'gl.':
coord = '1' # global
else:
coord = '2' # non-rotating rotor coordinates
try:
comp = wind_coord_map[descr_items[3][:-1]]
typ = 'free_wind'
except KeyError:
comp = descr_items[3]
typ = 'free_wind_hor'
tmp = description.split('pos')[1]
x, y, z = tmp.split(',')
# z might hold a label....
z_items = z.split(' ')
if len(z_items) > 1:
label = ' '.join(z_items[1:])
z = z_items[0]
x, y, z = x.strip(), y.strip(), z.strip()
htc_name = wind_fmt.format(typ=typ, coord=coord, x=x, y=y, z=z,
comp=comp)
names['htc_name'].append(htc_name)
names['chi'].append(ch)
# this is the Channel column from the sel file, so the unique index
# which is dependent on the order of the channels
names['index'].append(ch+1)
names['unit'].append(unit)
names['name'].append(name)
names['description'].append(description)
names['label'].append(label)
names['state'].append(state)
names['type'].append(typ)
names['comp'].append(comp)
names['coord'].append(coord)
names['elnr'].append(coord)
names['nodenr'].append(coord)
names['zrel'].append(coord)
index[name] = ch
return names, index
def _unified_channel_names(self):
"""
Make certain channels independent from their index.
"""For backwards compatibiliity: to create an alternative sensor naming
scheme that is consistent and unique so you can always refer to a channel
name instead of its index in an output file.
The unified channel dictionary ch_dict holds consistently named
channels as the key, and all the information is stored in the value
as another dictionary.
The ch_dict key/value pairs are structured differently for different
types of channels. Currently supported channels are:
See wetb.hawc2.sensor_names.unified_channel_names instead.
For forcevec, momentvec, state commands:
node numbers start with 0 at the root
element numbers start with 1 at the root
key:
coord-bodyname-pos-sensortype-component
global-tower-node-002-forcevec-z
local-blade1-node-005-momentvec-z
hub1-blade1-elem-011-zrel-1.00-state pos-z
value:
ch_dict[tag]['coord']
ch_dict[tag]['bodyname']
ch_dict[tag]['pos']
ch_dict[tag]['sensortype']
ch_dict[tag]['component']
ch_dict[tag]['chi']
ch_dict[tag]['sensortag']
ch_dict[tag]['units']
For the DLL's this is:
key:
DLL-dll_name-io-io_nr
DLL-yaw_control-outvec-3
DLL-yaw_control-inpvec-1
value:
ch_dict[tag]['dll_name']
ch_dict[tag]['io']
ch_dict[tag]['io_nr']
ch_dict[tag]['chi']
ch_dict[tag]['sensortag']
ch_dict[tag]['units']
For the bearings this is:
key:
bearing-bearing_name-output_type-units
bearing-shaft_nacelle-angle_speed-rpm
value:
ch_dict[tag]['bearing_name']
ch_dict[tag]['output_type']
ch_dict[tag]['chi']
ch_dict[tag]['units']
Returns
-------
None.
For many of the aero sensors:
'Cl', 'Cd', 'Alfa', 'Vrel'
key:
sensortype-blade_nr-pos
Cl-1-0.01
value:
ch_dict[tag]['sensortype']
ch_dict[tag]['blade_nr']
ch_dict[tag]['pos']
ch_dict[tag]['chi']
ch_dict[tag]['units']
"""
# save them in a dictionary, use the new coherent naming structure
# as the key, and as value again a dict that holds all the different
# classifications: (chi, channel nr), (coord, coord), ...
self.ch_dict = dict()
# some channel ID's are unique, use them
ch_unique = set(['Omega', 'Ae rot. torque', 'Ae rot. power',
'Ae rot. thrust', 'Time', 'Azi 1'])
ch_aero = set(['Cl', 'Cd', 'Cm', 'Alfa', 'Vrel', 'Tors_e', 'Alfa',
'Lift', 'Drag'])
ch_aerogrid = set(['a_grid', 'am_grid', 'CT', 'CQ'])
# also save as df
# cols = set(['bearing_name', 'sensortag', 'bodyname', 'chi',
# 'component', 'pos', 'coord', 'sensortype', 'radius',
# 'blade_nr', 'units', 'output_type', 'io_nr', 'io', 'dll',
# 'azimuth', 'flap_nr'])
df_dict = {col: [] for col in self.cols}
df_dict['unique_ch_name'] = []
# scan through all channels and see which can be converted
# to sensible unified name
for ch in range(self.Nch):
items = self.ch_details[ch, 2].split(' ')
# remove empty values in the list
items = misc.remove_items(items, '')
dll = False
# be careful, identify only on the starting characters, because
# the signal tag can hold random text that in some cases might
# trigger a false positive
# -----------------------------------------------------------------
# check for all the unique channel descriptions
if self.ch_details[ch,0].strip() in ch_unique:
tag = self.ch_details[ch, 0].strip()
channelinfo = {}
channelinfo['units'] = self.ch_details[ch, 1]
channelinfo['sensortag'] = self.ch_details[ch, 2]
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# or in the long description:
# 0 1 2 3 4 5 6 and up
# MomentMz Mbdy:blade nodenr: 5 coo: blade TAG TEXT
elif self.ch_details[ch, 2].startswith('MomentM'):
coord = items[5]
bodyname = items[1].replace('Mbdy:', '')
# set nodenr in a sortable way, include leading zeros
# node numbers start with 0 at the root
nodenr = '%03i' % int(items[3])
# skip the attached component
# sensortype = items[0][:-2]
# or give the sensor type the same name as in HAWC2
sensortype = 'momentvec'
component = items[0][-1:len(items[0])]
# the tag only exists if defined
if len(items) > 6:
sensortag = ' '.join(items[6:])
else:
sensortag = ''
# and tag it
pos = 'node-%s' % nodenr
tagitems = (coord, bodyname, pos, sensortype, component)
tag = '%s-%s-%s-%s-%s' % tagitems
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['bodyname'] = bodyname
channelinfo['pos'] = pos
channelinfo['sensortype'] = sensortype
channelinfo['component'] = component
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
# -----------------------------------------------------------------
# 0 1 2 3 4 5 6 7 and up
# Force Fx Mbdy:blade nodenr: 2 coo: blade TAG TEXT
elif self.ch_details[ch, 2].startswith('Force'):
coord = items[6]
bodyname = items[2].replace('Mbdy:', '')
nodenr = '%03i' % int(items[4])
# skip the attached component
# sensortype = items[0]
# or give the sensor type the same name as in HAWC2
sensortype = 'forcevec'
component = items[1][1]
if len(items) > 7:
sensortag = ' '.join(items[7:])
else:
sensortag = ''
# and tag it
pos = 'node-%s' % nodenr
tagitems = (coord, bodyname, pos, sensortype, component)
tag = '%s-%s-%s-%s-%s' % tagitems
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['bodyname'] = bodyname
channelinfo['pos'] = pos
channelinfo['sensortype'] = sensortype
channelinfo['component'] = component
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
# -----------------------------------------------------------------
# ELEMENT STATES: pos, vel, acc, rot, ang
# 0 1 2 3 4 5 6 7 8
# State pos x Mbdy:blade E-nr: 1 Z-rel:0.00 coo: blade
# 0 1 2 3 4 5 6 7 8 9+
# State_rot proj_ang tx Mbdy:bname E-nr: 1 Z-rel:0.00 coo: cname label
# State_rot omegadot tz Mbdy:bname E-nr: 1 Z-rel:1.00 coo: cname label
elif self.ch_details[ch,2].startswith('State'):
# or self.ch_details[ch,0].startswith('euler') \
# or self.ch_details[ch,0].startswith('ax') \
# or self.ch_details[ch,0].startswith('omega') \
# or self.ch_details[ch,0].startswith('proj'):
coord = items[8]
bodyname = items[3].replace('Mbdy:', '')
# element numbers start with 1 at the root
elementnr = '%03i' % int(items[5])
zrel = '%04.2f' % float(items[6].replace('Z-rel:', ''))
# skip the attached component
#sensortype = ''.join(items[0:2])
# or give the sensor type the same name as in HAWC2
tmp = self.ch_details[ch, 0].split(' ')
sensortype = tmp[0]
if sensortype.startswith('State'):
sensortype += ' ' + tmp[1]
component = items[2]
if len(items) > 8:
sensortag = ' '.join(items[9:])
else:
sensortag = ''
# and tag it
pos = 'elem-%s-zrel-%s' % (elementnr, zrel)
tagitems = (coord, bodyname, pos, sensortype, component)
tag = '%s-%s-%s-%s-%s' % tagitems
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['bodyname'] = bodyname
channelinfo['pos'] = pos
channelinfo['sensortype'] = sensortype
channelinfo['component'] = component
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
# -----------------------------------------------------------------
# DLL CONTROL I/O
# there are three scenarios for how the channel description is formed;
# the channel id is always the same though
# id for all three cases:
# DLL out 1: 3
# DLL inp 2: 3
# description case 1 ("dll type2_dll b2h2 inpvec 30" in htc output)
# 0 1 2 3 4+
# yaw_control outvec 3 yaw_c input reference angle
# description case 2 ("dll inpvec 2 1" in htc output):
# 0 1 2 3 4 5 6+
# DLL : 2 inpvec : 4 mgen hss
# description case 3
# 0 1 2 4
# hawc_dll :echo outvec : 1
elif self.ch_details[ch, 0].startswith('DLL'):
# case 3
if items[0] == 'hawc_dll':
# hawc_dll named case (case 3) is polluted with colons
items = self.ch_details[ch,2].replace(':', '')
items = items.split(' ')
items = misc.remove_items(items, '')
dll = items[1]
io = items[2]
io_nr = items[3]
tag = 'DLL-%s-%s-%s' % (dll, io, io_nr)
sensortag = ''
# case 2: no reference to dll name
elif self.ch_details[ch,2].startswith('DLL'):
dll = items[2]
io = items[3]
io_nr = items[5]
sensortag = ' '.join(items[6:])
# and tag it
tag = 'DLL-%s-%s-%s' % (dll,io,io_nr)
# case 1: type2 dll name is given
else:
dll = items[0]
io = items[1]
io_nr = items[2]
sensortag = ' '.join(items[3:])
tag = 'DLL-%s-%s-%s' % (dll, io, io_nr)
# save all info in the dict
channelinfo = {}
channelinfo['dll'] = dll
channelinfo['io'] = io
channelinfo['io_nr'] = io_nr
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
channelinfo['sensortype'] = 'dll-io'
# -----------------------------------------------------------------
# BEARING OUTPUTS
# bea1 angle_speed rpm shaft_nacelle angle speed
elif self.ch_details[ch, 0].startswith('bea'):
output_type = self.ch_details[ch, 0].split(' ')[1]
bearing_name = items[0]
units = self.ch_details[ch, 1]
# there is no label option for the bearing output
# and tag it
tag = 'bearing-%s-%s-%s' % (bearing_name, output_type, units)
# save all info in the dict
channelinfo = {}
channelinfo['bearing_name'] = bearing_name
channelinfo['output_type'] = output_type
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# AS DEFINED IN: ch_aero
# AERO CL, CD, CM, VREL, ALFA, LIFT, DRAG, etc
# Cl, R= 0.5 deg Cl of blade 1 at radius 0.49
# Azi 1 deg Azimuth of blade 1
# NOTE THAT RADIUS FROM ch_details[ch, 0] REFERS TO THE RADIUS
# YOU ASKED FOR, AND ch_details[ch, 2] IS WHAT YOU GET, which is
# still based on a mean radius (deflections change the game)
elif self.ch_details[ch, 0].split(',')[0] in ch_aero:
dscr_list = self.ch_details[ch, 2].split(' ')
dscr_list = misc.remove_items(dscr_list, '')
sensortype = self.ch_details[ch, 0].split(',')[0]
# is this always valid?
blade_nr = self.ch_details[ch, 2].split('blade ')[1].split()[0]
# sometimes the units for aero sensors are wrong!
units = self.ch_details[ch, 1]
# there is no label option
# radius what you get
# radius = dscr_list[-1]
# radius what you asked for
tmp = self.ch_details[ch, 0].split('R=')
radius = misc.remove_items(tmp, '')[-1].strip()
# and tag it
tag = '%s-%s-%s' % (sensortype, blade_nr, radius)
# save all info in the dict
channelinfo = {}
channelinfo['sensortype'] = sensortype
channelinfo['radius'] = float(radius)
channelinfo['blade_nr'] = int(blade_nr)
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# for the induction grid over the rotor
# a_grid, azi 0.00 r 1.74
elif self.ch_details[ch, 0].split(',')[0] in ch_aerogrid:
items = self.ch_details[ch, 0].split(',')
sensortype = items[0]
items2 = items[1].split(' ')
items2 = misc.remove_items(items2, '')
azi = items2[1]
# radius what you asked for
radius = items2[3]
units = self.ch_details[ch, 1]
# and tag it
tag = '%s-azi-%s-r-%s' % (sensortype,azi,radius)
# save all info in the dict
channelinfo = {}
channelinfo['sensortype'] = sensortype
channelinfo['radius'] = float(radius)
channelinfo['azimuth'] = float(azi)
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# INDUCTION AT THE BLADE
# 0: Induc. Vz, rpco, R= 1.4
# 1: m/s
# 2: Induced wsp Vz of blade 1 at radius 1.37, RP. coo.
# Induc. Vx, locco, R= 1.4
# Induced wsp Vx of blade 1 at radius 1.37, local ae coo.
# Induc. Vy, blco, R= 1.4
# Induced wsp Vy of blade 1 at radius 1.37, local bl coo.
# Induc. Vz, glco, R= 1.4
# Induced wsp Vz of blade 1 at radius 1.37, global coo.
# Induc. Vx, rpco, R= 8.4
# Induced wsp Vx of blade 1 at radius 8.43, RP. coo.
elif self.ch_details[ch, 0].strip()[:5] == 'Induc':
items = self.ch_details[ch, 2].split(' ')
items = misc.remove_items(items, '')
coord = self.ch_details[ch, 2].split(', ')[1].strip()
blade_nr = int(items[5])
# radius what you get
# radius = float(items[8].replace(',', ''))
# radius what you asked for
tmp = self.ch_details[ch, 0].split(' ')
radius = float(misc.remove_items(tmp, '')[-1])
items = self.ch_details[ch, 0].split(',')
component = items[0][-2:]
units = self.ch_details[ch, 1]
# and tag it
rpl = (coord, blade_nr, component, radius)
tag = 'induc-%s-blade-%1i-%s-r-%03.01f' % rpl
# save all info in the dict
channelinfo = {}
channelinfo['blade_nr'] = blade_nr
channelinfo['sensortype'] = 'induction'
channelinfo['radius'] = radius
channelinfo['coord'] = coord
channelinfo['component'] = component
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# MORE AERO SENSORS
# Ae intfrc Fx, rpco, R= 0.0
# Aero int. force Fx of blade 1 at radius 0.00, RP coo.
# Ae secfrc Fy, R= 25.0
# Aero force Fy of blade 1 at radius 24.11
# Ae pos x, glco, R= 88.2
# Aero position x of blade 1 at radius 88.17, global coo.
elif self.ch_details[ch, 0].strip()[:2] == 'Ae':
units = self.ch_details[ch, 1]
items = self.ch_details[ch, 2].split(' ')
items = misc.remove_items(items, '')
# find blade number
tmp = self.ch_details[ch, 2].split('blade ')[1].strip()
blade_nr = int(tmp.split(' ')[0])
tmp = self.ch_details[ch, 2].split('radius ')[1].strip()
tmp = tmp.split(',')
# radius what you get
# radius = float(tmp[0])
# radius what you asked for (use a separate name so the coord check
# below still refers to the split on 'radius ...' from above)
tmp0 = self.ch_details[ch, 0].split(' ')
radius = float(misc.remove_items(tmp0, '')[-1])
if len(tmp) > 1:
coord = tmp[1].strip()
else:
coord = 'aero'
items = self.ch_details[ch, 0].split(' ')
sensortype = items[1]
component = items[2].replace(',', '')
# save all info in the dict
channelinfo = {}
channelinfo['blade_nr'] = blade_nr
channelinfo['sensortype'] = sensortype
channelinfo['radius'] = radius
channelinfo['coord'] = coord
channelinfo['component'] = component
channelinfo['units'] = units
channelinfo['chi'] = ch
rpl = (coord, blade_nr, sensortype, component, radius)
tag = 'aero-%s-blade-%1i-%s-%s-r-%03.01f' % rpl
# TODO: wind speed
# some spaces have been trimmed here
# WSP gl. coo.,Vy m/s
# // Free wind speed Vy, gl. coo, of gl. pos 0.00, 0.00, -2.31
# WSP gl. coo.,Vdir_hor deg
# Free wind speed Vdir_hor, gl. coo, of gl. pos 0.00, 0.00, -2.31
# -----------------------------------------------------------------
# WATER SURFACE gl. coo, at gl. coo, x,y= 0.00, 0.00
elif self.ch_details[ch, 2].startswith('Water'):
units = self.ch_details[ch, 1]
# but remove the comma
x = items[-2][:-1]
y = items[-1]
# and tag it
tag = 'watersurface-global-%s-%s' % (x, y)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = 'global'
channelinfo['pos'] = (float(x), float(y))
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# WIND SPEED
# WSP gl. coo.,Vx
# Free wind speed Vx, gl. coo, of gl. pos 0.00, 0.00, -6.00 LABEL
elif self.ch_details[ch, 0].startswith('WSP gl.'):
units = self.ch_details[ch, 1]
direction = self.ch_details[ch, 0].split(',')[1]
tmp = self.ch_details[ch, 2].split('pos')[1]
x, y, z = tmp.split(',')
x, y, z = x.strip(), y.strip(), z.strip()
tmp = z.split(' ')
sensortag = ''
if len(tmp) == 2:
z, sensortag = tmp
elif len(tmp) == 1:
z = tmp[0]
# and tag it
tag = 'windspeed-global-%s-%s-%s-%s' % (direction, x, y, z)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = 'global'
channelinfo['pos'] = (x, y, z)
channelinfo['units'] = units
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
# FIXME: direction is the same as component, right?
channelinfo['direction'] = direction
channelinfo['sensortype'] = 'wsp-global'
# WIND SPEED AT BLADE
# 0: WSP Vx, glco, R= 61.5
# 2: Wind speed Vx of blade 1 at radius 61.52, global coo.
elif self.ch_details[ch, 0].startswith('WSP V'):
units = self.ch_details[ch, 1].strip()
tmp = self.ch_details[ch, 0].split(' ')[1].strip()
direction = tmp.replace(',', '')
blade_nr = self.ch_details[ch, 2].split('blade')[1].strip()[:2]
coord = self.ch_details[ch, 2].split(',')[1].strip()
blade_nr = blade_nr.strip()
# radius what you get
# radius = self.ch_details[ch, 2].split('radius')[1].split(',')[0]
# radius = radius.strip()
# radius what you asked for
tmp = self.ch_details[ch, 0].split(' ')
radius = misc.remove_items(tmp, '')[-1].strip()
# and tag it
rpl = (direction, blade_nr, radius, coord)
tag = 'wsp-blade-%s-%s-%s-%s' % rpl
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
# FIXME: direction is the same as component, right?
channelinfo['direction'] = direction
channelinfo['blade_nr'] = int(blade_nr)
channelinfo['radius'] = float(radius)
channelinfo['units'] = units
channelinfo['chi'] = ch
channelinfo['sensortype'] = 'wsp-blade'
# FLAP ANGLE
# 2: Flap angle for blade 3 flap number 1
elif self.ch_details[ch, 0][:7] == 'setbeta':
units = self.ch_details[ch, 1].strip()
blade_nr = self.ch_details[ch, 2].split('blade')[1].strip()
blade_nr = blade_nr.split(' ')[0].strip()
flap_nr = self.ch_details[ch, 2].split(' ')[-1].strip()
blade_nr = blade_nr.strip()
# and tag it
tag = 'setbeta-bladenr-%s-flapnr-%s' % (blade_nr, flap_nr)
# save all info in the dict
channelinfo = {}
channelinfo['flap_nr'] = int(flap_nr)
channelinfo['blade_nr'] = int(blade_nr)
channelinfo['units'] = units
channelinfo['chi'] = ch
# harmonic channel output
# Harmonic
# Harmonic sinus function
elif self.ch_details[ch, 0][:7] == 'Harmoni':
func_name = ' '.join(self.ch_details[ch, 1].split(' ')[1:])
channelinfo = {}
channelinfo['output_type'] = func_name
channelinfo['sensortype'] = 'harmonic'
channelinfo['chi'] = ch
base = self.ch_details[ch,2].strip().lower().replace(' ', '_')
tag = base + '_0'
if tag in self.ch_dict:
tag_nr = int(tag.split('_')[-1]) + 1
tag = base + '_%i' % tag_nr
# -----------------------------------------------------------------
# If all this fails, just combine channel name and description
else:
tag = '-'.join(self.ch_details[ch,:3].tolist())
channelinfo = {}
channelinfo['chi'] = ch
channelinfo['units'] = self.ch_details[ch, 1].strip()
# -----------------------------------------------------------------
# add a v_XXX tag in case the channel already exists
if tag in self.ch_dict:
jj = 1
while True:
tag_new = tag + '_v%i' % jj
if tag_new in self.ch_dict:
jj += 1
else:
tag = tag_new
break
self.ch_dict[tag] = copy.copy(channelinfo)
# -----------------------------------------------------------------
# save in for DataFrame format
cols_ch = set(channelinfo.keys())
for col in cols_ch:
df_dict[col].append(channelinfo[col])
# the remaining columns have not been set yet: fill them in blank
for col in (self.cols - cols_ch):
df_dict[col].append('')
df_dict['unique_ch_name'].append(tag)
self.ch_df = pd.DataFrame(df_dict)
self.ch_df.set_index('chi', inplace=True)
self.ch_dict, self.ch_df = unified_channel_names(self.ChInfo)
def _ch_dict2df(self):
"""
......@@ -1427,7 +700,7 @@ class LoadResults(ReadHawc2):
stats['range'] = stats['max'] - stats['min']
stats['absmax'] = np.absolute(sig[i0:i1, :]).max(axis=0)
stats['rms'] = np.sqrt(np.mean(sig[i0:i1, :]*sig[i0:i1, :], axis=0))
stats['int'] = integrate.trapz(sig[i0:i1, :], x=sig[i0:i1, 0], axis=0)
stats['int'] = trapezoid(sig[i0:i1, :], x=sig[i0:i1, 0], axis=0)
return stats
def statsdel_df(self, i0=0, i1=None, statchans='all', delchans='all',
......@@ -1494,8 +767,8 @@ class LoadResults(ReadHawc2):
statsdel['range'] = statsdel['max'] - statsdel['min']
statsdel['absmax'] = np.abs(datasel).max(axis=0)
statsdel['rms'] = np.sqrt(np.mean(datasel*datasel, axis=0))
statsdel['int'] = integrate.trapz(datasel, x=time, axis=0)
statsdel['intabs'] = integrate.trapz(np.abs(datasel), x=time, axis=0)
statsdel['int'] = trapezoid(datasel, x=time, axis=0)
statsdel['intabs'] = trapezoid(np.abs(datasel), x=time, axis=0)
if neq is None:
neq = self.sig[-1,0] - self.sig[0,0]
......@@ -1503,7 +776,7 @@ class LoadResults(ReadHawc2):
for chi, chan in zip(delchis, delchans):
signal = self.sig[i0:i1,chi]
eq = self.calc_fatigue(signal, no_bins=no_bins, neq=neq, m=m)
statsdel.loc[chan][m_cols] = eq
statsdel.loc[chan, m_cols] = eq
return statsdel
......@@ -1584,6 +857,49 @@ class LoadResults(ReadHawc2):
return np.array(zvals), np.array(yvals)
def add_channel(self, data, name, units, description='', options=None):
"""Add a channel to self.sig and self.ch_df such that self.statsdel_df
also calculates the statistics for this channel.
Parameters
----------
data : np.ndarray(n, 1)
Array containing the new channel. Should be of shape (n, 1). If not
it will be reshaped to (len(data),1).
name : str
Unique name of the new channel
units : str
Channel units
description : str, default=''
channel description
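options : dict, default=None
    Optional meta-data for the new row in ``self.ch_df`` (for example
    'blade_nr' or 'radius'); columns not given default to an empty string.
Examples
--------
A hypothetical sketch (channel indices and names are made up), assuming
``res`` is a loaded result object:
>>> yaw_err = res.sig[:, [10]] - res.sig[:, [11]]
>>> res.add_channel(yaw_err, 'yaw-error', 'deg',
...                 description='difference between two yaw channels')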
"""
# valid keys for self.res.ch_df
# add = {'radius':np.nan, 'bearing_name':'', 'azimuth':np.nan, 'coord':'',
# 'sensortype':'', 'io_nr':np.nan, 'wake_source_nr':np.nan,
# 'dll':'', 'direction':'', 'blade_nr':np.nan, 'bodyname':'',
# 'pos':'', 'flap_nr':'', 'sensortag':'', 'component':'', 'units':'',
# 'io':'', 'unique_ch_name':'new_channel'}
add = {k:'' for k in self.ch_df.columns}
if options is not None:
add.update(options)
add['unique_ch_name'] = name
row = [add[k] for k in self.ch_df.columns]
# add the meta-data to ch_df and ch_details
self.ch_df.loc[len(self.ch_df)] = row
cols = [[name, units, description]]
self.ch_details = np.append(self.ch_details, cols, axis=0)
# and add to the results array
if data.shape != (len(data),1):
data = data.reshape(len(data),1)
self.sig = np.append(self.sig, data, axis=1)
def save_chan_names(self, fname):
"""Save unique channel names to text file.
"""
......@@ -1649,6 +965,16 @@ class LoadResults(ReadHawc2):
self.ch_details
self.ch_dict
def save_matlab(self, fname):
"""Save output in Matlab format.
"""
# all channels
details = np.zeros((self.sig.shape[1],4), dtype=object)
for i in range(self.sig.shape[1]):
details[i,0:3] = self.ch_details[i,:]
details[i,3] = self.ch_df.loc[i,'unique_ch_name']
sio.savemat(fname, {'sig':self.sig, 'description':details})
def ReadOutputAtTime(fname):
"""Distributed blade loading as generated by the HAWC2 output_at_time
......@@ -1668,7 +994,7 @@ def ReadOutputAtTime(fname):
# data.index.names = cols
# because the formatting is really weird, we need to sanitize it a bit
with opent(fname, 'r') as f:
with open(fname, 'r') as f:
# read the header from line 3
for k in range(7):
line = f.readline()
......@@ -1706,7 +1032,7 @@ def ReadEigenBody(fname, debug=False):
# Body data for body number : 3 with the name :nacelle
# Results: fd [Hz] fn [Hz] log.decr [%]
# Mode nr: 1: 1.45388E-21 1.74896E-03 6.28319E+02
FILE = opent(fname)
FILE = open(fname)
lines = FILE.readlines()
FILE.close()
......@@ -1753,7 +1079,7 @@ def ReadEigenBody(fname, debug=False):
return pd.DataFrame(df_dict)
def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500):
def ReadEigenStructure(fname, debug=False):
"""
Read HAWC2 structure eigenalysis result file
============================================
......@@ -1804,7 +1130,7 @@ def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500):
# 8 Mode nr: 1: 3.58673E+00 3.58688E+00 5.81231E+00
# Mode nr:294: 0.00000E+00 6.72419E+09 6.28319E+02
FILE = opent(os.path.join(file_path, file_name))
FILE = open(fname)
lines = FILE.readlines()
FILE.close()
......@@ -1813,25 +1139,52 @@ def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500):
# we know the number of modes from the number of lines
nrofmodes = len(lines) - header_lines
modes_arr = np.ndarray((3, nrofmodes))
df = pd.DataFrame(np.ndarray((nrofmodes, 3)), dtype=np.float64,
columns=['Fd_hz', 'Fn_hz', 'log_decr_pct'])
for i, line in enumerate(lines):
if i > max_modes:
# cut off the unused rest
modes_arr = modes_arr[:, :i]
break
# if i > max_modes:
# # cut off the unused rest
# df.iloc[:,i] = modes_arr[:, :i]
# break
# ignore the header
if i < header_lines:
continue
# split up mode nr from the rest
parts = line.split(':')
# modenr = int(parts[1])
# split up mode nr from the rest, remove line ending
parts = line[:-1].split(':')
#modenr = int(parts[1])
# get fd, fn and damping, but remove all empty items on the list
modes_arr[:, i-header_lines]=misc.remove_items(parts[2].split(' '), '')
# also cut off line
df.iloc[i-header_lines,:]=np.float64(misc.remove_items(parts[2].split(' '), ''))
return df
return modes_arr
def ReadStructInertia(fname):
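"""Read the body inertia table from a HAWC2 structure inertia output
file and return it as a DataFrame with one row per body (the column
names are taken from the file header).
"""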
with open(fname) as f:
lines = f.readlines()
marks = []
for i, line in enumerate(lines):
if line.startswith('_________'):
marks.append(i)
header = ['body_name'] + lines[7].split()[2:]
data = lines[9:marks[4]]
bodies = {i:[] for i in header}
for row in data:
row_els = row[:-1].split()
for colname, col in zip(header, row_els):
bodies[colname].append(col)
bodies = pd.DataFrame(bodies)
for k in header[1:]:
bodies[k] = bodies[k].astype(float)
return bodies
class UserWind(object):
......@@ -2049,7 +1402,7 @@ class UserWind(object):
u_comp, v_comp, w_comp, v_coord, w_coord, phi_deg
"""
# read the header
with opent(fname) as f:
with open(fname) as f:
for i, line in enumerate(f.readlines()):
if line.strip()[0] != '#':
nr_v, nr_w = misc.remove_items(line.split('#')[0].split(), '')
......@@ -2179,260 +1532,6 @@ class WindProfiles(object):
return a_phi * t1 * t2 * t3
class Turbulence(object):
def __init__(self):
pass
def read_hawc2(self, fpath, shape):
"""
Read the HAWC2 turbulence format
"""
fid = open(fpath, 'rb')
tmp = np.fromfile(fid, 'float32', shape[0]*shape[1]*shape[2])
turb = np.reshape(tmp, shape)
return turb
def read_bladed(self, fpath, basename):
fid = open(fpath + basename + '.wnd', 'rb')
R1 = struct.unpack('h', fid.read(2))[0]
R2 = struct.unpack('h', fid.read(2))[0]
turb = struct.unpack('i', fid.read(4))[0]
lat = struct.unpack('f', fid.read(4))[0]
rough = struct.unpack('f', fid.read(4))[0]
refh = struct.unpack('f', fid.read(4))[0]
longti = struct.unpack('f', fid.read(4))[0]
latti = struct.unpack('f', fid.read(4))[0]
vertti = struct.unpack('f', fid.read(4))[0]
dv = struct.unpack('f', fid.read(4))[0]
dw = struct.unpack('f', fid.read(4))[0]
du = struct.unpack('f', fid.read(4))[0]
halfalong = struct.unpack('i', fid.read(4))[0]
mean_ws = struct.unpack('f', fid.read(4))[0]
VertLongComp = struct.unpack('f', fid.read(4))[0]
LatLongComp = struct.unpack('f', fid.read(4))[0]
LongLongComp = struct.unpack('f', fid.read(4))[0]
Int = struct.unpack('i', fid.read(4))[0]
seed = struct.unpack('i', fid.read(4))[0]
VertGpNum = struct.unpack('i', fid.read(4))[0]
LatGpNum = struct.unpack('i', fid.read(4))[0]
VertLatComp = struct.unpack('f', fid.read(4))[0]
LatLatComp = struct.unpack('f', fid.read(4))[0]
LongLatComp = struct.unpack('f', fid.read(4))[0]
VertVertComp = struct.unpack('f', fid.read(4))[0]
LatVertComp = struct.unpack('f', fid.read(4))[0]
LongVertComp = struct.unpack('f', fid.read(4))[0]
points = np.fromfile(fid, 'int16', 2*halfalong*VertGpNum*LatGpNum*3)
fid.close()
return points
def convert2bladed(self, fpath, basename, shape=(4096,32,32)):
"""
Convert turbulence box to BLADED format
"""
u = self.read_hawc2(fpath + basename + 'u.bin', shape)
v = self.read_hawc2(fpath + basename + 'v.bin', shape)
w = self.read_hawc2(fpath + basename + 'w.bin', shape)
# mean velocity components at the center of the box
v1, v2 = (shape[1]//2)-1, shape[1]//2
w1, w2 = (shape[2]//2)-1, shape[2]//2
ucent = (u[:, v1, w1] + u[:, v1, w2] + u[:, v2, w1] + u[:, v2, w2]) / 4.0
vcent = (v[:, v1, w1] + v[:, v1, w2] + v[:, v2, w1] + v[:, v2, w2]) / 4.0
wcent = (w[:, v1, w1] + w[:, v1, w2] + w[:, v2, w1] + w[:, v2, w2]) / 4.0
# FIXME: where is this range 351:7374 coming from?? The original script
# considered a box of length 8192
umean = np.mean(ucent[351:7374])
vmean = np.mean(vcent[351:7374])
wmean = np.mean(wcent[351:7374])
ustd = np.std(ucent[351:7374])
vstd = np.std(vcent[351:7374])
wstd = np.std(wcent[351:7374])
# gives a slightly different outcome, but is that difference significant?
# umean = np.mean(u[351:7374,15:17,15:17])
# vmean = np.mean(v[351:7374,15:17,15:17])
# wmean = np.mean(w[351:7374,15:17,15:17])
# this is wrong since we want the std on the center point
# ustd = np.std(u[351:7374,15:17,15:17])
# vstd = np.std(v[351:7374,15:17,15:17])
# wstd = np.std(w[351:7374,15:17,15:17])
iu = np.zeros(shape)
iv = np.zeros(shape)
iw = np.zeros(shape)
iu[:, :, :] = (u - umean)/ustd*1000.0
iv[:, :, :] = (v - vmean)/vstd*1000.0
iw[:, :, :] = (w - wmean)/wstd*1000.0
# because MATLAB and Octave do a round when casting from float to int,
# and Python does a floor, we have to round first
np.around(iu, decimals=0, out=iu)
np.around(iv, decimals=0, out=iv)
np.around(iw, decimals=0, out=iw)
return iu.astype(np.int16), iv.astype(np.int16), iw.astype(np.int16)
def write_bladed(self, fpath, basename, shape):
"""
Write turbulence BLADED file
"""
# TODO: get these parameters from a HAWC2 input file
seed = 6
mean_ws = 11.4
turb = 3
R1 = -99
R2 = 4
du = 0.974121094
dv = 4.6875
dw = 4.6875
longti = 14
latti = 9.8
vertti = 7
iu, iv, iw = self.convert2bladed(fpath, basename, shape=shape)
fid = open(fpath + basename + '.wnd', 'wb')
fid.write(struct.pack('h', R1)) # R1
fid.write(struct.pack('h', R2)) # R2
fid.write(struct.pack('i', turb)) # Turb
fid.write(struct.pack('f', 999)) # Lat
fid.write(struct.pack('f', 999)) # rough
fid.write(struct.pack('f', 999)) # refh
fid.write(struct.pack('f', longti)) # LongTi
fid.write(struct.pack('f', latti)) # LatTi
fid.write(struct.pack('f', vertti)) # VertTi
fid.write(struct.pack('f', dv)) # VertGpSpace
fid.write(struct.pack('f', dw)) # LatGpSpace
fid.write(struct.pack('f', du)) # LongGpSpace
fid.write(struct.pack('i', shape[0]//2)) # HalfAlong
fid.write(struct.pack('f', mean_ws)) # meanWS
fid.write(struct.pack('f', 999.)) # VertLongComp
fid.write(struct.pack('f', 999.)) # LatLongComp
fid.write(struct.pack('f', 999.)) # LongLongComp
fid.write(struct.pack('i', 999)) # Int
fid.write(struct.pack('i', seed)) # Seed
fid.write(struct.pack('i', shape[1])) # VertGpNum
fid.write(struct.pack('i', shape[2])) # LatGpNum
fid.write(struct.pack('f', 999)) # VertLatComp
fid.write(struct.pack('f', 999)) # LatLatComp
fid.write(struct.pack('f', 999)) # LongLatComp
fid.write(struct.pack('f', 999)) # VertVertComp
fid.write(struct.pack('f', 999)) # LatVertComp
fid.write(struct.pack('f', 999)) # LongVertComp
# fid.flush()
# bladed2 = np.ndarray((shape[0], shape[2], shape[1], 3), dtype=np.int16)
# for i in xrange(shape[0]):
# for k in xrange(shape[1]):
# for j in xrange(shape[2]):
# fid.write(struct.pack('i', iu[i, shape[1]-j-1, k]))
# fid.write(struct.pack('i', iv[i, shape[1]-j-1, k]))
# fid.write(struct.pack('i', iw[i, shape[1]-j-1, k]))
# bladed2[i,k,j,0] = iu[i, shape[1]-j-1, k]
# bladed2[i,k,j,1] = iv[i, shape[1]-j-1, k]
# bladed2[i,k,j,2] = iw[i, shape[1]-j-1, k]
# re-arrange array for bladed format
bladed = np.ndarray((shape[0], shape[2], shape[1], 3), dtype=np.int16)
bladed[:, :, :, 0] = iu[:, ::-1, :]
bladed[:, :, :, 1] = iv[:, ::-1, :]
bladed[:, :, :, 2] = iw[:, ::-1, :]
bladed_swap_view = bladed.swapaxes(1,2)
bladed_swap_view.tofile(fid, format='%int16')
fid.flush()
fid.close()
class Bladed(object):
def __init__(self):
"""
Some BLADED results I have seen are just weird text files. Convert
them to a more convenient format.
path/to/file
channel 1 description
col a name/unit col b name/unit
a0 b0
a1 b1
...
path/to/file
channel 2 description
col a name/unit col b name/unit
...
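A hypothetical usage sketch (the file name is made up); when chans and
iters are not given they are inferred from the file itself:
>>> df = Bladed().read('some_bladed_results.txt')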
"""
pass
def infer_format(self, lines):
"""
Figure out how many channels and time steps are included
"""
count = 1
for line in lines[1:]:
if line == lines[0]:
break
count += 1
iters = count - 3
chans = len(lines) / (iters + 3)
return int(chans), int(iters)
def read(self, fname, chans=None, iters=None, enc='cp1252'):
"""
Parameters
----------
fname : str
chans : int, default=None
iters : int, default=None
enc : str, default='cp1252'
character encoding of the source file. Usually BLADED is used on
windows so Western-European windows encoding is a safe bet.
"""
with codecs.open(fname, 'r', enc) as f:
lines = f.readlines()
nrl = len(lines)
if chans is None and iters is None:
chans, iters = self.infer_format(lines)
if iters is not None:
chans = int(nrl / (iters + 3))
if chans is not None:
iters = int((nrl / chans) - 3)
# file_head = [ [k[:-2],0] for k in lines[0:nrl:iters+3] ]
# chan_head = [ [k[:-2],0] for k in lines[1:nrl:iters+3] ]
# cols_head = [ k.split('\t')[:2] for k in lines[2:nrl:iters+3] ]
data = {}
for k in range(chans):
# take the column header from the 3rd comment line
head = lines[2 + (3 + iters)*k][:-2].split('\t')[1].encode('utf-8')
i0 = 3 + (3 + iters)*k
i1 = i0 + iters
data[head] = np.array([k[:-2].split('\t')[1] for k in lines[i0:i1:1]])
data[head] = data[head].astype(np.float64)
time = np.array([k[:-2].split('\t')[0] for k in lines[i0:i1:1]])
df = pd.DataFrame(data, index=time.astype(np.float64))
df.index.name = lines[0][:-2]
return df
if __name__ == '__main__':
pass
......@@ -19,11 +19,6 @@ Command line options
Author: Jenni Rinker, rink@dtu.dk
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from argparse import ArgumentParser
import numpy as np
import os
......
'''
Created on 29/05/2013
@author: Mads M. Pedersen (mmpe@dtu.dk)
'''
import cython
import numpy as np
#cimport numpy as np
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
def cy_low_pass_filter(inp, delta_t, tau): #cpdef cy_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
#cdef np.ndarray[double,ndim=1] output
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] + alpha * (inp[i] - output[i - 1]) # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
return output
def cy_dynamic_low_pass_filter(inp, delta_t, tau, method=1): #cpdef cy_dynamic_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, np.ndarray[double,ndim=1] tau, int method=1):
#cdef np.ndarray[double,ndim=1] output, alpha
#cdef int i
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
if method == 1:
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1]) # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
elif method == 2:
for i in range(1, inp.shape[0]):
output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) + 2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
elif method == 3:
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
return output
def cy_dynamic_low_pass_filter_2d(inp, delta_t, tau, method=1): #cpdef cy_dynamic_low_pass_filter_2d(np.ndarray[double,ndim=2] inp, double delta_t, np.ndarray[double,ndim=1] tau, int method=1):
#cdef np.ndarray[double,ndim=2] output, alpha
#cdef int i
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
if method == 1:
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1]) # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
elif method == 2:
for i in range(1, inp.shape[0]):
output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) + 2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
elif method == 3:
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
return output
def cy_dynamic_low_pass_filter_test(inp): #cpdef cy_dynamic_low_pass_filter_test(np.ndarray[double,ndim=2] inp):
#cdef np.ndarray[double,ndim=2] output, alpha
#cdef int i
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
for i in range(1, inp.shape[0]):
output[i] = inp[i]
return output
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
def cy_high_pass_filter(inp, delta_t, tau): #cpdef cy_high_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
#cdef np.ndarray[double,ndim=1] output
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
alpha = tau / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = alpha * (output[i - 1] + inp[i] - inp[i - 1])
return output
import cython
import numpy as np
cimport numpy as np
'''
Created on 29/05/2013
@author: Mads M. Pedersen (mmpe@dtu.dk)
'''
import cython
import numpy as np
cimport numpy as np
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
cpdef cy_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
cdef np.ndarray[double,ndim=1] output
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] + alpha * (inp[i] - output[i - 1]) # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
return output
cpdef cy_dynamic_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, np.ndarray[double,ndim=1] tau, int method):
cdef np.ndarray[double,ndim=1] output, alpha
cdef int i
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
if method == 1:
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1]) # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
elif method == 2:
for i in range(1, inp.shape[0]):
output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) + 2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
elif method == 3:
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
return output
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
cpdef cy_high_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
cdef np.ndarray[double,ndim=1] output
output = np.empty_like(inp, dtype=np.float)
output[0] = inp[0]
alpha = tau / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = alpha * (output[i - 1] + inp[i] - inp[i - 1])
return output
......@@ -4,19 +4,87 @@ Created on 10/01/2015
@author: mmpe
'''
import numpy as np
from numba.core.decorators import njit
def low_pass(input, delta_t, tau, method=1):
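"""Low-pass filter ``input`` with sample time ``delta_t`` and time
constant ``tau``. A scalar ``tau`` gives a constant first order filter;
an array ``tau`` (one value per sample) gives a time-varying filter,
where ``method`` selects the discretization (see
dynamic_low_pass_filter). A small usage sketch with made-up data:
>>> import numpy as np
>>> t = np.linspace(0, 10, 1001)
>>> noisy = np.sin(t) + 0.1*np.random.randn(t.size)
>>> smooth = low_pass(noisy, t[1] - t[0], 1.0)
"""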
from wetb.signal.filters import cy_filters
if isinstance(tau, (int, float)):
return cy_filters.cy_low_pass_filter(input.astype(np.float64), delta_t, tau)
return low_pass_filter(input.astype(np.float64), delta_t, tau)
else:
if len(input.shape)==2:
return cy_filters.cy_dynamic_low_pass_filter_2d(input.astype(np.float64), delta_t, tau, method)
if len(input.shape) == 2:
return dynamic_low_pass_filter_2d(input.astype(np.float64), delta_t, tau, method)
else:
return cy_filters.cy_dynamic_low_pass_filter(input.astype(np.float64), delta_t, tau, method)
return dynamic_low_pass_filter(input.astype(np.float64), delta_t, tau, method)
def high_pass(input, delta_t, tau):
from wetb.signal.filters import cy_filters
return high_pass_filter(input.astype(np.float64), delta_t, tau)
@njit(cache=True)
def low_pass_filter(inp, delta_t, tau):
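"""First order (exponential) low-pass filter:
output[i] = output[i-1] + alpha*(inp[i] - output[i-1]) with
alpha = delta_t/(tau + delta_t)."""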
output = np.empty_like(inp, dtype=np.float32)
output[0] = inp[0]
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
# Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
output[i] = output[i - 1] + alpha * (inp[i] - output[i - 1])
return output
@njit(cache=True)
def dynamic_low_pass_filter(inp, delta_t, tau, method=1):
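"""Low-pass filter with a time-varying time constant tau (one value per
sample). method=1: forward Euler update with alpha[i] = delta_t/(tau[i] + delta_t);
method=2: trapezoidal (bilinear) discretization; method=3: exact
exponential update via exp(-delta_t/tau[i])."""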
output = np.empty_like(inp, dtype=np.float32)
output[0] = inp[0]
if method == 1:
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
# Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])
elif method == 2:
for i in range(1, inp.shape[0]):
output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) +
2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
elif method == 3:
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
return output
@njit(cache=True)
def dynamic_low_pass_filter_2d(inp, delta_t, tau, method=1):
output = np.empty_like(inp, dtype=np.float32)
output[0] = inp[0]
if method == 1:
alpha = delta_t / (tau + delta_t)
for i in range(1, inp.shape[0]):
# Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])
elif method == 2:
for i in range(1, inp.shape[0]):
output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) +
2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
elif method == 3:
for i in range(1, inp.shape[0]):
output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
return output
@njit(cache=True)
def high_pass_filter(inp, delta_t, tau):
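"""First order high-pass filter:
output[i] = alpha*(output[i-1] + inp[i] - inp[i-1]) with
alpha = tau/(tau + delta_t)."""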
output = np.empty_like(inp, dtype=np.float32)
output[0] = inp[0]
alpha = tau / (tau + delta_t)
for i in range(1, inp.shape[0]):
output[i] = alpha * (output[i - 1] + inp[i] - inp[i - 1])
return cy_filters.cy_high_pass_filter(input.astype(np.float64), delta_t, tau)
return output